From 75e584aaf908961aedbfeff47b5e4bca001c5585 Mon Sep 17 00:00:00 2001 From: "mingyu.park" Date: Tue, 25 Feb 2025 10:12:43 +0900 Subject: [PATCH 001/388] Add SetProxy and support for set operations in multiprocessing managers for Python 3.14.0a5+ (https://github.com/python/cpython/pull/129949) --- stdlib/multiprocessing/managers.pyi | 57 ++++++++++++++++++++++++++++- 1 file changed, 56 insertions(+), 1 deletion(-) diff --git a/stdlib/multiprocessing/managers.pyi b/stdlib/multiprocessing/managers.pyi index ad5697e0ab1c..a5190e2fcb98 100644 --- a/stdlib/multiprocessing/managers.pyi +++ b/stdlib/multiprocessing/managers.pyi @@ -2,7 +2,17 @@ import queue import sys import threading from _typeshed import SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT -from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, MutableSequence, Sequence +from collections.abc import ( + Callable, + Iterable, + Iterator, + Mapping, + MutableMapping, + MutableSequence, + MutableSet, + Sequence, + Set as AbstractSet, +) from types import TracebackType from typing import Any, AnyStr, ClassVar, Generic, SupportsIndex, TypeVar, overload from typing_extensions import Self, TypeAlias @@ -21,6 +31,7 @@ if sys.version_info >= (3, 9): _T = TypeVar("_T") _KT = TypeVar("_KT") _VT = TypeVar("_VT") +_S = TypeVar("_S") class Namespace: def __init__(self, **kwds: Any) -> None: ... @@ -115,6 +126,45 @@ else: def items(self) -> list[tuple[_KT, _VT]]: ... # type: ignore[override] def values(self) -> list[_VT]: ... # type: ignore[override] +if sys.version_info >= (3, 14): + class _BaseSetProxy(BaseProxy, MutableSet[_T]): + __builtins__: ClassVar[dict[str, Any]] + # Copied from builtins.set + def add(self, element: _T, /) -> None: ... + def copy(self) -> set[_T]: ... + def difference(self, *s: Iterable[Any]) -> set[_T]: ... + def difference_update(self, *s: Iterable[Any]) -> None: ... + def discard(self, element: _T, /) -> None: ... + def intersection(self, *s: Iterable[Any]) -> set[_T]: ... + def intersection_update(self, *s: Iterable[Any]) -> None: ... + def isdisjoint(self, s: Iterable[Any], /) -> bool: ... + def issubset(self, s: Iterable[Any], /) -> bool: ... + def issuperset(self, s: Iterable[Any], /) -> bool: ... + def remove(self, element: _T, /) -> None: ... + def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]: ... + def symmetric_difference_update(self, s: Iterable[_T], /) -> None: ... + def union(self, *s: Iterable[_S]) -> set[_T | _S]: ... + def update(self, *s: Iterable[_T]) -> None: ... + def __len__(self) -> int: ... + def __contains__(self, o: object, /) -> bool: ... + def __iter__(self) -> Iterator[_T]: ... + def __and__(self, value: AbstractSet[object], /) -> set[_T]: ... + def __iand__(self, value: AbstractSet[object], /) -> Self: ... + def __or__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... + def __ior__(self, value: AbstractSet[_T], /) -> Self: ... # type: ignore[override,misc] + def __sub__(self, value: AbstractSet[_T | None], /) -> set[_T]: ... + def __isub__(self, value: AbstractSet[object], /) -> Self: ... + def __xor__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... + def __ixor__(self, value: AbstractSet[_T], /) -> Self: ... # type: ignore[override,misc] + def __le__(self, value: AbstractSet[object], /) -> bool: ... + def __lt__(self, value: AbstractSet[object], /) -> bool: ... + def __ge__(self, value: AbstractSet[object], /) -> bool: ... + def __gt__(self, value: AbstractSet[object], /) -> bool: ... 
+ def __eq__(self, value: object, /) -> bool: ... + + class SetProxy(_BaseSetProxy[_T]): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + class BaseListProxy(BaseProxy, MutableSequence[_T]): __builtins__: ClassVar[dict[str, Any]] def __len__(self) -> int: ... @@ -277,6 +327,11 @@ class SyncManager(BaseManager): def list(self, sequence: Sequence[_T], /) -> ListProxy[_T]: ... @overload def list(self) -> ListProxy[Any]: ... + if sys.version_info >= (3, 14): + @overload + def set(self, iterable: Iterable[_T], /) -> SetProxy[_T]: ... + @overload + def set(self) -> SetProxy[Any]: ... class RemoteError(Exception): ... From 0ed6cfe6f5a3ce747c81aed614eea1e07305b24a Mon Sep 17 00:00:00 2001 From: David Salvisberg Date: Tue, 25 Feb 2025 12:38:27 +0100 Subject: [PATCH 002/388] WebOb: Fix various issues and refactor some things (#13487) --- stubs/WebOb/@tests/stubtest_allowlist.txt | 80 +- .../@tests/test_cases/check_cachecontrol.py | 102 +++ stubs/WebOb/@tests/test_cases/check_wsgify.py | 10 +- stubs/WebOb/webob/__init__.pyi | 15 + stubs/WebOb/webob/_types.pyi | 21 + stubs/WebOb/webob/acceptparse.pyi | 781 +++++++++++------- stubs/WebOb/webob/byterange.pyi | 2 + stubs/WebOb/webob/cachecontrol.pyi | 150 ++-- stubs/WebOb/webob/client.pyi | 2 + stubs/WebOb/webob/compat.pyi | 22 + stubs/WebOb/webob/cookies.pyi | 73 +- stubs/WebOb/webob/datetime_utils.pyi | 16 + stubs/WebOb/webob/dec.pyi | 80 +- stubs/WebOb/webob/descriptors.pyi | 57 +- stubs/WebOb/webob/etag.pyi | 29 +- stubs/WebOb/webob/exc.pyi | 77 +- stubs/WebOb/webob/headers.pyi | 22 +- stubs/WebOb/webob/multidict.pyi | 128 ++- stubs/WebOb/webob/request.pyi | 149 ++-- stubs/WebOb/webob/response.pyi | 124 +-- stubs/WebOb/webob/static.pyi | 21 +- stubs/WebOb/webob/util.pyi | 7 +- 22 files changed, 1236 insertions(+), 732 deletions(-) create mode 100644 stubs/WebOb/@tests/test_cases/check_cachecontrol.py create mode 100644 stubs/WebOb/webob/_types.pyi create mode 100644 stubs/WebOb/webob/compat.pyi diff --git a/stubs/WebOb/@tests/stubtest_allowlist.txt b/stubs/WebOb/@tests/stubtest_allowlist.txt index adb9b93c7e70..dc716bbabce1 100644 --- a/stubs/WebOb/@tests/stubtest_allowlist.txt +++ b/stubs/WebOb/@tests/stubtest_allowlist.txt @@ -1,18 +1,3 @@ -# TODO: missing from stub -webob.__all__ -webob.byterange.__all__ -webob.client.__all__ -webob.cookies.__all__ -webob.datetime_utils.__all__ -webob.dec.__all__ -webob.etag.__all__ -webob.exc.__all__ -webob.headers.__all__ -webob.multidict.__all__ -webob.request.__all__ -webob.response.__all__ -webob.static.__all__ - # Error: is not present in stub # ============================= # These are plain strings, regex strings or compiled regex patterns @@ -62,7 +47,21 @@ webob.descriptors.SCHEME_RE webob.acceptparse.MIMEAccept # Deprecated API # PY2 compat stuff that has already been removed upstream -webob.compat +webob.compat.PY2 +webob.compat.PY3 +webob.compat.bytes_ +webob.compat.class_types +webob.compat.integer_types +webob.compat.iteritems_ +webob.compat.itervalues_ +webob.compat.long +webob.compat.native_ +webob.compat.parse_qsl_text +webob.compat.reraise +webob.compat.string_types +webob.compat.text_ +webob.compat.text_type +webob.compat.unquote webob.multidict.MultiDict.iteritems webob.multidict.MultiDict.iterkeys webob.multidict.MultiDict.itervalues @@ -71,6 +70,9 @@ webob.multidict.NestedMultiDict.iterkeys webob.multidict.NestedMultiDict.itervalues webob.multidict.NoVars.iterkeys +# The implementation details of cgi_FieldStorage shouldn't matter +webob.compat.cgi_FieldStorage.read_multi + # 
NoVars implements the MultiDict interface for better runtime errors # but it is annoying for type checking, so the methods that are not # valid to call on NoVars have been removed. In the future we would @@ -96,41 +98,18 @@ webob.response.ResponseBodyFile.close webob.Response.set_cookie webob.response.Response.set_cookie -# These methods have been moved from their subclasses to the shared hidden superclass -# since the method signatures are the same, so this saves some copy pasta and should -# not affect type checking or runtime behavior in any way -webob.acceptparse._AcceptCharsetInvalidOrNoHeader.__add__ -webob.acceptparse._AcceptCharsetInvalidOrNoHeader.__radd__ -webob.acceptparse._AcceptCharsetInvalidOrNoHeader.copy -webob.acceptparse._AcceptCharsetInvalidOrNoHeader.parsed -webob.acceptparse._AcceptEncodingInvalidOrNoHeader.__add__ -webob.acceptparse._AcceptEncodingInvalidOrNoHeader.__radd__ -webob.acceptparse._AcceptEncodingInvalidOrNoHeader.copy -webob.acceptparse._AcceptEncodingInvalidOrNoHeader.parsed -webob.acceptparse._AcceptInvalidOrNoHeader.__add__ -webob.acceptparse._AcceptInvalidOrNoHeader.__radd__ -webob.acceptparse._AcceptInvalidOrNoHeader.copy -webob.acceptparse._AcceptInvalidOrNoHeader.parsed -webob.acceptparse._AcceptLanguageInvalidOrNoHeader.__add__ -webob.acceptparse._AcceptLanguageInvalidOrNoHeader.__radd__ -webob.acceptparse._AcceptLanguageInvalidOrNoHeader.copy -webob.acceptparse._AcceptLanguageInvalidOrNoHeader.lookup -webob.acceptparse._AcceptLanguageInvalidOrNoHeader.parsed - # These are here due to the slightly more strict nature of the type annotation # of these descriptors for type checking, it does not really have any runtime # consequences since `_IntValueProperty` derives from `value_property` and # only makes `__set__` slightly more strict. -webob.cachecontrol.CacheControl.max_age -webob.cachecontrol.CacheControl.max_stale -webob.cachecontrol.CacheControl.min_fresh -webob.cachecontrol.CacheControl.s_max_age -webob.cachecontrol.CacheControl.s_maxage -webob.cachecontrol.CacheControl.stale_if_error -webob.cachecontrol.CacheControl.stale_while_revalidate -webob.cachecontrol.CacheControl.update_dict webob.cachecontrol.UpdateDict.setdefault +# Even though at runtime the default argument has a default value of `None` +# that will cause an exception, so we're better off pretending the argument +# is required, and that it can't be `None` +webob.headers.ResponseHeaders.setdefault +webob.multidict.GetDict.setdefault + # These need to be ignored due to how WebOb decided to let people know # that certain methods on `NestedMultiDict` should not be called since # they are immutable, compared to a MultiDict, but still can be used @@ -138,8 +117,14 @@ webob.cachecontrol.UpdateDict.setdefault # that accept any parameters and assign them to methods which should still # satisfy the same interface. The type annotations enforce the correct # input arguments instead of the generic ones. 
-webob.multidict.NestedMultiDict.popitem +webob.multidict.NestedMultiDict.__delitem__ +webob.multidict.NestedMultiDict.__setitem__ +webob.multidict.NestedMultiDict.add webob.multidict.NestedMultiDict.clear +webob.multidict.NestedMultiDict.pop +webob.multidict.NestedMultiDict.popitem +webob.multidict.NestedMultiDict.setdefault +webob.multidict.NestedMultiDict.update # The `DEFAULT` parameter on these dunder methods don't really make sense as # part of the public API, so they have been removed from the stubs @@ -177,3 +162,6 @@ webob.multidict.NoVars.__bool__ # with a use-case where the distinction matters, besides inheriting from # the class and overwriting the __init__ and forgetting to populate `write`. webob.response.ResponseBodyFile.write + +# A couple of utility types we use in multiple modules +webob._types diff --git a/stubs/WebOb/@tests/test_cases/check_cachecontrol.py b/stubs/WebOb/@tests/test_cases/check_cachecontrol.py new file mode 100644 index 000000000000..dd20460e466a --- /dev/null +++ b/stubs/WebOb/@tests/test_cases/check_cachecontrol.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +from typing import Any, Literal, Union +from typing_extensions import assert_type + +from webob.cachecontrol import CacheControl +from webob.request import BaseRequest +from webob.response import Response + +req = BaseRequest({}) +res = Response() +assert_type(req.cache_control, CacheControl[Literal["request"]]) +assert_type(res.cache_control, CacheControl[Literal["response"]]) + +assert_type(CacheControl.parse(""), CacheControl[None]) +assert_type(CacheControl.parse("", type="request"), CacheControl[Literal["request"]]) +assert_type(CacheControl.parse("", type="response"), CacheControl[Literal["response"]]) + +req_cc = req.cache_control +res_cc = res.cache_control +shared_cc = CacheControl.parse("") +assert_type(req_cc, CacheControl[Literal["request"]]) +assert_type(res_cc, CacheControl[Literal["response"]]) +assert_type(shared_cc, CacheControl[None]) +any_cc = CacheControl[Any]({}, None) + +assert_type(req_cc.max_stale, Union[int, Literal["*"], None]) +res_cc.max_stale # type: ignore +shared_cc.max_stale # type: ignore +assert_type(any_cc.max_stale, Union[int, Literal["*"], None]) + +assert_type(req_cc.min_fresh, Union[int, None]) +res_cc.min_fresh # type: ignore +shared_cc.min_fresh # type: ignore +assert_type(any_cc.min_fresh, Union[int, None]) + +assert_type(req_cc.only_if_cached, bool) +res_cc.only_if_cached # type: ignore +shared_cc.only_if_cached # type: ignore +assert_type(any_cc.only_if_cached, bool) + +req_cc.public # type: ignore +assert_type(res_cc.public, bool) +shared_cc.public # type: ignore +assert_type(any_cc.public, bool) + +# NOTE: pyright gets confused about the `Literal["*"]` the types match +req_cc.private # type: ignore +assert_type(res_cc.private, Union[str, Literal["*"], None]) # pyright: ignore +shared_cc.private # type: ignore +assert_type(any_cc.private, Union[str, Literal["*"], None]) # pyright: ignore + +assert_type(req_cc.no_cache, Union[str, Literal["*"], None]) # pyright: ignore +assert_type(res_cc.no_cache, Union[str, Literal["*"], None]) # pyright: ignore +assert_type(shared_cc.no_cache, Union[str, Literal["*"], None]) # pyright: ignore +assert_type(any_cc.no_cache, Union[str, Literal["*"], None]) # pyright: ignore + +assert_type(req_cc.no_store, bool) +assert_type(res_cc.no_store, bool) +assert_type(shared_cc.no_store, bool) +assert_type(any_cc.no_store, bool) + +assert_type(req_cc.no_transform, bool) +assert_type(res_cc.no_transform, bool) 
+assert_type(shared_cc.no_transform, bool) +assert_type(any_cc.no_transform, bool) + +req_cc.must_revalidate # type: ignore +assert_type(res_cc.must_revalidate, bool) +shared_cc.must_revalidate # type: ignore +assert_type(any_cc.must_revalidate, bool) + +req_cc.proxy_revalidate # type: ignore +assert_type(res_cc.proxy_revalidate, bool) +shared_cc.proxy_revalidate # type: ignore +assert_type(any_cc.proxy_revalidate, bool) + +# NOTE: pyright gets confused about the `Literal[-1]` the types match +assert_type(req_cc.max_age, Union[int, Literal[-1], None]) # pyright: ignore +assert_type(res_cc.max_age, Union[int, Literal[-1], None]) # pyright: ignore +assert_type(shared_cc.max_age, Union[int, Literal[-1], None]) # pyright: ignore +assert_type(any_cc.max_age, Union[int, Literal[-1], None]) # pyright: ignore + +req_cc.s_maxage # type: ignore +assert_type(res_cc.s_maxage, Union[int, None]) +shared_cc.s_maxage # type: ignore +assert_type(any_cc.s_maxage, Union[int, None]) + +req_cc.s_max_age # type: ignore +assert_type(res_cc.s_max_age, Union[int, None]) +shared_cc.s_max_age # type: ignore +assert_type(any_cc.s_max_age, Union[int, None]) + +req_cc.stale_while_revalidate # type: ignore +assert_type(res_cc.stale_while_revalidate, Union[int, None]) +shared_cc.stale_while_revalidate # type: ignore +assert_type(any_cc.stale_while_revalidate, Union[int, None]) + +req_cc.stale_if_error # type: ignore +assert_type(res_cc.stale_if_error, Union[int, None]) +shared_cc.stale_if_error # type: ignore +assert_type(any_cc.stale_if_error, Union[int, None]) diff --git a/stubs/WebOb/@tests/test_cases/check_wsgify.py b/stubs/WebOb/@tests/test_cases/check_wsgify.py index 0ca91e28ace6..93de11429778 100644 --- a/stubs/WebOb/@tests/test_cases/check_wsgify.py +++ b/stubs/WebOb/@tests/test_cases/check_wsgify.py @@ -56,10 +56,10 @@ def app(request: Request) -> str: application = app -assert_type(app, "wsgify[Request, []]") +assert_type(app, "wsgify[[], Request]") assert_type(app(env, start_response), "Iterable[bytes]") assert_type(app(request), _AnyResponse) -assert_type(app(application), "wsgify[Request, []]") +assert_type(app(application), "wsgify[[], Request]") application = app(application) @@ -75,10 +75,10 @@ def m_app(request: Request) -> str: application = m_app -assert_type(m_app, "wsgify[Request, [WSGIApplication]]") +assert_type(m_app, "wsgify[[WSGIApplication], Request]") assert_type(m_app(env, start_response), "Iterable[bytes]") assert_type(m_app(request), _AnyResponse) -assert_type(m_app(application), "wsgify[Request, [WSGIApplication]]") +assert_type(m_app(application), "wsgify[[WSGIApplication], Request]") application = m_app(application) @@ -93,7 +93,7 @@ def my_request_app(request: MyRequest) -> None: application = my_request_app -assert_type(my_request_app, "wsgify[MyRequest, []]") +assert_type(my_request_app, "wsgify[[], MyRequest]") # we are allowed to accept a less specific request class diff --git a/stubs/WebOb/webob/__init__.pyi b/stubs/WebOb/webob/__init__.pyi index 308be3055c80..b190ee568d5a 100644 --- a/stubs/WebOb/webob/__init__.pyi +++ b/stubs/WebOb/webob/__init__.pyi @@ -11,3 +11,18 @@ from webob.datetime_utils import ( from webob.request import LegacyRequest as LegacyRequest, Request as Request from webob.response import Response as Response from webob.util import html_escape as html_escape + +__all__ = [ + "Request", + "LegacyRequest", + "Response", + "UTC", + "day", + "week", + "hour", + "minute", + "second", + "month", + "year", + "html_escape", +] diff --git a/stubs/WebOb/webob/_types.pyi 
b/stubs/WebOb/webob/_types.pyi new file mode 100644 index 000000000000..08fdc54b3dd6 --- /dev/null +++ b/stubs/WebOb/webob/_types.pyi @@ -0,0 +1,21 @@ +from typing import Protocol, TypeVar, overload +from typing_extensions import TypeAlias + +_T = TypeVar("_T") +_GetterReturnType_co = TypeVar("_GetterReturnType_co", covariant=True) +_SetterValueType_contra = TypeVar("_SetterValueType_contra", contravariant=True) + +class AsymmetricProperty(Protocol[_GetterReturnType_co, _SetterValueType_contra]): + @overload + def __get__(self, obj: None, type: type[object] | None = ..., /) -> property: ... + @overload + def __get__(self, obj: object, type: type[object] | None = ..., /) -> _GetterReturnType_co: ... + def __set__(self, obj: object, value: _SetterValueType_contra, /) -> None: ... + +class AsymmetricPropertyWithDelete( + AsymmetricProperty[_GetterReturnType_co, _SetterValueType_contra], Protocol[_GetterReturnType_co, _SetterValueType_contra] +): + def __delete__(self, obj: object, /) -> None: ... + +SymmetricProperty: TypeAlias = AsymmetricProperty[_T, _T] +SymmetricPropertyWithDelete: TypeAlias = AsymmetricPropertyWithDelete[_T, _T] diff --git a/stubs/WebOb/webob/acceptparse.pyi b/stubs/WebOb/webob/acceptparse.pyi index 56d5e585362c..5b3043e7b2ce 100644 --- a/stubs/WebOb/webob/acceptparse.pyi +++ b/stubs/WebOb/webob/acceptparse.pyi @@ -1,12 +1,67 @@ from _typeshed import SupportsItems -from collections.abc import Callable, Iterable, Iterator -from typing import Any, Literal, NamedTuple, TypeVar, overload +from collections.abc import Callable, Iterable, Iterator, Sequence +from typing import Any, Literal, NamedTuple, Protocol, TypeVar, overload from typing_extensions import Self, TypeAlias +from webob._types import AsymmetricPropertyWithDelete + _T = TypeVar("_T") _ListOrTuple: TypeAlias = list[_T] | tuple[_T, ...] _ParsedAccept: TypeAlias = tuple[str, float, list[tuple[str, str]], list[str | tuple[str, str]]] +class _SupportsStr(Protocol): + def __str__(self) -> str: ... 
# noqa: Y029 + +_AnyAcceptHeader: TypeAlias = AcceptValidHeader | AcceptInvalidHeader | AcceptNoHeader +_AnyAcceptCharsetHeader: TypeAlias = AcceptCharsetValidHeader | AcceptCharsetInvalidHeader | AcceptCharsetNoHeader +_AnyAcceptEncodingHeader: TypeAlias = AcceptEncodingValidHeader | AcceptEncodingInvalidHeader | AcceptEncodingNoHeader +_AnyAcceptLanguageHeader: TypeAlias = AcceptLanguageValidHeader | AcceptLanguageInvalidHeader | AcceptLanguageNoHeader + +_AcceptProperty: TypeAlias = AsymmetricPropertyWithDelete[ + _AnyAcceptHeader, + ( + _AnyAcceptHeader + | SupportsItems[str, float | tuple[float, str]] + | _ListOrTuple[str | tuple[str, float, str] | list[Any]] + | _SupportsStr + | str + | None + ), +] +_AcceptCharsetProperty: TypeAlias = AsymmetricPropertyWithDelete[ + _AnyAcceptCharsetHeader, + ( + _AnyAcceptCharsetHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), +] +_AcceptEncodingProperty: TypeAlias = AsymmetricPropertyWithDelete[ + _AnyAcceptEncodingHeader, + ( + _AnyAcceptEncodingHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), +] +_AcceptLanguageProperty: TypeAlias = AsymmetricPropertyWithDelete[ + _AnyAcceptLanguageHeader, + ( + _AnyAcceptLanguageHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), +] + class AcceptOffer(NamedTuple): type: str subtype: str @@ -22,30 +77,38 @@ class AcceptValidHeader(Accept): @property def header_value(self) -> str: ... @property - def parsed(self) -> list[_ParsedAccept] | None: ... + def parsed(self) -> list[_ParsedAccept]: ... def __init__(self, header_value: str) -> None: ... def copy(self) -> Self: ... - @overload - def __add__(self, other: str | None) -> Self: ... - @overload - def __add__(self, other: AcceptNoHeader | AcceptValidHeader | AcceptInvalidHeader) -> Self: ... - @overload - def __add__(self, other: SupportsItems[str, float | tuple[float, str]]) -> Self: ... - @overload - def __add__(self, other: _ListOrTuple[str | tuple[str, float, str] | list[Any]]) -> Self: ... + def __add__( + self, + other: ( + _AnyAcceptHeader + | SupportsItems[str, float | tuple[float, str]] + | _ListOrTuple[str | tuple[str, float, str] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self: ... def __bool__(self) -> Literal[True]: ... def __contains__(self, offer: str) -> bool: ... def __iter__(self) -> Iterator[str]: ... - @overload - def __radd__(self, other: str | None) -> Self: ... - @overload - def __radd__(self, other: SupportsItems[str, float | tuple[float, str]]) -> Self: ... - @overload - def __radd__(self, other: _ListOrTuple[str | tuple[str, float, str] | list[Any]]) -> Self: ... + def __radd__( + self, + other: ( + _AnyAcceptHeader + | SupportsItems[str, float | tuple[float, str]] + | _ListOrTuple[str | tuple[str, float, str] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self: ... def accept_html(self) -> bool: ... @property def accepts_html(self) -> bool: ... - def acceptable_offers(self, offers: Iterable[str | AcceptOffer]) -> list[tuple[str, float]]: ... + def acceptable_offers(self, offers: Sequence[str]) -> list[tuple[str, float]]: ... @overload def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: None = None) -> str | None: ... @overload @@ -53,40 +116,13 @@ class AcceptValidHeader(Accept): def quality(self, offer: str) -> float | None: ... 
class _AcceptInvalidOrNoHeader(Accept): - @property - def parsed(self) -> None: ... - def copy(self) -> Self: ... - @overload - def __add__(self, other: None) -> AcceptNoHeader: ... - @overload - def __add__(self, other: str) -> AcceptValidHeader | AcceptNoHeader: ... - @overload - def __add__(self, other: AcceptValidHeader) -> AcceptValidHeader: ... - @overload - def __add__(self, other: AcceptNoHeader | AcceptInvalidHeader) -> AcceptNoHeader: ... - @overload - def __add__(self, other: SupportsItems[str, float | tuple[float, str]]) -> AcceptValidHeader | AcceptNoHeader: ... - @overload - def __add__(self, other: _ListOrTuple[str | tuple[str, float, str] | list[Any]]) -> AcceptValidHeader | AcceptNoHeader: ... - @overload - def __radd__(self, other: None) -> AcceptNoHeader: ... - @overload - def __radd__(self, other: str) -> AcceptValidHeader | AcceptNoHeader: ... - @overload - def __radd__(self, other: AcceptValidHeader) -> AcceptValidHeader: ... - @overload - def __radd__(self, other: AcceptNoHeader | AcceptInvalidHeader) -> AcceptNoHeader: ... - @overload - def __radd__(self, other: SupportsItems[str, float | tuple[float, str]]) -> AcceptValidHeader | AcceptNoHeader: ... - @overload - def __radd__(self, other: _ListOrTuple[str | tuple[str, float, str] | list[Any]]) -> AcceptValidHeader | AcceptNoHeader: ... def __bool__(self) -> Literal[False]: ... def __contains__(self, offer: str) -> Literal[True]: ... def __iter__(self) -> Iterator[str]: ... def accept_html(self) -> bool: ... @property def accepts_html(self) -> bool: ... - def acceptable_offers(self, offers: Iterable[str | AcceptOffer]) -> list[tuple[str, float]]: ... + def acceptable_offers(self, offers: Sequence[str]) -> list[tuple[str, float]]: ... @overload def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: None = None) -> str | None: ... @overload @@ -96,33 +132,93 @@ class _AcceptInvalidOrNoHeader(Accept): class AcceptNoHeader(_AcceptInvalidOrNoHeader): @property def header_value(self) -> None: ... + @property + def parsed(self) -> None: ... def __init__(self) -> None: ... + def copy(self) -> Self: ... + @overload + def __add__(self, other: AcceptValidHeader | Literal[""]) -> AcceptValidHeader: ... + @overload + def __add__(self, other: AcceptNoHeader | AcceptInvalidHeader | None) -> Self: ... + @overload + def __add__( + self, + other: ( + _AnyAcceptHeader + | SupportsItems[str, float | tuple[float, str]] + | _ListOrTuple[str | tuple[str, float, str] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self | AcceptValidHeader: ... + @overload + def __radd__(self, other: AcceptValidHeader | Literal[""]) -> AcceptValidHeader: ... + @overload + def __radd__(self, other: AcceptNoHeader | AcceptInvalidHeader | None) -> Self: ... + @overload + def __radd__( + self, + other: ( + _AnyAcceptHeader + | SupportsItems[str, float | tuple[float, str]] + | _ListOrTuple[str | tuple[str, float, str] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self | AcceptValidHeader: ... class AcceptInvalidHeader(_AcceptInvalidOrNoHeader): @property def header_value(self) -> str: ... + @property + def parsed(self) -> None: ... def __init__(self, header_value: str) -> None: ... - -@overload -def create_accept_header(header_value: None) -> AcceptNoHeader: ... -@overload -def create_accept_header(header_value: str) -> AcceptValidHeader | AcceptInvalidHeader: ... - -class _AcceptProperty: + def copy(self) -> Self: ... @overload - def __get__(self, obj: None, type: type | None = ..., /) -> property: ... 
+ def __add__(self, other: AcceptValidHeader | Literal[""]) -> AcceptValidHeader: ... @overload - def __get__(self, obj: Any, type: type | None = ..., /) -> AcceptNoHeader | AcceptValidHeader | AcceptInvalidHeader: ... + def __add__(self, other: AcceptInvalidHeader | AcceptNoHeader | None) -> AcceptNoHeader: ... @overload - def __set__(self, obj: Any, value: str | None, /) -> None: ... + def __add__( + self, + other: ( + _AnyAcceptHeader + | SupportsItems[str, float | tuple[float, str]] + | _ListOrTuple[str | tuple[str, float, str] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> AcceptValidHeader | AcceptNoHeader: ... @overload - def __set__(self, obj: Any, value: AcceptNoHeader | AcceptValidHeader | AcceptInvalidHeader, /) -> None: ... + def __radd__(self, other: AcceptValidHeader | Literal[""]) -> AcceptValidHeader: ... @overload - def __set__(self, obj: Any, value: SupportsItems[str, float | tuple[float, str]], /) -> None: ... + def __radd__(self, other: AcceptInvalidHeader | AcceptNoHeader | None) -> AcceptNoHeader: ... @overload - def __set__(self, obj: Any, value: _ListOrTuple[str | tuple[str, float, str] | list[Any]], /) -> None: ... - def __delete__(self, obj: Any, /) -> None: ... + def __radd__( + self, + other: ( + _AnyAcceptHeader + | SupportsItems[str, float | tuple[float, str]] + | _ListOrTuple[str | tuple[str, float, str] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> AcceptValidHeader | AcceptNoHeader: ... +@overload +def create_accept_header(header_value: AcceptValidHeader | Literal[""]) -> AcceptValidHeader: ... +@overload +def create_accept_header(header_value: AcceptInvalidHeader) -> AcceptInvalidHeader: ... +@overload +def create_accept_header(header_value: None | AcceptNoHeader) -> AcceptNoHeader: ... +@overload +def create_accept_header(header_value: str) -> AcceptValidHeader | AcceptInvalidHeader: ... +@overload +def create_accept_header(header_value: _AnyAcceptHeader | str | None) -> _AnyAcceptHeader: ... def accept_property() -> _AcceptProperty: ... class AcceptCharset: @@ -136,108 +232,143 @@ class AcceptCharsetValidHeader(AcceptCharset): def parsed(self) -> list[tuple[str, float]]: ... def __init__(self, header_value: str) -> None: ... def copy(self) -> Self: ... - @overload - def __add__(self, other: str | None) -> Self: ... - @overload - def __add__(self, other: AcceptCharsetValidHeader | AcceptCharsetNoHeader | AcceptCharsetInvalidHeader) -> Self: ... - @overload - def __add__(self, other: SupportsItems[str, float]) -> Self: ... - @overload - def __add__(self, other: _ListOrTuple[str | tuple[str, float] | list[Any]]) -> Self: ... + def __add__( + self, + other: ( + _AnyAcceptCharsetHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self: ... def __bool__(self) -> Literal[True]: ... def __contains__(self, offer: str) -> bool: ... def __iter__(self) -> Iterator[str]: ... + def __radd__( + self, + other: ( + _AnyAcceptCharsetHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self: ... + def acceptable_offers(self, offers: Sequence[str]) -> list[tuple[str, float]]: ... @overload - def __radd__(self, other: str | None) -> Self: ... - @overload - def __radd__(self, other: AcceptCharsetValidHeader | AcceptCharsetNoHeader | AcceptCharsetInvalidHeader) -> Self: ... - @overload - def __radd__(self, other: SupportsItems[str, float]) -> Self: ... 
- @overload - def __radd__(self, other: _ListOrTuple[str | tuple[str, float] | list[Any]]) -> Self: ... - def acceptable_offers(self, offers: Iterable[str]) -> list[tuple[str, float]]: ... - @overload - def best_match(self, offers: Iterable[str], default_match: None = None) -> str | None: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: None = None) -> str | None: ... @overload - def best_match(self, offers: Iterable[str], default_match: str) -> str: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: str) -> str: ... def quality(self, offer: str) -> float | None: ... class _AcceptCharsetInvalidOrNoHeader(AcceptCharset): - @property - def parsed(self) -> None: ... def __bool__(self) -> Literal[False]: ... def __contains__(self, offer: str) -> Literal[True]: ... def __iter__(self) -> Iterator[str]: ... + def acceptable_offers(self, offers: Iterable[str]) -> list[tuple[str, float]]: ... @overload - def __add__(self, other: None) -> AcceptCharsetNoHeader: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: None = None) -> str | None: ... @overload - def __add__(self, other: str) -> AcceptCharsetValidHeader | AcceptCharsetNoHeader: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: str) -> str: ... + def quality(self, offer: str) -> float | None: ... + +class AcceptCharsetNoHeader(_AcceptCharsetInvalidOrNoHeader): + @property + def header_value(self) -> None: ... + @property + def parsed(self) -> None: ... + def __init__(self) -> None: ... + def copy(self) -> Self: ... @overload def __add__(self, other: AcceptCharsetValidHeader) -> AcceptCharsetValidHeader: ... @overload - def __add__(self, other: AcceptCharsetNoHeader | AcceptCharsetInvalidHeader) -> AcceptCharsetNoHeader: ... - @overload - def __add__(self, other: SupportsItems[str, float]) -> AcceptCharsetValidHeader | AcceptCharsetNoHeader: ... + def __add__(self, other: AcceptCharsetInvalidHeader | AcceptCharsetNoHeader | Literal[""] | None) -> Self: ... @overload def __add__( - self, other: _ListOrTuple[str | tuple[str, float] | list[Any]] - ) -> AcceptCharsetValidHeader | AcceptCharsetNoHeader: ... - @overload - def __radd__(self, other: None) -> AcceptCharsetNoHeader: ... - @overload - def __radd__(self, other: str) -> AcceptCharsetValidHeader | AcceptCharsetNoHeader: ... + self, + other: ( + _AnyAcceptCharsetHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self | AcceptCharsetValidHeader: ... @overload def __radd__(self, other: AcceptCharsetValidHeader) -> AcceptCharsetValidHeader: ... @overload - def __radd__(self, other: AcceptCharsetNoHeader | AcceptCharsetInvalidHeader) -> AcceptCharsetNoHeader: ... - @overload - def __radd__(self, other: SupportsItems[str, float]) -> AcceptCharsetValidHeader | AcceptCharsetNoHeader: ... + def __radd__(self, other: AcceptCharsetInvalidHeader | AcceptCharsetNoHeader | Literal[""] | None) -> Self: ... @overload def __radd__( - self, other: _ListOrTuple[str | tuple[str, float] | list[Any]] - ) -> AcceptCharsetValidHeader | AcceptCharsetNoHeader: ... - def copy(self) -> Self: ... - def acceptable_offers(self, offers: Iterable[str]) -> list[tuple[str, float]]: ... - @overload - def best_match(self, offers: Iterable[str], default_match: None = None) -> str | None: ... 
- @overload - def best_match(self, offers: Iterable[str], default_match: str) -> str: ... - def quality(self, offer: str) -> float | None: ... - -class AcceptCharsetNoHeader(_AcceptCharsetInvalidOrNoHeader): - @property - def header_value(self) -> None: ... - def __init__(self) -> None: ... + self, + other: ( + _AnyAcceptCharsetHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self | AcceptCharsetValidHeader: ... class AcceptCharsetInvalidHeader(_AcceptCharsetInvalidOrNoHeader): @property def header_value(self) -> str: ... + @property + def parsed(self) -> None: ... def __init__(self, header_value: str) -> None: ... - -@overload -def create_accept_charset_header(header_value: None) -> AcceptCharsetNoHeader: ... -@overload -def create_accept_charset_header(header_value: str) -> AcceptCharsetValidHeader | AcceptCharsetInvalidHeader: ... - -class _AcceptCharsetProperty: + def copy(self) -> Self: ... @overload - def __get__(self, obj: None, type: type | None = ..., /) -> property: ... + def __add__(self, other: AcceptCharsetValidHeader) -> AcceptCharsetValidHeader: ... @overload - def __get__( - self, obj: Any, type: type | None = ..., / - ) -> AcceptCharsetNoHeader | AcceptCharsetValidHeader | AcceptCharsetInvalidHeader: ... + def __add__( + self, other: AcceptCharsetInvalidHeader | AcceptCharsetNoHeader | Literal[""] | None + ) -> AcceptCharsetNoHeader: ... @overload - def __set__(self, obj: Any, value: str | None, /) -> None: ... + def __add__( + self, + other: ( + _AnyAcceptCharsetHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> AcceptCharsetValidHeader | AcceptCharsetNoHeader: ... @overload - def __set__( - self, obj: Any, value: AcceptCharsetNoHeader | AcceptCharsetValidHeader | AcceptCharsetInvalidHeader, / - ) -> None: ... + def __radd__(self, other: AcceptCharsetValidHeader) -> AcceptCharsetValidHeader: ... @overload - def __set__(self, obj: Any, value: SupportsItems[str, float], /) -> None: ... + def __radd__( + self, other: AcceptCharsetInvalidHeader | AcceptCharsetNoHeader | Literal[""] | None + ) -> AcceptCharsetNoHeader: ... @overload - def __set__(self, obj: Any, value: _ListOrTuple[str | tuple[str, float] | list[Any]], /) -> None: ... - def __delete__(self, obj: Any, /) -> None: ... + def __radd__( + self, + other: ( + _AnyAcceptCharsetHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> AcceptCharsetValidHeader | AcceptCharsetNoHeader: ... +@overload +def create_accept_charset_header(header_value: AcceptCharsetValidHeader | Literal[""]) -> AcceptCharsetValidHeader: ... +@overload +def create_accept_charset_header(header_value: AcceptCharsetInvalidHeader) -> AcceptCharsetInvalidHeader: ... +@overload +def create_accept_charset_header(header_value: AcceptCharsetNoHeader | None) -> AcceptCharsetNoHeader: ... +@overload +def create_accept_charset_header(header_value: str) -> AcceptCharsetValidHeader | AcceptCharsetInvalidHeader: ... +@overload +def create_accept_charset_header(header_value: _AnyAcceptCharsetHeader | str | None) -> _AnyAcceptCharsetHeader: ... def accept_charset_property() -> _AcceptCharsetProperty: ... class AcceptEncoding: @@ -251,108 +382,139 @@ class AcceptEncodingValidHeader(AcceptEncoding): def parsed(self) -> list[tuple[str, float]]: ... def __init__(self, header_value: str) -> None: ... 
def copy(self) -> Self: ... - @overload - def __add__(self, other: str | None) -> Self: ... - @overload - def __add__(self, other: AcceptEncodingValidHeader | AcceptEncodingNoHeader | AcceptEncodingInvalidHeader) -> Self: ... - @overload - def __add__(self, other: SupportsItems[str, float]) -> Self: ... - @overload - def __add__(self, other: _ListOrTuple[str | tuple[str, float] | list[Any]]) -> Self: ... + def __add__( + self, + other: ( + _AnyAcceptEncodingHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self: ... def __bool__(self) -> Literal[True]: ... def __contains__(self, offer: str) -> bool: ... def __iter__(self) -> Iterator[str]: ... + def __radd__( + self, + other: ( + _AnyAcceptEncodingHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self: ... + def acceptable_offers(self, offers: Sequence[str]) -> list[tuple[str, float]]: ... @overload - def __radd__(self, other: str | None) -> Self: ... - @overload - def __radd__(self, other: AcceptEncodingValidHeader | AcceptEncodingNoHeader | AcceptEncodingInvalidHeader) -> Self: ... - @overload - def __radd__(self, other: SupportsItems[str, float]) -> Self: ... - @overload - def __radd__(self, other: _ListOrTuple[str | tuple[str, float] | list[Any]]) -> Self: ... - def acceptable_offers(self, offers: Iterable[str]) -> list[tuple[str, float]]: ... - @overload - def best_match(self, offers: Iterable[str], default_match: None = None) -> str | None: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: None = None) -> str | None: ... @overload - def best_match(self, offers: Iterable[str], default_match: str) -> str: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: str) -> str: ... def quality(self, offer: str) -> float | None: ... class _AcceptEncodingInvalidOrNoHeader(AcceptEncoding): - @property - def parsed(self) -> None: ... def __bool__(self) -> Literal[False]: ... def __contains__(self, offer: str) -> Literal[True]: ... def __iter__(self) -> Iterator[str]: ... + def acceptable_offers(self, offers: Iterable[str]) -> list[tuple[str, float]]: ... @overload - def __add__(self, other: None) -> AcceptEncodingNoHeader: ... - @overload - def __add__(self, other: str) -> AcceptEncodingValidHeader | AcceptEncodingNoHeader: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: None = None) -> str | None: ... @overload - def __add__(self, other: AcceptEncodingValidHeader) -> AcceptEncodingValidHeader: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: str) -> str: ... + def quality(self, offer: str) -> float | None: ... + +class AcceptEncodingNoHeader(_AcceptEncodingInvalidOrNoHeader): + @property + def header_value(self) -> None: ... + @property + def parsed(self) -> None: ... + def __init__(self) -> None: ... + def copy(self) -> Self: ... @overload - def __add__(self, other: AcceptEncodingNoHeader | AcceptEncodingInvalidHeader) -> AcceptEncodingNoHeader: ... + def __add__(self, other: AcceptEncodingValidHeader | Literal[""]) -> AcceptEncodingValidHeader: ... @overload - def __add__(self, other: SupportsItems[str, float]) -> AcceptEncodingValidHeader | AcceptEncodingNoHeader: ... + def __add__(self, other: AcceptEncodingInvalidHeader | AcceptEncodingNoHeader | None) -> Self: ... 
@overload def __add__( - self, other: _ListOrTuple[str | tuple[str, float] | list[Any]] - ) -> AcceptEncodingValidHeader | AcceptEncodingNoHeader: ... - @overload - def __radd__(self, other: None) -> AcceptEncodingNoHeader: ... - @overload - def __radd__(self, other: str) -> AcceptEncodingValidHeader | AcceptEncodingNoHeader: ... - @overload - def __radd__(self, other: AcceptEncodingValidHeader) -> AcceptEncodingValidHeader: ... + self, + other: ( + _AnyAcceptEncodingHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self | AcceptEncodingValidHeader: ... @overload - def __radd__(self, other: AcceptEncodingNoHeader | AcceptEncodingInvalidHeader) -> AcceptEncodingNoHeader: ... + def __radd__(self, other: AcceptEncodingValidHeader | Literal[""]) -> AcceptEncodingValidHeader: ... @overload - def __radd__(self, other: SupportsItems[str, float]) -> AcceptEncodingValidHeader | AcceptEncodingNoHeader: ... + def __radd__(self, other: AcceptEncodingInvalidHeader | AcceptEncodingNoHeader | None) -> Self: ... @overload def __radd__( - self, other: _ListOrTuple[str | tuple[str, float] | list[Any]] - ) -> AcceptEncodingValidHeader | AcceptEncodingNoHeader: ... - def copy(self) -> Self: ... - def acceptable_offers(self, offers: Iterable[str]) -> list[tuple[str, float]]: ... - @overload - def best_match(self, offers: Iterable[str], default_match: None = None) -> str | None: ... - @overload - def best_match(self, offers: Iterable[str], default_match: str) -> str: ... - def quality(self, offer: str) -> float | None: ... - -class AcceptEncodingNoHeader(_AcceptEncodingInvalidOrNoHeader): - @property - def header_value(self) -> None: ... - def __init__(self) -> None: ... + self, + other: ( + _AnyAcceptEncodingHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self | AcceptEncodingValidHeader: ... class AcceptEncodingInvalidHeader(_AcceptEncodingInvalidOrNoHeader): @property def header_value(self) -> str: ... + @property + def parsed(self) -> None: ... def __init__(self, header_value: str) -> None: ... - -@overload -def create_accept_encoding_header(header_value: None) -> AcceptEncodingNoHeader: ... -@overload -def create_accept_encoding_header(header_value: str) -> AcceptEncodingValidHeader | AcceptEncodingInvalidHeader: ... - -class _AcceptEncodingProperty: + def copy(self) -> Self: ... @overload - def __get__(self, obj: None, type: type | None = ..., /) -> property: ... + def __add__(self, other: AcceptEncodingValidHeader | Literal[""]) -> AcceptEncodingValidHeader: ... @overload - def __get__( - self, obj: Any, type: type | None = ..., / - ) -> AcceptEncodingNoHeader | AcceptEncodingValidHeader | AcceptEncodingInvalidHeader: ... + def __add__(self, other: AcceptEncodingInvalidHeader | AcceptEncodingNoHeader | None) -> AcceptEncodingNoHeader: ... @overload - def __set__(self, obj: Any, value: str | None, /) -> None: ... + def __add__( + self, + other: ( + _AnyAcceptEncodingHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> AcceptEncodingValidHeader | AcceptEncodingNoHeader: ... @overload - def __set__( - self, obj: Any, value: AcceptEncodingNoHeader | AcceptEncodingValidHeader | AcceptEncodingInvalidHeader, / - ) -> None: ... + def __radd__(self, other: AcceptEncodingValidHeader | Literal[""]) -> AcceptEncodingValidHeader: ... 
@overload - def __set__(self, obj: Any, value: SupportsItems[str, float], /) -> None: ... + def __radd__(self, other: AcceptEncodingInvalidHeader | AcceptEncodingNoHeader | None) -> AcceptEncodingNoHeader: ... @overload - def __set__(self, obj: Any, value: _ListOrTuple[str | tuple[str, float] | list[Any]], /) -> None: ... - def __delete__(self, obj: Any, /) -> None: ... + def __radd__( + self, + other: ( + _AnyAcceptEncodingHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> AcceptEncodingValidHeader | AcceptEncodingNoHeader: ... +@overload +def create_accept_encoding_header(header_value: AcceptEncodingValidHeader | Literal[""]) -> AcceptEncodingValidHeader: ... +@overload +def create_accept_encoding_header(header_value: AcceptEncodingInvalidHeader) -> AcceptEncodingInvalidHeader: ... +@overload +def create_accept_encoding_header(header_value: AcceptEncodingNoHeader | None) -> AcceptEncodingNoHeader: ... +@overload +def create_accept_encoding_header(header_value: str) -> AcceptEncodingValidHeader | AcceptEncodingInvalidHeader: ... +@overload +def create_accept_encoding_header(header_value: _AnyAcceptEncodingHeader | str | None) -> _AnyAcceptEncodingHeader: ... def accept_encoding_property() -> _AcceptEncodingProperty: ... class AcceptLanguage: @@ -360,144 +522,193 @@ class AcceptLanguage: def parse(cls, value: str) -> Iterator[tuple[str, float]]: ... class AcceptLanguageValidHeader(AcceptLanguage): + def __init__(self, header_value: str) -> None: ... + def copy(self) -> Self: ... @property def header_value(self) -> str: ... @property def parsed(self) -> list[tuple[str, float]]: ... - def __init__(self, header_value: str) -> None: ... - def copy(self) -> Self: ... - @overload - def __add__(self, other: str | None) -> Self: ... - @overload - def __add__(self, other: AcceptLanguageValidHeader | AcceptLanguageNoHeader | AcceptLanguageInvalidHeader) -> Self: ... - @overload - def __add__(self, other: SupportsItems[str, float]) -> Self: ... - @overload - def __add__(self, other: _ListOrTuple[str | tuple[str, float] | list[Any]]) -> Self: ... + def __add__( + self, + other: ( + _AnyAcceptLanguageHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self: ... def __bool__(self) -> Literal[True]: ... def __contains__(self, offer: str) -> bool: ... def __iter__(self) -> Iterator[str]: ... + def __radd__( + self, + other: ( + _AnyAcceptLanguageHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self: ... + def basic_filtering(self, language_tags: Sequence[str]) -> list[tuple[str, float]]: ... @overload - def __radd__(self, other: str | None) -> Self: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: None = None) -> str | None: ... @overload - def __radd__(self, other: AcceptLanguageValidHeader | AcceptLanguageNoHeader | AcceptLanguageInvalidHeader) -> Self: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: str) -> str: ... @overload - def __radd__(self, other: SupportsItems[str, float]) -> Self: ... + def lookup( + self, language_tags: Sequence[str], default_range: str | None, default_tag: str, default: None = None + ) -> str | None: ... @overload - def __radd__(self, other: _ListOrTuple[str | tuple[str, float] | list[Any]]) -> Self: ... 
- def basic_filtering(self, language_tags: Iterable[str]) -> list[tuple[str, float]]: ... + def lookup( + self, language_tags: Sequence[str], *, default_range: str | None = None, default_tag: str, default: None = None + ) -> str | None: ... @overload - def best_match(self, offers: Iterable[str], default_match: None = None) -> str | None: ... + def lookup( + self, language_tags: Sequence[str], default_range: str | None, default_tag: None, default: _T | Callable[[], _T] + ) -> _T | str | None: ... @overload - def best_match(self, offers: Iterable[str], default_match: str) -> str: ... + def lookup( + self, language_tags: Sequence[str], default_range: str | None, default_tag: str, default: _T | Callable[[], _T] + ) -> _T | str: ... @overload def lookup( self, - language_tags: Iterable[str], + language_tags: Sequence[str], + *, default_range: str | None = None, - default_tag: str = ..., - default: str | Callable[[], str | None] | None = None, - ) -> str | None: ... + default_tag: None = None, + default: _T | Callable[[], _T], + ) -> _T | str | None: ... @overload def lookup( - self, - language_tags: Iterable[str], - default_range: str | None = None, - default_tag: str | None = None, - default: str | Callable[[], str | None] = ..., - ) -> str | None: ... + self, language_tags: Sequence[str], *, default_range: str | None = None, default_tag: str, default: _T | Callable[[], _T] + ) -> _T | str: ... def quality(self, offer: str) -> float | None: ... class _AcceptLanguageInvalidOrNoHeader(AcceptLanguage): - @property - def parsed(self) -> None: ... def __bool__(self) -> Literal[False]: ... def __contains__(self, offer: str) -> Literal[True]: ... def __iter__(self) -> Iterator[str]: ... - @overload - def __add__(self, other: None) -> AcceptLanguageNoHeader: ... - @overload - def __add__(self, other: str) -> AcceptLanguageValidHeader | AcceptLanguageNoHeader: ... - @overload - def __add__(self, other: AcceptLanguageValidHeader) -> AcceptLanguageValidHeader: ... - @overload - def __add__(self, other: AcceptLanguageNoHeader | AcceptLanguageInvalidHeader) -> AcceptLanguageNoHeader: ... - @overload - def __add__(self, other: SupportsItems[str, float]) -> AcceptLanguageValidHeader | AcceptLanguageNoHeader: ... - @overload - def __add__( - self, other: _ListOrTuple[str | tuple[str, float] | list[Any]] - ) -> AcceptLanguageValidHeader | AcceptLanguageNoHeader: ... - @overload - def __radd__(self, other: None) -> AcceptLanguageNoHeader: ... - @overload - def __radd__(self, other: str) -> AcceptLanguageValidHeader | AcceptLanguageNoHeader: ... - @overload - def __radd__(self, other: AcceptLanguageValidHeader) -> AcceptLanguageValidHeader: ... - @overload - def __radd__(self, other: AcceptLanguageNoHeader | AcceptLanguageInvalidHeader) -> AcceptLanguageNoHeader: ... - @overload - def __radd__(self, other: SupportsItems[str, float]) -> AcceptLanguageValidHeader | AcceptLanguageNoHeader: ... - @overload - def __radd__( - self, other: _ListOrTuple[str | tuple[str, float] | list[Any]] - ) -> AcceptLanguageValidHeader | AcceptLanguageNoHeader: ... - def copy(self) -> Self: ... def basic_filtering(self, language_tags: Iterable[str]) -> list[tuple[str, float]]: ... @overload - def best_match(self, offers: Iterable[str], default_match: None = None) -> str | None: ... + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: None = None) -> str | None: ... + @overload + def best_match(self, offers: Iterable[str | tuple[str, float] | list[Any]], default_match: str) -> str: ... 
@overload - def best_match(self, offers: Iterable[str], default_match: str) -> str: ... + def lookup(self, language_tags: object, default_range: object, default_tag: str, default: object = None) -> str: ... @overload def lookup( - self, - language_tags: Iterable[str], - default_range: str | None = None, - default_tag: str = ..., - default: str | Callable[[], str | None] | None = None, - ) -> str | None: ... + self, language_tags: object = None, *, default_range: object = None, default_tag: str, default: object = None + ) -> str: ... + @overload + def lookup(self, language_tags: object, default_range: object, default_tag: None, default: _T | Callable[[], _T]) -> _T: ... @overload def lookup( self, - language_tags: Iterable[str], - default_range: str | None = None, - default_tag: str | None = None, - default: str | Callable[[], str | None] = ..., - ) -> str | None: ... + language_tags: object = None, + *, + default_range: object = None, + default_tag: None = None, + default: _T | Callable[[], _T], + ) -> _T: ... def quality(self, offer: str) -> float | None: ... class AcceptLanguageNoHeader(_AcceptLanguageInvalidOrNoHeader): def __init__(self) -> None: ... + def copy(self) -> Self: ... @property def header_value(self) -> None: ... + @property + def parsed(self) -> None: ... + @overload + def __add__(self, other: AcceptLanguageValidHeader) -> AcceptLanguageValidHeader: ... + @overload + def __add__(self, other: AcceptLanguageInvalidHeader | AcceptLanguageNoHeader | Literal[""] | None) -> Self: ... + @overload + def __add__( + self, + other: ( + _AnyAcceptLanguageHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self | AcceptLanguageValidHeader: ... + @overload + def __radd__(self, other: AcceptLanguageValidHeader) -> AcceptLanguageValidHeader: ... + @overload + def __radd__(self, other: AcceptLanguageInvalidHeader | AcceptLanguageNoHeader | Literal[""] | None) -> Self: ... + @overload + def __radd__( + self, + other: ( + _AnyAcceptLanguageHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> Self | AcceptLanguageValidHeader: ... class AcceptLanguageInvalidHeader(_AcceptLanguageInvalidOrNoHeader): def __init__(self, header_value: str) -> None: ... + def copy(self) -> Self: ... @property def header_value(self) -> str: ... - -@overload -def create_accept_language_header(header_value: None) -> AcceptLanguageNoHeader: ... -@overload -def create_accept_language_header(header_value: str) -> AcceptLanguageValidHeader | AcceptLanguageInvalidHeader: ... - -class _AcceptLanguageProperty: + @property + def parsed(self) -> None: ... @overload - def __get__(self, obj: None, type: type | None = ..., /) -> property: ... + def __add__(self, other: AcceptLanguageValidHeader) -> AcceptLanguageValidHeader: ... @overload - def __get__( - self, obj: Any, type: type | None = ..., / - ) -> AcceptLanguageNoHeader | AcceptLanguageValidHeader | AcceptLanguageInvalidHeader: ... + def __add__( + self, other: AcceptLanguageInvalidHeader | AcceptLanguageNoHeader | Literal[""] | None + ) -> AcceptLanguageNoHeader: ... @overload - def __set__(self, obj: Any, value: str | None, /) -> None: ... + def __add__( + self, + other: ( + _AnyAcceptLanguageHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> AcceptLanguageValidHeader | AcceptLanguageNoHeader: ... 
@overload - def __set__( - self, obj: Any, value: AcceptLanguageNoHeader | AcceptLanguageValidHeader | AcceptLanguageInvalidHeader, / - ) -> None: ... + def __radd__(self, other: AcceptLanguageValidHeader) -> AcceptLanguageValidHeader: ... @overload - def __set__(self, obj: Any, value: SupportsItems[str, float], /) -> None: ... + def __radd__( + self, other: AcceptLanguageInvalidHeader | AcceptLanguageNoHeader | Literal[""] | None + ) -> AcceptLanguageNoHeader: ... @overload - def __set__(self, obj: Any, value: _ListOrTuple[str | tuple[str, float] | list[Any]], /) -> None: ... - def __delete__(self, obj: Any, /) -> None: ... + def __radd__( + self, + other: ( + _AnyAcceptLanguageHeader + | SupportsItems[str, float] + | _ListOrTuple[str | tuple[str, float] | list[Any]] + | _SupportsStr + | str + | None + ), + ) -> AcceptLanguageValidHeader | AcceptLanguageNoHeader: ... +@overload +def create_accept_language_header(header_value: AcceptLanguageValidHeader | Literal[""]) -> AcceptLanguageValidHeader: ... +@overload +def create_accept_language_header(header_value: AcceptLanguageNoHeader | None) -> AcceptLanguageNoHeader: ... +@overload +def create_accept_language_header(header_value: AcceptLanguageInvalidHeader) -> AcceptLanguageInvalidHeader: ... +@overload +def create_accept_language_header(header_value: str) -> AcceptLanguageValidHeader | AcceptLanguageInvalidHeader: ... +@overload +def create_accept_language_header(header_value: _AnyAcceptLanguageHeader | str | None) -> _AnyAcceptLanguageHeader: ... def accept_language_property() -> _AcceptLanguageProperty: ... diff --git a/stubs/WebOb/webob/byterange.pyi b/stubs/WebOb/webob/byterange.pyi index 461d10783086..6d1d76e19702 100644 --- a/stubs/WebOb/webob/byterange.pyi +++ b/stubs/WebOb/webob/byterange.pyi @@ -2,6 +2,8 @@ from collections.abc import Iterator from typing import overload from typing_extensions import Self +__all__ = ["Range", "ContentRange"] + class Range: start: int | None end: int | None diff --git a/stubs/WebOb/webob/cachecontrol.pyi b/stubs/WebOb/webob/cachecontrol.pyi index b3130de755cf..5b6df1e856b6 100644 --- a/stubs/WebOb/webob/cachecontrol.pyi +++ b/stubs/WebOb/webob/cachecontrol.pyi @@ -1,96 +1,96 @@ -from collections.abc import Callable, MutableMapping -from typing import Any, Generic, Literal, TypeVar, overload -from typing_extensions import Self, TypeAlias - -from webob.request import Request -from webob.response import Response +from _typeshed import SupportsItems +from collections.abc import Callable +from typing import Any, Generic, Literal, overload +from typing_extensions import Self, TypeVar _T = TypeVar("_T") -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") -_NoneLiteral = TypeVar("_NoneLiteral") -_Type: TypeAlias = type +_DefaultT = TypeVar("_DefaultT", default=None) +_NoneLiteral = TypeVar("_NoneLiteral", default=None) +_ScopeT = TypeVar("_ScopeT", Literal["request"], Literal["response"], None, default=None) +_ScopeT2 = TypeVar("_ScopeT2", Literal["request"], Literal["response"], None) -class UpdateDict(dict[_KT, _VT]): +class UpdateDict(dict[str, Any]): updated: Callable[..., Any] | None updated_args: tuple[Any, ...] | None -class exists_property: - prop: str - type: str | None - def __init__(self, prop: str, type: str | None = None) -> None: ... +class exists_property(Generic[_ScopeT]): + def __init__(self, prop: str, type: _ScopeT = ...) -> None: ... @overload - def __get__(self, obj: None, type: _Type | None = None) -> Self: ... 
+ def __get__(self, obj: None, type: type[CacheControl[Any]] | None = None) -> Self: ... @overload - def __get__(self, obj: Any, type: _Type | None = None) -> bool: ... - def __set__(self, obj: Any, value: bool | None) -> None: ... - def __delete__(self, obj: Any) -> None: ... - -class value_property(Generic[_T, _NoneLiteral]): - prop: str - default: _T | None - none: _NoneLiteral - type: str | None + def __get__(self: exists_property[None], obj: CacheControl[Any], type: type[CacheControl[Any]] | None = None) -> bool: ... @overload - def __init__(self, prop: str, default: None = None, none: None = None, type: str | None = None) -> None: ... + def __get__(self, obj: CacheControl[_ScopeT], type: type[CacheControl[Any]] | None = None) -> bool: ... @overload - def __init__(self, prop: str, default: _T, none: _NoneLiteral, type: str | None = None) -> None: ... + def __set__(self: exists_property[None], obj: CacheControl[Any], value: bool | None) -> None: ... @overload - def __get__(self, obj: None, type: _Type | None = None) -> Self: ... + def __set__(self, obj: CacheControl[_ScopeT], value: bool | None) -> None: ... @overload - def __get__(self, obj: Any, type: _Type | None = None) -> _T | _NoneLiteral | None: ... - def __set__(self, obj: Any, value: _T | Literal[True] | None) -> None: ... - def __delete__(self, obj: Any) -> None: ... - -class _IntValueProperty(value_property[int, _NoneLiteral]): - def __set__(self, obj: Any, value: int | None) -> None: ... + def __delete__(self, obj: CacheControl[Any]) -> None: ... + @overload + def __delete__(self, obj: CacheControl[_ScopeT]) -> None: ... -class _BaseCacheControl: - update_dict = UpdateDict - properties: MutableMapping[str, Any] - type: Literal["request", "response"] | None - @classmethod +class value_property(Generic[_T, _DefaultT, _NoneLiteral, _ScopeT]): + def __init__(self, prop: str, default: _DefaultT = ..., none: _NoneLiteral = ..., type: _ScopeT = ...) -> None: ... @overload - def parse(cls, header: str, updates_to: None = None, type: None = None) -> _AnyCacheControl: ... - @classmethod + def __get__(self, obj: None, type: type[CacheControl[Any]] | None = None) -> Self: ... @overload - def parse(cls, header: str, updates_to: Request, type: Literal["request"]) -> _RequestCacheControl: ... - @classmethod + def __get__( + self: value_property[_T, _DefaultT, _NoneLiteral, None], + obj: CacheControl[Any] | None, + type: type[CacheControl[Any]] | None = None, + ) -> _T | _DefaultT | _NoneLiteral: ... @overload - def parse(cls, header: str, updates_to: Response, type: Literal["response"]) -> _ResponseCacheControl: ... - - no_cache: value_property[str, Literal["*"]] - no_store: exists_property - no_transform: exists_property - max_age: _IntValueProperty[Literal[-1]] - def copy(self) -> Self: ... 
- -class _RequestCacheControl(_BaseCacheControl): - type: Literal["request"] - max_stale: _IntValueProperty[Literal["*"]] - min_fresh: _IntValueProperty[None] - only_if_cached: exists_property - -class _ResponseCacheControl(_BaseCacheControl): - type: Literal["response"] - public: exists_property - private: value_property[str, Literal["*"]] - must_revalidate: exists_property - proxy_revalidate: exists_property - s_maxage: _IntValueProperty[None] - s_max_age: _IntValueProperty[None] - stale_while_revalidate: _IntValueProperty[None] - stale_if_error: _IntValueProperty[None] - -class _AnyCacheControl(_RequestCacheControl, _ResponseCacheControl): - type: None # type: ignore[assignment] + def __get__( + self, obj: CacheControl[_ScopeT] | None, type: type[CacheControl[Any]] | None = None + ) -> _T | _DefaultT | _NoneLiteral: ... + @overload + def __set__( + self: value_property[_T, _DefaultT, _NoneLiteral, None], + obj: CacheControl[Any], + value: _T | _DefaultT | Literal[True] | None, + ) -> None: ... + @overload + def __set__(self, obj: CacheControl[_ScopeT], value: _T | _DefaultT | Literal[True] | None) -> None: ... + @overload + def __delete__(self, obj: CacheControl[Any]) -> None: ... + @overload + def __delete__(self, obj: CacheControl[_ScopeT]) -> None: ... -class CacheControl(_AnyCacheControl): +class CacheControl(Generic[_ScopeT]): + header_value: str + update_dict: type[UpdateDict] + properties: dict[str, Any] + type: _ScopeT + def __init__(self, properties: dict[str, Any], type: _ScopeT) -> None: ... @overload - def __init__(self: _AnyCacheControl, properties: MutableMapping[str, Any], type: None) -> None: ... + @classmethod + def parse( + cls, header: str, updates_to: Callable[[dict[str, Any]], Any] | None = None, type: None = None + ) -> CacheControl[None]: ... @overload - def __init__(self: _RequestCacheControl, properties: MutableMapping[str, Any], type: Literal["request"]) -> None: ... + @classmethod + def parse(cls, header: str, updates_to: Callable[[dict[str, Any]], Any] | None, type: _ScopeT2) -> CacheControl[_ScopeT2]: ... @overload - def __init__(self: _ResponseCacheControl, properties: MutableMapping[str, Any], type: Literal["response"]) -> None: ... + @classmethod + def parse( + cls, header: str, updates_to: Callable[[dict[str, Any]], Any] | None = None, *, type: _ScopeT2 + ) -> CacheControl[_ScopeT2]: ... + max_stale: value_property[int, None, Literal["*"], Literal["request"]] + min_fresh: value_property[int, None, None, Literal["request"]] + only_if_cached: exists_property[Literal["request"]] + public: exists_property[Literal["response"]] + private: value_property[str, None, Literal["*"], Literal["response"]] + no_cache: value_property[str, None, Literal["*"], None] + no_store: exists_property[None] + no_transform: exists_property[None] + must_revalidate: exists_property[Literal["response"]] + proxy_revalidate: exists_property[Literal["response"]] + max_age: value_property[int, None, Literal[-1], None] + s_maxage: value_property[int, None, None, Literal["response"]] + s_max_age = s_maxage + stale_while_revalidate: value_property[int, None, None, Literal["response"]] + stale_if_error: value_property[int, None, None, Literal["response"]] + def copy(self) -> Self: ... -def serialize_cache_control(properties: MutableMapping[str, Any] | _BaseCacheControl) -> str: ... +def serialize_cache_control(properties: SupportsItems[str, Any] | CacheControl[Any]) -> str: ... 
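For illustration, the scope-parameterised CacheControl above is meant to type-check ordinary WebOb usage along these lines; this is a minimal sketch against WebOb's documented runtime behaviour, and the header value and variable names are illustrative only:

    from webob import Request, Response

    req = Request.blank("/", headers={"Cache-Control": "no-cache, max-age=0"})
    # request side is typed as CacheControl[Literal["request"]]
    assert req.cache_control.max_age == 0           # shared directive
    cached_only = req.cache_control.only_if_cached  # request-only directive, bool

    resp = Response()
    # response side is typed as CacheControl[Literal["response"]]
    resp.cache_control.max_age = 3600               # shared directive
    resp.cache_control.public = True                # response-only directive
    # resp.cache_control.only_if_cached should now be flagged as the wrong scope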
diff --git a/stubs/WebOb/webob/client.pyi b/stubs/WebOb/webob/client.pyi index 869bdec63534..cdabe6fd5379 100644 --- a/stubs/WebOb/webob/client.pyi +++ b/stubs/WebOb/webob/client.pyi @@ -3,6 +3,8 @@ from collections.abc import Iterable from http.client import HTTPConnection, HTTPMessage, HTTPSConnection from typing import ClassVar +__all__ = ["send_request_app", "SendRequest"] + class SendRequest: def __init__(self, HTTPConnection: type[HTTPConnection] = ..., HTTPSConnection: type[HTTPSConnection] = ...) -> None: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... diff --git a/stubs/WebOb/webob/compat.pyi b/stubs/WebOb/webob/compat.pyi new file mode 100644 index 000000000000..887dbe069b92 --- /dev/null +++ b/stubs/WebOb/webob/compat.pyi @@ -0,0 +1,22 @@ +import sys +from html import escape as escape +from queue import Empty as Empty, Queue as Queue +from typing import IO + +if sys.version_info >= (3, 13): + # NOTE: These are the only attributes we realistically care about + class cgi_FieldStorage: + filename: str + file: IO[bytes] + + def parse_header(line: str) -> tuple[str, dict[str, str]]: ... + +else: + from cgi import FieldStorage as _cgi_FieldStorage, parse_header as parse_header + + class cgi_FieldStorage(_cgi_FieldStorage): + # NOTE: The only kinds of objects of this type the user is exposed to + # will contain a file with a filename. We're technically lying + # if people create their own instances, but that shouldn't happen + filename: str + file: IO[bytes] diff --git a/stubs/WebOb/webob/cookies.pyi b/stubs/WebOb/webob/cookies.pyi index fa6d349e98b6..9cc7d7acf3fb 100644 --- a/stubs/WebOb/webob/cookies.pyi +++ b/stubs/WebOb/webob/cookies.pyi @@ -1,15 +1,25 @@ from _typeshed import sentinel from _typeshed.wsgi import WSGIEnvironment -from collections.abc import Callable, Collection, ItemsView, Iterator, KeysView, MutableMapping, ValuesView +from collections.abc import Collection, ItemsView, Iterator, KeysView, MutableMapping, ValuesView from datetime import date, datetime, timedelta -from hashlib import _Hash +from time import _TimeTuple, struct_time from typing import Any, Literal, Protocol, TypeVar, overload from typing_extensions import TypeAlias -from webob.descriptors import _AsymmetricProperty -from webob.request import Request +from webob._types import AsymmetricProperty +from webob.request import BaseRequest from webob.response import Response +__all__ = [ + "Cookie", + "CookieProfile", + "SignedCookieProfile", + "SignedSerializer", + "JSONSerializer", + "Base64Serializer", + "make_cookie", +] + _T = TypeVar("_T") # we accept both the official spelling and the one used in the WebOb docs # the implementation compares after lower() so technically there are more @@ -27,7 +37,7 @@ class RequestCookies(MutableMapping[str, str]): @overload def get(self, name: str, default: None = None) -> str | None: ... @overload - def get(self, name: str, default: str | _T) -> str | _T: ... + def get(self, name: str, default: _T) -> str | _T: ... def __delitem__(self, name: str) -> None: ... def keys(self) -> KeysView[str]: ... def values(self) -> ValuesView[str]: ... @@ -37,11 +47,11 @@ class RequestCookies(MutableMapping[str, str]): def __len__(self) -> int: ... def clear(self) -> None: ... -class Cookie(dict[str, Morsel]): +class Cookie(dict[bytes, Morsel]): def __init__(self, input: str | None = None) -> None: ... def load(self, data: str) -> None: ... - def add(self, key: str | bytes, val: str | bytes) -> Morsel: ... 
- def __setitem__(self, key: str | bytes, val: str | bytes) -> Morsel: ... # type: ignore[override] + def add(self, key: str | bytes, val: str | bytes) -> Morsel | dict[bytes, bytes]: ... + def __setitem__(self, key: str | bytes, val: str | bytes) -> Morsel | dict[bytes, bytes]: ... # type: ignore[override] def serialize(self, full: bool = True) -> str: ... def values(self) -> list[Morsel]: ... # type: ignore[override] def __str__(self, full: bool = True) -> str: ... @@ -62,17 +72,12 @@ class Morsel(dict[bytes, bytes | bool | None]): def comment(self) -> bytes | None: ... @comment.setter def comment(self, v: bytes | None) -> None: ... - expires: _AsymmetricProperty[bytes | None, datetime | date | timedelta | int | str | bytes | None] - max_age: _AsymmetricProperty[bytes | None, timedelta | int | str | bytes] - @property - def httponly(self) -> bool | None: ... - @httponly.setter - def httponly(self, v: bool) -> None: ... - @property - def secure(self) -> bool | None: ... - @secure.setter - def secure(self, v: bool) -> None: ... - samesite: _AsymmetricProperty[bytes | None, _SameSitePolicy | None] + expires: AsymmetricProperty[bytes | None, datetime | date | timedelta | _TimeTuple | struct_time | int | str | bytes | None] + max_age: AsymmetricProperty[bytes | None, timedelta | int | str | bytes | None] + httponly: AsymmetricProperty[bool, bool | None] + secure: AsymmetricProperty[bool, bool | None] + samesite: AsymmetricProperty[bytes, _SameSitePolicy | bytes] + def __setitem__(self, k: str | bytes, v: bytes | bool | None) -> None: ... def serialize(self, full: bool = True) -> str: ... def __str__(self, full: bool = True) -> str: ... @@ -82,8 +87,8 @@ def make_cookie( max_age: int | timedelta | None = None, path: str = "/", domain: str | None = None, - secure: bool = False, - httponly: bool = False, + secure: bool | None = False, + httponly: bool | None = False, comment: str | None = None, samesite: _SameSitePolicy | None = None, ) -> str: ... @@ -99,14 +104,15 @@ class Base64Serializer: def loads(self, bstruct: bytes) -> Any: ... class SignedSerializer: - salt: str - secret: str + salt: str | bytes + secret: str | bytes hashalg: str salted_secret: bytes - digestmod: Callable[[bytes], _Hash] digest_size: int serializer: _Serializer - def __init__(self, secret: str, salt: str, hashalg: str = "sha512", serializer: _Serializer | None = None) -> None: ... + def __init__( + self, secret: str | bytes, salt: str | bytes, hashalg: str = "sha512", serializer: _Serializer | None = None + ) -> None: ... def dumps(self, appstruct: Any) -> bytes: ... def loads(self, bstruct: bytes) -> Any: ... @@ -119,7 +125,7 @@ class CookieProfile: path: str domains: Collection[str] | None serializer: _Serializer - request: Request | None + request: BaseRequest | None def __init__( self, cookie_name: str, @@ -133,8 +139,8 @@ class CookieProfile: domains: Collection[str] | None = None, serializer: _Serializer | None = None, ) -> None: ... - def __call__(self, request: Request) -> CookieProfile: ... - def bind(self, request: Request) -> CookieProfile: ... + def __call__(self, request: BaseRequest) -> CookieProfile: ... + def bind(self, request: BaseRequest) -> CookieProfile: ... def get_value(self) -> Any | None: ... def set_cookies( self, @@ -159,10 +165,9 @@ class CookieProfile: ) -> list[tuple[str, str]]: ... 
class SignedCookieProfile(CookieProfile): - secret: str - salt: str + secret: str | bytes + salt: str | bytes hashalg: str - serializer: SignedSerializer original_serializer: _Serializer def __init__( self, @@ -171,12 +176,12 @@ class SignedCookieProfile(CookieProfile): cookie_name: str, secure: bool = False, max_age: int | timedelta | None = None, - httponly: bool = False, + httponly: bool | None = False, samesite: _SameSitePolicy | None = None, path: str = "/", domains: Collection[str] | None = None, hashalg: str = "sha512", serializer: _Serializer | None = None, ) -> None: ... - def __call__(self, request: Request) -> SignedCookieProfile: ... - def bind(self, request: Request) -> SignedCookieProfile: ... + def __call__(self, request: BaseRequest) -> SignedCookieProfile: ... + def bind(self, request: BaseRequest) -> SignedCookieProfile: ... diff --git a/stubs/WebOb/webob/datetime_utils.pyi b/stubs/WebOb/webob/datetime_utils.pyi index a20e8a4d0c0c..68c45d2f12db 100644 --- a/stubs/WebOb/webob/datetime_utils.pyi +++ b/stubs/WebOb/webob/datetime_utils.pyi @@ -1,6 +1,22 @@ from datetime import date, datetime, timedelta, tzinfo from time import _TimeTuple, struct_time +__all__ = [ + "UTC", + "timedelta_to_seconds", + "year", + "month", + "week", + "day", + "hour", + "minute", + "second", + "parse_date", + "serialize_date", + "parse_date_delta", + "serialize_date_delta", +] + class _UTC(tzinfo): def dst(self, dt: datetime | None) -> timedelta: ... def utcoffset(self, dt: datetime | None) -> timedelta: ... diff --git a/stubs/WebOb/webob/dec.pyi b/stubs/WebOb/webob/dec.pyi index 3f3d76fd0ed5..41478ba2fb48 100644 --- a/stubs/WebOb/webob/dec.pyi +++ b/stubs/WebOb/webob/dec.pyi @@ -1,42 +1,42 @@ from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment from collections.abc import Callable, Iterable, Mapping -from typing import Any, Generic, TypeVar, overload -from typing_extensions import Concatenate, Never, ParamSpec, Self, TypeAlias +from typing import Any, Generic, overload +from typing_extensions import Concatenate, Never, ParamSpec, Self, TypeAlias, TypeVar -from webob.request import Request +from webob.request import BaseRequest, Request from webob.response import Response +__all__ = ["wsgify"] + _AnyResponse: TypeAlias = Response | WSGIApplication | str | None _S = TypeVar("_S") _AppT = TypeVar("_AppT", bound=WSGIApplication) _AppT_contra = TypeVar("_AppT_contra", bound=WSGIApplication, contravariant=True) -_RequestT = TypeVar("_RequestT", bound=Request) -_RequestT_contra = TypeVar("_RequestT_contra", bound=Request, contravariant=True) +_RequestT = TypeVar("_RequestT", bound=BaseRequest) +_RequestT_contra = TypeVar("_RequestT_contra", bound=BaseRequest, default=Request, contravariant=True) _P = ParamSpec("_P") _P2 = ParamSpec("_P2") -_RequestHandlerCallable: TypeAlias = Callable[Concatenate[_RequestT_contra, _P], _AnyResponse] -_RequestHandlerMethod: TypeAlias = Callable[Concatenate[Any, _RequestT_contra, _P], _AnyResponse] -_MiddlewareCallable: TypeAlias = Callable[Concatenate[_RequestT_contra, _AppT_contra, _P], _AnyResponse] -_MiddlewareMethod: TypeAlias = Callable[Concatenate[Any, _RequestT_contra, _AppT_contra, _P], _AnyResponse] -_RequestHandler: TypeAlias = _RequestHandlerCallable[_RequestT_contra, _P] | _RequestHandlerMethod[_RequestT_contra, _P] -_Middleware: TypeAlias = ( - _MiddlewareCallable[_RequestT_contra, _AppT_contra, _P] | _MiddlewareMethod[_RequestT_contra, _AppT_contra, _P] -) +_RequestHandlerCallable: TypeAlias = Callable[Concatenate[_RequestT, _P], 
_AnyResponse] +_RequestHandlerMethod: TypeAlias = Callable[Concatenate[Any, _RequestT, _P], _AnyResponse] +_MiddlewareCallable: TypeAlias = Callable[Concatenate[_RequestT, _AppT, _P], _AnyResponse] +_MiddlewareMethod: TypeAlias = Callable[Concatenate[Any, _RequestT, _AppT, _P], _AnyResponse] +_RequestHandler: TypeAlias = _RequestHandlerCallable[_RequestT, _P] | _RequestHandlerMethod[_RequestT, _P] +_Middleware: TypeAlias = _MiddlewareCallable[_RequestT, _AppT, _P] | _MiddlewareMethod[_RequestT, _AppT, _P] -class wsgify(Generic[_RequestT_contra, _P]): - RequestClass: type[Request] +class wsgify(Generic[_P, _RequestT_contra]): + RequestClass: type[_RequestT_contra] func: _RequestHandler[_RequestT_contra, _P] | None args: tuple[Any, ...] kwargs: dict[str, Any] middleware_wraps: WSGIApplication | None # NOTE: We disallow passing args/kwargs using this direct API, because # we can't really make it work as a decorator this way, these - # arguments should only really be used indrectly through the + # arguments should only really be used indirectly through the # middleware decorator, where we can be more type safe @overload def __init__( - self: wsgify[Request, []], + self: wsgify[[], Request], func: _RequestHandler[Request, []] | None = None, RequestClass: None = None, args: tuple[()] = (), @@ -45,7 +45,7 @@ class wsgify(Generic[_RequestT_contra, _P]): ) -> None: ... @overload def __init__( - self: wsgify[_RequestT_contra, []], # pyright: ignore[reportInvalidTypeVarUse] #11780 + self: wsgify[[], _RequestT_contra], # pyright: ignore[reportInvalidTypeVarUse] #11780 func: _RequestHandler[_RequestT_contra, []] | None, RequestClass: type[_RequestT_contra], args: tuple[()] = (), @@ -54,7 +54,7 @@ class wsgify(Generic[_RequestT_contra, _P]): ) -> None: ... @overload def __init__( - self: wsgify[_RequestT_contra, []], # pyright: ignore[reportInvalidTypeVarUse] #11780 + self: wsgify[[], _RequestT_contra], # pyright: ignore[reportInvalidTypeVarUse] #11780 func: _RequestHandler[_RequestT_contra, []] | None = None, *, RequestClass: type[_RequestT_contra], @@ -64,7 +64,7 @@ class wsgify(Generic[_RequestT_contra, _P]): ) -> None: ... @overload def __init__( - self: wsgify[Request, [_AppT_contra]], + self: wsgify[[_AppT_contra], Request], func: _Middleware[Request, _AppT_contra, []] | None = None, RequestClass: None = None, args: tuple[()] = (), @@ -74,7 +74,7 @@ class wsgify(Generic[_RequestT_contra, _P]): ) -> None: ... @overload def __init__( - self: wsgify[_RequestT_contra, [_AppT_contra]], # pyright: ignore[reportInvalidTypeVarUse] #11780 + self: wsgify[[_AppT_contra], _RequestT_contra], # pyright: ignore[reportInvalidTypeVarUse] #11780 func: _Middleware[_RequestT_contra, _AppT_contra, []] | None, RequestClass: type[_RequestT_contra], args: tuple[()] = (), @@ -84,7 +84,7 @@ class wsgify(Generic[_RequestT_contra, _P]): ) -> None: ... @overload def __init__( - self: wsgify[_RequestT_contra, [_AppT_contra]], # pyright: ignore[reportInvalidTypeVarUse] #11780 + self: wsgify[[_AppT_contra], _RequestT_contra], # pyright: ignore[reportInvalidTypeVarUse] #11780 func: _Middleware[_RequestT_contra, _AppT_contra, []] | None = None, *, RequestClass: type[_RequestT_contra], @@ -93,7 +93,7 @@ class wsgify(Generic[_RequestT_contra, _P]): middleware_wraps: _AppT_contra, ) -> None: ... @overload - def __get__(self, obj: None, type: type[_S]) -> _unbound_wsgify[_RequestT_contra, _P, _S]: ... + def __get__(self, obj: None, type: type[_S]) -> _unbound_wsgify[_P, _S, _RequestT_contra]: ... 
@overload def __get__(self, obj: object, type: type | None = None) -> Self: ... @overload @@ -118,34 +118,34 @@ class wsgify(Generic[_RequestT_contra, _P]): @classmethod def middleware( cls, middle_func: None = None, app: None | _AppT = None, *_: _P.args, **kw: _P.kwargs - ) -> _UnboundMiddleware[Any, _AppT, _P]: ... + ) -> _UnboundMiddleware[_P, _AppT, Any]: ... @overload @classmethod def middleware( cls, middle_func: _MiddlewareCallable[_RequestT, _AppT, _P2], app: None = None - ) -> _MiddlewareFactory[_RequestT, _AppT, _P2]: ... + ) -> _MiddlewareFactory[_P2, _AppT, _RequestT]: ... @overload @classmethod def middleware( cls, middle_func: _MiddlewareMethod[_RequestT, _AppT, _P2], app: None = None - ) -> _MiddlewareFactory[_RequestT, _AppT, _P2]: ... + ) -> _MiddlewareFactory[_P2, _AppT, _RequestT]: ... @overload @classmethod def middleware( cls, middle_func: _MiddlewareMethod[_RequestT, _AppT, _P2], app: None = None, *_: _P2.args, **kw: _P2.kwargs - ) -> _MiddlewareFactory[_RequestT, _AppT, _P2]: ... + ) -> _MiddlewareFactory[_P2, _AppT, _RequestT]: ... @overload @classmethod def middleware( cls, middle_func: _MiddlewareMethod[_RequestT, _AppT, _P2], app: _AppT - ) -> type[wsgify[_RequestT, Concatenate[_AppT, _P2]]]: ... + ) -> type[wsgify[Concatenate[_AppT, _P2], _RequestT]]: ... @overload @classmethod def middleware( cls, middle_func: _MiddlewareMethod[_RequestT, _AppT, _P2], app: _AppT, *_: _P2.args, **kw: _P2.kwargs - ) -> type[wsgify[_RequestT, Concatenate[_AppT, _P2]]]: ... + ) -> type[wsgify[Concatenate[_AppT, _P2], _RequestT]]: ... -class _unbound_wsgify(wsgify[_RequestT_contra, _P], Generic[_RequestT_contra, _P, _S]): +class _unbound_wsgify(wsgify[_P, _RequestT_contra], Generic[_P, _S, _RequestT_contra]): @overload # type: ignore[override] def __call__(self, __self: _S, env: WSGIEnvironment, /, start_response: StartResponse) -> Iterable[bytes]: ... @overload @@ -155,13 +155,13 @@ class _unbound_wsgify(wsgify[_RequestT_contra, _P], Generic[_RequestT_contra, _P @overload def __call__(self, __self: _S, /, req: _RequestT_contra, *args: _P.args, **kw: _P.kwargs) -> _AnyResponse: ... -class _UnboundMiddleware(Generic[_RequestT_contra, _AppT_contra, _P]): - wrapper_class: type[wsgify[_RequestT_contra, Concatenate[_AppT_contra, _P]]] +class _UnboundMiddleware(Generic[_P, _AppT_contra, _RequestT_contra]): + wrapper_class: type[wsgify[Concatenate[_AppT_contra, _P], _RequestT_contra]] app: _AppT_contra | None kw: dict[str, Any] def __init__( self, - wrapper_class: type[wsgify[_RequestT_contra, Concatenate[_AppT_contra, _P]]], + wrapper_class: type[wsgify[Concatenate[_AppT_contra, _P], _RequestT_contra]], app: _AppT_contra | None, kw: dict[str, Any], ) -> None: ... @@ -170,19 +170,19 @@ class _UnboundMiddleware(Generic[_RequestT_contra, _AppT_contra, _P]): @overload def __call__( self, func: _Middleware[_RequestT_contra, _AppT_contra, _P], app: None = None - ) -> wsgify[_RequestT_contra, Concatenate[_AppT_contra, _P]]: ... + ) -> wsgify[Concatenate[_AppT_contra, _P], _RequestT_contra]: ... @overload def __call__( self, func: _Middleware[_RequestT_contra, _AppT_contra, _P], app: _AppT_contra - ) -> wsgify[_RequestT_contra, Concatenate[_AppT_contra, _P]]: ... + ) -> wsgify[Concatenate[_AppT_contra, _P], _RequestT_contra]: ... 
-class _MiddlewareFactory(Generic[_RequestT_contra, _AppT_contra, _P]): - wrapper_class: type[wsgify[_RequestT_contra, Concatenate[_AppT_contra, _P]]] +class _MiddlewareFactory(Generic[_P, _AppT_contra, _RequestT_contra]): + wrapper_class: type[wsgify[Concatenate[_AppT_contra, _P], _RequestT_contra]] middleware: _Middleware[_RequestT_contra, _AppT_contra, _P] kw: dict[str, Any] def __init__( self, - wrapper_class: type[wsgify[_RequestT_contra, Concatenate[_AppT_contra, _P]]], + wrapper_class: type[wsgify[Concatenate[_AppT_contra, _P], _RequestT_contra]], middleware: _Middleware[_RequestT_contra, _AppT_contra, _P], kw: dict[str, Any], ) -> None: ... @@ -191,6 +191,6 @@ class _MiddlewareFactory(Generic[_RequestT_contra, _AppT_contra, _P]): @overload def __call__( self, app: None = None, *_: _P.args, **config: _P.kwargs - ) -> _MiddlewareFactory[_RequestT_contra, _AppT_contra, []]: ... + ) -> _MiddlewareFactory[[], _AppT_contra, _RequestT_contra]: ... @overload - def __call__(self, app: _AppT_contra, *_: _P.args, **config: _P.kwargs) -> wsgify[_RequestT_contra, [_AppT_contra]]: ... + def __call__(self, app: _AppT_contra, *_: _P.args, **config: _P.kwargs) -> wsgify[[_AppT_contra], _RequestT_contra]: ... diff --git a/stubs/WebOb/webob/descriptors.pyi b/stubs/WebOb/webob/descriptors.pyi index a80e04287f14..b9c3e5aff2ec 100644 --- a/stubs/WebOb/webob/descriptors.pyi +++ b/stubs/WebOb/webob/descriptors.pyi @@ -1,18 +1,25 @@ from collections.abc import Callable, Iterable from datetime import date, datetime, timedelta from time import _TimeTuple, struct_time -from typing import Any, Generic, NamedTuple, TypeVar, overload +from typing import Any, NamedTuple, TypeVar, overload from typing_extensions import TypeAlias +from webob._types import AsymmetricProperty, AsymmetricPropertyWithDelete, SymmetricProperty, SymmetricPropertyWithDelete from webob.byterange import ContentRange, Range from webob.etag import IfRange, IfRangeDate -_T = TypeVar("_T") _DefaultT = TypeVar("_DefaultT") _GetterReturnType = TypeVar("_GetterReturnType") _SetterValueType = TypeVar("_SetterValueType") _ConvertedGetterReturnType = TypeVar("_ConvertedGetterReturnType") _ConvertedSetterValueType = TypeVar("_ConvertedSetterValueType") +_DescriptorT = TypeVar("_DescriptorT", bound=AsymmetricPropertyWithDelete[Any, Any]) + +_StringProperty: TypeAlias = SymmetricPropertyWithDelete[str | None] +_ListProperty: TypeAlias = AsymmetricPropertyWithDelete[tuple[str, ...] | None, Iterable[str] | str | None] +_DateProperty: TypeAlias = AsymmetricPropertyWithDelete[ + datetime | None, date | datetime | timedelta | _TimeTuple | struct_time | float | str | None +] _ContentRangeParams: TypeAlias = ( ContentRange | list[int] @@ -26,66 +33,48 @@ _ContentRangeParams: TypeAlias = ( | None ) -class _AsymmetricProperty(Generic[_GetterReturnType, _SetterValueType]): - @overload - def __get__(self, obj: None, type: type | None = ..., /) -> property: ... - @overload - def __get__(self, obj: Any, type: type | None = ..., /) -> _GetterReturnType: ... - def __set__(self, obj: Any, value: _SetterValueType, /) -> None: ... - -class _AsymmetricPropertyWithDelete(_AsymmetricProperty[_GetterReturnType, _SetterValueType]): - def __delete__(self, obj: Any, /) -> None: ... - -class _SymmetricProperty(_AsymmetricProperty[_T, _T]): ... -class _SymmetricPropertyWithDelete(_AsymmetricPropertyWithDelete[_T, _T]): ... -class _StringProperty(_SymmetricPropertyWithDelete[str | None]): ... -class _ListProperty(_AsymmetricPropertyWithDelete[tuple[str, ...] 
| None, Iterable[str] | str | None]): ... -class _DateProperty( - _AsymmetricPropertyWithDelete[datetime | None, date | datetime | timedelta | _TimeTuple | struct_time | float | str | None] -): ... - @overload -def environ_getter(key: str, default: None, rfc_section: str | None = None) -> _SymmetricPropertyWithDelete[Any | None]: ... +def environ_getter(key: str, *, rfc_section: str | None = None) -> SymmetricProperty[Any]: ... +@overload +def environ_getter(key: str, default: None, rfc_section: str | None = None) -> SymmetricPropertyWithDelete[Any | None]: ... @overload def environ_getter( key: str, default: _DefaultT, rfc_section: str | None = None -) -> _AsymmetricPropertyWithDelete[Any | _DefaultT, Any | _DefaultT | None]: ... +) -> AsymmetricPropertyWithDelete[Any | _DefaultT, Any | _DefaultT | None]: ... @overload -def environ_getter(key: str, *, rfc_section: str | None = None) -> _SymmetricProperty[Any]: ... +def environ_decoder(key: str, *, rfc_section: str | None = None, encattr: str | None = None) -> SymmetricProperty[str]: ... @overload def environ_decoder( key: str, default: str, rfc_section: str | None = None, encattr: str | None = None -) -> _AsymmetricPropertyWithDelete[str, str | None]: ... +) -> AsymmetricPropertyWithDelete[str, str | None]: ... @overload def environ_decoder( key: str, default: None, rfc_section: str | None = None, encattr: str | None = None -) -> _SymmetricPropertyWithDelete[str | None]: ... -@overload -def environ_decoder(key: str, *, rfc_section: str | None = None, encattr: str | None = None) -> _SymmetricProperty[str]: ... -def upath_property(key: str) -> _SymmetricProperty[str]: ... -def deprecated_property(attr: _T, name: str, text: str, version: str) -> _T: ... +) -> SymmetricPropertyWithDelete[str | None]: ... +def upath_property(key: str) -> SymmetricProperty[str]: ... +def deprecated_property(attr: _DescriptorT, name: str, text: str, version: str) -> _DescriptorT: ... def header_getter(header: str, rfc_section: str) -> _StringProperty: ... @overload def converter( - prop: _AsymmetricPropertyWithDelete[_GetterReturnType, _SetterValueType], + prop: AsymmetricPropertyWithDelete[_GetterReturnType, _SetterValueType], parse: Callable[[_GetterReturnType], _ConvertedGetterReturnType], serialize: Callable[[_ConvertedSetterValueType], _SetterValueType], convert_name: str | None = None, -) -> _AsymmetricPropertyWithDelete[_ConvertedGetterReturnType, _ConvertedSetterValueType]: ... +) -> AsymmetricPropertyWithDelete[_ConvertedGetterReturnType, _ConvertedSetterValueType | None]: ... @overload def converter( - prop: _AsymmetricProperty[_GetterReturnType, _SetterValueType], + prop: AsymmetricProperty[_GetterReturnType, _SetterValueType], parse: Callable[[_GetterReturnType], _ConvertedGetterReturnType], serialize: Callable[[_ConvertedSetterValueType], _SetterValueType], convert_name: str | None = None, -) -> _AsymmetricProperty[_ConvertedGetterReturnType, _ConvertedSetterValueType]: ... +) -> AsymmetricProperty[_ConvertedGetterReturnType, _ConvertedSetterValueType | None]: ... def list_header(header: str, rfc_section: str) -> _ListProperty: ... def parse_list(value: str | None) -> tuple[str, ...] | None: ... def serialize_list(value: Iterable[str] | str) -> str: ... def converter_date(prop: _StringProperty) -> _DateProperty: ... def date_header(header: str, rfc_section: str) -> _DateProperty: ... def parse_etag_response(value: str | None, strong: bool = False) -> str | None: ... -def serialize_etag_response(value: str | tuple[str, bool]) -> str: ... 
+def serialize_etag_response(value: tuple[str, bool] | str) -> str: ... def serialize_if_range(value: IfRange | IfRangeDate | datetime | date | str) -> str | None: ... def parse_range(value: str | None) -> Range | None: ... def serialize_range(value: tuple[int, int | None] | list[int | None] | list[int] | str | None) -> str | None: ... diff --git a/stubs/WebOb/webob/etag.pyi b/stubs/WebOb/webob/etag.pyi index 08a81da59770..5f71a2ac1853 100644 --- a/stubs/WebOb/webob/etag.pyi +++ b/stubs/WebOb/webob/etag.pyi @@ -1,41 +1,34 @@ from collections.abc import Collection from datetime import datetime -from typing import Any, overload +from typing import Literal from typing_extensions import TypeAlias +from webob._types import AsymmetricPropertyWithDelete from webob.response import Response -_ETag: TypeAlias = _AnyETag | _NoETag | ETagMatcher +__all__ = ["AnyETag", "NoETag", "ETagMatcher", "IfRange", "etag_property"] -class _ETagProperty: - @overload - def __get__(self, obj: None, type: type | None = ..., /) -> property: ... - @overload - def __get__(self, obj: Any, type: type | None = ..., /) -> _ETag: ... - @overload - def __set__(self, obj: Any, value: str | None, /) -> None: ... - @overload - def __set__(self, obj: Any, value: _ETag, /) -> None: ... - def __delete__(self, obj: Any, /) -> None: ... +_ETag: TypeAlias = _AnyETag | _NoETag | ETagMatcher +_ETagProperty: TypeAlias = AsymmetricPropertyWithDelete[_ETag, _ETag | str | None] def etag_property(key: str, default: _ETag, rfc_section: str, strong: bool = True) -> _ETagProperty: ... class _AnyETag: - def __bool__(self) -> bool: ... - def __contains__(self, other: str) -> bool: ... + def __bool__(self) -> Literal[False]: ... + def __contains__(self, other: str | None) -> Literal[True]: ... AnyETag: _AnyETag class _NoETag: - def __bool__(self) -> bool: ... - def __contains__(self, other: str) -> bool: ... + def __bool__(self) -> Literal[False]: ... + def __contains__(self, other: str | None) -> Literal[False]: ... NoETag: _NoETag class ETagMatcher: etags: Collection[str] def __init__(self, etags: Collection[str]) -> None: ... - def __contains__(self, other: str) -> bool: ... + def __contains__(self, other: str | None) -> bool: ... @classmethod def parse(cls, value: str, strong: bool = True) -> ETagMatcher | _AnyETag: ... @@ -43,7 +36,7 @@ class IfRange: etag: _ETag def __init__(self, etag: _ETag) -> None: ... @classmethod - def parse(cls, value: str) -> IfRange | IfRangeDate: ... + def parse(cls, value: str | None) -> IfRange | IfRangeDate: ... def __contains__(self, resp: Response) -> bool: ... def __bool__(self) -> bool: ... 
diff --git a/stubs/WebOb/webob/exc.pyi b/stubs/WebOb/webob/exc.pyi index 5316b33cb081..da9ee25cf563 100644 --- a/stubs/WebOb/webob/exc.pyi +++ b/stubs/WebOb/webob/exc.pyi @@ -3,12 +3,73 @@ from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment from collections.abc import Iterable from string import Template from typing import Any, Literal, Protocol -from typing_extensions import Self +from typing_extensions import Self, TypeAlias from webob.response import Response +__all__ = [ + "HTTPAccepted", + "HTTPBadGateway", + "HTTPBadRequest", + "HTTPClientError", + "HTTPConflict", + "HTTPCreated", + "HTTPError", + "HTTPExpectationFailed", + "HTTPFailedDependency", + "HTTPForbidden", + "HTTPFound", + "HTTPGatewayTimeout", + "HTTPGone", + "HTTPInsufficientStorage", + "HTTPInternalServerError", + "HTTPLengthRequired", + "HTTPLocked", + "HTTPMethodNotAllowed", + "HTTPMovedPermanently", + "HTTPMultipleChoices", + "HTTPNetworkAuthenticationRequired", + "HTTPNoContent", + "HTTPNonAuthoritativeInformation", + "HTTPNotAcceptable", + "HTTPNotFound", + "HTTPNotImplemented", + "HTTPNotModified", + "HTTPOk", + "HTTPPartialContent", + "HTTPPaymentRequired", + "HTTPPermanentRedirect", + "HTTPPreconditionFailed", + "HTTPPreconditionRequired", + "HTTPProxyAuthenticationRequired", + "HTTPRedirection", + "HTTPRequestEntityTooLarge", + "HTTPRequestHeaderFieldsTooLarge", + "HTTPRequestRangeNotSatisfiable", + "HTTPRequestTimeout", + "HTTPRequestURITooLong", + "HTTPResetContent", + "HTTPSeeOther", + "HTTPServerError", + "HTTPServiceUnavailable", + "HTTPTemporaryRedirect", + "HTTPTooManyRequests", + "HTTPUnauthorized", + "HTTPUnavailableForLegalReasons", + "HTTPUnprocessableEntity", + "HTTPUnsupportedMediaType", + "HTTPUseProxy", + "HTTPVersionNotSupported", + "WSGIHTTPException", + "HTTPException", + "HTTPExceptionMiddleware", + "status_map", +] + +_Headers: TypeAlias = SupportsItems[str, str] | SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]] + class _JSONFormatter(Protocol): - def __call__(self, *, body: str, status: str, title: str, environ: WSGIEnvironment) -> str: ... + def __call__(self, *, body: str, status: str, title: str, environ: WSGIEnvironment) -> Any: ... class HTTPException(Exception): wsgi_response: Response @@ -28,7 +89,7 @@ class WSGIHTTPException(Response, HTTPException): def __init__( self, detail: str | None = None, - headers: SupportsItems[str, str] | SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]] | None = None, + headers: _Headers | None = None, comment: str | None = None, body_template: str | None = None, json_formatter: _JSONFormatter | None = None, @@ -36,9 +97,10 @@ class WSGIHTTPException(Response, HTTPException): ) -> None: ... def plain_body(self, environ: WSGIEnvironment) -> str: ... def html_body(self, environ: WSGIEnvironment) -> str: ... - def json_formatter(self, body: str, status: str, title: str, environ: WSGIEnvironment) -> str: ... - def json_body(self, environ: WSGIEnvironment) -> str: ... + def json_formatter(self, body: str, status: str, title: str, environ: WSGIEnvironment) -> Any: ... + def json_body(self, environ: WSGIEnvironment) -> str: ... # type: ignore[override] def generate_response(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... + def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... @property def wsgi_response(self) -> Self: ... # type: ignore[override] def __str__(self) -> str: ... 
# type: ignore[override] # noqa: Y029 @@ -64,12 +126,13 @@ class _HTTPMove(HTTPRedirection): def __init__( self, detail: str | None = None, - headers: str | None = None, + headers: _Headers | None = None, comment: str | None = None, body_template: str | None = None, location: str | None = None, add_slash: bool = False, ) -> None: ... + def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... class HTTPMultipleChoices(_HTTPMove): ... class HTTPMovedPermanently(_HTTPMove): ... @@ -123,4 +186,4 @@ class HTTPExceptionMiddleware: def __init__(self, application: WSGIApplication) -> None: ... def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... -status_map: dict[int, type[WSGIHTTPException]] +status_map: dict[int, type[HTTPOk | HTTPRedirection | HTTPClientError | HTTPServerError]] diff --git a/stubs/WebOb/webob/headers.pyi b/stubs/WebOb/webob/headers.pyi index abd938520863..8e14bf642561 100644 --- a/stubs/WebOb/webob/headers.pyi +++ b/stubs/WebOb/webob/headers.pyi @@ -1,9 +1,27 @@ from _typeshed.wsgi import WSGIEnvironment from collections.abc import Iterator, MutableMapping +from typing import TypeVar, overload from webob.multidict import MultiDict -class ResponseHeaders(MultiDict[str, str]): ... +__all__ = ["ResponseHeaders", "EnvironHeaders"] + +_T = TypeVar("_T") + +class ResponseHeaders(MultiDict[str, str]): + def __getitem__(self, key: str) -> str: ... + def getall(self, key: str) -> list[str]: ... + def mixed(self) -> dict[str, str | list[str]]: ... + def dict_of_lists(self) -> dict[str, list[str]]: ... + def __setitem__(self, key: str, value: str) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __contains__(self, key: object) -> bool: ... + has_key = __contains__ + def setdefault(self, key: str, default: str) -> str: ... # type: ignore[override] + @overload + def pop(self, key: str) -> str: ... + @overload + def pop(self, key: str, default: _T) -> str | _T: ... class EnvironHeaders(MutableMapping[str, str]): environ: WSGIEnvironment @@ -11,7 +29,7 @@ class EnvironHeaders(MutableMapping[str, str]): def __getitem__(self, hname: str) -> str: ... def __setitem__(self, hname: str, value: str) -> None: ... def __delitem__(self, hname: str) -> None: ... - def keys(self) -> list[str]: ... # type: ignore[override] + def keys(self) -> Iterator[str]: ... # type: ignore[override] def __contains__(self, hname: object) -> bool: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[str]: ... 
diff --git a/stubs/WebOb/webob/multidict.pyi b/stubs/WebOb/webob/multidict.pyi index b1aa4a51050d..1b5f3a0466ea 100644 --- a/stubs/WebOb/webob/multidict.pyi +++ b/stubs/WebOb/webob/multidict.pyi @@ -1,30 +1,49 @@ -import sys -from _typeshed import SupportsItems, SupportsKeysAndGetItem +from _typeshed import SupportsKeysAndGetItem from _typeshed.wsgi import WSGIEnvironment from collections.abc import Collection, Iterable, Iterator, MutableMapping -from typing import Any, Literal, TypeVar, overload -from typing_extensions import Self, TypeAlias +from typing import Literal, Protocol, TypeVar, overload +from typing_extensions import Self -if sys.version_info >= (3, 13): - _FieldStorage: TypeAlias = Any -else: - from cgi import FieldStorage as _FieldStorage +from webob.compat import cgi_FieldStorage, cgi_FieldStorage as _FieldStorageWithFile + +__all__ = ["MultiDict", "NestedMultiDict", "NoVars", "GetDict"] _T = TypeVar("_T") _KT = TypeVar("_KT") _VT = TypeVar("_VT") +_KT_co = TypeVar("_KT_co", covariant=True) +_VT_co = TypeVar("_VT_co", covariant=True) + +class _SupportsItemsWithIterableResult(Protocol[_KT_co, _VT_co]): + def items(self) -> Iterable[tuple[_KT_co, _VT_co]]: ... class MultiDict(MutableMapping[_KT, _VT]): @overload - def __init__(self, m: SupportsItems[_KT, _VT], /, **kwargs: _VT) -> None: ... + def __init__(self) -> None: ... + @overload + def __init__(self: MultiDict[str, _VT], **kwargs: _VT) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780 + @overload + def __init__(self, m: _SupportsItemsWithIterableResult[_KT, _VT], /) -> None: ... + @overload + def __init__( + self: MultiDict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + m: _SupportsItemsWithIterableResult[str, _VT], + /, + **kwargs: _VT, + ) -> None: ... @overload - def __init__(self, m: Iterable[tuple[_KT, _VT]], /, **kwargs: _VT) -> None: ... + def __init__(self, m: Iterable[tuple[_KT, _VT]], /) -> None: ... @overload - def __init__(self, **kwargs: _VT) -> None: ... + def __init__( + self: MultiDict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + m: Iterable[tuple[str, _VT]], + /, + **kwargs: _VT, + ) -> None: ... @classmethod - def view_list(cls, lst: list[tuple[_KT, _VT]]) -> MultiDict[_KT, _VT]: ... + def view_list(cls, lst: list[tuple[_KT, _VT]]) -> Self: ... @classmethod - def from_fieldstorage(cls, fs: _FieldStorage) -> MultiDict[str, str | _FieldStorage]: ... + def from_fieldstorage(cls, fs: cgi_FieldStorage) -> MultiDict[str, str | _FieldStorageWithFile]: ... def __getitem__(self, key: _KT) -> _VT: ... def __setitem__(self, key: _KT, value: _VT) -> None: ... def add(self, key: _KT, value: _VT) -> None: ... @@ -51,43 +70,90 @@ class MultiDict(MutableMapping[_KT, _VT]): def pop(self, key: _KT, default: _T) -> _VT | _T: ... def popitem(self) -> tuple[_KT, _VT]: ... @overload # type: ignore[override] - def update(self, m: Collection[tuple[_KT, _VT]], /, **kwargs: _VT) -> None: ... + def update(self: MultiDict[str, _VT], **kwargs: _VT) -> None: ... + @overload + def update(self, m: Collection[tuple[_KT, _VT]], /) -> None: ... + @overload + def update(self: MultiDict[str, _VT], m: Collection[tuple[str, _VT]], /, **kwargs: _VT) -> None: ... + @overload + def extend(self, other: _SupportsItemsWithIterableResult[_KT, _VT]) -> None: ... + @overload + def extend(self: MultiDict[str, _VT], other: _SupportsItemsWithIterableResult[str, _VT], **kwargs: _VT) -> None: ... @overload - def update(self, **kwargs: _VT) -> None: ... 
+ def extend(self, other: Iterable[tuple[_KT, _VT]]) -> None: ... @overload - def extend(self, other: SupportsItems[_KT, _VT], **kwargs: _VT) -> None: ... + def extend(self: MultiDict[str, _VT], other: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None: ... @overload - def extend(self, other: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> None: ... + def extend(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... @overload - def extend(self, other: Iterable[tuple[_KT, _VT]], **kwargs: _VT) -> None: ... + def extend(self: MultiDict[str, _VT], other: SupportsKeysAndGetItem[str, _VT], **kwargs: _VT) -> None: ... @overload - def extend(self, other: None = None, **kwargs: _VT) -> None: ... + def extend(self: MultiDict[str, _VT], other: None = None, **kwargs: _VT) -> None: ... def __len__(self) -> int: ... def keys(self) -> Iterator[_KT]: ... # type: ignore[override] __iter__ = keys - def values(self) -> Iterator[_VT]: ... # type: ignore[override] def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] + def values(self) -> Iterator[_VT]: ... # type: ignore[override] class GetDict(MultiDict[str, str]): env: WSGIEnvironment @overload - def __init__(self, data: SupportsItems[str, str], env: WSGIEnvironment) -> None: ... + def __init__(self, data: _SupportsItemsWithIterableResult[str, str], env: WSGIEnvironment) -> None: ... @overload def __init__(self, data: Iterable[tuple[str, str]], env: WSGIEnvironment) -> None: ... def on_change(self) -> None: ... + def __setitem__(self, key: str, value: str) -> None: ... + def add(self, key: str, value: str) -> None: ... + def __delitem__(self, key: str) -> None: ... + def clear(self) -> None: ... + def setdefault(self, key: str, default: str) -> str: ... # type: ignore[override] + @overload + def pop(self, key: str) -> str: ... + @overload + def pop(self, key: str, default: _T) -> str | _T: ... + def popitem(self) -> tuple[str, str]: ... + @overload # type: ignore[override] + def update(self, **kwargs: str) -> None: ... + @overload + def update(self, m: Collection[tuple[str, str]], /, **kwargs: str) -> None: ... + @overload + def extend(self, other: _SupportsItemsWithIterableResult[str, str], **kwargs: str) -> None: ... + @overload + def extend(self, other: Iterable[tuple[str, str]], **kwargs: str) -> None: ... + @overload + def extend(self, other: SupportsKeysAndGetItem[str, str], **kwargs: str) -> None: ... + @overload + def extend(self, other: None = None, **kwargs: str) -> None: ... + def copy(self) -> MultiDict[str, str]: ... # type: ignore[override] class NestedMultiDict(MultiDict[_KT, _VT]): - dicts: tuple[MultiDict[_KT, _VT] | NoVars, ...] - def __init__(self, *dicts: MultiDict[_KT, _VT] | NoVars) -> None: ... - def __setitem__(self, key: _KT, value: _VT) -> None: ... - def add(self, key: _KT, value: _VT) -> None: ... - def __delitem__(self, key: _KT) -> None: ... - def clear(self) -> None: ... - def setdefault(self, key: _KT, default: _VT | None = ...) -> Any: ... - def pop(self, key: _KT, default: Any = ...) -> Any: ... - def popitem(self) -> tuple[_KT, _VT]: ... - def update(self, *args: Any, **kwargs: _VT) -> None: ... + # FIXME: It would be more accurate to use a Protocol here, which has a + # covariant _VT, instead of MultiDict + dicts: tuple[MultiDict[_KT, _VT], ...] + def __init__(self, *dicts: MultiDict[_KT, _VT]) -> None: ... + def __getitem__(self, key: _KT) -> _VT: ... 
+
+    # NOTE: These methods all raise exceptions at runtime, so this will give us
+    #       somewhat sane type checker errors; we would prefer to use
+    #       something like @type_error here, if it existed.
+    #       This is only really necessary because the inheritance hierarchy
+    #       is a mess.
+    __setitem__: None  # type: ignore[assignment]
+    add: None  # type: ignore[assignment]
+    __delitem__: None  # type: ignore[assignment]
+    clear: None  # type: ignore[assignment]
+    setdefault: None  # type: ignore[assignment]
+    pop: None  # type: ignore[assignment]
+    popitem: None  # type: ignore[assignment]
+    update: None  # type: ignore[assignment]
+
     def getall(self, key: _KT) -> list[_VT]: ...
     def copy(self) -> MultiDict[_KT, _VT]: ...  # type: ignore[override]
+    def __contains__(self, key: object) -> bool: ...
+    has_key = __contains__
+    def __len__(self) -> int: ...
+    def items(self) -> Iterator[tuple[_KT, _VT]]: ...  # type: ignore[override]
+    def values(self) -> Iterator[_VT]: ...  # type: ignore[override]
+    def keys(self) -> Iterator[_KT]: ...  # type: ignore[override]
+    __iter__ = keys
 
 class NoVars:
     reason: str
diff --git a/stubs/WebOb/webob/request.pyi b/stubs/WebOb/webob/request.pyi
index 4f9af63f9ace..c562838f7b7b 100644
--- a/stubs/WebOb/webob/request.pyi
+++ b/stubs/WebOb/webob/request.pyi
@@ -1,34 +1,33 @@
 import datetime
 import io
-import sys
-from _typeshed import ExcInfo, ReadableBuffer, SupportsItems, SupportsKeysAndGetItem, SupportsNoArgReadline, SupportsRead
+from _typeshed import OptExcInfo, SupportsKeysAndGetItem, SupportsNoArgReadline, SupportsRead, WriteableBuffer
 from _typeshed.wsgi import WSGIApplication, WSGIEnvironment
 from collections.abc import Iterable, Mapping
 from re import Pattern
-from tempfile import _TemporaryFileWrapper
 from typing import IO, Any, ClassVar, Literal, Protocol, TypedDict, TypeVar, overload
 from typing_extensions import Self, TypeAlias
 
+from webob._types import AsymmetricProperty, AsymmetricPropertyWithDelete, SymmetricProperty, SymmetricPropertyWithDelete
 from webob.acceptparse import _AcceptCharsetProperty, _AcceptEncodingProperty, _AcceptLanguageProperty, _AcceptProperty
 from webob.byterange import Range
-from webob.cachecontrol import _RequestCacheControl
+from webob.cachecontrol import CacheControl
+from webob.client import SendRequest
+from webob.compat import cgi_FieldStorage
 from webob.cookies import RequestCookies
-from webob.descriptors import _AsymmetricProperty, _AsymmetricPropertyWithDelete, _authorization, _DateProperty
+from webob.descriptors import _authorization, _DateProperty
 from webob.etag import IfRange, IfRangeDate, _ETagProperty
 from webob.headers import EnvironHeaders
 from webob.multidict import GetDict, MultiDict, NestedMultiDict, NoVars
-from webob.response import Response, _HTTPHeader
+from webob.response import Response
 
-if sys.version_info >= (3, 13):
-    _FieldStorage: TypeAlias = Any
-else:
-    from cgi import FieldStorage as _FieldStorage
+__all__ = ["BaseRequest", "Request", "LegacyRequest"]
 
 _T = TypeVar("_T")
-_HTTPMethod: TypeAlias = Literal["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH"]
+_HTTPMethod: TypeAlias = Literal["GET", "HEAD", "POST", "PUT", "DELETE", "CONNECT", "OPTIONS", "TRACE", "PATCH"]
 _ListOrTuple: TypeAlias = list[_T] | tuple[_T, ...]
+_RequestCacheControl: TypeAlias = CacheControl[Literal["request"]]
 
-class _SupportsReadAndNoArgReadline(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ...
+class _SupportsReadAndNoArgReadline(SupportsRead[str | bytes], SupportsNoArgReadline[str | bytes], Protocol): ...
class _RequestCacheControlDict(TypedDict, total=False): max_stale: int @@ -39,11 +38,7 @@ class _RequestCacheControlDict(TypedDict, total=False): no_transform: bool max_age: int -# On py313 this subclasses `Any`, hence the type: ignore. -# This is needed for the regr_test.py script, which uses --disallow-subclassing-any -class _FieldStorageWithFile(_FieldStorage): # type: ignore[misc] - file: IO[bytes] - filename: str +_FieldStorageWithFile = cgi_FieldStorage class _NoDefault: ... @@ -52,7 +47,6 @@ NoDefault: _NoDefault class BaseRequest: request_body_tempfile_limit: ClassVar[int] environ: WSGIEnvironment - method: _HTTPMethod def __init__(self, environ: WSGIEnvironment, **kw: Any) -> None: ... @overload def encget(self, key: str, default: _T, encattr: str | None = None) -> str | _T: ... @@ -66,44 +60,29 @@ class BaseRequest: def body_file(self) -> SupportsRead[bytes]: ... @body_file.setter def body_file(self, value: SupportsRead[bytes]) -> None: ... - content_length: int | None - body_file_raw: SupportsRead[bytes] + @body_file.deleter + def body_file(self) -> None: ... + content_length: SymmetricPropertyWithDelete[int | None] + body_file_raw: SymmetricProperty[SupportsRead[bytes]] is_body_seekable: bool @property def body_file_seekable(self) -> IO[bytes]: ... - url_encoding: str - @property - def scheme(self) -> str | None: ... - @scheme.setter - def scheme(self, value: str | None) -> None: ... - @property - def http_version(self) -> str | None: ... - @http_version.setter - def http_version(self, value: str | None) -> None: ... - remote_user: str | None - remote_host: str | None - remote_addr: str | None - query_string: str - @property - def server_name(self) -> str | None: ... - @server_name.setter - def server_name(self, value: str | None) -> None: ... - @property - def server_port(self) -> int | None: ... - @server_port.setter - def server_port(self, value: int | None) -> None: ... - script_name: str - @property - def path_info(self) -> str | None: ... - @path_info.setter - def path_info(self, value: str | None) -> None: ... - uscript_name: str # bw compat - @property - def upath_info(self) -> str | None: ... # bw compat - @upath_info.setter - def upath_info(self, value: str | None) -> None: ... # bw compat - content_type: str | None - headers: _AsymmetricProperty[EnvironHeaders, SupportsItems[str, str] | Iterable[tuple[str, str]]] + url_encoding: AsymmetricPropertyWithDelete[str, str | None] + scheme: SymmetricProperty[str] + method: AsymmetricPropertyWithDelete[_HTTPMethod, _HTTPMethod | None] + http_version: SymmetricProperty[str] + remote_user: SymmetricPropertyWithDelete[str | None] + remote_host: SymmetricPropertyWithDelete[str | None] + remote_addr: SymmetricPropertyWithDelete[str | None] + query_string: AsymmetricPropertyWithDelete[str, str | None] + server_name: SymmetricProperty[str] + server_port: SymmetricProperty[int] + script_name: AsymmetricPropertyWithDelete[str, str | None] + path_info: SymmetricProperty[str] + uscript_name = script_name # bw compat + upath_info = path_info # bw compat + content_type: AsymmetricPropertyWithDelete[str, str | None] + headers: AsymmetricProperty[EnvironHeaders, SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]]] @property def client_addr(self) -> str | None: ... @property @@ -123,24 +102,29 @@ class BaseRequest: def relative_url(self, other_url: str, to_application: bool = False) -> str: ... def path_info_pop(self, pattern: Pattern[str] | None = None) -> str | None: ... def path_info_peek(self) -> str | None: ... 
- urlvars: dict[str, str] - urlargs: tuple[str] + urlvars: SymmetricPropertyWithDelete[dict[str, str]] + urlargs: SymmetricPropertyWithDelete[tuple[str, ...]] @property def is_xhr(self) -> bool: ... - host: str + host: SymmetricPropertyWithDelete[str] @property def domain(self) -> str: ... - body: bytes - json: Any - json_body: Any - text: str + @property + def body(self) -> bytes: ... + @body.setter + def body(self, value: bytes | None) -> None: ... + @body.deleter + def body(self) -> None: ... + json: SymmetricPropertyWithDelete[Any] + json_body: SymmetricPropertyWithDelete[Any] + text: SymmetricPropertyWithDelete[str] @property def POST(self) -> MultiDict[str, str | _FieldStorageWithFile] | NoVars: ... @property def GET(self) -> GetDict: ... @property def params(self) -> NestedMultiDict[str, str | _FieldStorageWithFile]: ... - cookies: _AsymmetricProperty[RequestCookies, SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]]] + cookies: AsymmetricProperty[RequestCookies, SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]]] def copy(self) -> Self: ... def copy_get(self) -> Self: ... @property @@ -149,7 +133,7 @@ class BaseRequest: def is_body_readable(self, flag: bool) -> None: ... def make_body_seekable(self) -> None: ... def copy_body(self) -> None: ... - def make_tempfile(self) -> _TemporaryFileWrapper[bytes]: ... + def make_tempfile(self) -> io.BufferedRandom: ... def remove_conditional_headers( self, remove_encoding: bool = True, remove_range: bool = True, remove_match: bool = True, remove_modified: bool = True ) -> None: ... @@ -157,24 +141,24 @@ class BaseRequest: accept_charset: _AcceptCharsetProperty accept_encoding: _AcceptEncodingProperty accept_language: _AcceptLanguageProperty - authorization: _AsymmetricPropertyWithDelete[_authorization | None, tuple[str, str | dict[str, str]] | list[Any] | str | None] - cache_control: _AsymmetricPropertyWithDelete[ - _RequestCacheControl | None, _RequestCacheControl | _RequestCacheControlDict | str | None + authorization: AsymmetricPropertyWithDelete[_authorization | None, tuple[str, str | dict[str, str]] | list[Any] | str | None] + cache_control: AsymmetricPropertyWithDelete[ + _RequestCacheControl, _RequestCacheControl | _RequestCacheControlDict | str | None ] if_match: _ETagProperty if_none_match: _ETagProperty date: _DateProperty if_modified_since: _DateProperty if_unmodified_since: _DateProperty - if_range: _AsymmetricPropertyWithDelete[ - IfRange | IfRangeDate | None, IfRange | IfRangeDate | datetime.datetime | datetime.date | str | None + if_range: AsymmetricPropertyWithDelete[ + IfRange | IfRangeDate, IfRange | IfRangeDate | datetime.datetime | datetime.date | str | None ] - max_forwards: int | None - pragma: str | None - range: _AsymmetricPropertyWithDelete[Range | None, tuple[int, int | None] | list[int | None] | list[int] | str | None] - referer: str | None - referrer: str | None - user_agent: str | None + max_forwards: SymmetricPropertyWithDelete[int | None] + pragma: SymmetricPropertyWithDelete[str | None] + range: AsymmetricPropertyWithDelete[Range | None, tuple[int, int | None] | list[int | None] | list[int] | str | None] + referer: SymmetricPropertyWithDelete[str | None] + referrer = referer + user_agent: SymmetricPropertyWithDelete[str | None] def as_bytes(self, skip_body: bool = False) -> bytes: ... def as_text(self) -> str: ... 
@classmethod @@ -186,15 +170,21 @@ class BaseRequest: @overload def call_application( self, application: WSGIApplication, catch_exc_info: Literal[False] = False - ) -> tuple[str, list[_HTTPHeader], Iterable[bytes]]: ... + ) -> tuple[str, list[tuple[str, str]], Iterable[bytes]]: ... @overload def call_application( self, application: WSGIApplication, catch_exc_info: Literal[True] - ) -> tuple[str, list[_HTTPHeader], Iterable[bytes], ExcInfo | None]: ... + ) -> tuple[str, list[tuple[str, str]], Iterable[bytes], OptExcInfo | None]: ... + @overload + def call_application( + self, application: WSGIApplication, catch_exc_info: bool + ) -> ( + tuple[str, list[tuple[str, str]], Iterable[bytes], OptExcInfo | None] | tuple[str, list[tuple[str, str]], Iterable[bytes]] + ): ... ResponseClass: type[Response] def send(self, application: WSGIApplication | None = None, catch_exc_info: bool = False) -> Response: ... get_response = send - def make_default_send_app(self) -> WSGIApplication: ... + def make_default_send_app(self) -> SendRequest: ... @classmethod def blank( cls, @@ -207,7 +197,7 @@ class BaseRequest: ) -> Self: ... class LegacyRequest(BaseRequest): - @property + @property # type: ignore[override] def uscript_name(self) -> str: ... @uscript_name.setter def uscript_name(self, value: str) -> None: ... @@ -242,13 +232,14 @@ class LimitedLengthFile(io.RawIOBase): maxlen: int remaining: int def __init__(self, file: SupportsRead[bytes], maxlen: int) -> None: ... + def fileno(self) -> int: ... @staticmethod def readable() -> Literal[True]: ... - def readinto(self, buff: ReadableBuffer) -> int: ... + def readinto(self, buff: WriteableBuffer) -> int: ... class Transcoder: charset: str errors: str def __init__(self, charset: str, errors: str = "strict") -> None: ... def transcode_query(self, q: str) -> str: ... - def transcode_fs(self, fs: _FieldStorage, content_type: str) -> io.BytesIO: ... + def transcode_fs(self, fs: cgi_FieldStorage, content_type: str) -> io.BytesIO: ... 
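Similarly, the call_application()/send() overloads above correspond to runtime usage along these lines; a minimal sketch with a throwaway WSGI app, where hello_app and the variable names are illustrative only:

    from webob import Request, Response

    def hello_app(environ, start_response):
        # delegate to a Response acting as a WSGI application
        return Response("hello")(environ, start_response)

    req = Request.blank("/")
    # 3-tuple overload (catch_exc_info defaults to False)
    status, headers, app_iter = req.call_application(hello_app)
    # 4-tuple overload with catch_exc_info=True
    status2, headers2, app_iter2, exc_info = req.call_application(hello_app, catch_exc_info=True)

    resp = req.send(hello_app)  # get_response() is an alias for send()
    assert resp.status_code == 200 and resp.body == b"hello"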
diff --git a/stubs/WebOb/webob/response.pyi b/stubs/WebOb/webob/response.pyi index afa81270a8e6..4a22b3530adc 100644 --- a/stubs/WebOb/webob/response.pyi +++ b/stubs/WebOb/webob/response.pyi @@ -2,23 +2,22 @@ from _typeshed import SupportsItems, SupportsRead from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment from collections.abc import Iterable, Iterator, Sequence from datetime import timedelta -from typing import IO, Any, Literal, Protocol, TypedDict -from typing_extensions import TypeAlias +from typing import IO, Any, Literal, Protocol, TypedDict, TypeVar, overload +from typing_extensions import Self, TypeAlias +from webob._types import AsymmetricProperty, AsymmetricPropertyWithDelete, SymmetricProperty, SymmetricPropertyWithDelete from webob.byterange import ContentRange -from webob.cachecontrol import _ResponseCacheControl +from webob.cachecontrol import CacheControl from webob.cookies import _SameSitePolicy -from webob.descriptors import ( - _AsymmetricProperty, - _AsymmetricPropertyWithDelete, - _authorization, - _ContentRangeParams, - _DateProperty, - _ListProperty, -) +from webob.descriptors import _authorization, _ContentRangeParams, _DateProperty, _ListProperty from webob.headers import ResponseHeaders from webob.request import Request +__all__ = ["Response"] + +_ResponseT = TypeVar("_ResponseT", bound=Response) +_ResponseCacheControl: TypeAlias = CacheControl[Literal["response"]] + class _ResponseCacheExpires(Protocol): def __call__( self, @@ -52,8 +51,6 @@ class _ResponseCacheControlDict(TypedDict, total=False): stale_while_revalidate: int stale_if_error: int -_HTTPHeader: TypeAlias = tuple[str, str] - class Response: default_content_type: str default_charset: str @@ -62,69 +59,68 @@ class Response: default_body_encoding: str request: Request | None environ: WSGIEnvironment | None - status: str + status: AsymmetricProperty[str, int | str | bytes] conditional_response: bool - def __init__( self, body: bytes | str | None = None, - status: str | None = None, - headerlist: list[_HTTPHeader] | None = None, - app_iter: Iterator[bytes] | None = None, + status: int | str | bytes | None = None, + headerlist: list[tuple[str, str]] | None = None, + app_iter: Iterable[bytes] | None = None, content_type: str | None = None, conditional_response: bool | None = None, charset: str = ..., **kw: Any, ) -> None: ... @classmethod - def from_file(cls, fp: IO[str]) -> Response: ... + def from_file(cls, fp: IO[str] | IO[bytes]) -> Response: ... def copy(self) -> Response: ... - status_code: int - status_int: int - headerlist: _AsymmetricPropertyWithDelete[list[_HTTPHeader], Iterable[_HTTPHeader] | SupportsItems[str, str]] - headers: _AsymmetricProperty[ResponseHeaders, SupportsItems[str, str] | Iterable[tuple[str, str]]] - body: bytes - json: Any - json_body: Any + status_code: SymmetricProperty[int] + status_int: SymmetricProperty[int] + headerlist: AsymmetricPropertyWithDelete[list[tuple[str, str]], Iterable[tuple[str, str]] | SupportsItems[str, str]] + headers: AsymmetricProperty[ResponseHeaders, SupportsItems[str, str] | Iterable[tuple[str, str]]] + body: SymmetricPropertyWithDelete[bytes] + json: SymmetricPropertyWithDelete[Any] + json_body: SymmetricPropertyWithDelete[Any] @property def has_body(self) -> bool: ... - text: str - unicode_body: str # deprecated - ubody: str # deprecated - body_file: _AsymmetricPropertyWithDelete[ResponseBodyFile, SupportsRead[bytes]] - content_length: int | None - def write(self, text: str | bytes) -> None: ... 
- app_iter: Iterator[bytes] + text: SymmetricPropertyWithDelete[str] + unicode_body: SymmetricPropertyWithDelete[str] # deprecated + ubody: SymmetricPropertyWithDelete[str] # deprecated + body_file: AsymmetricPropertyWithDelete[ResponseBodyFile, SupportsRead[bytes]] + content_length: AsymmetricPropertyWithDelete[int | None, int | str | bytes | None] + def write(self, text: str | bytes) -> int: ... + app_iter: SymmetricPropertyWithDelete[Iterable[bytes]] allow: _ListProperty vary: _ListProperty - content_encoding: str | None - content_language: _ListProperty - content_location: str | None - content_md5: str | None - content_disposition: str | None - accept_ranges: str | None - content_range: _AsymmetricPropertyWithDelete[ContentRange | None, _ContentRangeParams] + content_encoding: SymmetricPropertyWithDelete[str | None] + content_language: SymmetricPropertyWithDelete[str | None] + content_location: SymmetricPropertyWithDelete[str | None] + content_md5: SymmetricPropertyWithDelete[str | None] + content_disposition: SymmetricPropertyWithDelete[str | None] + accept_ranges: SymmetricPropertyWithDelete[str | None] + content_range: AsymmetricPropertyWithDelete[ContentRange | None, _ContentRangeParams] date: _DateProperty expires: _DateProperty last_modified: _DateProperty - etag: _AsymmetricPropertyWithDelete[str | None, tuple[str, bool] | str | None] + etag: AsymmetricPropertyWithDelete[str | None, tuple[str, bool] | str | None] @property def etag_strong(self) -> str | None: ... - location: str | None - pragma: str | None - age: int | None + location: SymmetricPropertyWithDelete[str | None] + pragma: SymmetricPropertyWithDelete[str | None] + age: SymmetricPropertyWithDelete[int | None] retry_after: _DateProperty - server: str | None - www_authenticate: _AsymmetricPropertyWithDelete[ + server: SymmetricPropertyWithDelete[str | None] + www_authenticate: AsymmetricPropertyWithDelete[ _authorization | None, tuple[str, str | dict[str, str]] | list[Any] | str | None ] - charset: str | None - content_type: str | None - content_type_params: _AsymmetricPropertyWithDelete[dict[str, str], SupportsItems[str, str] | None] + charset: SymmetricPropertyWithDelete[str | None] + content_type: SymmetricPropertyWithDelete[str | None] + content_type_params: AsymmetricPropertyWithDelete[dict[str, str], SupportsItems[str, str] | None] def set_cookie( self, - name: str, - value: str | None = "", + name: str | bytes, + value: str | bytes | None = "", max_age: int | timedelta | None = None, path: str = "/", domain: str | None = None, @@ -134,11 +130,14 @@ class Response: overwrite: bool = False, samesite: _SameSitePolicy | None = None, ) -> None: ... - def delete_cookie(self, name: str, path: str = "/", domain: str | None = None) -> None: ... - def unset_cookie(self, name: str, strict: bool = True) -> None: ... - def merge_cookies(self, resp: Response | WSGIApplication) -> None: ... - cache_control: _AsymmetricProperty[_ResponseCacheControl, _ResponseCacheControl | _ResponseCacheControlDict | str | None] - cache_expires: _AsymmetricProperty[_ResponseCacheExpires, timedelta | int | bool | None] + def delete_cookie(self, name: str | bytes, path: str = "/", domain: str | None = None) -> None: ... + def unset_cookie(self, name: str | bytes, strict: bool = True) -> None: ... + @overload + def merge_cookies(self, resp: _ResponseT) -> _ResponseT: ... + @overload + def merge_cookies(self, resp: WSGIApplication) -> WSGIApplication: ... 
+ cache_control: AsymmetricProperty[_ResponseCacheControl, _ResponseCacheControl | _ResponseCacheControlDict | str | None] + cache_expires: AsymmetricProperty[_ResponseCacheExpires, timedelta | int | bool | None] def encode_content(self, encoding: Literal["gzip", "identity"] = "gzip", lazy: bool = False) -> None: ... def decode_content(self) -> None: ... def md5_etag(self, body: bytes | None = None, set_content_md5: bool = False) -> None: ... @@ -154,8 +153,9 @@ class ResponseBodyFile: def __init__(self, response: Response) -> None: ... @property def encoding(self) -> str | None: ... + # NOTE: Technically this is an instance attribute and not a method def write(self, text: str | bytes) -> int: ... - def writelines(self, seq: Sequence[str | bytes]) -> int: ... + def writelines(self, seq: Sequence[str | bytes]) -> None: ... def flush(self) -> None: ... def tell(self) -> int: ... @@ -163,15 +163,15 @@ class AppIterRange: app_iter: Iterator[bytes] start: int stop: int | None - def __init__(self, app_iter: Iterator[bytes], start: int, stop: int | None) -> None: ... - def __iter__(self) -> Iterator[bytes]: ... + def __init__(self, app_iter: Iterable[bytes], start: int, stop: int | None) -> None: ... + def __iter__(self) -> Self: ... def next(self) -> bytes: ... __next__ = next def close(self) -> None: ... class EmptyResponse: - def __init__(self, app_iter: Iterator[bytes] | None = None) -> None: ... - def __iter__(self) -> Iterator[bytes]: ... - def __len__(self) -> int: ... + def __init__(self, app_iter: Iterable[bytes] | None = None) -> None: ... + def __iter__(self) -> Self: ... + def __len__(self) -> Literal[0]: ... def next(self) -> bytes: ... __next__ = next diff --git a/stubs/WebOb/webob/static.pyi b/stubs/WebOb/webob/static.pyi index 0f9e087dc822..c74664103a5b 100644 --- a/stubs/WebOb/webob/static.pyi +++ b/stubs/WebOb/webob/static.pyi @@ -1,4 +1,5 @@ -from _typeshed import StrOrBytesPath +from _typeshed import StrPath +from _typeshed.wsgi import WSGIApplication from collections.abc import Iterator from typing import IO, Any @@ -6,14 +7,16 @@ from webob.dec import wsgify from webob.request import Request from webob.response import Response +__all__ = ["FileApp", "DirectoryApp"] + BLOCK_SIZE: int class FileApp: - filename: StrOrBytesPath + filename: StrPath kw: dict[str, Any] - def __init__(self, filename: StrOrBytesPath, **kw: Any) -> None: ... + def __init__(self, filename: StrPath, **kw: Any) -> None: ... @wsgify - def __call__(self, req: Request) -> Response: ... + def __call__(self, req: Request) -> WSGIApplication: ... class FileIter: file: IO[bytes] @@ -24,14 +27,14 @@ class FileIter: __iter__ = app_iter_range class DirectoryApp: - path: str | bytes - index_page: str | None + path: StrPath + index_page: str hide_index_with_redirect: bool fileapp_kw: dict[str, Any] def __init__( - self, path: StrOrBytesPath, index_page: str = "index.html", hide_index_with_redirect: bool = False, **kw: Any + self, path: StrPath, index_page: str = "index.html", hide_index_with_redirect: bool = False, **kw: Any ) -> None: ... - def make_fileapp(self, path: StrOrBytesPath) -> FileApp: ... + def make_fileapp(self, path: StrPath) -> FileApp: ... @wsgify def __call__(self, req: Request) -> Response | FileApp: ... - def index(self, req: Request, path: StrOrBytesPath) -> Response | FileApp: ... + def index(self, req: Request, path: StrPath) -> Response | FileApp: ... 
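
A short usage sketch for the retyped static helpers; the paths are placeholders,
while FileApp and DirectoryApp are real webob.static classes as stubbed above.

    from webob.static import DirectoryApp, FileApp

    single = FileApp("static/logo.png")                      # filename: StrPath
    site = DirectoryApp("static/", index_page="index.html")  # index_page: str

    # Both objects are WSGI applications (their __call__ is wrapped by @wsgify),
    # so they can be mounted directly under any WSGI server or router.
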
diff --git a/stubs/WebOb/webob/util.pyi b/stubs/WebOb/webob/util.pyi index f0354c2417d5..a02d71666927 100644 --- a/stubs/WebOb/webob/util.pyi +++ b/stubs/WebOb/webob/util.pyi @@ -1,10 +1,7 @@ from collections.abc import Callable -from typing import AnyStr, Protocol +from typing import AnyStr -class _HasHTML(Protocol): - def __html__(self) -> str: ... - -def html_escape(s: str | bytes | _HasHTML) -> str: ... +def html_escape(s: object) -> str: ... def header_docstring(header: str, rfc_section: str) -> str: ... def warn_deprecation(text: str, version: str, stacklevel: int) -> None: ... From 5833893939fc4fb798b94946dde80e37541a4db9 Mon Sep 17 00:00:00 2001 From: Sabfo Date: Tue, 25 Feb 2025 20:40:38 +0200 Subject: [PATCH 003/388] PyInstaller: Complete types in build_main.pyi (#13528) Co-authored-by: sobolevn --- .../PyInstaller/building/build_main.pyi | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/stubs/pyinstaller/PyInstaller/building/build_main.pyi b/stubs/pyinstaller/PyInstaller/building/build_main.pyi index ebd3831beaa9..a5d93774a9f7 100644 --- a/stubs/pyinstaller/PyInstaller/building/build_main.pyi +++ b/stubs/pyinstaller/PyInstaller/building/build_main.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, StrPath +from _typeshed import StrPath from collections.abc import Iterable from typing import Any, Literal @@ -23,21 +23,33 @@ class Analysis(Target): binaries: list[_TOCTuple] zipfiles: list[_TOCTuple] datas: list[_TOCTuple] + + inputs: list[str] + dependencies: list[_TOCTuple] + noarchive: bool + optimize: int + pathex: list[StrPath] + hiddenimports: list[str] + hookspath: list[tuple[StrPath, int]] + excludes: list[str] + custom_runtime_hooks: list[StrPath] + # https://pyinstaller.org/en/stable/hooks.html#hook-global-variables + module_collection_mode: dict[str, str] def __init__( self, scripts: Iterable[StrPath], - pathex: Incomplete | None = None, + pathex: Iterable[StrPath] | None = None, binaries: Iterable[tuple[StrPath, StrPath]] | None = None, datas: Iterable[tuple[StrPath, StrPath]] | None = None, - hiddenimports: Incomplete | None = None, - hookspath: Incomplete | None = None, + hiddenimports: Iterable[str] | None = None, + hookspath: Iterable[StrPath] | None = None, hooksconfig: dict[str, dict[str, Any]] | None = None, - excludes: Incomplete | None = None, - runtime_hooks: Incomplete | None = None, + excludes: Iterable[str] | None = None, + runtime_hooks: Iterable[StrPath] | None = None, cipher: _PyiBlockCipher = None, win_no_prefer_redirects: bool = False, win_private_assemblies: bool = False, noarchive: bool = False, - module_collection_mode: Incomplete | None = None, + module_collection_mode: dict[str, str] | None = None, optimize: Literal[-1, 0, 1, 2] | None = -1, ) -> None: ... 
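
A spec-file-style sketch of the now fully typed Analysis constructor; the script
name, hidden import and collection-mode key are placeholders, and in practice
Analysis runs inside `pyinstaller app.spec` rather than as a standalone script.

    from PyInstaller.building.build_main import Analysis

    a = Analysis(
        ["app.py"],                                # scripts: Iterable[StrPath]
        pathex=["."],                              # Iterable[StrPath] | None
        hiddenimports=["mypkg._native"],           # Iterable[str] | None (placeholder)
        excludes=["tkinter"],                      # Iterable[str] | None
        module_collection_mode={"mypkg": "pyz"},   # dict[str, str] | None
        optimize=0,                                # Literal[-1, 0, 1, 2] | None
    )
    # Result attributes are typed as list[_TOCTuple]:
    print(len(a.datas), len(a.binaries))
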
From 95b8a48423f0a7070619f07d47d0911910158657 Mon Sep 17 00:00:00 2001 From: Lennart Behme <44500208+lbhm@users.noreply.github.com> Date: Wed, 26 Feb 2025 09:00:23 +0100 Subject: [PATCH 004/388] [hnswlib] Add ArrayLike annotations and raise Numpy dependency (#13538) --- stubs/hnswlib/METADATA.toml | 2 +- stubs/hnswlib/hnswlib.pyi | 14 ++++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/stubs/hnswlib/METADATA.toml b/stubs/hnswlib/METADATA.toml index 67c290fb0e2c..ac9eaa390c4f 100644 --- a/stubs/hnswlib/METADATA.toml +++ b/stubs/hnswlib/METADATA.toml @@ -1,4 +1,4 @@ version = "0.8.*" # Requires a version of numpy with a `py.typed` file -requires = ["numpy>=1.20"] +requires = ["numpy>=1.21"] upstream_repository = "https://github.com/nmslib/hnswlib" diff --git a/stubs/hnswlib/hnswlib.pyi b/stubs/hnswlib/hnswlib.pyi index 5f6bc2cdf120..8123722c7d47 100644 --- a/stubs/hnswlib/hnswlib.pyi +++ b/stubs/hnswlib/hnswlib.pyi @@ -3,7 +3,7 @@ from collections.abc import Callable from typing import Any, Literal, overload import numpy as np -from numpy.typing import NDArray +from numpy.typing import ArrayLike, NDArray BFIndex: Incomplete @@ -17,16 +17,18 @@ class Index: def __init__(self, index: Index) -> None: ... @overload def __init__(self, space: Literal["l2", "ip", "cosine"], dim: int) -> None: ... - def add_items(self, data, ids: Incomplete | None = None, num_threads: int = -1, replace_deleted: bool = False) -> None: ... + def add_items( + self, data: ArrayLike, ids: ArrayLike | None = None, num_threads: int = -1, replace_deleted: bool = False + ) -> None: ... def get_current_count(self) -> int: ... def get_ids_list(self) -> list[int]: ... @overload - def get_items(self, ids: Incomplete | None = ..., return_type: Literal["list"] = ...) -> list[float]: ... + def get_items(self, ids: ArrayLike | None = ..., return_type: Literal["list"] = ...) -> list[float]: ... @overload - def get_items(self, ids: Incomplete | None = ..., return_type: Literal["numpy"] = ...) -> NDArray[np.float32]: ... + def get_items(self, ids: ArrayLike | None = ..., return_type: Literal["numpy"] = ...) -> NDArray[np.float32]: ... @overload def get_items( - self, ids: Incomplete | None = None, return_type: Literal["numpy", "list"] = "numpy" + self, ids: ArrayLike | None = None, return_type: Literal["numpy", "list"] = "numpy" ) -> NDArray[np.float32] | list[float]: ... def get_max_elements(self) -> int: ... def index_file_size(self) -> int: ... @@ -39,7 +41,7 @@ class Index: allow_replace_delete: bool = False, ) -> None: ... def knn_query( - self, data, k: int = 1, num_threads: int = -1, filter: Callable[[int], bool] | None = None + self, data: ArrayLike, k: int = 1, num_threads: int = -1, filter: Callable[[int], bool] | None = None ) -> tuple[NDArray[np.uint64], NDArray[np.float32]]: ... def load_index(self, path_to_index: str, max_elements: int = 0, allow_replace_delete: bool = False) -> None: ... def mark_deleted(self, label: int) -> None: ... 
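
A small sketch exercising the new ArrayLike annotations; with these stubs any
array-like (nested lists or NumPy arrays) type-checks where data/ids used to be
untyped, and the calls below follow hnswlib's documented API.

    import hnswlib
    import numpy as np

    dim = 16
    data = np.random.default_rng(0).random((1000, dim), dtype=np.float32)
    index = hnswlib.Index(space="l2", dim=dim)
    index.init_index(max_elements=1000, ef_construction=200, M=16)
    index.add_items(data, ids=np.arange(1000))           # data, ids: ArrayLike
    labels, distances = index.knn_query(data[:5], k=3)   # NDArray[uint64], NDArray[float32]
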
From 0b39a952d84c6c78f85386fd86b536109994dae8 Mon Sep 17 00:00:00 2001 From: Victor Westerhuis Date: Wed, 26 Feb 2025 12:59:39 +0100 Subject: [PATCH 005/388] Relax functools.update_wrapper's parameter types (#13491) --- stdlib/functools.pyi | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/stdlib/functools.pyi b/stdlib/functools.pyi index 9957fa8f1634..10563e654b37 100644 --- a/stdlib/functools.pyi +++ b/stdlib/functools.pyi @@ -1,7 +1,7 @@ import sys import types from _typeshed import SupportsAllComparisons, SupportsItems -from collections.abc import Callable, Hashable, Iterable, Sequence, Sized +from collections.abc import Callable, Hashable, Iterable, Sized from typing import Any, Generic, Literal, NamedTuple, TypedDict, TypeVar, final, overload from typing_extensions import ParamSpec, Self, TypeAlias @@ -97,26 +97,26 @@ if sys.version_info >= (3, 12): def update_wrapper( wrapper: Callable[_PWrapper, _RWrapper], wrapped: Callable[_PWrapped, _RWrapped], - assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), - updated: Sequence[str] = ("__dict__",), + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), + updated: Iterable[str] = ("__dict__",), ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... def wraps( wrapped: Callable[_PWrapped, _RWrapped], - assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), - updated: Sequence[str] = ("__dict__",), + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), + updated: Iterable[str] = ("__dict__",), ) -> _Wrapper[_PWrapped, _RWrapped]: ... else: def update_wrapper( wrapper: Callable[_PWrapper, _RWrapper], wrapped: Callable[_PWrapped, _RWrapped], - assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), - updated: Sequence[str] = ("__dict__",), + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Iterable[str] = ("__dict__",), ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... def wraps( wrapped: Callable[_PWrapped, _RWrapped], - assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), - updated: Sequence[str] = ("__dict__",), + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Iterable[str] = ("__dict__",), ) -> _Wrapper[_PWrapped, _RWrapped]: ... def total_ordering(cls: type[_T]) -> type[_T]: ... From 09a10cea40e76c3d341e61510c616f93901c2133 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Wed, 26 Feb 2025 14:04:04 +0100 Subject: [PATCH 006/388] Update an itertool recipe from Python docs (#13543) --- stdlib/@tests/test_cases/itertools/check_itertools_recipes.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/stdlib/@tests/test_cases/itertools/check_itertools_recipes.py b/stdlib/@tests/test_cases/itertools/check_itertools_recipes.py index c45ffee28cee..6cba7b37026e 100644 --- a/stdlib/@tests/test_cases/itertools/check_itertools_recipes.py +++ b/stdlib/@tests/test_cases/itertools/check_itertools_recipes.py @@ -284,6 +284,8 @@ def unique_justseen(iterable: Iterable[_T], key: Callable[[_T], bool] | None = N "List unique elements, preserving order. Remember only the element just seen." 
# unique_justseen('AAAABBBCCDAABBB') --> A B C D A B # unique_justseen('ABBcCAD', str.lower) --> A B c A D + if key is None: + return map(operator.itemgetter(0), groupby(iterable)) g: groupby[_T | bool, _T] = groupby(iterable, key) return map(next, map(operator.itemgetter(1), g)) From 47e5e5439c8c91f72dc038352b78747600e7d75c Mon Sep 17 00:00:00 2001 From: Daniel Darabos Date: Wed, 26 Feb 2025 15:03:13 +0100 Subject: [PATCH 007/388] Extract NetworkX types from docstrings (#13458) --- .../networkx/networkx/algorithms/__init__.pyi | 3 + .../algorithms/approximation/clique.pyi | 9 +- .../approximation/clustering_coefficient.pyi | 6 +- .../algorithms/approximation/connectivity.pyi | 8 +- .../approximation/distance_measures.pyi | 6 +- .../approximation/dominating_set.pyi | 7 +- .../algorithms/approximation/kcomponents.pyi | 3 +- .../algorithms/approximation/matching.pyi | 3 +- .../algorithms/approximation/maxcut.pyi | 10 +- .../algorithms/approximation/ramsey.pyi | 3 +- .../algorithms/approximation/steinertree.pyi | 6 +- .../approximation/traveling_salesman.pyi | 56 ++++--- .../algorithms/approximation/treewidth.pyi | 6 +- .../algorithms/approximation/vertex_cover.pyi | 5 +- .../algorithms/assortativity/connectivity.pyi | 4 +- .../algorithms/assortativity/correlation.pyi | 10 +- .../algorithms/assortativity/mixing.pyi | 24 ++- .../assortativity/neighbor_degree.pyi | 8 +- .../algorithms/assortativity/pairs.pyi | 9 +- .../networkx/algorithms/asteroidal.pyi | 5 +- .../algorithms/bipartite/__init__.pyi | 1 + .../networkx/algorithms/bipartite/basic.pyi | 14 +- .../algorithms/bipartite/centrality.pyi | 7 +- .../networkx/algorithms/bipartite/cluster.pyi | 8 +- .../algorithms/bipartite/covering.pyi | 4 +- .../algorithms/bipartite/edgelist.pyi | 25 +-- .../algorithms/bipartite/extendability.pyi | 5 + .../algorithms/bipartite/generators.pyi | 30 +++- .../algorithms/bipartite/matching.pyi | 16 +- .../networkx/algorithms/bipartite/matrix.pyi | 16 +- .../algorithms/bipartite/projection.pyi | 14 +- .../algorithms/bipartite/redundancy.pyi | 4 +- .../algorithms/bipartite/spectral.pyi | 5 +- .../networkx/networkx/algorithms/boundary.pyi | 111 +++++++------ .../networkx/networkx/algorithms/bridges.pyi | 11 +- .../algorithms/centrality/betweenness.pyi | 12 +- .../centrality/betweenness_subset.pyi | 12 +- .../algorithms/centrality/closeness.pyi | 14 +- .../centrality/current_flow_betweenness.pyi | 18 +-- .../current_flow_betweenness_subset.pyi | 19 ++- .../centrality/current_flow_closeness.pyi | 5 +- .../algorithms/centrality/eigenvector.pyi | 13 +- .../algorithms/centrality/flow_matrix.pyi | 4 + .../networkx/algorithms/centrality/group.pyi | 28 ++-- .../algorithms/centrality/harmonic.pyi | 4 +- .../networkx/algorithms/centrality/katz.pyi | 25 +-- .../algorithms/centrality/laplacian.pyi | 10 +- .../networkx/algorithms/centrality/load.pyi | 7 +- .../algorithms/centrality/percolation.pyi | 8 +- .../algorithms/centrality/reaching.pyi | 12 +- .../algorithms/centrality/second_order.pyi | 3 +- .../algorithms/centrality/subgraph_alg.pyi | 9 +- .../algorithms/centrality/trophic.pyi | 8 +- .../algorithms/centrality/voterank_alg.pyi | 5 +- .../networkx/networkx/algorithms/chordal.pyi | 6 +- stubs/networkx/networkx/algorithms/clique.pyi | 23 ++- .../networkx/networkx/algorithms/cluster.pyi | 17 +- .../coloring/equitable_coloring.pyi | 3 +- .../algorithms/coloring/greedy_coloring.pyi | 22 +-- .../algorithms/communicability_alg.pyi | 5 +- .../algorithms/community/asyn_fluid.pyi | 6 +- .../algorithms/community/centrality.pyi | 7 
+- .../algorithms/community/community_utils.pyi | 3 +- .../algorithms/community/divisive.pyi | 7 +- .../networkx/algorithms/community/kclique.pyi | 3 +- .../algorithms/community/kernighan_lin.pyi | 8 +- .../community/label_propagation.pyi | 6 +- .../networkx/algorithms/community/louvain.pyi | 15 +- .../networkx/algorithms/community/lukes.pyi | 5 +- .../algorithms/community/modularity_max.pyi | 7 +- .../networkx/algorithms/community/quality.pyi | 5 +- .../algorithms/components/biconnected.pyi | 9 +- .../algorithms/components/connected.pyi | 9 +- .../algorithms/components/semiconnected.pyi | 5 +- .../components/strongly_connected.pyi | 10 +- .../components/weakly_connected.pyi | 6 +- .../algorithms/connectivity/connectivity.pyi | 52 ++++--- .../networkx/algorithms/connectivity/cuts.pyi | 26 +++- .../connectivity/disjoint_paths.pyi | 33 ++-- .../connectivity/edge_augmentation.pyi | 9 +- .../connectivity/edge_kcomponents.pyi | 12 +- .../algorithms/connectivity/kcomponents.pyi | 5 +- .../algorithms/connectivity/kcutsets.pyi | 8 +- .../algorithms/connectivity/stoerwagner.pyi | 3 +- stubs/networkx/networkx/algorithms/core.pyi | 17 +- .../networkx/networkx/algorithms/covering.pyi | 6 +- stubs/networkx/networkx/algorithms/cuts.pyi | 19 +-- stubs/networkx/networkx/algorithms/cycles.pyi | 15 +- .../networkx/algorithms/d_separation.pyi | 4 +- stubs/networkx/networkx/algorithms/dag.pyi | 23 +-- .../networkx/algorithms/distance_measures.pyi | 17 +- .../networkx/algorithms/distance_regular.pyi | 7 +- .../networkx/algorithms/dominance.pyi | 5 +- .../networkx/algorithms/dominating.pyi | 6 +- .../algorithms/efficiency_measures.pyi | 3 +- stubs/networkx/networkx/algorithms/euler.pyi | 13 +- .../algorithms/flow/boykovkolmogorov.pyi | 13 +- .../algorithms/flow/capacityscaling.pyi | 5 +- .../networkx/algorithms/flow/dinitz_alg.pyi | 13 +- .../networkx/algorithms/flow/edmondskarp.pyi | 13 +- .../networkx/algorithms/flow/gomory_hu.pyi | 5 +- .../networkx/algorithms/flow/maxflow.pyi | 39 ++++- .../networkx/algorithms/flow/mincost.pyi | 11 +- .../algorithms/flow/networksimplex.pyi | 4 +- .../networkx/algorithms/flow/preflowpush.pyi | 11 +- .../flow/shortestaugmentingpath.pyi | 13 +- .../networkx/algorithms/flow/utils.pyi | 1 + .../networkx/algorithms/graph_hashing.pyi | 16 +- .../networkx/algorithms/graphical.pyi | 15 +- .../networkx/algorithms/hierarchy.pyi | 4 +- stubs/networkx/networkx/algorithms/hybrid.pyi | 5 +- .../networkx/networkx/algorithms/isolate.pyi | 7 +- .../algorithms/isomorphism/ismags.pyi | 1 + .../algorithms/isomorphism/isomorph.pyi | 15 +- .../algorithms/isomorphism/isomorphvf2.pyi | 5 + .../isomorphism/temporalisomorphvf2.pyi | 2 + .../isomorphism/tree_isomorphism.pyi | 3 +- .../networkx/algorithms/isomorphism/vf2pp.pyi | 9 +- .../algorithms/isomorphism/vf2userfunc.pyi | 2 + .../algorithms/link_analysis/hits_alg.pyi | 11 +- .../algorithms/link_analysis/pagerank_alg.pyi | 30 ++-- .../networkx/algorithms/link_prediction.pyi | 19 ++- .../algorithms/lowest_common_ancestors.pyi | 8 +- .../networkx/networkx/algorithms/matching.pyi | 13 +- .../algorithms/minors/contraction.pyi | 17 +- stubs/networkx/networkx/algorithms/mis.pyi | 7 +- stubs/networkx/networkx/algorithms/moral.pyi | 3 +- .../algorithms/node_classification.pyi | 5 +- .../networkx/algorithms/non_randomness.pyi | 5 +- .../networkx/algorithms/operators/all.pyi | 11 +- .../networkx/algorithms/operators/binary.pyi | 18 +-- .../networkx/algorithms/operators/product.pyi | 21 ++- .../networkx/algorithms/operators/unary.pyi | 4 +- 
.../networkx/algorithms/planarity.pyi | 9 +- .../networkx/algorithms/polynomials.pyi | 5 +- .../networkx/algorithms/reciprocity.pyi | 7 +- .../networkx/networkx/algorithms/regular.pyi | 7 +- .../networkx/networkx/algorithms/richclub.pyi | 6 +- .../algorithms/shortest_paths/astar.pyi | 25 ++- .../algorithms/shortest_paths/dense.pyi | 12 +- .../algorithms/shortest_paths/generic.pyi | 44 ++++-- .../algorithms/shortest_paths/unweighted.pyi | 17 +- .../algorithms/shortest_paths/weighted.pyi | 146 ++++++++++++++---- .../networkx/algorithms/similarity.pyi | 124 ++++++++------- .../networkx/algorithms/simple_paths.pyi | 19 ++- .../networkx/algorithms/smallworld.pyi | 12 +- .../networkx/networkx/algorithms/smetric.pyi | 3 +- .../networkx/algorithms/sparsifiers.pyi | 6 +- .../networkx/algorithms/structuralholes.pyi | 8 +- .../networkx/algorithms/summarization.pyi | 8 +- stubs/networkx/networkx/algorithms/swap.pyi | 13 +- .../networkx/algorithms/threshold.pyi | 7 +- .../networkx/algorithms/time_dependent.pyi | 5 + .../networkx/algorithms/tournament.pyi | 16 +- .../algorithms/traversal/beamsearch.pyi | 7 +- .../traversal/breadth_first_search.pyi | 23 ++- .../traversal/depth_first_search.pyi | 28 +++- .../networkx/algorithms/traversal/edgebfs.pyi | 5 +- .../networkx/algorithms/traversal/edgedfs.pyi | 5 +- .../networkx/algorithms/tree/branchings.pyi | 19 ++- .../networkx/algorithms/tree/coding.pyi | 12 +- .../networkx/networkx/algorithms/tree/mst.pyi | 28 +++- .../networkx/algorithms/tree/operations.pyi | 3 +- .../networkx/algorithms/tree/recognition.pyi | 10 +- stubs/networkx/networkx/algorithms/triads.pyi | 19 ++- .../networkx/networkx/algorithms/vitality.pyi | 3 +- .../networkx/networkx/algorithms/voronoi.pyi | 11 +- stubs/networkx/networkx/algorithms/walks.pyi | 5 + stubs/networkx/networkx/algorithms/wiener.pyi | 5 +- .../networkx/networkx/generators/__init__.pyi | 2 + .../networkx/generators/time_series.pyi | 4 + 171 files changed, 1378 insertions(+), 842 deletions(-) create mode 100644 stubs/networkx/networkx/algorithms/bipartite/extendability.pyi create mode 100644 stubs/networkx/networkx/algorithms/time_dependent.pyi create mode 100644 stubs/networkx/networkx/algorithms/walks.pyi create mode 100644 stubs/networkx/networkx/generators/time_series.pyi diff --git a/stubs/networkx/networkx/algorithms/__init__.pyi b/stubs/networkx/networkx/algorithms/__init__.pyi index 8d47145126d6..57141f108e16 100644 --- a/stubs/networkx/networkx/algorithms/__init__.pyi +++ b/stubs/networkx/networkx/algorithms/__init__.pyi @@ -30,6 +30,7 @@ from networkx.algorithms.bipartite import ( ) from networkx.algorithms.boundary import * from networkx.algorithms.bridges import * +from networkx.algorithms.broadcasting import * from networkx.algorithms.centrality import * from networkx.algorithms.chains import * from networkx.algorithms.chordal import * @@ -116,6 +117,7 @@ from networkx.algorithms.sparsifiers import * from networkx.algorithms.structuralholes import * from networkx.algorithms.summarization import * from networkx.algorithms.swap import * +from networkx.algorithms.time_dependent import * from networkx.algorithms.traversal import * from networkx.algorithms.tree.branchings import ( ArborescenceIterator as ArborescenceIterator, @@ -132,4 +134,5 @@ from networkx.algorithms.tree.recognition import * from networkx.algorithms.triads import * from networkx.algorithms.vitality import * from networkx.algorithms.voronoi import * +from networkx.algorithms.walks import * from networkx.algorithms.wiener import * diff --git 
a/stubs/networkx/networkx/algorithms/approximation/clique.pyi b/stubs/networkx/networkx/algorithms/approximation/clique.pyi index 886ec93f7c02..99c86b3a1c87 100644 --- a/stubs/networkx/networkx/algorithms/approximation/clique.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/clique.pyi @@ -1,10 +1,11 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def maximum_independent_set(G): ... +def maximum_independent_set(G: Graph[_Node]): ... @_dispatchable -def max_clique(G): ... +def max_clique(G: Graph[_Node]): ... @_dispatchable -def clique_removal(G): ... +def clique_removal(G: Graph[_Node]): ... @_dispatchable -def large_clique_size(G): ... +def large_clique_size(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/clustering_coefficient.pyi b/stubs/networkx/networkx/algorithms/approximation/clustering_coefficient.pyi index b18a813d1192..b02ea8144d50 100644 --- a/stubs/networkx/networkx/algorithms/approximation/clustering_coefficient.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/clustering_coefficient.pyi @@ -1,6 +1,6 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def average_clustering(G, trials: int = 1000, seed: Incomplete | None = None): ... +def average_clustering(G: Graph[_Node], trials: int = 1000, seed: int | RandomState | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/connectivity.pyi b/stubs/networkx/networkx/algorithms/approximation/connectivity.pyi index 8a431a9acb00..b20466e208e5 100644 --- a/stubs/networkx/networkx/algorithms/approximation/connectivity.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/connectivity.pyi @@ -1,10 +1,12 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def local_node_connectivity(G, source, target, cutoff: Incomplete | None = None): ... +def local_node_connectivity(G: Graph[_Node], source: _Node, target: _Node, cutoff: int | None = None): ... @_dispatchable -def node_connectivity(G, s: Incomplete | None = None, t: Incomplete | None = None): ... +def node_connectivity(G: Graph[_Node], s: _Node | None = None, t: _Node | None = None): ... @_dispatchable -def all_pairs_node_connectivity(G, nbunch: Incomplete | None = None, cutoff: Incomplete | None = None): ... +def all_pairs_node_connectivity(G: Graph[_Node], nbunch: Iterable[Incomplete] | None = None, cutoff: int | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/distance_measures.pyi b/stubs/networkx/networkx/algorithms/approximation/distance_measures.pyi index 75b45b52003e..08662306c401 100644 --- a/stubs/networkx/networkx/algorithms/approximation/distance_measures.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/distance_measures.pyi @@ -1,6 +1,6 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def diameter(G, seed: Incomplete | None = None): ... +def diameter(G: Graph[_Node], seed: int | RandomState | None = None): ... 
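
A quick call-site sketch for the approximation helpers retyped above (graph
choice is arbitrary); all functions are real networkx.algorithms.approximation
APIs, called the way the new Graph[_Node] and seed annotations expect.

    import networkx as nx
    from networkx.algorithms import approximation as approx

    G = nx.petersen_graph()
    approx.large_clique_size(G)                         # G: Graph[_Node]
    approx.average_clustering(G, trials=500, seed=42)   # seed: int | RandomState | None
    approx.node_connectivity(G)                         # s and t default to None
    approx.diameter(G, seed=42)                         # approximate lower bound
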
diff --git a/stubs/networkx/networkx/algorithms/approximation/dominating_set.pyi b/stubs/networkx/networkx/algorithms/approximation/dominating_set.pyi index b08dc451db96..0eda30759ea1 100644 --- a/stubs/networkx/networkx/algorithms/approximation/dominating_set.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/dominating_set.pyi @@ -1,8 +1,7 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def min_weighted_dominating_set(G, weight: Incomplete | None = None): ... +def min_weighted_dominating_set(G: Graph[_Node], weight: str | None = None): ... @_dispatchable -def min_edge_dominating_set(G): ... +def min_edge_dominating_set(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/kcomponents.pyi b/stubs/networkx/networkx/algorithms/approximation/kcomponents.pyi index 46d89ff537d3..27bdc9efc106 100644 --- a/stubs/networkx/networkx/algorithms/approximation/kcomponents.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/kcomponents.pyi @@ -1,4 +1,5 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def k_components(G, min_density: float = 0.95): ... +def k_components(G: Graph[_Node], min_density: float = 0.95): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/matching.pyi b/stubs/networkx/networkx/algorithms/approximation/matching.pyi index 310ef31b91bf..798b8f0ac6be 100644 --- a/stubs/networkx/networkx/algorithms/approximation/matching.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/matching.pyi @@ -1,4 +1,5 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def min_maximal_matching(G): ... +def min_maximal_matching(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/maxcut.pyi b/stubs/networkx/networkx/algorithms/approximation/maxcut.pyi index 760c114b4a41..e84b601c8f21 100644 --- a/stubs/networkx/networkx/algorithms/approximation/maxcut.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/maxcut.pyi @@ -1,8 +1,14 @@ from _typeshed import Incomplete +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def randomized_partitioning(G, seed: Incomplete | None = None, p: float = 0.5, weight: Incomplete | None = None): ... +def randomized_partitioning( + G: Graph[_Node], seed: int | RandomState | None = None, p: float = 0.5, weight: str | None = None +): ... @_dispatchable -def one_exchange(G, initial_cut: Incomplete | None = None, seed: Incomplete | None = None, weight: Incomplete | None = None): ... +def one_exchange( + G: Graph[_Node], initial_cut: set[Incomplete] | None = None, seed: int | RandomState | None = None, weight: str | None = None +): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/ramsey.pyi b/stubs/networkx/networkx/algorithms/approximation/ramsey.pyi index 5b85df65d7bb..1769ec99285b 100644 --- a/stubs/networkx/networkx/algorithms/approximation/ramsey.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/ramsey.pyi @@ -1,4 +1,5 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def ramsey_R2(G): ... +def ramsey_R2(G: Graph[_Node]): ... 
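
Likewise for the dominating-set and max-cut approximations just above; the
Karate Club graph is only a convenient example input.

    import networkx as nx
    from networkx.algorithms import approximation as approx

    G = nx.karate_club_graph()
    dominating = approx.min_weighted_dominating_set(G, weight=None)   # weight: str | None
    cut_value, partition = approx.one_exchange(G, seed=0)             # seed: int | RandomState | None
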
diff --git a/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi b/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi index 03d0f6742be6..f75bd9023db9 100644 --- a/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi @@ -1,8 +1,10 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def metric_closure(G, weight: str = "weight"): ... +def metric_closure(G: Graph[_Node], weight="weight"): ... @_dispatchable -def steiner_tree(G, terminal_nodes, weight: str = "weight", method: Incomplete | None = None): ... +def steiner_tree(G: Graph[_Node], terminal_nodes: Iterable[Incomplete], weight: str = "weight", method: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/traveling_salesman.pyi b/stubs/networkx/networkx/algorithms/approximation/traveling_salesman.pyi index e44e31519d14..bccba05d805d 100644 --- a/stubs/networkx/networkx/algorithms/approximation/traveling_salesman.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/traveling_salesman.pyi @@ -1,41 +1,51 @@ from _typeshed import Incomplete +from collections.abc import Callable +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def christofides(G, weight: str = "weight", tree: Incomplete | None = None): ... +def christofides(G: Graph[_Node], weight: str | None = "weight", tree: Graph[_Node] | None = None): ... @_dispatchable def traveling_salesman_problem( - G, weight: str = "weight", nodes: Incomplete | None = None, cycle: bool = True, method: Incomplete | None = None, **kwargs + G: Graph[_Node], + weight: str = "weight", + nodes=None, + cycle: bool = True, + method: Callable[..., Incomplete] | None = None, + **kwargs, ): ... @_dispatchable -def asadpour_atsp(G, weight: str = "weight", seed: Incomplete | None = None, source: Incomplete | None = None): ... +def asadpour_atsp( + G: DiGraph[_Node], weight: str | None = "weight", seed: int | RandomState | None = None, source: str | None = None +): ... @_dispatchable -def greedy_tsp(G, weight: str = "weight", source: Incomplete | None = None): ... +def greedy_tsp(G: Graph[_Node], weight: str | None = "weight", source=None): ... @_dispatchable def simulated_annealing_tsp( - G, + G: Graph[_Node], init_cycle, - weight: str = "weight", - source: Incomplete | None = None, - # docstring says int, but it can be a float and does become a float mid-equation if alpha is also a float - temp: float = 100, - move: str = "1-1", - max_iterations: int = 10, - N_inner: int = 100, - alpha: float = 0.01, - seed: Incomplete | None = None, + weight: str | None = "weight", + source=None, + temp: int | None = 100, + move="1-1", + max_iterations: int | None = 10, + N_inner: int | None = 100, + alpha=0.01, + seed: int | RandomState | None = None, ): ... 
@_dispatchable def threshold_accepting_tsp( - G, + G: Graph[_Node], init_cycle, - weight: str = "weight", - source: Incomplete | None = None, - threshold: float = 1, - move: str = "1-1", - max_iterations: int = 10, - N_inner: int = 100, - alpha: float = 0.1, - seed: Incomplete | None = None, + weight: str | None = "weight", + source=None, + threshold: int | None = 1, + move="1-1", + max_iterations: int | None = 10, + N_inner: int | None = 100, + alpha=0.1, + seed: int | RandomState | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/treewidth.pyi b/stubs/networkx/networkx/algorithms/approximation/treewidth.pyi index 5a68247f5b49..0b970f4c5cc1 100644 --- a/stubs/networkx/networkx/algorithms/approximation/treewidth.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/treewidth.pyi @@ -1,15 +1,17 @@ from _typeshed import Incomplete +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["treewidth_min_degree", "treewidth_min_fill_in"] @_dispatchable -def treewidth_min_degree(G): ... +def treewidth_min_degree(G: Graph[_Node]): ... @_dispatchable -def treewidth_min_fill_in(G): ... +def treewidth_min_fill_in(G: Graph[_Node]): ... class MinDegreeHeuristic: count: Incomplete + def __init__(self, graph) -> None: ... def best_node(self, graph): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/vertex_cover.pyi b/stubs/networkx/networkx/algorithms/approximation/vertex_cover.pyi index 22c5787bf983..26d3fb82b1bb 100644 --- a/stubs/networkx/networkx/algorithms/approximation/vertex_cover.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/vertex_cover.pyi @@ -1,6 +1,5 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def min_weighted_vertex_cover(G, weight: Incomplete | None = None): ... +def min_weighted_vertex_cover(G: Graph[_Node], weight: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/assortativity/connectivity.pyi b/stubs/networkx/networkx/algorithms/assortativity/connectivity.pyi index 36c7f64e46e2..3c0c202c56e8 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/connectivity.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/connectivity.pyi @@ -1,8 +1,10 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def average_degree_connectivity( - G, source: str = "in+out", target: str = "in+out", nodes: Incomplete | None = None, weight: Incomplete | None = None + G: Graph[_Node], source="in+out", target="in+out", nodes: Iterable[Incomplete] | None = None, weight: str | None = None ): ... 
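
A call-site sketch for average_degree_connectivity with the loosened
source/target/weight annotations; the generated graph is a placeholder input.

    import networkx as nx

    G = nx.barabasi_albert_graph(100, 3, seed=1)
    # source/target are plain strings ("in", "out", "in+out"); weight is str | None
    conn = nx.average_degree_connectivity(G, source="in+out", target="in+out", weight=None)
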
diff --git a/stubs/networkx/networkx/algorithms/assortativity/correlation.pyi b/stubs/networkx/networkx/algorithms/assortativity/correlation.pyi index dec6b0595556..9fe4866db3d8 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/correlation.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/correlation.pyi @@ -1,16 +1,18 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def degree_assortativity_coefficient( - G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None + G: Graph[_Node], x: str = "out", y: str = "in", weight: str | None = None, nodes: Iterable[Incomplete] | None = None ): ... @_dispatchable def degree_pearson_correlation_coefficient( - G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None + G: Graph[_Node], x: str = "out", y: str = "in", weight: str | None = None, nodes: Iterable[Incomplete] | None = None ): ... @_dispatchable -def attribute_assortativity_coefficient(G, attribute, nodes: Incomplete | None = None): ... +def attribute_assortativity_coefficient(G: Graph[_Node], attribute: str, nodes: Iterable[Incomplete] | None = None): ... @_dispatchable -def numeric_assortativity_coefficient(G, attribute, nodes: Incomplete | None = None): ... +def numeric_assortativity_coefficient(G: Graph[_Node], attribute: str, nodes: Iterable[Incomplete] | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi b/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi index 06e19e1f68d5..c80fdae4eadb 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi @@ -1,26 +1,34 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def attribute_mixing_dict(G, attribute, nodes: Incomplete | None = None, normalized: bool = False): ... +def attribute_mixing_dict( + G: Graph[_Node], attribute: str, nodes: Iterable[Incomplete] | None = None, normalized: bool = False +): ... @_dispatchable def attribute_mixing_matrix( - G, attribute, nodes: Incomplete | None = None, mapping: Incomplete | None = None, normalized: bool = True + G: Graph[_Node], + attribute: str, + nodes: Iterable[Incomplete] | None = None, + mapping: SupportsGetItem[Incomplete, Incomplete] | None = None, + normalized: bool = True, ): ... @_dispatchable def degree_mixing_dict( - G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None, normalized: bool = False + G: Graph[_Node], x: str = "out", y: str = "in", weight: str | None = None, nodes=None, normalized: bool = False ): ... @_dispatchable def degree_mixing_matrix( - G, + G: Graph[_Node], x: str = "out", y: str = "in", - weight: Incomplete | None = None, - nodes: Incomplete | None = None, + weight: str | None = None, + nodes: Iterable[Incomplete] | None = None, normalized: bool = True, - mapping: Incomplete | None = None, + mapping: SupportsGetItem[Incomplete, Incomplete] | None = None, ): ... @_dispatchable def mixing_dict(xy, normalized: bool = False): ... 
diff --git a/stubs/networkx/networkx/algorithms/assortativity/neighbor_degree.pyi b/stubs/networkx/networkx/algorithms/assortativity/neighbor_degree.pyi index 6cc1b1322d2a..042777067212 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/neighbor_degree.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/neighbor_degree.pyi @@ -1,8 +1,14 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def average_neighbor_degree( - G, source: str = "out", target: str = "out", nodes: Incomplete | None = None, weight: Incomplete | None = None + G: Graph[_Node], + source: str | None = "out", + target: str | None = "out", + nodes: Iterable[Incomplete] | None = None, + weight: str | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/assortativity/pairs.pyi b/stubs/networkx/networkx/algorithms/assortativity/pairs.pyi index 569d52653ee5..4e9fb3c7516b 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/pairs.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/pairs.pyi @@ -1,11 +1,14 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Generator, Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def node_attribute_xy(G, attribute, nodes: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... +def node_attribute_xy( + G: Graph[_Node], attribute, nodes: Iterable[Incomplete] | None = None +) -> Generator[Incomplete, None, None]: ... @_dispatchable def node_degree_xy( - G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None + G: Graph[_Node], x: str = "out", y: str = "in", weight: str | None = None, nodes: Iterable[Incomplete] | None = None ) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/asteroidal.pyi b/stubs/networkx/networkx/algorithms/asteroidal.pyi index 4d12ecadaecd..21fdc6879686 100644 --- a/stubs/networkx/networkx/algorithms/asteroidal.pyi +++ b/stubs/networkx/networkx/algorithms/asteroidal.pyi @@ -1,6 +1,7 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def find_asteroidal_triple(G): ... +def find_asteroidal_triple(G: Graph[_Node]): ... @_dispatchable -def is_at_free(G): ... +def is_at_free(G: Graph[_Node]): ... 
diff --git a/stubs/networkx/networkx/algorithms/bipartite/__init__.pyi b/stubs/networkx/networkx/algorithms/bipartite/__init__.pyi index 1f279a092959..0df90e4f1d7d 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/__init__.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/__init__.pyi @@ -3,6 +3,7 @@ from networkx.algorithms.bipartite.centrality import * from networkx.algorithms.bipartite.cluster import * from networkx.algorithms.bipartite.covering import * from networkx.algorithms.bipartite.edgelist import * +from networkx.algorithms.bipartite.extendability import * from networkx.algorithms.bipartite.generators import * from networkx.algorithms.bipartite.matching import * from networkx.algorithms.bipartite.matrix import * diff --git a/stubs/networkx/networkx/algorithms/bipartite/basic.pyi b/stubs/networkx/networkx/algorithms/bipartite/basic.pyi index f7fd955bb524..c8d80a1b642d 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/basic.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/basic.pyi @@ -1,16 +1,18 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def color(G): ... +def color(G: Graph[_Node]): ... @_dispatchable -def is_bipartite(G): ... +def is_bipartite(G: Graph[_Node]): ... @_dispatchable -def is_bipartite_node_set(G, nodes): ... +def is_bipartite_node_set(G: Graph[_Node], nodes): ... @_dispatchable -def sets(G, top_nodes: Incomplete | None = None): ... +def sets(G: Graph[_Node], top_nodes: Iterable[Incomplete] | None = None): ... @_dispatchable -def density(B, nodes): ... +def density(B: Graph[_Node], nodes): ... @_dispatchable -def degrees(B, nodes, weight: Incomplete | None = None): ... +def degrees(B: Graph[_Node], nodes, weight: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/centrality.pyi b/stubs/networkx/networkx/algorithms/bipartite/centrality.pyi index 9d011ac139c2..fdd6212831f1 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/centrality.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/centrality.pyi @@ -1,8 +1,9 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def degree_centrality(G, nodes): ... +def degree_centrality(G: Graph[_Node], nodes): ... @_dispatchable -def betweenness_centrality(G, nodes): ... +def betweenness_centrality(G: Graph[_Node], nodes): ... @_dispatchable -def closeness_centrality(G, nodes, normalized: bool = True): ... +def closeness_centrality(G: Graph[_Node], nodes, normalized: bool | None = True): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/cluster.pyi b/stubs/networkx/networkx/algorithms/bipartite/cluster.pyi index e6d15445c985..7fed50d545d0 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/cluster.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/cluster.pyi @@ -1,13 +1,15 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def latapy_clustering(G, nodes: Incomplete | None = None, mode: str = "dot"): ... +def latapy_clustering(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None, mode: str = "dot"): ... clustering = latapy_clustering @_dispatchable -def average_clustering(G, nodes: Incomplete | None = None, mode: str = "dot"): ... 
+def average_clustering(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None, mode: str = "dot"): ... @_dispatchable -def robins_alexander_clustering(G): ... +def robins_alexander_clustering(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/covering.pyi b/stubs/networkx/networkx/algorithms/bipartite/covering.pyi index 521e6d4956af..9d9fe9758baf 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/covering.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/covering.pyi @@ -1,6 +1,8 @@ from _typeshed import Incomplete +from collections.abc import Callable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def min_edge_cover(G, matching_algorithm: Incomplete | None = None): ... +def min_edge_cover(G: Graph[_Node], matching_algorithm: Callable[..., Incomplete] | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/edgelist.pyi b/stubs/networkx/networkx/algorithms/bipartite/edgelist.pyi index 009222121126..73ca8035db9e 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/edgelist.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/edgelist.pyi @@ -1,6 +1,7 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable @@ -10,20 +11,20 @@ def generate_edgelist(G, delimiter: str = " ", data: bool = True) -> Generator[I @_dispatchable def parse_edgelist( lines, - comments: str = "#", - delimiter: Incomplete | None = None, - create_using: Incomplete | None = None, - nodetype: Incomplete | None = None, - data: bool = True, + comments: str | None = "#", + delimiter: str | None = None, + create_using: Graph[_Node] | None = None, + nodetype=None, + data=True, ): ... @_dispatchable def read_edgelist( path, - comments: str = "#", - delimiter: Incomplete | None = None, - create_using: Incomplete | None = None, - nodetype: Incomplete | None = None, - data: bool = True, - edgetype: Incomplete | None = None, - encoding: str = "utf-8", + comments: str | None = "#", + delimiter: str | None = None, + create_using=None, + nodetype=None, + data=True, + edgetype=None, + encoding: str | None = "utf-8", ): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/extendability.pyi b/stubs/networkx/networkx/algorithms/bipartite/extendability.pyi new file mode 100644 index 000000000000..9bfee7872513 --- /dev/null +++ b/stubs/networkx/networkx/algorithms/bipartite/extendability.pyi @@ -0,0 +1,5 @@ +from networkx.classes.graph import Graph, _Node +from networkx.utils.backends import _dispatchable + +@_dispatchable +def maximal_extendability(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/generators.pyi b/stubs/networkx/networkx/algorithms/bipartite/generators.pyi index 9e1914567a50..3fe8b22ca59e 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/generators.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/generators.pyi @@ -1,20 +1,34 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def complete_bipartite_graph(n1, n2, create_using: Incomplete | None = None): ... +def complete_bipartite_graph(n1, n2, create_using: Graph[_Node] | None = None): ... 
@_dispatchable -def configuration_model(aseq, bseq, create_using: Incomplete | None = None, seed: Incomplete | None = None): ... +def configuration_model( + aseq: Iterable[Incomplete], + bseq: Iterable[Incomplete], + create_using: Graph[_Node] | None = None, + seed: int | RandomState | None = None, +): ... @_dispatchable -def havel_hakimi_graph(aseq, bseq, create_using: Incomplete | None = None): ... +def havel_hakimi_graph(aseq: Iterable[Incomplete], bseq: Iterable[Incomplete], create_using: Graph[_Node] | None = None): ... @_dispatchable -def reverse_havel_hakimi_graph(aseq, bseq, create_using: Incomplete | None = None): ... +def reverse_havel_hakimi_graph( + aseq: Iterable[Incomplete], bseq: Iterable[Incomplete], create_using: Graph[_Node] | None = None +): ... @_dispatchable -def alternating_havel_hakimi_graph(aseq, bseq, create_using: Incomplete | None = None): ... +def alternating_havel_hakimi_graph( + aseq: Iterable[Incomplete], bseq: Iterable[Incomplete], create_using: Graph[_Node] | None = None +): ... @_dispatchable -def preferential_attachment_graph(aseq, p, create_using: Incomplete | None = None, seed: Incomplete | None = None): ... +def preferential_attachment_graph( + aseq: Iterable[Incomplete], p: float, create_using: Graph[_Node] | None = None, seed: int | RandomState | None = None +): ... @_dispatchable -def random_graph(n, m, p, seed: Incomplete | None = None, directed: bool = False): ... +def random_graph(n: int, m: int, p: float, seed: int | RandomState | None = None, directed: bool | None = False): ... @_dispatchable -def gnmk_random_graph(n, m, k, seed: Incomplete | None = None, directed: bool = False): ... +def gnmk_random_graph(n: int, m: int, k: int, seed: int | RandomState | None = None, directed: bool | None = False): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/matching.pyi b/stubs/networkx/networkx/algorithms/bipartite/matching.pyi index a731c33b88d2..b744c44f2e34 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/matching.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/matching.pyi @@ -1,15 +1,21 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def hopcroft_karp_matching(G, top_nodes: Incomplete | None = None): ... +def hopcroft_karp_matching(G: Graph[_Node], top_nodes: Iterable[_Node] | None = None): ... @_dispatchable -def eppstein_matching(G, top_nodes: Incomplete | None = None): ... +def eppstein_matching(G: Graph[_Node], top_nodes: Iterable[Incomplete] | None = None): ... @_dispatchable -def to_vertex_cover(G, matching, top_nodes: Incomplete | None = None): ... +def to_vertex_cover( + G: Graph[_Node], matching: SupportsGetItem[Incomplete, Incomplete], top_nodes: Iterable[Incomplete] | None = None +): ... maximum_matching = hopcroft_karp_matching @_dispatchable -def minimum_weight_full_matching(G, top_nodes: Incomplete | None = None, weight: str = "weight"): ... +def minimum_weight_full_matching( + G: Graph[_Node], top_nodes: Iterable[Incomplete] | None = None, weight: str | None = "weight" +): ... 
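
A matching/vertex-cover sketch for the bipartite stubs above; top_nodes
disambiguates the bipartition, as the new Iterable[_Node] annotations document,
and all three functions are real networkx.algorithms.bipartite APIs
(minimum_weight_full_matching additionally needs SciPy at runtime).

    import networkx as nx
    from networkx.algorithms import bipartite

    B = nx.complete_bipartite_graph(3, 3)    # nodes 0-2 on one side, 3-5 on the other
    top = {0, 1, 2}
    matching = bipartite.hopcroft_karp_matching(B, top_nodes=top)
    cover = bipartite.to_vertex_cover(B, matching, top_nodes=top)
    full = bipartite.minimum_weight_full_matching(B, top_nodes=top, weight="weight")
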
diff --git a/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi b/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi index 51a032fff0d8..f8a7af78c99b 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi @@ -1,15 +1,17 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def biadjacency_matrix( - G, - row_order, - column_order: Incomplete | None = None, - dtype: Incomplete | None = None, - weight: str = "weight", - format: str = "csr", + G: Graph[_Node], + row_order: Iterable[_Node], + column_order: Iterable[Incomplete] | None = None, + dtype=None, + weight: str | None = "weight", + format="csr", ): ... @_dispatchable -def from_biadjacency_matrix(A, create_using: Incomplete | None = None, edge_attribute: str = "weight"): ... +def from_biadjacency_matrix(A, create_using: Graph[_Node] | None = None, edge_attribute: str = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/projection.pyi b/stubs/networkx/networkx/algorithms/bipartite/projection.pyi index 37712b7840a4..c545b78ff61e 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/projection.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/projection.pyi @@ -1,14 +1,18 @@ from _typeshed import Incomplete +from collections.abc import Callable, Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def projected_graph(B, nodes, multigraph: bool = False): ... +def projected_graph(B: Graph[_Node], nodes: Iterable[Incomplete], multigraph: bool = False): ... @_dispatchable -def weighted_projected_graph(B, nodes, ratio: bool = False): ... +def weighted_projected_graph(B: Graph[_Node], nodes: Iterable[Incomplete], ratio: bool = False): ... @_dispatchable -def collaboration_weighted_projected_graph(B, nodes): ... +def collaboration_weighted_projected_graph(B: Graph[_Node], nodes: Iterable[Incomplete]): ... @_dispatchable -def overlap_weighted_projected_graph(B, nodes, jaccard: bool = True): ... +def overlap_weighted_projected_graph(B: Graph[_Node], nodes: Iterable[Incomplete], jaccard: bool = True): ... @_dispatchable -def generic_weighted_projected_graph(B, nodes, weight_function: Incomplete | None = None): ... +def generic_weighted_projected_graph( + B: Graph[_Node], nodes: Iterable[Incomplete], weight_function: Callable[..., Incomplete] | None = None +): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/redundancy.pyi b/stubs/networkx/networkx/algorithms/bipartite/redundancy.pyi index 8c66512fc95a..474329b6e68e 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/redundancy.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/redundancy.pyi @@ -1,6 +1,8 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def node_redundancy(G, nodes: Incomplete | None = None): ... +def node_redundancy(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None): ... 
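A short sketch of the projection helpers typed above (illustrative, assuming a recent networkx; the graph and node choice are made up): `nodes` is the side of the bipartition to project onto.

import networkx as nx
from networkx.algorithms import bipartite

B = nx.complete_bipartite_graph(2, 3)                 # top nodes 0-1, bottom nodes 2-4
P = bipartite.projected_graph(B, nodes=[0, 1])        # unweighted projection onto {0, 1}
W = bipartite.weighted_projected_graph(B, nodes=[0, 1])
# W[0][1]["weight"] == 3: nodes 0 and 1 share three common neighbours.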
diff --git a/stubs/networkx/networkx/algorithms/bipartite/spectral.pyi b/stubs/networkx/networkx/algorithms/bipartite/spectral.pyi index 4a81f118acbf..c3060cce5cda 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/spectral.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/spectral.pyi @@ -1,6 +1,5 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def spectral_bipartivity(G, nodes: Incomplete | None = None, weight: str = "weight"): ... +def spectral_bipartivity(G: Graph[_Node], nodes=None, weight: str = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/boundary.pyi b/stubs/networkx/networkx/algorithms/boundary.pyi index 3b5cfa511a38..e85644bef42f 100644 --- a/stubs/networkx/networkx/algorithms/boundary.pyi +++ b/stubs/networkx/networkx/algorithms/boundary.pyi @@ -1,6 +1,6 @@ from _typeshed import Incomplete from collections.abc import Generator, Iterable -from typing import Literal, TypeVar, overload +from typing import TypeVar, overload from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -10,106 +10,101 @@ _U = TypeVar("_U") @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None = None, - data: Literal[False] = False, - keys: Literal[False] = False, - default=None, + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, + default: Incomplete | None = None, ) -> Generator[tuple[_Node, _Node], None, None]: ... @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None, - data: Literal[True], - keys: Literal[False] = False, - default=None, + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, + default: Incomplete | None = None, ) -> Generator[tuple[_Node, _Node, dict[str, Incomplete]], None, None]: ... @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None = None, - *, - data: Literal[True], - keys: Literal[False] = False, - default=None, + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, + default: Incomplete | None = None, ) -> Generator[tuple[_Node, _Node, dict[str, Incomplete]], None, None]: ... @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None, - data: str, - keys: Literal[False] = False, + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, default: _U | None = None, ) -> Generator[tuple[_Node, _Node, dict[str, _U]], None, None]: ... @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None = None, - *, - data: str, - keys: Literal[False] = False, + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, default: _U | None = None, ) -> Generator[tuple[_Node, _Node, dict[str, _U]], None, None]: ... 
@overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None, - data: Literal[False], - keys: Literal[True], - default=None, + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, + default: Incomplete | None = None, ) -> Generator[tuple[_Node, _Node, int], None, None]: ... @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None = None, - data: Literal[False] = False, - *, - keys: Literal[True], - default=None, + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, + default: Incomplete | None = None, ) -> Generator[tuple[_Node, _Node, int], None, None]: ... @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None, - data: Literal[True], - keys: Literal[True], - default=None, + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, + default: Incomplete | None = None, ) -> Generator[tuple[_Node, _Node, int, dict[str, Incomplete]], None, None]: ... @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None = None, - *, - data: Literal[True], - keys: Literal[True], - default=None, + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, + default: Incomplete | None = None, ) -> Generator[tuple[_Node, _Node, int, dict[str, Incomplete]], None, None]: ... @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None, - data: str, - keys: Literal[True], + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, default: _U | None = None, ) -> Generator[tuple[_Node, _Node, int, dict[str, _U]], None, None]: ... @overload def edge_boundary( G: Graph[_Node], - nbunch1: Iterable[_Node], - nbunch2: Iterable[_Node] | None = None, - *, - data: str, - keys: Literal[True], + nbunch1: Iterable[Incomplete], + nbunch2: Iterable[Incomplete] | None = None, + data=False, + keys: bool = False, default: _U | None = None, ) -> Generator[tuple[_Node, _Node, int, dict[str, _U]], None, None]: ... @_dispatchable -def node_boundary(G: Graph[_Node], nbunch1: Iterable[_Node], nbunch2: Iterable[_Node] | None = None) -> set[_Node]: ... +def node_boundary(G: Graph[_Node], nbunch1: Iterable[Incomplete], nbunch2: Iterable[Incomplete] | None = None) -> set[_Node]: ... diff --git a/stubs/networkx/networkx/algorithms/bridges.pyi b/stubs/networkx/networkx/algorithms/bridges.pyi index 35fa914d6f90..2beb94e80077 100644 --- a/stubs/networkx/networkx/algorithms/bridges.pyi +++ b/stubs/networkx/networkx/algorithms/bridges.pyi @@ -1,6 +1,5 @@ -from _typeshed import Incomplete -from collections.abc import Callable, Generator -from typing import Literal, overload +from collections.abc import Generator +from typing import overload from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -8,12 +7,12 @@ from networkx.utils.backends import _dispatchable @_dispatchable def bridges(G: Graph[_Node], root: _Node | None = None) -> Generator[_Node, None, None]: ... @_dispatchable -def has_bridges(G: Graph[_Node], root: Incomplete | None = None) -> bool: ... +def has_bridges(G: Graph[_Node], root: _Node | None = None) -> bool: ... 
@overload def local_bridges( - G: Graph[_Node], with_span: Literal[False], weight: str | Callable[[_Node], float] | None = None + G: Graph[_Node], with_span: bool = True, weight: str | None = None ) -> Generator[tuple[_Node, _Node], None, None]: ... @overload def local_bridges( - G: Graph[_Node], with_span: Literal[True] = True, weight: str | Callable[[_Node], float] | None = None + G: Graph[_Node], with_span: bool = True, weight: str | None = None ) -> Generator[tuple[_Node, _Node, int], None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/centrality/betweenness.pyi b/stubs/networkx/networkx/algorithms/centrality/betweenness.pyi index 5475cfecb789..7d8351a67c5a 100644 --- a/stubs/networkx/networkx/algorithms/centrality/betweenness.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/betweenness.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from networkx.classes.graph import Graph, _Edge, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState @@ -8,12 +6,16 @@ from numpy.random import RandomState def betweenness_centrality( G: Graph[_Node], k: int | None = None, - normalized: bool = True, + normalized: bool | None = True, weight: str | None = None, - endpoints: bool = False, + endpoints: bool | None = False, seed: int | RandomState | None = None, ) -> dict[_Node, float]: ... @_dispatchable def edge_betweenness_centrality( - G: Graph[_Node], k: int | None = None, normalized: bool = True, weight: str | None = None, seed: Incomplete | None = None + G: Graph[_Node], + k: int | None = None, + normalized: bool | None = True, + weight: str | None = None, + seed: int | RandomState | None = None, ) -> dict[_Edge[_Node], float]: ... diff --git a/stubs/networkx/networkx/algorithms/centrality/betweenness_subset.pyi b/stubs/networkx/networkx/algorithms/centrality/betweenness_subset.pyi index 66abb8533ad4..3f3af3e5bbae 100644 --- a/stubs/networkx/networkx/algorithms/centrality/betweenness_subset.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/betweenness_subset.pyi @@ -5,9 +5,17 @@ from networkx.utils.backends import _dispatchable @_dispatchable def betweenness_centrality_subset( - G: Graph[_Node], sources: Iterable[_Node], targets: Iterable[_Node], normalized: bool = False, weight: str | None = None + G: Graph[_Node], + sources: Iterable[_Node], + targets: Iterable[_Node], + normalized: bool | None = False, + weight: str | None = None, ) -> dict[_Node, float]: ... @_dispatchable def edge_betweenness_centrality_subset( - G: Graph[_Node], sources: Iterable[_Node], targets: Iterable[_Node], normalized: bool = False, weight: str | None = None + G: Graph[_Node], + sources: Iterable[_Node], + targets: Iterable[_Node], + normalized: bool | None = False, + weight: str | None = None, ) -> dict[_Edge[_Node], float]: ... 
diff --git a/stubs/networkx/networkx/algorithms/centrality/closeness.pyi b/stubs/networkx/networkx/algorithms/centrality/closeness.pyi index 210e88bc7218..edfae3a7b828 100644 --- a/stubs/networkx/networkx/algorithms/centrality/closeness.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/closeness.pyi @@ -1,17 +1,17 @@ -from _typeshed import SupportsKeysAndGetItem +from _typeshed import Incomplete, SupportsGetItem -from networkx.classes.graph import Graph, _Edge, _Node +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def closeness_centrality( - G: Graph[_Node], u: _Node | None = None, distance: str | None = None, wf_improved: bool = True + G: Graph[_Node], u: _Node | None = None, distance=None, wf_improved: bool | None = True ) -> dict[_Node, float]: ... @_dispatchable def incremental_closeness_centrality( G: Graph[_Node], - edge: _Edge[_Node], - prev_cc: SupportsKeysAndGetItem[_Node, float] | None = None, - insertion: bool = True, - wf_improved: bool = True, + edge: tuple[Incomplete], + prev_cc: SupportsGetItem[Incomplete, Incomplete] | None = None, + insertion: bool | None = True, + wf_improved: bool | None = True, ) -> dict[_Node, float]: ... diff --git a/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi b/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi index 612702be5b69..3995a217fefc 100644 --- a/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi @@ -1,23 +1,23 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable def approximate_current_flow_betweenness_centrality( - G, - normalized: bool = True, - weight: Incomplete | None = None, - dtype=..., + G: Graph[_Node], + normalized: bool | None = True, + weight: str | None = None, + dtype: type = ..., solver: str = "full", epsilon: float = 0.5, kmax: int = 10000, - seed: Incomplete | None = None, + seed: int | RandomState | None = None, ): ... @_dispatchable def current_flow_betweenness_centrality( - G, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "full" + G: Graph[_Node], normalized: bool | None = True, weight: str | None = None, dtype: type = ..., solver: str = "full" ): ... @_dispatchable def edge_current_flow_betweenness_centrality( - G, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "full" + G: Graph[_Node], normalized: bool | None = True, weight: str | None = None, dtype: type = ..., solver: str = "full" ): ... 
diff --git a/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness_subset.pyi b/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness_subset.pyi index a34c8461d5c4..7712c02ac954 100644 --- a/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness_subset.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness_subset.pyi @@ -1,12 +1,25 @@ -from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def current_flow_betweenness_centrality_subset( - G, sources, targets, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "lu" + G: Graph[_Node], + sources: Iterable[_Node], + targets: Iterable[_Node], + normalized: bool | None = True, + weight: str | None = None, + dtype: type = ..., + solver: str = "lu", ): ... @_dispatchable def edge_current_flow_betweenness_centrality_subset( - G, sources, targets, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "lu" + G: Graph[_Node], + sources: Iterable[_Node], + targets: Iterable[_Node], + normalized: bool | None = True, + weight: str | None = None, + dtype: type = ..., + solver: str = "lu", ): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/current_flow_closeness.pyi b/stubs/networkx/networkx/algorithms/centrality/current_flow_closeness.pyi index 81587a748915..534c06845d13 100644 --- a/stubs/networkx/networkx/algorithms/centrality/current_flow_closeness.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/current_flow_closeness.pyi @@ -1,8 +1,7 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def current_flow_closeness_centrality(G, weight: Incomplete | None = None, dtype=..., solver: str = "lu"): ... +def current_flow_closeness_centrality(G: Graph[_Node], weight: str | None = None, dtype: type = ..., solver: str = "lu"): ... information_centrality = current_flow_closeness_centrality diff --git a/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi b/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi index cb0736e69f93..8c9ccd34c568 100644 --- a/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi @@ -1,10 +1,17 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def eigenvector_centrality( - G, max_iter: int = 100, tol: float = 1e-06, nstart: Incomplete | None = None, weight: Incomplete | None = None + G: Graph[_Node], + max_iter: int | None = 100, + tol: float | None = 1e-06, + nstart: SupportsGetItem[Incomplete, Incomplete] | None = None, + weight: str | None = None, ): ... @_dispatchable -def eigenvector_centrality_numpy(G, weight: Incomplete | None = None, max_iter: int = 50, tol: float = 0): ... +def eigenvector_centrality_numpy( + G: Graph[_Node], weight: str | None = None, max_iter: int | None = 50, tol: float | None = 0 +): ... 
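A usage sketch for the eigenvector-centrality signatures above (illustrative, assuming a recent networkx; the seed vector is made up): `nstart` is typed as a mapping-like object because any node-to-float mapping is accepted.

import networkx as nx

G = nx.karate_club_graph()
seed = {n: 1.0 for n in G}                        # any node -> float mapping works for nstart
scores = nx.eigenvector_centrality(G, max_iter=200, nstart=seed)
scores_np = nx.eigenvector_centrality_numpy(G)    # dense variant; requires numpy/scipy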
diff --git a/stubs/networkx/networkx/algorithms/centrality/flow_matrix.pyi b/stubs/networkx/networkx/algorithms/centrality/flow_matrix.pyi index 12a98580dbde..7b7f43908ffb 100644 --- a/stubs/networkx/networkx/algorithms/centrality/flow_matrix.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/flow_matrix.pyi @@ -12,6 +12,7 @@ class InverseLaplacian: w: Incomplete C: Incomplete L1: Incomplete + def __init__(self, L, width: Incomplete | None = None, dtype: Incomplete | None = None) -> None: ... def init_solver(self, L) -> None: ... def solve(self, r) -> None: ... @@ -22,18 +23,21 @@ class InverseLaplacian: class FullInverseLaplacian(InverseLaplacian): IL: Incomplete + def init_solver(self, L) -> None: ... def solve(self, rhs): ... def solve_inverse(self, r): ... class SuperLUInverseLaplacian(InverseLaplacian): lusolve: Incomplete + def init_solver(self, L) -> None: ... def solve_inverse(self, r): ... def solve(self, rhs): ... class CGInverseLaplacian(InverseLaplacian): M: Incomplete + def init_solver(self, L) -> None: ... def solve(self, rhs): ... def solve_inverse(self, r): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/group.pyi b/stubs/networkx/networkx/algorithms/centrality/group.pyi index a7cfa7a7c58a..3229bdec3789 100644 --- a/stubs/networkx/networkx/algorithms/centrality/group.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/group.pyi @@ -1,24 +1,28 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def group_betweenness_centrality(G, C, normalized: bool = True, weight: Incomplete | None = None, endpoints: bool = False): ... +def group_betweenness_centrality( + G: Graph[_Node], C, normalized: bool | None = True, weight: str | None = None, endpoints: bool | None = False +): ... @_dispatchable def prominent_group( - G, - k, - weight: Incomplete | None = None, - C: Incomplete | None = None, - endpoints: bool = False, - normalized: bool = True, - greedy: bool = False, + G: Graph[_Node], + k: int, + weight: str | None = None, + C: Iterable[Incomplete] | None = None, + endpoints: bool | None = False, + normalized: bool | None = True, + greedy: bool | None = False, ): ... @_dispatchable -def group_closeness_centrality(G, S, weight: Incomplete | None = None): ... +def group_closeness_centrality(G: Graph[_Node], S: Iterable[Incomplete], weight: str | None = None): ... @_dispatchable -def group_degree_centrality(G, S): ... +def group_degree_centrality(G: Graph[_Node], S: Iterable[Incomplete]): ... @_dispatchable -def group_in_degree_centrality(G, S): ... +def group_in_degree_centrality(G: Graph[_Node], S: Iterable[Incomplete]): ... @_dispatchable -def group_out_degree_centrality(G, S): ... +def group_out_degree_centrality(G: Graph[_Node], S: Iterable[Incomplete]): ... 
diff --git a/stubs/networkx/networkx/algorithms/centrality/harmonic.pyi b/stubs/networkx/networkx/algorithms/centrality/harmonic.pyi index cd2aa8ca1747..af566d1c4d7d 100644 --- a/stubs/networkx/networkx/algorithms/centrality/harmonic.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/harmonic.pyi @@ -1,8 +1,10 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def harmonic_centrality( - G, nbunch: Incomplete | None = None, distance: Incomplete | None = None, sources: Incomplete | None = None + G: Graph[_Node], nbunch: Iterable[Incomplete] | None = None, distance=None, sources: Iterable[Incomplete] | None = None ): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/katz.pyi b/stubs/networkx/networkx/algorithms/centrality/katz.pyi index 509456ff0ef9..ba353da7a8ce 100644 --- a/stubs/networkx/networkx/algorithms/centrality/katz.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/katz.pyi @@ -1,19 +1,24 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def katz_centrality( - G, - alpha: float = 0.1, - beta: float = 1.0, - max_iter: int = 1000, - tol: float = 1e-06, - nstart: Incomplete | None = None, - normalized: bool = True, - weight: Incomplete | None = None, + G: Graph[_Node], + alpha: float | None = 0.1, + beta: float | SupportsGetItem[Incomplete, Incomplete] | None = 1.0, + max_iter: int | None = 1000, + tol: float | None = 1e-06, + nstart: SupportsGetItem[Incomplete, Incomplete] | None = None, + normalized: bool | None = True, + weight: str | None = None, ): ... @_dispatchable def katz_centrality_numpy( - G, alpha: float = 0.1, beta: float = 1.0, normalized: bool = True, weight: Incomplete | None = None + G: Graph[_Node], + alpha: float = 0.1, + beta: float | SupportsGetItem[Incomplete, Incomplete] | None = 1.0, + normalized: bool = True, + weight: str | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi b/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi index a1a79318dcb2..8dfdb2d0d11a 100644 --- a/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi @@ -1,13 +1,15 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def laplacian_centrality( - G, + G: Graph[_Node], normalized: bool = True, - nodelist: Incomplete | None = None, - weight: str = "weight", - walk_type: Incomplete | None = None, + nodelist: Iterable[Incomplete] | None = None, + weight: str | None = "weight", + walk_type: str | None = None, alpha: float = 0.95, ): ... 
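A sketch of the Katz-centrality call (illustrative, assuming a recent networkx; the alpha/beta values are arbitrary): `beta` may be either a scalar or a node-to-value mapping, which is why the stub widens it to `float | SupportsGetItem[...]`.

import networkx as nx

G = nx.path_graph(4)
scalar_beta = nx.katz_centrality(G, alpha=0.1, beta=1.0)
mapped_beta = nx.katz_centrality(G, alpha=0.1, beta={n: 1.0 for n in G})  # per-node beta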
diff --git a/stubs/networkx/networkx/algorithms/centrality/load.pyi b/stubs/networkx/networkx/algorithms/centrality/load.pyi index a3cb20b97eae..cdec3b843e77 100644 --- a/stubs/networkx/networkx/algorithms/centrality/load.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/load.pyi @@ -1,15 +1,14 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["load_centrality", "edge_load_centrality"] @_dispatchable def newman_betweenness_centrality( - G, v: Incomplete | None = None, cutoff: Incomplete | None = None, normalized: bool = True, weight: Incomplete | None = None + G: Graph[_Node], v=None, cutoff: bool | None = None, normalized: bool | None = True, weight: str | None = None ): ... load_centrality = newman_betweenness_centrality @_dispatchable -def edge_load_centrality(G, cutoff: bool = False): ... +def edge_load_centrality(G: Graph[_Node], cutoff: bool | None = False): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/percolation.pyi b/stubs/networkx/networkx/algorithms/centrality/percolation.pyi index b64230f68108..b9124b5e7a0a 100644 --- a/stubs/networkx/networkx/algorithms/centrality/percolation.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/percolation.pyi @@ -1,8 +1,12 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def percolation_centrality( - G, attribute: str = "percolation", states: Incomplete | None = None, weight: Incomplete | None = None + G: Graph[_Node], + attribute: str | None = "percolation", + states: SupportsGetItem[Incomplete, Incomplete] | None = None, + weight: str | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/reaching.pyi b/stubs/networkx/networkx/algorithms/centrality/reaching.pyi index 4cb0a9ef7a59..b2a1a2a0deeb 100644 --- a/stubs/networkx/networkx/algorithms/centrality/reaching.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/reaching.pyi @@ -1,10 +1,16 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable @_dispatchable -def global_reaching_centrality(G, weight: Incomplete | None = None, normalized: bool = True): ... +def global_reaching_centrality(G: DiGraph[_Node], weight: str | None = None, normalized: bool | None = True): ... @_dispatchable def local_reaching_centrality( - G, v, paths: Incomplete | None = None, weight: Incomplete | None = None, normalized: bool = True + G: DiGraph[_Node], + v: _Node, + paths: SupportsGetItem[Incomplete, Incomplete] | None = None, + weight: str | None = None, + normalized: bool | None = True, ): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/second_order.pyi b/stubs/networkx/networkx/algorithms/centrality/second_order.pyi index 32f37fabeabf..df8b706360c8 100644 --- a/stubs/networkx/networkx/algorithms/centrality/second_order.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/second_order.pyi @@ -1,4 +1,5 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def second_order_centrality(G): ... +def second_order_centrality(G: Graph[_Node], weight: str | None = "weight"): ... 
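A sketch for the reaching-centrality signatures above (illustrative, assuming a recent networkx; the toy DAG is made up): both functions are annotated with `DiGraph` because they are defined for directed graphs.

import networkx as nx

D = nx.DiGraph([(0, 1), (0, 2), (1, 3)])
grc = nx.global_reaching_centrality(D)        # float in [0, 1] when normalized
lrc = nx.local_reaching_centrality(D, 0)      # reach of the single node v=0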
diff --git a/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi b/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi index f826a0b38022..4035e39ea41d 100644 --- a/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi @@ -1,10 +1,11 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def subgraph_centrality_exp(G): ... +def subgraph_centrality_exp(G: Graph[_Node]): ... @_dispatchable -def subgraph_centrality(G): ... +def subgraph_centrality(G: Graph[_Node]): ... @_dispatchable -def communicability_betweenness_centrality(G): ... +def communicability_betweenness_centrality(G: Graph[_Node]): ... @_dispatchable -def estrada_index(G): ... +def estrada_index(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/trophic.pyi b/stubs/networkx/networkx/algorithms/centrality/trophic.pyi index efbd8e54f2a6..654ecef945be 100644 --- a/stubs/networkx/networkx/algorithms/centrality/trophic.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/trophic.pyi @@ -1,8 +1,10 @@ +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable @_dispatchable -def trophic_levels(G, weight: str = "weight"): ... +def trophic_levels(G: DiGraph[_Node], weight="weight"): ... @_dispatchable -def trophic_differences(G, weight: str = "weight"): ... +def trophic_differences(G: DiGraph[_Node], weight="weight"): ... @_dispatchable -def trophic_incoherence_parameter(G, weight: str = "weight", cannibalism: bool = False): ... +def trophic_incoherence_parameter(G: DiGraph[_Node], weight="weight", cannibalism: bool = False): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/voterank_alg.pyi b/stubs/networkx/networkx/algorithms/centrality/voterank_alg.pyi index c25d90ce7ea2..c313b9c8e60d 100644 --- a/stubs/networkx/networkx/algorithms/centrality/voterank_alg.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/voterank_alg.pyi @@ -1,6 +1,5 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def voterank(G, number_of_nodes: Incomplete | None = None): ... +def voterank(G: Graph[_Node], number_of_nodes: int | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/chordal.pyi b/stubs/networkx/networkx/algorithms/chordal.pyi index ba37c5b35b58..264e5c769a5d 100644 --- a/stubs/networkx/networkx/algorithms/chordal.pyi +++ b/stubs/networkx/networkx/algorithms/chordal.pyi @@ -1,5 +1,5 @@ import sys -from collections.abc import Generator, Hashable +from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.exception import NetworkXException @@ -8,10 +8,10 @@ from networkx.utils.backends import _dispatchable class NetworkXTreewidthBoundExceeded(NetworkXException): ... @_dispatchable -def is_chordal(G: Graph[Hashable]) -> bool: ... +def is_chordal(G: Graph[_Node]) -> bool: ... @_dispatchable def find_induced_nodes(G: Graph[_Node], s: _Node, t: _Node, treewidth_bound: float = sys.maxsize) -> set[_Node]: ... @_dispatchable def chordal_graph_cliques(G: Graph[_Node]) -> Generator[frozenset[_Node], None, None]: ... @_dispatchable -def chordal_graph_treewidth(G: Graph[Hashable]) -> int: ... +def chordal_graph_treewidth(G: Graph[_Node]) -> int: ... 
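A sketch of the chordal-graph API typed above (illustrative, assuming a recent networkx; the small graph is made up):

import networkx as nx

G = nx.cycle_graph(4)
G.add_edge(0, 2)                                   # the chord makes the 4-cycle chordal

assert nx.is_chordal(G)
cliques = list(nx.chordal_graph_cliques(G))        # generator of frozensets, per the stub
treewidth = nx.chordal_graph_treewidth(G)          # int, here 2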
diff --git a/stubs/networkx/networkx/algorithms/clique.pyi b/stubs/networkx/networkx/algorithms/clique.pyi index e7409ca915d8..72af1e40f811 100644 --- a/stubs/networkx/networkx/algorithms/clique.pyi +++ b/stubs/networkx/networkx/algorithms/clique.pyi @@ -1,5 +1,5 @@ -from _typeshed import SupportsGetItem, Unused -from collections.abc import Generator, Iterable, Iterator, Sized +from _typeshed import Incomplete +from collections.abc import Generator, Iterable, Iterator from typing import overload from networkx.classes.graph import Graph, _Node @@ -8,23 +8,18 @@ from networkx.utils.backends import _dispatchable @_dispatchable def enumerate_all_cliques(G: Graph[_Node]) -> Generator[list[_Node], None, None]: ... @_dispatchable -def find_cliques(G: Graph[_Node], nodes: SupportsGetItem[slice, _Node] | None = None) -> Generator[list[_Node], None, None]: ... +def find_cliques(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None) -> Generator[list[_Node], None, None]: ... @_dispatchable -def find_cliques_recursive(G: Graph[_Node], nodes: SupportsGetItem[slice, _Node] | None = None) -> Iterator[list[_Node]]: ... +def find_cliques_recursive(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None) -> Iterator[list[_Node]]: ... @_dispatchable -def make_max_clique_graph(G: Graph[_Node], create_using: type[Graph[_Node]] | None = None) -> Graph[_Node]: ... +def make_max_clique_graph(G: Graph[_Node], create_using: Graph[_Node] | None = None) -> Graph[_Node]: ... @_dispatchable def make_clique_bipartite( - G: Graph[_Node], fpos: Unused = None, create_using: type[Graph[_Node]] | None = None, name: Unused = None + G: Graph[_Node], fpos: bool | None = None, create_using: Graph[_Node] | None = None, name=None ) -> Graph[_Node]: ... @overload -def node_clique_number( # type: ignore[misc] # Incompatible return types - G: Graph[_Node], - nodes: Iterable[_Node] | None = None, - cliques: Iterable[Iterable[_Node]] | None = None, - separate_nodes: Unused = False, +def node_clique_number( + G: Graph[_Node], nodes=None, cliques: Iterable[Incomplete] | None = None, separate_nodes=False ) -> dict[_Node, int]: ... @overload -def node_clique_number( - G: Graph[_Node], nodes: _Node, cliques: Iterable[Sized] | None = None, separate_nodes: Unused = False -) -> int: ... +def node_clique_number(G: Graph[_Node], nodes=None, cliques: Iterable[Incomplete] | None = None, separate_nodes=False) -> int: ... diff --git a/stubs/networkx/networkx/algorithms/cluster.pyi b/stubs/networkx/networkx/algorithms/cluster.pyi index d0dc06de3ce9..4558712c7761 100644 --- a/stubs/networkx/networkx/algorithms/cluster.pyi +++ b/stubs/networkx/networkx/algorithms/cluster.pyi @@ -1,16 +1,19 @@ -from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def triangles(G, nodes: Incomplete | None = None): ... +def triangles(G: Graph[_Node], nodes=None): ... @_dispatchable -def average_clustering(G, nodes: Incomplete | None = None, weight: Incomplete | None = None, count_zeros: bool = True): ... +def average_clustering( + G: Graph[_Node], nodes: Iterable[_Node] | None = None, weight: str | None = None, count_zeros: bool = True +): ... @_dispatchable -def clustering(G, nodes: Incomplete | None = None, weight: Incomplete | None = None): ... +def clustering(G: Graph[_Node], nodes=None, weight: str | None = None): ... @_dispatchable -def transitivity(G): ... +def transitivity(G: Graph[_Node]): ... 
@_dispatchable -def square_clustering(G, nodes: Incomplete | None = None): ... +def square_clustering(G: Graph[_Node], nodes: Iterable[_Node] | None = None): ... @_dispatchable -def generalized_degree(G, nodes: Incomplete | None = None): ... +def generalized_degree(G: Graph[_Node], nodes: Iterable[_Node] | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi b/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi index 94f61af53c5b..488bcd7fa6ba 100644 --- a/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi +++ b/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi @@ -1,4 +1,5 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def equitable_color(G, num_colors): ... +def equitable_color(G: Graph[_Node], num_colors): ... diff --git a/stubs/networkx/networkx/algorithms/coloring/greedy_coloring.pyi b/stubs/networkx/networkx/algorithms/coloring/greedy_coloring.pyi index 26b8767083d0..0ce06b531e84 100644 --- a/stubs/networkx/networkx/algorithms/coloring/greedy_coloring.pyi +++ b/stubs/networkx/networkx/algorithms/coloring/greedy_coloring.pyi @@ -1,6 +1,7 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = [ @@ -32,23 +33,4 @@ def strategy_connected_sequential(G, colors, traversal: str = "bfs") -> Generato @_dispatchable def strategy_saturation_largest_first(G, colors) -> Generator[Incomplete, None, Incomplete]: ... @_dispatchable -def greedy_color(G, strategy: str = "largest_first", interchange: bool = False): ... - -class _Node: - node_id: Incomplete - color: int - adj_list: Incomplete - adj_color: Incomplete - def __init__(self, node_id, n) -> None: ... - def assign_color(self, adj_entry, color) -> None: ... - def clear_color(self, adj_entry, color) -> None: ... - def iter_neighbors(self) -> Generator[Incomplete, None, None]: ... - def iter_neighbors_color(self, color) -> Generator[Incomplete, None, None]: ... - -class _AdjEntry: - node_id: Incomplete - next: Incomplete - mate: Incomplete - col_next: Incomplete - col_prev: Incomplete - def __init__(self, node_id) -> None: ... +def greedy_color(G: Graph[_Node], strategy="largest_first", interchange: bool = False): ... diff --git a/stubs/networkx/networkx/algorithms/communicability_alg.pyi b/stubs/networkx/networkx/algorithms/communicability_alg.pyi index 9eede08f8535..a36df0c93379 100644 --- a/stubs/networkx/networkx/algorithms/communicability_alg.pyi +++ b/stubs/networkx/networkx/algorithms/communicability_alg.pyi @@ -1,6 +1,7 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def communicability(G): ... +def communicability(G: Graph[_Node]): ... @_dispatchable -def communicability_exp(G): ... +def communicability_exp(G: Graph[_Node]): ... 
diff --git a/stubs/networkx/networkx/algorithms/community/asyn_fluid.pyi b/stubs/networkx/networkx/algorithms/community/asyn_fluid.pyi index d78fdc2db5e5..17a087cd6ce5 100644 --- a/stubs/networkx/networkx/algorithms/community/asyn_fluid.pyi +++ b/stubs/networkx/networkx/algorithms/community/asyn_fluid.pyi @@ -1,6 +1,6 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def asyn_fluidc(G, k, max_iter: int = 100, seed: Incomplete | None = None): ... +def asyn_fluidc(G: Graph[_Node], k: int, max_iter: int = 100, seed: int | RandomState | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/community/centrality.pyi b/stubs/networkx/networkx/algorithms/community/centrality.pyi index b6b2712c1f62..8fbd47609966 100644 --- a/stubs/networkx/networkx/algorithms/community/centrality.pyi +++ b/stubs/networkx/networkx/algorithms/community/centrality.pyi @@ -1,7 +1,10 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Callable, Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def girvan_newman(G, most_valuable_edge: Incomplete | None = None) -> Generator[Incomplete, None, Incomplete]: ... +def girvan_newman( + G: Graph[_Node], most_valuable_edge: Callable[..., Incomplete] | None = None +) -> Generator[Incomplete, None, Incomplete]: ... diff --git a/stubs/networkx/networkx/algorithms/community/community_utils.pyi b/stubs/networkx/networkx/algorithms/community/community_utils.pyi index 882dbf8ab1bf..2528f102b3b6 100644 --- a/stubs/networkx/networkx/algorithms/community/community_utils.pyi +++ b/stubs/networkx/networkx/algorithms/community/community_utils.pyi @@ -1,4 +1,5 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_partition(G, communities): ... +def is_partition(G: Graph[_Node], communities): ... diff --git a/stubs/networkx/networkx/algorithms/community/divisive.pyi b/stubs/networkx/networkx/algorithms/community/divisive.pyi index 6eeed32d21ab..4ae890eab5f1 100644 --- a/stubs/networkx/networkx/algorithms/community/divisive.pyi +++ b/stubs/networkx/networkx/algorithms/community/divisive.pyi @@ -1,10 +1,13 @@ from _typeshed import Incomplete import networkx as nx +from networkx.classes.graph import Graph, _Node __all__ = ["edge_betweenness_partition", "edge_current_flow_betweenness_partition"] @nx._dispatchable -def edge_betweenness_partition(G, number_of_sets: int, *, weight: Incomplete | None = None) -> list[Incomplete]: ... +def edge_betweenness_partition(G: Graph[_Node], number_of_sets: int, *, weight: str | None = None) -> list[Incomplete]: ... @nx._dispatchable -def edge_current_flow_betweenness_partition(G, number_of_sets: int, *, weight: Incomplete | None = None) -> list[Incomplete]: ... +def edge_current_flow_betweenness_partition( + G: Graph[_Node], number_of_sets: int, *, weight: str | None = None +) -> list[Incomplete]: ... 
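A sketch of the divisive community helpers typed above (illustrative; assumes a networkx release that ships `edge_betweenness_partition`, and the barbell graph is made up): `weight` is keyword-only, matching the annotated signature.

import networkx as nx
from networkx.algorithms.community import edge_betweenness_partition

G = nx.barbell_graph(4, 0)
parts = edge_betweenness_partition(G, 2, weight=None)   # list of node sets; weight is keyword-only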
diff --git a/stubs/networkx/networkx/algorithms/community/kclique.pyi b/stubs/networkx/networkx/algorithms/community/kclique.pyi index 39bc56a86fad..13c777be295e 100644 --- a/stubs/networkx/networkx/algorithms/community/kclique.pyi +++ b/stubs/networkx/networkx/algorithms/community/kclique.pyi @@ -1,7 +1,8 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def k_clique_communities(G, k, cliques: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... +def k_clique_communities(G: Graph[_Node], k: int, cliques=None) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi b/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi index cd15c6541079..4bb3fce53e9a 100644 --- a/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi +++ b/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi @@ -1,8 +1,14 @@ from _typeshed import Incomplete +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable def kernighan_lin_bisection( - G, partition: Incomplete | None = None, max_iter: int = 10, weight: str = "weight", seed: Incomplete | None = None + G: Graph[_Node], + partition: tuple[Incomplete] | None = None, + max_iter: int = 10, + weight: str = "weight", + seed: int | RandomState | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/community/label_propagation.pyi b/stubs/networkx/networkx/algorithms/community/label_propagation.pyi index c79a95fd2829..7679d01bc6d3 100644 --- a/stubs/networkx/networkx/algorithms/community/label_propagation.pyi +++ b/stubs/networkx/networkx/algorithms/community/label_propagation.pyi @@ -1,11 +1,13 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable def asyn_lpa_communities( - G, weight: Incomplete | None = None, seed: Incomplete | None = None + G: Graph[_Node], weight: str | None = None, seed: int | RandomState | None = None ) -> Generator[Incomplete, Incomplete, None]: ... @_dispatchable -def label_propagation_communities(G): ... +def label_propagation_communities(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/community/louvain.pyi b/stubs/networkx/networkx/algorithms/community/louvain.pyi index e086829b451e..be5194a9174a 100644 --- a/stubs/networkx/networkx/algorithms/community/louvain.pyi +++ b/stubs/networkx/networkx/algorithms/community/louvain.pyi @@ -1,13 +1,24 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable def louvain_communities( - G, weight: str = "weight", resolution: float = 1, threshold: float = 1e-07, seed: Incomplete | None = None + G: Graph[_Node], + weight: str | None = "weight", + resolution: float | None = 1, + threshold: float | None = 1e-07, + max_level: int | None = None, + seed: int | RandomState | None = None, ): ... 
@_dispatchable def louvain_partitions( - G, weight: str = "weight", resolution: float = 1, threshold: float = 1e-07, seed: Incomplete | None = None + G: Graph[_Node], + weight: str | None = "weight", + resolution: float | None = 1, + threshold: float | None = 1e-07, + seed: int | RandomState | None = None, ) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/community/lukes.pyi b/stubs/networkx/networkx/algorithms/community/lukes.pyi index 0044043faf7c..81a7a1335170 100644 --- a/stubs/networkx/networkx/algorithms/community/lukes.pyi +++ b/stubs/networkx/networkx/algorithms/community/lukes.pyi @@ -1,6 +1,5 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def lukes_partitioning(G, max_size, node_weight: Incomplete | None = None, edge_weight: Incomplete | None = None): ... +def lukes_partitioning(G: Graph[_Node], max_size: int, node_weight=None, edge_weight=None): ... diff --git a/stubs/networkx/networkx/algorithms/community/modularity_max.pyi b/stubs/networkx/networkx/algorithms/community/modularity_max.pyi index 9842451eaba9..b93dbf52033a 100644 --- a/stubs/networkx/networkx/algorithms/community/modularity_max.pyi +++ b/stubs/networkx/networkx/algorithms/community/modularity_max.pyi @@ -1,10 +1,9 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def greedy_modularity_communities( - G, weight: Incomplete | None = None, resolution: float = 1, cutoff: int = 1, best_n: Incomplete | None = None + G: Graph[_Node], weight: str | None = None, resolution: float | None = 1, cutoff: int | None = 1, best_n: int | None = None ): ... @_dispatchable -def naive_greedy_modularity_communities(G, resolution: float = 1, weight: Incomplete | None = None): ... +def naive_greedy_modularity_communities(G: Graph[_Node], resolution: float = 1, weight: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/community/quality.pyi b/stubs/networkx/networkx/algorithms/community/quality.pyi index 12f9e93f2f50..2ce826f6e83e 100644 --- a/stubs/networkx/networkx/algorithms/community/quality.pyi +++ b/stubs/networkx/networkx/algorithms/community/quality.pyi @@ -1,3 +1,4 @@ +from networkx.classes.graph import Graph, _Node from networkx.exception import NetworkXError from networkx.utils.backends import _dispatchable @@ -7,6 +8,6 @@ class NotAPartition(NetworkXError): def __init__(self, G, collection) -> None: ... @_dispatchable -def modularity(G, communities, weight: str = "weight", resolution: float = 1): ... +def modularity(G: Graph[_Node], communities, weight: str | None = "weight", resolution: float = 1): ... @_dispatchable -def partition_quality(G, partition): ... +def partition_quality(G: Graph[_Node], partition): ... diff --git a/stubs/networkx/networkx/algorithms/components/biconnected.pyi b/stubs/networkx/networkx/algorithms/components/biconnected.pyi index 1f58e5d23192..782f1c69014b 100644 --- a/stubs/networkx/networkx/algorithms/components/biconnected.pyi +++ b/stubs/networkx/networkx/algorithms/components/biconnected.pyi @@ -1,13 +1,14 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_biconnected(G): ... +def is_biconnected(G: Graph[_Node]): ... 
@_dispatchable -def biconnected_component_edges(G) -> Generator[Incomplete, Incomplete, None]: ... +def biconnected_component_edges(G: Graph[_Node]) -> Generator[Incomplete, Incomplete, None]: ... @_dispatchable -def biconnected_components(G) -> Generator[Incomplete, None, None]: ... +def biconnected_components(G: Graph[_Node]) -> Generator[Incomplete, None, None]: ... @_dispatchable -def articulation_points(G) -> Generator[Incomplete, None, None]: ... +def articulation_points(G: Graph[_Node]) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/components/connected.pyi b/stubs/networkx/networkx/algorithms/components/connected.pyi index 240517110407..67a380977343 100644 --- a/stubs/networkx/networkx/algorithms/components/connected.pyi +++ b/stubs/networkx/networkx/algorithms/components/connected.pyi @@ -1,13 +1,14 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def connected_components(G) -> Generator[Incomplete, None, None]: ... +def connected_components(G: Graph[_Node]) -> Generator[Incomplete, None, None]: ... @_dispatchable -def number_connected_components(G): ... +def number_connected_components(G: Graph[_Node]): ... @_dispatchable -def is_connected(G): ... +def is_connected(G: Graph[_Node]): ... @_dispatchable -def node_connected_component(G, n): ... +def node_connected_component(G: Graph[_Node], n: str): ... diff --git a/stubs/networkx/networkx/algorithms/components/semiconnected.pyi b/stubs/networkx/networkx/algorithms/components/semiconnected.pyi index 2a65d32221aa..178a602e4e47 100644 --- a/stubs/networkx/networkx/algorithms/components/semiconnected.pyi +++ b/stubs/networkx/networkx/algorithms/components/semiconnected.pyi @@ -1,6 +1,5 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_semiconnected(G, topo_order: Incomplete | None = None): ... +def is_semiconnected(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/components/strongly_connected.pyi b/stubs/networkx/networkx/algorithms/components/strongly_connected.pyi index 93d76806aed3..4747f4950e01 100644 --- a/stubs/networkx/networkx/algorithms/components/strongly_connected.pyi +++ b/stubs/networkx/networkx/algorithms/components/strongly_connected.pyi @@ -1,4 +1,4 @@ -from collections.abc import Generator, Hashable, Iterable +from collections.abc import Generator from networkx.classes.digraph import DiGraph from networkx.classes.graph import Graph, _Node @@ -7,10 +7,10 @@ from networkx.utils.backends import _dispatchable @_dispatchable def strongly_connected_components(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ... @_dispatchable -def kosaraju_strongly_connected_components(G: Graph[_Node], source: _Node | None = None) -> Generator[set[_Node], None, None]: ... +def kosaraju_strongly_connected_components(G: Graph[_Node], source=None) -> Generator[set[_Node], None, None]: ... @_dispatchable -def number_strongly_connected_components(G: Graph[Hashable]) -> int: ... +def number_strongly_connected_components(G: Graph[_Node]) -> int: ... @_dispatchable -def is_strongly_connected(G: Graph[Hashable]) -> bool: ... +def is_strongly_connected(G: Graph[_Node]) -> bool: ... @_dispatchable -def condensation(G: DiGraph[_Node], scc: Iterable[Iterable[_Node]] | None = None) -> DiGraph[int]: ... 
+def condensation(G: DiGraph[_Node], scc=None) -> DiGraph[int]: ... diff --git a/stubs/networkx/networkx/algorithms/components/weakly_connected.pyi b/stubs/networkx/networkx/algorithms/components/weakly_connected.pyi index 66c4f7b61dfe..84cccf2f0250 100644 --- a/stubs/networkx/networkx/algorithms/components/weakly_connected.pyi +++ b/stubs/networkx/networkx/algorithms/components/weakly_connected.pyi @@ -1,4 +1,4 @@ -from collections.abc import Generator, Hashable +from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -6,6 +6,6 @@ from networkx.utils.backends import _dispatchable @_dispatchable def weakly_connected_components(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ... @_dispatchable -def number_weakly_connected_components(G: Graph[Hashable]) -> int: ... +def number_weakly_connected_components(G: Graph[_Node]) -> int: ... @_dispatchable -def is_weakly_connected(G: Graph[Hashable]) -> bool: ... +def is_weakly_connected(G: Graph[_Node]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/connectivity/connectivity.pyi b/stubs/networkx/networkx/algorithms/connectivity/connectivity.pyi index a13069989845..8060b1e332c0 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/connectivity.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/connectivity.pyi @@ -1,6 +1,9 @@ from _typeshed import Incomplete +from collections.abc import Callable, Iterable from networkx.algorithms.flow import edmonds_karp +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = [ @@ -11,40 +14,43 @@ __all__ = [ "edge_connectivity", "all_pairs_node_connectivity", ] - default_flow_func = edmonds_karp @_dispatchable def local_node_connectivity( - G, - s, - t, - flow_func: Incomplete | None = None, - auxiliary: Incomplete | None = None, - residual: Incomplete | None = None, - cutoff: Incomplete | None = None, + G: Graph[_Node], + s: _Node, + t: _Node, + flow_func: Callable[..., Incomplete] | None = None, + auxiliary: DiGraph[_Node] | None = None, + residual: DiGraph[_Node] | None = None, + cutoff: float | None = None, ): ... @_dispatchable -def node_connectivity(G, s: Incomplete | None = None, t: Incomplete | None = None, flow_func: Incomplete | None = None): ... +def node_connectivity( + G: Graph[_Node], s: _Node | None = None, t: _Node | None = None, flow_func: Callable[..., Incomplete] | None = None +): ... @_dispatchable -def average_node_connectivity(G, flow_func: Incomplete | None = None): ... +def average_node_connectivity(G: Graph[_Node], flow_func: Callable[..., Incomplete] | None = None): ... @_dispatchable -def all_pairs_node_connectivity(G, nbunch: Incomplete | None = None, flow_func: Incomplete | None = None): ... +def all_pairs_node_connectivity( + G: Graph[_Node], nbunch: Iterable[Incomplete] | None = None, flow_func: Callable[..., Incomplete] | None = None +): ... @_dispatchable def local_edge_connectivity( - G, - s, - t, - flow_func: Incomplete | None = None, - auxiliary: Incomplete | None = None, - residual: Incomplete | None = None, - cutoff: Incomplete | None = None, + G: Graph[_Node], + s: _Node, + t: _Node, + flow_func: Callable[..., Incomplete] | None = None, + auxiliary: DiGraph[_Node] | None = None, + residual: DiGraph[_Node] | None = None, + cutoff: float | None = None, ): ... 
@_dispatchable def edge_connectivity( - G, - s: Incomplete | None = None, - t: Incomplete | None = None, - flow_func: Incomplete | None = None, - cutoff: Incomplete | None = None, + G: Graph[_Node], + s: _Node | None = None, + t: _Node | None = None, + flow_func: Callable[..., Incomplete] | None = None, + cutoff: float | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/connectivity/cuts.pyi b/stubs/networkx/networkx/algorithms/connectivity/cuts.pyi index 44372c2d915c..d9c41c57195d 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/cuts.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/cuts.pyi @@ -1,21 +1,37 @@ from _typeshed import Incomplete +from collections.abc import Callable from networkx.algorithms.flow import edmonds_karp +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["minimum_st_node_cut", "minimum_node_cut", "minimum_st_edge_cut", "minimum_edge_cut"] - default_flow_func = edmonds_karp @_dispatchable def minimum_st_edge_cut( - G, s, t, flow_func: Incomplete | None = None, auxiliary: Incomplete | None = None, residual: Incomplete | None = None + G: Graph[_Node], + s: _Node, + t: _Node, + flow_func: Callable[..., Incomplete] | None = None, + auxiliary: DiGraph[_Node] | None = None, + residual: DiGraph[_Node] | None = None, ): ... @_dispatchable def minimum_st_node_cut( - G, s, t, flow_func: Incomplete | None = None, auxiliary: Incomplete | None = None, residual: Incomplete | None = None + G: Graph[_Node], + s: _Node, + t: _Node, + flow_func: Callable[..., Incomplete] | None = None, + auxiliary: DiGraph[_Node] | None = None, + residual: DiGraph[_Node] | None = None, ): ... @_dispatchable -def minimum_node_cut(G, s: Incomplete | None = None, t: Incomplete | None = None, flow_func: Incomplete | None = None): ... +def minimum_node_cut( + G: Graph[_Node], s: _Node | None = None, t: _Node | None = None, flow_func: Callable[..., Incomplete] | None = None +): ... @_dispatchable -def minimum_edge_cut(G, s: Incomplete | None = None, t: Incomplete | None = None, flow_func: Incomplete | None = None): ... +def minimum_edge_cut( + G: Graph[_Node], s: _Node | None = None, t: _Node | None = None, flow_func: Callable[..., Incomplete] | None = None +): ... diff --git a/stubs/networkx/networkx/algorithms/connectivity/disjoint_paths.pyi b/stubs/networkx/networkx/algorithms/connectivity/disjoint_paths.pyi index 2f8fc3abe242..254973fa6a68 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/disjoint_paths.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/disjoint_paths.pyi @@ -1,30 +1,31 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Callable, Generator from networkx.algorithms.flow import edmonds_karp +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["edge_disjoint_paths", "node_disjoint_paths"] - default_flow_func = edmonds_karp @_dispatchable def edge_disjoint_paths( - G, - s, - t, - flow_func: Incomplete | None = None, - cutoff: Incomplete | None = None, - auxiliary: Incomplete | None = None, - residual: Incomplete | None = None, + G: Graph[_Node], + s: _Node, + t: _Node, + flow_func: Callable[..., Incomplete] | None = None, + cutoff: int | None = None, + auxiliary: DiGraph[_Node] | None = None, + residual: DiGraph[_Node] | None = None, ) -> Generator[Incomplete, None, None]: ... 
@_dispatchable def node_disjoint_paths( - G, - s, - t, - flow_func: Incomplete | None = None, - cutoff: Incomplete | None = None, - auxiliary: Incomplete | None = None, - residual: Incomplete | None = None, + G: Graph[_Node], + s: _Node, + t: _Node, + flow_func: Callable[..., Incomplete] | None = None, + cutoff: int | None = None, + auxiliary: DiGraph[_Node] | None = None, + residual: DiGraph[_Node] | None = None, ) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi b/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi index fe32ff99d112..de9d9d64763b 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi @@ -1,17 +1,18 @@ -from collections.abc import Generator, Hashable +from _typeshed import SupportsGetItem +from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_k_edge_connected(G: Graph[Hashable], k: int): ... +def is_k_edge_connected(G: Graph[_Node], k: int): ... @_dispatchable -def is_locally_k_edge_connected(G, s, t, k): ... +def is_locally_k_edge_connected(G: Graph[_Node], s: _Node, t: _Node, k: int): ... @_dispatchable def k_edge_augmentation( G: Graph[_Node], k: int, - avail: tuple[_Node, _Node] | tuple[_Node, _Node, dict[str, int]] | None = None, + avail: set[tuple[int, int]] | set[tuple[int, int, float]] | SupportsGetItem[tuple[int, int], float] | None = None, weight: str | None = None, partial: bool = False, ) -> Generator[tuple[_Node, _Node], None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/connectivity/edge_kcomponents.pyi b/stubs/networkx/networkx/algorithms/connectivity/edge_kcomponents.pyi index 2578606fb13f..67927d5cb64a 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/edge_kcomponents.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/edge_kcomponents.pyi @@ -1,19 +1,21 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def k_edge_components(G, k): ... +def k_edge_components(G: Graph[_Node], k: int): ... @_dispatchable -def k_edge_subgraphs(G, k): ... +def k_edge_subgraphs(G: Graph[_Node], k: int): ... @_dispatchable -def bridge_components(G) -> Generator[Incomplete, Incomplete, None]: ... +def bridge_components(G: Graph[_Node]) -> Generator[Incomplete, Incomplete, None]: ... class EdgeComponentAuxGraph: A: Incomplete H: Incomplete + @classmethod def construct(cls, G): ... - def k_edge_components(self, k) -> Generator[Incomplete, Incomplete, None]: ... - def k_edge_subgraphs(self, k) -> Generator[Incomplete, Incomplete, None]: ... + def k_edge_components(self, k: int) -> Generator[Incomplete, Incomplete, None]: ... + def k_edge_subgraphs(self, k: int) -> Generator[Incomplete, Incomplete, None]: ... 
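For context, a minimal usage sketch of the `avail` shapes named by the `k_edge_augmentation` annotation above, written against the documented networkx runtime API rather than anything this diff adds:

import networkx as nx

# A path graph 0-1-2-3 has edge connectivity 1; candidate edges are offered through
# `avail`, here in the unweighted set[tuple[int, int]] form accepted by the annotation.
G = nx.path_graph(4)
avail = {(0, 2), (1, 3), (0, 3)}
extra = list(nx.k_edge_augmentation(G, k=2, avail=avail))
G.add_edges_from(extra)
assert nx.edge_connectivity(G) >= 2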
diff --git a/stubs/networkx/networkx/algorithms/connectivity/kcomponents.pyi b/stubs/networkx/networkx/algorithms/connectivity/kcomponents.pyi index ee074a6a682d..32010e9ed331 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/kcomponents.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/kcomponents.pyi @@ -1,11 +1,12 @@ from _typeshed import Incomplete +from collections.abc import Callable from networkx.algorithms.flow import edmonds_karp +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["k_components"] - default_flow_func = edmonds_karp @_dispatchable -def k_components(G, flow_func: Incomplete | None = None): ... +def k_components(G: Graph[_Node], flow_func: Callable[..., Incomplete] | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/connectivity/kcutsets.pyi b/stubs/networkx/networkx/algorithms/connectivity/kcutsets.pyi index 8f527058659c..78b0cb1befbf 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/kcutsets.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/kcutsets.pyi @@ -1,12 +1,14 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Callable, Generator from networkx.algorithms.flow import edmonds_karp +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["all_node_cuts"] - default_flow_func = edmonds_karp @_dispatchable -def all_node_cuts(G, k: Incomplete | None = None, flow_func: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... +def all_node_cuts( + G: Graph[_Node], k: int | None = None, flow_func: Callable[..., Incomplete] | None = None +) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/connectivity/stoerwagner.pyi b/stubs/networkx/networkx/algorithms/connectivity/stoerwagner.pyi index 1bd01705dec3..c0b8cd3a733c 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/stoerwagner.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/stoerwagner.pyi @@ -1,4 +1,5 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def stoer_wagner(G, weight: str = "weight", heap=...): ... +def stoer_wagner(G: Graph[_Node], weight: str = "weight", heap: type = ...): ... diff --git a/stubs/networkx/networkx/algorithms/core.pyi b/stubs/networkx/networkx/algorithms/core.pyi index 44bde570425a..401797dc8be1 100644 --- a/stubs/networkx/networkx/algorithms/core.pyi +++ b/stubs/networkx/networkx/algorithms/core.pyi @@ -1,18 +1,19 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def core_number(G): ... +def core_number(G: Graph[_Node]): ... @_dispatchable -def k_core(G, k: Incomplete | None = None, core_number: Incomplete | None = None): ... +def k_core(G: Graph[_Node], k: int | None = None, core_number: SupportsGetItem[Incomplete, Incomplete] | None = None): ... @_dispatchable -def k_shell(G, k: Incomplete | None = None, core_number: Incomplete | None = None): ... +def k_shell(G: Graph[_Node], k: int | None = None, core_number: SupportsGetItem[Incomplete, Incomplete] | None = None): ... @_dispatchable -def k_crust(G, k: Incomplete | None = None, core_number: Incomplete | None = None): ... 
+def k_crust(G: Graph[_Node], k: int | None = None, core_number: SupportsGetItem[Incomplete, Incomplete] | None = None): ... @_dispatchable -def k_corona(G, k, core_number: Incomplete | None = None): ... +def k_corona(G: Graph[_Node], k: int, core_number: SupportsGetItem[Incomplete, Incomplete] | None = None): ... @_dispatchable -def k_truss(G, k): ... +def k_truss(G: Graph[_Node], k: int): ... @_dispatchable -def onion_layers(G): ... +def onion_layers(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/covering.pyi b/stubs/networkx/networkx/algorithms/covering.pyi index 8b50c52a1b37..0680bbc27e41 100644 --- a/stubs/networkx/networkx/algorithms/covering.pyi +++ b/stubs/networkx/networkx/algorithms/covering.pyi @@ -1,8 +1,10 @@ from _typeshed import Incomplete +from collections.abc import Callable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def min_edge_cover(G, matching_algorithm: Incomplete | None = None): ... +def min_edge_cover(G: Graph[_Node], matching_algorithm: Callable[..., Incomplete] | None = None): ... @_dispatchable -def is_edge_cover(G, cover): ... +def is_edge_cover(G: Graph[_Node], cover: set[Incomplete]): ... diff --git a/stubs/networkx/networkx/algorithms/cuts.pyi b/stubs/networkx/networkx/algorithms/cuts.pyi index 11ef7637a259..4f3e3474b5b9 100644 --- a/stubs/networkx/networkx/algorithms/cuts.pyi +++ b/stubs/networkx/networkx/algorithms/cuts.pyi @@ -1,20 +1,21 @@ -from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def cut_size(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ... +def cut_size(G: Graph[_Node], S: Iterable[_Node], T: Iterable[_Node] | None = None, weight: str | None = None): ... @_dispatchable -def volume(G, S, weight: Incomplete | None = None): ... +def volume(G: Graph[_Node], S: Iterable[_Node], weight: str | None = None): ... @_dispatchable -def normalized_cut_size(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ... +def normalized_cut_size(G: Graph[_Node], S: Iterable[_Node], T: Iterable[_Node] | None = None, weight: str | None = None): ... @_dispatchable -def conductance(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ... +def conductance(G: Graph[_Node], S: Iterable[_Node], T: Iterable[_Node] | None = None, weight: str | None = None): ... @_dispatchable -def edge_expansion(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ... +def edge_expansion(G: Graph[_Node], S: Iterable[_Node], T: Iterable[_Node] | None = None, weight: str | None = None): ... @_dispatchable -def mixing_expansion(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ... +def mixing_expansion(G: Graph[_Node], S: Iterable[_Node], T: Iterable[_Node] | None = None, weight: str | None = None): ... @_dispatchable -def node_expansion(G, S): ... +def node_expansion(G: Graph[_Node], S: Iterable[_Node]): ... @_dispatchable -def boundary_expansion(G, S): ... +def boundary_expansion(G: Graph[_Node], S: Iterable[_Node]): ... 
diff --git a/stubs/networkx/networkx/algorithms/cycles.pyi b/stubs/networkx/networkx/algorithms/cycles.pyi index 8236680f3f05..9f8b887516e7 100644 --- a/stubs/networkx/networkx/algorithms/cycles.pyi +++ b/stubs/networkx/networkx/algorithms/cycles.pyi @@ -1,23 +1,26 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def cycle_basis(G, root: Incomplete | None = None): ... +def cycle_basis(G: Graph[_Node], root: _Node | None = None): ... @_dispatchable -def simple_cycles(G, length_bound: Incomplete | None = None) -> Generator[Incomplete, Incomplete, None]: ... +def simple_cycles(G: Graph[_Node], length_bound: int | None = None) -> Generator[Incomplete, Incomplete, None]: ... class _NeighborhoodCache(dict[Incomplete, Incomplete]): G: Incomplete + def __init__(self, G) -> None: ... def __missing__(self, v): ... @_dispatchable -def chordless_cycles(G, length_bound: Incomplete | None = None) -> Generator[Incomplete, Incomplete, None]: ... +def chordless_cycles(G: DiGraph[_Node], length_bound: int | None = None) -> Generator[Incomplete, Incomplete, None]: ... @_dispatchable -def recursive_simple_cycles(G): ... +def recursive_simple_cycles(G: DiGraph[_Node]): ... @_dispatchable -def find_cycle(G, source: Incomplete | None = None, orientation: Incomplete | None = None): ... +def find_cycle(G: Graph[_Node], source=None, orientation=None): ... @_dispatchable -def minimum_cycle_basis(G, weight: Incomplete | None = None): ... +def minimum_cycle_basis(G: Graph[_Node], weight: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/d_separation.pyi b/stubs/networkx/networkx/algorithms/d_separation.pyi index c672127feb42..e1eb575409fd 100644 --- a/stubs/networkx/networkx/algorithms/d_separation.pyi +++ b/stubs/networkx/networkx/algorithms/d_separation.pyi @@ -1,3 +1,5 @@ +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable @_dispatchable @@ -5,4 +7,4 @@ def d_separated(G, x, y, z): ... @_dispatchable def minimal_d_separator(G, u, v): ... @_dispatchable -def is_minimal_d_separator(G, u, v, z): ... +def is_minimal_d_separator(G: DiGraph[_Node], x, y, z, *, included=None, restricted=None): ... diff --git a/stubs/networkx/networkx/algorithms/dag.pyi b/stubs/networkx/networkx/algorithms/dag.pyi index b85fff5cb09b..07c3b3deff43 100644 --- a/stubs/networkx/networkx/algorithms/dag.pyi +++ b/stubs/networkx/networkx/algorithms/dag.pyi @@ -1,14 +1,14 @@ -from _typeshed import SupportsRichComparison -from collections.abc import Callable, Generator, Iterable, Reversible +from _typeshed import Incomplete +from collections.abc import Callable, Generator, Iterable from networkx.classes.digraph import DiGraph from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def descendants(G: Graph[_Node], source: _Node) -> set[_Node]: ... +def descendants(G: Graph[_Node], source) -> set[_Node]: ... @_dispatchable -def ancestors(G: Graph[_Node], source: _Node) -> set[_Node]: ... +def ancestors(G: Graph[_Node], source) -> set[_Node]: ... @_dispatchable def is_directed_acyclic_graph(G: Graph[_Node]) -> bool: ... 
@_dispatchable @@ -17,25 +17,28 @@ def topological_generations(G: DiGraph[_Node]) -> Generator[list[_Node], None, N def topological_sort(G: DiGraph[_Node]) -> Generator[_Node, None, None]: ... @_dispatchable def lexicographical_topological_sort( - G: DiGraph[_Node], key: Callable[[_Node], SupportsRichComparison] | None = None + G: DiGraph[_Node], key: Callable[..., Incomplete] | None = None ) -> Generator[_Node, None, None]: ... @_dispatchable def all_topological_sorts(G: DiGraph[_Node]) -> Generator[list[_Node], None, None]: ... @_dispatchable def is_aperiodic(G: DiGraph[_Node]) -> bool: ... @_dispatchable -def transitive_closure(G: Graph[_Node], reflexive: bool = False) -> Graph[_Node]: ... +def transitive_closure(G: Graph[_Node], reflexive=False) -> Graph[_Node]: ... @_dispatchable -def transitive_closure_dag(G: DiGraph[_Node], reflexive: bool = False) -> DiGraph[_Node]: ... +def transitive_closure_dag(G: DiGraph[_Node], topo_order: Iterable[Incomplete] | None = None) -> DiGraph[_Node]: ... @_dispatchable def transitive_reduction(G: DiGraph[_Node]) -> DiGraph[_Node]: ... @_dispatchable -def antichains(G: DiGraph[_Node], topo_order: Reversible[_Node] | None = None) -> Generator[list[_Node], None, None]: ... +def antichains(G: DiGraph[_Node], topo_order: Iterable[Incomplete] | None = None) -> Generator[list[_Node], None, None]: ... @_dispatchable def dag_longest_path( - G: DiGraph[_Node], weight: str = "weight", default_weight: int = 1, topo_order: Iterable[_Node] | None = None + G: DiGraph[_Node], + weight: str | None = "weight", + default_weight: int | None = 1, + topo_order: Iterable[Incomplete] | None = None, ) -> list[_Node]: ... @_dispatchable -def dag_longest_path_length(G: DiGraph[_Node], weight: str = "weight", default_weight: int = 1) -> int: ... +def dag_longest_path_length(G: DiGraph[_Node], weight: str | None = "weight", default_weight: int | None = 1) -> int: ... @_dispatchable def dag_to_branching(G: Graph[_Node]) -> Graph[_Node]: ... diff --git a/stubs/networkx/networkx/algorithms/distance_measures.pyi b/stubs/networkx/networkx/algorithms/distance_measures.pyi index 35e356e71cb7..1f768011a2e9 100644 --- a/stubs/networkx/networkx/algorithms/distance_measures.pyi +++ b/stubs/networkx/networkx/algorithms/distance_measures.pyi @@ -1,18 +1,17 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def eccentricity(G, v: Incomplete | None = None, sp: Incomplete | None = None, weight: Incomplete | None = None): ... +def eccentricity(G: Graph[_Node], v: _Node | None = None, sp=None, weight: str | None = None): ... @_dispatchable -def diameter(G, e: Incomplete | None = None, usebounds: bool = False, weight: Incomplete | None = None): ... +def diameter(G: Graph[_Node], e=None, usebounds=False, weight: str | None = None): ... @_dispatchable -def periphery(G, e: Incomplete | None = None, usebounds: bool = False, weight: Incomplete | None = None): ... +def periphery(G: Graph[_Node], e=None, usebounds=False, weight: str | None = None): ... @_dispatchable -def radius(G, e: Incomplete | None = None, usebounds: bool = False, weight: Incomplete | None = None): ... +def radius(G: Graph[_Node], e=None, usebounds=False, weight: str | None = None): ... @_dispatchable -def center(G, e: Incomplete | None = None, usebounds: bool = False, weight: Incomplete | None = None): ... +def center(G: Graph[_Node], e=None, usebounds=False, weight: str | None = None): ... 
@_dispatchable -def barycenter(G, weight: Incomplete | None = None, attr: Incomplete | None = None, sp: Incomplete | None = None): ... +def barycenter(G, weight: str | None = None, attr=None, sp=None): ... @_dispatchable -def resistance_distance(G, nodeA, nodeB, weight: Incomplete | None = None, invert_weight: bool = True): ... +def resistance_distance(G: Graph[_Node], nodeA=None, nodeB=None, weight: str | None = None, invert_weight: bool = True): ... diff --git a/stubs/networkx/networkx/algorithms/distance_regular.pyi b/stubs/networkx/networkx/algorithms/distance_regular.pyi index e477deb4a0cb..182d37110f07 100644 --- a/stubs/networkx/networkx/algorithms/distance_regular.pyi +++ b/stubs/networkx/networkx/algorithms/distance_regular.pyi @@ -1,10 +1,11 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_distance_regular(G): ... +def is_distance_regular(G: Graph[_Node]): ... @_dispatchable def global_parameters(b, c): ... @_dispatchable -def intersection_array(G): ... +def intersection_array(G: Graph[_Node]): ... @_dispatchable -def is_strongly_regular(G): ... +def is_strongly_regular(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/dominance.pyi b/stubs/networkx/networkx/algorithms/dominance.pyi index a87b0172e7da..cc431c676e73 100644 --- a/stubs/networkx/networkx/algorithms/dominance.pyi +++ b/stubs/networkx/networkx/algorithms/dominance.pyi @@ -1,6 +1,7 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def immediate_dominators(G, start): ... +def immediate_dominators(G: Graph[_Node], start: _Node): ... @_dispatchable -def dominance_frontiers(G, start): ... +def dominance_frontiers(G: Graph[_Node], start: _Node): ... diff --git a/stubs/networkx/networkx/algorithms/dominating.pyi b/stubs/networkx/networkx/algorithms/dominating.pyi index 0f9535464f32..3786bb538077 100644 --- a/stubs/networkx/networkx/algorithms/dominating.pyi +++ b/stubs/networkx/networkx/algorithms/dominating.pyi @@ -1,8 +1,10 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def dominating_set(G, start_with: Incomplete | None = None): ... +def dominating_set(G: Graph[_Node], start_with: _Node | None = None): ... @_dispatchable -def is_dominating_set(G, nbunch): ... +def is_dominating_set(G: Graph[_Node], nbunch: Iterable[Incomplete]): ... diff --git a/stubs/networkx/networkx/algorithms/efficiency_measures.pyi b/stubs/networkx/networkx/algorithms/efficiency_measures.pyi index a0083616a943..420795a59a58 100644 --- a/stubs/networkx/networkx/algorithms/efficiency_measures.pyi +++ b/stubs/networkx/networkx/algorithms/efficiency_measures.pyi @@ -1,7 +1,8 @@ +from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable @_dispatchable -def efficiency(G, u, v): ... +def efficiency(G, u: _Node, v: _Node): ... @_dispatchable def global_efficiency(G): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/euler.pyi b/stubs/networkx/networkx/algorithms/euler.pyi index b88b278cc30e..0e8552402c8d 100644 --- a/stubs/networkx/networkx/algorithms/euler.pyi +++ b/stubs/networkx/networkx/algorithms/euler.pyi @@ -1,17 +1,20 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_eulerian(G): ... +def is_eulerian(G: Graph[_Node]): ... @_dispatchable def is_semieulerian(G): ... @_dispatchable -def eulerian_circuit(G, source: Incomplete | None = None, keys: bool = False) -> Generator[Incomplete, Incomplete, None]: ... +def eulerian_circuit( + G: Graph[_Node], source: _Node | None = None, keys: bool = False +) -> Generator[Incomplete, Incomplete, None]: ... @_dispatchable -def has_eulerian_path(G, source: Incomplete | None = None): ... +def has_eulerian_path(G: Graph[_Node], source: _Node | None = None): ... @_dispatchable -def eulerian_path(G, source: Incomplete | None = None, keys: bool = False) -> Generator[Incomplete, Incomplete, None]: ... +def eulerian_path(G: Graph[_Node], source=None, keys: bool = False) -> Generator[Incomplete, Incomplete, None]: ... @_dispatchable -def eulerize(G): ... +def eulerize(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/flow/boykovkolmogorov.pyi b/stubs/networkx/networkx/algorithms/flow/boykovkolmogorov.pyi index f5a6c21b3f23..1389aac0009d 100644 --- a/stubs/networkx/networkx/algorithms/flow/boykovkolmogorov.pyi +++ b/stubs/networkx/networkx/algorithms/flow/boykovkolmogorov.pyi @@ -1,14 +1,13 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def boykov_kolmogorov( - G, - s, - t, + G: Graph[_Node], + s: _Node, + t: _Node, capacity: str = "capacity", - residual: Incomplete | None = None, + residual: Graph[_Node] | None = None, value_only: bool = False, - cutoff: Incomplete | None = None, + cutoff: float | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/flow/capacityscaling.pyi b/stubs/networkx/networkx/algorithms/flow/capacityscaling.pyi index 36918442e80e..8f5b3bef312a 100644 --- a/stubs/networkx/networkx/algorithms/flow/capacityscaling.pyi +++ b/stubs/networkx/networkx/algorithms/flow/capacityscaling.pyi @@ -1,4 +1,7 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def capacity_scaling(G, demand: str = "demand", capacity: str = "capacity", weight: str = "weight", heap=...): ... +def capacity_scaling( + G: Graph[_Node], demand: str = "demand", capacity: str = "capacity", weight: str = "weight", heap: type = ... +): ... diff --git a/stubs/networkx/networkx/algorithms/flow/dinitz_alg.pyi b/stubs/networkx/networkx/algorithms/flow/dinitz_alg.pyi index 2a37c2973077..bd4c8bb1bdd6 100644 --- a/stubs/networkx/networkx/algorithms/flow/dinitz_alg.pyi +++ b/stubs/networkx/networkx/algorithms/flow/dinitz_alg.pyi @@ -1,14 +1,13 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def dinitz( - G, - s, - t, + G: Graph[_Node], + s: _Node, + t: _Node, capacity: str = "capacity", - residual: Incomplete | None = None, + residual: Graph[_Node] | None = None, value_only: bool = False, - cutoff: Incomplete | None = None, + cutoff: float | None = None, ): ... 
diff --git a/stubs/networkx/networkx/algorithms/flow/edmondskarp.pyi b/stubs/networkx/networkx/algorithms/flow/edmondskarp.pyi index 0989f90f39d3..95fb8d5a3809 100644 --- a/stubs/networkx/networkx/algorithms/flow/edmondskarp.pyi +++ b/stubs/networkx/networkx/algorithms/flow/edmondskarp.pyi @@ -1,14 +1,13 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def edmonds_karp( - G, - s, - t, + G: Graph[_Node], + s: _Node, + t: _Node, capacity: str = "capacity", - residual: Incomplete | None = None, + residual: Graph[_Node] | None = None, value_only: bool = False, - cutoff: Incomplete | None = None, + cutoff: float | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/flow/gomory_hu.pyi b/stubs/networkx/networkx/algorithms/flow/gomory_hu.pyi index 5c7f878564d1..a1680ad0b727 100644 --- a/stubs/networkx/networkx/algorithms/flow/gomory_hu.pyi +++ b/stubs/networkx/networkx/algorithms/flow/gomory_hu.pyi @@ -1,12 +1,13 @@ from _typeshed import Incomplete +from collections.abc import Callable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from .edmondskarp import edmonds_karp __all__ = ["gomory_hu_tree"] - default_flow_func = edmonds_karp @_dispatchable -def gomory_hu_tree(G, capacity: str = "capacity", flow_func: Incomplete | None = None): ... +def gomory_hu_tree(G: Graph[_Node], capacity: str = "capacity", flow_func: Callable[..., Incomplete] | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/flow/maxflow.pyi b/stubs/networkx/networkx/algorithms/flow/maxflow.pyi index f64d48aa8cdb..a94415cfb00d 100644 --- a/stubs/networkx/networkx/algorithms/flow/maxflow.pyi +++ b/stubs/networkx/networkx/algorithms/flow/maxflow.pyi @@ -1,18 +1,47 @@ from _typeshed import Incomplete +from collections.abc import Callable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from .preflowpush import preflow_push __all__ = ["maximum_flow", "maximum_flow_value", "minimum_cut", "minimum_cut_value"] - default_flow_func = preflow_push @_dispatchable -def maximum_flow(flowG, _s, _t, capacity: str = "capacity", flow_func: Incomplete | None = None, **kwargs): ... +def maximum_flow( + flowG: Graph[_Node], + _s: _Node, + _t: _Node, + capacity: str = "capacity", + flow_func: Callable[..., Incomplete] | None = None, + **kwargs, +): ... @_dispatchable -def maximum_flow_value(flowG, _s, _t, capacity: str = "capacity", flow_func: Incomplete | None = None, **kwargs): ... +def maximum_flow_value( + flowG: Graph[_Node], + _s: _Node, + _t: _Node, + capacity: str = "capacity", + flow_func: Callable[..., Incomplete] | None = None, + **kwargs, +): ... @_dispatchable -def minimum_cut(flowG, _s, _t, capacity: str = "capacity", flow_func: Incomplete | None = None, **kwargs): ... +def minimum_cut( + flowG: Graph[_Node], + _s: _Node, + _t: _Node, + capacity: str = "capacity", + flow_func: Callable[..., Incomplete] | None = None, + **kwargs, +): ... @_dispatchable -def minimum_cut_value(flowG, _s, _t, capacity: str = "capacity", flow_func: Incomplete | None = None, **kwargs): ... +def minimum_cut_value( + flowG: Graph[_Node], + _s: _Node, + _t: _Node, + capacity: str = "capacity", + flow_func: Callable[..., Incomplete] | None = None, + **kwargs, +): ... 
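A short sketch of the `flow_func` plumbing these signatures describe, assuming the documented networkx flow API (an illustration, not part of the patch):

import networkx as nx
from networkx.algorithms.flow import shortest_augmenting_path

# Any of the flow routines typed above can be handed to maximum_flow via `flow_func`.
G = nx.DiGraph()
G.add_edge("s", "a", capacity=3.0)
G.add_edge("a", "t", capacity=2.0)
value, flow_dict = nx.maximum_flow(G, "s", "t", flow_func=shortest_augmenting_path)
assert value == 2.0 and flow_dict["s"]["a"] == 2.0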
diff --git a/stubs/networkx/networkx/algorithms/flow/mincost.pyi b/stubs/networkx/networkx/algorithms/flow/mincost.pyi index 41f70569369b..86e0834f567e 100644 --- a/stubs/networkx/networkx/algorithms/flow/mincost.pyi +++ b/stubs/networkx/networkx/algorithms/flow/mincost.pyi @@ -1,10 +1,13 @@ +from _typeshed import Incomplete, SupportsGetItem + +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def min_cost_flow_cost(G, demand: str = "demand", capacity: str = "capacity", weight: str = "weight"): ... +def min_cost_flow_cost(G: Graph[_Node], demand: str = "demand", capacity: str = "capacity", weight: str = "weight"): ... @_dispatchable -def min_cost_flow(G, demand: str = "demand", capacity: str = "capacity", weight: str = "weight"): ... +def min_cost_flow(G: Graph[_Node], demand: str = "demand", capacity: str = "capacity", weight: str = "weight"): ... @_dispatchable -def cost_of_flow(G, flowDict, weight: str = "weight"): ... +def cost_of_flow(G: Graph[_Node], flowDict: SupportsGetItem[Incomplete, Incomplete], weight: str = "weight"): ... @_dispatchable -def max_flow_min_cost(G, s, t, capacity: str = "capacity", weight: str = "weight"): ... +def max_flow_min_cost(G: Graph[_Node], s: str, t: str, capacity: str = "capacity", weight: str = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/flow/networksimplex.pyi b/stubs/networkx/networkx/algorithms/flow/networksimplex.pyi index dcbc88ac35c6..b9cf6bae5b18 100644 --- a/stubs/networkx/networkx/algorithms/flow/networksimplex.pyi +++ b/stubs/networkx/networkx/algorithms/flow/networksimplex.pyi @@ -1,6 +1,7 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable class _DataEssentialsAndFunctions: @@ -22,6 +23,7 @@ class _DataEssentialsAndFunctions: next_node_dft: Incomplete prev_node_dft: Incomplete last_descendent_dft: Incomplete + def __init__(self, G, multigraph, demand: str = "demand", capacity: str = "capacity", weight: str = "weight") -> None: ... def initialize_spanning_tree(self, n, faux_inf) -> None: ... def find_apex(self, p, q): ... @@ -39,4 +41,4 @@ class _DataEssentialsAndFunctions: def find_leaving_edge(self, Wn, We): ... @_dispatchable -def network_simplex(G, demand: str = "demand", capacity: str = "capacity", weight: str = "weight"): ... +def network_simplex(G: Graph[_Node], demand: str = "demand", capacity: str = "capacity", weight: str = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/flow/preflowpush.pyi b/stubs/networkx/networkx/algorithms/flow/preflowpush.pyi index 51547a1c03a1..cd25f1cb00bd 100644 --- a/stubs/networkx/networkx/algorithms/flow/preflowpush.pyi +++ b/stubs/networkx/networkx/algorithms/flow/preflowpush.pyi @@ -1,14 +1,13 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def preflow_push( - G, - s, - t, + G: Graph[_Node], + s: _Node, + t: _Node, capacity: str = "capacity", - residual: Incomplete | None = None, + residual: Graph[_Node] | None = None, global_relabel_freq: float = 1, value_only: bool = False, ): ... 
diff --git a/stubs/networkx/networkx/algorithms/flow/shortestaugmentingpath.pyi b/stubs/networkx/networkx/algorithms/flow/shortestaugmentingpath.pyi index 93fdf09f637c..43b5ae4af72c 100644 --- a/stubs/networkx/networkx/algorithms/flow/shortestaugmentingpath.pyi +++ b/stubs/networkx/networkx/algorithms/flow/shortestaugmentingpath.pyi @@ -1,15 +1,14 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def shortest_augmenting_path( - G, - s, - t, + G: Graph[_Node], + s: _Node, + t: _Node, capacity: str = "capacity", - residual: Incomplete | None = None, + residual: Graph[_Node] | None = None, value_only: bool = False, two_phase: bool = False, - cutoff: Incomplete | None = None, + cutoff: float | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/flow/utils.pyi b/stubs/networkx/networkx/algorithms/flow/utils.pyi index 04c99b4ed081..3b041a5fc2b1 100644 --- a/stubs/networkx/networkx/algorithms/flow/utils.pyi +++ b/stubs/networkx/networkx/algorithms/flow/utils.pyi @@ -10,6 +10,7 @@ class CurrentEdge: class Level: active: Incomplete inactive: Incomplete + def __init__(self) -> None: ... class GlobalRelabelThreshold: diff --git a/stubs/networkx/networkx/algorithms/graph_hashing.pyi b/stubs/networkx/networkx/algorithms/graph_hashing.pyi index 218bf2f6a0d5..98b93f08f7b1 100644 --- a/stubs/networkx/networkx/algorithms/graph_hashing.pyi +++ b/stubs/networkx/networkx/algorithms/graph_hashing.pyi @@ -1,12 +1,20 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def weisfeiler_lehman_graph_hash( - G, edge_attr: Incomplete | None = None, node_attr: Incomplete | None = None, iterations: int = 3, digest_size: int = 16 + G: Graph[_Node], + edge_attr: str | None = None, + node_attr: str | None = None, + iterations: int | None = 3, + digest_size: int | None = 16, ): ... @_dispatchable def weisfeiler_lehman_subgraph_hashes( - G, edge_attr: Incomplete | None = None, node_attr: Incomplete | None = None, iterations: int = 3, digest_size: int = 16 + G: Graph[_Node], + edge_attr: str | None = None, + node_attr: str | None = None, + iterations: int | None = 3, + digest_size: int | None = 16, + include_initial_labels: bool | None = False, ): ... diff --git a/stubs/networkx/networkx/algorithms/graphical.pyi b/stubs/networkx/networkx/algorithms/graphical.pyi index 5f886377b9b7..87c3a9068dfd 100644 --- a/stubs/networkx/networkx/algorithms/graphical.pyi +++ b/stubs/networkx/networkx/algorithms/graphical.pyi @@ -1,14 +1,17 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + from networkx.utils.backends import _dispatchable @_dispatchable -def is_graphical(sequence, method: str = "eg"): ... +def is_graphical(sequence: Iterable[Incomplete], method="eg"): ... @_dispatchable -def is_valid_degree_sequence_havel_hakimi(deg_sequence): ... +def is_valid_degree_sequence_havel_hakimi(deg_sequence: Iterable[Incomplete]): ... @_dispatchable -def is_valid_degree_sequence_erdos_gallai(deg_sequence): ... +def is_valid_degree_sequence_erdos_gallai(deg_sequence: Iterable[Incomplete]): ... @_dispatchable -def is_multigraphical(sequence): ... +def is_multigraphical(sequence: Iterable[Incomplete]): ... @_dispatchable -def is_pseudographical(sequence): ... +def is_pseudographical(sequence: Iterable[Incomplete]): ... @_dispatchable -def is_digraphical(in_sequence, out_sequence): ... 
+def is_digraphical(in_sequence: Iterable[Incomplete], out_sequence: Iterable[Incomplete]): ... diff --git a/stubs/networkx/networkx/algorithms/hierarchy.pyi b/stubs/networkx/networkx/algorithms/hierarchy.pyi index 09f4dc139b70..e747ffc5109e 100644 --- a/stubs/networkx/networkx/algorithms/hierarchy.pyi +++ b/stubs/networkx/networkx/algorithms/hierarchy.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable @_dispatchable -def flow_hierarchy(G, weight: Incomplete | None = None): ... +def flow_hierarchy(G, weight: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/hybrid.pyi b/stubs/networkx/networkx/algorithms/hybrid.pyi index 3bbc857d1feb..171162022da3 100644 --- a/stubs/networkx/networkx/algorithms/hybrid.pyi +++ b/stubs/networkx/networkx/algorithms/hybrid.pyi @@ -1,6 +1,7 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def kl_connected_subgraph(G, k, l, low_memory: bool = False, same_as_graph: bool = False): ... +def kl_connected_subgraph(G: Graph[_Node], k: int, l: int, low_memory: bool = False, same_as_graph: bool = False): ... @_dispatchable -def is_kl_connected(G, k, l, low_memory: bool = False): ... +def is_kl_connected(G: Graph[_Node], k: int, l: int, low_memory: bool = False): ... diff --git a/stubs/networkx/networkx/algorithms/isolate.pyi b/stubs/networkx/networkx/algorithms/isolate.pyi index 1030cb1c5bb4..d08ac5b19588 100644 --- a/stubs/networkx/networkx/algorithms/isolate.pyi +++ b/stubs/networkx/networkx/algorithms/isolate.pyi @@ -1,8 +1,9 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_isolate(G, n): ... +def is_isolate(G: Graph[_Node], n: _Node): ... @_dispatchable -def isolates(G): ... +def isolates(G: Graph[_Node]): ... @_dispatchable -def number_of_isolates(G): ... +def number_of_isolates(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi b/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi index 259d9f7735ce..4a64a8191674 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi @@ -6,6 +6,7 @@ class ISMAGS: subgraph: Incomplete node_equality: Incomplete edge_equality: Incomplete + def __init__( self, graph, diff --git a/stubs/networkx/networkx/algorithms/isomorphism/isomorph.pyi b/stubs/networkx/networkx/algorithms/isomorphism/isomorph.pyi index 895b71cb59af..369615e1ba06 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/isomorph.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/isomorph.pyi @@ -1,23 +1,30 @@ from _typeshed import Incomplete +from collections.abc import Callable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["could_be_isomorphic", "fast_could_be_isomorphic", "faster_could_be_isomorphic", "is_isomorphic"] @_dispatchable -def could_be_isomorphic(G1, G2): ... +def could_be_isomorphic(G1: Graph[_Node], G2: Graph[_Node]): ... graph_could_be_isomorphic = could_be_isomorphic @_dispatchable -def fast_could_be_isomorphic(G1, G2): ... +def fast_could_be_isomorphic(G1: Graph[_Node], G2: Graph[_Node]): ... fast_graph_could_be_isomorphic = fast_could_be_isomorphic @_dispatchable -def faster_could_be_isomorphic(G1, G2): ... +def faster_could_be_isomorphic(G1: Graph[_Node], G2: Graph[_Node]): ... 
faster_graph_could_be_isomorphic = faster_could_be_isomorphic @_dispatchable -def is_isomorphic(G1, G2, node_match: Incomplete | None = None, edge_match: Incomplete | None = None): ... +def is_isomorphic( + G1: Graph[_Node], + G2: Graph[_Node], + node_match: Callable[..., Incomplete] | None = None, + edge_match: Callable[..., Incomplete] | None = None, +): ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi b/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi index 7b2e2c3d296d..dd57778fcc7a 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi @@ -11,6 +11,7 @@ class GraphMatcher: G2_node_order: Incomplete old_recursion_limit: Incomplete test: str + def __init__(self, G1, G2) -> None: ... def reset_recursion_limit(self) -> None: ... def candidate_pairs_iter(self) -> Generator[Incomplete, None, None]: ... @@ -20,6 +21,7 @@ class GraphMatcher: inout_2: Incomplete state: Incomplete mapping: Incomplete + def initialize(self) -> None: ... def is_isomorphic(self): ... def isomorphisms_iter(self) -> Generator[Incomplete, Incomplete, None]: ... @@ -42,6 +44,7 @@ class DiGraphMatcher(GraphMatcher): out_2: Incomplete state: Incomplete mapping: Incomplete + def initialize(self) -> None: ... def syntactic_feasibility(self, G1_node, G2_node): ... @@ -50,6 +53,7 @@ class GMState: G1_node: Incomplete G2_node: Incomplete depth: Incomplete + def __init__(self, GM, G1_node: Incomplete | None = None, G2_node: Incomplete | None = None) -> None: ... def restore(self) -> None: ... @@ -58,5 +62,6 @@ class DiGMState: G1_node: Incomplete G2_node: Incomplete depth: Incomplete + def __init__(self, GM, G1_node: Incomplete | None = None, G2_node: Incomplete | None = None) -> None: ... def restore(self) -> None: ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/temporalisomorphvf2.pyi b/stubs/networkx/networkx/algorithms/isomorphism/temporalisomorphvf2.pyi index a3200872c5e1..060f3d193541 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/temporalisomorphvf2.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/temporalisomorphvf2.pyi @@ -7,6 +7,7 @@ __all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"] class TimeRespectingGraphMatcher(GraphMatcher): temporal_attribute_name: Incomplete delta: Incomplete + def __init__(self, G1, G2, temporal_attribute_name, delta) -> None: ... def one_hop(self, Gx, Gx_node, neighbors): ... def two_hop(self, Gx, core_x, Gx_node, neighbors): ... @@ -15,6 +16,7 @@ class TimeRespectingGraphMatcher(GraphMatcher): class TimeRespectingDiGraphMatcher(DiGraphMatcher): temporal_attribute_name: Incomplete delta: Incomplete + def __init__(self, G1, G2, temporal_attribute_name, delta) -> None: ... def get_pred_dates(self, Gx, Gx_node, core_x, pred): ... def get_succ_dates(self, Gx, Gx_node, core_x, succ): ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/tree_isomorphism.pyi b/stubs/networkx/networkx/algorithms/isomorphism/tree_isomorphism.pyi index 8c56107e1774..5cfad7cd15cd 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/tree_isomorphism.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/tree_isomorphism.pyi @@ -1,6 +1,7 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def rooted_tree_isomorphism(t1, root1, t2, root2): ... @_dispatchable -def tree_isomorphism(t1, t2): ... 
+def tree_isomorphism(t1: Graph[_Node], t2: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/vf2pp.pyi b/stubs/networkx/networkx/algorithms/isomorphism/vf2pp.pyi index d8916fbadba8..704bdcfc0f26 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/vf2pp.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/vf2pp.pyi @@ -2,6 +2,7 @@ from _typeshed import Incomplete from collections.abc import Generator from typing import NamedTuple +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable class _GraphParameters(NamedTuple): @@ -26,10 +27,12 @@ class _StateParameters(NamedTuple): T2_tilde_in: Incomplete @_dispatchable -def vf2pp_isomorphism(G1, G2, node_label: Incomplete | None = None, default_label: Incomplete | None = None): ... +def vf2pp_isomorphism(G1: Graph[_Node], G2: Graph[_Node], node_label: str | None = None, default_label: float | None = None): ... @_dispatchable -def vf2pp_is_isomorphic(G1, G2, node_label: Incomplete | None = None, default_label: Incomplete | None = None): ... +def vf2pp_is_isomorphic( + G1: Graph[_Node], G2: Graph[_Node], node_label: str | None = None, default_label: float | None = None +): ... @_dispatchable def vf2pp_all_isomorphisms( - G1, G2, node_label: Incomplete | None = None, default_label: Incomplete | None = None + G1: Graph[_Node], G2: Graph[_Node], node_label: str | None = None, default_label: float | None = None ) -> Generator[Incomplete, None, Incomplete]: ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/vf2userfunc.pyi b/stubs/networkx/networkx/algorithms/isomorphism/vf2userfunc.pyi index 5c84c8f6c340..3800601bc5e9 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/vf2userfunc.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/vf2userfunc.pyi @@ -9,6 +9,7 @@ class GraphMatcher(vf2.GraphMatcher): edge_match: Incomplete G1_adj: Incomplete G2_adj: Incomplete + def __init__(self, G1, G2, node_match: Incomplete | None = None, edge_match: Incomplete | None = None) -> None: ... semantic_feasibility: Incomplete @@ -17,6 +18,7 @@ class DiGraphMatcher(vf2.DiGraphMatcher): edge_match: Incomplete G1_adj: Incomplete G2_adj: Incomplete + def __init__(self, G1, G2, node_match: Incomplete | None = None, edge_match: Incomplete | None = None) -> None: ... def semantic_feasibility(self, G1_node, G2_node): ... diff --git a/stubs/networkx/networkx/algorithms/link_analysis/hits_alg.pyi b/stubs/networkx/networkx/algorithms/link_analysis/hits_alg.pyi index d2dba97399b5..b91bc4610a9d 100644 --- a/stubs/networkx/networkx/algorithms/link_analysis/hits_alg.pyi +++ b/stubs/networkx/networkx/algorithms/link_analysis/hits_alg.pyi @@ -1,6 +1,13 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def hits(G, max_iter: int = 100, tol: float = 1e-08, nstart: Incomplete | None = None, normalized: bool = True): ... +def hits( + G: Graph[_Node], + max_iter: int | None = 100, + tol: float | None = 1e-08, + nstart: SupportsGetItem[Incomplete, Incomplete] | None = None, + normalized: bool = True, +): ... 
diff --git a/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi b/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi index 2ed3765aecce..cb3ab19ce19d 100644 --- a/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi +++ b/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi @@ -1,24 +1,26 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def pagerank( - G, - alpha: float = 0.85, - personalization: Incomplete | None = None, - max_iter: int = 100, - tol: float = 1e-06, - nstart: Incomplete | None = None, - weight: str = "weight", - dangling: Incomplete | None = None, + G: Graph[_Node], + alpha: float | None = 0.85, + personalization: SupportsGetItem[Incomplete, Incomplete] | None = None, + max_iter: int | None = 100, + tol: float | None = 1e-06, + nstart: SupportsGetItem[Incomplete, Incomplete] | None = None, + weight: str | None = "weight", + dangling: SupportsGetItem[Incomplete, Incomplete] | None = None, ): ... @_dispatchable def google_matrix( - G, + G: Graph[_Node], alpha: float = 0.85, - personalization: Incomplete | None = None, - nodelist: Incomplete | None = None, - weight: str = "weight", - dangling: Incomplete | None = None, + personalization: SupportsGetItem[Incomplete, Incomplete] | None = None, + nodelist: Iterable[Incomplete] | None = None, + weight: str | None = "weight", + dangling: SupportsGetItem[Incomplete, Incomplete] | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/link_prediction.pyi b/stubs/networkx/networkx/algorithms/link_prediction.pyi index 1ef94518386e..6c298d3ca531 100644 --- a/stubs/networkx/networkx/algorithms/link_prediction.pyi +++ b/stubs/networkx/networkx/algorithms/link_prediction.pyi @@ -1,20 +1,19 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def resource_allocation_index(G, ebunch: Incomplete | None = None): ... +def resource_allocation_index(G: Graph[_Node], ebunch=None): ... @_dispatchable -def jaccard_coefficient(G, ebunch: Incomplete | None = None): ... +def jaccard_coefficient(G: Graph[_Node], ebunch=None): ... @_dispatchable -def adamic_adar_index(G, ebunch: Incomplete | None = None): ... +def adamic_adar_index(G: Graph[_Node], ebunch=None): ... @_dispatchable -def common_neighbor_centrality(G, ebunch: Incomplete | None = None, alpha: float = 0.8): ... +def common_neighbor_centrality(G: Graph[_Node], ebunch=None, alpha=0.8): ... @_dispatchable -def preferential_attachment(G, ebunch: Incomplete | None = None): ... +def preferential_attachment(G: Graph[_Node], ebunch=None): ... @_dispatchable -def cn_soundarajan_hopcroft(G, ebunch: Incomplete | None = None, community: str = "community"): ... +def cn_soundarajan_hopcroft(G: Graph[_Node], ebunch=None, community: str | None = "community"): ... @_dispatchable -def ra_index_soundarajan_hopcroft(G, ebunch: Incomplete | None = None, community: str = "community"): ... +def ra_index_soundarajan_hopcroft(G: Graph[_Node], ebunch=None, community: str | None = "community"): ... @_dispatchable -def within_inter_cluster(G, ebunch: Incomplete | None = None, delta: float = 0.001, community: str = "community"): ... +def within_inter_cluster(G: Graph[_Node], ebunch=None, delta: float | None = 0.001, community: str | None = "community"): ... 
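A minimal sketch of the node-keyed mappings that the `SupportsGetItem[Incomplete, Incomplete]` annotations above stand in for, assuming the documented pagerank API (illustration only, not part of the patch):

import networkx as nx

G = nx.path_graph(3)
# `personalization`, `nstart` and `dangling` are node-keyed mappings; any Mapping
# satisfies the SupportsGetItem annotations.
ranks = nx.pagerank(
    G,
    alpha=0.85,
    personalization={0: 1.0, 1: 0.5, 2: 0.5},
    nstart={n: 1.0 for n in G},
)
assert abs(sum(ranks.values()) - 1.0) < 1e-6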
diff --git a/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi b/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi index 700a7fce8c73..b884ed0cdd2c 100644 --- a/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi +++ b/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi @@ -1,13 +1,15 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable @_dispatchable -def all_pairs_lowest_common_ancestor(G, pairs: Incomplete | None = None): ... +def all_pairs_lowest_common_ancestor(G: DiGraph[_Node], pairs=None): ... @_dispatchable -def lowest_common_ancestor(G, node1, node2, default: Incomplete | None = None): ... +def lowest_common_ancestor(G: DiGraph[_Node], node1, node2, default: Incomplete | None = None): ... @_dispatchable def tree_all_pairs_lowest_common_ancestor( - G, root: Incomplete | None = None, pairs: Incomplete | None = None + G: DiGraph[_Node], root: _Node | None = None, pairs=None ) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/matching.pyi b/stubs/networkx/networkx/algorithms/matching.pyi index ba797a5a12b3..cf7301679632 100644 --- a/stubs/networkx/networkx/algorithms/matching.pyi +++ b/stubs/networkx/networkx/algorithms/matching.pyi @@ -1,14 +1,15 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def maximal_matching(G): ... +def maximal_matching(G: Graph[_Node]): ... @_dispatchable -def is_matching(G, matching): ... +def is_matching(G: Graph[_Node], matching): ... @_dispatchable -def is_maximal_matching(G, matching): ... +def is_maximal_matching(G: Graph[_Node], matching): ... @_dispatchable -def is_perfect_matching(G, matching): ... +def is_perfect_matching(G: Graph[_Node], matching): ... @_dispatchable -def min_weight_matching(G, weight: str = "weight"): ... +def min_weight_matching(G: Graph[_Node], weight: str | None = "weight"): ... @_dispatchable -def max_weight_matching(G, maxcardinality: bool = False, weight: str = "weight"): ... +def max_weight_matching(G: Graph[_Node], maxcardinality: bool | None = False, weight: str | None = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/minors/contraction.pyi b/stubs/networkx/networkx/algorithms/minors/contraction.pyi index c76c1b1b5d48..40dc9a0adda3 100644 --- a/stubs/networkx/networkx/algorithms/minors/contraction.pyi +++ b/stubs/networkx/networkx/algorithms/minors/contraction.pyi @@ -1,23 +1,26 @@ from _typeshed import Incomplete +from collections.abc import Callable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def equivalence_classes(iterable, relation): ... @_dispatchable def quotient_graph( - G, + G: Graph[_Node], partition, - edge_relation: Incomplete | None = None, - node_data: Incomplete | None = None, - edge_data: Incomplete | None = None, + edge_relation=None, + node_data: Callable[..., Incomplete] | None = None, + edge_data: Callable[..., Incomplete] | None = None, + weight: str | None = "weight", relabel: bool = False, - create_using: Incomplete | None = None, + create_using: Graph[_Node] | None = None, ): ... @_dispatchable -def contracted_nodes(G, u, v, self_loops: bool = True, copy: bool = True): ... +def contracted_nodes(G: Graph[_Node], u, v, self_loops: bool = True, copy: bool = True): ... 
identified_nodes = contracted_nodes @_dispatchable -def contracted_edge(G, edge, self_loops: bool = True, copy: bool = True): ... +def contracted_edge(G: Graph[_Node], edge: tuple[Incomplete], self_loops: bool = True, copy: bool = True): ... diff --git a/stubs/networkx/networkx/algorithms/mis.pyi b/stubs/networkx/networkx/algorithms/mis.pyi index 1d4b262dde29..66ec315178b7 100644 --- a/stubs/networkx/networkx/algorithms/mis.pyi +++ b/stubs/networkx/networkx/algorithms/mis.pyi @@ -1,6 +1,11 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def maximal_independent_set(G, nodes: Incomplete | None = None, seed: Incomplete | None = None): ... +def maximal_independent_set( + G: Graph[_Node], nodes: Iterable[Incomplete] | None = None, seed: int | RandomState | None = None +): ... diff --git a/stubs/networkx/networkx/algorithms/moral.pyi b/stubs/networkx/networkx/algorithms/moral.pyi index 570061651f86..626c6c3f0393 100644 --- a/stubs/networkx/networkx/algorithms/moral.pyi +++ b/stubs/networkx/networkx/algorithms/moral.pyi @@ -1,4 +1,5 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def moral_graph(G): ... +def moral_graph(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/node_classification.pyi b/stubs/networkx/networkx/algorithms/node_classification.pyi index 510c72a25e3d..45f0e4df7886 100644 --- a/stubs/networkx/networkx/algorithms/node_classification.pyi +++ b/stubs/networkx/networkx/algorithms/node_classification.pyi @@ -1,6 +1,7 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def harmonic_function(G, max_iter: int = 30, label_name: str = "label"): ... +def harmonic_function(G: Graph[_Node], max_iter: int = 30, label_name: str = "label"): ... @_dispatchable -def local_and_global_consistency(G, alpha: float = 0.99, max_iter: int = 30, label_name: str = "label"): ... +def local_and_global_consistency(G: Graph[_Node], alpha: float = 0.99, max_iter: int = 30, label_name: str = "label"): ... diff --git a/stubs/networkx/networkx/algorithms/non_randomness.pyi b/stubs/networkx/networkx/algorithms/non_randomness.pyi index 5e0941436883..fc1f04b00f2e 100644 --- a/stubs/networkx/networkx/algorithms/non_randomness.pyi +++ b/stubs/networkx/networkx/algorithms/non_randomness.pyi @@ -1,6 +1,5 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def non_randomness(G, k: Incomplete | None = None, weight: str = "weight"): ... +def non_randomness(G: Graph[_Node], k: int | None = None, weight: str | None = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/operators/all.pyi b/stubs/networkx/networkx/algorithms/operators/all.pyi index a595296a7853..67e66bfcf29c 100644 --- a/stubs/networkx/networkx/algorithms/operators/all.pyi +++ b/stubs/networkx/networkx/algorithms/operators/all.pyi @@ -1,10 +1,13 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + from networkx.utils.backends import _dispatchable @_dispatchable -def union_all(graphs, rename=()): ... +def union_all(graphs: Iterable[Incomplete], rename: Iterable[Incomplete] | None = ()): ... @_dispatchable -def disjoint_union_all(graphs): ... 
+def disjoint_union_all(graphs: Iterable[Incomplete]): ... @_dispatchable -def compose_all(graphs): ... +def compose_all(graphs: Iterable[Incomplete]): ... @_dispatchable -def intersection_all(graphs): ... +def intersection_all(graphs: Iterable[Incomplete]): ... diff --git a/stubs/networkx/networkx/algorithms/operators/binary.pyi b/stubs/networkx/networkx/algorithms/operators/binary.pyi index ef7f4943ca52..9e794329668f 100644 --- a/stubs/networkx/networkx/algorithms/operators/binary.pyi +++ b/stubs/networkx/networkx/algorithms/operators/binary.pyi @@ -1,24 +1,24 @@ -from collections.abc import Hashable +from _typeshed import Incomplete +from collections.abc import Hashable, Iterable from typing import TypeVar from networkx.classes.digraph import DiGraph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def disjoint_union(G, H): ... +def disjoint_union(G: Graph[_Node], H: Graph[_Node]): ... @_dispatchable -def intersection(G, H): ... +def intersection(G: Graph[_Node], H: Graph[_Node]): ... @_dispatchable -def difference(G, H): ... +def difference(G: Graph[_Node], H: Graph[_Node]): ... @_dispatchable -def symmetric_difference(G, H): ... +def symmetric_difference(G: Graph[_Node], H: Graph[_Node]): ... _X = TypeVar("_X", bound=Hashable, covariant=True) _Y = TypeVar("_Y", bound=Hashable, covariant=True) -# GT = TypeVar('GT', bound=Graph[_Node]) -# TODO: This does not handle the cases when graphs of different types are passed which is allowed @_dispatchable -def compose(G: DiGraph[_X], H: DiGraph[_Y]) -> DiGraph[_X | _Y]: ... +def compose(G: Graph[_X], H: Graph[_Y]) -> DiGraph[_X | _Y]: ... @_dispatchable -def union(G: DiGraph[_X], H: DiGraph[_Y], rename=()) -> DiGraph[_X | _Y]: ... +def union(G: Graph[_X], H: Graph[_Y], rename: Iterable[Incomplete] | None = ()) -> DiGraph[_X | _Y]: ... diff --git a/stubs/networkx/networkx/algorithms/operators/product.pyi b/stubs/networkx/networkx/algorithms/operators/product.pyi index 96b54b2d4275..de12cd51bd36 100644 --- a/stubs/networkx/networkx/algorithms/operators/product.pyi +++ b/stubs/networkx/networkx/algorithms/operators/product.pyi @@ -1,16 +1,23 @@ +from collections.abc import Hashable +from typing import TypeVar + +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +_X = TypeVar("_X", bound=Hashable) +_Y = TypeVar("_Y", bound=Hashable) + @_dispatchable -def tensor_product(G, H): ... +def tensor_product(G: Graph[_X], H: Graph[_Y]) -> Graph[tuple[_X, _Y]]: ... @_dispatchable -def cartesian_product(G, H): ... +def cartesian_product(G: Graph[_X], H: Graph[_Y]) -> Graph[tuple[_X, _Y]]: ... @_dispatchable -def lexicographic_product(G, H): ... +def lexicographic_product(G: Graph[_X], H: Graph[_Y]) -> Graph[tuple[_X, _Y]]: ... @_dispatchable -def strong_product(G, H): ... +def strong_product(G: Graph[_X], H: Graph[_Y]) -> Graph[tuple[_X, _Y]]: ... @_dispatchable -def power(G, k): ... +def power(G: Graph[_Node], k): ... @_dispatchable -def rooted_product(G, H, root): ... +def rooted_product(G: Graph[_X], H: Graph[_Y], root: _Y) -> Graph[tuple[_X, _Y]]: ... @_dispatchable -def corona_product(G, H): ... +def corona_product(G: Graph[_X], H: Graph[_Y]) -> Graph[tuple[_X, _Y]]: ... 
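The product functions above now advertise `Graph[tuple[_X, _Y]]` results; a quick sketch of that node shape in practice, using the documented API (illustration only, not part of the patch):

import networkx as nx

# Product nodes are (node_from_G, node_from_H) pairs, matching Graph[tuple[_X, _Y]].
P = nx.cartesian_product(nx.path_graph(2), nx.path_graph(2))
assert set(P.nodes) == {(0, 0), (0, 1), (1, 0), (1, 1)}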
diff --git a/stubs/networkx/networkx/algorithms/operators/unary.pyi b/stubs/networkx/networkx/algorithms/operators/unary.pyi index b17f988839b3..cda8ebbf6b45 100644 --- a/stubs/networkx/networkx/algorithms/operators/unary.pyi +++ b/stubs/networkx/networkx/algorithms/operators/unary.pyi @@ -1,12 +1,12 @@ from collections.abc import Hashable from typing import TypeVar -from networkx.classes.graph import Graph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable _G = TypeVar("_G", bound=Graph[Hashable]) @_dispatchable -def complement(G): ... +def complement(G: Graph[_Node]): ... @_dispatchable def reverse(G: _G, copy: bool = True) -> _G: ... diff --git a/stubs/networkx/networkx/algorithms/planarity.pyi b/stubs/networkx/networkx/algorithms/planarity.pyi index 5df3c945c7c4..9b1ba3b297fe 100644 --- a/stubs/networkx/networkx/algorithms/planarity.pyi +++ b/stubs/networkx/networkx/algorithms/planarity.pyi @@ -2,19 +2,20 @@ from _typeshed import Incomplete from collections.abc import Generator, Mapping, MutableSet, Reversible from networkx.classes.digraph import DiGraph -from networkx.classes.graph import _Node +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["check_planarity", "is_planar", "PlanarEmbedding"] @_dispatchable -def is_planar(G) -> bool: ... +def is_planar(G: Graph[_Node]) -> bool: ... @_dispatchable -def check_planarity(G, counterexample: bool = False): ... +def check_planarity(G: Graph[_Node], counterexample: bool = False): ... class Interval: low: Incomplete high: Incomplete + def __init__(self, low: Incomplete | None = None, high: Incomplete | None = None) -> None: ... def empty(self): ... def copy(self): ... @@ -23,6 +24,7 @@ class Interval: class ConflictPair: left: Incomplete right: Incomplete + def __init__(self, left=..., right=...) -> None: ... def swap(self) -> None: ... def lowest(self, planarity_state): ... @@ -46,6 +48,7 @@ class LRPlanarity: left_ref: Incomplete right_ref: Incomplete embedding: Incomplete + def __init__(self, G) -> None: ... def lr_planarity(self): ... def lr_planarity_recursive(self): ... diff --git a/stubs/networkx/networkx/algorithms/polynomials.pyi b/stubs/networkx/networkx/algorithms/polynomials.pyi index 805beff63985..cadffc38a170 100644 --- a/stubs/networkx/networkx/algorithms/polynomials.pyi +++ b/stubs/networkx/networkx/algorithms/polynomials.pyi @@ -1,6 +1,7 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def tutte_polynomial(G): ... +def tutte_polynomial(G: Graph[_Node]): ... @_dispatchable -def chromatic_polynomial(G): ... +def chromatic_polynomial(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/reciprocity.pyi b/stubs/networkx/networkx/algorithms/reciprocity.pyi index 3af842628188..547dc885d5cc 100644 --- a/stubs/networkx/networkx/algorithms/reciprocity.pyi +++ b/stubs/networkx/networkx/algorithms/reciprocity.pyi @@ -1,8 +1,9 @@ -from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def reciprocity(G, nodes: Incomplete | None = None): ... +def reciprocity(G: Graph[_Node], nodes: Iterable[_Node] | None = None): ... @_dispatchable -def overall_reciprocity(G): ... +def overall_reciprocity(G: Graph[_Node]): ... 
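A brief usage sketch for the planarity annotations above (check_planarity takes any Graph[_Node] plus a counterexample flag); K5 is used here purely as a convenient non-planar example:

import networkx as nx

G = nx.complete_graph(5)                        # K5 is not planar
ok, cert = nx.check_planarity(G, counterexample=True)
print(ok)                                       # False; cert holds a Kuratowski subgraph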
diff --git a/stubs/networkx/networkx/algorithms/regular.pyi b/stubs/networkx/networkx/algorithms/regular.pyi index 1e66cfe1c92d..61a19ea04a3b 100644 --- a/stubs/networkx/networkx/algorithms/regular.pyi +++ b/stubs/networkx/networkx/algorithms/regular.pyi @@ -1,8 +1,9 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_regular(G): ... +def is_regular(G: Graph[_Node]): ... @_dispatchable -def is_k_regular(G, k): ... +def is_k_regular(G: Graph[_Node], k): ... @_dispatchable -def k_factor(G, k, matching_weight: str = "weight"): ... +def k_factor(G: Graph[_Node], k, matching_weight: str | None = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/richclub.pyi b/stubs/networkx/networkx/algorithms/richclub.pyi index fa8f1f06df26..2b7b8b21fb2c 100644 --- a/stubs/networkx/networkx/algorithms/richclub.pyi +++ b/stubs/networkx/networkx/algorithms/richclub.pyi @@ -1,6 +1,6 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def rich_club_coefficient(G, normalized: bool = True, Q: float = 100, seed: Incomplete | None = None): ... +def rich_club_coefficient(G: Graph[_Node], normalized: bool = True, Q: float = 100, seed: int | RandomState | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/astar.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/astar.pyi index 6b5160269014..63260ea30639 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/astar.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/astar.pyi @@ -1,8 +1,27 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from collections.abc import Callable +from typing import Any +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def astar_path(G, source, target, heuristic: Incomplete | None = None, weight: str = "weight"): ... +def astar_path( + G: Graph[_Node], + source: _Node, + target: _Node, + heuristic: Callable[..., Incomplete] | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", + *, + cutoff: float | None = None, +): ... @_dispatchable -def astar_path_length(G, source, target, heuristic: Incomplete | None = None, weight: str = "weight"): ... +def astar_path_length( + G: Graph[_Node], + source: _Node, + target: _Node, + heuristic: Callable[..., Incomplete] | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", + *, + cutoff: float | None = None, +): ... diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi index 355782732342..88e66d572eb0 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi @@ -1,12 +1,14 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def floyd_warshall_numpy(G, nodelist: Incomplete | None = None, weight: str = "weight"): ... +def floyd_warshall_numpy(G: Graph[_Node], nodelist: Iterable[Incomplete] | None = None, weight: str | None = "weight"): ... 
@_dispatchable -def floyd_warshall_predecessor_and_distance(G, weight: str = "weight"): ... +def floyd_warshall_predecessor_and_distance(G: Graph[_Node], weight: str | None = "weight"): ... @_dispatchable -def reconstruct_path(source, target, predecessors): ... +def reconstruct_path(source: _Node, target: _Node, predecessors: SupportsGetItem[Incomplete, Incomplete]): ... @_dispatchable -def floyd_warshall(G, weight: str = "weight"): ... +def floyd_warshall(G: Graph[_Node], weight: str | None = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/generic.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/generic.pyi index 82723d3eb9e8..f623aeb9ee90 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/generic.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/generic.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Callable, Generator from typing import overload from networkx.classes.graph import Graph, _Node @@ -9,23 +9,45 @@ from networkx.utils.backends import _dispatchable def has_path(G: Graph[_Node], source: _Node, target: _Node) -> bool: ... @overload def shortest_path( - G: Graph[_Node], source: _Node, target: _Node, weight: Incomplete | None = None, method: str = "dijkstra" + G: Graph[_Node], + source: _Node | None = None, + target: _Node | None = None, + weight: str | Callable[..., Incomplete] | None = None, + method: str | None = "dijkstra", ) -> list[_Node]: ... @overload -def shortest_path(G: Graph[_Node], target: _Node, method: str = "dijkstra") -> dict[_Node, list[_Node]]: ... +def shortest_path( + G: Graph[_Node], + source: _Node | None = None, + target: _Node | None = None, + weight: str | Callable[..., Incomplete] | None = None, + method: str | None = "dijkstra", +) -> dict[_Node, list[_Node]]: ... @overload -def shortest_path(G: Graph[_Node], source: _Node, method: str = "dijkstra") -> dict[_Node, list[_Node]]: ... +def shortest_path( + G: Graph[_Node], + source: _Node | None = None, + target: _Node | None = None, + weight: str | Callable[..., Incomplete] | None = None, + method: str | None = "dijkstra", +) -> dict[_Node, list[_Node]]: ... @_dispatchable def shortest_path_length( - G, - source: Incomplete | None = None, - target: Incomplete | None = None, - weight: Incomplete | None = None, - method: str = "dijkstra", + G: Graph[_Node], + source: _Node | None = None, + target: _Node | None = None, + weight: str | Callable[..., Incomplete] | None = None, + method: str | None = "dijkstra", ): ... @_dispatchable -def average_shortest_path_length(G, weight: Incomplete | None = None, method: str | None = None): ... +def average_shortest_path_length( + G: Graph[_Node], weight: str | Callable[..., Incomplete] | None = None, method: str | None = None +): ... @_dispatchable def all_shortest_paths( - G: Graph[_Node], source: _Node, target: _Node, weight: Incomplete | None = None, method: str = "dijkstra" + G: Graph[_Node], + source: _Node, + target: _Node, + weight: str | Callable[..., Incomplete] | None = None, + method: str | None = "dijkstra", ) -> Generator[list[_Node], None, None]: ... 
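The weight parameter annotated above accepts either an edge-attribute name or a callable taking (u, v, edge-data dict). A small sketch with a toy graph and an illustrative "cost" attribute:

import networkx as nx

G = nx.Graph()
G.add_edge("a", "b", cost=2.0)
G.add_edge("b", "c", cost=1.0)
G.add_edge("a", "c", cost=5.0)

def by_cost(u, v, d):   # (u, v, edge-attribute dict) -> float, as in the annotation
    return d["cost"]

print(nx.shortest_path(G, "a", "c", weight=by_cost, method="dijkstra"))  # ['a', 'b', 'c']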
diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi index cb4eeab99d23..8250e0f2c229 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi @@ -1,23 +1,24 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def single_source_shortest_path_length(G, source, cutoff: Incomplete | None = None): ... +def single_source_shortest_path_length(G: Graph[_Node], source: _Node, cutoff: int | None = None): ... @_dispatchable -def single_target_shortest_path_length(G, target, cutoff: Incomplete | None = None): ... +def single_target_shortest_path_length(G: Graph[_Node], target: _Node, cutoff: int | None = None): ... @_dispatchable -def all_pairs_shortest_path_length(G, cutoff: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... +def all_pairs_shortest_path_length(G: Graph[_Node], cutoff: int | None = None) -> Generator[Incomplete, None, None]: ... @_dispatchable -def bidirectional_shortest_path(G, source, target): ... +def bidirectional_shortest_path(G: Graph[_Node], source: str, target: str): ... @_dispatchable -def single_source_shortest_path(G, source, cutoff: Incomplete | None = None): ... +def single_source_shortest_path(G: Graph[_Node], source: str, cutoff: int | None = None): ... @_dispatchable -def single_target_shortest_path(G, target, cutoff: Incomplete | None = None): ... +def single_target_shortest_path(G: Graph[_Node], target: str, cutoff: int | None = None): ... @_dispatchable -def all_pairs_shortest_path(G, cutoff: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... +def all_pairs_shortest_path(G: Graph[_Node], cutoff: int | None = None) -> Generator[Incomplete, None, None]: ... @_dispatchable def predecessor( - G, source, target: Incomplete | None = None, cutoff: Incomplete | None = None, return_seen: Incomplete | None = None + G: Graph[_Node], source: str, target: str | None = None, cutoff: int | None = None, return_seen: bool | None = None ): ... diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi index 2716cbfbc84c..8c898e28bff5 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi @@ -1,72 +1,158 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, SupportsGetItem from collections.abc import Callable, Generator from typing import Any -from typing_extensions import TypeAlias +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable -# type alias for the weight function -_WeightFunction: TypeAlias = Callable[[Any, Any, dict[str, Any]], float | None] - @_dispatchable -def dijkstra_path(G, source, target, weight: str | _WeightFunction = "weight"): ... +def dijkstra_path( + G: Graph[_Node], + source: _Node, + target: _Node, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable -def dijkstra_path_length(G, source, target, weight: str | _WeightFunction = "weight"): ... 
+def dijkstra_path_length( + G: Graph[_Node], + source: str, + target: str, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable -def single_source_dijkstra_path(G, source, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight"): ... +def single_source_dijkstra_path( + G: Graph[_Node], + source: _Node, + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable -def single_source_dijkstra_path_length(G, source, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight"): ... +def single_source_dijkstra_path_length( + G: Graph[_Node], + source: str, + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable def single_source_dijkstra( - G, source, target: Incomplete | None = None, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight" + G: Graph[_Node], + source: str, + target: str | None = None, + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ): ... @_dispatchable -def multi_source_dijkstra_path(G, sources, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight"): ... +def multi_source_dijkstra_path( + G: Graph[_Node], + sources, + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable -def multi_source_dijkstra_path_length(G, sources, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight"): ... +def multi_source_dijkstra_path_length( + G: Graph[_Node], + sources, + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable def multi_source_dijkstra( - G, sources, target: Incomplete | None = None, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight" + G: Graph[_Node], + sources, + target: str | None = None, + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ): ... @_dispatchable -def dijkstra_predecessor_and_distance(G, source, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight"): ... +def dijkstra_predecessor_and_distance( + G: Graph[_Node], + source: str, + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable def all_pairs_dijkstra( - G, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight" + G: Graph[_Node], + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ) -> Generator[Incomplete, None, None]: ... @_dispatchable def all_pairs_dijkstra_path_length( - G, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight" + G: Graph[_Node], + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ) -> Generator[Incomplete, None, None]: ... 
@_dispatchable def all_pairs_dijkstra_path( - G, cutoff: Incomplete | None = None, weight: str | _WeightFunction = "weight" + G: Graph[_Node], + cutoff: float | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ) -> Generator[Incomplete, None, None]: ... @_dispatchable def bellman_ford_predecessor_and_distance( - G, source, target: Incomplete | None = None, weight: str | _WeightFunction = "weight", heuristic: bool = False + G: Graph[_Node], + source: str, + target: str | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", + heuristic: bool = False, ): ... @_dispatchable -def bellman_ford_path(G, source, target, weight: str | _WeightFunction = "weight"): ... +def bellman_ford_path( + G: Graph[_Node], + source: _Node, + target: _Node, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable -def bellman_ford_path_length(G, source, target, weight: str | _WeightFunction = "weight"): ... +def bellman_ford_path_length( + G: Graph[_Node], + source: str, + target: str, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable -def single_source_bellman_ford_path(G, source, weight: str | _WeightFunction = "weight"): ... +def single_source_bellman_ford_path( + G: Graph[_Node], source: _Node, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" +): ... @_dispatchable -def single_source_bellman_ford_path_length(G, source, weight: str | _WeightFunction = "weight"): ... +def single_source_bellman_ford_path_length( + G: Graph[_Node], source: str, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" +): ... @_dispatchable -def single_source_bellman_ford(G, source, target: Incomplete | None = None, weight: str | _WeightFunction = "weight"): ... +def single_source_bellman_ford( + G: Graph[_Node], + source: str, + target: str | None = None, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable -def all_pairs_bellman_ford_path_length(G, weight: str | _WeightFunction = "weight") -> Generator[Incomplete, None, None]: ... +def all_pairs_bellman_ford_path_length( + G: Graph[_Node], weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" +) -> Generator[Incomplete, None, None]: ... @_dispatchable -def all_pairs_bellman_ford_path(G, weight: str | _WeightFunction = "weight") -> Generator[Incomplete, None, None]: ... +def all_pairs_bellman_ford_path( + G: Graph[_Node], weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" +) -> Generator[Incomplete, None, None]: ... @_dispatchable -def goldberg_radzik(G, source, weight: str | _WeightFunction = "weight"): ... +def goldberg_radzik( + G: Graph[_Node], source: str, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" +): ... @_dispatchable -def negative_edge_cycle(G, weight: str | _WeightFunction = "weight", heuristic: bool = True): ... +def negative_edge_cycle( + G: Graph[_Node], + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", + heuristic: bool = True, +): ... @_dispatchable -def find_negative_cycle(G, source, weight: str | _WeightFunction = "weight"): ... 
+def find_negative_cycle( + G: Graph[_Node], source: str, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" +): ... @_dispatchable -def bidirectional_dijkstra(G, source, target, weight: str | _WeightFunction = "weight"): ... +def bidirectional_dijkstra( + G: Graph[_Node], + source: _Node, + target: _Node, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... @_dispatchable -def johnson(G, weight: str | _WeightFunction = "weight"): ... +def johnson(G: Graph[_Node], weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/similarity.pyi b/stubs/networkx/networkx/algorithms/similarity.pyi index bec692662f97..92164d71d9d8 100644 --- a/stubs/networkx/networkx/algorithms/similarity.pyi +++ b/stubs/networkx/networkx/algorithms/similarity.pyi @@ -1,83 +1,97 @@ -from _typeshed import Incomplete -from collections.abc import Generator +from _typeshed import Incomplete, SupportsGetItem +from collections.abc import Callable, Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable def graph_edit_distance( - G1, - G2, - node_match: Incomplete | None = None, - edge_match: Incomplete | None = None, - node_subst_cost: Incomplete | None = None, - node_del_cost: Incomplete | None = None, - node_ins_cost: Incomplete | None = None, - edge_subst_cost: Incomplete | None = None, - edge_del_cost: Incomplete | None = None, - edge_ins_cost: Incomplete | None = None, - roots: Incomplete | None = None, - upper_bound: Incomplete | None = None, - timeout: Incomplete | None = None, + G1: Graph[_Node], + G2: Graph[_Node], + node_match: Callable[..., Incomplete] | None = None, + edge_match: Callable[..., Incomplete] | None = None, + node_subst_cost: Callable[..., Incomplete] | None = None, + node_del_cost: Callable[..., Incomplete] | None = None, + node_ins_cost: Callable[..., Incomplete] | None = None, + edge_subst_cost: Callable[..., Incomplete] | None = None, + edge_del_cost: Callable[..., Incomplete] | None = None, + edge_ins_cost: Callable[..., Incomplete] | None = None, + roots=None, + upper_bound: float | None = None, + timeout: float | None = None, ): ... @_dispatchable def optimal_edit_paths( - G1, - G2, - node_match: Incomplete | None = None, - edge_match: Incomplete | None = None, - node_subst_cost: Incomplete | None = None, - node_del_cost: Incomplete | None = None, - node_ins_cost: Incomplete | None = None, - edge_subst_cost: Incomplete | None = None, - edge_del_cost: Incomplete | None = None, - edge_ins_cost: Incomplete | None = None, - upper_bound: Incomplete | None = None, + G1: Graph[_Node], + G2: Graph[_Node], + node_match: Callable[..., Incomplete] | None = None, + edge_match: Callable[..., Incomplete] | None = None, + node_subst_cost: Callable[..., Incomplete] | None = None, + node_del_cost: Callable[..., Incomplete] | None = None, + node_ins_cost: Callable[..., Incomplete] | None = None, + edge_subst_cost: Callable[..., Incomplete] | None = None, + edge_del_cost: Callable[..., Incomplete] | None = None, + edge_ins_cost: Callable[..., Incomplete] | None = None, + upper_bound: float | None = None, ): ... 
@_dispatchable def optimize_graph_edit_distance( - G1, - G2, - node_match: Incomplete | None = None, - edge_match: Incomplete | None = None, - node_subst_cost: Incomplete | None = None, - node_del_cost: Incomplete | None = None, - node_ins_cost: Incomplete | None = None, - edge_subst_cost: Incomplete | None = None, - edge_del_cost: Incomplete | None = None, - edge_ins_cost: Incomplete | None = None, - upper_bound: Incomplete | None = None, + G1: Graph[_Node], + G2: Graph[_Node], + node_match: Callable[..., Incomplete] | None = None, + edge_match: Callable[..., Incomplete] | None = None, + node_subst_cost: Callable[..., Incomplete] | None = None, + node_del_cost: Callable[..., Incomplete] | None = None, + node_ins_cost: Callable[..., Incomplete] | None = None, + edge_subst_cost: Callable[..., Incomplete] | None = None, + edge_del_cost: Callable[..., Incomplete] | None = None, + edge_ins_cost: Callable[..., Incomplete] | None = None, + upper_bound: float | None = None, ) -> Generator[Incomplete, None, None]: ... @_dispatchable def optimize_edit_paths( - G1, - G2, - node_match: Incomplete | None = None, - edge_match: Incomplete | None = None, - node_subst_cost: Incomplete | None = None, - node_del_cost: Incomplete | None = None, - node_ins_cost: Incomplete | None = None, - edge_subst_cost: Incomplete | None = None, - edge_del_cost: Incomplete | None = None, - edge_ins_cost: Incomplete | None = None, - upper_bound: Incomplete | None = None, + G1: Graph[_Node], + G2: Graph[_Node], + node_match: Callable[..., Incomplete] | None = None, + edge_match: Callable[..., Incomplete] | None = None, + node_subst_cost: Callable[..., Incomplete] | None = None, + node_del_cost: Callable[..., Incomplete] | None = None, + node_ins_cost: Callable[..., Incomplete] | None = None, + edge_subst_cost: Callable[..., Incomplete] | None = None, + edge_del_cost: Callable[..., Incomplete] | None = None, + edge_ins_cost: Callable[..., Incomplete] | None = None, + upper_bound: float | None = None, strictly_decreasing: bool = True, - roots: Incomplete | None = None, - timeout: Incomplete | None = None, + roots=None, + timeout: float | None = None, ) -> Generator[Incomplete, None, Incomplete]: ... @_dispatchable def simrank_similarity( - G, - source: Incomplete | None = None, - target: Incomplete | None = None, + G: Graph[_Node], + source: _Node | None = None, + target: _Node | None = None, importance_factor: float = 0.9, max_iterations: int = 1000, tolerance: float = 0.0001, ): ... @_dispatchable def panther_similarity( - G, source, k: int = 5, path_length: int = 5, c: float = 0.5, delta: float = 0.1, eps: Incomplete | None = None + G: Graph[_Node], + source: _Node, + k: int = 5, + path_length: int = 5, + c: float = 0.5, + delta: float = 0.1, + eps=None, + weight: str | None = "weight", ): ... @_dispatchable def generate_random_paths( - G, sample_size, path_length: int = 5, index_map: Incomplete | None = None + G: Graph[_Node], + sample_size: int, + path_length: int = 5, + index_map: SupportsGetItem[Incomplete, Incomplete] | None = None, + weight: str | None = "weight", + seed: int | RandomState | None = None, ) -> Generator[Incomplete, None, None]: ... 
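For the simrank_similarity signature above (source and target default to None), a minimal sketch on a toy cycle; passing only source yields a dict keyed by node:

import networkx as nx

G = nx.cycle_graph(4)
sim = nx.simrank_similarity(G, source=0)   # dict: node -> SimRank similarity with node 0
print(sim[0])                              # 1.0; a node is maximally similar to itself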
diff --git a/stubs/networkx/networkx/algorithms/simple_paths.pyi b/stubs/networkx/networkx/algorithms/simple_paths.pyi index 6e69d1d1386e..a33ebdbb3bdc 100644 --- a/stubs/networkx/networkx/algorithms/simple_paths.pyi +++ b/stubs/networkx/networkx/algorithms/simple_paths.pyi @@ -1,5 +1,6 @@ -from _typeshed import Incomplete -from collections.abc import Generator, Sequence +from _typeshed import Incomplete, SupportsGetItem +from collections.abc import Callable, Generator, Iterable +from typing import Any from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -7,24 +8,26 @@ from networkx.utils.backends import _dispatchable __all__ = ["all_simple_paths", "is_simple_path", "shortest_simple_paths", "all_simple_edge_paths"] @_dispatchable -def is_simple_path(G: Graph[_Node], nodes: Sequence[_Node]): ... +def is_simple_path(G: Graph[_Node], nodes: Iterable[Incomplete]): ... @_dispatchable -def all_simple_paths( - G: Graph[_Node], source: _Node, target: _Node, cutoff: Incomplete | None = None -) -> Generator[list[_Node], None, None]: ... +def all_simple_paths(G: Graph[_Node], source: _Node, target, cutoff: int | None = None) -> Generator[list[_Node], None, None]: ... @_dispatchable def all_simple_edge_paths( - G: Graph[_Node], source: _Node, target: _Node, cutoff: Incomplete | None = None + G: Graph[_Node], source: _Node, target, cutoff: int | None = None ) -> Generator[list[_Node] | list[tuple[_Node, _Node]], None, list[_Node] | None]: ... @_dispatchable def shortest_simple_paths( - G: Graph[_Node], source: _Node, target: _Node, weight: Incomplete | None = None + G: Graph[_Node], + source: _Node, + target: _Node, + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = None, ) -> Generator[list[_Node], None, None]: ... class PathBuffer: paths: Incomplete sortedpaths: Incomplete counter: Incomplete + def __init__(self) -> None: ... def __len__(self): ... def push(self, cost, path) -> None: ... diff --git a/stubs/networkx/networkx/algorithms/smallworld.pyi b/stubs/networkx/networkx/algorithms/smallworld.pyi index cb9c7b252e04..1489f56198ae 100644 --- a/stubs/networkx/networkx/algorithms/smallworld.pyi +++ b/stubs/networkx/networkx/algorithms/smallworld.pyi @@ -1,14 +1,14 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def random_reference(G, niter: int = 1, connectivity: bool = True, seed: Incomplete | None = None): ... +def random_reference(G: Graph[_Node], niter: int = 1, connectivity: bool = True, seed: int | RandomState | None = None): ... @_dispatchable def lattice_reference( - G, niter: int = 5, D: Incomplete | None = None, connectivity: bool = True, seed: Incomplete | None = None + G: Graph[_Node], niter: int = 5, D=None, connectivity: bool = True, seed: int | RandomState | None = None ): ... @_dispatchable -def sigma(G, niter: int = 100, nrand: int = 10, seed: Incomplete | None = None): ... +def sigma(G: Graph[_Node], niter: int = 100, nrand: int = 10, seed: int | RandomState | None = None): ... @_dispatchable -def omega(G, niter: int = 5, nrand: int = 10, seed: Incomplete | None = None): ... +def omega(G: Graph[_Node], niter: int = 5, nrand: int = 10, seed: int | RandomState | None = None): ... 
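A small sketch of the simple-path helpers annotated earlier in simple_paths.pyi (cutoff is the optional int bound on path length), using a toy complete graph:

import networkx as nx

G = nx.complete_graph(4)
paths = list(nx.all_simple_paths(G, source=0, target=3, cutoff=2))
print(sorted(paths))   # [[0, 1, 3], [0, 2, 3], [0, 3]]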
diff --git a/stubs/networkx/networkx/algorithms/smetric.pyi b/stubs/networkx/networkx/algorithms/smetric.pyi index 61eb1aebdb14..ee2e7115e94a 100644 --- a/stubs/networkx/networkx/algorithms/smetric.pyi +++ b/stubs/networkx/networkx/algorithms/smetric.pyi @@ -1,4 +1,5 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def s_metric(G, normalized: bool = True): ... +def s_metric(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/sparsifiers.pyi b/stubs/networkx/networkx/algorithms/sparsifiers.pyi index e556626f172a..e2c456c9c1c1 100644 --- a/stubs/networkx/networkx/algorithms/sparsifiers.pyi +++ b/stubs/networkx/networkx/algorithms/sparsifiers.pyi @@ -1,6 +1,6 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def spanner(G, stretch, weight: Incomplete | None = None, seed: Incomplete | None = None): ... +def spanner(G: Graph[_Node], stretch: float, weight: str | None = None, seed: int | RandomState | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/structuralholes.pyi b/stubs/networkx/networkx/algorithms/structuralholes.pyi index bc396853a637..76e026c4415d 100644 --- a/stubs/networkx/networkx/algorithms/structuralholes.pyi +++ b/stubs/networkx/networkx/algorithms/structuralholes.pyi @@ -1,10 +1,12 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def effective_size(G, nodes: Incomplete | None = None, weight: Incomplete | None = None): ... +def effective_size(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None, weight: str | None = None): ... @_dispatchable -def constraint(G, nodes: Incomplete | None = None, weight: Incomplete | None = None): ... +def constraint(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None, weight: str | None = None): ... @_dispatchable -def local_constraint(G, u, v, weight: Incomplete | None = None): ... +def local_constraint(G: Graph[_Node], u: _Node, v: _Node, weight: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/summarization.pyi b/stubs/networkx/networkx/algorithms/summarization.pyi index e1a359b8c72d..05116996663b 100644 --- a/stubs/networkx/networkx/algorithms/summarization.pyi +++ b/stubs/networkx/networkx/algorithms/summarization.pyi @@ -1,14 +1,16 @@ from _typeshed import Incomplete +from collections.abc import Iterable +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def dedensify(G, threshold, prefix: Incomplete | None = None, copy: bool = True): ... +def dedensify(G: Graph[_Node], threshold: int, prefix=None, copy: bool | None = True): ... 
@_dispatchable def snap_aggregation( - G, + G: Graph[_Node], node_attributes, - edge_attributes=(), + edge_attributes: Iterable[Incomplete] | None = (), prefix: str = "Supernode-", supernode_attribute: str = "group", superedge_attribute: str = "types", diff --git a/stubs/networkx/networkx/algorithms/swap.pyi b/stubs/networkx/networkx/algorithms/swap.pyi index be15bb2236fd..006bb38c9cd0 100644 --- a/stubs/networkx/networkx/algorithms/swap.pyi +++ b/stubs/networkx/networkx/algorithms/swap.pyi @@ -1,10 +1,13 @@ -from _typeshed import Incomplete - +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def directed_edge_swap(G, *, nswap: int = 1, max_tries: int = 100, seed: Incomplete | None = None): ... +def directed_edge_swap(G: DiGraph[_Node], *, nswap: int = 1, max_tries: int = 100, seed: int | RandomState | None = None): ... @_dispatchable -def double_edge_swap(G, nswap: int = 1, max_tries: int = 100, seed: Incomplete | None = None): ... +def double_edge_swap(G: Graph[_Node], nswap: int = 1, max_tries: int = 100, seed: int | RandomState | None = None): ... @_dispatchable -def connected_double_edge_swap(G, nswap: int = 1, _window_threshold: int = 3, seed: Incomplete | None = None): ... +def connected_double_edge_swap( + G: Graph[_Node], nswap: int = 1, _window_threshold: int = 3, seed: int | RandomState | None = None +): ... diff --git a/stubs/networkx/networkx/algorithms/threshold.pyi b/stubs/networkx/networkx/algorithms/threshold.pyi index 4649c6638d33..8a06957f891a 100644 --- a/stubs/networkx/networkx/algorithms/threshold.pyi +++ b/stubs/networkx/networkx/algorithms/threshold.pyi @@ -1,8 +1,7 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_threshold_graph(G): ... +def is_threshold_graph(G: Graph[_Node]): ... @_dispatchable -def find_threshold_graph(G, create_using: Incomplete | None = None): ... +def find_threshold_graph(G: Graph[_Node], create_using: Graph[_Node] | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/time_dependent.pyi b/stubs/networkx/networkx/algorithms/time_dependent.pyi new file mode 100644 index 000000000000..4a77f32bb5dc --- /dev/null +++ b/stubs/networkx/networkx/algorithms/time_dependent.pyi @@ -0,0 +1,5 @@ +from networkx.classes.graph import Graph, _Node +from networkx.utils.backends import _dispatchable + +@_dispatchable +def cd_index(G: Graph[_Node], node: _Node, time_delta, *, time: str = "time", weight: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/tournament.pyi b/stubs/networkx/networkx/algorithms/tournament.pyi index a8f06989d1a2..9d92b1a46429 100644 --- a/stubs/networkx/networkx/algorithms/tournament.pyi +++ b/stubs/networkx/networkx/algorithms/tournament.pyi @@ -1,16 +1,16 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def is_tournament(G): ... +def is_tournament(G: Graph[_Node]): ... @_dispatchable -def hamiltonian_path(G): ... +def hamiltonian_path(G: Graph[_Node]): ... @_dispatchable -def random_tournament(n, seed: Incomplete | None = None): ... +def random_tournament(n: int, seed: int | RandomState | None = None): ... @_dispatchable -def score_sequence(G): ... +def score_sequence(G: Graph[_Node]): ... 
@_dispatchable -def is_reachable(G, s, t): ... +def is_reachable(G: Graph[_Node], s: _Node, t: _Node): ... @_dispatchable -def is_strongly_connected(G): ... +def is_strongly_connected(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/traversal/beamsearch.pyi b/stubs/networkx/networkx/algorithms/traversal/beamsearch.pyi index 057bc9e8972d..7040e297b596 100644 --- a/stubs/networkx/networkx/algorithms/traversal/beamsearch.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/beamsearch.pyi @@ -1,7 +1,10 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Callable, Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def bfs_beam_edges(G, source, value, width: Incomplete | None = None) -> Generator[Incomplete, Incomplete, Incomplete]: ... +def bfs_beam_edges( + G: Graph[_Node], source: _Node, value: Callable[..., Incomplete], width: int | None = None +) -> Generator[Incomplete, Incomplete, Incomplete]: ... diff --git a/stubs/networkx/networkx/algorithms/traversal/breadth_first_search.pyi b/stubs/networkx/networkx/algorithms/traversal/breadth_first_search.pyi index dbcf268d599c..9832210f2917 100644 --- a/stubs/networkx/networkx/algorithms/traversal/breadth_first_search.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/breadth_first_search.pyi @@ -1,25 +1,34 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Callable, Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def bfs_edges( - G, source, reverse: bool = False, depth_limit: Incomplete | None = None, sort_neighbors: Incomplete | None = None + G: Graph[_Node], + source: _Node, + reverse: bool | None = False, + depth_limit=None, + sort_neighbors: Callable[..., Incomplete] | None = None, ) -> Generator[Incomplete, Incomplete, None]: ... @_dispatchable def bfs_tree( - G, source, reverse: bool = False, depth_limit: Incomplete | None = None, sort_neighbors: Incomplete | None = None + G: Graph[_Node], + source: _Node, + reverse: bool | None = False, + depth_limit=None, + sort_neighbors: Callable[..., Incomplete] | None = None, ): ... @_dispatchable def bfs_predecessors( - G, source, depth_limit: Incomplete | None = None, sort_neighbors: Incomplete | None = None + G: Graph[_Node], source: _Node, depth_limit=None, sort_neighbors: Callable[..., Incomplete] | None = None ) -> Generator[Incomplete, None, None]: ... @_dispatchable def bfs_successors( - G, source, depth_limit: Incomplete | None = None, sort_neighbors: Incomplete | None = None + G: Graph[_Node], source: _Node, depth_limit=None, sort_neighbors: Callable[..., Incomplete] | None = None ) -> Generator[Incomplete, None, None]: ... @_dispatchable -def bfs_layers(G, sources) -> Generator[Incomplete, None, None]: ... +def bfs_layers(G: Graph[_Node], sources) -> Generator[Incomplete, None, None]: ... @_dispatchable -def descendants_at_distance(G, source, distance): ... +def descendants_at_distance(G: Graph[_Node], source, distance): ... 
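The sort_neighbors parameter annotated above takes a callable that reorders each node's neighbors; a minimal sketch on a toy graph, using the built-in sorted:

import networkx as nx

G = nx.Graph([(0, 2), (0, 1), (1, 3), (2, 3)])
print(list(nx.bfs_edges(G, source=0, sort_neighbors=sorted)))   # [(0, 1), (0, 2), (1, 3)]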
diff --git a/stubs/networkx/networkx/algorithms/traversal/depth_first_search.pyi b/stubs/networkx/networkx/algorithms/traversal/depth_first_search.pyi index 0b1abaedf280..b4f3e1bc0a21 100644 --- a/stubs/networkx/networkx/algorithms/traversal/depth_first_search.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/depth_first_search.pyi @@ -1,22 +1,34 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Callable, Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def dfs_edges( - G: Graph[_Node], source: _Node | None = None, depth_limit: int | None = None + G: Graph[_Node], source: _Node | None = None, depth_limit=None, *, sort_neighbors: Callable[..., Incomplete] | None = None ) -> Generator[tuple[_Node, _Node], None, None]: ... @_dispatchable -def dfs_tree(G, source: Incomplete | None = None, depth_limit: Incomplete | None = None): ... +def dfs_tree( + G: Graph[_Node], source: _Node | None = None, depth_limit=None, *, sort_neighbors: Callable[..., Incomplete] | None = None +): ... @_dispatchable -def dfs_predecessors(G, source: Incomplete | None = None, depth_limit: Incomplete | None = None): ... +def dfs_predecessors( + G: Graph[_Node], source: _Node | None = None, depth_limit=None, *, sort_neighbors: Callable[..., Incomplete] | None = None +): ... @_dispatchable -def dfs_successors(G, source: Incomplete | None = None, depth_limit: Incomplete | None = None): ... +def dfs_successors( + G: Graph[_Node], source: _Node | None = None, depth_limit=None, *, sort_neighbors: Callable[..., Incomplete] | None = None +): ... @_dispatchable -def dfs_postorder_nodes(G, source: Incomplete | None = None, depth_limit: Incomplete | None = None): ... +def dfs_postorder_nodes( + G: Graph[_Node], source: _Node | None = None, depth_limit=None, *, sort_neighbors: Callable[..., Incomplete] | None = None +): ... @_dispatchable -def dfs_preorder_nodes(G, source: Incomplete | None = None, depth_limit: Incomplete | None = None): ... +def dfs_preorder_nodes( + G: Graph[_Node], source: _Node | None = None, depth_limit=None, *, sort_neighbors: Callable[..., Incomplete] | None = None +): ... @_dispatchable -def dfs_labeled_edges(G, source: Incomplete | None = None, depth_limit: Incomplete | None = None) -> None: ... +def dfs_labeled_edges( + G: Graph[_Node], source: _Node | None = None, depth_limit=None, *, sort_neighbors: Callable[..., Incomplete] | None = None +) -> None: ... diff --git a/stubs/networkx/networkx/algorithms/traversal/edgebfs.pyi b/stubs/networkx/networkx/algorithms/traversal/edgebfs.pyi index 4999e56ee880..1829e0bbe401 100644 --- a/stubs/networkx/networkx/algorithms/traversal/edgebfs.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/edgebfs.pyi @@ -1,9 +1,8 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def edge_bfs( - G, source: Incomplete | None = None, orientation: Incomplete | None = None -) -> Generator[Incomplete, None, Incomplete]: ... +def edge_bfs(G: Graph[_Node], source=None, orientation=None) -> Generator[Incomplete, None, Incomplete]: ... 
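The same keyword-only sort_neighbors appears on the DFS helpers above; a short sketch, assuming a networkx release that exposes it:

import networkx as nx

G = nx.Graph([(0, 2), (0, 1), (1, 3)])
print(list(nx.dfs_preorder_nodes(G, source=0, sort_neighbors=sorted)))   # [0, 1, 3, 2]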
diff --git a/stubs/networkx/networkx/algorithms/traversal/edgedfs.pyi b/stubs/networkx/networkx/algorithms/traversal/edgedfs.pyi index f2b11d4736f2..3e8b6485fd2d 100644 --- a/stubs/networkx/networkx/algorithms/traversal/edgedfs.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/edgedfs.pyi @@ -1,9 +1,8 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def edge_dfs( - G, source: Incomplete | None = None, orientation: Incomplete | None = None -) -> Generator[Incomplete, None, Incomplete]: ... +def edge_dfs(G: Graph[_Node], source=None, orientation=None) -> Generator[Incomplete, None, Incomplete]: ... diff --git a/stubs/networkx/networkx/algorithms/tree/branchings.pyi b/stubs/networkx/networkx/algorithms/tree/branchings.pyi index 5bed2ab210ef..00a09bdafdbe 100644 --- a/stubs/networkx/networkx/algorithms/tree/branchings.pyi +++ b/stubs/networkx/networkx/algorithms/tree/branchings.pyi @@ -2,7 +2,10 @@ from _typeshed import Incomplete from collections.abc import Iterator from dataclasses import dataclass +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState __all__ = [ "branching_weight", @@ -15,24 +18,26 @@ __all__ = [ ] @_dispatchable -def branching_weight(G, attr: str = "weight", default: float = 1): ... +def branching_weight(G: DiGraph[_Node], attr: str = "weight", default: float = 1): ... @_dispatchable -def greedy_branching(G, attr: str = "weight", default: float = 1, kind: str = "max", seed: Incomplete | None = None): ... +def greedy_branching( + G: DiGraph[_Node], attr: str = "weight", default: float = 1, kind: str = "max", seed: int | RandomState | None = None +): ... @_dispatchable def maximum_branching( - G, attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: Incomplete | None = None + G: DiGraph[_Node], attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: str | None = None ): ... @_dispatchable def minimum_branching( - G, attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: Incomplete | None = None + G: DiGraph[_Node], attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: str | None = None ): ... @_dispatchable def maximum_spanning_arborescence( - G, attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: Incomplete | None = None + G: DiGraph[_Node], attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: str | None = None ): ... @_dispatchable def minimum_spanning_arborescence( - G, attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: Incomplete | None = None + G: DiGraph[_Node], attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: str | None = None ): ... class ArborescenceIterator: @@ -47,7 +52,9 @@ class ArborescenceIterator: method: Incomplete partition_key: str init_partition: Incomplete + def __init__(self, G, weight: str = "weight", minimum: bool = True, init_partition: Incomplete | None = None) -> None: ... partition_queue: Incomplete + def __iter__(self) -> Iterator[Incomplete]: ... def __next__(self): ... 
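A brief sketch for the arborescence functions above, which now take a DiGraph[_Node]; the edge weights here are illustrative:

import networkx as nx

D = nx.DiGraph()
D.add_edge("r", "a", weight=3)
D.add_edge("r", "b", weight=1)
D.add_edge("a", "b", weight=2)
A = nx.maximum_spanning_arborescence(D, attr="weight")
print(sorted(A.edges))   # [('a', 'b'), ('r', 'a')]; the heavier edge into 'b' wins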
diff --git a/stubs/networkx/networkx/algorithms/tree/coding.pyi b/stubs/networkx/networkx/algorithms/tree/coding.pyi index 180a04a7c1ac..14906e0ba3c3 100644 --- a/stubs/networkx/networkx/algorithms/tree/coding.pyi +++ b/stubs/networkx/networkx/algorithms/tree/coding.pyi @@ -1,13 +1,17 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + +from networkx.classes.graph import Graph, _Node from networkx.exception import NetworkXException from networkx.utils.backends import _dispatchable class NotATree(NetworkXException): ... @_dispatchable -def to_nested_tuple(T, root, canonical_form: bool = False): ... +def to_nested_tuple(T: Graph[_Node], root: _Node, canonical_form: bool = False): ... @_dispatchable -def from_nested_tuple(sequence, sensible_relabeling: bool = False): ... +def from_nested_tuple(sequence: tuple[Incomplete], sensible_relabeling: bool = False): ... @_dispatchable -def to_prufer_sequence(T): ... +def to_prufer_sequence(T: Graph[_Node]): ... @_dispatchable -def from_prufer_sequence(sequence): ... +def from_prufer_sequence(sequence: Iterable[Incomplete]): ... diff --git a/stubs/networkx/networkx/algorithms/tree/mst.pyi b/stubs/networkx/networkx/algorithms/tree/mst.pyi index 178b5a771cb7..2a55ed99c20a 100644 --- a/stubs/networkx/networkx/algorithms/tree/mst.pyi +++ b/stubs/networkx/networkx/algorithms/tree/mst.pyi @@ -3,7 +3,9 @@ from collections.abc import Iterator from dataclasses import dataclass from enum import Enum +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState class EdgePartition(Enum): OPEN = 0 @@ -12,22 +14,34 @@ class EdgePartition(Enum): @_dispatchable def minimum_spanning_edges( - G, algorithm: str = "kruskal", weight: str = "weight", keys: bool = True, data: bool = True, ignore_nan: bool = False + G: Graph[_Node], + algorithm: str = "kruskal", + weight: str = "weight", + keys: bool = True, + data: bool | None = True, + ignore_nan: bool = False, ): ... @_dispatchable def maximum_spanning_edges( - G, algorithm: str = "kruskal", weight: str = "weight", keys: bool = True, data: bool = True, ignore_nan: bool = False + G: Graph[_Node], + algorithm: str = "kruskal", + weight: str = "weight", + keys: bool = True, + data: bool | None = True, + ignore_nan: bool = False, ): ... @_dispatchable -def minimum_spanning_tree(G, weight: str = "weight", algorithm: str = "kruskal", ignore_nan: bool = False): ... +def minimum_spanning_tree(G: Graph[_Node], weight: str = "weight", algorithm: str = "kruskal", ignore_nan: bool = False): ... @_dispatchable def partition_spanning_tree( - G, minimum: bool = True, weight: str = "weight", partition: str = "partition", ignore_nan: bool = False + G: Graph[_Node], minimum: bool = True, weight: str = "weight", partition: str = "partition", ignore_nan: bool = False ): ... @_dispatchable -def maximum_spanning_tree(G, weight: str = "weight", algorithm: str = "kruskal", ignore_nan: bool = False): ... +def maximum_spanning_tree(G: Graph[_Node], weight: str = "weight", algorithm: str = "kruskal", ignore_nan: bool = False): ... @_dispatchable -def random_spanning_tree(G, weight: Incomplete | None = None, *, multiplicative: bool = True, seed: Incomplete | None = None): ... +def random_spanning_tree( + G: Graph[_Node], weight: str | None = None, *, multiplicative=True, seed: int | RandomState | None = None +): ... 
class SpanningTreeIterator: @dataclass @@ -40,7 +54,9 @@ class SpanningTreeIterator: minimum: Incomplete ignore_nan: Incomplete partition_key: str + def __init__(self, G, weight: str = "weight", minimum: bool = True, ignore_nan: bool = False) -> None: ... partition_queue: Incomplete + def __iter__(self) -> Iterator[Incomplete]: ... def __next__(self): ... diff --git a/stubs/networkx/networkx/algorithms/tree/operations.pyi b/stubs/networkx/networkx/algorithms/tree/operations.pyi index feab02594790..a88e48eca6e1 100644 --- a/stubs/networkx/networkx/algorithms/tree/operations.pyi +++ b/stubs/networkx/networkx/algorithms/tree/operations.pyi @@ -1,6 +1,7 @@ from _typeshed import Incomplete +from collections.abc import Iterable from networkx.utils.backends import _dispatchable @_dispatchable -def join_trees(rooted_trees, label_attribute: Incomplete | None = None): ... +def join_trees(rooted_trees: Iterable[Incomplete], *, label_attribute: str | None = None, first_label: int | None = 0): ... diff --git a/stubs/networkx/networkx/algorithms/tree/recognition.pyi b/stubs/networkx/networkx/algorithms/tree/recognition.pyi index b5b303b66c81..219e48d02f33 100644 --- a/stubs/networkx/networkx/algorithms/tree/recognition.pyi +++ b/stubs/networkx/networkx/algorithms/tree/recognition.pyi @@ -1,10 +1,12 @@ +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def is_arborescence(G): ... +def is_arborescence(G: Graph[_Node]): ... @_dispatchable -def is_branching(G): ... +def is_branching(G: DiGraph[_Node]): ... @_dispatchable -def is_forest(G): ... +def is_forest(G: Graph[_Node]): ... @_dispatchable -def is_tree(G): ... +def is_tree(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/triads.pyi b/stubs/networkx/networkx/algorithms/triads.pyi index 510b01cbdf2a..d6bea369c744 100644 --- a/stubs/networkx/networkx/algorithms/triads.pyi +++ b/stubs/networkx/networkx/algorithms/triads.pyi @@ -1,19 +1,22 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Generator, Iterable +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +from numpy.random import RandomState @_dispatchable -def triadic_census(G, nodelist: Incomplete | None = None): ... +def triadic_census(G: DiGraph[_Node], nodelist: Iterable[Incomplete] | None = None): ... @_dispatchable -def is_triad(G): ... +def is_triad(G: Graph[_Node]): ... @_dispatchable -def all_triplets(G): ... +def all_triplets(G: DiGraph[_Node]): ... @_dispatchable -def all_triads(G) -> Generator[Incomplete, None, None]: ... +def all_triads(G: DiGraph[_Node]) -> Generator[Incomplete, None, None]: ... @_dispatchable -def triads_by_type(G): ... +def triads_by_type(G: DiGraph[_Node]): ... @_dispatchable -def triad_type(G): ... +def triad_type(G: DiGraph[_Node]): ... @_dispatchable -def random_triad(G, seed: Incomplete | None = None): ... +def random_triad(G: DiGraph[_Node], seed: int | RandomState | None = None): ... 
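For triadic_census above (DiGraph input, optional nodelist), a toy three-node cycle; the census maps each of the 16 triad type codes to a count:

import networkx as nx

D = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
census = nx.triadic_census(D)
print(census["030C"])   # 1: the single cyclic triad 1 -> 2 -> 3 -> 1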
diff --git a/stubs/networkx/networkx/algorithms/vitality.pyi b/stubs/networkx/networkx/algorithms/vitality.pyi index 9a7cfcb2b64a..ec105de8f4be 100644 --- a/stubs/networkx/networkx/algorithms/vitality.pyi +++ b/stubs/networkx/networkx/algorithms/vitality.pyi @@ -1,8 +1,9 @@ from _typeshed import Incomplete +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable def closeness_vitality( - G, node: Incomplete | None = None, weight: Incomplete | None = None, wiener_index: Incomplete | None = None + G: Graph[_Node], node: Incomplete | None = None, weight: str | None = None, wiener_index: float | None = None ): ... diff --git a/stubs/networkx/networkx/algorithms/voronoi.pyi b/stubs/networkx/networkx/algorithms/voronoi.pyi index 6713ec759232..806b4b12656c 100644 --- a/stubs/networkx/networkx/algorithms/voronoi.pyi +++ b/stubs/networkx/networkx/algorithms/voronoi.pyi @@ -1,4 +1,13 @@ +from _typeshed import Incomplete, SupportsGetItem +from collections.abc import Callable +from typing import Any + +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def voronoi_cells(G, center_nodes, weight: str = "weight"): ... +def voronoi_cells( + G: Graph[_Node], + center_nodes: set[Incomplete], + weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", +): ... diff --git a/stubs/networkx/networkx/algorithms/walks.pyi b/stubs/networkx/networkx/algorithms/walks.pyi new file mode 100644 index 000000000000..9c5d25a38a22 --- /dev/null +++ b/stubs/networkx/networkx/algorithms/walks.pyi @@ -0,0 +1,5 @@ +from networkx.classes.graph import Graph, _Node +from networkx.utils.backends import _dispatchable + +@_dispatchable +def number_of_walks(G: Graph[_Node], walk_length: int): ... diff --git a/stubs/networkx/networkx/algorithms/wiener.pyi b/stubs/networkx/networkx/algorithms/wiener.pyi index 1cff8ccd0f76..e55f890203a8 100644 --- a/stubs/networkx/networkx/algorithms/wiener.pyi +++ b/stubs/networkx/networkx/algorithms/wiener.pyi @@ -1,6 +1,5 @@ -from _typeshed import Incomplete - +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def wiener_index(G, weight: Incomplete | None = None): ... +def wiener_index(G: Graph[_Node], weight: str | None = None): ... 
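A one-line sketch for wiener_index above (weight is an optional edge-attribute name); on an unweighted toy path the index is just the sum of pairwise distances:

import networkx as nx

G = nx.path_graph(3)        # pairwise distances 1, 1 and 2
print(nx.wiener_index(G))   # 1 + 1 + 2 = 4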
diff --git a/stubs/networkx/networkx/generators/__init__.pyi b/stubs/networkx/networkx/generators/__init__.pyi index c19ff74b7229..878faee7cacc 100644 --- a/stubs/networkx/networkx/generators/__init__.pyi +++ b/stubs/networkx/networkx/generators/__init__.pyi @@ -8,6 +8,7 @@ from networkx.generators.duplication import * from networkx.generators.ego import * from networkx.generators.expanders import * from networkx.generators.geometric import * +from networkx.generators.harary_graph import * from networkx.generators.internet_as_graphs import * from networkx.generators.intersection import * from networkx.generators.interval_graph import * @@ -23,5 +24,6 @@ from networkx.generators.social import * from networkx.generators.spectral_graph_forge import * from networkx.generators.stochastic import * from networkx.generators.sudoku import * +from networkx.generators.time_series import * from networkx.generators.trees import * from networkx.generators.triads import * diff --git a/stubs/networkx/networkx/generators/time_series.pyi b/stubs/networkx/networkx/generators/time_series.pyi new file mode 100644 index 000000000000..b5655e820964 --- /dev/null +++ b/stubs/networkx/networkx/generators/time_series.pyi @@ -0,0 +1,4 @@ +from networkx.utils.backends import _dispatchable + +@_dispatchable +def visibility_graph(series): ... From 6979149d3f9fc96638ca088ed2ed6f31276d408c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ho=C3=ABl=20Bagard?= <34478245+hoel-bagard@users.noreply.github.com> Date: Wed, 26 Feb 2025 23:07:07 +0900 Subject: [PATCH 008/388] `tensorflow`: add a few TensorFlow functions (#13364) --- stubs/tensorflow/tensorflow/__init__.pyi | 33 ++++++++++++++++++++++-- stubs/tensorflow/tensorflow/math.pyi | 6 +++++ stubs/tensorflow/tensorflow/signal.pyi | 6 +++++ 3 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 stubs/tensorflow/tensorflow/signal.pyi diff --git a/stubs/tensorflow/tensorflow/__init__.pyi b/stubs/tensorflow/tensorflow/__init__.pyi index 21c8df673cbd..731979b41276 100644 --- a/stubs/tensorflow/tensorflow/__init__.pyi +++ b/stubs/tensorflow/tensorflow/__init__.pyi @@ -6,7 +6,7 @@ from collections.abc import Callable, Generator, Iterable, Iterator, Sequence from contextlib import contextmanager from enum import Enum from types import TracebackType -from typing import Any, Generic, NoReturn, TypeVar, overload +from typing import Any, Generic, Literal, NoReturn, TypeVar, overload from typing_extensions import ParamSpec, Self from google.protobuf.message import Message @@ -20,7 +20,17 @@ from tensorflow import ( math as math, types as types, ) -from tensorflow._aliases import AnyArray, DTypeLike, ShapeLike, Slice, TensorCompatible +from tensorflow._aliases import ( + AnyArray, + DTypeLike, + IntArray, + ScalarTensorCompatible, + ShapeLike, + Slice, + SparseTensorCompatible, + TensorCompatible, + UIntTensorCompatible, +) from tensorflow.autodiff import GradientTape as GradientTape from tensorflow.core.protobuf import struct_pb2 from tensorflow.dtypes import * @@ -56,6 +66,7 @@ from tensorflow.math import ( reduce_min as reduce_min, reduce_prod as reduce_prod, reduce_sum as reduce_sum, + round as round, sigmoid as sigmoid, sign as sign, sin as sin, @@ -403,4 +414,22 @@ def ones_like( input: RaggedTensor, dtype: DTypeLike | None = None, name: str | None = None, layout: Layout | None = None ) -> RaggedTensor: ... def reshape(tensor: TensorCompatible, shape: ShapeLike | Tensor, name: str | None = None) -> Tensor: ... 
+def pad( + tensor: TensorCompatible, + paddings: Tensor | IntArray | Iterable[Iterable[int]], + mode: Literal["CONSTANT", "constant", "REFLECT", "reflect", "SYMMETRIC", "symmectric"] = "CONSTANT", + constant_values: ScalarTensorCompatible = 0, + name: str | None = None, +) -> Tensor: ... +def shape(input: SparseTensorCompatible, out_type: DTypeLike | None = None, name: str | None = None) -> Tensor: ... +def where( + condition: TensorCompatible, x: TensorCompatible | None = None, y: TensorCompatible | None = None, name: str | None = None +) -> Tensor: ... +def gather_nd( + params: TensorCompatible, + indices: UIntTensorCompatible, + batch_dims: UIntTensorCompatible = 0, + name: str | None = None, + bad_indices_policy: Literal["", "DEFAULT", "ERROR", "IGNORE"] = "", +) -> Tensor: ... def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/tensorflow/tensorflow/math.pyi b/stubs/tensorflow/tensorflow/math.pyi index d3b02bf7a748..2e72ada0a8f2 100644 --- a/stubs/tensorflow/tensorflow/math.pyi +++ b/stubs/tensorflow/tensorflow/math.pyi @@ -219,6 +219,12 @@ def square(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... def softplus(features: TensorCompatible, name: str | None = None) -> Tensor: ... @overload def softplus(features: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def round(x: TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def round(x: SparseTensor, name: str | None = None) -> SparseTensor: ... +@overload +def round(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... # Depending on the method axis is either a rank 0 tensor or a rank 0/1 tensor. def reduce_mean( diff --git a/stubs/tensorflow/tensorflow/signal.pyi b/stubs/tensorflow/tensorflow/signal.pyi new file mode 100644 index 000000000000..c0ce254f1e36 --- /dev/null +++ b/stubs/tensorflow/tensorflow/signal.pyi @@ -0,0 +1,6 @@ +from tensorflow import Tensor +from tensorflow._aliases import DTypeLike, TensorCompatible + +def hamming_window( + window_length: TensorCompatible, periodic: bool | TensorCompatible = True, dtype: DTypeLike = ..., name: str | None = None +) -> Tensor: ... From 08aeab26ffeb0b6aeb4e295beaee060049056119 Mon Sep 17 00:00:00 2001 From: Leonardo-Pike-Excell Date: Thu, 27 Feb 2025 01:20:45 +1100 Subject: [PATCH 009/388] [networkx] Add annotations for `MultiDiGraph` (#13319) --- .../networkx/classes/multidigraph.pyi | 2 + .../networkx/networkx/classes/reportviews.pyi | 48 +++++++++++++++---- 2 files changed, 41 insertions(+), 9 deletions(-) diff --git a/stubs/networkx/networkx/classes/multidigraph.pyi b/stubs/networkx/networkx/classes/multidigraph.pyi index fac4ea0c0acd..ebfe40bc2df9 100644 --- a/stubs/networkx/networkx/classes/multidigraph.pyi +++ b/stubs/networkx/networkx/classes/multidigraph.pyi @@ -13,6 +13,8 @@ class MultiDiGraph(MultiGraph[_Node], DiGraph[_Node]): @cached_property def pred(self) -> MultiAdjacencyView[_Node, _Node, dict[str, Incomplete]]: ... @cached_property + def edges(self) -> OutMultiEdgeView[_Node]: ... + @cached_property def out_edges(self) -> OutMultiEdgeView[_Node]: ... @cached_property def in_edges(self) -> OutMultiEdgeView[_Node]: ... diff --git a/stubs/networkx/networkx/classes/reportviews.pyi b/stubs/networkx/networkx/classes/reportviews.pyi index e64320435cd5..6a654dcccfe3 100644 --- a/stubs/networkx/networkx/classes/reportviews.pyi +++ b/stubs/networkx/networkx/classes/reportviews.pyi @@ -110,36 +110,66 @@ class EdgeView(OutEdgeView[_Node]): ... class InEdgeView(OutEdgeView[_Node]): ... 
class OutMultiEdgeView(OutEdgeView[_Node]): + def __iter__(self) -> Iterator[tuple[_Node, _Node, Incomplete]]: ... # type: ignore[override] + def __getitem__(self, e: tuple[_Node, _Node, Incomplete]) -> dict[str, Any]: ... # type: ignore[override] @overload # type: ignore[override] # Has an additional `keys` keyword argument - def __call__( + def __call__( # type: ignore[overload-overlap] self, nbunch: None = None, data: Literal[False] = False, *, default: Unused = None, keys: Literal[True] ) -> Self: ... @overload def __call__( - self, nbunch: _NBunch[_Node], data: Literal[True], *, default: None = None, keys: bool = False - ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + self, nbunch: _NBunch[_Node] = None, data: Literal[False] = False, *, default: None = None, keys: Literal[False] = False + ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node]]: ... + @overload + def __call__( + self, nbunch: _Node | Iterable[_Node], data: Literal[False] = False, *, default: None = None, keys: Literal[True] + ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: Literal[True], default: None = None, keys: Literal[True] + ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node], data: Literal[True], *, default: None = None, keys: Literal[True] + ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, dict[str, Incomplete]]]: ... @overload def __call__( - self, nbunch: _NBunch[_Node] = None, *, data: Literal[True], default: None = None, keys: bool = False + self, nbunch: _NBunch[_Node] = None, *, data: Literal[True], default: None = None, keys: Literal[False] = False ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... @overload def __call__( - self, nbunch: _NBunch[_Node], data: str, *, default: _U | None = None, keys: bool = False + self, nbunch: _NBunch[_Node], data: str, *, default: _U | None = None, keys: Literal[False] = False ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... @overload def __call__( - self, nbunch: _NBunch[_Node] = None, *, data: str, default: _U | None = None, keys: bool = False + self, nbunch: _NBunch[_Node] = None, *, data: str, default: _U | None = None, keys: Literal[True] + ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, _U]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: str, default: _U | None = None, keys: Literal[False] = False ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload # type: ignore[override] + def data(self, data: Literal[False], default: Unused = None, nbunch: None = None, *, keys: Literal[True]) -> Self: ... @overload - def data(self, data: Literal[False], default: Unused = None, nbunch: None = None, keys: Literal[False] = False) -> Self: ... + def data( + self, data: Literal[False], default: None = None, nbunch: _NBunch[_Node] = None, keys: Literal[False] = False + ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node]]: ... @overload def data( - self, data: Literal[True] = True, default: None = None, nbunch: _NBunch[_Node] = None, keys: bool = False + self, data: Literal[True] = True, default: None = None, nbunch: _NBunch[_Node] = None, keys: Literal[False] = False ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... 
@overload def data( - self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None, keys: bool = False + self, data: Literal[True] = True, default: None = None, nbunch: _NBunch[_Node] = None, *, keys: Literal[True] + ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, dict[str, Incomplete]]]: ... + @overload + def data( + self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None, keys: Literal[False] = False ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload + def data( + self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None, *, keys: Literal[True] + ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, _U]]: ... class MultiEdgeView(OutMultiEdgeView[_Node]): ... class InMultiEdgeView(OutMultiEdgeView[_Node]): ... From d35de438948a8accbcbf7597b0d6da7ebc2e0316 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Wed, 26 Feb 2025 21:44:10 +0100 Subject: [PATCH 010/388] Third party stubtest: Print time per distribution (#13547) --- lib/ts_utils/utils.py | 4 ++++ pyproject.toml | 2 ++ tests/stubtest_third_party.py | 8 ++++++++ 3 files changed, 14 insertions(+) diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py index 66d48bd78a7e..522db807a29e 100644 --- a/lib/ts_utils/utils.py +++ b/lib/ts_utils/utils.py @@ -72,6 +72,10 @@ def print_divider() -> None: print() +def print_time(t: float) -> None: + print(f"({t:.2f} s) ", end="") + + # ==================================================================== # Dynamic venv creation # ==================================================================== diff --git a/pyproject.toml b/pyproject.toml index 39f196cdb595..69eb3d1290a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,6 +112,8 @@ ignore = [ ### # We're not a library, no need to document everything "D1", # Missing docstring in ... + # Sometimes, an extra blank line is more readable + "D202", # No blank lines allowed after function docstring # Doesn't support split "summary line" "D205", # 1 blank line required between summary line and description # Used for direct, non-subclass type comparison, for example: `type(val) is str` diff --git a/tests/stubtest_third_party.py b/tests/stubtest_third_party.py index cf6f213d9736..bc1db5a413d2 100755 --- a/tests/stubtest_third_party.py +++ b/tests/stubtest_third_party.py @@ -12,6 +12,7 @@ from pathlib import Path from shutil import rmtree from textwrap import dedent +from time import time from typing import NoReturn from ts_utils.metadata import NoSuchStubError, get_recursive_requirements, read_metadata @@ -25,6 +26,7 @@ print_error, print_info, print_success_msg, + print_time, ) @@ -36,6 +38,8 @@ def run_stubtest( specified_platforms_only: bool = False, keep_tmp_dir: bool = False, ) -> bool: + """Run stubtest for a single distribution.""" + dist_name = dist.name try: metadata = read_metadata(dist_name) @@ -43,6 +47,8 @@ def run_stubtest( parser.error(str(e)) print(f"{dist_name}... 
", end="", flush=True) + t = time() + stubtest_settings = metadata.stubtest_settings if stubtest_settings.skip: print(colored("skipping", "yellow")) @@ -136,6 +142,7 @@ def run_stubtest( try: subprocess.run(stubtest_cmd, env=stubtest_env, check=True, capture_output=True) except subprocess.CalledProcessError as e: + print_time(time() - t) print_error("fail") print_divider() @@ -175,6 +182,7 @@ def run_stubtest( return False else: + print_time(time() - t) print_success_msg() if keep_tmp_dir: print_info(f"Virtual environment kept at: {venv_dir}") From ba49f6270e919a1bc6eb94249cc9c79f02c42b89 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 26 Feb 2025 17:19:36 -0500 Subject: [PATCH 011/388] Enable Ruff SIM (#13309) --- pyproject.toml | 34 ++++++- stdlib/asyncio/__init__.pyi | 178 ------------------------------------ 2 files changed, 29 insertions(+), 183 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 69eb3d1290a1..e6eb6cf3656d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,11 +55,6 @@ select = [ "W", # pycodestyle Warning # Only include flake8-annotations rules that are autofixable. Otherwise leave this to mypy+pyright "ANN2", - # Don't include TC rules that create a TYPE_CHECKING block or stringifies annotations - "TC004", # Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting. - "TC005", # Found empty type-checking block - # "TC008", # TODO: Enable when out of preview - "TC010", # Invalid string member in `X | Y`-style union type # Most refurb rules are in preview and can be opinionated, # consider them individually as they come out of preview (last check: 0.8.4) "FURB105", # Unnecessary empty string passed to `print` @@ -94,6 +89,35 @@ select = [ # "PYI061", # TODO: Enable when out of preview "PYI062", # Duplicate literal member `{}` "PYI064", # `Final[Literal[{literal}]]` can be replaced with a bare Final + # flake8-simplify, excluding rules that can reduce performance or readability due to long line formatting + "SIM101", # Multiple `isinstance` calls for `{name}`, merge into a single call + "SIM103", # Return the condition `{condition}` directly + "SIM107", # Don't use return in `try-except` and `finally` + "SIM109", # Use `{replacement}` instead of multiple equality comparisons + "SIM112", # Use capitalized environment variable `{expected}` instead of `{actual}` + "SIM113", # Use `enumerate()` for index variable `{index}` in `for` loop + "SIM114", # Combine `if` branches using logical `or` operator + "SIM115", # Use a context manager for opening files + "SIM118", # Use key `{operator}` dict instead of key `{operator} dict.keys()` + "SIM201", # Use `{left} != {right}` instead of not `{left} == {right}` + "SIM202", # Use `{left} == {right}` instead of not `{left} != {right}` + "SIM208", # Use `{expr}` instead of `not (not {expr})` + "SIM210", # Remove unnecessary `True if ... else False` + "SIM211", # Use `not ...` instead of `False if ... 
else True` + "SIM212", # Use `{expr_else} if {expr_else} else {expr_body}` instead of `{expr_body} if not {expr_else} else {expr_else}` + "SIM220", # Use `False` instead of `{name} and not {name}` + "SIM221", # Use `True` instead of `{name} or not {name}` + "SIM222", # Use `{expr}` instead of `{replaced}` + "SIM223", # Use `{expr}` instead of `{replaced}` + "SIM300", # Yoda condition detected + "SIM401", # Use `{contents}` instead of an if block + "SIM910", # Use `{expected}` instead of `{actual}` (dict-get-with-none-default) + "SIM911", # Use `{expected}` instead of `{actual}` (zip-dict-keys-and-values) + # Don't include TC rules that create a TYPE_CHECKING block or stringifies annotations + "TC004", # Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting. + "TC005", # Found empty type-checking block + # "TC008", # TODO: Enable when out of preview + "TC010", # Invalid string member in `X | Y`-style union type ] extend-safe-fixes = [ "UP036", # Remove unnecessary `sys.version_info` blocks diff --git a/stdlib/asyncio/__init__.pyi b/stdlib/asyncio/__init__.pyi index 7c3ac6ede4fe..89a8143c5f7f 100644 --- a/stdlib/asyncio/__init__.pyi +++ b/stdlib/asyncio/__init__.pyi @@ -410,93 +410,6 @@ if sys.platform == "win32": "WindowsSelectorEventLoopPolicy", # from windows_events "WindowsProactorEventLoopPolicy", # from windows_events ) - elif sys.version_info >= (3, 10): - __all__ = ( - "BaseEventLoop", # from base_events - "Server", # from base_events - "coroutine", # from coroutines - "iscoroutinefunction", # from coroutines - "iscoroutine", # from coroutines - "AbstractEventLoopPolicy", # from events - "AbstractEventLoop", # from events - "AbstractServer", # from events - "Handle", # from events - "TimerHandle", # from events - "get_event_loop_policy", # from events - "set_event_loop_policy", # from events - "get_event_loop", # from events - "set_event_loop", # from events - "new_event_loop", # from events - "get_child_watcher", # from events - "set_child_watcher", # from events - "_set_running_loop", # from events - "get_running_loop", # from events - "_get_running_loop", # from events - "CancelledError", # from exceptions - "InvalidStateError", # from exceptions - "TimeoutError", # from exceptions - "IncompleteReadError", # from exceptions - "LimitOverrunError", # from exceptions - "SendfileNotAvailableError", # from exceptions - "Future", # from futures - "wrap_future", # from futures - "isfuture", # from futures - "Lock", # from locks - "Event", # from locks - "Condition", # from locks - "Semaphore", # from locks - "BoundedSemaphore", # from locks - "BaseProtocol", # from protocols - "Protocol", # from protocols - "DatagramProtocol", # from protocols - "SubprocessProtocol", # from protocols - "BufferedProtocol", # from protocols - "run", # from runners - "Queue", # from queues - "PriorityQueue", # from queues - "LifoQueue", # from queues - "QueueFull", # from queues - "QueueEmpty", # from queues - "StreamReader", # from streams - "StreamWriter", # from streams - "StreamReaderProtocol", # from streams - "open_connection", # from streams - "start_server", # from streams - "create_subprocess_exec", # from subprocess - "create_subprocess_shell", # from subprocess - "Task", # from tasks - "create_task", # from tasks - "FIRST_COMPLETED", # from tasks - "FIRST_EXCEPTION", # from tasks - "ALL_COMPLETED", # from tasks - "wait", # from tasks - "wait_for", # from tasks - "as_completed", # from tasks - "sleep", # from tasks - "gather", # from tasks - "shield", 
# from tasks - "ensure_future", # from tasks - "run_coroutine_threadsafe", # from tasks - "current_task", # from tasks - "all_tasks", # from tasks - "_register_task", # from tasks - "_unregister_task", # from tasks - "_enter_task", # from tasks - "_leave_task", # from tasks - "to_thread", # from threads - "BaseTransport", # from transports - "ReadTransport", # from transports - "WriteTransport", # from transports - "Transport", # from transports - "DatagramTransport", # from transports - "SubprocessTransport", # from transports - "SelectorEventLoop", # from windows_events - "ProactorEventLoop", # from windows_events - "IocpProactor", # from windows_events - "DefaultEventLoopPolicy", # from windows_events - "WindowsSelectorEventLoopPolicy", # from windows_events - "WindowsProactorEventLoopPolicy", # from windows_events - ) elif sys.version_info >= (3, 9): __all__ = ( "BaseEventLoop", # from base_events @@ -1059,97 +972,6 @@ else: "ThreadedChildWatcher", # from unix_events "DefaultEventLoopPolicy", # from unix_events ) - elif sys.version_info >= (3, 10): - __all__ = ( - "BaseEventLoop", # from base_events - "Server", # from base_events - "coroutine", # from coroutines - "iscoroutinefunction", # from coroutines - "iscoroutine", # from coroutines - "AbstractEventLoopPolicy", # from events - "AbstractEventLoop", # from events - "AbstractServer", # from events - "Handle", # from events - "TimerHandle", # from events - "get_event_loop_policy", # from events - "set_event_loop_policy", # from events - "get_event_loop", # from events - "set_event_loop", # from events - "new_event_loop", # from events - "get_child_watcher", # from events - "set_child_watcher", # from events - "_set_running_loop", # from events - "get_running_loop", # from events - "_get_running_loop", # from events - "CancelledError", # from exceptions - "InvalidStateError", # from exceptions - "TimeoutError", # from exceptions - "IncompleteReadError", # from exceptions - "LimitOverrunError", # from exceptions - "SendfileNotAvailableError", # from exceptions - "Future", # from futures - "wrap_future", # from futures - "isfuture", # from futures - "Lock", # from locks - "Event", # from locks - "Condition", # from locks - "Semaphore", # from locks - "BoundedSemaphore", # from locks - "BaseProtocol", # from protocols - "Protocol", # from protocols - "DatagramProtocol", # from protocols - "SubprocessProtocol", # from protocols - "BufferedProtocol", # from protocols - "run", # from runners - "Queue", # from queues - "PriorityQueue", # from queues - "LifoQueue", # from queues - "QueueFull", # from queues - "QueueEmpty", # from queues - "StreamReader", # from streams - "StreamWriter", # from streams - "StreamReaderProtocol", # from streams - "open_connection", # from streams - "start_server", # from streams - "open_unix_connection", # from streams - "start_unix_server", # from streams - "create_subprocess_exec", # from subprocess - "create_subprocess_shell", # from subprocess - "Task", # from tasks - "create_task", # from tasks - "FIRST_COMPLETED", # from tasks - "FIRST_EXCEPTION", # from tasks - "ALL_COMPLETED", # from tasks - "wait", # from tasks - "wait_for", # from tasks - "as_completed", # from tasks - "sleep", # from tasks - "gather", # from tasks - "shield", # from tasks - "ensure_future", # from tasks - "run_coroutine_threadsafe", # from tasks - "current_task", # from tasks - "all_tasks", # from tasks - "_register_task", # from tasks - "_unregister_task", # from tasks - "_enter_task", # from tasks - "_leave_task", # from tasks - 
"to_thread", # from threads - "BaseTransport", # from transports - "ReadTransport", # from transports - "WriteTransport", # from transports - "Transport", # from transports - "DatagramTransport", # from transports - "SubprocessTransport", # from transports - "SelectorEventLoop", # from unix_events - "AbstractChildWatcher", # from unix_events - "SafeChildWatcher", # from unix_events - "FastChildWatcher", # from unix_events - "PidfdChildWatcher", # from unix_events - "MultiLoopChildWatcher", # from unix_events - "ThreadedChildWatcher", # from unix_events - "DefaultEventLoopPolicy", # from unix_events - ) elif sys.version_info >= (3, 9): __all__ = ( "BaseEventLoop", # from base_events From a0dda4d31c8830e2d8bf266f176d045a0c94e565 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ho=C3=ABl=20Bagard?= <34478245+hoel-bagard@users.noreply.github.com> Date: Thu, 27 Feb 2025 20:19:20 +0900 Subject: [PATCH 012/388] `tensorflow`: Allow `tf.Tensor` to be used in `__getitem__` (#13549) --- stubs/tensorflow/tensorflow/_aliases.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/tensorflow/tensorflow/_aliases.pyi b/stubs/tensorflow/tensorflow/_aliases.pyi index 7e3ae19a8b6a..d8beae33eb11 100644 --- a/stubs/tensorflow/tensorflow/_aliases.pyi +++ b/stubs/tensorflow/tensorflow/_aliases.pyi @@ -32,7 +32,7 @@ KerasSerializable: TypeAlias = KerasSerializable1 | KerasSerializable2 TensorValue: TypeAlias = tf.Tensor # Alias for a 0D Tensor Integer: TypeAlias = TensorValue | int | IntArray | np.number[Any] # Here IntArray are assumed to be 0D. Float: TypeAlias = Integer | float | FloatArray -Slice: TypeAlias = int | slice | None +Slice: TypeAlias = tf.Tensor | tf.RaggedTensor | int | slice | None FloatDataSequence: TypeAlias = Sequence[float] | Sequence[FloatDataSequence] IntDataSequence: TypeAlias = Sequence[int] | Sequence[IntDataSequence] StrDataSequence: TypeAlias = Sequence[str] | Sequence[StrDataSequence] From b9cde5643ad9fdc5fdcf3931af025ddbd6d0a375 Mon Sep 17 00:00:00 2001 From: mtnpke Date: Thu, 27 Feb 2025 12:19:38 +0100 Subject: [PATCH 013/388] Support configparser.UNNAMED_SECTION (#13542) (#13544) --- stdlib/configparser.pyi | 92 +++++++++++++++++++++++------------------ 1 file changed, 52 insertions(+), 40 deletions(-) diff --git a/stdlib/configparser.pyi b/stdlib/configparser.pyi index a44dc2e1c035..bc3e22771ca5 100644 --- a/stdlib/configparser.pyi +++ b/stdlib/configparser.pyi @@ -77,6 +77,19 @@ else: "MAX_INTERPOLATION_DEPTH", ] +if sys.version_info >= (3, 13): + class _UNNAMED_SECTION: ... + UNNAMED_SECTION: _UNNAMED_SECTION + + _SectionName: TypeAlias = str | _UNNAMED_SECTION + # A list of sections can only include an unnamed section if the parser was initialized with + # allow_unnamed_section=True. Any prevents users from having to use explicit + # type checks if allow_unnamed_section is False (the default). + _SectionNameList: TypeAlias = list[Any] +else: + _SectionName: TypeAlias = str + _SectionNameList: TypeAlias = list[str] + _Section: TypeAlias = Mapping[str, str] _Parser: TypeAlias = MutableMapping[str, _Section] _ConverterCallback: TypeAlias = Callable[[str], Any] @@ -87,17 +100,17 @@ DEFAULTSECT: Final = "DEFAULT" MAX_INTERPOLATION_DEPTH: Final = 10 class Interpolation: - def before_get(self, parser: _Parser, section: str, option: str, value: str, defaults: _Section) -> str: ... - def before_set(self, parser: _Parser, section: str, option: str, value: str) -> str: ... - def before_read(self, parser: _Parser, section: str, option: str, value: str) -> str: ... 
- def before_write(self, parser: _Parser, section: str, option: str, value: str) -> str: ... + def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, defaults: _Section) -> str: ... + def before_set(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... + def before_read(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... + def before_write(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... class BasicInterpolation(Interpolation): ... class ExtendedInterpolation(Interpolation): ... if sys.version_info < (3, 13): class LegacyInterpolation(Interpolation): - def before_get(self, parser: _Parser, section: str, option: str, value: str, vars: _Section) -> str: ... + def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, vars: _Section) -> str: ... class RawConfigParser(_Parser): _SECT_TMPL: ClassVar[str] # undocumented @@ -220,11 +233,11 @@ class RawConfigParser(_Parser): def __iter__(self) -> Iterator[str]: ... def __contains__(self, key: object) -> bool: ... def defaults(self) -> _Section: ... - def sections(self) -> list[str]: ... - def add_section(self, section: str) -> None: ... - def has_section(self, section: str) -> bool: ... - def options(self, section: str) -> list[str]: ... - def has_option(self, section: str, option: str) -> bool: ... + def sections(self) -> _SectionNameList: ... + def add_section(self, section: _SectionName) -> None: ... + def has_section(self, section: _SectionName) -> bool: ... + def options(self, section: _SectionName) -> list[str]: ... + def has_option(self, section: _SectionName, option: str) -> bool: ... def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: ... def read_file(self, f: Iterable[str], source: str | None = None) -> None: ... def read_string(self, string: str, source: str = "") -> None: ... @@ -234,26 +247,26 @@ class RawConfigParser(_Parser): # These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together @overload - def getint(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> int: ... + def getint(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> int: ... @overload def getint( - self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... + self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... ) -> int | _T: ... @overload - def getfloat(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> float: ... + def getfloat(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> float: ... @overload def getfloat( - self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... + self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... ) -> float | _T: ... @overload - def getboolean(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> bool: ... + def getboolean(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> bool: ... @overload def getboolean( - self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... 
+ self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... ) -> bool | _T: ... def _get_conv( self, - section: str, + section: _SectionName, option: str, conv: Callable[[str], _T], *, @@ -263,19 +276,19 @@ class RawConfigParser(_Parser): ) -> _T: ... # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | MaybeNone: ... + def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | MaybeNone: ... @overload def get( - self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T + self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T ) -> str | _T | MaybeNone: ... @overload def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: ... @overload - def items(self, section: str, raw: bool = False, vars: _Section | None = None) -> list[tuple[str, str]]: ... - def set(self, section: str, option: str, value: str | None = None) -> None: ... + def items(self, section: _SectionName, raw: bool = False, vars: _Section | None = None) -> list[tuple[str, str]]: ... + def set(self, section: _SectionName, option: str, value: str | None = None) -> None: ... def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: ... - def remove_option(self, section: str, option: str) -> bool: ... - def remove_section(self, section: str) -> bool: ... + def remove_option(self, section: _SectionName, option: str) -> bool: ... + def remove_section(self, section: _SectionName) -> bool: ... def optionxform(self, optionstr: str) -> str: ... @property def converters(self) -> ConverterMapping: ... @@ -283,9 +296,11 @@ class RawConfigParser(_Parser): class ConfigParser(RawConfigParser): # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: ... + def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: ... @overload - def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T) -> str | _T: ... + def get( + self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T + ) -> str | _T: ... if sys.version_info < (3, 12): class SafeConfigParser(ConfigParser): ... # deprecated alias @@ -349,38 +364,38 @@ class Error(Exception): def __init__(self, msg: str = "") -> None: ... class NoSectionError(Error): - section: str - def __init__(self, section: str) -> None: ... + section: _SectionName + def __init__(self, section: _SectionName) -> None: ... class DuplicateSectionError(Error): - section: str + section: _SectionName source: str | None lineno: int | None - def __init__(self, section: str, source: str | None = None, lineno: int | None = None) -> None: ... + def __init__(self, section: _SectionName, source: str | None = None, lineno: int | None = None) -> None: ... class DuplicateOptionError(Error): - section: str + section: _SectionName option: str source: str | None lineno: int | None - def __init__(self, section: str, option: str, source: str | None = None, lineno: int | None = None) -> None: ... 
+ def __init__(self, section: _SectionName, option: str, source: str | None = None, lineno: int | None = None) -> None: ... class NoOptionError(Error): - section: str + section: _SectionName option: str - def __init__(self, option: str, section: str) -> None: ... + def __init__(self, option: str, section: _SectionName) -> None: ... class InterpolationError(Error): - section: str + section: _SectionName option: str - def __init__(self, option: str, section: str, msg: str) -> None: ... + def __init__(self, option: str, section: _SectionName, msg: str) -> None: ... class InterpolationDepthError(InterpolationError): - def __init__(self, option: str, section: str, rawval: object) -> None: ... + def __init__(self, option: str, section: _SectionName, rawval: object) -> None: ... class InterpolationMissingOptionError(InterpolationError): reference: str - def __init__(self, option: str, section: str, rawval: object, reference: str) -> None: ... + def __init__(self, option: str, section: _SectionName, rawval: object, reference: str) -> None: ... class InterpolationSyntaxError(InterpolationError): ... @@ -403,9 +418,6 @@ class MissingSectionHeaderError(ParsingError): def __init__(self, filename: str, lineno: int, line: str) -> None: ... if sys.version_info >= (3, 13): - class _UNNAMED_SECTION: ... - UNNAMED_SECTION: _UNNAMED_SECTION - class MultilineContinuationError(ParsingError): lineno: int line: str From 12fc1afddacfd96abb1908b2c25d60246bd881de Mon Sep 17 00:00:00 2001 From: Stephen Morton Date: Thu, 27 Feb 2025 03:50:09 -0800 Subject: [PATCH 014/388] Major update for the xml module (#13349) --- pyrightconfig.stricter.json | 5 - stdlib/xml/dom/NodeFilter.pyi | 11 +- stdlib/xml/dom/__init__.pyi | 137 +++--- stdlib/xml/dom/expatbuilder.pyi | 111 +++-- stdlib/xml/dom/minidom.pyi | 667 ++++++++++++++++++++-------- stdlib/xml/dom/pulldom.pyi | 125 +++--- stdlib/xml/dom/xmlbuilder.pyi | 69 +-- stdlib/xml/etree/ElementInclude.pyi | 23 +- stdlib/xml/etree/ElementPath.pyi | 31 +- stdlib/xml/etree/ElementTree.pyi | 124 ++++-- stdlib/xml/sax/_exceptions.pyi | 10 +- stdlib/xml/sax/expatreader.pyi | 77 +++- stdlib/xml/sax/handler.pyi | 65 ++- stdlib/xml/sax/saxutils.pyi | 40 +- stdlib/xml/sax/xmlreader.pyi | 115 ++--- 15 files changed, 1029 insertions(+), 581 deletions(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index e8dbd272921e..ec592725686d 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -23,11 +23,6 @@ "stdlib/tkinter/scrolledtext.pyi", "stdlib/tkinter/tix.pyi", "stdlib/tkinter/ttk.pyi", - "stdlib/xml/dom/NodeFilter.pyi", - "stdlib/xml/dom/expatbuilder.pyi", - "stdlib/xml/dom/minidom.pyi", - "stdlib/xml/dom/pulldom.pyi", - "stdlib/xml/sax", "stubs/aiofiles/aiofiles/tempfile/temptypes.pyi", "stubs/antlr4-python3-runtime", "stubs/Authlib", diff --git a/stdlib/xml/dom/NodeFilter.pyi b/stdlib/xml/dom/NodeFilter.pyi index 80fb73d23433..007df982e06a 100644 --- a/stdlib/xml/dom/NodeFilter.pyi +++ b/stdlib/xml/dom/NodeFilter.pyi @@ -1,7 +1,10 @@ +from typing import Literal +from xml.dom.minidom import Node + class NodeFilter: - FILTER_ACCEPT: int - FILTER_REJECT: int - FILTER_SKIP: int + FILTER_ACCEPT: Literal[1] + FILTER_REJECT: Literal[2] + FILTER_SKIP: Literal[3] SHOW_ALL: int SHOW_ELEMENT: int @@ -16,4 +19,4 @@ class NodeFilter: SHOW_DOCUMENT_TYPE: int SHOW_DOCUMENT_FRAGMENT: int SHOW_NOTATION: int - def acceptNode(self, node) -> int: ... + def acceptNode(self, node: Node) -> int: ... 
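With FILTER_ACCEPT/FILTER_REJECT/FILTER_SKIP narrowed to Literal[1]/[2]/[3] and acceptNode typed against xml.dom.minidom.Node, a custom filter subclass type-checks as sketched below (illustrative only, not part of the patch):

    # Illustrative example of the NodeFilter interface as annotated above.
    from xml.dom.minidom import Node
    from xml.dom.NodeFilter import NodeFilter

    class ElementsOnly(NodeFilter):
        def acceptNode(self, node: Node) -> int:
            # The Literal[...] constants remain compatible with the declared int return.
            if node.nodeType == Node.ELEMENT_NODE:
                return self.FILTER_ACCEPT
            return self.FILTER_SKIP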
diff --git a/stdlib/xml/dom/__init__.pyi b/stdlib/xml/dom/__init__.pyi index 8738015638a9..d9615f9aacfe 100644 --- a/stdlib/xml/dom/__init__.pyi +++ b/stdlib/xml/dom/__init__.pyi @@ -1,69 +1,100 @@ -from typing import Any, Final +from typing import Any, Final, Literal from .domreg import getDOMImplementation as getDOMImplementation, registerDOMImplementation as registerDOMImplementation class Node: - ELEMENT_NODE: int - ATTRIBUTE_NODE: int - TEXT_NODE: int - CDATA_SECTION_NODE: int - ENTITY_REFERENCE_NODE: int - ENTITY_NODE: int - PROCESSING_INSTRUCTION_NODE: int - COMMENT_NODE: int - DOCUMENT_NODE: int - DOCUMENT_TYPE_NODE: int - DOCUMENT_FRAGMENT_NODE: int - NOTATION_NODE: int + ELEMENT_NODE: Literal[1] + ATTRIBUTE_NODE: Literal[2] + TEXT_NODE: Literal[3] + CDATA_SECTION_NODE: Literal[4] + ENTITY_REFERENCE_NODE: Literal[5] + ENTITY_NODE: Literal[6] + PROCESSING_INSTRUCTION_NODE: Literal[7] + COMMENT_NODE: Literal[8] + DOCUMENT_NODE: Literal[9] + DOCUMENT_TYPE_NODE: Literal[10] + DOCUMENT_FRAGMENT_NODE: Literal[11] + NOTATION_NODE: Literal[12] # ExceptionCode -INDEX_SIZE_ERR: Final[int] -DOMSTRING_SIZE_ERR: Final[int] -HIERARCHY_REQUEST_ERR: Final[int] -WRONG_DOCUMENT_ERR: Final[int] -INVALID_CHARACTER_ERR: Final[int] -NO_DATA_ALLOWED_ERR: Final[int] -NO_MODIFICATION_ALLOWED_ERR: Final[int] -NOT_FOUND_ERR: Final[int] -NOT_SUPPORTED_ERR: Final[int] -INUSE_ATTRIBUTE_ERR: Final[int] -INVALID_STATE_ERR: Final[int] -SYNTAX_ERR: Final[int] -INVALID_MODIFICATION_ERR: Final[int] -NAMESPACE_ERR: Final[int] -INVALID_ACCESS_ERR: Final[int] -VALIDATION_ERR: Final[int] +INDEX_SIZE_ERR: Final = 1 +DOMSTRING_SIZE_ERR: Final = 2 +HIERARCHY_REQUEST_ERR: Final = 3 +WRONG_DOCUMENT_ERR: Final = 4 +INVALID_CHARACTER_ERR: Final = 5 +NO_DATA_ALLOWED_ERR: Final = 6 +NO_MODIFICATION_ALLOWED_ERR: Final = 7 +NOT_FOUND_ERR: Final = 8 +NOT_SUPPORTED_ERR: Final = 9 +INUSE_ATTRIBUTE_ERR: Final = 10 +INVALID_STATE_ERR: Final = 11 +SYNTAX_ERR: Final = 12 +INVALID_MODIFICATION_ERR: Final = 13 +NAMESPACE_ERR: Final = 14 +INVALID_ACCESS_ERR: Final = 15 +VALIDATION_ERR: Final = 16 class DOMException(Exception): code: int def __init__(self, *args: Any, **kw: Any) -> None: ... def _get_code(self) -> int: ... -class IndexSizeErr(DOMException): ... -class DomstringSizeErr(DOMException): ... -class HierarchyRequestErr(DOMException): ... -class WrongDocumentErr(DOMException): ... -class InvalidCharacterErr(DOMException): ... -class NoDataAllowedErr(DOMException): ... -class NoModificationAllowedErr(DOMException): ... -class NotFoundErr(DOMException): ... -class NotSupportedErr(DOMException): ... -class InuseAttributeErr(DOMException): ... -class InvalidStateErr(DOMException): ... -class SyntaxErr(DOMException): ... -class InvalidModificationErr(DOMException): ... -class NamespaceErr(DOMException): ... -class InvalidAccessErr(DOMException): ... -class ValidationErr(DOMException): ... 
+class IndexSizeErr(DOMException): + code: Literal[1] + +class DomstringSizeErr(DOMException): + code: Literal[2] + +class HierarchyRequestErr(DOMException): + code: Literal[3] + +class WrongDocumentErr(DOMException): + code: Literal[4] + +class InvalidCharacterErr(DOMException): + code: Literal[5] + +class NoDataAllowedErr(DOMException): + code: Literal[6] + +class NoModificationAllowedErr(DOMException): + code: Literal[7] + +class NotFoundErr(DOMException): + code: Literal[8] + +class NotSupportedErr(DOMException): + code: Literal[9] + +class InuseAttributeErr(DOMException): + code: Literal[10] + +class InvalidStateErr(DOMException): + code: Literal[11] + +class SyntaxErr(DOMException): + code: Literal[12] + +class InvalidModificationErr(DOMException): + code: Literal[13] + +class NamespaceErr(DOMException): + code: Literal[14] + +class InvalidAccessErr(DOMException): + code: Literal[15] + +class ValidationErr(DOMException): + code: Literal[16] class UserDataHandler: - NODE_CLONED: int - NODE_IMPORTED: int - NODE_DELETED: int - NODE_RENAMED: int - -XML_NAMESPACE: Final[str] -XMLNS_NAMESPACE: Final[str] -XHTML_NAMESPACE: Final[str] + NODE_CLONED: Literal[1] + NODE_IMPORTED: Literal[2] + NODE_DELETED: Literal[3] + NODE_RENAMED: Literal[4] + +XML_NAMESPACE: Final = "http://www.w3.org/XML/1998/namespace" +XMLNS_NAMESPACE: Final = "http://www.w3.org/2000/xmlns/" +XHTML_NAMESPACE: Final = "http://www.w3.org/1999/xhtml" EMPTY_NAMESPACE: Final[None] EMPTY_PREFIX: Final[None] diff --git a/stdlib/xml/dom/expatbuilder.pyi b/stdlib/xml/dom/expatbuilder.pyi index 45f0af7aa979..228ad07e15ad 100644 --- a/stdlib/xml/dom/expatbuilder.pyi +++ b/stdlib/xml/dom/expatbuilder.pyi @@ -1,7 +1,11 @@ -from _typeshed import Incomplete, ReadableBuffer, SupportsRead +from _typeshed import ReadableBuffer, SupportsRead from typing import Any, NoReturn -from xml.dom.minidom import Document, DOMImplementation, Node, TypeInfo +from typing_extensions import TypeAlias +from xml.dom.minidom import Document, DocumentFragment, DOMImplementation, Element, Node, TypeInfo from xml.dom.xmlbuilder import DOMBuilderFilter, Options +from xml.parsers.expat import XMLParserType + +_Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] # same as in pyexpat TEXT_NODE = Node.TEXT_NODE CDATA_SECTION_NODE = Node.CDATA_SECTION_NODE @@ -10,45 +14,56 @@ FILTER_ACCEPT = DOMBuilderFilter.FILTER_ACCEPT FILTER_REJECT = DOMBuilderFilter.FILTER_REJECT FILTER_SKIP = DOMBuilderFilter.FILTER_SKIP FILTER_INTERRUPT = DOMBuilderFilter.FILTER_INTERRUPT -theDOMImplementation: DOMImplementation | None +theDOMImplementation: DOMImplementation class ElementInfo: - tagName: Incomplete - def __init__(self, tagName, model: Incomplete | None = None) -> None: ... - def getAttributeType(self, aname) -> TypeInfo: ... - def getAttributeTypeNS(self, namespaceURI, localName) -> TypeInfo: ... + tagName: str + def __init__(self, tagName: str, model: _Model | None = None) -> None: ... + def getAttributeType(self, aname: str) -> TypeInfo: ... + def getAttributeTypeNS(self, namespaceURI: str | None, localName: str) -> TypeInfo: ... def isElementContent(self) -> bool: ... def isEmpty(self) -> bool: ... - def isId(self, aname) -> bool: ... - def isIdNS(self, euri, ename, auri, aname) -> bool: ... + def isId(self, aname: str) -> bool: ... + def isIdNS(self, euri: str, ename: str, auri: str, aname: str) -> bool: ... 
class ExpatBuilder: document: Document # Created in self.reset() - curNode: Incomplete # Created in self.reset() + curNode: DocumentFragment | Element | Document # Created in self.reset() def __init__(self, options: Options | None = None) -> None: ... - def createParser(self): ... - def getParser(self): ... + def createParser(self) -> XMLParserType: ... + def getParser(self) -> XMLParserType: ... def reset(self) -> None: ... - def install(self, parser) -> None: ... + def install(self, parser: XMLParserType) -> None: ... def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: ... def parseString(self, string: str | ReadableBuffer) -> Document: ... - def start_doctype_decl_handler(self, doctypeName, systemId, publicId, has_internal_subset) -> None: ... + def start_doctype_decl_handler( + self, doctypeName: str, systemId: str | None, publicId: str | None, has_internal_subset: bool + ) -> None: ... def end_doctype_decl_handler(self) -> None: ... - def pi_handler(self, target, data) -> None: ... - def character_data_handler_cdata(self, data) -> None: ... - def character_data_handler(self, data) -> None: ... + def pi_handler(self, target: str, data: str) -> None: ... + def character_data_handler_cdata(self, data: str) -> None: ... + def character_data_handler(self, data: str) -> None: ... def start_cdata_section_handler(self) -> None: ... def end_cdata_section_handler(self) -> None: ... - def entity_decl_handler(self, entityName, is_parameter_entity, value, base, systemId, publicId, notationName) -> None: ... - def notation_decl_handler(self, notationName, base, systemId, publicId) -> None: ... - def comment_handler(self, data) -> None: ... - def external_entity_ref_handler(self, context, base, systemId, publicId) -> int: ... - def first_element_handler(self, name, attributes) -> None: ... - def start_element_handler(self, name, attributes) -> None: ... - def end_element_handler(self, name) -> None: ... - def element_decl_handler(self, name, model) -> None: ... - def attlist_decl_handler(self, elem, name, type, default, required) -> None: ... - def xml_decl_handler(self, version, encoding, standalone) -> None: ... + def entity_decl_handler( + self, + entityName: str, + is_parameter_entity: bool, + value: str | None, + base: str | None, + systemId: str, + publicId: str | None, + notationName: str | None, + ) -> None: ... + def notation_decl_handler(self, notationName: str, base: str | None, systemId: str, publicId: str | None) -> None: ... + def comment_handler(self, data: str) -> None: ... + def external_entity_ref_handler(self, context: str, base: str | None, systemId: str | None, publicId: str | None) -> int: ... + def first_element_handler(self, name: str, attributes: list[str]) -> None: ... + def start_element_handler(self, name: str, attributes: list[str]) -> None: ... + def end_element_handler(self, name: str) -> None: ... + def element_decl_handler(self, name: str, model: _Model) -> None: ... + def attlist_decl_handler(self, elem: str, name: str, type: str, default: str | None, required: bool) -> None: ... + def xml_decl_handler(self, version: str, encoding: str | None, standalone: int) -> None: ... class FilterVisibilityController: filter: DOMBuilderFilter @@ -57,7 +72,7 @@ class FilterVisibilityController: def acceptNode(self, node: Node) -> int: ... class FilterCrutch: - def __init__(self, builder) -> None: ... + def __init__(self, builder: ExpatBuilder) -> None: ... class Rejecter(FilterCrutch): def start_element_handler(self, *args: Any) -> None: ... 
@@ -68,33 +83,39 @@ class Skipper(FilterCrutch): def end_element_handler(self, *args: Any) -> None: ... class FragmentBuilder(ExpatBuilder): - fragment: Incomplete | None - originalDocument: Incomplete - context: Incomplete - def __init__(self, context, options: Options | None = None) -> None: ... + fragment: DocumentFragment | None + originalDocument: Document + context: Node + def __init__(self, context: Node, options: Options | None = None) -> None: ... + def reset(self) -> None: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> DocumentFragment: ... # type: ignore[override] + def parseString(self, string: ReadableBuffer | str) -> DocumentFragment: ... # type: ignore[override] + def external_entity_ref_handler(self, context: str, base: str | None, systemId: str | None, publicId: str | None) -> int: ... class Namespaces: - def createParser(self): ... - def install(self, parser) -> None: ... - def start_namespace_decl_handler(self, prefix, uri) -> None: ... - def start_element_handler(self, name, attributes) -> None: ... - def end_element_handler(self, name) -> None: ... + def createParser(self) -> XMLParserType: ... + def install(self, parser: XMLParserType) -> None: ... + def start_namespace_decl_handler(self, prefix: str | None, uri: str) -> None: ... + def start_element_handler(self, name: str, attributes: list[str]) -> None: ... + def end_element_handler(self, name: str) -> None: ... # only exists if __debug__ class ExpatBuilderNS(Namespaces, ExpatBuilder): ... class FragmentBuilderNS(Namespaces, FragmentBuilder): ... class ParseEscape(Exception): ... class InternalSubsetExtractor(ExpatBuilder): - subset: Any | None - def getSubset(self) -> Any | None: ... + subset: str | list[str] | None = None + def getSubset(self) -> str: ... def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> None: ... # type: ignore[override] def parseString(self, string: str | ReadableBuffer) -> None: ... # type: ignore[override] - def start_doctype_decl_handler(self, name, publicId, systemId, has_internal_subset) -> None: ... # type: ignore[override] + def start_doctype_decl_handler( # type: ignore[override] + self, name: str, publicId: str | None, systemId: str | None, has_internal_subset: bool + ) -> None: ... def end_doctype_decl_handler(self) -> NoReturn: ... - def start_element_handler(self, name, attrs) -> NoReturn: ... + def start_element_handler(self, name: str, attrs: list[str]) -> NoReturn: ... -def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True): ... -def parseString(string: str | ReadableBuffer, namespaces: bool = True): ... -def parseFragment(file, context, namespaces: bool = True): ... -def parseFragmentString(string: str, context, namespaces: bool = True): ... +def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True) -> Document: ... +def parseString(string: str | ReadableBuffer, namespaces: bool = True) -> Document: ... +def parseFragment(file: str | SupportsRead[ReadableBuffer | str], context: Node, namespaces: bool = True) -> DocumentFragment: ... +def parseFragmentString(string: str | ReadableBuffer, context: Node, namespaces: bool = True) -> DocumentFragment: ... def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... 
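xml.dom.expatbuilder is the internal machinery behind xml.dom.minidom.parse()/parseString(); with the return annotations above, its module-level helpers now resolve to concrete DOM classes (Document for parse/parseString, DocumentFragment for the fragment parsers). A small sketch, illustrative only and not part of the patch:

    # Illustrative example exercising the annotated parseString() above.
    from xml.dom import expatbuilder
    from xml.dom.minidom import Document

    doc: Document = expatbuilder.parseString("<root><child/></root>")
    print(doc.toxml())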
diff --git a/stdlib/xml/dom/minidom.pyi b/stdlib/xml/dom/minidom.pyi index d7da59a7ed4b..51bbf4993657 100644 --- a/stdlib/xml/dom/minidom.pyi +++ b/stdlib/xml/dom/minidom.pyi @@ -1,33 +1,92 @@ import sys import xml.dom +from _collections_abc import dict_keys, dict_values from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite -from typing import ClassVar, Literal, NoReturn, TypeVar, overload -from typing_extensions import Self -from xml.dom.minicompat import NodeList +from collections.abc import Iterable, Sequence +from types import TracebackType +from typing import Any, ClassVar, Generic, Literal, NoReturn, Protocol, TypeVar, overload +from typing_extensions import Self, TypeAlias +from xml.dom.minicompat import EmptyNodeList, NodeList from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS from xml.sax.xmlreader import XMLReader +_NSName: TypeAlias = tuple[str | None, str] + +# Entity can also have children, but it's not implemented the same way as the +# others, so is deliberately omitted here. +_NodesWithChildren: TypeAlias = DocumentFragment | Attr | Element | Document +_NodesThatAreChildren: TypeAlias = CDATASection | Comment | DocumentType | Element | Notation | ProcessingInstruction | Text + +_AttrChildren: TypeAlias = Text # Also EntityReference, but we don't implement it +_ElementChildren: TypeAlias = Element | ProcessingInstruction | Comment | Text | CDATASection +_EntityChildren: TypeAlias = Text # I think; documentation is a little unclear +_DocumentFragmentChildren: TypeAlias = Element | Text | CDATASection | ProcessingInstruction | Comment | Notation +_DocumentChildren: TypeAlias = Comment | DocumentType | Element | ProcessingInstruction + _N = TypeVar("_N", bound=Node) +_ChildNodeVar = TypeVar("_ChildNodeVar", bound=_NodesThatAreChildren) +_ChildNodePlusFragmentVar = TypeVar("_ChildNodePlusFragmentVar", bound=_NodesThatAreChildren | DocumentFragment) +_DocumentChildrenVar = TypeVar("_DocumentChildrenVar", bound=_DocumentChildren) +_ImportableNodeVar = TypeVar( + "_ImportableNodeVar", + bound=DocumentFragment + | Attr + | Element + | ProcessingInstruction + | CharacterData + | Text + | Comment + | CDATASection + | Entity + | Notation, +) + +class _DOMErrorHandler(Protocol): + def handleError(self, error: Exception) -> bool: ... + +class _UserDataHandler(Protocol): + def handle(self, operation: int, key: str, data: Any, src: Node, dst: Node) -> None: ... def parse( file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None ) -> Document: ... def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None) -> Document: ... -def getDOMImplementation(features=None) -> DOMImplementation | None: ... +@overload +def getDOMImplementation(features: None = None) -> DOMImplementation: ... +@overload +def getDOMImplementation(features: str | Iterable[tuple[str, str | None]]) -> DOMImplementation | None: ... 
class Node(xml.dom.Node): - namespaceURI: str | None - parentNode: Incomplete - ownerDocument: Incomplete - nextSibling: Incomplete - previousSibling: Incomplete - prefix: Incomplete + parentNode: _NodesWithChildren | Entity | None + ownerDocument: Document | None + nextSibling: _NodesThatAreChildren | None + previousSibling: _NodesThatAreChildren | None + namespaceURI: str | None # non-null only for Element and Attr + prefix: str | None # non-null only for NS Element and Attr + + # These aren't defined on Node, but they exist on all Node subclasses + # and various methods of Node require them to exist. + childNodes: ( + NodeList[_DocumentFragmentChildren] + | NodeList[_AttrChildren] + | NodeList[_ElementChildren] + | NodeList[_DocumentChildren] + | NodeList[_EntityChildren] + | EmptyNodeList + ) + nodeType: ClassVar[Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]] + nodeName: str | None # only possibly None on DocumentType + + # Not defined on Node, but exist on all Node subclasses. + nodeValue: str | None # non-null for Attr, ProcessingInstruction, Text, Comment, and CDATASection + attributes: NamedNodeMap | None # non-null only for Element + @property - def firstChild(self) -> Node | None: ... + def firstChild(self) -> _NodesThatAreChildren | None: ... @property - def lastChild(self) -> Node | None: ... + def lastChild(self) -> _NodesThatAreChildren | None: ... @property - def localName(self) -> str | None: ... + def localName(self) -> str | None: ... # non-null only for Element and Attr def __bool__(self) -> Literal[True]: ... if sys.version_info >= (3, 9): @overload @@ -95,62 +154,125 @@ class Node(xml.dom.Node): ) -> bytes: ... def hasChildNodes(self) -> bool: ... - def insertBefore(self, newChild, refChild): ... - def appendChild(self, node: _N) -> _N: ... - def replaceChild(self, newChild, oldChild): ... - def removeChild(self, oldChild): ... - def normalize(self) -> None: ... - def cloneNode(self, deep): ... - def isSupported(self, feature, version): ... - def isSameNode(self, other): ... - def getInterface(self, feature): ... - def getUserData(self, key): ... - def setUserData(self, key, data, handler): ... - childNodes: Incomplete + def insertBefore( # type: ignore[misc] + self: _NodesWithChildren, # pyright: ignore[reportGeneralTypeIssues] + newChild: _ChildNodePlusFragmentVar, + refChild: _NodesThatAreChildren | None, + ) -> _ChildNodePlusFragmentVar: ... + def appendChild( # type: ignore[misc] + self: _NodesWithChildren, node: _ChildNodePlusFragmentVar # pyright: ignore[reportGeneralTypeIssues] + ) -> _ChildNodePlusFragmentVar: ... + @overload + def replaceChild( # type: ignore[misc] + self: _NodesWithChildren, newChild: DocumentFragment, oldChild: _ChildNodeVar + ) -> _ChildNodeVar | DocumentFragment: ... + @overload + def replaceChild( # type: ignore[misc] + self: _NodesWithChildren, newChild: _NodesThatAreChildren, oldChild: _ChildNodeVar + ) -> _ChildNodeVar | None: ... + def removeChild(self: _NodesWithChildren, oldChild: _ChildNodeVar) -> _ChildNodeVar: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def normalize(self: _NodesWithChildren) -> None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def cloneNode(self, deep: bool) -> Self | None: ... + def isSupported(self, feature: str, version: str | None) -> bool: ... + def isSameNode(self, other: Node) -> bool: ... + def getInterface(self, feature: str) -> Self | None: ... + def getUserData(self, key: str) -> Any | None: ... 
+ def setUserData(self, key: str, data: Any, handler: _UserDataHandler) -> Any: ... def unlink(self) -> None: ... def __enter__(self) -> Self: ... - def __exit__(self, et, ev, tb) -> None: ... + def __exit__(self, et: type[BaseException] | None, ev: BaseException | None, tb: TracebackType | None) -> None: ... + +_DFChildrenVar = TypeVar("_DFChildrenVar", bound=_DocumentFragmentChildren) +_DFChildrenPlusFragment = TypeVar("_DFChildrenPlusFragment", bound=_DocumentFragmentChildren | DocumentFragment) class DocumentFragment(Node): - nodeType: int - nodeName: str - nodeValue: Incomplete - attributes: Incomplete - parentNode: Incomplete - childNodes: Incomplete + nodeType: ClassVar[Literal[11]] + nodeName: Literal["#document-fragment"] + nodeValue: None + attributes: None + + parentNode: None + nextSibling: None + previousSibling: None + childNodes: NodeList[_DocumentFragmentChildren] + @property + def firstChild(self) -> _DocumentFragmentChildren | None: ... + @property + def lastChild(self) -> _DocumentFragmentChildren | None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... def __init__(self) -> None: ... + def insertBefore( # type: ignore[override] + self, newChild: _DFChildrenPlusFragment, refChild: _DocumentFragmentChildren | None + ) -> _DFChildrenPlusFragment: ... + def appendChild(self, node: _DFChildrenPlusFragment) -> _DFChildrenPlusFragment: ... # type: ignore[override] + @overload # type: ignore[override] + def replaceChild(self, newChild: DocumentFragment, oldChild: _DFChildrenVar) -> _DFChildrenVar | DocumentFragment: ... + @overload + def replaceChild(self, newChild: _DocumentFragmentChildren, oldChild: _DFChildrenVar) -> _DFChildrenVar | None: ... # type: ignore[override] + def removeChild(self, oldChild: _DFChildrenVar) -> _DFChildrenVar: ... # type: ignore[override] + +_AttrChildrenVar = TypeVar("_AttrChildrenVar", bound=_AttrChildren) +_AttrChildrenPlusFragment = TypeVar("_AttrChildrenPlusFragment", bound=_AttrChildren | DocumentFragment) class Attr(Node): - name: str - nodeType: int - attributes: Incomplete - specified: bool - ownerElement: Incomplete + nodeType: ClassVar[Literal[2]] + nodeName: str # same as Attr.name + nodeValue: str # same as Attr.value + attributes: None + + parentNode: None + nextSibling: None + previousSibling: None + childNodes: NodeList[_AttrChildren] + @property + def firstChild(self) -> _AttrChildren | None: ... + @property + def lastChild(self) -> _AttrChildren | None: ... + namespaceURI: str | None - childNodes: Incomplete - nodeName: Incomplete - nodeValue: str + prefix: str | None + @property + def localName(self) -> str: ... + + name: str value: str - prefix: Incomplete + specified: bool + ownerElement: Element | None + def __init__( - self, qName: str, namespaceURI: str | None = None, localName: str | None = None, prefix: Incomplete | None = None + self, qName: str, namespaceURI: str | None = None, localName: str | None = None, prefix: str | None = None ) -> None: ... def unlink(self) -> None: ... @property def isId(self) -> bool: ... @property - def schemaType(self): ... + def schemaType(self) -> TypeInfo: ... + def insertBefore(self, newChild: _AttrChildrenPlusFragment, refChild: _AttrChildren | None) -> _AttrChildrenPlusFragment: ... # type: ignore[override] + def appendChild(self, node: _AttrChildrenPlusFragment) -> _AttrChildrenPlusFragment: ... 
# type: ignore[override] + @overload # type: ignore[override] + def replaceChild(self, newChild: DocumentFragment, oldChild: _AttrChildrenVar) -> _AttrChildrenVar | DocumentFragment: ... + @overload + def replaceChild(self, newChild: _AttrChildren, oldChild: _AttrChildrenVar) -> _AttrChildrenVar | None: ... # type: ignore[override] + def removeChild(self, oldChild: _AttrChildrenVar) -> _AttrChildrenVar: ... # type: ignore[override] +# In the DOM, this interface isn't specific to Attr, but our implementation is +# because that's the only place we use it. class NamedNodeMap: - def __init__(self, attrs, attrsNS, ownerElement) -> None: ... - def item(self, index): ... - def items(self): ... - def itemsNS(self): ... - def __contains__(self, key): ... - def keys(self): ... - def keysNS(self): ... - def values(self): ... - def get(self, name: str, value: Incomplete | None = None): ... + def __init__(self, attrs: dict[str, Attr], attrsNS: dict[_NSName, Attr], ownerElement: Element) -> None: ... + @property + def length(self) -> int: ... + def item(self, index: int) -> Node | None: ... + def items(self) -> list[tuple[str, str]]: ... + def itemsNS(self) -> list[tuple[_NSName, str]]: ... + def __contains__(self, key: str | _NSName) -> bool: ... + def keys(self) -> dict_keys[str, Attr]: ... + def keysNS(self) -> dict_keys[_NSName, Attr]: ... + def values(self) -> dict_values[str, Attr]: ... + def get(self, name: str, value: Attr | None = None) -> Attr | None: ... __hash__: ClassVar[None] # type: ignore[assignment] def __len__(self) -> int: ... def __eq__(self, other: object) -> bool: ... @@ -158,135 +280,227 @@ class NamedNodeMap: def __gt__(self, other: NamedNodeMap) -> bool: ... def __le__(self, other: NamedNodeMap) -> bool: ... def __lt__(self, other: NamedNodeMap) -> bool: ... - def __getitem__(self, attname_or_tuple: tuple[str, str | None] | str): ... + def __getitem__(self, attname_or_tuple: _NSName | str) -> Attr: ... def __setitem__(self, attname: str, value: Attr | str) -> None: ... def getNamedItem(self, name: str) -> Attr | None: ... - def getNamedItemNS(self, namespaceURI: str, localName: str | None) -> Attr | None: ... + def getNamedItemNS(self, namespaceURI: str | None, localName: str) -> Attr | None: ... def removeNamedItem(self, name: str) -> Attr: ... - def removeNamedItemNS(self, namespaceURI: str, localName: str | None): ... - def setNamedItem(self, node: Attr) -> Attr: ... - def setNamedItemNS(self, node: Attr) -> Attr: ... - def __delitem__(self, attname_or_tuple: tuple[str, str | None] | str) -> None: ... - @property - def length(self) -> int: ... + def removeNamedItemNS(self, namespaceURI: str | None, localName: str) -> Attr: ... + def setNamedItem(self, node: Attr) -> Attr | None: ... + def setNamedItemNS(self, node: Attr) -> Attr | None: ... + def __delitem__(self, attname_or_tuple: _NSName | str) -> None: ... AttributeList = NamedNodeMap class TypeInfo: - namespace: Incomplete | None - name: str - def __init__(self, namespace: Incomplete | None, name: str) -> None: ... + namespace: str | None + name: str | None + def __init__(self, namespace: Incomplete | None, name: str | None) -> None: ... 
+ +_ElementChildrenVar = TypeVar("_ElementChildrenVar", bound=_ElementChildren) +_ElementChildrenPlusFragment = TypeVar("_ElementChildrenPlusFragment", bound=_ElementChildren | DocumentFragment) class Element(Node): - nodeType: int - nodeValue: Incomplete - schemaType: Incomplete - parentNode: Incomplete - tagName: str - nodeName: str - prefix: Incomplete + nodeType: ClassVar[Literal[1]] + nodeName: str # same as Element.tagName + nodeValue: None + @property + def attributes(self) -> NamedNodeMap: ... # type: ignore[override] + + parentNode: Document | Element | DocumentFragment | None + nextSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + childNodes: NodeList[_ElementChildren] + @property + def firstChild(self) -> _ElementChildren | None: ... + @property + def lastChild(self) -> _ElementChildren | None: ... + namespaceURI: str | None - childNodes: Incomplete - nextSibling: Incomplete + prefix: str | None + @property + def localName(self) -> str: ... + + schemaType: TypeInfo + tagName: str + def __init__( - self, tagName, namespaceURI: str | None = None, prefix: Incomplete | None = None, localName: Incomplete | None = None + self, tagName: str, namespaceURI: str | None = None, prefix: str | None = None, localName: str | None = None ) -> None: ... def unlink(self) -> None: ... def getAttribute(self, attname: str) -> str: ... - def getAttributeNS(self, namespaceURI: str, localName): ... + def getAttributeNS(self, namespaceURI: str | None, localName: str) -> str: ... def setAttribute(self, attname: str, value: str) -> None: ... - def setAttributeNS(self, namespaceURI: str, qualifiedName: str, value) -> None: ... - def getAttributeNode(self, attrname: str): ... - def getAttributeNodeNS(self, namespaceURI: str, localName): ... - def setAttributeNode(self, attr): ... - setAttributeNodeNS: Incomplete + def setAttributeNS(self, namespaceURI: str | None, qualifiedName: str, value: str) -> None: ... + def getAttributeNode(self, attrname: str) -> Attr | None: ... + def getAttributeNodeNS(self, namespaceURI: str | None, localName: str) -> Attr | None: ... + def setAttributeNode(self, attr: Attr) -> Attr | None: ... + setAttributeNodeNS = setAttributeNode def removeAttribute(self, name: str) -> None: ... - def removeAttributeNS(self, namespaceURI: str, localName) -> None: ... - def removeAttributeNode(self, node): ... - removeAttributeNodeNS: Incomplete + def removeAttributeNS(self, namespaceURI: str | None, localName: str) -> None: ... + def removeAttributeNode(self, node: Attr) -> Attr: ... + removeAttributeNodeNS = removeAttributeNode def hasAttribute(self, name: str) -> bool: ... - def hasAttributeNS(self, namespaceURI: str, localName) -> bool: ... + def hasAttributeNS(self, namespaceURI: str | None, localName: str) -> bool: ... def getElementsByTagName(self, name: str) -> NodeList[Element]: ... - def getElementsByTagNameNS(self, namespaceURI: str, localName: str) -> NodeList[Element]: ... + def getElementsByTagNameNS(self, namespaceURI: str | None, localName: str) -> NodeList[Element]: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def hasAttributes(self) -> bool: ... - def setIdAttribute(self, name) -> None: ... - def setIdAttributeNS(self, namespaceURI: str, localName) -> None: ... - def setIdAttributeNode(self, idAttr) -> None: ... - @property - def attributes(self) -> NamedNodeMap: ... 
+ def setIdAttribute(self, name: str) -> None: ... + def setIdAttributeNS(self, namespaceURI: str | None, localName: str) -> None: ... + def setIdAttributeNode(self, idAttr: Attr) -> None: ... + def insertBefore( # type: ignore[override] + self, newChild: _ElementChildrenPlusFragment, refChild: _ElementChildren | None + ) -> _ElementChildrenPlusFragment: ... + def appendChild(self, node: _ElementChildrenPlusFragment) -> _ElementChildrenPlusFragment: ... # type: ignore[override] + @overload # type: ignore[override] + def replaceChild( + self, newChild: DocumentFragment, oldChild: _ElementChildrenVar + ) -> _ElementChildrenVar | DocumentFragment: ... + @overload + def replaceChild(self, newChild: _ElementChildren, oldChild: _ElementChildrenVar) -> _ElementChildrenVar | None: ... # type: ignore[override] + def removeChild(self, oldChild: _ElementChildrenVar) -> _ElementChildrenVar: ... # type: ignore[override] class Childless: - attributes: Incomplete - childNodes: Incomplete - firstChild: Incomplete - lastChild: Incomplete - def appendChild(self, node) -> NoReturn: ... - def hasChildNodes(self) -> bool: ... - def insertBefore(self, newChild, refChild) -> NoReturn: ... - def removeChild(self, oldChild) -> NoReturn: ... + attributes: None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + def appendChild(self, node: _NodesThatAreChildren | DocumentFragment) -> NoReturn: ... + def hasChildNodes(self) -> Literal[False]: ... + def insertBefore( + self, newChild: _NodesThatAreChildren | DocumentFragment, refChild: _NodesThatAreChildren | None + ) -> NoReturn: ... + def removeChild(self, oldChild: _NodesThatAreChildren) -> NoReturn: ... def normalize(self) -> None: ... - def replaceChild(self, newChild, oldChild) -> NoReturn: ... + def replaceChild(self, newChild: _NodesThatAreChildren | DocumentFragment, oldChild: _NodesThatAreChildren) -> NoReturn: ... class ProcessingInstruction(Childless, Node): - nodeType: int - target: Incomplete - data: Incomplete - def __init__(self, target, data) -> None: ... - nodeValue: Incomplete - nodeName: Incomplete + nodeType: ClassVar[Literal[7]] + nodeName: str # same as ProcessingInstruction.target + nodeValue: str # same as ProcessingInstruction.data + attributes: None + + parentNode: Document | Element | DocumentFragment | None + nextSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + target: str + data: str + + def __init__(self, target: str, data: str) -> None: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class CharacterData(Childless, Node): - ownerDocument: Incomplete - previousSibling: Incomplete + nodeValue: str + attributes: None + + childNodes: EmptyNodeList + nextSibling: _NodesThatAreChildren | None + previousSibling: _NodesThatAreChildren | None + + @property + def localName(self) -> None: ... + + ownerDocument: Document | None + data: str + def __init__(self) -> None: ... + @property + def length(self) -> int: ... def __len__(self) -> int: ... - data: str - nodeValue: Incomplete def substringData(self, offset: int, count: int) -> str: ... 
def appendData(self, arg: str) -> None: ... def insertData(self, offset: int, arg: str) -> None: ... def deleteData(self, offset: int, count: int) -> None: ... def replaceData(self, offset: int, count: int, arg: str) -> None: ... - @property - def length(self) -> int: ... class Text(CharacterData): - nodeType: int - nodeName: str - attributes: Incomplete - data: Incomplete + nodeType: ClassVar[Literal[3]] + nodeName: Literal["#text"] + nodeValue: str # same as CharacterData.data, the content of the text node + attributes: None + + parentNode: Attr | Element | DocumentFragment | None + nextSibling: _DocumentFragmentChildren | _ElementChildren | _AttrChildren | None + previousSibling: _DocumentFragmentChildren | _ElementChildren | _AttrChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + data: str def splitText(self, offset: int) -> Self: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... - def replaceWholeText(self, content) -> Self | None: ... + def replaceWholeText(self, content: str) -> Self | None: ... @property def isWhitespaceInElementContent(self) -> bool: ... @property def wholeText(self) -> str: ... class Comment(CharacterData): - nodeType: int - nodeName: str - def __init__(self, data) -> None: ... + nodeType: ClassVar[Literal[8]] + nodeName: Literal["#comment"] + nodeValue: str # same as CharacterData.data, the content of the comment + attributes: None + + parentNode: Document | Element | DocumentFragment | None + nextSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + def __init__(self, data: str) -> None: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class CDATASection(Text): - nodeType: int - nodeName: str + nodeType: ClassVar[Literal[4]] # type: ignore[assignment] + nodeName: Literal["#cdata-section"] # type: ignore[assignment] + nodeValue: str # same as CharacterData.data, the content of the CDATA Section + attributes: None + + parentNode: Element | DocumentFragment | None + nextSibling: _DocumentFragmentChildren | _ElementChildren | None + previousSibling: _DocumentFragmentChildren | _ElementChildren | None + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... -class ReadOnlySequentialNamedNodeMap: - def __init__(self, seq=()) -> None: ... +class ReadOnlySequentialNamedNodeMap(Generic[_N]): + def __init__(self, seq: Sequence[_N] = ()) -> None: ... def __len__(self) -> int: ... - def getNamedItem(self, name): ... - def getNamedItemNS(self, namespaceURI: str, localName): ... - def __getitem__(self, name_or_tuple): ... - def item(self, index): ... - def removeNamedItem(self, name) -> None: ... - def removeNamedItemNS(self, namespaceURI: str, localName) -> None: ... - def setNamedItem(self, node) -> None: ... - def setNamedItemNS(self, node) -> None: ... + def getNamedItem(self, name: str) -> _N | None: ... 
+ def getNamedItemNS(self, namespaceURI: str | None, localName: str) -> _N | None: ... + def __getitem__(self, name_or_tuple: str | _NSName) -> _N | None: ... + def item(self, index: int) -> _N | None: ... + def removeNamedItem(self, name: str) -> NoReturn: ... + def removeNamedItemNS(self, namespaceURI: str | None, localName: str) -> NoReturn: ... + def setNamedItem(self, node: Node) -> NoReturn: ... + def setNamedItemNS(self, node: Node) -> NoReturn: ... @property def length(self) -> int: ... @@ -295,38 +509,85 @@ class Identified: systemId: str | None class DocumentType(Identified, Childless, Node): - nodeType: int - nodeValue: Incomplete - name: Incomplete - internalSubset: Incomplete - entities: Incomplete - notations: Incomplete - nodeName: Incomplete - def __init__(self, qualifiedName: str) -> None: ... - def cloneNode(self, deep): ... + nodeType: ClassVar[Literal[10]] + nodeName: str | None # same as DocumentType.name + nodeValue: None + attributes: None + + parentNode: Document | None + nextSibling: _DocumentChildren | None + previousSibling: _DocumentChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + name: str | None + internalSubset: str | None + entities: ReadOnlySequentialNamedNodeMap[Entity] + notations: ReadOnlySequentialNamedNodeMap[Notation] + + def __init__(self, qualifiedName: str | None) -> None: ... + def cloneNode(self, deep: bool) -> DocumentType | None: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class Entity(Identified, Node): - attributes: Incomplete - nodeType: int - nodeValue: Incomplete - actualEncoding: Incomplete - encoding: Incomplete - version: Incomplete - nodeName: Incomplete - notationName: Incomplete - childNodes: Incomplete - def __init__(self, name, publicId, systemId, notation) -> None: ... - def appendChild(self, newChild) -> NoReturn: ... - def insertBefore(self, newChild, refChild) -> NoReturn: ... - def removeChild(self, oldChild) -> NoReturn: ... - def replaceChild(self, newChild, oldChild) -> NoReturn: ... + nodeType: ClassVar[Literal[6]] + nodeName: str # entity name + nodeValue: None + attributes: None + + parentNode: None + nextSibling: None + previousSibling: None + childNodes: NodeList[_EntityChildren] + @property + def firstChild(self) -> _EntityChildren | None: ... + @property + def lastChild(self) -> _EntityChildren | None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + actualEncoding: str | None + encoding: str | None + version: str | None + notationName: str | None + + def __init__(self, name: str, publicId: str | None, systemId: str | None, notation: str | None) -> None: ... + def appendChild(self, newChild: _EntityChildren) -> NoReturn: ... # type: ignore[override] + def insertBefore(self, newChild: _EntityChildren, refChild: _EntityChildren | None) -> NoReturn: ... # type: ignore[override] + def removeChild(self, oldChild: _EntityChildren) -> NoReturn: ... # type: ignore[override] + def replaceChild(self, newChild: _EntityChildren, oldChild: _EntityChildren) -> NoReturn: ... # type: ignore[override] class Notation(Identified, Childless, Node): - nodeType: int - nodeValue: Incomplete - nodeName: Incomplete - def __init__(self, name, publicId, systemId) -> None: ... 
+ nodeType: ClassVar[Literal[12]] + nodeName: str # notation name + nodeValue: None + attributes: None + + parentNode: DocumentFragment | None + nextSibling: _DocumentFragmentChildren | None + previousSibling: _DocumentFragmentChildren | None + childNodes: EmptyNodeList + @property + def firstChild(self) -> None: ... + @property + def lastChild(self) -> None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + def __init__(self, name: str, publicId: str | None, systemId: str | None) -> None: ... class DOMImplementation(DOMImplementationLS): def hasFeature(self, feature: str, version: str | None) -> bool: ... @@ -335,53 +596,67 @@ class DOMImplementation(DOMImplementationLS): def getInterface(self, feature: str) -> Self | None: ... class ElementInfo: - tagName: Incomplete - def __init__(self, name) -> None: ... - def getAttributeType(self, aname): ... - def getAttributeTypeNS(self, namespaceURI: str, localName): ... - def isElementContent(self): ... - def isEmpty(self): ... - def isId(self, aname): ... - def isIdNS(self, namespaceURI: str, localName): ... + tagName: str + def __init__(self, name: str) -> None: ... + def getAttributeType(self, aname: str) -> TypeInfo: ... + def getAttributeTypeNS(self, namespaceURI: str | None, localName: str) -> TypeInfo: ... + def isElementContent(self) -> bool: ... + def isEmpty(self) -> bool: ... + def isId(self, aname: str) -> bool: ... + def isIdNS(self, namespaceURI: str | None, localName: str) -> bool: ... + +_DocumentChildrenPlusFragment = TypeVar("_DocumentChildrenPlusFragment", bound=_DocumentChildren | DocumentFragment) class Document(Node, DocumentLS): - implementation: Incomplete - nodeType: int - nodeName: str - nodeValue: Incomplete - attributes: Incomplete - parentNode: Incomplete - previousSibling: Incomplete - nextSibling: Incomplete - actualEncoding: Incomplete + nodeType: ClassVar[Literal[9]] + nodeName: Literal["#document"] + nodeValue: None + attributes: None + + parentNode: None + previousSibling: None + nextSibling: None + childNodes: NodeList[_DocumentChildren] + @property + def firstChild(self) -> _DocumentChildren | None: ... + @property + def lastChild(self) -> _DocumentChildren | None: ... + + namespaceURI: None + prefix: None + @property + def localName(self) -> None: ... + + implementation: DOMImplementation + actualEncoding: str | None encoding: str | None standalone: bool | None - version: Incomplete + version: str | None strictErrorChecking: bool - errorHandler: Incomplete - documentURI: Incomplete + errorHandler: _DOMErrorHandler | None + documentURI: str | None doctype: DocumentType | None - childNodes: Incomplete + documentElement: Element | None + def __init__(self) -> None: ... - def appendChild(self, node: _N) -> _N: ... - documentElement: Incomplete - def removeChild(self, oldChild): ... + def appendChild(self, node: _DocumentChildrenVar) -> _DocumentChildrenVar: ... # type: ignore[override] + def removeChild(self, oldChild: _DocumentChildrenVar) -> _DocumentChildrenVar: ... # type: ignore[override] def unlink(self) -> None: ... - def cloneNode(self, deep): ... + def cloneNode(self, deep: bool) -> Document | None: ... def createDocumentFragment(self) -> DocumentFragment: ... def createElement(self, tagName: str) -> Element: ... def createTextNode(self, data: str) -> Text: ... def createCDATASection(self, data: str) -> CDATASection: ... def createComment(self, data: str) -> Comment: ... - def createProcessingInstruction(self, target, data): ... 
- def createAttribute(self, qName) -> Attr: ... - def createElementNS(self, namespaceURI: str, qualifiedName: str): ... - def createAttributeNS(self, namespaceURI: str, qualifiedName: str) -> Attr: ... + def createProcessingInstruction(self, target: str, data: str) -> ProcessingInstruction: ... + def createAttribute(self, qName: str) -> Attr: ... + def createElementNS(self, namespaceURI: str | None, qualifiedName: str) -> Element: ... + def createAttributeNS(self, namespaceURI: str | None, qualifiedName: str) -> Attr: ... def getElementById(self, id: str) -> Element | None: ... def getElementsByTagName(self, name: str) -> NodeList[Element]: ... - def getElementsByTagNameNS(self, namespaceURI: str, localName: str) -> NodeList[Element]: ... + def getElementsByTagNameNS(self, namespaceURI: str | None, localName: str) -> NodeList[Element]: ... def isSupported(self, feature: str, version: str | None) -> bool: ... - def importNode(self, node, deep): ... + def importNode(self, node: _ImportableNodeVar, deep: bool) -> _ImportableNodeVar: ... if sys.version_info >= (3, 9): def writexml( self, @@ -402,4 +677,18 @@ class Document(Node, DocumentLS): encoding: Incomplete | None = None, ) -> None: ... - def renameNode(self, n, namespaceURI: str, name): ... + @overload + def renameNode(self, n: Element, namespaceURI: str, name: str) -> Element: ... + @overload + def renameNode(self, n: Attr, namespaceURI: str, name: str) -> Attr: ... + @overload + def renameNode(self, n: Element | Attr, namespaceURI: str, name: str) -> Element | Attr: ... + def insertBefore( + self, newChild: _DocumentChildrenPlusFragment, refChild: _DocumentChildren | None # type: ignore[override] + ) -> _DocumentChildrenPlusFragment: ... + @overload # type: ignore[override] + def replaceChild( + self, newChild: DocumentFragment, oldChild: _DocumentChildrenVar + ) -> _DocumentChildrenVar | DocumentFragment: ... + @overload + def replaceChild(self, newChild: _DocumentChildren, oldChild: _DocumentChildrenVar) -> _DocumentChildrenVar | None: ... 
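
A minimal usage sketch (illustrative only, not part of the diff) of what the tightened minidom annotations give at a call site. The inferred types noted in the comments assume a checker such as mypy or pyright reading the stubs above; the element and attribute names are made up.

    # Illustrative sketch, not the patch itself.
    from xml.dom.minidom import Document

    doc = Document()
    root = doc.appendChild(doc.createElement("root"))  # inferred: Element
    root.setAttribute("id", "r1")
    attr = root.getAttributeNode("id")                 # inferred: Attr | None (was an untyped return)
    if attr is not None:
        print(attr.value)                              # Attr.value is a plain str
    print(doc.firstChild is root)                      # firstChild narrows to the document's possible child nodes
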
diff --git a/stdlib/xml/dom/pulldom.pyi b/stdlib/xml/dom/pulldom.pyi index 50250de5cb2f..d9458654c185 100644 --- a/stdlib/xml/dom/pulldom.pyi +++ b/stdlib/xml/dom/pulldom.pyi @@ -1,11 +1,12 @@ import sys -from _typeshed import Incomplete, SupportsRead -from collections.abc import Sequence -from typing import Final, Literal -from typing_extensions import TypeAlias -from xml.dom.minidom import Document, DOMImplementation, Element, Text +from _typeshed import Incomplete, Unused +from collections.abc import MutableSequence, Sequence +from typing import Final, Literal, NoReturn +from typing_extensions import Self, TypeAlias +from xml.dom.minidom import Comment, Document, DOMImplementation, Element, ProcessingInstruction, Text +from xml.sax import _SupportsReadClose from xml.sax.handler import ContentHandler -from xml.sax.xmlreader import XMLReader +from xml.sax.xmlreader import AttributesImpl, AttributesNSImpl, Locator, XMLReader START_ELEMENT: Final = "START_ELEMENT" END_ELEMENT: Final = "END_ELEMENT" @@ -16,79 +17,93 @@ PROCESSING_INSTRUCTION: Final = "PROCESSING_INSTRUCTION" IGNORABLE_WHITESPACE: Final = "IGNORABLE_WHITESPACE" CHARACTERS: Final = "CHARACTERS" +_NSName: TypeAlias = tuple[str | None, str] _DocumentFactory: TypeAlias = DOMImplementation | None -_Node: TypeAlias = Document | Element | Text -_Event: TypeAlias = tuple[ - Literal[ - Literal["START_ELEMENT"], - Literal["END_ELEMENT"], - Literal["COMMENT"], - Literal["START_DOCUMENT"], - Literal["END_DOCUMENT"], - Literal["PROCESSING_INSTRUCTION"], - Literal["IGNORABLE_WHITESPACE"], - Literal["CHARACTERS"], - ], - _Node, -] +_Event: TypeAlias = ( + tuple[Literal["START_ELEMENT"], Element] + | tuple[Literal["END_ELEMENT"], Element] + | tuple[Literal["COMMENT"], Comment] + | tuple[Literal["START_DOCUMENT"], Document] + | tuple[Literal["END_DOCUMENT"], Document] + | tuple[Literal["PROCESSING_INSTRUCTION"], ProcessingInstruction] + | tuple[Literal["IGNORABLE_WHITESPACE"], Text] + | tuple[Literal["CHARACTERS"], Text] +) class PullDOM(ContentHandler): document: Document | None documentFactory: _DocumentFactory - firstEvent: Incomplete - lastEvent: Incomplete - elementStack: Sequence[Incomplete] - pending_events: Sequence[Incomplete] + + # firstEvent is a list of length 2 + # firstEvent[0] is always None + # firstEvent[1] is None prior to any events, after which it's a + # list of length 2, where the first item is of type _Event + # and the second item is None. + firstEvent: list[Incomplete] + + # lastEvent is also a list of length 2. The second item is always None, + # and the first item is of type _Event + # This is a slight lie: The second item is sometimes temporarily what was just + # described for the type of lastEvent, after which lastEvent is always updated + # with `self.lastEvent = self.lastEvent[1]`. + lastEvent: list[Incomplete] + + elementStack: MutableSequence[Element | Document] + pending_events: ( + list[Sequence[tuple[Literal["COMMENT"], str] | tuple[Literal["PROCESSING_INSTRUCTION"], str, str] | None]] | None + ) def __init__(self, documentFactory: _DocumentFactory = None) -> None: ... - def pop(self) -> Element: ... - def setDocumentLocator(self, locator) -> None: ... - def startPrefixMapping(self, prefix, uri) -> None: ... - def endPrefixMapping(self, prefix) -> None: ... - def startElementNS(self, name, tagName, attrs) -> None: ... - def endElementNS(self, name, tagName) -> None: ... - def startElement(self, name, attrs) -> None: ... - def endElement(self, name) -> None: ... - def comment(self, s) -> None: ... 
- def processingInstruction(self, target, data) -> None: ... - def ignorableWhitespace(self, chars) -> None: ... - def characters(self, chars) -> None: ... + def pop(self) -> Element | Document: ... + def setDocumentLocator(self, locator: Locator) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElementNS(self, name: _NSName, tagName: str | None, attrs: AttributesNSImpl) -> None: ... + def endElementNS(self, name: _NSName, tagName: str | None) -> None: ... + def startElement(self, name: str, attrs: AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def comment(self, s: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + def ignorableWhitespace(self, chars: str) -> None: ... + def characters(self, chars: str) -> None: ... def startDocument(self) -> None: ... - def buildDocument(self, uri, tagname): ... + def buildDocument(self, uri: str | None, tagname: str | None) -> Element: ... def endDocument(self) -> None: ... def clear(self) -> None: ... class ErrorHandler: - def warning(self, exception) -> None: ... - def error(self, exception) -> None: ... - def fatalError(self, exception) -> None: ... + def warning(self, exception: BaseException) -> None: ... + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... class DOMEventStream: - stream: SupportsRead[bytes] | SupportsRead[str] - parser: XMLReader + stream: _SupportsReadClose[bytes] | _SupportsReadClose[str] + parser: XMLReader # Set to none after .clear() is called bufsize: int - def __init__(self, stream: SupportsRead[bytes] | SupportsRead[str], parser: XMLReader, bufsize: int) -> None: ... - pulldom: Incomplete + pulldom: PullDOM + def __init__(self, stream: _SupportsReadClose[bytes] | _SupportsReadClose[str], parser: XMLReader, bufsize: int) -> None: ... if sys.version_info < (3, 11): - def __getitem__(self, pos): ... + def __getitem__(self, pos: Unused) -> _Event: ... - def __next__(self): ... - def __iter__(self): ... - def getEvent(self) -> _Event: ... - def expandNode(self, node: _Node) -> None: ... + def __next__(self) -> _Event: ... + def __iter__(self) -> Self: ... + def getEvent(self) -> _Event | None: ... + def expandNode(self, node: Document) -> None: ... def reset(self) -> None: ... def clear(self) -> None: ... class SAX2DOM(PullDOM): - def startElementNS(self, name, tagName, attrs) -> None: ... - def startElement(self, name, attrs) -> None: ... - def processingInstruction(self, target, data) -> None: ... - def ignorableWhitespace(self, chars) -> None: ... - def characters(self, chars) -> None: ... + def startElementNS(self, name: _NSName, tagName: str | None, attrs: AttributesNSImpl) -> None: ... + def startElement(self, name: str, attrs: AttributesImpl) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + def ignorableWhitespace(self, chars: str) -> None: ... + def characters(self, chars: str) -> None: ... default_bufsize: int def parse( - stream_or_string: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = None, bufsize: int | None = None + stream_or_string: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], + parser: XMLReader | None = None, + bufsize: int | None = None, ) -> DOMEventStream: ... def parseString(string: str, parser: XMLReader | None = None) -> DOMEventStream: ... 
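
The reworked `_Event` alias above pairs each event name with its node type, instead of one shared node union. A short sketch (illustrative only, not part of the diff) of how that helps when matching on the whole tuple; the narrowing noted in the comments assumes a checker that discriminates tuple unions in `match` statements, and the sample XML is made up.

    # Illustrative sketch; runtime behaviour is unchanged, the gain is in inference.
    from xml.dom import pulldom

    stream = pulldom.parseString("<root><item>text</item></root>")
    for ev in stream:
        match ev:
            case ("START_ELEMENT", element):
                print("start:", element.tagName)   # element narrows to minidom.Element
            case ("CHARACTERS", text):
                print("chars:", text.data)         # text narrows to minidom.Text
            case _:
                pass
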
diff --git a/stdlib/xml/dom/xmlbuilder.pyi b/stdlib/xml/dom/xmlbuilder.pyi index ab76d362e23f..6fb18bbc4eda 100644 --- a/stdlib/xml/dom/xmlbuilder.pyi +++ b/stdlib/xml/dom/xmlbuilder.pyi @@ -1,32 +1,9 @@ -from _typeshed import Incomplete, Unused +from _typeshed import SupportsRead from typing import Any, Literal, NoReturn -from typing_extensions import TypeAlias -from urllib.request import OpenerDirector -from xml.dom.expatbuilder import ExpatBuilder, ExpatBuilderNS -from xml.dom.minidom import Node +from xml.dom.minidom import Document, Node, _DOMErrorHandler __all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"] -# UNKNOWN TYPES: -# - `Options.errorHandler`. -# The same as `_DOMBuilderErrorHandlerType`? -# Maybe `xml.sax.handler.ErrorHandler`? -# - Return type of DOMBuilder.getFeature(). -# We could get rid of the `Incomplete` if we knew more -# about `Options.errorHandler`. - -# ALIASES REPRESENTING MORE UNKNOWN TYPES: - -# probably the same as `Options.errorHandler`? -# Maybe `xml.sax.handler.ErrorHandler`? -_DOMBuilderErrorHandlerType: TypeAlias = Incomplete | None -# probably some kind of IO... -_DOMInputSourceCharacterStreamType: TypeAlias = Incomplete | None -# probably a string?? -_DOMInputSourceStringDataType: TypeAlias = Incomplete | None -# probably a string?? -_DOMInputSourceEncodingType: TypeAlias = Incomplete | None - class Options: namespaces: int namespace_declarations: bool @@ -45,37 +22,35 @@ class Options: charset_overrides_xml_encoding: bool infoset: bool supported_mediatypes_only: bool - errorHandler: Any | None - filter: DOMBuilderFilter | None # a guess, but seems likely + errorHandler: _DOMErrorHandler | None + filter: DOMBuilderFilter | None class DOMBuilder: - entityResolver: DOMEntityResolver | None # a guess, but seems likely - errorHandler: _DOMBuilderErrorHandlerType - filter: DOMBuilderFilter | None # a guess, but seems likely + entityResolver: DOMEntityResolver | None + errorHandler: _DOMErrorHandler | None + filter: DOMBuilderFilter | None ACTION_REPLACE: Literal[1] ACTION_APPEND_AS_CHILDREN: Literal[2] ACTION_INSERT_AFTER: Literal[3] ACTION_INSERT_BEFORE: Literal[4] + def __init__(self) -> None: ... def setFeature(self, name: str, state: int) -> None: ... def supportsFeature(self, name: str) -> bool: ... - def canSetFeature(self, name: str, state: int) -> bool: ... + def canSetFeature(self, name: str, state: Literal[1, 0]) -> bool: ... # getFeature could return any attribute from an instance of `Options` def getFeature(self, name: str) -> Any: ... - def parseURI(self, uri: str) -> ExpatBuilder | ExpatBuilderNS: ... - def parse(self, input: DOMInputSource) -> ExpatBuilder | ExpatBuilderNS: ... - # `input` and `cnode` argtypes for `parseWithContext` are unknowable - # as the function does nothing with them, and always raises an exception. - # But `input` is *probably* `DOMInputSource`? - def parseWithContext(self, input: Unused, cnode: Unused, action: Literal[1, 2, 3, 4]) -> NoReturn: ... + def parseURI(self, uri: str) -> Document: ... + def parse(self, input: DOMInputSource) -> Document: ... + def parseWithContext(self, input: DOMInputSource, cnode: Node, action: Literal[1, 2, 3, 4]) -> NoReturn: ... class DOMEntityResolver: def resolveEntity(self, publicId: str | None, systemId: str) -> DOMInputSource: ... 
class DOMInputSource: - byteStream: OpenerDirector | None - characterStream: _DOMInputSourceCharacterStreamType - stringData: _DOMInputSourceStringDataType - encoding: _DOMInputSourceEncodingType + byteStream: SupportsRead[bytes] | None + characterStream: SupportsRead[str] | None + stringData: str | None + encoding: str | None publicId: str | None systemId: str | None baseURI: str | None @@ -86,18 +61,14 @@ class DOMBuilderFilter: FILTER_SKIP: Literal[3] FILTER_INTERRUPT: Literal[4] whatToShow: int - def acceptNode(self, element: Unused) -> Literal[1]: ... - def startContainer(self, element: Unused) -> Literal[1]: ... + def acceptNode(self, element: Node) -> Literal[1, 2, 3, 4]: ... + def startContainer(self, element: Node) -> Literal[1, 2, 3, 4]: ... class DocumentLS: async_: bool def abort(self) -> NoReturn: ... - # `load()` and `loadXML()` always raise exceptions - # so the argtypes of `uri` and `source` are unknowable. - # `source` is *probably* `DOMInputSource`? - # `uri` is *probably* a str? (see DOMBuilder.parseURI()) - def load(self, uri: Unused) -> NoReturn: ... - def loadXML(self, source: Unused) -> NoReturn: ... + def load(self, uri: str) -> NoReturn: ... + def loadXML(self, source: str) -> NoReturn: ... def saveXML(self, snode: Node | None) -> str: ... class DOMImplementationLS: diff --git a/stdlib/xml/etree/ElementInclude.pyi b/stdlib/xml/etree/ElementInclude.pyi index 5a15772ec2a9..10c305826453 100644 --- a/stdlib/xml/etree/ElementInclude.pyi +++ b/stdlib/xml/etree/ElementInclude.pyi @@ -1,9 +1,14 @@ import sys from _typeshed import FileDescriptorOrPath -from collections.abc import Callable -from typing import Final +from typing import Final, Literal, Protocol, overload from xml.etree.ElementTree import Element +class _Loader(Protocol): + @overload + def __call__(self, href: FileDescriptorOrPath, parse: Literal["xml"], encoding: str | None = None) -> Element: ... + @overload + def __call__(self, href: FileDescriptorOrPath, parse: Literal["text"], encoding: str | None = None) -> str: ... + XINCLUDE: Final[str] XINCLUDE_INCLUDE: Final[str] XINCLUDE_FALLBACK: Final[str] @@ -13,17 +18,15 @@ if sys.version_info >= (3, 9): class FatalIncludeError(SyntaxError): ... -def default_loader(href: FileDescriptorOrPath, parse: str, encoding: str | None = None) -> str | Element: ... +@overload +def default_loader(href: FileDescriptorOrPath, parse: Literal["xml"], encoding: str | None = None) -> Element: ... +@overload +def default_loader(href: FileDescriptorOrPath, parse: Literal["text"], encoding: str | None = None) -> str: ... -# TODO: loader is of type default_loader ie it takes a callable that has the -# same signature as default_loader. But default_loader has a keyword argument -# Which can't be represented using Callable... if sys.version_info >= (3, 9): - def include( - elem: Element, loader: Callable[..., str | Element] | None = None, base_url: str | None = None, max_depth: int | None = 6 - ) -> None: ... + def include(elem: Element, loader: _Loader | None = None, base_url: str | None = None, max_depth: int | None = 6) -> None: ... class LimitedRecursiveIncludeError(FatalIncludeError): ... else: - def include(elem: Element, loader: Callable[..., str | Element] | None = None) -> None: ... + def include(elem: Element, loader: _Loader | None = None) -> None: ... 
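
With the `Callable[..., str | Element]` loader type replaced by the `_Loader` protocol above, a custom loader can mirror the overloads of `default_loader`. A sketch (illustrative only, not part of the diff): `my_loader` is a made-up name, and `href` is narrowed to `str` here for brevity where the protocol accepts any `FileDescriptorOrPath`, so a strict checker may want the wider parameter type.

    # Illustrative sketch; a real loader should accept any FileDescriptorOrPath.
    from typing import Literal, overload
    from xml.etree import ElementInclude, ElementTree
    from xml.etree.ElementTree import Element

    @overload
    def my_loader(href: str, parse: Literal["xml"], encoding: str | None = None) -> Element: ...
    @overload
    def my_loader(href: str, parse: Literal["text"], encoding: str | None = None) -> str: ...
    def my_loader(href: str, parse: str, encoding: str | None = None) -> Element | str:
        if parse == "xml":
            return ElementTree.parse(href).getroot()
        with open(href, encoding=encoding or "utf-8") as f:
            return f.read()

    root = ElementTree.fromstring("<doc/>")
    ElementInclude.include(root, loader=my_loader)  # no XInclude elements here, so the loader is never called
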
diff --git a/stdlib/xml/etree/ElementPath.pyi b/stdlib/xml/etree/ElementPath.pyi index c3f6207ea241..ebfb4f1ffbb9 100644 --- a/stdlib/xml/etree/ElementPath.pyi +++ b/stdlib/xml/etree/ElementPath.pyi @@ -1,6 +1,6 @@ -from collections.abc import Callable, Generator +from collections.abc import Callable, Generator, Iterable from re import Pattern -from typing import TypeVar +from typing import Any, Literal, TypeVar, overload from typing_extensions import TypeAlias from xml.etree.ElementTree import Element @@ -8,27 +8,34 @@ xpath_tokenizer_re: Pattern[str] _Token: TypeAlias = tuple[str, str] _Next: TypeAlias = Callable[[], _Token] -_Callback: TypeAlias = Callable[[_SelectorContext, list[Element]], Generator[Element, None, None]] +_Callback: TypeAlias = Callable[[_SelectorContext, Iterable[Element]], Generator[Element, None, None]] +_T = TypeVar("_T") def xpath_tokenizer(pattern: str, namespaces: dict[str, str] | None = None) -> Generator[_Token, None, None]: ... def get_parent_map(context: _SelectorContext) -> dict[Element, Element]: ... def prepare_child(next: _Next, token: _Token) -> _Callback: ... def prepare_star(next: _Next, token: _Token) -> _Callback: ... def prepare_self(next: _Next, token: _Token) -> _Callback: ... -def prepare_descendant(next: _Next, token: _Token) -> _Callback: ... +def prepare_descendant(next: _Next, token: _Token) -> _Callback | None: ... def prepare_parent(next: _Next, token: _Token) -> _Callback: ... -def prepare_predicate(next: _Next, token: _Token) -> _Callback: ... +def prepare_predicate(next: _Next, token: _Token) -> _Callback | None: ... -ops: dict[str, Callable[[_Next, _Token], _Callback]] +ops: dict[str, Callable[[_Next, _Token], _Callback | None]] class _SelectorContext: parent_map: dict[Element, Element] | None root: Element def __init__(self, root: Element) -> None: ... -_T = TypeVar("_T") - -def iterfind(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... -def find(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... -def findall(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... -def findtext(elem: Element, path: str, default: _T | None = None, namespaces: dict[str, str] | None = None) -> _T | str: ... +@overload +def iterfind( # type: ignore[overload-overlap] + elem: Element[Any], path: Literal[""], namespaces: dict[str, str] | None = None +) -> None: ... +@overload +def iterfind(elem: Element[Any], path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... +def find(elem: Element[Any], path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... +def findall(elem: Element[Any], path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... +@overload +def findtext(elem: Element[Any], path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... +@overload +def findtext(elem: Element[Any], path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... 
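
The `findtext` overloads above separate the no-default case from an explicit default, and the empty-path `iterfind` overload records the corner case where an empty path produces `None` rather than an iterator. A short sketch (illustrative only, not part of the diff) of the `findtext` side; the sample document is made up and the inferred types are what these overloads should report.

    # Illustrative sketch; runtime behaviour is unchanged.
    from xml.etree import ElementPath, ElementTree

    root = ElementTree.fromstring("<doc><a>hello</a></doc>")
    t1 = ElementPath.findtext(root, "a")             # inferred: str | None
    t2 = ElementPath.findtext(root, "missing", "")   # inferred: str, since the default joins the return type
    t3 = ElementPath.findtext(root, "missing", 0)    # inferred: int | str
    print(t1, repr(t2), t3)
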
diff --git a/stdlib/xml/etree/ElementTree.pyi b/stdlib/xml/etree/ElementTree.pyi index 64ebbd3ee63f..4a9113868d7e 100644 --- a/stdlib/xml/etree/ElementTree.pyi +++ b/stdlib/xml/etree/ElementTree.pyi @@ -2,8 +2,9 @@ import sys from _collections_abc import dict_keys from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence -from typing import Any, Final, Literal, SupportsIndex, TypeVar, overload +from typing import Any, Final, Generic, Literal, Protocol, SupportsIndex, TypeVar, overload, type_check_only from typing_extensions import TypeAlias, TypeGuard, deprecated +from xml.parsers.expat import XMLParserType __all__ = [ "C14NWriterTarget", @@ -78,13 +79,22 @@ def canonicalize( exclude_tags: Iterable[str] | None = None, ) -> None: ... -class Element: - tag: str +# The tag for Element can be set to the Comment or ProcessingInstruction +# functions defined in this module. _ElementCallable could be a recursive +# type, but defining it that way uncovered a bug in pytype. +_ElementCallable: TypeAlias = Callable[..., Element[Any]] +_CallableElement: TypeAlias = Element[_ElementCallable] + +_Tag = TypeVar("_Tag", default=str, bound=str | _ElementCallable) +_OtherTag = TypeVar("_OtherTag", default=str, bound=str | _ElementCallable) + +class Element(Generic[_Tag]): + tag: _Tag attrib: dict[str, str] text: str | None tail: str | None - def __init__(self, tag: str, attrib: dict[str, str] = ..., **extra: str) -> None: ... - def append(self, subelement: Element, /) -> None: ... + def __init__(self, tag: _Tag, attrib: dict[str, str] = {}, **extra: str) -> None: ... + def append(self, subelement: Element[Any], /) -> None: ... def clear(self) -> None: ... def extend(self, elements: Iterable[Element], /) -> None: ... def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... @@ -100,14 +110,17 @@ class Element: def insert(self, index: int, subelement: Element, /) -> None: ... def items(self) -> ItemsView[str, str]: ... def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... + @overload + def iterfind(self, path: Literal[""], namespaces: dict[str, str] | None = None) -> None: ... # type: ignore[overload-overlap] + @overload def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... def itertext(self) -> Generator[str, None, None]: ... def keys(self) -> dict_keys[str, str]: ... # makeelement returns the type of self in Python impl, but not in C impl - def makeelement(self, tag: str, attrib: dict[str, str], /) -> Element: ... + def makeelement(self, tag: _OtherTag, attrib: dict[str, str], /) -> Element[_OtherTag]: ... def remove(self, subelement: Element, /) -> None: ... def set(self, key: str, value: str, /) -> None: ... - def __copy__(self) -> Element: ... # returns the type of self in Python impl, but not in C impl + def __copy__(self) -> Element[_Tag]: ... # returns the type of self in Python impl, but not in C impl def __deepcopy__(self, memo: Any, /) -> Element: ... # Only exists in C impl def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... @overload @@ -130,8 +143,8 @@ class Element: def getiterator(self, tag: str | None = None) -> list[Element]: ... def SubElement(parent: Element, tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... -def Comment(text: str | None = None) -> Element: ... 
-def ProcessingInstruction(target: str, text: str | None = None) -> Element: ... +def Comment(text: str | None = None) -> _CallableElement: ... +def ProcessingInstruction(target: str, text: str | None = None) -> _CallableElement: ... PI = ProcessingInstruction @@ -145,9 +158,11 @@ class QName: def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... -class ElementTree: +_Root = TypeVar("_Root", Element, Element | None, default=Element | None) + +class ElementTree(Generic[_Root]): def __init__(self, element: Element | None = None, file: _FileRead | None = None) -> None: ... - def getroot(self) -> Element | Any: ... + def getroot(self) -> _Root: ... def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: ... def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... if sys.version_info < (3, 9): @@ -159,6 +174,9 @@ class ElementTree: @overload def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... + @overload + def iterfind(self, path: Literal[""], namespaces: dict[str, str] | None = None) -> None: ... # type: ignore[overload-overlap] + @overload def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... def write( self, @@ -166,18 +184,20 @@ class ElementTree: encoding: str | None = None, xml_declaration: bool | None = None, default_namespace: str | None = None, - method: str | None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, *, short_empty_elements: bool = True, ) -> None: ... def write_c14n(self, file: _FileWriteC14N) -> None: ... +HTML_EMPTY: set[str] + def register_namespace(prefix: str, uri: str) -> None: ... @overload def tostring( element: Element, encoding: None = None, - method: str | None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, *, xml_declaration: bool | None = None, default_namespace: str | None = None, @@ -187,7 +207,7 @@ def tostring( def tostring( element: Element, encoding: Literal["unicode"], - method: str | None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, *, xml_declaration: bool | None = None, default_namespace: str | None = None, @@ -197,7 +217,7 @@ def tostring( def tostring( element: Element, encoding: str, - method: str | None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, *, xml_declaration: bool | None = None, default_namespace: str | None = None, @@ -207,7 +227,7 @@ def tostring( def tostringlist( element: Element, encoding: None = None, - method: str | None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, *, xml_declaration: bool | None = None, default_namespace: str | None = None, @@ -217,7 +237,7 @@ def tostringlist( def tostringlist( element: Element, encoding: Literal["unicode"], - method: str | None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, *, xml_declaration: bool | None = None, default_namespace: str | None = None, @@ -227,21 +247,23 @@ def tostringlist( def tostringlist( element: Element, encoding: str, - method: str | None = None, + method: Literal["xml", "html", "text", "c14n"] | None = None, *, xml_declaration: bool | None = None, default_namespace: str | None = None, short_empty_elements: bool = True, ) -> list[Any]: ... -def dump(elem: Element) -> None: ... +def dump(elem: Element | ElementTree[Any]) -> None: ... 
if sys.version_info >= (3, 9): - def indent(tree: Element | ElementTree, space: str = " ", level: int = 0) -> None: ... + def indent(tree: Element | ElementTree[Any], space: str = " ", level: int = 0) -> None: ... -def parse(source: _FileRead, parser: XMLParser | None = None) -> ElementTree: ... +def parse(source: _FileRead, parser: XMLParser[Any] | None = None) -> ElementTree[Element]: ... -class _IterParseIterator(Iterator[tuple[str, Any]]): - def __next__(self) -> tuple[str, Any]: ... +# This class is defined inside the body of iterparse +@type_check_only +class _IterParseIterator(Iterator[tuple[str, Element]], Protocol): + def __next__(self) -> tuple[str, Element]: ... if sys.version_info >= (3, 13): def close(self) -> None: ... if sys.version_info >= (3, 11): @@ -249,13 +271,13 @@ class _IterParseIterator(Iterator[tuple[str, Any]]): def iterparse(source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None) -> _IterParseIterator: ... -class XMLPullParser: - def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser | None = None) -> None: ... +_EventQueue: TypeAlias = tuple[str] | tuple[str, tuple[str, str]] | tuple[str, None] + +class XMLPullParser(Generic[_E]): + def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser[_E] | None = None) -> None: ... def feed(self, data: str | ReadableBuffer) -> None: ... def close(self) -> None: ... - # Second element in the tuple could be `Element`, `tuple[str, str]` or `None`. - # Use `Any` to avoid false-positive errors. - def read_events(self) -> Iterator[tuple[str, Any]]: ... + def read_events(self) -> Iterator[_EventQueue | tuple[str, _E]]: ... def flush(self) -> None: ... def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: ... @@ -281,12 +303,12 @@ class TreeBuilder: # comment_factory can take None because passing None to Comment is not an error def __init__( self, - element_factory: _ElementFactory | None = ..., + element_factory: _ElementFactory | None = None, *, - comment_factory: Callable[[str | None], Element] | None = ..., - pi_factory: Callable[[str, str | None], Element] | None = ..., - insert_comments: bool = ..., - insert_pis: bool = ..., + comment_factory: Callable[[str | None], Element[Any]] | None = None, + pi_factory: Callable[[str, str | None], Element[Any]] | None = None, + insert_comments: bool = False, + insert_pis: bool = False, ) -> None: ... insert_comments: bool insert_pis: bool @@ -298,8 +320,8 @@ class TreeBuilder: def start(self, tag: Any, attrs: dict[Any, Any], /) -> Element: ... def end(self, tag: str, /) -> Element: ... # These two methods have pos-only parameters in the C implementation - def comment(self, text: str | None, /) -> Element: ... - def pi(self, target: str, text: str | None = None, /) -> Element: ... + def comment(self, text: str | None, /) -> Element[Any]: ... + def pi(self, target: str, text: str | None = None, /) -> Element[Any]: ... class C14NWriterTarget: def __init__( @@ -321,13 +343,33 @@ class C14NWriterTarget: def comment(self, text: str) -> None: ... def pi(self, target: str, data: str) -> None: ... -class XMLParser: - parser: Any - target: Any +# The target type is tricky, because the implementation doesn't +# require any particular attribute to be present. This documents the attributes +# that can be present, but uncommenting any of them would require them. 
+class _Target(Protocol): + # start: Callable[str, dict[str, str], Any] | None + # end: Callable[[str], Any] | None + # start_ns: Callable[[str, str], Any] | None + # end_ns: Callable[[str], Any] | None + # data: Callable[[str], Any] | None + # comment: Callable[[str], Any] + # pi: Callable[[str, str], Any] | None + # close: Callable[[], Any] | None + ... + +_E = TypeVar("_E", default=Element) + +# This is generic because the return type of close() depends on the target. +# The default target is TreeBuilder, which returns Element. +# C14NWriterTarget does not implement a close method, so using it results +# in a type of XMLParser[None]. +class XMLParser(Generic[_E]): + parser: XMLParserType + target: _Target # TODO-what is entity used for??? - entity: Any + entity: dict[str, str] version: str - def __init__(self, *, target: Any = ..., encoding: str | None = ...) -> None: ... - def close(self) -> Any: ... + def __init__(self, *, target: _Target | None = None, encoding: str | None = None) -> None: ... + def close(self) -> _E: ... def feed(self, data: str | ReadableBuffer, /) -> None: ... def flush(self) -> None: ... diff --git a/stdlib/xml/sax/_exceptions.pyi b/stdlib/xml/sax/_exceptions.pyi index 8a437a971f13..e9cc8856a9c8 100644 --- a/stdlib/xml/sax/_exceptions.pyi +++ b/stdlib/xml/sax/_exceptions.pyi @@ -4,15 +4,15 @@ from xml.sax.xmlreader import Locator class SAXException(Exception): def __init__(self, msg: str, exception: Exception | None = None) -> None: ... def getMessage(self) -> str: ... - def getException(self) -> Exception: ... + def getException(self) -> Exception | None: ... def __getitem__(self, ix: object) -> NoReturn: ... class SAXParseException(SAXException): def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: ... - def getColumnNumber(self) -> int: ... - def getLineNumber(self) -> int: ... - def getPublicId(self): ... - def getSystemId(self): ... + def getColumnNumber(self) -> int | None: ... + def getLineNumber(self) -> int | None: ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... class SAXNotRecognizedException(SAXException): ... class SAXNotSupportedException(SAXException): ... diff --git a/stdlib/xml/sax/expatreader.pyi b/stdlib/xml/sax/expatreader.pyi index 0f7bda5872c0..6a68f52f0e99 100644 --- a/stdlib/xml/sax/expatreader.pyi +++ b/stdlib/xml/sax/expatreader.pyi @@ -1,53 +1,82 @@ import sys -from _typeshed import Unused -from xml.sax import xmlreader +from _typeshed import ReadableBuffer +from collections.abc import Mapping +from typing import Any, Literal, overload +from typing_extensions import TypeAlias +from xml.sax import _Source, xmlreader +from xml.sax.handler import _ContentHandlerProtocol + +if sys.version_info >= (3, 10): + from xml.sax.handler import LexicalHandler + +_BoolType: TypeAlias = Literal[0, 1] | bool version: str AttributesImpl = xmlreader.AttributesImpl AttributesNSImpl = xmlreader.AttributesNSImpl -class _ClosedParser: ... +class _ClosedParser: + ErrorColumnNumber: int + ErrorLineNumber: int class ExpatLocator(xmlreader.Locator): def __init__(self, parser: ExpatParser) -> None: ... - def getColumnNumber(self) -> int: ... + def getColumnNumber(self) -> int | None: ... def getLineNumber(self) -> int: ... - def getPublicId(self): ... - def getSystemId(self): ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... 
class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator): - def __init__(self, namespaceHandling: int = 0, bufsize: int = 65516) -> None: ... - def parse(self, source) -> None: ... - def prepareParser(self, source) -> None: ... - def setContentHandler(self, handler) -> None: ... - def getFeature(self, name: str): ... - def setFeature(self, name: str, state) -> None: ... - def getProperty(self, name: str): ... - def setProperty(self, name: str, value) -> None: ... + def __init__(self, namespaceHandling: _BoolType = 0, bufsize: int = 65516) -> None: ... + def parse(self, source: xmlreader.InputSource | _Source) -> None: ... + def prepareParser(self, source: xmlreader.InputSource) -> None: ... + def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: ... + def getFeature(self, name: str) -> _BoolType: ... + def setFeature(self, name: str, state: _BoolType) -> None: ... + if sys.version_info >= (3, 10): + @overload + def getProperty(self, name: Literal["http://xml.org/sax/properties/lexical-handler"]) -> LexicalHandler | None: ... + + @overload + def getProperty(self, name: Literal["http://www.python.org/sax/properties/interning-dict"]) -> dict[str, Any] | None: ... + @overload + def getProperty(self, name: Literal["http://xml.org/sax/properties/xml-string"]) -> bytes | None: ... + @overload + def getProperty(self, name: str) -> object: ... + if sys.version_info >= (3, 10): + @overload + def setProperty(self, name: Literal["http://xml.org/sax/properties/lexical-handler"], value: LexicalHandler) -> None: ... + + @overload + def setProperty( + self, name: Literal["http://www.python.org/sax/properties/interning-dict"], value: dict[str, Any] + ) -> None: ... + @overload + def setProperty(self, name: str, value: object) -> None: ... if sys.version_info >= (3, 9): - def feed(self, data, isFinal: bool = False) -> None: ... + def feed(self, data: str | ReadableBuffer, isFinal: bool = False) -> None: ... else: - def feed(self, data, isFinal: int = 0) -> None: ... + def feed(self, data: str | ReadableBuffer, isFinal: _BoolType = 0) -> None: ... def flush(self) -> None: ... def close(self) -> None: ... def reset(self) -> None: ... def getColumnNumber(self) -> int | None: ... def getLineNumber(self) -> int: ... - def getPublicId(self): ... - def getSystemId(self): ... - def start_element(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... + def start_element(self, name: str, attrs: Mapping[str, str]) -> None: ... def end_element(self, name: str) -> None: ... - def start_element_ns(self, name: str, attrs) -> None: ... + def start_element_ns(self, name: str, attrs: Mapping[str, str]) -> None: ... def end_element_ns(self, name: str) -> None: ... def processing_instruction(self, target: str, data: str) -> None: ... def character_data(self, data: str) -> None: ... def start_namespace_decl(self, prefix: str | None, uri: str) -> None: ... def end_namespace_decl(self, prefix: str | None) -> None: ... - def start_doctype_decl(self, name: str, sysid: str | None, pubid: str | None, has_internal_subset: Unused) -> None: ... - def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name) -> None: ... - def notation_decl(self, name, base, sysid, pubid) -> None: ... - def external_entity_ref(self, context, base, sysid, pubid): ... + def start_doctype_decl(self, name: str, sysid: str | None, pubid: str | None, has_internal_subset: bool) -> None: ... 
+ def unparsed_entity_decl(self, name: str, base: str | None, sysid: str, pubid: str | None, notation_name: str) -> None: ... + def notation_decl(self, name: str, base: str | None, sysid: str, pubid: str | None) -> None: ... + def external_entity_ref(self, context: str, base: str | None, sysid: str, pubid: str | None) -> int: ... def skipped_entity_handler(self, name: str, is_pe: bool) -> None: ... def create_parser(namespaceHandling: int = 0, bufsize: int = 65516) -> ExpatParser: ... diff --git a/stdlib/xml/sax/handler.pyi b/stdlib/xml/sax/handler.pyi index 7b7c69048efd..550911734596 100644 --- a/stdlib/xml/sax/handler.pyi +++ b/stdlib/xml/sax/handler.pyi @@ -1,14 +1,36 @@ import sys -from typing import NoReturn +from typing import Literal, NoReturn, Protocol, type_check_only from xml.sax import xmlreader version: str +@type_check_only +class _ErrorHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... + def warning(self, exception: BaseException) -> None: ... + class ErrorHandler: def error(self, exception: BaseException) -> NoReturn: ... def fatalError(self, exception: BaseException) -> NoReturn: ... def warning(self, exception: BaseException) -> None: ... +@type_check_only +class _ContentHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used + def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... + def characters(self, content: str) -> None: ... + def ignorableWhitespace(self, whitespace: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + def skippedEntity(self, name: str) -> None: ... + class ContentHandler: def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... def startDocument(self) -> None: ... @@ -17,19 +39,28 @@ class ContentHandler: def endPrefixMapping(self, prefix: str | None) -> None: ... def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... def endElement(self, name: str) -> None: ... - def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ... - def endElementNS(self, name: tuple[str, str], qname: str) -> None: ... + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... def characters(self, content: str) -> None: ... def ignorableWhitespace(self, whitespace: str) -> None: ... def processingInstruction(self, target: str, data: str) -> None: ... def skippedEntity(self, name: str) -> None: ... +@type_check_only +class _DTDHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used + def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: ... + def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... 
+ class DTDHandler: - def notationDecl(self, name, publicId, systemId): ... - def unparsedEntityDecl(self, name, publicId, systemId, ndata): ... + def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: ... + def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... + +@type_check_only +class _EntityResolverProtocol(Protocol): # noqa: Y046 # Protocol is not used + def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... class EntityResolver: - def resolveEntity(self, publicId, systemId): ... + def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... feature_namespaces: str feature_namespace_prefixes: str @@ -38,18 +69,18 @@ feature_validation: str feature_external_ges: str feature_external_pes: str all_features: list[str] -property_lexical_handler: str -property_declaration_handler: str -property_dom_node: str -property_xml_string: str -property_encoding: str -property_interning_dict: str +property_lexical_handler: Literal["http://xml.org/sax/properties/lexical-handler"] +property_declaration_handler: Literal["http://xml.org/sax/properties/declaration-handler"] +property_dom_node: Literal["http://xml.org/sax/properties/dom-node"] +property_xml_string: Literal["http://xml.org/sax/properties/xml-string"] +property_encoding: Literal["http://www.python.org/sax/properties/encoding"] +property_interning_dict: Literal["http://www.python.org/sax/properties/interning-dict"] all_properties: list[str] if sys.version_info >= (3, 10): class LexicalHandler: - def comment(self, content: str) -> object: ... - def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> object: ... - def endDTD(self) -> object: ... - def startCDATA(self) -> object: ... - def endCDATA(self) -> object: ... + def comment(self, content: str) -> None: ... + def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> None: ... + def endDTD(self) -> None: ... + def startCDATA(self) -> None: ... + def endCDATA(self) -> None: ... diff --git a/stdlib/xml/sax/saxutils.pyi b/stdlib/xml/sax/saxutils.pyi index 528f35963947..a29588faae2a 100644 --- a/stdlib/xml/sax/saxutils.pyi +++ b/stdlib/xml/sax/saxutils.pyi @@ -2,6 +2,7 @@ from _typeshed import SupportsWrite from codecs import StreamReaderWriter, StreamWriter from collections.abc import Mapping from io import RawIOBase, TextIOBase +from typing import Literal, NoReturn from xml.sax import _Source, handler, xmlreader def escape(data: str, entities: Mapping[str, str] = {}) -> str: ... @@ -15,23 +16,26 @@ class XMLGenerator(handler.ContentHandler): encoding: str = "iso-8859-1", short_empty_elements: bool = False, ) -> None: ... + def _qname(self, name: tuple[str | None, str]) -> str: ... def startDocument(self) -> None: ... def endDocument(self) -> None: ... def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... def endPrefixMapping(self, prefix: str | None) -> None: ... def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... def endElement(self, name: str) -> None: ... - def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ... - def endElementNS(self, name: tuple[str, str], qname: str) -> None: ... + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... def characters(self, content: str) -> None: ... 
def ignorableWhitespace(self, content: str) -> None: ... def processingInstruction(self, target: str, data: str) -> None: ... class XMLFilterBase(xmlreader.XMLReader): def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ... - def error(self, exception): ... - def fatalError(self, exception): ... - def warning(self, exception): ... + # ErrorHandler methods + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... + def warning(self, exception: BaseException) -> None: ... + # ContentHandler methods def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... def startDocument(self) -> None: ... def endDocument(self) -> None: ... @@ -39,22 +43,26 @@ class XMLFilterBase(xmlreader.XMLReader): def endPrefixMapping(self, prefix: str | None) -> None: ... def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... def endElement(self, name: str) -> None: ... - def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ... - def endElementNS(self, name: tuple[str, str], qname: str) -> None: ... + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... def characters(self, content: str) -> None: ... def ignorableWhitespace(self, chars: str) -> None: ... def processingInstruction(self, target: str, data: str) -> None: ... def skippedEntity(self, name: str) -> None: ... - def notationDecl(self, name, publicId, systemId): ... - def unparsedEntityDecl(self, name, publicId, systemId, ndata): ... - def resolveEntity(self, publicId, systemId): ... - def parse(self, source: _Source) -> None: ... - def setLocale(self, locale): ... - def getFeature(self, name: str) -> object: ... - def setFeature(self, name: str, state: object) -> None: ... + # DTDHandler methods + def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: ... + def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... + # EntityResolver methods + def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... + # XMLReader methods + def parse(self, source: xmlreader.InputSource | _Source) -> None: ... + def setLocale(self, locale: str) -> None: ... + def getFeature(self, name: str) -> Literal[1, 0] | bool: ... + def setFeature(self, name: str, state: Literal[1, 0] | bool) -> None: ... def getProperty(self, name: str) -> object: ... def setProperty(self, name: str, value: object) -> None: ... - def getParent(self) -> xmlreader.XMLReader: ... + # XMLFilter methods + def getParent(self) -> xmlreader.XMLReader | None: ... def setParent(self, parent: xmlreader.XMLReader) -> None: ... -def prepare_input_source(source, base=""): ... +def prepare_input_source(source: xmlreader.InputSource | _Source, base: str = "") -> xmlreader.InputSource: ... 
diff --git a/stdlib/xml/sax/xmlreader.pyi b/stdlib/xml/sax/xmlreader.pyi index 2ccbc95bbef0..e7d04ddeadb8 100644 --- a/stdlib/xml/sax/xmlreader.pyi +++ b/stdlib/xml/sax/xmlreader.pyi @@ -1,87 +1,90 @@ +from _typeshed import ReadableBuffer from collections.abc import Mapping -from typing import overload +from typing import Generic, Literal, TypeVar, overload from typing_extensions import Self, TypeAlias -from xml.sax.handler import ContentHandler, DTDHandler, EntityResolver, ErrorHandler +from xml.sax import _Source, _SupportsReadClose +from xml.sax.handler import _ContentHandlerProtocol, _DTDHandlerProtocol, _EntityResolverProtocol, _ErrorHandlerProtocol class XMLReader: - def parse(self, source): ... - def getContentHandler(self) -> ContentHandler: ... - def setContentHandler(self, handler: ContentHandler) -> None: ... - def getDTDHandler(self) -> DTDHandler: ... - def setDTDHandler(self, handler: DTDHandler) -> None: ... - def getEntityResolver(self) -> EntityResolver: ... - def setEntityResolver(self, resolver: EntityResolver) -> None: ... - def getErrorHandler(self) -> ErrorHandler: ... - def setErrorHandler(self, handler: ErrorHandler) -> None: ... - def setLocale(self, locale): ... - def getFeature(self, name: str) -> object: ... - def setFeature(self, name: str, state: object) -> None: ... + def parse(self, source: InputSource | _Source) -> None: ... + def getContentHandler(self) -> _ContentHandlerProtocol: ... + def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: ... + def getDTDHandler(self) -> _DTDHandlerProtocol: ... + def setDTDHandler(self, handler: _DTDHandlerProtocol) -> None: ... + def getEntityResolver(self) -> _EntityResolverProtocol: ... + def setEntityResolver(self, resolver: _EntityResolverProtocol) -> None: ... + def getErrorHandler(self) -> _ErrorHandlerProtocol: ... + def setErrorHandler(self, handler: _ErrorHandlerProtocol) -> None: ... + def setLocale(self, locale: str) -> None: ... + def getFeature(self, name: str) -> Literal[0, 1] | bool: ... + def setFeature(self, name: str, state: Literal[0, 1] | bool) -> None: ... def getProperty(self, name: str) -> object: ... def setProperty(self, name: str, value: object) -> None: ... class IncrementalParser(XMLReader): def __init__(self, bufsize: int = 65536) -> None: ... - def parse(self, source): ... - def feed(self, data): ... - def prepareParser(self, source): ... - def close(self): ... - def reset(self): ... + def parse(self, source: InputSource | _Source) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... + def prepareParser(self, source: InputSource) -> None: ... + def close(self) -> None: ... + def reset(self) -> None: ... class Locator: - def getColumnNumber(self): ... - def getLineNumber(self): ... - def getPublicId(self): ... - def getSystemId(self): ... + def getColumnNumber(self) -> int | None: ... + def getLineNumber(self) -> int | None: ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... class InputSource: def __init__(self, system_id: str | None = None) -> None: ... - def setPublicId(self, public_id): ... - def getPublicId(self): ... - def setSystemId(self, system_id): ... - def getSystemId(self): ... - def setEncoding(self, encoding): ... - def getEncoding(self): ... - def setByteStream(self, bytefile): ... - def getByteStream(self): ... - def setCharacterStream(self, charfile): ... - def getCharacterStream(self): ... + def setPublicId(self, public_id: str | None) -> None: ... + def getPublicId(self) -> str | None: ... 
+ def setSystemId(self, system_id: str | None) -> None: ... + def getSystemId(self) -> str | None: ... + def setEncoding(self, encoding: str | None) -> None: ... + def getEncoding(self) -> str | None: ... + def setByteStream(self, bytefile: _SupportsReadClose[bytes] | None) -> None: ... + def getByteStream(self) -> _SupportsReadClose[bytes] | None: ... + def setCharacterStream(self, charfile: _SupportsReadClose[str] | None) -> None: ... + def getCharacterStream(self) -> _SupportsReadClose[str] | None: ... -class AttributesImpl: - def __init__(self, attrs: Mapping[str, str]) -> None: ... +_AttrKey = TypeVar("_AttrKey", default=str) + +class AttributesImpl(Generic[_AttrKey]): + def __init__(self, attrs: Mapping[_AttrKey, str]) -> None: ... def getLength(self) -> int: ... def getType(self, name: str) -> str: ... - def getValue(self, name: str) -> str: ... + def getValue(self, name: _AttrKey) -> str: ... def getValueByQName(self, name: str) -> str: ... - def getNameByQName(self, name: str) -> str: ... - def getQNameByName(self, name: str) -> str: ... - def getNames(self) -> list[str]: ... + def getNameByQName(self, name: str) -> _AttrKey: ... + def getQNameByName(self, name: _AttrKey) -> str: ... + def getNames(self) -> list[_AttrKey]: ... def getQNames(self) -> list[str]: ... def __len__(self) -> int: ... - def __getitem__(self, name: str) -> str: ... - def keys(self) -> list[str]: ... - def __contains__(self, name: str) -> bool: ... + def __getitem__(self, name: _AttrKey) -> str: ... + def keys(self) -> list[_AttrKey]: ... + def __contains__(self, name: _AttrKey) -> bool: ... @overload - def get(self, name: str, alternative: None = None) -> str | None: ... + def get(self, name: _AttrKey, alternative: None = None) -> str | None: ... @overload - def get(self, name: str, alternative: str) -> str: ... + def get(self, name: _AttrKey, alternative: str) -> str: ... def copy(self) -> Self: ... - def items(self) -> list[tuple[str, str]]: ... + def items(self) -> list[tuple[_AttrKey, str]]: ... def values(self) -> list[str]: ... _NSName: TypeAlias = tuple[str | None, str] -class AttributesNSImpl(AttributesImpl): +class AttributesNSImpl(AttributesImpl[_NSName]): def __init__(self, attrs: Mapping[_NSName, str], qnames: Mapping[_NSName, str]) -> None: ... - def getType(self, name: _NSName) -> str: ... # type: ignore[override] - def getValue(self, name: _NSName) -> str: ... # type: ignore[override] - def getNameByQName(self, name: str) -> _NSName: ... # type: ignore[override] - def getQNameByName(self, name: _NSName) -> str: ... # type: ignore[override] - def getNames(self) -> list[_NSName]: ... # type: ignore[override] - def __getitem__(self, name: _NSName) -> str: ... # type: ignore[override] - def keys(self) -> list[_NSName]: ... # type: ignore[override] - def __contains__(self, name: _NSName) -> bool: ... # type: ignore[override] - @overload # type: ignore[override] + def getValue(self, name: _NSName) -> str: ... + def getNameByQName(self, name: str) -> _NSName: ... + def getQNameByName(self, name: _NSName) -> str: ... + def getNames(self) -> list[_NSName]: ... + def __getitem__(self, name: _NSName) -> str: ... + def keys(self) -> list[_NSName]: ... + def __contains__(self, name: _NSName) -> bool: ... + @overload def get(self, name: _NSName, alternative: None = None) -> str | None: ... @overload def get(self, name: _NSName, alternative: str) -> str: ... - def items(self) -> list[tuple[_NSName, str]]: ... # type: ignore[override] + def items(self) -> list[tuple[_NSName, str]]: ... 
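The rewritten xml.sax annotations above mostly describe callback shapes that the parser drives at runtime. As a rough usage sketch in plain Python (not part of the patch; the handler class and sample document are invented here), a ContentHandler subclass receives each element name together with an AttributesImpl object, the read-only, str-keyed mapping that the now-generic AttributesImpl stub models:

from __future__ import annotations

import xml.sax
from xml.sax import xmlreader
from xml.sax.handler import ContentHandler


class NameCollector(ContentHandler):
    """Record (element name, optional "id" attribute) pairs while parsing."""

    def __init__(self) -> None:
        super().__init__()
        self.seen: list[tuple[str, str | None]] = []

    def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None:
        # attrs behaves like a read-only mapping of attribute names to values;
        # .get() mirrors dict.get() and returns None for a missing attribute.
        self.seen.append((name, attrs.get("id")))


handler = NameCollector()
xml.sax.parseString(b'<root id="r"><child/></root>', handler)
print(handler.seen)  # [('root', 'r'), ('child', None)]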
From e72f4a27fe31f1dd556e95408c1084c7162361c7 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 27 Feb 2025 12:44:40 +0000 Subject: [PATCH 015/388] Use stricter pyright settings when testing `hnswlib` in CI (#13552) --- pyrightconfig.stricter.json | 1 - 1 file changed, 1 deletion(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index ec592725686d..30aa8ff800cd 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -47,7 +47,6 @@ "stubs/geopandas", "stubs/google-cloud-ndb", "stubs/hdbcli/hdbcli/dbapi.pyi", - "stubs/hnswlib", "stubs/html5lib", "stubs/httplib2", "stubs/humanfriendly", From 5768a7420a1af453019c8abd8684a2dca46fa718 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Simon?= Date: Thu, 27 Feb 2025 14:07:22 +0100 Subject: [PATCH 016/388] Add stubs for `dirhash` package (#13437) --- pyrightconfig.stricter.json | 1 + stubs/dirhash/METADATA.toml | 2 + stubs/dirhash/dirhash/__init__.pyi | 93 ++++++++++++++++++++++++++++++ stubs/dirhash/dirhash/cli.pyi | 5 ++ 4 files changed, 101 insertions(+) create mode 100644 stubs/dirhash/METADATA.toml create mode 100644 stubs/dirhash/dirhash/__init__.pyi create mode 100644 stubs/dirhash/dirhash/cli.pyi diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 30aa8ff800cd..c68a490f7b3b 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -39,6 +39,7 @@ "stubs/corus", "stubs/dateparser", "stubs/defusedxml", + "stubs/dirhash", "stubs/docker", "stubs/docutils", "stubs/Flask-SocketIO", diff --git a/stubs/dirhash/METADATA.toml b/stubs/dirhash/METADATA.toml new file mode 100644 index 000000000000..a39d6c7bc868 --- /dev/null +++ b/stubs/dirhash/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.5.*" +upstream_repository = "https://github.com/andhus/dirhash-python" diff --git a/stubs/dirhash/dirhash/__init__.pyi b/stubs/dirhash/dirhash/__init__.pyi new file mode 100644 index 000000000000..5a24f69dc17c --- /dev/null +++ b/stubs/dirhash/dirhash/__init__.pyi @@ -0,0 +1,93 @@ +from _typeshed import Incomplete +from collections.abc import Generator, Iterable +from os import PathLike +from typing import TypeVar +from typing_extensions import TypeAlias + +_DirNode: TypeAlias = Incomplete # scantree.DirNode +_RecursionPath: TypeAlias = Incomplete # scantree.RecursionPath +_RP = TypeVar("_RP", bound=_RecursionPath) + +__all__ = [ + "__version__", + "algorithms_guaranteed", + "algorithms_available", + "dirhash", + "dirhash_impl", + "included_paths", + "Filter", + "get_match_patterns", + "Protocol", +] + +__version__: str +algorithms_guaranteed: set[str] +algorithms_available: set[str] + +def dirhash( + directory: str | PathLike[str], + algorithm: str, + match: Iterable[str] = ("*",), + ignore: Iterable[str] | None = None, + linked_dirs: bool = True, + linked_files: bool = True, + empty_dirs: bool = False, + entry_properties: Iterable[str] = ("name", "data"), + allow_cyclic_links: bool = False, + chunk_size: int = 1048576, + jobs: int = 1, +) -> str: ... +def dirhash_impl( + directory: str | PathLike[str], + algorithm: str, + filter_: Filter | None = None, + protocol: Protocol | None = None, + chunk_size: int = 1048576, + jobs: int = 1, +) -> str: ... +def included_paths( + directory: str | PathLike[str], + match: Iterable[str] = ("*",), + ignore: Iterable[str] | None = None, + linked_dirs: bool = True, + linked_files: bool = True, + empty_dirs: bool = False, + allow_cyclic_links: bool = False, +) -> list[str]: ... 
+ +class Filter: + linked_dirs: bool + linked_files: bool + empty_dirs: bool + + def __init__( + self, + match_patterns: Iterable[str] | None = None, + linked_dirs: bool = True, + linked_files: bool = True, + empty_dirs: bool = False, + ) -> None: ... + @property + def match_patterns(self) -> tuple[str, ...]: ... + def include(self, recursion_path: _RecursionPath) -> bool: ... + def match_file(self, filepath: str | PathLike[str]) -> bool: ... + def __call__(self, paths: Iterable[_RP]) -> Generator[_RP, None, None]: ... + +def get_match_patterns( + match: Iterable[str] | None = None, + ignore: Iterable[str] | None = None, + ignore_extensions: Iterable[str] | None = None, + ignore_hidden: bool = False, +) -> list[str]: ... + +class Protocol: + class EntryProperties: + NAME: str + DATA: str + IS_LINK: str + options: set[str] + + entry_properties: Iterable[str] + allow_cyclic_links: bool + def __init__(self, entry_properties: Iterable[str] = ("name", "data"), allow_cyclic_links: bool = False) -> None: ... + def get_descriptor(self, dir_node: _DirNode) -> str: ... diff --git a/stubs/dirhash/dirhash/cli.pyi b/stubs/dirhash/dirhash/cli.pyi new file mode 100644 index 000000000000..b8229142d858 --- /dev/null +++ b/stubs/dirhash/dirhash/cli.pyi @@ -0,0 +1,5 @@ +from collections.abc import Sequence +from typing import Any + +def main() -> None: ... +def get_kwargs(args: Sequence[str]) -> dict[str, Any]: ... # value depends on the key From 9fa4fd2c5a206499128c58c096b47fc4e7fc3207 Mon Sep 17 00:00:00 2001 From: Lucas Hoffmann Date: Thu, 27 Feb 2025 14:08:49 +0100 Subject: [PATCH 017/388] Fix constructor args for two vobject classes (#13360) --- stubs/vobject/vobject/vcard.pyi | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/stubs/vobject/vobject/vcard.pyi b/stubs/vobject/vobject/vcard.pyi index e5ef420bc4a1..803acb03e4d7 100644 --- a/stubs/vobject/vobject/vcard.pyi +++ b/stubs/vobject/vobject/vcard.pyi @@ -8,7 +8,14 @@ class Name: additional: Incomplete prefix: Incomplete suffix: Incomplete - def __init__(self, family: str = "", given: str = "", additional: str = "", prefix: str = "", suffix: str = "") -> None: ... + def __init__( + self, + family: str | list[str] = "", + given: str | list[str] = "", + additional: str | list[str] = "", + prefix: str | list[str] = "", + suffix: str | list[str] = "", + ) -> None: ... @staticmethod def toString(val): ... def __eq__(self, other): ... @@ -23,13 +30,13 @@ class Address: country: Incomplete def __init__( self, - street: str = "", - city: str = "", - region: str = "", - code: str = "", - country: str = "", - box: str = "", - extended: str = "", + street: str | list[str] = "", + city: str | list[str] = "", + region: str | list[str] = "", + code: str | list[str] = "", + country: str | list[str] = "", + box: str | list[str] = "", + extended: str | list[str] = "", ) -> None: ... @staticmethod def toString(val, join_char: str = "\n"): ... 
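The widened Name and Address signatures above accept either a single string or a list of strings for every component. A minimal sketch of both call styles, assuming the vobject package is installed (the sample values are made up):

from vobject.vcard import Address, Name

# One string per component, as before.
simple = Name(family="Simpson", given="Homer")

# Lists per component are what the loosened annotations now admit, e.g. several
# given names or a multi-line street.
full = Name(family="Simpson", given=["Homer", "Jay"], prefix="Mr.")
home = Address(street=["742 Evergreen Terrace", "Apt. 1"], city="Springfield", country="USA")

print(full)
print(home)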
From 66a6f1cd61b29829eeb1805ca506e60ade1eae1b Mon Sep 17 00:00:00 2001 From: Matthew Bradbury Date: Thu, 27 Feb 2025 13:44:03 +0000 Subject: [PATCH 018/388] tqdm: Improve wrapattr (#13361) --- stubs/tqdm/tqdm/std.pyi | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/stubs/tqdm/tqdm/std.pyi b/stubs/tqdm/tqdm/std.pyi index 297b01784d10..8ffbb0eaa60e 100644 --- a/stubs/tqdm/tqdm/std.pyi +++ b/stubs/tqdm/tqdm/std.pyi @@ -1,5 +1,5 @@ import contextlib -from _typeshed import Incomplete, SupportsWrite +from _typeshed import Incomplete, SupportsRead, SupportsWrite from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping from types import TracebackType from typing import Any, ClassVar, Generic, Literal, NoReturn, TypeVar, overload @@ -30,6 +30,7 @@ class TqdmDeprecationWarning(TqdmWarning, DeprecationWarning): ... class TqdmMonitorWarning(TqdmWarning, RuntimeWarning): ... _T = TypeVar("_T") +_U = TypeVar("_U") class tqdm(Comparable, Generic[_T]): monitor_interval: ClassVar[int] @@ -222,10 +223,16 @@ class tqdm(Comparable, Generic[_T]): @property def format_dict(self) -> MutableMapping[str, Any]: ... def display(self, msg: str | None = None, pos: int | None = None) -> None: ... + @overload + @classmethod + def wrapattr( + cls, stream: SupportsRead[_U], method: Literal["read"], total: float | None = None, bytes: bool = True, **tqdm_kwargs + ) -> contextlib._GeneratorContextManager[SupportsRead[_U]]: ... + @overload @classmethod def wrapattr( - cls, stream, method: Literal["read", "write"], total: float | None = None, bytes: bool | None = True, **tqdm_kwargs - ) -> contextlib._GeneratorContextManager[Incomplete]: ... + cls, stream: SupportsWrite[_U], method: Literal["write"], total: float | None = None, bytes: bool = True, **tqdm_kwargs + ) -> contextlib._GeneratorContextManager[SupportsWrite[_U]]: ... @overload def trange( From d5acce23e88922d7a303baa05a3a1f3a813fd2a7 Mon Sep 17 00:00:00 2001 From: Stephen Morton Date: Thu, 27 Feb 2025 05:44:46 -0800 Subject: [PATCH 019/388] Complete `importlib.readers` (#13356) --- pyrightconfig.stricter.json | 1 - stdlib/importlib/readers.pyi | 14 +++++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index c68a490f7b3b..bc4da3eae855 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -10,7 +10,6 @@ "**/@tests/test_cases", "stdlib/distutils/command", "stdlib/distutils/dist.pyi", - "stdlib/importlib/readers.pyi", "stdlib/lib2to3/fixes/*.pyi", "stdlib/numbers.pyi", "stdlib/optparse.pyi", diff --git a/stdlib/importlib/readers.pyi b/stdlib/importlib/readers.pyi index 41d7af966d58..ceb3e731e7a5 100644 --- a/stdlib/importlib/readers.pyi +++ b/stdlib/importlib/readers.pyi @@ -5,12 +5,16 @@ import pathlib import sys import zipfile -from _typeshed import Incomplete, StrPath +from _typeshed import StrPath from collections.abc import Iterable, Iterator from io import BufferedReader from typing import Literal, NoReturn, TypeVar from typing_extensions import Never +if sys.version_info >= (3, 10): + from importlib._bootstrap_external import FileLoader + from zipimport import zipimporter + if sys.version_info >= (3, 11): import importlib.resources.abc as abc else: @@ -27,14 +31,14 @@ if sys.version_info >= (3, 10): class FileReader(abc.TraversableResources): path: pathlib.Path - def __init__(self, loader) -> None: ... + def __init__(self, loader: FileLoader) -> None: ... 
def resource_path(self, resource: StrPath) -> str: ... def files(self) -> pathlib.Path: ... class ZipReader(abc.TraversableResources): prefix: str - archive: Incomplete - def __init__(self, loader, module: str) -> None: ... + archive: str + def __init__(self, loader: zipimporter, module: str) -> None: ... def open_resource(self, resource: str) -> BufferedReader: ... def is_resource(self, path: StrPath) -> bool: ... def files(self) -> zipfile.Path: ... @@ -63,6 +67,6 @@ if sys.version_info >= (3, 10): class NamespaceReader(abc.TraversableResources): path: MultiplexedPath - def __init__(self, namespace_path) -> None: ... + def __init__(self, namespace_path: Iterable[str]) -> None: ... def resource_path(self, resource: str) -> str: ... def files(self) -> MultiplexedPath: ... From 2a7e133410fef7b706d2aca4ae1e6d9f276da11c Mon Sep 17 00:00:00 2001 From: Pierre Chapuis Date: Thu, 27 Feb 2025 15:31:56 +0100 Subject: [PATCH 020/388] Introduce the _HashObject protocol (#13553) This protocol corresponds to what is called "hash object" in the hashlib documentation. In particular, it includes the non-OpenSSL BLAKE2 implementations which do not inherit HASH. --- stdlib/_hashlib.pyi | 17 +++++++++++++++-- stdlib/hashlib.pyi | 3 ++- stdlib/hmac.pyi | 4 ++-- 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/stdlib/_hashlib.pyi b/stdlib/_hashlib.pyi index 5cf85e4cacaa..e91f2cdb331c 100644 --- a/stdlib/_hashlib.pyi +++ b/stdlib/_hashlib.pyi @@ -2,13 +2,26 @@ import sys from _typeshed import ReadableBuffer from collections.abc import Callable from types import ModuleType -from typing import AnyStr, final, overload +from typing import AnyStr, Protocol, final, overload, type_check_only from typing_extensions import Self, TypeAlias -_DigestMod: TypeAlias = str | Callable[[], HASH] | ModuleType | None +_DigestMod: TypeAlias = str | Callable[[], _HashObject] | ModuleType | None openssl_md_meth_names: frozenset[str] +@type_check_only +class _HashObject(Protocol): + @property + def digest_size(self) -> int: ... + @property + def block_size(self) -> int: ... + @property + def name(self) -> str: ... + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, obj: ReadableBuffer, /) -> None: ... + class HASH: @property def digest_size(self) -> int: ... diff --git a/stdlib/hashlib.pyi b/stdlib/hashlib.pyi index db6f8635054d..84666a7fa725 100644 --- a/stdlib/hashlib.pyi +++ b/stdlib/hashlib.pyi @@ -2,6 +2,7 @@ import sys from _blake2 import blake2b as blake2b, blake2s as blake2s from _hashlib import ( HASH, + _HashObject, openssl_md5 as md5, openssl_sha1 as sha1, openssl_sha224 as sha224, @@ -97,7 +98,7 @@ if sys.version_info >= (3, 11): def readable(self) -> bool: ... def file_digest( - fileobj: _BytesIOLike | _FileDigestFileObj, digest: str | Callable[[], HASH], /, *, _bufsize: int = 262144 + fileobj: _BytesIOLike | _FileDigestFileObj, digest: str | Callable[[], _HashObject], /, *, _bufsize: int = 262144 ) -> HASH: ... 
# Legacy typing-only alias diff --git a/stdlib/hmac.pyi b/stdlib/hmac.pyi index efd649ec39a8..dfb574c177cd 100644 --- a/stdlib/hmac.pyi +++ b/stdlib/hmac.pyi @@ -1,12 +1,12 @@ import sys -from _hashlib import HASH as _HashlibHash +from _hashlib import _HashObject from _typeshed import ReadableBuffer, SizedBuffer from collections.abc import Callable from types import ModuleType from typing import AnyStr, overload from typing_extensions import TypeAlias -_DigestMod: TypeAlias = str | Callable[[], _HashlibHash] | ModuleType +_DigestMod: TypeAlias = str | Callable[[], _HashObject] | ModuleType trans_5C: bytes trans_36: bytes From 878ddaf998ea5d302e581ed0c9110a38a693527c Mon Sep 17 00:00:00 2001 From: Toshiki Kataoka Date: Fri, 28 Feb 2025 18:52:08 +0900 Subject: [PATCH 021/388] fix yield type of `tqdm.as_completed` (#13557) --- stubs/tqdm/tqdm/asyncio.pyi | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/stubs/tqdm/tqdm/asyncio.pyi b/stubs/tqdm/tqdm/asyncio.pyi index ca28b90f0df7..ec11fe561c69 100644 --- a/stubs/tqdm/tqdm/asyncio.pyi +++ b/stubs/tqdm/tqdm/asyncio.pyi @@ -1,5 +1,6 @@ -from _typeshed import Incomplete, SupportsWrite -from collections.abc import AsyncIterator, Awaitable, Callable, Generator, Iterable, Iterator, Mapping +from _typeshed import SupportsWrite +from asyncio import Future +from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Iterator, Mapping from typing import NoReturn, TypeVar, overload from typing_extensions import Self @@ -48,7 +49,7 @@ class tqdm_asyncio(std_tqdm[_T]): nrows: int | None = ..., colour: str | None = ..., delay: float | None = ..., - ) -> Generator[Incomplete, Incomplete, None]: ... + ) -> Iterator[Future[_T]]: ... @classmethod async def gather( cls, From d2bcdb5ba206453e95873bdd100a11f8221386c2 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Fri, 28 Feb 2025 09:56:19 +0000 Subject: [PATCH 022/388] Use stricter pyright settings on `dirhash` in CI (#13558) --- pyrightconfig.stricter.json | 1 - 1 file changed, 1 deletion(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index bc4da3eae855..9f20ff2c9c93 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -38,7 +38,6 @@ "stubs/corus", "stubs/dateparser", "stubs/defusedxml", - "stubs/dirhash", "stubs/docker", "stubs/docutils", "stubs/Flask-SocketIO", From 19f53f6f12e756a582c197935ad91ba25d9cac38 Mon Sep 17 00:00:00 2001 From: Colin Watson Date: Fri, 28 Feb 2025 11:15:07 +0000 Subject: [PATCH 023/388] Weaken return type of Path.{glob,rglob} in 3.13 (#13223) Since https://github.com/python/cpython/pull/117589 (at least), `Path.glob` and `Path.rglob` return an `Iterator` rather than a `Generator`. --- stdlib/pathlib.pyi | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/stdlib/pathlib.pyi b/stdlib/pathlib.pyi index bdca375f626d..e2a816ae1ca4 100644 --- a/stdlib/pathlib.pyi +++ b/stdlib/pathlib.pyi @@ -129,12 +129,10 @@ class Path(PurePath): def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... if sys.version_info >= (3, 13): - def glob( - self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False - ) -> Generator[Self, None, None]: ... + def glob(self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False) -> Iterator[Self]: ... def rglob( self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False - ) -> Generator[Self, None, None]: ... + ) -> Iterator[Self]: ... 
elif sys.version_info >= (3, 12): def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... From 1809e67cbb29d99dd95dac0dc88ef57e2d23b42b Mon Sep 17 00:00:00 2001 From: Stephen Morton Date: Fri, 28 Feb 2025 03:18:56 -0800 Subject: [PATCH 024/388] test suite characterizing `dict.get()` (#13225) --- .../@tests/test_cases/builtins/check_dict.py | 92 ++++++++++++++++++- 1 file changed, 91 insertions(+), 1 deletion(-) diff --git a/stdlib/@tests/test_cases/builtins/check_dict.py b/stdlib/@tests/test_cases/builtins/check_dict.py index 96c60e779038..dd4569eccbe5 100644 --- a/stdlib/@tests/test_cases/builtins/check_dict.py +++ b/stdlib/@tests/test_cases/builtins/check_dict.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Dict, Generic, Iterable, TypeVar +from typing import Any, Dict, Generic, Iterable, TypeVar, Union from typing_extensions import assert_type # These do follow `__init__` overloads order: @@ -57,3 +57,93 @@ def test_iterable_tuple_overload(x: Iterable[tuple[int, str]]) -> dict[int, str] dict(["foo", "bar", "baz"]) # type: ignore dict([b"foo", b"bar", b"baz"]) # type: ignore + +# Exploring corner cases of dict.get() +d_any: dict[str, Any] = {} +d_str: dict[str, str] = {} +any_value: Any = None +str_value = "value" +int_value = 1 + +assert_type(d_any["key"], Any) +assert_type(d_any.get("key"), Union[Any, None]) +assert_type(d_any.get("key", None), Any) +assert_type(d_any.get("key", any_value), Any) +assert_type(d_any.get("key", str_value), Any) +assert_type(d_any.get("key", int_value), Any) + +assert_type(d_str["key"], str) +assert_type(d_str.get("key"), Union[str, None]) +assert_type(d_str.get("key", None), Union[str, None]) +# Pyright has str instead of Any here +assert_type(d_str.get("key", any_value), Any) # pyright: ignore[reportAssertTypeFailure] +assert_type(d_str.get("key", str_value), str) +assert_type(d_str.get("key", int_value), Union[str, int]) + +# Now with context! +result: str +result = d_any["key"] +result = d_any.get("key") # type: ignore[assignment] +result = d_any.get("key", None) +result = d_any.get("key", any_value) +result = d_any.get("key", str_value) +result = d_any.get("key", int_value) + +result = d_str["key"] +result = d_str.get("key") # type: ignore[assignment] +result = d_str.get("key", None) # type: ignore[arg-type] +result = d_str.get("key", any_value) +result = d_str.get("key", str_value) +result = d_str.get("key", int_value) # type: ignore[arg-type] + + +# Return values also make things weird + +# Pyright doesn't have a version of no-any-return, +# and mypy doesn't have a type: ignore that pyright will ignore. 
+# def test1() -> str: +# return d_any["key"] # mypy: ignore[no-any-return] + + +def test2() -> str: + return d_any.get("key") # type: ignore[return-value] + + +# def test3() -> str: +# return d_any.get("key", None) # mypy: ignore[no-any-return] +# +# +# def test4() -> str: +# return d_any.get("key", any_value) # mypy: ignore[no-any-return] +# +# +# def test5() -> str: +# return d_any.get("key", str_value) # mypy: ignore[no-any-return] +# +# +# def test6() -> str: +# return d_any.get("key", int_value) # mypy: ignore[no-any-return] + + +def test7() -> str: + return d_str["key"] + + +def test8() -> str: + return d_str.get("key") # type: ignore[return-value] + + +def test9() -> str: + return d_str.get("key", None) # type: ignore[arg-type] + + +def test10() -> str: + return d_str.get("key", any_value) + + +def test11() -> str: + return d_str.get("key", str_value) + + +def test12() -> str: + return d_str.get("key", int_value) # type: ignore[arg-type] From c783ab5af6b9d762c3589b0414487e3b996977b1 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 28 Feb 2025 12:33:10 +0100 Subject: [PATCH 025/388] [configparser] Fix missing fallback argument in SectionProxy.get (#13559) Closes: #13556 --- stdlib/@tests/test_cases/check_configparser.py | 5 +++++ stdlib/configparser.pyi | 11 +++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 stdlib/@tests/test_cases/check_configparser.py diff --git a/stdlib/@tests/test_cases/check_configparser.py b/stdlib/@tests/test_cases/check_configparser.py new file mode 100644 index 000000000000..28c355f385ff --- /dev/null +++ b/stdlib/@tests/test_cases/check_configparser.py @@ -0,0 +1,5 @@ +from configparser import RawConfigParser, SectionProxy +from typing_extensions import assert_type + +sp = SectionProxy(RawConfigParser(), "") +assert_type(sp.get("foo", fallback="hi"), str) diff --git a/stdlib/configparser.pyi b/stdlib/configparser.pyi index bc3e22771ca5..8996c85d9a53 100644 --- a/stdlib/configparser.pyi +++ b/stdlib/configparser.pyi @@ -320,7 +320,14 @@ class SectionProxy(MutableMapping[str, str]): # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] def get( - self, option: str, *, raw: bool = False, vars: _Section | None = None, _impl: Any | None = None, **kwargs: Any + self, + option: str, + fallback: None = None, + *, + raw: bool = False, + vars: _Section | None = None, + _impl: Any | None = None, + **kwargs: Any, # passed to the underlying parser's get() method ) -> str | None: ... @overload def get( @@ -331,7 +338,7 @@ class SectionProxy(MutableMapping[str, str]): raw: bool = False, vars: _Section | None = None, _impl: Any | None = None, - **kwargs: Any, + **kwargs: Any, # passed to the underlying parser's get() method ) -> str | _T: ... 
# These are partially-applied version of the methods with the same names in # RawConfigParser; the stubs should be kept updated together From 3107cf0cf854b60508c24edcd30df3b4e5c17c5e Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 28 Feb 2025 07:23:11 -0500 Subject: [PATCH 026/388] Fix `pyinstaller.utils.hooks.collect_entry_point` return type (#13111) --- stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi b/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi index 6d331d9140dd..17f0ddf5c4ac 100644 --- a/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi +++ b/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi @@ -61,7 +61,7 @@ def collect_all( include_datas: Iterable[str] | None = None, on_error: Literal["ignore", "warn once", "warn", "raise"] = "warn once", ) -> tuple[list[tuple[str, str]], list[tuple[str, str]], list[str]]: ... -def collect_entry_point(name: str) -> tuple[tuple[str, str], list[str]]: ... +def collect_entry_point(name: str) -> tuple[list[tuple[str, str]], list[str]]: ... def get_hook_config(hook_api: PostGraphAPI, module_name: str, key: str) -> None: ... def include_or_exclude_file( filename: StrOrBytesPath, From e2a40d4c3ce843d8d883be7df67ba385f579ac8d Mon Sep 17 00:00:00 2001 From: Randolf Scholz Date: Fri, 28 Feb 2025 13:33:07 +0100 Subject: [PATCH 027/388] `builtins.slice`: more precise `__new__` overloads and defaults for `StopT` and `StepT`. (#13008) --- .../@tests/test_cases/builtins/check_slice.py | 251 ++++++++++++++++++ stdlib/builtins.pyi | 44 ++- 2 files changed, 281 insertions(+), 14 deletions(-) create mode 100644 stdlib/@tests/test_cases/builtins/check_slice.py diff --git a/stdlib/@tests/test_cases/builtins/check_slice.py b/stdlib/@tests/test_cases/builtins/check_slice.py new file mode 100644 index 000000000000..596d8c63c1c5 --- /dev/null +++ b/stdlib/@tests/test_cases/builtins/check_slice.py @@ -0,0 +1,251 @@ +""" +Assuming X, Y and Z are types other than None, the following rules apply to the slice type: + +- The type hint `slice` should be compatible with all slices, including: + - `slice(None)`, `slice(None, None)` and `slice(None, None, None)`. (⟿ `slice[?, ?, ?]`) +- The type hint `slice[T]` should be compatible with: + - `slice(None)`, `slice(None, None)` and `slice(None, None, None)` (⟿ `slice[?, ?, ?]`) + - `slice(t)`, `slice(None, t)` and `slice(None, t, None)`. (⟿ `slice[?, T, ?]`) + - `slice(t, None)` and `slice(t, None, None)`. (⟿ `slice[T, ?, ?]`) + - `slice(t, t)` and `slice(t, t, None)`. (⟿ `slice[T, T, ?]`) +- The type hint `slice[X, Y]` should be compatible with: + - `slice(None)`, `slice(None, None)` and `slice(None, None, None)` (⟿ `slice[?, ?, ?]`) + - `slice(y)`, `slice(None, y)` and `slice(None, y, None)`. (⟿ `slice[?, Y, ?]`) + - `slice(x, None)` and `slice(x, None, None)` (⟿ `slice[X, ?, ?]`) + - `slice(x, y)` and `slice(x, y, None)`. (⟿ `slice[X, Y, ?]`) +- The type hint `slice[X, Y, Z]` should be compatible with: + - `slice(None)`, `slice(None, None)` and `slice(None, None, None)`. (⟿ `slice[?, ?, ?]`) + - `slice(y)`, `slice(None, y)` and `slice(None, y, None)`. (⟿ `slice[?, Y, ?]`) + - `slice(x, None)` and `slice(x, None, None)` (⟿ `slice[X, ?, ?]`) + - `slice(x, y)` and `slice(x, y, None)`. 
(⟿ `slice[X, Y, ?]`) + - `slice(None, None, z)` (⟿ `slice[?, ?, Z]`) + - `slice(None, y, z)` (⟿ `slice[?, Y, Z]`) + - `slice(x, None, z)` (⟿ `slice[X, ?, Z]`) + - `slice(x, y, z)` (⟿ `slice[X, Y, Z]`) + +Consistency criterion: Assuming now X, Y, Z can potentially be None, the following rules apply: + +- `slice(x)` must be compatible with `slice[None, X, None]`, even if X is None. +- `slice(x, y)` must be compatible with `slice[X,Y,None]`, even if X is None or Y is None. +- `slice(x, y, z)` must be compatible with `slice[X, Y, Z]`, even if X, Y, or Z are `None`. +""" + +from __future__ import annotations + +from datetime import date, datetime as DT, timedelta as TD +from typing import Any, SupportsIndex, cast +from typing_extensions import assert_type + +# region Tests for slice constructor overloads ----------------------------------------- +assert_type(slice(None), "slice[Any, Any, Any]") +assert_type(slice(1234), "slice[Any, int, Any]") + +assert_type(slice(None, None), "slice[Any, Any, Any]") +assert_type(slice(None, 5678), "slice[Any, int, Any]") +assert_type(slice(1234, None), "slice[int, Any, Any]") +assert_type(slice(1234, 5678), "slice[int, int, Any]") + +assert_type(slice(None, None, None), "slice[Any, Any, Any]") +assert_type(slice(None, 5678, None), "slice[Any, int, Any]") +assert_type(slice(1234, None, None), "slice[int, Any, Any]") +assert_type(slice(1234, 5678, None), "slice[int, int, Any]") +assert_type(slice(1234, 5678, 9012), "slice[int, int, int]") +# endregion Tests for slice constructor overloads -------------------------------------- + +# region Test parameter defaults for slice constructor --------------------------------- +# Note: need to cast, because pyright specializes regardless of type annotations +slc1 = cast("slice[SupportsIndex | None]", slice(1)) +slc2 = cast("slice[int | None, int | None]", slice(1, 2)) +fake_key_val = cast("slice[str, int]", slice("1", 2)) +assert_type(slc1, "slice[SupportsIndex | None, SupportsIndex | None, SupportsIndex | None]") +assert_type(slc2, "slice[int | None, int | None, int | None]") +assert_type(fake_key_val, "slice[str, int, str | int]") +# endregion Test parameter defaults for slice constructor ------------------------------ + +# region Tests for slice properties ---------------------------------------------------- +# Note: if an argument is not None, we should get precisely the same type back +assert_type(slice(1234).stop, int) + +assert_type(slice(1234, None).start, int) +assert_type(slice(None, 5678).stop, int) + +assert_type(slice(1234, None, None).start, int) +assert_type(slice(None, 5678, None).stop, int) +assert_type(slice(None, None, 9012).step, int) +# endregion Tests for slice properties ------------------------------------------------- + + +# region Test for slice assignments ---------------------------------------------------- +# exhaustively test all possible assignments: miss (X), None (N), int (I), and str (S) +rXNX: slice = slice(None) +rXIX: slice = slice(1234) +rXSX: slice = slice("70") + +rNNX: slice = slice(None, None) +rINX: slice = slice(1234, None) +rSNX: slice = slice("70", None) + +rNIX: slice = slice(None, 5678) +rIIX: slice = slice(1234, 5678) +rSIX: slice = slice("70", 9012) + +rNSX: slice = slice(None, "71") +rISX: slice = slice(1234, "71") +rSSX: slice = slice("70", "71") + +rNNN: slice = slice(None, None, None) +rINN: slice = slice(1234, None, None) +rSNN: slice = slice("70", None, None) +rNIN: slice = slice(None, 5678, None) +rIIN: slice = slice(1234, 5678, None) +rSIN: slice = slice("70", 
5678, None) +rNSN: slice = slice(None, "71", None) +rISN: slice = slice(1234, "71", None) +rSSN: slice = slice("70", "71", None) + +rNNI: slice = slice(None, None, 9012) +rINI: slice = slice(1234, None, 9012) +rSNI: slice = slice("70", None, 9012) +rNII: slice = slice(None, 5678, 9012) +rIII: slice = slice(1234, 5678, 9012) +rSII: slice = slice("70", 5678, 9012) +rNSI: slice = slice(None, "71", 9012) +rISI: slice = slice(1234, "71", 9012) +rSSI: slice = slice("70", "71", 9012) + +rNNS: slice = slice(None, None, "1d") +rINS: slice = slice(1234, None, "1d") +rSNS: slice = slice("70", None, "1d") +rNIS: slice = slice(None, 5678, "1d") +rIIS: slice = slice(1234, 5678, "1d") +rSIS: slice = slice("70", 5678, "1d") +rNSS: slice = slice(None, "71", "1d") +rISS: slice = slice(1234, "71", "1d") +rSSS: slice = slice("70", "71", "1d") +# endregion Test for slice assignments ------------------------------------------------- + + +# region Tests for slice[T] assignments ------------------------------------------------ +sXNX: "slice[int]" = slice(None) +sXIX: "slice[int]" = slice(1234) + +sNNX: "slice[int]" = slice(None, None) +sNIX: "slice[int]" = slice(None, 5678) +sINX: "slice[int]" = slice(1234, None) +sIIX: "slice[int]" = slice(1234, 5678) + +sNNN: "slice[int]" = slice(None, None, None) +sNIN: "slice[int]" = slice(None, 5678, None) +sNNS: "slice[int]" = slice(None, None, 9012) +sINN: "slice[int]" = slice(1234, None, None) +sINS: "slice[int]" = slice(1234, None, 9012) +sIIN: "slice[int]" = slice(1234, 5678, None) +sIIS: "slice[int]" = slice(1234, 5678, 9012) +# endregion Tests for slice[T] assignments --------------------------------------------- + + +# region Tests for slice[X, Y] assignments --------------------------------------------- +# Note: start=int is illegal and hence we add an explicit "type: ignore" comment. 
+tXNX: "slice[None, int]" = slice(None) # since slice(None) is slice[Any, Any, Any] +tXIX: "slice[None, int]" = slice(1234) + +tNNX: "slice[None, int]" = slice(None, None) +tNIX: "slice[None, int]" = slice(None, 5678) +tINX: "slice[None, int]" = slice(1234, None) # type: ignore +tIIX: "slice[None, int]" = slice(1234, 5678) # type: ignore + +tNNN: "slice[None, int]" = slice(None, None, None) +tNIN: "slice[None, int]" = slice(None, 5678, None) +tINN: "slice[None, int]" = slice(1234, None, None) # type: ignore +tIIN: "slice[None, int]" = slice(1234, 5678, None) # type: ignore +tNNS: "slice[None, int]" = slice(None, None, 9012) +tINS: "slice[None, int]" = slice(None, 5678, 9012) +tNIS: "slice[None, int]" = slice(1234, None, 9012) # type: ignore +tIIS: "slice[None, int]" = slice(1234, 5678, 9012) # type: ignore +# endregion Tests for slice[X, Y] assignments ------------------------------------------ + + +# region Tests for slice[X, Y, Z] assignments ------------------------------------------ +uXNX: "slice[int, int, int]" = slice(None) +uXIX: "slice[int, int, int]" = slice(1234) + +uNNX: "slice[int, int, int]" = slice(None, None) +uNIX: "slice[int, int, int]" = slice(None, 5678) +uINX: "slice[int, int, int]" = slice(1234, None) +uIIX: "slice[int, int, int]" = slice(1234, 5678) + +uNNN: "slice[int, int, int]" = slice(None, None, None) +uNNI: "slice[int, int, int]" = slice(None, None, 9012) +uNIN: "slice[int, int, int]" = slice(None, 5678, None) +uNII: "slice[int, int, int]" = slice(None, 5678, 9012) +uINN: "slice[int, int, int]" = slice(1234, None, None) +uINI: "slice[int, int, int]" = slice(1234, None, 9012) +uIIN: "slice[int, int, int]" = slice(1234, 5678, None) +uIII: "slice[int, int, int]" = slice(1234, 5678, 9012) +# endregion Tests for slice[X, Y, Z] assignments --------------------------------------- + + +# region Test for slice consistency criterion ------------------------------------------ +year = date(2021, 1, 1) +vXNX: "slice[None, None, None]" = slice(None) +vXIX: "slice[None, date, None]" = slice(year) + +vNNX: "slice[None, None, None]" = slice(None, None) +vNIX: "slice[None, date, None]" = slice(None, year) +vINX: "slice[date, None, None]" = slice(year, None) +vIIX: "slice[date, date, None]" = slice(year, year) + +vNNN: "slice[None, None, None]" = slice(None, None, None) +vNIN: "slice[None, date, None]" = slice(None, year, None) +vINN: "slice[date, None, None]" = slice(year, None, None) +vIIN: "slice[date, date, None]" = slice(year, year, None) +vNNI: "slice[None, None, str]" = slice(None, None, "1d") +vNII: "slice[None, date, str]" = slice(None, year, "1d") +vINI: "slice[date, None, str]" = slice(year, None, "1d") +vIII: "slice[date, date, str]" = slice(year, year, "1d") +# endregion Test for slice consistency criterion --------------------------------------- + + +# region Integration tests for slices with datetimes ----------------------------------- +class TimeSeries: # similar to pandas.Series with datetime index + def __getitem__(self, key: "slice[DT | str | None, DT | str | None]") -> Any: + """Subsample the time series at the given dates.""" + ... + + +class TimeSeriesInterpolator: # similar to pandas.Series with datetime index + def __getitem__(self, key: "slice[DT, DT, TD | None]") -> Any: + """Subsample the time series at the given dates.""" + ... 
+ + +# tests slices as an argument +start = DT(1970, 1, 1) +stop = DT(1971, 1, 10) +step = TD(days=1) +# see: https://pandas.pydata.org/docs/user_guide/timeseries.html#partial-string-indexing +# FIXME: https://github.com/python/mypy/issues/2410 (use literal slices) +series = TimeSeries() +_ = series[slice(None, "1970-01-10")] +_ = series[slice("1970-01-01", None)] +_ = series[slice("1970-01-01", "1971-01-10")] +_ = series[slice(None, stop)] +_ = series[slice(start, None)] +_ = series[slice(start, stop)] +_ = series[slice(None)] + +model = TimeSeriesInterpolator() +_ = model[slice(start, stop)] +_ = model[slice(start, stop, step)] +_ = model[slice(start, stop, None)] + + +# test slices as a return type +def foo(flag: bool, value: DT) -> "slice[DT, None] | slice[None, DT]": + if flag: + return slice(value, None) # slice[DT, DT|Any, Any] incompatible + else: + return slice(None, value) # slice[DT|Any, DT, Any] incompatible + + +# endregion Integration tests for slices with datetimes -------------------------------- diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index 0a6dc57b05b8..25144609d518 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -10,7 +10,6 @@ from _typeshed import ( ConvertibleToFloat, ConvertibleToInt, FileDescriptorOrPath, - MaybeNone, OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, @@ -95,9 +94,14 @@ _SupportsAnextT = TypeVar("_SupportsAnextT", bound=SupportsAnext[Any], covariant _AwaitableT = TypeVar("_AwaitableT", bound=Awaitable[Any]) _AwaitableT_co = TypeVar("_AwaitableT_co", bound=Awaitable[Any], covariant=True) _P = ParamSpec("_P") -_StartT = TypeVar("_StartT", covariant=True, default=Any) -_StopT = TypeVar("_StopT", covariant=True, default=Any) -_StepT = TypeVar("_StepT", covariant=True, default=Any) + +# Type variables for slice +_StartT_co = TypeVar("_StartT_co", covariant=True, default=Any) # slice -> slice[Any, Any, Any] +_StopT_co = TypeVar("_StopT_co", covariant=True, default=_StartT_co) # slice[A] -> slice[A, A, A] +# NOTE: step could differ from start and stop, (e.g. datetime/timedelta)l +# the default (start|stop) is chosen to cater to the most common case of int/index slices. +# FIXME: https://github.com/python/typing/issues/213 (replace step=start|stop with step=start&stop) +_StepT_co = TypeVar("_StepT_co", covariant=True, default=_StartT_co | _StopT_co) # slice[A,B] -> slice[A, B, A|B] class object: __doc__: str | None @@ -940,23 +944,35 @@ class bool(int): def __invert__(self) -> int: ... @final -class slice(Generic[_StartT, _StopT, _StepT]): +class slice(Generic[_StartT_co, _StopT_co, _StepT_co]): @property - def start(self) -> _StartT: ... + def start(self) -> _StartT_co: ... @property - def step(self) -> _StepT: ... + def step(self) -> _StepT_co: ... @property - def stop(self) -> _StopT: ... - @overload - def __new__(cls, stop: int | None, /) -> slice[int | MaybeNone, int | MaybeNone, int | MaybeNone]: ... + def stop(self) -> _StopT_co: ... + # Note: __new__ overloads map `None` to `Any`, since users expect slice(x, None) + # to be compatible with slice(None, x). + # generic slice -------------------------------------------------------------------- @overload - def __new__( - cls, start: int | None, stop: int | None, step: int | None = None, / - ) -> slice[int | MaybeNone, int | MaybeNone, int | MaybeNone]: ... + def __new__(cls, start: None, stop: None = None, step: None = None, /) -> slice[Any, Any, Any]: ... 
+ # unary overloads ------------------------------------------------------------------ @overload def __new__(cls, stop: _T2, /) -> slice[Any, _T2, Any]: ... + # binary overloads ----------------------------------------------------------------- + @overload + def __new__(cls, start: _T1, stop: None, step: None = None, /) -> slice[_T1, Any, Any]: ... + @overload + def __new__(cls, start: None, stop: _T2, step: None = None, /) -> slice[Any, _T2, Any]: ... + @overload + def __new__(cls, start: _T1, stop: _T2, step: None = None, /) -> slice[_T1, _T2, Any]: ... + # ternary overloads ---------------------------------------------------------------- + @overload + def __new__(cls, start: None, stop: None, step: _T3, /) -> slice[Any, Any, _T3]: ... + @overload + def __new__(cls, start: _T1, stop: None, step: _T3, /) -> slice[_T1, Any, _T3]: ... @overload - def __new__(cls, start: _T1, stop: _T2, /) -> slice[_T1, _T2, Any]: ... + def __new__(cls, start: None, stop: _T2, step: _T3, /) -> slice[Any, _T2, _T3]: ... @overload def __new__(cls, start: _T1, stop: _T2, step: _T3, /) -> slice[_T1, _T2, _T3]: ... def __eq__(self, value: object, /) -> bool: ... From b69b909a642e3834466b23d10745ebba2c695b3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ho=C3=ABl=20Bagard?= <34478245+hoel-bagard@users.noreply.github.com> Date: Sat, 1 Mar 2025 03:20:14 +0900 Subject: [PATCH 028/388] `tensorflow`: Change `Tensor.__bool__` return from `NoReturn` to `bool` (#13562) --- stubs/tensorflow/tensorflow/__init__.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stubs/tensorflow/tensorflow/__init__.pyi b/stubs/tensorflow/tensorflow/__init__.pyi index 731979b41276..6ef9e1cb54f3 100644 --- a/stubs/tensorflow/tensorflow/__init__.pyi +++ b/stubs/tensorflow/tensorflow/__init__.pyi @@ -6,7 +6,7 @@ from collections.abc import Callable, Generator, Iterable, Iterator, Sequence from contextlib import contextmanager from enum import Enum from types import TracebackType -from typing import Any, Generic, Literal, NoReturn, TypeVar, overload +from typing import Any, Generic, Literal, TypeVar, overload from typing_extensions import ParamSpec, Self from google.protobuf.message import Message @@ -125,7 +125,7 @@ class Tensor: def __gt__(self, other: TensorCompatible, name: str | None = None) -> Tensor: ... def __le__(self, other: TensorCompatible, name: str | None = None) -> Tensor: ... def __lt__(self, other: TensorCompatible, name: str | None = None) -> Tensor: ... - def __bool__(self) -> NoReturn: ... + def __bool__(self) -> _bool: ... def __getitem__(self, slice_spec: Slice | tuple[Slice, ...]) -> Tensor: ... def __len__(self) -> int: ... # This only works for rank 0 tensors. From c9fd49865aaa179ea524d2e99f14c5e97865a449 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 28 Feb 2025 19:20:35 +0100 Subject: [PATCH 029/388] Fix conflicting imports (#13561) --- stubs/icalendar/icalendar/__init__.pyi | 12 ++++++------ stubs/requests/requests/__init__.pyi | 25 ++++++++++++------------- 2 files changed, 18 insertions(+), 19 deletions(-) diff --git a/stubs/icalendar/icalendar/__init__.pyi b/stubs/icalendar/icalendar/__init__.pyi index b4651f094bf6..0e862945c3e2 100644 --- a/stubs/icalendar/icalendar/__init__.pyi +++ b/stubs/icalendar/icalendar/__init__.pyi @@ -1,3 +1,4 @@ +from . 
import version as version_mod from .alarms import ( Alarms as Alarms, AlarmTime as AlarmTime, @@ -45,12 +46,6 @@ from .prop import ( vWeekday as vWeekday, ) from .timezone import use_pytz, use_zoneinfo -from .version import ( - __version__ as __version__, - __version_tuple__ as __version_tuple__, - version as version, - version_tuple as version_tuple, -) __all__ = [ "Calendar", @@ -102,3 +97,8 @@ __all__ = [ "IncompleteAlarmInformation", "LocalTimezoneMissing", ] + +__version__ = version_mod.__version__ +__version_tuple__ = version_mod.__version_tuple__ +version = version_mod.version +version_tuple = version_mod.version_tuple diff --git a/stubs/requests/requests/__init__.pyi b/stubs/requests/requests/__init__.pyi index 8d4acc1ad27a..199c59e9eea6 100644 --- a/stubs/requests/requests/__init__.pyi +++ b/stubs/requests/requests/__init__.pyi @@ -1,16 +1,4 @@ -from . import packages as packages, utils as utils -from .__version__ import ( - __author__ as __author__, - __author_email__ as __author_email__, - __build__ as __build__, - __cake__ as __cake__, - __copyright__ as __copyright__, - __description__ as __description__, - __license__ as __license__, - __title__ as __title__, - __url__ as __url__, - __version__ as __version__, -) +from . import __version__ as version_mod, packages as packages, utils as utils from .api import ( delete as delete, get as get, @@ -37,4 +25,15 @@ from .models import PreparedRequest as PreparedRequest, Request as Request, Resp from .sessions import Session as Session, session as session from .status_codes import codes as codes +__author__ = version_mod.__author__ +__author_email__ = version_mod.__author_email__ +__build__ = version_mod.__build__ +__cake__ = version_mod.__cake__ +__copyright__ = version_mod.__copyright__ +__description__ = version_mod.__description__ +__license__ = version_mod.__license__ +__title__ = version_mod.__title__ +__url__ = version_mod.__url__ +__version__ = version_mod.__version__ + def check_compatibility(urllib3_version: str, chardet_version: str | None, charset_normalizer_version: str | None) -> None: ... From 98eedfac21b7402bb4bcb3a5857b223d72429ca1 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 28 Feb 2025 23:42:05 +0100 Subject: [PATCH 030/388] [setuptools] Update to 75.8.2 (#13563) --- stubs/setuptools/METADATA.toml | 2 +- stubs/setuptools/setuptools/command/bdist_wheel.pyi | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index 2c4861bf93f4..45dd6c8a88ce 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,4 +1,4 @@ -version = "75.8.*" +version = "~=75.8.2" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ If using `setuptools >= 71.1` *only* for `pkg_resources`, diff --git a/stubs/setuptools/setuptools/command/bdist_wheel.pyi b/stubs/setuptools/setuptools/command/bdist_wheel.pyi index 08a298dfbbc2..d6538ab52d4c 100644 --- a/stubs/setuptools/setuptools/command/bdist_wheel.pyi +++ b/stubs/setuptools/setuptools/command/bdist_wheel.pyi @@ -4,7 +4,6 @@ from typing import ClassVar, Final, Literal from setuptools import Command -def safe_name(name: str) -> str: ... def safe_version(version: str) -> str: ... setuptools_major_version: Final[int] @@ -15,7 +14,6 @@ def python_tag() -> str: ... def get_platform(archive_root: str | None) -> str: ... def get_flag(var: str, fallback: bool, expected: bool = True, warn: bool = True) -> bool: ... def get_abi_tag() -> str | None: ... 
-def safer_name(name: str) -> str: ... def safer_version(version: str) -> str: ... class bdist_wheel(Command): From e831b0797696bee00f14fe65906d549cee4fe092 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 1 Mar 2025 01:31:26 +0100 Subject: [PATCH 031/388] [stubsabot] Bump pynput to 1.7.8 (#13565) --- stubs/pynput/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/pynput/METADATA.toml b/stubs/pynput/METADATA.toml index 32e0d9d64446..dc7e2e94f366 100644 --- a/stubs/pynput/METADATA.toml +++ b/stubs/pynput/METADATA.toml @@ -1,4 +1,4 @@ -version = "1.7.7" +version = "1.7.8" upstream_repository = "https://github.com/moses-palmer/pynput" [tool.stubtest] From eefa1f8325c5d75e1a911180b3723547f2386702 Mon Sep 17 00:00:00 2001 From: Christoph Reiter Date: Mon, 3 Mar 2025 15:35:43 +0100 Subject: [PATCH 032/388] [Deprecated]: fix missing extra_stacklevel kwarg for deprecated() (#13573) --- stubs/Deprecated/deprecated/classic.pyi | 7 ++++++- stubs/Deprecated/deprecated/sphinx.pyi | 1 + 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/stubs/Deprecated/deprecated/classic.pyi b/stubs/Deprecated/deprecated/classic.pyi index 7eadb72db809..3f4a2b87e87d 100644 --- a/stubs/Deprecated/deprecated/classic.pyi +++ b/stubs/Deprecated/deprecated/classic.pyi @@ -27,5 +27,10 @@ class ClassicAdapter: def deprecated(wrapped: _F, /) -> _F: ... @overload def deprecated( - reason: str = ..., *, version: str = ..., action: _Actions | None = ..., category: type[Warning] | None = ... + reason: str = ..., + *, + version: str = ..., + action: _Actions | None = ..., + category: type[Warning] | None = ..., + extra_stacklevel: int = 0, ) -> Callable[[_F], _F]: ... diff --git a/stubs/Deprecated/deprecated/sphinx.pyi b/stubs/Deprecated/deprecated/sphinx.pyi index b99aef3de208..d5ae7e23fe24 100644 --- a/stubs/Deprecated/deprecated/sphinx.pyi +++ b/stubs/Deprecated/deprecated/sphinx.pyi @@ -32,4 +32,5 @@ def deprecated( *, action: _Actions | None = ..., category: type[Warning] | None = ..., + extra_stacklevel: int = 0, ) -> Callable[[_F], _F]: ... 
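A minimal usage sketch for the `extra_stacklevel` keyword annotated above (illustrative only, not part of the patch). It assumes the behavior documented by the Deprecated package, where `extra_stacklevel` pushes the reported warning location further up the call stack; the function name, reason, and version strings below are invented for the example:

    from deprecated import deprecated

    @deprecated(reason="use new_api() instead", version="1.2.0", extra_stacklevel=1)
    def old_api() -> None: ...

    old_api()  # the DeprecationWarning is attributed one extra frame above the call site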
From 47ca7a0af4acb6bd22c54e8fbffbf810252face3 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 3 Mar 2025 09:39:40 -0500 Subject: [PATCH 033/388] Enable Ruff PLC (Pylint Convention) (#13306) --- pyproject.toml | 24 ++++-- stdlib/builtins.pyi | 30 +++---- stdlib/contextlib.pyi | 10 +-- stdlib/inspect.pyi | 12 +-- stdlib/multiprocessing/connection.pyi | 18 ++-- stdlib/typing.pyi | 14 +-- stubs/WTForms/wtforms/validators.pyi | 18 ++-- stubs/boltons/boltons/tbutils.pyi | 26 +++--- .../networkx/algorithms/operators/binary.pyi | 8 +- stubs/pynput/pynput/_util.pyi | 8 +- stubs/pyserial/serial/tools/miniterm.pyi | 4 +- stubs/tensorflow/tensorflow/data/__init__.pyi | 86 +++++++++---------- .../distribute/experimental/coordinator.pyi | 6 +- .../tensorflow/keras/layers/__init__.pyi | 18 ++-- stubs/tensorflow/tensorflow/keras/models.pyi | 20 +++-- .../tensorflow/saved_model/__init__.pyi | 8 +- .../tensorflow/types/experimental.pyi | 16 ++-- 17 files changed, 173 insertions(+), 153 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e6eb6cf3656d..2a68a74cf3bf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ select = [ "I", # isort "N", # pep8-naming "PGH", # pygrep-hooks + "PLC", # Pylint Convention "RUF", # Ruff-specific and unused-noqa "TRY", # tryceratops "UP", # pyupgrade @@ -159,19 +160,26 @@ ignore = [ # A lot of stubs are incomplete on purpose, and that's configured through pyright # Some ANN204 (special method) are autofixable in stubs, but not all. "ANN2", # Missing return type annotation for ... - # Most pep8-naming rules don't apply for third-party stubs like typeshed. - # N811 to N814 could apply, but we often use them to disambiguate a name whilst making it look like a more common one - "N8", + # Ruff 0.8.0 added sorting of __all__ and __slots_. + # There is no consensus on whether we want to apply this to stubs, so keeping the status quo. + # See https://github.com/python/typeshed/pull/13108 + "RUF022", # `__all__` is not sorted + "RUF023", # `{}.__slots__` is not sorted + ### # Rules that are out of the control of stub authors: + ### "F403", # `from . import *` used; unable to detect undefined names # Stubs can sometimes re-export entire modules. # Issues with using a star-imported name will be caught by type-checkers. "F405", # may be undefined, or defined from star imports - # Ruff 0.8.0 added sorting of __all__ and __slots_. - # There is no consensus on whether we want to apply this to stubs, so keeping the status quo. - # See https://github.com/python/typeshed/pull/13108 - "RUF022", - "RUF023", + # Most pep8-naming rules don't apply for third-party stubs like typeshed. + # N811 to N814 could apply, but we often use them to disambiguate a name whilst making it look like a more common one + "N8", # pep8-naming + "PLC2701", # Private name import from external module +] +"lib/ts_utils/**" = [ + # Doesn't affect stubs. 
The only re-exports we have should be in our local lib ts_utils + "PLC0414", # Import alias does not rename original package ] "*_pb2.pyi" = [ # Leave the docstrings as-is, matching source diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index 25144609d518..02557c718eff 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -89,8 +89,8 @@ _T2 = TypeVar("_T2") _T3 = TypeVar("_T3") _T4 = TypeVar("_T4") _T5 = TypeVar("_T5") -_SupportsNextT = TypeVar("_SupportsNextT", bound=SupportsNext[Any], covariant=True) -_SupportsAnextT = TypeVar("_SupportsAnextT", bound=SupportsAnext[Any], covariant=True) +_SupportsNextT_co = TypeVar("_SupportsNextT_co", bound=SupportsNext[Any], covariant=True) +_SupportsAnextT_co = TypeVar("_SupportsAnextT_co", bound=SupportsAnext[Any], covariant=True) _AwaitableT = TypeVar("_AwaitableT", bound=Awaitable[Any]) _AwaitableT_co = TypeVar("_AwaitableT_co", bound=Awaitable[Any], covariant=True) _P = ParamSpec("_P") @@ -1319,7 +1319,7 @@ class _PathLike(Protocol[AnyStr_co]): def __fspath__(self) -> AnyStr_co: ... if sys.version_info >= (3, 10): - def aiter(async_iterable: SupportsAiter[_SupportsAnextT], /) -> _SupportsAnextT: ... + def aiter(async_iterable: SupportsAiter[_SupportsAnextT_co], /) -> _SupportsAnextT_co: ... class _SupportsSynchronousAnext(Protocol[_AwaitableT_co]): def __anext__(self) -> _AwaitableT_co: ... @@ -1481,7 +1481,7 @@ class _GetItemIterable(Protocol[_T_co]): def __getitem__(self, i: int, /) -> _T_co: ... @overload -def iter(object: SupportsIter[_SupportsNextT], /) -> _SupportsNextT: ... +def iter(object: SupportsIter[_SupportsNextT_co], /) -> _SupportsNextT_co: ... @overload def iter(object: _GetItemIterable[_T], /) -> Iterator[_T]: ... @overload @@ -1688,17 +1688,17 @@ def print( *values: object, sep: str | None = " ", end: str | None = "\n", file: _SupportsWriteAndFlush[str] | None = None, flush: bool ) -> None: ... -_E = TypeVar("_E", contravariant=True) -_M = TypeVar("_M", contravariant=True) +_E_contra = TypeVar("_E_contra", contravariant=True) +_M_contra = TypeVar("_M_contra", contravariant=True) -class _SupportsPow2(Protocol[_E, _T_co]): - def __pow__(self, other: _E, /) -> _T_co: ... +class _SupportsPow2(Protocol[_E_contra, _T_co]): + def __pow__(self, other: _E_contra, /) -> _T_co: ... -class _SupportsPow3NoneOnly(Protocol[_E, _T_co]): - def __pow__(self, other: _E, modulo: None = None, /) -> _T_co: ... +class _SupportsPow3NoneOnly(Protocol[_E_contra, _T_co]): + def __pow__(self, other: _E_contra, modulo: None = None, /) -> _T_co: ... -class _SupportsPow3(Protocol[_E, _M, _T_co]): - def __pow__(self, other: _E, modulo: _M, /) -> _T_co: ... +class _SupportsPow3(Protocol[_E_contra, _M_contra, _T_co]): + def __pow__(self, other: _E_contra, modulo: _M_contra, /) -> _T_co: ... _SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed _SupportsPow2[Any, Any] | _SupportsPow3NoneOnly[Any, Any] | _SupportsPow3[Any, Any, Any] @@ -1734,11 +1734,11 @@ def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> @overload def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex: ... @overload -def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap] +def pow(base: _SupportsPow2[_E_contra, _T_co], exp: _E_contra, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap] @overload -def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... 
# type: ignore[overload-overlap] +def pow(base: _SupportsPow3NoneOnly[_E_contra, _T_co], exp: _E_contra, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap] @overload -def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ... +def pow(base: _SupportsPow3[_E_contra, _M_contra, _T_co], exp: _E_contra, mod: _M_contra) -> _T_co: ... @overload def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = None) -> Any: ... @overload diff --git a/stdlib/contextlib.pyi b/stdlib/contextlib.pyi index f57e7fa67036..08ac5a28b8b8 100644 --- a/stdlib/contextlib.pyi +++ b/stdlib/contextlib.pyi @@ -33,7 +33,7 @@ _T_co = TypeVar("_T_co", covariant=True) _T_io = TypeVar("_T_io", bound=IO[str] | None) _ExitT_co = TypeVar("_ExitT_co", covariant=True, bound=bool | None, default=bool | None) _F = TypeVar("_F", bound=Callable[..., Any]) -_G = TypeVar("_G", bound=Generator[Any, Any, Any] | AsyncGenerator[Any, Any], covariant=True) +_G_co = TypeVar("_G_co", bound=Generator[Any, Any, Any] | AsyncGenerator[Any, Any], covariant=True) _P = ParamSpec("_P") _SendT_contra = TypeVar("_SendT_contra", contravariant=True, default=None) @@ -68,11 +68,11 @@ class ContextDecorator: def _recreate_cm(self) -> Self: ... def __call__(self, func: _F) -> _F: ... -class _GeneratorContextManagerBase(Generic[_G]): +class _GeneratorContextManagerBase(Generic[_G_co]): # Ideally this would use ParamSpec, but that requires (*args, **kwargs), which this isn't. see #6676 - def __init__(self, func: Callable[..., _G], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... - gen: _G - func: Callable[..., _G] + def __init__(self, func: Callable[..., _G_co], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: _G_co + func: Callable[..., _G_co] args: tuple[Any, ...] kwds: dict[str, Any] diff --git a/stdlib/inspect.pyi b/stdlib/inspect.pyi index 43b3dd529887..229eb2135690 100644 --- a/stdlib/inspect.pyi +++ b/stdlib/inspect.pyi @@ -143,8 +143,8 @@ if sys.version_info >= (3, 11): _P = ParamSpec("_P") _T = TypeVar("_T") _F = TypeVar("_F", bound=Callable[..., Any]) -_T_cont = TypeVar("_T_cont", contravariant=True) -_V_cont = TypeVar("_V_cont", contravariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) +_V_contra = TypeVar("_V_contra", contravariant=True) # # Types and members @@ -228,11 +228,11 @@ def isasyncgenfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, AsyncGe @overload def isasyncgenfunction(obj: object) -> TypeGuard[Callable[..., AsyncGeneratorType[Any, Any]]]: ... -class _SupportsSet(Protocol[_T_cont, _V_cont]): - def __set__(self, instance: _T_cont, value: _V_cont, /) -> None: ... +class _SupportsSet(Protocol[_T_contra, _V_contra]): + def __set__(self, instance: _T_contra, value: _V_contra, /) -> None: ... -class _SupportsDelete(Protocol[_T_cont]): - def __delete__(self, instance: _T_cont, /) -> None: ... +class _SupportsDelete(Protocol[_T_contra]): + def __delete__(self, instance: _T_contra, /) -> None: ... def isasyncgen(object: object) -> TypeIs[AsyncGeneratorType[Any, Any]]: ... def istraceback(object: object) -> TypeIs[TracebackType]: ... 
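Illustrative sketch (not part of the patch) of the variance-suffix naming convention that the renames in this commit apply, and which the newly enabled convention checks are presumably meant to enforce: covariant type variables end in `_co`, contravariant ones in `_contra`.

    from typing import Generic, TypeVar

    _T_co = TypeVar("_T_co", covariant=True)               # produces values  -> "_co" suffix
    _T_contra = TypeVar("_T_contra", contravariant=True)   # consumes values  -> "_contra" suffix

    class Producer(Generic[_T_co]):
        def get(self) -> _T_co: ...

    class Consumer(Generic[_T_contra]):
        def put(self, value: _T_contra) -> None: ...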
diff --git a/stdlib/multiprocessing/connection.pyi b/stdlib/multiprocessing/connection.pyi index 9998239d3119..cd4fa102c0f3 100644 --- a/stdlib/multiprocessing/connection.pyi +++ b/stdlib/multiprocessing/connection.pyi @@ -12,10 +12,10 @@ __all__ = ["Client", "Listener", "Pipe", "wait"] _Address: TypeAlias = str | tuple[str, int] # Defaulting to Any to avoid forcing generics on a lot of pre-existing code -_SendT = TypeVar("_SendT", contravariant=True, default=Any) -_RecvT = TypeVar("_RecvT", covariant=True, default=Any) +_SendT_contra = TypeVar("_SendT_contra", contravariant=True, default=Any) +_RecvT_co = TypeVar("_RecvT_co", covariant=True, default=Any) -class _ConnectionBase(Generic[_SendT, _RecvT]): +class _ConnectionBase(Generic[_SendT_contra, _RecvT_co]): def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... @property def closed(self) -> bool: ... # undocumented @@ -26,10 +26,10 @@ class _ConnectionBase(Generic[_SendT, _RecvT]): def fileno(self) -> int: ... def close(self) -> None: ... def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: ... - def send(self, obj: _SendT) -> None: ... + def send(self, obj: _SendT_contra) -> None: ... def recv_bytes(self, maxlength: int | None = None) -> bytes: ... def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: ... - def recv(self) -> _RecvT: ... + def recv(self) -> _RecvT_co: ... def poll(self, timeout: float | None = 0.0) -> bool: ... def __enter__(self) -> Self: ... def __exit__( @@ -37,10 +37,10 @@ class _ConnectionBase(Generic[_SendT, _RecvT]): ) -> None: ... def __del__(self) -> None: ... -class Connection(_ConnectionBase[_SendT, _RecvT]): ... +class Connection(_ConnectionBase[_SendT_contra, _RecvT_co]): ... if sys.platform == "win32": - class PipeConnection(_ConnectionBase[_SendT, _RecvT]): ... + class PipeConnection(_ConnectionBase[_SendT_contra, _RecvT_co]): ... class Listener: def __init__( @@ -66,8 +66,8 @@ else: def answer_challenge(connection: Connection[Any, Any], authkey: bytes) -> None: ... def wait( - object_list: Iterable[Connection[_SendT, _RecvT] | socket.socket | int], timeout: float | None = None -) -> list[Connection[_SendT, _RecvT] | socket.socket | int]: ... + object_list: Iterable[Connection[_SendT_contra, _RecvT_co] | socket.socket | int], timeout: float | None = None +) -> list[Connection[_SendT_contra, _RecvT_co] | socket.socket | int]: ... def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection[Any, Any]: ... # N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe. diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi index 7c1b171a730b..5875b6915762 100644 --- a/stdlib/typing.pyi +++ b/stdlib/typing.pyi @@ -510,15 +510,15 @@ class Awaitable(Protocol[_T_co]): def __await__(self) -> Generator[Any, Any, _T_co]: ... # Non-default variations to accommodate couroutines, and `AwaitableGenerator` having a 4th type parameter. 
-_SendT_contra_nd = TypeVar("_SendT_contra_nd", contravariant=True) -_ReturnT_co_nd = TypeVar("_ReturnT_co_nd", covariant=True) +_SendT_nd_contra = TypeVar("_SendT_nd_contra", contravariant=True) +_ReturnT_nd_co = TypeVar("_ReturnT_nd_co", covariant=True) -class Coroutine(Awaitable[_ReturnT_co_nd], Generic[_YieldT_co, _SendT_contra_nd, _ReturnT_co_nd]): +class Coroutine(Awaitable[_ReturnT_nd_co], Generic[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): __name__: str __qualname__: str @abstractmethod - def send(self, value: _SendT_contra_nd, /) -> _YieldT_co: ... + def send(self, value: _SendT_nd_contra, /) -> _YieldT_co: ... @overload @abstractmethod def throw( @@ -534,9 +534,9 @@ class Coroutine(Awaitable[_ReturnT_co_nd], Generic[_YieldT_co, _SendT_contra_nd, # The parameters correspond to Generator, but the 4th is the original type. @type_check_only class AwaitableGenerator( - Awaitable[_ReturnT_co_nd], - Generator[_YieldT_co, _SendT_contra_nd, _ReturnT_co_nd], - Generic[_YieldT_co, _SendT_contra_nd, _ReturnT_co_nd, _S], + Awaitable[_ReturnT_nd_co], + Generator[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co], + Generic[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co, _S], metaclass=ABCMeta, ): ... diff --git a/stubs/WTForms/wtforms/validators.pyi b/stubs/WTForms/wtforms/validators.pyi index a5d1c4ca6304..cbe04b288fc4 100644 --- a/stubs/WTForms/wtforms/validators.pyi +++ b/stubs/WTForms/wtforms/validators.pyi @@ -42,7 +42,7 @@ __all__ = ( "Disabled", ) -_ValuesT = TypeVar("_ValuesT", bound=Collection[Any], contravariant=True) +_ValuesT_contra = TypeVar("_ValuesT_contra", bound=Collection[Any], contravariant=True) class ValidationError(ValueError): def __init__(self, message: str = "", *args: object) -> None: ... @@ -150,9 +150,13 @@ class AnyOf: @overload def __init__(self, values: Collection[Any], message: str | None = None, values_formatter: None = None) -> None: ... @overload - def __init__(self, values: _ValuesT, message: str | None, values_formatter: Callable[[_ValuesT], str]) -> None: ... + def __init__( + self, values: _ValuesT_contra, message: str | None, values_formatter: Callable[[_ValuesT_contra], str] + ) -> None: ... @overload - def __init__(self, values: _ValuesT, message: str | None = None, *, values_formatter: Callable[[_ValuesT], str]) -> None: ... + def __init__( + self, values: _ValuesT_contra, message: str | None = None, *, values_formatter: Callable[[_ValuesT_contra], str] + ) -> None: ... def __call__(self, form: BaseForm, field: Field) -> None: ... @staticmethod def default_values_formatter(values: Iterable[object]) -> str: ... @@ -164,9 +168,13 @@ class NoneOf: @overload def __init__(self, values: Collection[Any], message: str | None = None, values_formatter: None = None) -> None: ... @overload - def __init__(self, values: _ValuesT, message: str | None, values_formatter: Callable[[_ValuesT], str]) -> None: ... + def __init__( + self, values: _ValuesT_contra, message: str | None, values_formatter: Callable[[_ValuesT_contra], str] + ) -> None: ... @overload - def __init__(self, values: _ValuesT, message: str | None = None, *, values_formatter: Callable[[_ValuesT], str]) -> None: ... + def __init__( + self, values: _ValuesT_contra, message: str | None = None, *, values_formatter: Callable[[_ValuesT_contra], str] + ) -> None: ... def __call__(self, form: BaseForm, field: Field) -> None: ... @staticmethod def default_values_formatter(v: Iterable[object]) -> str: ... 
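Illustrative sketch (not part of the patch) of what the paired overloads above buy: when a custom `values_formatter` is passed, its parameter type is checked against the type of `values`. Assuming ordinary WTForms usage:

    from wtforms.validators import AnyOf

    # the formatter's argument type is inferred from the list passed as `values`
    allowed = AnyOf(
        [1, 2, 3],
        message="value not allowed",
        values_formatter=lambda values: ", ".join(map(str, values)),
    )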
diff --git a/stubs/boltons/boltons/tbutils.pyi b/stubs/boltons/boltons/tbutils.pyi index 6548787bb96f..5900a48b7a74 100644 --- a/stubs/boltons/boltons/tbutils.pyi +++ b/stubs/boltons/boltons/tbutils.pyi @@ -22,31 +22,31 @@ class Callpoint: def from_tb(cls, tb: TracebackType) -> Self: ... def tb_frame_str(self) -> str: ... -_CallpointT = TypeVar("_CallpointT", bound=Callpoint, covariant=True, default=Callpoint) +_CallpointT_co = TypeVar("_CallpointT_co", bound=Callpoint, covariant=True, default=Callpoint) -class TracebackInfo(Generic[_CallpointT]): - callpoint_type: type[_CallpointT] - frames: list[_CallpointT] - def __init__(self, frames: list[_CallpointT]) -> None: ... +class TracebackInfo(Generic[_CallpointT_co]): + callpoint_type: type[_CallpointT_co] + frames: list[_CallpointT_co] + def __init__(self, frames: list[_CallpointT_co]) -> None: ... @classmethod def from_frame(cls, frame: FrameType | None = None, level: int = 1, limit: int | None = None) -> Self: ... @classmethod def from_traceback(cls, tb: TracebackType | None = None, limit: int | None = None) -> Self: ... @classmethod - def from_dict(cls, d: Mapping[Literal["frames"], list[_CallpointT]]) -> Self: ... - def to_dict(self) -> dict[str, list[dict[str, _CallpointT]]]: ... + def from_dict(cls, d: Mapping[Literal["frames"], list[_CallpointT_co]]) -> Self: ... + def to_dict(self) -> dict[str, list[dict[str, _CallpointT_co]]]: ... def __len__(self) -> int: ... - def __iter__(self) -> Iterator[_CallpointT]: ... + def __iter__(self) -> Iterator[_CallpointT_co]: ... def get_formatted(self) -> str: ... -_TracebackInfoT = TypeVar("_TracebackInfoT", bound=TracebackInfo, covariant=True, default=TracebackInfo) +_TracebackInfoT_co = TypeVar("_TracebackInfoT_co", bound=TracebackInfo, covariant=True, default=TracebackInfo) -class ExceptionInfo(Generic[_TracebackInfoT]): - tb_info_type: type[_TracebackInfoT] +class ExceptionInfo(Generic[_TracebackInfoT_co]): + tb_info_type: type[_TracebackInfoT_co] exc_type: str exc_msg: str - tb_info: _TracebackInfoT - def __init__(self, exc_type: str, exc_msg: str, tb_info: _TracebackInfoT) -> None: ... + tb_info: _TracebackInfoT_co + def __init__(self, exc_type: str, exc_msg: str, tb_info: _TracebackInfoT_co) -> None: ... @classmethod def from_exc_info(cls, exc_type: type[BaseException], exc_value: BaseException, traceback: TracebackType) -> Self: ... @classmethod diff --git a/stubs/networkx/networkx/algorithms/operators/binary.pyi b/stubs/networkx/networkx/algorithms/operators/binary.pyi index 9e794329668f..7666bdbef236 100644 --- a/stubs/networkx/networkx/algorithms/operators/binary.pyi +++ b/stubs/networkx/networkx/algorithms/operators/binary.pyi @@ -15,10 +15,10 @@ def difference(G: Graph[_Node], H: Graph[_Node]): ... @_dispatchable def symmetric_difference(G: Graph[_Node], H: Graph[_Node]): ... -_X = TypeVar("_X", bound=Hashable, covariant=True) -_Y = TypeVar("_Y", bound=Hashable, covariant=True) +_X_co = TypeVar("_X_co", bound=Hashable, covariant=True) +_Y_co = TypeVar("_Y_co", bound=Hashable, covariant=True) @_dispatchable -def compose(G: Graph[_X], H: Graph[_Y]) -> DiGraph[_X | _Y]: ... +def compose(G: Graph[_X_co], H: Graph[_Y_co]) -> DiGraph[_X_co | _Y_co]: ... @_dispatchable -def union(G: Graph[_X], H: Graph[_Y], rename: Iterable[Incomplete] | None = ()) -> DiGraph[_X | _Y]: ... +def union(G: Graph[_X_co], H: Graph[_Y_co], rename: Iterable[Incomplete] | None = ()) -> DiGraph[_X_co | _Y_co]: ... 
diff --git a/stubs/pynput/pynput/_util.pyi b/stubs/pynput/pynput/_util.pyi index 9b9affebf48d..c561e504bca2 100644 --- a/stubs/pynput/pynput/_util.pyi +++ b/stubs/pynput/pynput/_util.pyi @@ -7,7 +7,7 @@ from typing import Any, ClassVar, Generic, TypedDict, TypeVar from typing_extensions import ParamSpec, Self _T = TypeVar("_T") -_AbstractListener_T = TypeVar("_AbstractListener_T", bound=AbstractListener) +_AbstractListenerT = TypeVar("_AbstractListenerT", bound=AbstractListener) _P = ParamSpec("_P") class _RESOLUTIONS(TypedDict): @@ -49,15 +49,15 @@ class AbstractListener(threading.Thread): def _stop_platform(self) -> None: ... # undocumented def join(self, timeout: float | None = None, *args: Any) -> None: ... -class Events(Generic[_T, _AbstractListener_T]): - _Listener: type[_AbstractListener_T] | None # undocumented +class Events(Generic[_T, _AbstractListenerT]): + _Listener: type[_AbstractListenerT] | None # undocumented class Event: def __eq__(self, other: object) -> bool: ... _event_queue: Queue[_T] # undocumented _sentinel: object # undocumented - _listener: _AbstractListener_T # undocumented + _listener: _AbstractListenerT # undocumented start: Callable[[], None] def __init__(self, *args: Any, **kwargs: Any) -> None: ... def __enter__(self) -> Self: ... diff --git a/stubs/pyserial/serial/tools/miniterm.pyi b/stubs/pyserial/serial/tools/miniterm.pyi index b53d8d074a61..2d541ea056ba 100644 --- a/stubs/pyserial/serial/tools/miniterm.pyi +++ b/stubs/pyserial/serial/tools/miniterm.pyi @@ -8,10 +8,10 @@ from typing_extensions import Self from serial import Serial -_AnyStr_T = TypeVar("_AnyStr_T", contravariant=True) +_AnyStrT_contra = TypeVar("_AnyStrT_contra", contravariant=True) @type_check_only -class _SupportsWriteAndFlush(SupportsWrite[_AnyStr_T], SupportsFlush, Protocol): ... +class _SupportsWriteAndFlush(SupportsWrite[_AnyStrT_contra], SupportsFlush, Protocol): ... @type_check_only class _SupportsRead(Protocol): diff --git a/stubs/tensorflow/tensorflow/data/__init__.pyi b/stubs/tensorflow/tensorflow/data/__init__.pyi index 0ff8a7ea6a91..305043f7248f 100644 --- a/stubs/tensorflow/tensorflow/data/__init__.pyi +++ b/stubs/tensorflow/tensorflow/data/__init__.pyi @@ -14,21 +14,21 @@ from tensorflow.dtypes import DType from tensorflow.io import _CompressionTypes from tensorflow.python.trackable.base import Trackable -_T1 = TypeVar("_T1", covariant=True) +_T1_co = TypeVar("_T1_co", covariant=True) _T2 = TypeVar("_T2") _T3 = TypeVar("_T3") -class Iterator(_Iterator[_T1], Trackable, ABC): +class Iterator(_Iterator[_T1_co], Trackable, ABC): @property @abstractmethod def element_spec(self) -> ContainerGeneric[TypeSpec[Any]]: ... @abstractmethod - def get_next(self) -> _T1: ... + def get_next(self) -> _T1_co: ... @abstractmethod - def get_next_as_optional(self) -> tf.experimental.Optional[_T1]: ... + def get_next_as_optional(self) -> tf.experimental.Optional[_T1_co]: ... -class Dataset(ABC, Generic[_T1]): - def apply(self, transformation_func: Callable[[Dataset[_T1]], Dataset[_T2]]) -> Dataset[_T2]: ... +class Dataset(ABC, Generic[_T1_co]): + def apply(self, transformation_func: Callable[[Dataset[_T1_co]], Dataset[_T2]]) -> Dataset[_T2]: ... def as_numpy_iterator(self) -> Iterator[np.ndarray[Any, Any]]: ... def batch( self, @@ -37,10 +37,10 @@ class Dataset(ABC, Generic[_T1]): num_parallel_calls: int | None = None, deterministic: bool | None = None, name: str | None = None, - ) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... 
def bucket_by_sequence_length( self, - element_length_func: Callable[[_T1], ScalarTensorCompatible], + element_length_func: Callable[[_T1_co], ScalarTensorCompatible], bucket_boundaries: Sequence[int], bucket_batch_sizes: Sequence[int], padded_shapes: ContainerGeneric[tf.TensorShape | TensorCompatible] | None = None, @@ -49,14 +49,14 @@ class Dataset(ABC, Generic[_T1]): no_padding: bool = False, drop_remainder: bool = False, name: str | None = None, - ) -> Dataset[_T1]: ... - def cache(self, filename: str = "", name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... + def cache(self, filename: str = "", name: str | None = None) -> Dataset[_T1_co]: ... def cardinality(self) -> int: ... @staticmethod def choose_from_datasets( datasets: Sequence[Dataset[_T2]], choice_dataset: Dataset[tf.Tensor], stop_on_empty_dataset: bool = True ) -> Dataset[_T2]: ... - def concatenate(self, dataset: Dataset[_T1], name: str | None = None) -> Dataset[_T1]: ... + def concatenate(self, dataset: Dataset[_T1_co], name: str | None = None) -> Dataset[_T1_co]: ... @staticmethod def counter( start: ScalarTensorCompatible = 0, step: ScalarTensorCompatible = 1, dtype: DType = ..., name: str | None = None @@ -64,9 +64,9 @@ class Dataset(ABC, Generic[_T1]): @property @abstractmethod def element_spec(self) -> ContainerGeneric[TypeSpec[Any]]: ... - def enumerate(self, start: ScalarTensorCompatible = 0, name: str | None = None) -> Dataset[tuple[int, _T1]]: ... - def filter(self, predicate: Callable[[_T1], bool | tf.Tensor], name: str | None = None) -> Dataset[_T1]: ... - def flat_map(self, map_func: Callable[[_T1], Dataset[_T2]], name: str | None = None) -> Dataset[_T2]: ... + def enumerate(self, start: ScalarTensorCompatible = 0, name: str | None = None) -> Dataset[tuple[int, _T1_co]]: ... + def filter(self, predicate: Callable[[_T1_co], bool | tf.Tensor], name: str | None = None) -> Dataset[_T1_co]: ... + def flat_map(self, map_func: Callable[[_T1_co], Dataset[_T2]], name: str | None = None) -> Dataset[_T2]: ... # PEP 646 can be used here for a more precise type when better supported. @staticmethod def from_generator( @@ -81,26 +81,26 @@ class Dataset(ABC, Generic[_T1]): def from_tensors(tensors: Any, name: str | None = None) -> Dataset[Any]: ... @staticmethod def from_tensor_slices(tensors: TensorCompatible, name: str | None = None) -> Dataset[Any]: ... - def get_single_element(self, name: str | None = None) -> _T1: ... + def get_single_element(self, name: str | None = None) -> _T1_co: ... def group_by_window( self, - key_func: Callable[[_T1], tf.Tensor], - reduce_func: Callable[[tf.Tensor, Dataset[_T1]], Dataset[_T2]], + key_func: Callable[[_T1_co], tf.Tensor], + reduce_func: Callable[[tf.Tensor, Dataset[_T1_co]], Dataset[_T2]], window_size: ScalarTensorCompatible | None = None, window_size_func: Callable[[tf.Tensor], tf.Tensor] | None = None, name: str | None = None, ) -> Dataset[_T2]: ... - def ignore_errors(self, log_warning: bool = False, name: str | None = None) -> Dataset[_T1]: ... + def ignore_errors(self, log_warning: bool = False, name: str | None = None) -> Dataset[_T1_co]: ... def interleave( self, - map_func: Callable[[_T1], Dataset[_T2]], + map_func: Callable[[_T1_co], Dataset[_T2]], cycle_length: int | None = None, block_length: int | None = None, num_parallel_calls: int | None = None, deterministic: bool | None = None, name: str | None = None, ) -> Dataset[_T2]: ... - def __iter__(self) -> Iterator[_T1]: ... + def __iter__(self) -> Iterator[_T1_co]: ... 
@staticmethod def list_files( file_pattern: str | Sequence[str] | TensorCompatible, @@ -134,8 +134,8 @@ class Dataset(ABC, Generic[_T1]): padding_values: ContainerGeneric[ScalarTensorCompatible] | None = None, drop_remainder: bool = False, name: str | None = None, - ) -> Dataset[_T1]: ... - def prefetch(self, buffer_size: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... + def prefetch(self, buffer_size: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1_co]: ... def ragged_batch( self, batch_size: ScalarTensorCompatible, @@ -162,62 +162,62 @@ class Dataset(ABC, Generic[_T1]): ) -> Dataset[tf.Tensor]: ... def rebatch( self, batch_size: ScalarTensorCompatible, drop_remainder: bool = False, name: str | None = None - ) -> Dataset[_T1]: ... - def reduce(self, initial_state: _T2, reduce_func: Callable[[_T2, _T1], _T2], name: str | None = None) -> _T2: ... + ) -> Dataset[_T1_co]: ... + def reduce(self, initial_state: _T2, reduce_func: Callable[[_T2, _T1_co], _T2], name: str | None = None) -> _T2: ... def rejection_resample( self, - class_func: Callable[[_T1], ScalarTensorCompatible], + class_func: Callable[[_T1_co], ScalarTensorCompatible], target_dist: TensorCompatible, initial_dist: TensorCompatible | None = None, seed: int | None = None, name: str | None = None, - ) -> Dataset[_T1]: ... - def repeat(self, count: ScalarTensorCompatible | None = None, name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... + def repeat(self, count: ScalarTensorCompatible | None = None, name: str | None = None) -> Dataset[_T1_co]: ... @staticmethod def sample_from_datasets( - datasets: Sequence[Dataset[_T1]], + datasets: Sequence[Dataset[_T1_co]], weights: TensorCompatible | None = None, seed: int | None = None, stop_on_empty_dataset: bool = False, rerandomize_each_iteration: bool | None = None, - ) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... # Incomplete as tf.train.CheckpointOptions not yet covered. def save( self, path: str, compression: _CompressionTypes = None, - shard_func: Callable[[_T1], int] | None = None, + shard_func: Callable[[_T1_co], int] | None = None, checkpoint_args: Incomplete | None = None, ) -> None: ... def scan( - self, initial_state: _T2, scan_func: Callable[[_T2, _T1], tuple[_T2, _T3]], name: str | None = None + self, initial_state: _T2, scan_func: Callable[[_T2, _T1_co], tuple[_T2, _T3]], name: str | None = None ) -> Dataset[_T3]: ... def shard( self, num_shards: ScalarTensorCompatible, index: ScalarTensorCompatible, name: str | None = None - ) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... def shuffle( self, buffer_size: ScalarTensorCompatible, seed: int | None = None, reshuffle_each_iteration: bool = True, name: str | None = None, - ) -> Dataset[_T1]: ... - def skip(self, count: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... + def skip(self, count: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1_co]: ... def snapshot( self, path: str, compression: _CompressionTypes = "AUTO", - reader_func: Callable[[Dataset[Dataset[_T1]]], Dataset[_T1]] | None = None, - shard_func: Callable[[_T1], ScalarTensorCompatible] | None = None, + reader_func: Callable[[Dataset[Dataset[_T1_co]]], Dataset[_T1_co]] | None = None, + shard_func: Callable[[_T1_co], ScalarTensorCompatible] | None = None, name: str | None = None, - ) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... 
def sparse_batch( self, batch_size: ScalarTensorCompatible, row_shape: tf.TensorShape | TensorCompatible, name: str | None = None ) -> Dataset[tf.SparseTensor]: ... - def take(self, count: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1]: ... - def take_while(self, predicate: Callable[[_T1], ScalarTensorCompatible], name: str | None = None) -> Dataset[_T1]: ... - def unbatch(self, name: str | None = None) -> Dataset[_T1]: ... - def unique(self, name: str | None = None) -> Dataset[_T1]: ... + def take(self, count: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1_co]: ... + def take_while(self, predicate: Callable[[_T1_co], ScalarTensorCompatible], name: str | None = None) -> Dataset[_T1_co]: ... + def unbatch(self, name: str | None = None) -> Dataset[_T1_co]: ... + def unique(self, name: str | None = None) -> Dataset[_T1_co]: ... def window( self, size: ScalarTensorCompatible, @@ -225,8 +225,8 @@ class Dataset(ABC, Generic[_T1]): stride: ScalarTensorCompatible = 1, drop_remainder: bool = False, name: str | None = None, - ) -> Dataset[Dataset[_T1]]: ... - def with_options(self, options: Options, name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[Dataset[_T1_co]]: ... + def with_options(self, options: Options, name: str | None = None) -> Dataset[_T1_co]: ... @overload @staticmethod def zip( diff --git a/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi b/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi index cc59692e72ec..92d1f6bcb9ed 100644 --- a/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi +++ b/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi @@ -3,10 +3,10 @@ from typing import Generic, TypeVar from tensorflow._aliases import AnyArray -_Value = TypeVar("_Value", covariant=True) +_Value_co = TypeVar("_Value_co", covariant=True) -class RemoteValue(Generic[_Value]): +class RemoteValue(Generic[_Value_co]): def fetch(self) -> AnyArray: ... - def get(self) -> _Value: ... + def get(self) -> _Value_co: ... def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi b/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi index a2a2211b56cf..21e4606f6680 100644 --- a/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi +++ b/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi @@ -11,8 +11,8 @@ from tensorflow.keras.constraints import Constraint from tensorflow.keras.initializers import _Initializer from tensorflow.keras.regularizers import Regularizer, _Regularizer -_InputT = TypeVar("_InputT", contravariant=True) -_OutputT = TypeVar("_OutputT", covariant=True) +_InputT_contra = TypeVar("_InputT_contra", contravariant=True) +_OutputT_co = TypeVar("_OutputT_co", covariant=True) class InputSpec: dtype: str | None @@ -39,9 +39,9 @@ class InputSpec: # Most layers have input and output type of just Tensor and when we support default type variables, # maybe worth trying. -class Layer(tf.Module, Generic[_InputT, _OutputT]): +class Layer(tf.Module, Generic[_InputT_contra, _OutputT_co]): # The most general type is ContainerGeneric[InputSpec] as it really - # depends on _InputT. For most Layers it is just InputSpec + # depends on _InputT_contra. For most Layers it is just InputSpec # though. Maybe describable with HKT? input_spec: InputSpec | Any @@ -65,11 +65,13 @@ class Layer(tf.Module, Generic[_InputT, _OutputT]): # *args/**kwargs are allowed, but have obscure footguns and tensorflow documentation discourages their usage. 
# First argument will automatically be cast to layer's compute dtype, but any other tensor arguments will not be. # Also various tensorflow tools/apis can misbehave if they encounter a layer with *args/**kwargs. - def __call__(self, inputs: _InputT, *, training: bool = False, mask: TensorCompatible | None = None) -> _OutputT: ... - def call(self, inputs: _InputT, /) -> _OutputT: ... + def __call__( + self, inputs: _InputT_contra, *, training: bool = False, mask: TensorCompatible | None = None + ) -> _OutputT_co: ... + def call(self, inputs: _InputT_contra, /) -> _OutputT_co: ... - # input_shape's real type depends on _InputT, but we can't express that without HKT. - # For example _InputT tf.Tensor -> tf.TensorShape, _InputT dict[str, tf.Tensor] -> dict[str, tf.TensorShape]. + # input_shape's real type depends on _InputT_contra, but we can't express that without HKT. + # For example _InputT_contra tf.Tensor -> tf.TensorShape, _InputT_contra dict[str, tf.Tensor] -> dict[str, tf.TensorShape]. def build(self, input_shape: Any, /) -> None: ... @overload def compute_output_shape(self: Layer[tf.Tensor, tf.Tensor], input_shape: tf.TensorShape, /) -> tf.TensorShape: ... diff --git a/stubs/tensorflow/tensorflow/keras/models.pyi b/stubs/tensorflow/tensorflow/keras/models.pyi index e024ed5c26fb..262bd49c897f 100644 --- a/stubs/tensorflow/tensorflow/keras/models.pyi +++ b/stubs/tensorflow/tensorflow/keras/models.pyi @@ -9,14 +9,14 @@ import numpy.typing as npt import tensorflow as tf from tensorflow import Variable from tensorflow._aliases import ContainerGeneric, ShapeLike, TensorCompatible -from tensorflow.keras.layers import Layer, _InputT, _OutputT +from tensorflow.keras.layers import Layer, _InputT_contra, _OutputT_co from tensorflow.keras.optimizers import Optimizer _Loss: TypeAlias = str | tf.keras.losses.Loss | Callable[[TensorCompatible, TensorCompatible], tf.Tensor] _Metric: TypeAlias = str | tf.keras.metrics.Metric | Callable[[TensorCompatible, TensorCompatible], tf.Tensor] | None # Missing keras.src.backend.tensorflow.trainer.TensorFlowTrainer as a base class, which is not exposed by tensorflow -class Model(Layer[_InputT, _OutputT]): +class Model(Layer[_InputT_contra, _OutputT_co]): _train_counter: tf.Variable _test_counter: tf.Variable optimizer: Optimizer | None @@ -27,13 +27,15 @@ class Model(Layer[_InputT, _OutputT]): ) -> tf.Tensor | None: ... stop_training: bool - def __new__(cls, *args: Any, **kwargs: Any) -> Model[_InputT, _OutputT]: ... + def __new__(cls, *args: Any, **kwargs: Any) -> Model[_InputT_contra, _OutputT_co]: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... def __setattr__(self, name: str, value: Any) -> None: ... def __reduce__(self): ... def build(self, input_shape: ShapeLike) -> None: ... - def __call__(self, inputs: _InputT, *, training: bool = False, mask: TensorCompatible | None = None) -> _OutputT: ... - def call(self, inputs: _InputT, training: bool | None = None, mask: TensorCompatible | None = None) -> _OutputT: ... + def __call__( + self, inputs: _InputT_contra, *, training: bool = False, mask: TensorCompatible | None = None + ) -> _OutputT_co: ... + def call(self, inputs: _InputT_contra, training: bool | None = None, mask: TensorCompatible | None = None) -> _OutputT_co: ... # Ideally loss/metrics/output would share the same structure but higher kinded types are not supported. def compile( self, @@ -106,8 +108,8 @@ class Model(Layer[_InputT, _OutputT]): return_dict: bool = False, **kwargs: Any, ) -> float | list[float]: ... 
- def predict_step(self, data: _InputT) -> _OutputT: ... - def make_predict_function(self, force: bool = False) -> Callable[[tf.data.Iterator[Incomplete]], _OutputT]: ... + def predict_step(self, data: _InputT_contra) -> _OutputT_co: ... + def make_predict_function(self, force: bool = False) -> Callable[[tf.data.Iterator[Incomplete]], _OutputT_co]: ... def predict( self, x: TensorCompatible | tf.data.Dataset[Incomplete], @@ -115,7 +117,7 @@ class Model(Layer[_InputT, _OutputT]): verbose: Literal["auto", 0, 1, 2] = "auto", steps: int | None = None, callbacks: list[tf.keras.callbacks.Callback] | None = None, - ) -> _OutputT: ... + ) -> _OutputT_co: ... def reset_metrics(self) -> None: ... def train_on_batch( self, @@ -132,7 +134,7 @@ class Model(Layer[_InputT, _OutputT]): sample_weight: npt.NDArray[np.float64] | None = None, return_dict: bool = False, ) -> float | list[float]: ... - def predict_on_batch(self, x: Iterator[_InputT]) -> npt.NDArray[Incomplete]: ... + def predict_on_batch(self, x: Iterator[_InputT_contra]) -> npt.NDArray[Incomplete]: ... @property def trainable_weights(self) -> list[Variable]: ... @property diff --git a/stubs/tensorflow/tensorflow/saved_model/__init__.pyi b/stubs/tensorflow/tensorflow/saved_model/__init__.pyi index 538e8acabdbe..203d144c751f 100644 --- a/stubs/tensorflow/tensorflow/saved_model/__init__.pyi +++ b/stubs/tensorflow/tensorflow/saved_model/__init__.pyi @@ -10,7 +10,7 @@ from tensorflow.saved_model.experimental import VariablePolicy from tensorflow.types.experimental import ConcreteFunction, PolymorphicFunction _P = ParamSpec("_P") -_R = TypeVar("_R", covariant=True) +_R_co = TypeVar("_R_co", covariant=True) class Asset: @property @@ -77,10 +77,10 @@ class SaveOptions: def contains_saved_model(export_dir: str | Path) -> bool: ... -class _LoadedAttributes(Generic[_P, _R]): - signatures: Mapping[str, ConcreteFunction[_P, _R]] +class _LoadedAttributes(Generic[_P, _R_co]): + signatures: Mapping[str, ConcreteFunction[_P, _R_co]] -class _LoadedModel(AutoTrackable, _LoadedAttributes[_P, _R]): +class _LoadedModel(AutoTrackable, _LoadedAttributes[_P, _R_co]): variables: list[tf.Variable] trainable_variables: list[tf.Variable] # TF1 model artifact specific diff --git a/stubs/tensorflow/tensorflow/types/experimental.pyi b/stubs/tensorflow/tensorflow/types/experimental.pyi index 15d2ee854093..5d4a792cb2e3 100644 --- a/stubs/tensorflow/tensorflow/types/experimental.pyi +++ b/stubs/tensorflow/tensorflow/types/experimental.pyi @@ -7,23 +7,23 @@ import tensorflow as tf from tensorflow._aliases import ContainerGeneric _P = ParamSpec("_P") -_R = TypeVar("_R", covariant=True) +_R_co = TypeVar("_R_co", covariant=True) -class Callable(Generic[_P, _R], metaclass=abc.ABCMeta): - def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... +class Callable(Generic[_P, _R_co], metaclass=abc.ABCMeta): + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: ... -class ConcreteFunction(Callable[_P, _R], metaclass=abc.ABCMeta): - def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... +class ConcreteFunction(Callable[_P, _R_co], metaclass=abc.ABCMeta): + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: ... -class PolymorphicFunction(Callable[_P, _R], metaclass=abc.ABCMeta): +class PolymorphicFunction(Callable[_P, _R_co], metaclass=abc.ABCMeta): @overload @abc.abstractmethod - def get_concrete_function(self, *args: _P.args, **kwargs: _P.kwargs) -> ConcreteFunction[_P, _R]: ... 
+ def get_concrete_function(self, *args: _P.args, **kwargs: _P.kwargs) -> ConcreteFunction[_P, _R_co]: ... @overload @abc.abstractmethod def get_concrete_function( self, *args: ContainerGeneric[tf.TypeSpec[Any]], **kwargs: ContainerGeneric[tf.TypeSpec[Any]] - ) -> ConcreteFunction[_P, _R]: ... + ) -> ConcreteFunction[_P, _R_co]: ... def experimental_get_compiler_ir(self, *args, **kwargs): ... GenericFunction = PolymorphicFunction From 3ddc5b06035a51c8fb7ffca21f389f0ed950447a Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 3 Mar 2025 15:44:26 +0100 Subject: [PATCH 034/388] Remove SupportsGetItem.__contains__ (#13541) --- stdlib/_typeshed/__init__.pyi | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/stdlib/_typeshed/__init__.pyi b/stdlib/_typeshed/__init__.pyi index 7201819b25ed..7d8e3083671e 100644 --- a/stdlib/_typeshed/__init__.pyi +++ b/stdlib/_typeshed/__init__.pyi @@ -151,11 +151,8 @@ class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): def keys(self) -> Iterable[_KT]: ... def __getitem__(self, key: _KT, /) -> _VT_co: ... -# This protocol is currently under discussion. Use SupportsContainsAndGetItem -# instead, if you require the __contains__ method. -# See https://github.com/python/typeshed/issues/11822. +# stable class SupportsGetItem(Protocol[_KT_contra, _VT_co]): - def __contains__(self, x: Any, /) -> bool: ... def __getitem__(self, key: _KT_contra, /) -> _VT_co: ... # stable From e7412b1ca61c767fd5dc83df914b044a4e39aada Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 3 Mar 2025 09:48:59 -0500 Subject: [PATCH 035/388] Drop flake8-noqa and remove workarounds to work with Ruff (#13571) --- .flake8 | 6 +----- .pre-commit-config.yaml | 3 +-- CONTRIBUTING.md | 5 ++--- pyproject.toml | 8 +++----- stdlib/builtins.pyi | 7 +++++-- stdlib/typing_extensions.pyi | 3 +-- stubs/boltons/boltons/dictutils.pyi | 4 +--- stubs/geopandas/geopandas/io/sql.pyi | 3 --- stubs/six/six/moves/builtins.pyi | 1 - 9 files changed, 14 insertions(+), 26 deletions(-) diff --git a/.flake8 b/.flake8 index cf6578d7bb11..3b99b881864f 100644 --- a/.flake8 +++ b/.flake8 @@ -1,13 +1,10 @@ [flake8] -# NQA: Ruff won't warn about redundant `# noqa: Y` # Y: Flake8 is only used to run flake8-pyi, everything else is in Ruff # F821: Typeshed is a testing ground for flake8-pyi, which monkeypatches F821 -select = NQA, Y, F821 +select = Y, F821 # Ignore rules normally excluded by default extend-ignore = Y090 per-file-ignores = - # We should only need to noqa Y and F821 codes in .pyi files - *.py: NQA # Generated protobuf files: # Y021: Include docstrings # Y023: Alias typing as typing_extensions @@ -16,4 +13,3 @@ per-file-ignores = stubs/*_pb2.pyi: Y021, Y023, Y026, Y053 exclude = .venv*,.git -noqa_require_code = true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 66fd399115a8..ca5972135220 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,12 +31,11 @@ repos: hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 7.1.1 + rev: 7.1.2 hooks: - id: flake8 language: python additional_dependencies: - - "flake8-noqa==1.4.0" - "flake8-pyi==24.9.0" types: [file] types_or: [python, pyi] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b99fc46b7b9f..e4f95ca4ed1c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -28,7 +28,7 @@ it takes a bit longer. For more details, read below. Typeshed runs continuous integration (CI) on all pull requests. 
This means that if you file a pull request (PR), our full test suite --- including our linter, [Flake8](https://github.com/PyCQA/flake8) -- +-- including our linter, [`flake8-pyi`](https://github.com/pycqa/flake8-pyi) -- is run on your PR. It also means that bots will automatically apply changes to your PR (using [Black](https://github.com/psf/black) and [Ruff](https://github.com/astral-sh/ruff)) to fix any formatting issues. @@ -88,8 +88,7 @@ The code is formatted using [`Black`](https://github.com/psf/black). Various other autofixes and lint rules are also performed by [`Ruff`](https://github.com/astral-sh/ruff) and [`Flake8`](https://github.com/pycqa/flake8), -with plugins [`flake8-pyi`](https://github.com/pycqa/flake8-pyi), -and [`flake8-noqa`](https://github.com/plinss/flake8-noqa). +with plugin [`flake8-pyi`](https://github.com/pycqa/flake8-pyi). The repository is equipped with a [pre-commit.ci](https://pre-commit.ci/) configuration file. This means that you don't *need* to do anything yourself to diff --git a/pyproject.toml b/pyproject.toml index 2a68a74cf3bf..33f06b5b6635 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,9 +33,9 @@ exclude = [ # are invoked via separate runs of ruff in pre-commit: # see our .pre-commit-config.yaml file for details exclude = ["**/test_cases/**/*.py"] -# We still use flake8-pyi and flake8-noqa to check these (see .flake8 config file); +# We still use flake8-pyi to check these (see .flake8 config file); # tell ruff not to flag these as e.g. "unused noqa comments" -external = ["F821", "NQA", "Y"] +external = ["F821", "Y"] select = [ "ARG", # flake8-unused-arguments "B", # flake8-bugbear @@ -67,8 +67,7 @@ select = [ "FURB177", # Prefer `Path.cwd()` over `Path().resolve()` for current-directory lookups "FURB187", # Use of assignment of `reversed` on list `{name}` # PYI: only enable rules that have autofixes and that we always want to fix (even manually), - # avoids duplicate # noqa with flake8-pyi and flake8-noqa flagging `PYI` codes - # See https://github.com/plinss/flake8-noqa/issues/22 + # avoids duplicate # noqa with flake8-pyi "PYI009", # Empty body should contain `...`, not pass "PYI010", # Function body must contain only `...` "PYI012", # Class bodies must not contain `pass` @@ -82,7 +81,6 @@ select = [ # "PYI026", Waiting for this mypy bug to be fixed: https://github.com/python/mypy/issues/16581 "PYI030", # Multiple literal members in a union. Use a single literal, e.g. `Literal[{}]` "PYI032", # Prefer `object` to `Any` for the second parameter to `{method_name}` - "PYI034", # `__new__` methods usually return self at runtime "PYI036", # Star-args in `{method_name}` should be annotated with `object` "PYI044", # `from __future__ import annotations` has no effect in stub files, since type checkers automatically treat stubs as having those semantics "PYI055", # Multiple `type[T]` usages in a union. Combine them into one, e.g., `type[{union_str}]`. diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index 02557c718eff..b9a1f3c9e456 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -1,4 +1,3 @@ -# ruff: noqa: PYI036 # This is the module declaring BaseException import _ast import _sitebuiltins import _typeshed @@ -870,7 +869,11 @@ class memoryview(Sequence[_I]): def __new__(cls, obj: ReadableBuffer) -> Self: ... def __enter__(self) -> Self: ... 
def __exit__( - self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, / + self, + exc_type: type[BaseException] | None, # noqa: PYI036 # This is the module declaring BaseException + exc_val: BaseException | None, + exc_tb: TracebackType | None, + /, ) -> None: ... @overload def cast(self, format: Literal["c", "@c"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]: ... diff --git a/stdlib/typing_extensions.pyi b/stdlib/typing_extensions.pyi index 33af1a388aa5..fd98722b10a8 100644 --- a/stdlib/typing_extensions.pyi +++ b/stdlib/typing_extensions.pyi @@ -1,5 +1,3 @@ -# Since this module defines "Self" it is not recognized by Ruff as typing_extensions.Self -# ruff: noqa: PYI034 import abc import sys import typing @@ -251,6 +249,7 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): @overload def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... # supposedly incompatible definitions of `__ior__` and `__or__`: + # Since this module defines "Self" it is not recognized by Ruff as typing_extensions.Self def __ior__(self, value: Self, /) -> Self: ... # type: ignore[misc] OrderedDict = _Alias() diff --git a/stubs/boltons/boltons/dictutils.pyi b/stubs/boltons/boltons/dictutils.pyi index 26c99bb70eb9..6a6b851a9ffb 100644 --- a/stubs/boltons/boltons/dictutils.pyi +++ b/stubs/boltons/boltons/dictutils.pyi @@ -91,9 +91,7 @@ class FrozenDict(dict[_KT, _VT]): @classmethod def fromkeys(cls, keys: Iterable[_KT], value: _VT | None = None) -> Self: ... # type: ignore[override] def updated(self, *a, **kw) -> Self: ... - # Can't noqa because of https://github.com/plinss/flake8-noqa/pull/30 - # Signature conflicts with superclass, so let's just omit it - # def __ior__(self, *a, **kw) -> NoReturn: ... + def __ior__(self, *a, **kw) -> NoReturn: ... # type: ignore[misc] # noqa: Y034 # Signature conflicts with superclass def __setitem__(self, *a, **kw) -> NoReturn: ... def __delitem__(self, *a, **kw) -> NoReturn: ... def update(self, *a, **kw) -> NoReturn: ... diff --git a/stubs/geopandas/geopandas/io/sql.pyi b/stubs/geopandas/geopandas/io/sql.pyi index c8f785cfc6a7..743439f873b8 100644 --- a/stubs/geopandas/geopandas/io/sql.pyi +++ b/stubs/geopandas/geopandas/io/sql.pyi @@ -10,9 +10,6 @@ from pandas._typing import Scalar from ..base import _ConvertibleToCRS from ..geodataframe import GeoDataFrame -# inline ruff noqa at _SqlalchemyConnectionLike.__enter__ confuses flake8 -# ruff: noqa: PYI034 - # Start SQLAlchemy hack # --------------------- # The code actually explicitly checks for SQLAlchemy's `Connection` and `Engine` with diff --git a/stubs/six/six/moves/builtins.pyi b/stubs/six/six/moves/builtins.pyi index 1f3812602c5e..eee6b75c0554 100644 --- a/stubs/six/six/moves/builtins.pyi +++ b/stubs/six/six/moves/builtins.pyi @@ -1,4 +1,3 @@ -# flake8: noqa: NQA102 # https://github.com/plinss/flake8-noqa/issues/22 # six explicitly re-exports builtins. Normally this is something we'd want to avoid. # But this is specifically a compatibility package. 
from builtins import * # noqa: UP029 From fec4a0e9456bad3c5d8dec8a99ac976135f88369 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 15:50:07 +0100 Subject: [PATCH 036/388] [stubsabot] Bump zstd to 1.5.6.5 (#13569) --- stubs/zstd/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/zstd/METADATA.toml b/stubs/zstd/METADATA.toml index 08c2e52d9ed8..d98e417ca6ea 100644 --- a/stubs/zstd/METADATA.toml +++ b/stubs/zstd/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.5.6.4" +version = "1.5.6.5" upstream_repository = "https://github.com/sergey-dryabzhinsky/python-zstd" From 1b409e4cca012370eece03e22b2fdc853149979d Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 3 Mar 2025 09:50:44 -0500 Subject: [PATCH 037/388] Remove uncompletable TODO in test.yml (#13570) --- .github/workflows/tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1626ebcb51a5..da646296677b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -110,9 +110,6 @@ jobs: DEPENDENCIES=$( python tests/get_external_stub_requirements.py ) if [ -n "$DEPENDENCIES" ]; then printf "Installing packages:\n $(echo $DEPENDENCIES | sed 's/ /\n /g')\n" - # TODO: We need to specify the platform here, but the platforms - # strings supported by uv are different from the ones supported by - # pyright. uv pip install --python-version ${{ matrix.python-version }} $DEPENDENCIES fi - name: Activate the isolated venv for the rest of the job From c5583fe2c43c5a95202a6ed45c1169b80b3bff54 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 3 Mar 2025 09:52:57 -0500 Subject: [PATCH 038/388] Allow any sliceable sequence as getopt args (#13116) --- stdlib/distutils/fancy_getopt.pyi | 20 +++++++++++++++----- stdlib/getopt.pyi | 18 +++++++++++++++--- 2 files changed, 30 insertions(+), 8 deletions(-) diff --git a/stdlib/distutils/fancy_getopt.pyi b/stdlib/distutils/fancy_getopt.pyi index c4d37419ed06..e66d8cc9f2c5 100644 --- a/stdlib/distutils/fancy_getopt.pyi +++ b/stdlib/distutils/fancy_getopt.pyi @@ -1,10 +1,10 @@ from collections.abc import Iterable, Mapping +from getopt import _SliceableT, _StrSequenceT_co from re import Pattern from typing import Any, Final, overload from typing_extensions import TypeAlias _Option: TypeAlias = tuple[str, str | None, str] -_GR: TypeAlias = tuple[list[str], OptionDummy] longopt_pat: Final = r"[a-zA-Z](?:[a-zA-Z0-9-]*)" longopt_re: Final[Pattern[str]] @@ -15,15 +15,25 @@ class FancyGetopt: def __init__(self, option_table: list[_Option] | None = None) -> None: ... # TODO kinda wrong, `getopt(object=object())` is invalid @overload - def getopt(self, args: list[str] | None = None) -> _GR: ... + def getopt( + self, args: _SliceableT[_StrSequenceT_co] | None = None, object: None = None + ) -> tuple[_StrSequenceT_co, OptionDummy]: ... @overload - def getopt(self, args: list[str] | None, object: Any) -> list[str]: ... + def getopt( + self, args: _SliceableT[_StrSequenceT_co] | None, object: Any + ) -> _StrSequenceT_co: ... # object is an arbitrary non-slotted object def get_option_order(self) -> list[tuple[str, str]]: ... def generate_help(self, header: str | None = None) -> list[str]: ... +# Same note as FancyGetopt.getopt +@overload def fancy_getopt( - options: list[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: list[str] | None -) -> list[str] | _GR: ... 
+ options: list[_Option], negative_opt: Mapping[_Option, _Option], object: None, args: _SliceableT[_StrSequenceT_co] | None +) -> tuple[_StrSequenceT_co, OptionDummy]: ... +@overload +def fancy_getopt( + options: list[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: _SliceableT[_StrSequenceT_co] | None +) -> _StrSequenceT_co: ... WS_TRANS: Final[dict[int, str]] diff --git a/stdlib/getopt.pyi b/stdlib/getopt.pyi index bcc8d9750b19..c15db8122cfc 100644 --- a/stdlib/getopt.pyi +++ b/stdlib/getopt.pyi @@ -1,10 +1,22 @@ -from collections.abc import Iterable +from collections.abc import Iterable, Sequence +from typing import Protocol, TypeVar, overload, type_check_only + +_StrSequenceT_co = TypeVar("_StrSequenceT_co", covariant=True, bound=Sequence[str]) + +@type_check_only +class _SliceableT(Protocol[_StrSequenceT_co]): + @overload + def __getitem__(self, key: int, /) -> str: ... + @overload + def __getitem__(self, key: slice, /) -> _StrSequenceT_co: ... __all__ = ["GetoptError", "error", "getopt", "gnu_getopt"] -def getopt(args: list[str], shortopts: str, longopts: Iterable[str] | str = []) -> tuple[list[tuple[str, str]], list[str]]: ... +def getopt( + args: _SliceableT[_StrSequenceT_co], shortopts: str, longopts: Iterable[str] | str = [] +) -> tuple[list[tuple[str, str]], _StrSequenceT_co]: ... def gnu_getopt( - args: list[str], shortopts: str, longopts: Iterable[str] | str = [] + args: Sequence[str], shortopts: str, longopts: Iterable[str] | str = [] ) -> tuple[list[tuple[str, str]], list[str]]: ... class GetoptError(Exception): From d39eb9292d5842a3e1b529b38069cae5f5a7847e Mon Sep 17 00:00:00 2001 From: iconchris <84409222+iconchris@users.noreply.github.com> Date: Mon, 3 Mar 2025 09:03:41 -0600 Subject: [PATCH 039/388] Add edges() override in MultiGraph to support the "keys" argument (#13567) --- stubs/networkx/networkx/classes/multigraph.pyi | 3 +++ 1 file changed, 3 insertions(+) diff --git a/stubs/networkx/networkx/classes/multigraph.pyi b/stubs/networkx/networkx/classes/multigraph.pyi index fac0920609ff..f6b7766ccf9d 100644 --- a/stubs/networkx/networkx/classes/multigraph.pyi +++ b/stubs/networkx/networkx/classes/multigraph.pyi @@ -5,6 +5,7 @@ from typing_extensions import TypeAlias from networkx.classes.coreviews import MultiAdjacencyView from networkx.classes.graph import Graph, _Node from networkx.classes.multidigraph import MultiDiGraph +from networkx.classes.reportviews import OutMultiEdgeView _MultiEdge: TypeAlias = tuple[_Node, _Node, int] # noqa: Y047 @@ -23,3 +24,5 @@ class MultiGraph(Graph[_Node]): def to_directed(self, as_view: bool = False) -> MultiDiGraph[_Node]: ... def to_undirected(self, as_view: bool = False) -> MultiGraph[_Node]: ... def number_of_edges(self, u: _Node | None = None, v: _Node | None = None) -> int: ... + @cached_property + def edges(self) -> OutMultiEdgeView[_Node]: ... From 9f4c989a4b1e18f14228c56529765fab60cfd87c Mon Sep 17 00:00:00 2001 From: Stephen Morton Date: Mon, 3 Mar 2025 07:38:43 -0800 Subject: [PATCH 040/388] Push items down from socketserver.BaseServer to TCPServer (#13082) Cf. 
https://github.com/python/cpython/issues/127209 --- stdlib/@tests/stubtest_allowlists/common.txt | 4 +--- stdlib/socketserver.pyi | 16 ++++++++-------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/common.txt b/stdlib/@tests/stubtest_allowlists/common.txt index f5d74200e417..451875d795f1 100644 --- a/stdlib/@tests/stubtest_allowlists/common.txt +++ b/stdlib/@tests/stubtest_allowlists/common.txt @@ -10,9 +10,6 @@ http.client.HTTPConnection.response_class # the actual type at runtime is abc.A importlib.abc.Loader.exec_module # See Lib/importlib/_abc.py. Might be defined for backwards compatibility importlib.abc.MetaPathFinder.find_spec # Not defined on the actual class, but expected to exist. importlib.abc.PathEntryFinder.find_spec # Not defined on the actual class, but expected to exist. -socketserver.BaseServer.fileno # implemented in derived classes -socketserver.BaseServer.get_request # implemented in derived classes -socketserver.BaseServer.server_bind # implemented in derived classes tkinter.simpledialog.[A-Z_]+ tkinter.simpledialog.TclVersion tkinter.simpledialog.TkVersion @@ -411,6 +408,7 @@ pickle._Pickler\..* # Best effort typing for undocumented internals pickle._Unpickler\..* # Best effort typing for undocumented internals _?queue.SimpleQueue.__init__ # C signature is broader than what is actually accepted shutil.rmtree # function with attributes, which we approximate with a callable protocol +socketserver.BaseServer.get_request # Not implemented, but expected to exist on subclasses. ssl.PROTOCOL_SSLv2 # Depends on the existence and flags of SSL ssl.PROTOCOL_SSLv3 # Depends on the existence and flags of SSL sys.implementation # Actually SimpleNamespace but then you wouldn't have convenient attributes diff --git a/stdlib/socketserver.pyi b/stdlib/socketserver.pyi index ae6575d85082..061932f0fac7 100644 --- a/stdlib/socketserver.pyi +++ b/stdlib/socketserver.pyi @@ -38,29 +38,22 @@ _AfInetAddress: TypeAlias = tuple[str | bytes | bytearray, int] # address accep # This can possibly be generic at some point: class BaseServer: - address_family: int server_address: _Address - socket: _socket - allow_reuse_address: bool - request_queue_size: int - socket_type: int timeout: float | None RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] def __init__( self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] ) -> None: ... - def fileno(self) -> int: ... def handle_request(self) -> None: ... def serve_forever(self, poll_interval: float = 0.5) -> None: ... def shutdown(self) -> None: ... def server_close(self) -> None: ... def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... - def get_request(self) -> tuple[Any, Any]: ... + def get_request(self) -> tuple[Any, Any]: ... # Not implemented here, but expected to exist on subclasses def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ... def handle_timeout(self) -> None: ... def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_activate(self) -> None: ... - def server_bind(self) -> None: ... def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ... def __enter__(self) -> Self: ... def __exit__( @@ -71,6 +64,11 @@ class BaseServer: def close_request(self, request: _RequestType) -> None: ... 
# undocumented class TCPServer(BaseServer): + address_family: int + socket: _socket + allow_reuse_address: bool + request_queue_size: int + socket_type: int if sys.version_info >= (3, 11): allow_reuse_port: bool server_address: _AfInetAddress @@ -80,7 +78,9 @@ class TCPServer(BaseServer): RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, ) -> None: ... + def fileno(self) -> int: ... def get_request(self) -> tuple[_socket, _RetAddress]: ... + def server_bind(self) -> None: ... class UDPServer(TCPServer): max_packet_size: ClassVar[int] From 0e95274dafaead35b3aa401170ccdf21b329e0c1 Mon Sep 17 00:00:00 2001 From: cake-monotone Date: Tue, 4 Mar 2025 00:41:34 +0900 Subject: [PATCH 041/388] Improving type support for `math.prod` (#13572) --- stdlib/@tests/test_cases/check_math.py | 63 ++++++++++++++++++++++++++ stdlib/_typeshed/__init__.pyi | 6 +++ stdlib/math.pyi | 26 +++++++++-- 3 files changed, 92 insertions(+), 3 deletions(-) create mode 100644 stdlib/@tests/test_cases/check_math.py diff --git a/stdlib/@tests/test_cases/check_math.py b/stdlib/@tests/test_cases/check_math.py new file mode 100644 index 000000000000..d637c15ff178 --- /dev/null +++ b/stdlib/@tests/test_cases/check_math.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from decimal import Decimal +from fractions import Fraction +from math import prod +from typing import Any, Literal, Union +from typing_extensions import assert_type + + +class SupportsMul: + def __mul__(self, other: Any) -> SupportsMul: + return SupportsMul() + + +class SupportsRMul: + def __rmul__(self, other: Any) -> SupportsRMul: + return SupportsRMul() + + +class SupportsMulAndRMul: + def __mul__(self, other: Any) -> SupportsMulAndRMul: + return SupportsMulAndRMul() + + def __rmul__(self, other: Any) -> SupportsMulAndRMul: + return SupportsMulAndRMul() + + +literal_list: list[Literal[0, 1]] = [0, 1, 1] + +assert_type(prod([2, 4]), int) +assert_type(prod([3, 5], start=4), int) + +assert_type(prod([True, False]), int) +assert_type(prod([True, False], start=True), int) +assert_type(prod(literal_list), int) + +assert_type(prod([SupportsMul(), SupportsMul()], start=SupportsMul()), SupportsMul) +assert_type(prod([SupportsMulAndRMul(), SupportsMulAndRMul()]), Union[SupportsMulAndRMul, Literal[1]]) + +assert_type(prod([5.6, 3.2]), Union[float, Literal[1]]) +assert_type(prod([5.6, 3.2], start=3), Union[float, int]) + +assert_type(prod([Fraction(7, 2), Fraction(3, 5)]), Union[Fraction, Literal[1]]) +assert_type(prod([Fraction(7, 2), Fraction(3, 5)], start=Fraction(1)), Fraction) +assert_type(prod([Decimal("3.14"), Decimal("2.71")]), Union[Decimal, Literal[1]]) +assert_type(prod([Decimal("3.14"), Decimal("2.71")], start=Decimal("1.00")), Decimal) +assert_type(prod([complex(7, 2), complex(3, 5)]), Union[complex, Literal[1]]) +assert_type(prod([complex(7, 2), complex(3, 5)], start=complex(1, 0)), complex) + + +# mypy and pyright infer the types differently for these, so we can't use assert_type +# Just test that no error is emitted for any of these +prod([5.6, 3.2]) # mypy: `float`; pyright: `float | Literal[0]` +prod([2.5, 5.8], start=5) # mypy: `float`; pyright: `float | int` + +# These all fail at runtime +prod([SupportsMul(), SupportsMul()]) # type: ignore +prod([SupportsRMul(), SupportsRMul()], start=SupportsRMul()) # type: ignore +prod([SupportsRMul(), SupportsRMul()]) # type: ignore + +# TODO: these pass pyright with the current stubs, but mypy erroneously emits an error: +# prod([3, Fraction(7, 22), 
complex(8, 0), 9.83]) +# prod([3, Decimal("0.98")]) diff --git a/stdlib/_typeshed/__init__.pyi b/stdlib/_typeshed/__init__.pyi index 7d8e3083671e..2b56a4e97519 100644 --- a/stdlib/_typeshed/__init__.pyi +++ b/stdlib/_typeshed/__init__.pyi @@ -117,6 +117,12 @@ class SupportsSub(Protocol[_T_contra, _T_co]): class SupportsRSub(Protocol[_T_contra, _T_co]): def __rsub__(self, x: _T_contra, /) -> _T_co: ... +class SupportsMul(Protocol[_T_contra, _T_co]): + def __mul__(self, x: _T_contra, /) -> _T_co: ... + +class SupportsRMul(Protocol[_T_contra, _T_co]): + def __rmul__(self, x: _T_contra, /) -> _T_co: ... + class SupportsDivMod(Protocol[_T_contra, _T_co]): def __divmod__(self, other: _T_contra, /) -> _T_co: ... diff --git a/stdlib/math.pyi b/stdlib/math.pyi index 86f71f27580a..f73429cf6940 100644 --- a/stdlib/math.pyi +++ b/stdlib/math.pyi @@ -1,6 +1,7 @@ import sys +from _typeshed import SupportsMul, SupportsRMul from collections.abc import Iterable -from typing import Final, Protocol, SupportsFloat, SupportsIndex, TypeVar, overload +from typing import Any, Final, Literal, Protocol, SupportsFloat, SupportsIndex, TypeVar, overload from typing_extensions import TypeAlias _T = TypeVar("_T") @@ -99,10 +100,29 @@ elif sys.version_info >= (3, 9): def perm(n: SupportsIndex, k: SupportsIndex | None = None, /) -> int: ... def pow(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... + +_PositiveInteger: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] +_NegativeInteger: TypeAlias = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20] +_LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed + +_MultiplicableT1 = TypeVar("_MultiplicableT1", bound=SupportsMul[Any, Any]) +_MultiplicableT2 = TypeVar("_MultiplicableT2", bound=SupportsMul[Any, Any]) + +class _SupportsProdWithNoDefaultGiven(SupportsMul[Any, Any], SupportsRMul[int, Any], Protocol): ... + +_SupportsProdNoDefaultT = TypeVar("_SupportsProdNoDefaultT", bound=_SupportsProdWithNoDefaultGiven) + +# This stub is based on the type stub for `builtins.sum`. +# Like `builtins.sum`, it cannot be precisely represented in a type stub +# without introducing many false positives. +# For more details on its limitations and false positives, see #13572. +# Instead, just like `builtins.sum`, we explicitly handle several useful cases. +@overload +def prod(iterable: Iterable[bool | _LiteralInteger], /, *, start: int = 1) -> int: ... # type: ignore[overload-overlap] @overload -def prod(iterable: Iterable[SupportsIndex], /, *, start: SupportsIndex = 1) -> int: ... # type: ignore[overload-overlap] +def prod(iterable: Iterable[_SupportsProdNoDefaultT], /) -> _SupportsProdNoDefaultT | Literal[1]: ... @overload -def prod(iterable: Iterable[_SupportsFloatOrIndex], /, *, start: _SupportsFloatOrIndex = 1) -> float: ... +def prod(iterable: Iterable[_MultiplicableT1], /, *, start: _MultiplicableT2) -> _MultiplicableT1 | _MultiplicableT2: ... def radians(x: _SupportsFloatOrIndex, /) -> float: ... def remainder(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... def sin(x: _SupportsFloatOrIndex, /) -> float: ... 
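As a rough illustration of how the `prod` overloads above are expected to resolve (these cases mirror the new `check_math.py` test file added in this patch; the snippet is a reader-facing sketch, not part of the diff):

    from fractions import Fraction
    from math import prod
    from typing import Literal, Union
    from typing_extensions import assert_type

    assert_type(prod([2, 4]), int)                            # int elements stay int
    assert_type(prod([5.6, 3.2]), Union[float, Literal[1]])   # Literal[1] covers the empty-iterable default start
    assert_type(prod([Fraction(7, 2), Fraction(3, 5)], start=Fraction(1)), Fraction)  # explicit start selects the two-TypeVar overload
    print(prod([2, 3, 4]), prod([], start=10))                # runtime values: 24 10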
From a435b3f001b8cc21388836d6b4e0dd9b94441645 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 3 Mar 2025 17:49:41 +0100 Subject: [PATCH 042/388] Update pyright to v1.1.395; disable a pow() check (#13564) pyright and mypy disagree about the exact type due to differing overloads handling. --------- Co-authored-by: Avasam --- requirements-tests.txt | 2 +- stdlib/@tests/test_cases/builtins/check_pow.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index f49905636e83..1c4ad7897ed3 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,7 +1,7 @@ # Type checkers that we test our stubs against. These should always # be pinned to a specific version to make failure reproducible. mypy==1.15.0 -pyright==1.1.389 +pyright==1.1.395 # pytype can be installed on Windows, but requires building wheels, let's not do that on the CI pytype==2024.10.11; platform_system != "Windows" and python_version >= "3.10" and python_version < "3.13" diff --git a/stdlib/@tests/test_cases/builtins/check_pow.py b/stdlib/@tests/test_cases/builtins/check_pow.py index 6fe7aacffa6e..448d41ad80fe 100644 --- a/stdlib/@tests/test_cases/builtins/check_pow.py +++ b/stdlib/@tests/test_cases/builtins/check_pow.py @@ -47,7 +47,9 @@ assert_type(complex(6) ** 6.2, complex) assert_type(pow(complex(9), 7.3, None), complex) -assert_type(pow(Fraction(), 4, None), Fraction) +# pyright infers Fraction | float | complex, while mypy infers Fraction. +# This is probably because of differences in @overload handling. +assert_type(pow(Fraction(), 4, None), Fraction) # pyright: ignore[reportAssertTypeFailure] assert_type(Fraction() ** 4, Fraction) assert_type(pow(Fraction(3, 7), complex(1, 8)), complex) From d4fb9db7a8d159fbb520aba71f6af577a04c66a7 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 3 Mar 2025 12:03:34 -0500 Subject: [PATCH 043/388] Enable Ruff PLR (Pylint Refactor) (#13307) --- pyproject.toml | 7 +++++++ scripts/create_baseline_stubs.py | 5 +++-- scripts/stubsabot.py | 2 +- stdlib/asyncio/__init__.pyi | 2 ++ stdlib/importlib/readers.pyi | 4 ++-- stubs/gevent/gevent/libev/corecext.pyi | 2 +- stubs/gevent/gevent/libev/corecffi.pyi | 2 +- stubs/gevent/gevent/libuv/loop.pyi | 2 +- 8 files changed, 18 insertions(+), 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 33f06b5b6635..00cb7501b847 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,7 @@ select = [ "N", # pep8-naming "PGH", # pygrep-hooks "PLC", # Pylint Convention + "PLR", # Pylint Refactor "RUF", # Ruff-specific and unused-noqa "TRY", # tryceratops "UP", # pyupgrade @@ -142,6 +143,12 @@ ignore = [ # Used for direct, non-subclass type comparison, for example: `type(val) is str` # see https://github.com/astral-sh/ruff/issues/6465 "E721", # Do not compare types, use `isinstance()` + # Leave the size and complexity of tests to human interpretation + "PLR09", # Too many ... + # Too many magic number "2" that are preferable inline. 
https://github.com/astral-sh/ruff/issues/10009 + "PLR2004", # Magic value used in comparison, consider replacing `{value}` with a constant variable + # Keep codeflow path separation explicit + "PLR5501", # Use `elif` instead of `else` then `if`, to reduce indentation # Mostly from scripts and tests, it's ok to have messages passed directly to exceptions "TRY003", # Avoid specifying long messages outside the exception class # Slower and more verbose https://github.com/astral-sh/ruff/issues/7871 diff --git a/scripts/create_baseline_stubs.py b/scripts/create_baseline_stubs.py index 46de29b026c7..3766266adc91 100755 --- a/scripts/create_baseline_stubs.py +++ b/scripts/create_baseline_stubs.py @@ -18,6 +18,7 @@ import subprocess import sys import urllib.parse +from http import HTTPStatus from importlib.metadata import distribution import aiohttp @@ -72,7 +73,7 @@ def run_ruff(stub_dir: str) -> None: async def get_project_urls_from_pypi(project: str, session: aiohttp.ClientSession) -> dict[str, str]: pypi_root = f"https://pypi.org/pypi/{urllib.parse.quote(project)}" async with session.get(f"{pypi_root}/json") as response: - if response.status != 200: + if response.status != HTTPStatus.OK: return {} j: dict[str, dict[str, dict[str, str]]] j = await response.json() @@ -107,7 +108,7 @@ async def get_upstream_repo_url(project: str) -> str | None: # truncate to https://site.com/user/repo upstream_repo_url = "/".join(url.split("/")[:5]) async with session.get(upstream_repo_url) as response: - if response.status == 200: + if response.status == HTTPStatus.OK: return upstream_repo_url return None diff --git a/scripts/stubsabot.py b/scripts/stubsabot.py index f121bed5f5f0..3e755ca2d5d4 100755 --- a/scripts/stubsabot.py +++ b/scripts/stubsabot.py @@ -308,7 +308,7 @@ async def get_github_repo_info(session: aiohttp.ClientSession, stub_info: StubMe assert len(Path(url_path).parts) == 2 github_tags_info_url = f"https://api.github.com/repos/{url_path}/tags" async with session.get(github_tags_info_url, headers=get_github_api_headers()) as response: - if response.status == 200: + if response.status == HTTPStatus.OK: tags: list[dict[str, Any]] = await response.json() assert isinstance(tags, list) return GitHubInfo(repo_path=url_path, tags=tags) diff --git a/stdlib/asyncio/__init__.pyi b/stdlib/asyncio/__init__.pyi index 89a8143c5f7f..e47f640a1f9b 100644 --- a/stdlib/asyncio/__init__.pyi +++ b/stdlib/asyncio/__init__.pyi @@ -1,3 +1,5 @@ +# ruff: noqa: PLR5501 # This condition is so big, it's clearer to keep to platform condition in two blocks +# Can't NOQA on a specific line: https://github.com/plinss/flake8-noqa/issues/22 import sys from collections.abc import Awaitable, Coroutine, Generator from typing import Any, TypeVar diff --git a/stdlib/importlib/readers.pyi b/stdlib/importlib/readers.pyi index ceb3e731e7a5..4a6c73921535 100644 --- a/stdlib/importlib/readers.pyi +++ b/stdlib/importlib/readers.pyi @@ -16,9 +16,9 @@ if sys.version_info >= (3, 10): from zipimport import zipimporter if sys.version_info >= (3, 11): - import importlib.resources.abc as abc + from importlib.resources import abc else: - import importlib.abc as abc + from importlib import abc if sys.version_info >= (3, 10): if sys.version_info >= (3, 11): diff --git a/stubs/gevent/gevent/libev/corecext.pyi b/stubs/gevent/gevent/libev/corecext.pyi index 994793ee8a25..35a46bbf3655 100644 --- a/stubs/gevent/gevent/libev/corecext.pyi +++ b/stubs/gevent/gevent/libev/corecext.pyi @@ -5,9 +5,9 @@ from types import TracebackType from typing import Any from 
typing_extensions import ParamSpec -import gevent.libev.watcher as watcher from gevent._ffi.loop import _ErrorHandler from gevent._types import _Callback +from gevent.libev import watcher # this c extension is only available on posix if sys.platform != "win32": diff --git a/stubs/gevent/gevent/libev/corecffi.pyi b/stubs/gevent/gevent/libev/corecffi.pyi index 4dc59a997f30..e814fb9b90b3 100644 --- a/stubs/gevent/gevent/libev/corecffi.pyi +++ b/stubs/gevent/gevent/libev/corecffi.pyi @@ -2,8 +2,8 @@ import sys from _typeshed import FileDescriptor from collections.abc import Sequence -import gevent.libev.watcher as watcher from gevent._ffi.loop import AbstractLoop +from gevent.libev import watcher def get_version() -> str: ... def get_header_version() -> str: ... diff --git a/stubs/gevent/gevent/libuv/loop.pyi b/stubs/gevent/gevent/libuv/loop.pyi index e5d539acb3c0..950b82118375 100644 --- a/stubs/gevent/gevent/libuv/loop.pyi +++ b/stubs/gevent/gevent/libuv/loop.pyi @@ -2,9 +2,9 @@ import sys from _typeshed import FileDescriptor from typing import NamedTuple -import gevent.libuv.watcher as watcher from gevent._ffi.loop import AbstractLoop from gevent._types import _IoWatcher +from gevent.libuv import watcher def get_version() -> str: ... def get_header_version() -> str: ... From d40a7064d053f619a53e36e25239ac1682b50175 Mon Sep 17 00:00:00 2001 From: bzoracler <50305397+bzoracler@users.noreply.github.com> Date: Tue, 4 Mar 2025 12:17:40 +1300 Subject: [PATCH 044/388] Add stubs for watchpoints (#13248) --- pyrightconfig.stricter.json | 1 + .../watchpoints/@tests/stubtest_allowlist.txt | 1 + stubs/watchpoints/METADATA.toml | 2 + stubs/watchpoints/watchpoints/__init__.pyi | 10 +++ stubs/watchpoints/watchpoints/ast_monkey.pyi | 3 + stubs/watchpoints/watchpoints/util.pyi | 6 ++ stubs/watchpoints/watchpoints/watch.pyi | 68 +++++++++++++++++++ .../watchpoints/watchpoints/watch_element.pyi | 56 +++++++++++++++ stubs/watchpoints/watchpoints/watch_print.pyi | 24 +++++++ 9 files changed, 171 insertions(+) create mode 100644 stubs/watchpoints/@tests/stubtest_allowlist.txt create mode 100644 stubs/watchpoints/METADATA.toml create mode 100644 stubs/watchpoints/watchpoints/__init__.pyi create mode 100644 stubs/watchpoints/watchpoints/ast_monkey.pyi create mode 100644 stubs/watchpoints/watchpoints/util.pyi create mode 100644 stubs/watchpoints/watchpoints/watch.pyi create mode 100644 stubs/watchpoints/watchpoints/watch_element.pyi create mode 100644 stubs/watchpoints/watchpoints/watch_print.pyi diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 9f20ff2c9c93..5b652a171efc 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -95,6 +95,7 @@ "stubs/tqdm", "stubs/ttkthemes", "stubs/vobject", + "stubs/watchpoints", "stubs/workalendar", "stubs/wurlitzer", ], diff --git a/stubs/watchpoints/@tests/stubtest_allowlist.txt b/stubs/watchpoints/@tests/stubtest_allowlist.txt new file mode 100644 index 000000000000..5755d26ce61f --- /dev/null +++ b/stubs/watchpoints/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +watchpoints.all diff --git a/stubs/watchpoints/METADATA.toml b/stubs/watchpoints/METADATA.toml new file mode 100644 index 000000000000..0d08dbc0d9da --- /dev/null +++ b/stubs/watchpoints/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.2.5" +upstream_repository = "https://github.com/gaogaotiantian/watchpoints" diff --git a/stubs/watchpoints/watchpoints/__init__.pyi b/stubs/watchpoints/watchpoints/__init__.pyi new file mode 100644 index 000000000000..53f34b3a1610 --- /dev/null 
+++ b/stubs/watchpoints/watchpoints/__init__.pyi @@ -0,0 +1,10 @@ +from collections.abc import Callable +from typing import Final +from typing_extensions import LiteralString, Unpack + +from .watch import Watch + +__version__: Final[LiteralString] + +watch: Watch +unwatch: Final[Callable[[Unpack[tuple[object, ...]]], None]] diff --git a/stubs/watchpoints/watchpoints/ast_monkey.pyi b/stubs/watchpoints/watchpoints/ast_monkey.pyi new file mode 100644 index 000000000000..1409982075af --- /dev/null +++ b/stubs/watchpoints/watchpoints/ast_monkey.pyi @@ -0,0 +1,3 @@ +import ast + +def ast_parse_node(node: ast.expr) -> ast.Module: ... diff --git a/stubs/watchpoints/watchpoints/util.pyi b/stubs/watchpoints/watchpoints/util.pyi new file mode 100644 index 000000000000..37d456070dbc --- /dev/null +++ b/stubs/watchpoints/watchpoints/util.pyi @@ -0,0 +1,6 @@ +import ast +from collections.abc import Iterable +from types import FrameType + +def getline(frame: FrameType) -> str: ... +def getargnodes(frame: FrameType) -> Iterable[tuple[ast.expr, str]]: ... diff --git a/stubs/watchpoints/watchpoints/watch.pyi b/stubs/watchpoints/watchpoints/watch.pyi new file mode 100644 index 000000000000..320bc4afdbac --- /dev/null +++ b/stubs/watchpoints/watchpoints/watch.pyi @@ -0,0 +1,68 @@ +import threading +from _typeshed import SupportsWrite, TraceFunction +from collections.abc import Callable +from pdb import Pdb +from types import FrameType +from typing import Any, Literal, Protocol, TypeVar +from typing_extensions import TypeAlias + +from .watch_element import WatchElement + +_T = TypeVar("_T") + +# Alias used for fields that must always be valid identifiers +# A string `x` counts as a valid identifier if both the following are True +# (1) `x.isidentifier()` evaluates to `True` +# (2) `keyword.iskeyword(x)` evaluates to `False` +_Identifier: TypeAlias = str + +class Watch: + # User-defined callbacks passed to `__call__()` or `config()` set as instance variables have arguments of type `Any` to be + # compatible with more precisely-annotated signatures. + + custom_printer: Callable[[Any], None] | None + enable: bool + file: str | SupportsWrite[str] | None + pdb: Pdb | None + pdb_enable: bool + set_lock: threading.Lock + stack_limit: int | None + tracefunc_lock: threading.Lock + tracefunc_stack: list[TraceFunction | None] + watch_list: list[WatchElement] + + def __init__(self) -> None: ... + def __call__( + self, + *args: object, + alias: str = ..., + callback: Callable[[FrameType, WatchElement, tuple[str, str, int | None]], None] = ..., + cmp: Callable[[Any, Any], bool] = ..., # User-defined comparison callback; compares 2 arguments of any type + copy: Callable[[_T], _T] = ..., + # User-defined printing callback; writes a string representation of any object to a stream + custom_printer: Callable[[Any], None] = ..., + deepcopy: bool = False, + file: str | SupportsWrite[str] = ..., + stack_limit: int | None = 5, + track: Literal["object", "variable"] = ..., + when: Callable[[Any], bool] = ..., # User-defined callback for conditional watchpoints + ) -> None: ... + def config( + self, + *, + callback: Callable[[FrameType, WatchElement, tuple[str, str, int | None]], None] = ..., + pdb: Literal[True] = ..., + file: str | SupportsWrite[str] = ..., + stack_limit: int | None = 5, + custom_printer: Callable[[Any], None] = ..., # User-defined printing callback + ) -> None: ... + def install(self, func: _Identifier = "watch") -> None: ... + def restore(self) -> None: ... + def start_trace(self, frame: FrameType) -> None: ... 
+ def stop_trace(self, frame: FrameType) -> None: ... + def tracefunc(self, frame: FrameType, event: str, arg: object) -> _TraceFunc: ... + def uninstall(self, func: _Identifier = "watch") -> None: ... + def unwatch(self, *args: object) -> None: ... + +class _TraceFunc(Protocol): + def __call__(self, frame: FrameType, event: str, arg: object) -> _TraceFunc: ... diff --git a/stubs/watchpoints/watchpoints/watch_element.pyi b/stubs/watchpoints/watchpoints/watch_element.pyi new file mode 100644 index 000000000000..b99f7e65f1c3 --- /dev/null +++ b/stubs/watchpoints/watchpoints/watch_element.pyi @@ -0,0 +1,56 @@ +import ast +from collections.abc import Callable, Iterable +from types import FrameType +from typing import Any, Literal, TypeVar +from typing_extensions import TypeAlias + +from .watch_print import WatchPrint + +_T = TypeVar("_T") +_TrackKind: TypeAlias = Literal["object", "variable"] | list[Literal["object", "variable"]] + +class WatchElement: + # User-defined callbacks passed to `__init__` set as instance variables have arguments of type `Any` to be + # compatible with more precisely-annotated signatures. These callbacks are passed from `watchpoints.watch.Watch`. + + alias: str | None + attr: str | None + cmp: Callable[[Any, Any], bool] | None + copy: Callable[[Any], object] | None # User-defined copy callback + default_alias: str | None + deepcopy: bool + exist: bool + frame: FrameType + localvar: str | None + obj: Any + parent: Any + prev_obj: Any + prev_obj_repr: str + subscr: Any + watch_print: WatchPrint + when: Callable[[Any], bool] | None + + def __init__( + self, + frame: FrameType, + node: ast.expr, + *, + alias: str | None = ..., + callback: Callable[[FrameType, WatchElement, tuple[str, str, int | None]], None] | None = ..., + cmp: Callable[[Any, Any], bool] | None = ..., # User-defined comparison callback + copy: Callable[[_T], _T] | None = ..., + deepcopy: bool = False, + default_alias: str | None = ..., + track: _TrackKind = ..., + watch_print: WatchPrint = ..., + when: Callable[[Any], bool] | None = ..., # User-defined callback for conditional watchpoints + ) -> None: ... + def belong_to(self, lst: Iterable[object]) -> bool: ... + def changed(self, frame: FrameType) -> tuple[bool, bool]: ... + def obj_changed(self, other: object) -> bool: ... + def same(self, other: object) -> bool: ... + @property + def track(self) -> _TrackKind: ... + @track.setter + def track(self, val: _TrackKind) -> None: ... + def update(self) -> None: ... diff --git a/stubs/watchpoints/watchpoints/watch_print.pyi b/stubs/watchpoints/watchpoints/watch_print.pyi new file mode 100644 index 000000000000..715d914daf07 --- /dev/null +++ b/stubs/watchpoints/watchpoints/watch_print.pyi @@ -0,0 +1,24 @@ +from _typeshed import SupportsWrite +from collections.abc import Callable +from types import FrameType +from typing import Any + +from .watch_element import WatchElement + +class WatchPrint: + # User-defined callbacks passed to `__init__` set as instance variables have arguments of type `Any` to be + # compatible with more precisely-annotated signatures. These callbacks are passed from `watchpoints.watch.Watch`. + + custom_printer: Callable[[Any], None] | None + file: str | SupportsWrite[str] | None + stack_limit: int | None + + def __init__( + self, + file: str | SupportsWrite[str] | None = ..., + stack_limit: int | None = ..., + custom_printer: Callable[[Any], None] | None = ..., # User-defined printing callback + ) -> None: ... 
+ def __call__(self, frame: FrameType, elem: WatchElement, exec_info: tuple[str, str, int | None]) -> None: ... + def getsourceline(self, exec_info: tuple[str, str, int | None]) -> str: ... + def printer(self, obj: object) -> None: ... From a86529bdae6ecf69de7cbec96f4e9b0cb551c155 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 4 Mar 2025 08:10:35 +0100 Subject: [PATCH 045/388] Update dependency pyright to v1.1.396 (#13576) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- requirements-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 1c4ad7897ed3..0f7faa5dbf4a 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,7 +1,7 @@ # Type checkers that we test our stubs against. These should always # be pinned to a specific version to make failure reproducible. mypy==1.15.0 -pyright==1.1.395 +pyright==1.1.396 # pytype can be installed on Windows, but requires building wheels, let's not do that on the CI pytype==2024.10.11; platform_system != "Windows" and python_version >= "3.10" and python_version < "3.13" From f5379bb9d07fab146ee1c597afba4ac63497abd3 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 4 Mar 2025 09:31:12 +0100 Subject: [PATCH 046/388] Use Stricter pyright settings in CI for watchpoints (#13577) The stubs are fully annotated. Fixes https://github.com/AlexWaygood/typeshed-stats/issues/323 --- pyrightconfig.stricter.json | 1 - 1 file changed, 1 deletion(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 5b652a171efc..9f20ff2c9c93 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -95,7 +95,6 @@ "stubs/tqdm", "stubs/ttkthemes", "stubs/vobject", - "stubs/watchpoints", "stubs/workalendar", "stubs/wurlitzer", ], From 28ac15db65c0216c372f3a0b23055cfc8f366a25 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 4 Mar 2025 06:11:10 -0500 Subject: [PATCH 047/388] Remove `setuptools/pkg_resources` (#13369) --- stubs/Pygments/METADATA.toml | 2 +- stubs/Pygments/pygments/plugin.pyi | 14 +- stubs/fanstatic/fanstatic/registry.pyi | 11 +- .../setuptools/@tests/stubtest_allowlist.txt | 9 - .../@tests/test_cases/check_protocols.py | 35 -- stubs/setuptools/METADATA.toml | 5 +- stubs/setuptools/pkg_resources/__init__.pyi | 543 ------------------ .../_vendored_packaging/__init__.pyi | 8 - .../_vendored_packaging/markers.pyi | 13 - .../_vendored_packaging/requirements.pyi | 14 - .../_vendored_packaging/specifiers.pyi | 66 --- .../_vendored_packaging/version.pyi | 49 -- 12 files changed, 23 insertions(+), 746 deletions(-) delete mode 100644 stubs/setuptools/pkg_resources/__init__.pyi delete mode 100644 stubs/setuptools/pkg_resources/_vendored_packaging/__init__.pyi delete mode 100644 stubs/setuptools/pkg_resources/_vendored_packaging/markers.pyi delete mode 100644 stubs/setuptools/pkg_resources/_vendored_packaging/requirements.pyi delete mode 100644 stubs/setuptools/pkg_resources/_vendored_packaging/specifiers.pyi delete mode 100644 stubs/setuptools/pkg_resources/_vendored_packaging/version.pyi diff --git a/stubs/Pygments/METADATA.toml b/stubs/Pygments/METADATA.toml index 6d0e6326d02c..f6fea02473cc 100644 --- a/stubs/Pygments/METADATA.toml +++ b/stubs/Pygments/METADATA.toml @@ -1,6 +1,6 @@ version = "2.19.*" upstream_repository = "https://github.com/pygments/pygments" -requires = ["types-docutils", "types-setuptools"] +requires = ["types-docutils"] partial_stub 
= true [tool.stubtest] diff --git a/stubs/Pygments/pygments/plugin.pyi b/stubs/Pygments/pygments/plugin.pyi index 308e21957495..e47d66b02c53 100644 --- a/stubs/Pygments/pygments/plugin.pyi +++ b/stubs/Pygments/pygments/plugin.pyi @@ -1,7 +1,7 @@ -from collections.abc import Generator, Iterable +import sys +from collections.abc import Generator from typing import Any -from pkg_resources import EntryPoint from pygments.filter import Filter from pygments.formatter import Formatter from pygments.lexer import Lexer @@ -12,7 +12,15 @@ FORMATTER_ENTRY_POINT: str STYLE_ENTRY_POINT: str FILTER_ENTRY_POINT: str -def iter_entry_points(group_name: str) -> Iterable[EntryPoint]: ... +if sys.version_info >= (3, 10): + from importlib.metadata import EntryPoints + def iter_entry_points(group_name: str) -> EntryPoints: ... + +else: + from importlib.metadata import EntryPoint + + def iter_entry_points(group_name: str) -> tuple[EntryPoint, ...] | list[EntryPoint]: ... + def find_plugin_lexers() -> Generator[type[Lexer], None, None]: ... def find_plugin_formatters() -> Generator[tuple[str, type[Formatter[Any]]], None, None]: ... def find_plugin_styles() -> Generator[tuple[str, type[Style]], None, None]: ... diff --git a/stubs/fanstatic/fanstatic/registry.pyi b/stubs/fanstatic/fanstatic/registry.pyi index aa32a8f58d3b..c0a78589feea 100644 --- a/stubs/fanstatic/fanstatic/registry.pyi +++ b/stubs/fanstatic/fanstatic/registry.pyi @@ -1,14 +1,19 @@ from abc import abstractmethod from collections.abc import Iterable from threading import Lock -from typing import Any, ClassVar, Literal, Protocol, TypeVar +from typing import Any, ClassVar, Literal, Protocol, TypeVar, type_check_only from typing_extensions import Self from fanstatic.compiler import Compiler, Minifier from fanstatic.core import Library from fanstatic.injector import InjectorPlugin -from pkg_resources import EntryPoint +# Used to be pkg_resources.EntryPoint, but any EntryPoint-like class with a `load` method works +@type_check_only +class _EntryPoint(Protocol): + def load(self) -> Any: ... # Can be any attribute in the module + +@type_check_only class _HasName(Protocol): @property def name(self) -> str: ... @@ -24,7 +29,7 @@ class Registry(dict[str, _NamedT]): def __init__(self, items: Iterable[_NamedT] = ()) -> None: ... def add(self, item: _NamedT) -> None: ... def load_items_from_entry_points(self) -> None: ... - def make_item_from_entry_point(self, entry_point: EntryPoint) -> Any: ... + def make_item_from_entry_point(self, entry_point: _EntryPoint) -> Any: ... @classmethod def instance(cls) -> Self: ... 
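The fanstatic change above swaps the nominal `pkg_resources.EntryPoint` annotation for a structural protocol; a minimal sketch of why a single `load()` method is enough follows (the `EntryPointLike`, `DummyEntryPoint` and `make_item` names are illustrative, not taken from the stubs):

    from typing import Any, Protocol

    class EntryPointLike(Protocol):
        # Anything exposing a load() method satisfies this protocol.
        def load(self) -> Any: ...

    class DummyEntryPoint:
        # Duck-typed stand-in; it never touches pkg_resources.
        def load(self) -> Any:
            return object()

    def make_item(entry_point: EntryPointLike) -> Any:
        return entry_point.load()

    make_item(DummyEntryPoint())  # accepted structurally
    # importlib.metadata.EntryPoint also matches, since it defines load().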
diff --git a/stubs/setuptools/@tests/stubtest_allowlist.txt b/stubs/setuptools/@tests/stubtest_allowlist.txt index 68bcbb61ceef..4ab351051980 100644 --- a/stubs/setuptools/@tests/stubtest_allowlist.txt +++ b/stubs/setuptools/@tests/stubtest_allowlist.txt @@ -1,11 +1,3 @@ -# Is always set in __init__ -pkg_resources.PathMetadata.egg_info -pkg_resources.EggMetadata.loader -pkg_resources.ZipProvider.loader - -# @classmethod alias not handled correctly by stubtest -pkg_resources.ZipManifests.load - # Is a functools.partial, so stubtest says "is not a function" setuptools.modified.newer_pairwise_group setuptools._distutils._modified.newer_pairwise_group @@ -109,4 +101,3 @@ setuptools.config._validate_pyproject.* setuptools.compat.* setuptools.command.build_py.build_py.existing_egg_info_dir .+?\.tests.* -.+?\._vendor.* diff --git a/stubs/setuptools/@tests/test_cases/check_protocols.py b/stubs/setuptools/@tests/test_cases/check_protocols.py index 66237a4bc2ef..20573855ac44 100644 --- a/stubs/setuptools/@tests/test_cases/check_protocols.py +++ b/stubs/setuptools/@tests/test_cases/check_protocols.py @@ -2,18 +2,6 @@ from typing import Any -from pkg_resources import ( - DefaultProvider, - EggMetadata, - EggProvider, - EmptyProvider, - FileMetadata, - IMetadataProvider, - IResourceProvider, - NullProvider, - PathMetadata, - ZipProvider, -) from setuptools.command.editable_wheel import EditableStrategy, _LinkTree, _StaticPth, _TopLevelFinder from setuptools.config.expand import EnsurePackagesDiscovered from setuptools.config.pyprojecttoml import _EnsurePackagesDiscovered @@ -21,29 +9,6 @@ # We don't care about the __init__ methods, only about if an instance respects the Protocol _: Any = object() -# Test IMetadataProvider Protocol implementers -metadata_provider: IMetadataProvider -metadata_provider = NullProvider(_) -metadata_provider = EggProvider(_) -metadata_provider = EmptyProvider() -metadata_provider = DefaultProvider(_) -metadata_provider = ZipProvider(_) -metadata_provider = FileMetadata(_) -metadata_provider = PathMetadata(_, _) -metadata_provider = EggMetadata(_) - -# Test IResourceProvider Protocol implementers -resource_provider: IResourceProvider -resource_provider = NullProvider(_) -resource_provider = EggProvider(_) -resource_provider = EmptyProvider() -resource_provider = DefaultProvider(_) -resource_provider = ZipProvider(_) -resource_provider = FileMetadata(_) -resource_provider = PathMetadata(_, _) -resource_provider = EggMetadata(_) - - # Test EditableStrategy Protocol implementers editable_strategy: EditableStrategy editable_strategy = _StaticPth(_, _, _) diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index 45dd6c8a88ce..b5e6c51d7221 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,9 +1,10 @@ version = "~=75.8.2" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ -If using `setuptools >= 71.1` *only* for `pkg_resources`, -you don't need `types-setuptools` since `pkg_resources` is now typed.\ +Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ +it is no longer included with `types-setuptools`. 
""" +requires = ["setuptools"] # For pkg_resources [tool.stubtest] # darwin is equivalent to linux for OS-specific methods diff --git a/stubs/setuptools/pkg_resources/__init__.pyi b/stubs/setuptools/pkg_resources/__init__.pyi deleted file mode 100644 index a4cd7df6396c..000000000000 --- a/stubs/setuptools/pkg_resources/__init__.pyi +++ /dev/null @@ -1,543 +0,0 @@ -# `pkg_resources` package of `types-setuptools` is now obsolete. -# Changes here should be mirrored to https://github.com/pypa/setuptools/tree/main/pkg_resources - -import types -import zipimport -from _typeshed import BytesPath, Incomplete, StrOrBytesPath, StrPath, Unused -from _typeshed.importlib import LoaderProtocol -from collections.abc import Callable, Generator, Iterable, Iterator, Sequence -from io import BytesIO -from itertools import chain -from pkgutil import get_importer as get_importer -from re import Pattern -from typing import IO, Any, ClassVar, Final, Literal, NamedTuple, NoReturn, Protocol, TypeVar, overload -from typing_extensions import Self, TypeAlias -from zipfile import ZipInfo - -from ._vendored_packaging import requirements as _packaging_requirements, version as _packaging_version - -# defined in setuptools -_T = TypeVar("_T") -_DistributionT = TypeVar("_DistributionT", bound=Distribution) -_NestedStr: TypeAlias = str | Iterable[_NestedStr] -_StrictInstallerType: TypeAlias = Callable[[Requirement], _DistributionT] -_InstallerType: TypeAlias = Callable[[Requirement], Distribution | None] -_PkgReqType: TypeAlias = str | Requirement -_EPDistType: TypeAlias = Distribution | _PkgReqType -_MetadataType: TypeAlias = IResourceProvider | None -_ResolvedEntryPoint: TypeAlias = Any # Can be any attribute in the module -_ResourceStream: TypeAlias = Incomplete # A readable file-like object -_ModuleLike: TypeAlias = object | types.ModuleType # Any object that optionally has __loader__ or __file__, usually a module -# Any: Should be _ModuleLike but we end up with issues where _ModuleLike doesn't have _ZipLoaderModule's __loader__ -_ProviderFactoryType: TypeAlias = Callable[[Any], IResourceProvider] -_DistFinderType: TypeAlias = Callable[[_T, str, bool], Iterable[Distribution]] -_NSHandlerType: TypeAlias = Callable[[_T, str, str, types.ModuleType], str | None] - -__all__ = [ - "require", - "run_script", - "get_provider", - "get_distribution", - "load_entry_point", - "get_entry_map", - "get_entry_info", - "iter_entry_points", - "resource_string", - "resource_stream", - "resource_filename", - "resource_listdir", - "resource_exists", - "resource_isdir", - "declare_namespace", - "working_set", - "add_activation_listener", - "find_distributions", - "set_extraction_path", - "cleanup_resources", - "get_default_cache", - "Environment", - "WorkingSet", - "ResourceManager", - "Distribution", - "Requirement", - "EntryPoint", - "ResolutionError", - "VersionConflict", - "DistributionNotFound", - "UnknownExtra", - "ExtractionError", - "PEP440Warning", - "parse_requirements", - "parse_version", - "safe_name", - "safe_version", - "get_platform", - "compatible_platforms", - "yield_lines", - "split_sections", - "safe_extra", - "to_filename", - "invalid_marker", - "evaluate_marker", - "ensure_directory", - "normalize_path", - "EGG_DIST", - "BINARY_DIST", - "SOURCE_DIST", - "CHECKOUT_DIST", - "DEVELOP_DIST", - "IMetadataProvider", - "IResourceProvider", - "FileMetadata", - "PathMetadata", - "EggMetadata", - "EmptyProvider", - "empty_provider", - "NullProvider", - "EggProvider", - "DefaultProvider", - "ZipProvider", - "register_finder", - 
"register_namespace_handler", - "register_loader_type", - "fixup_namespace_packages", - "get_importer", - "PkgResourcesDeprecationWarning", - "run_main", - "AvailableDistributions", -] - -class _ZipLoaderModule(Protocol): - __loader__: zipimport.zipimporter - -def declare_namespace(packageName: str) -> None: ... -def fixup_namespace_packages(path_item: str, parent: str | None = None) -> None: ... - -class WorkingSet: - entries: list[str] - entry_keys: dict[str | None, list[str]] - by_key: dict[str, Distribution] - normalized_to_canonical_keys: dict[str, str] - callbacks: list[Callable[[Distribution], object]] - def __init__(self, entries: Iterable[str] | None = None) -> None: ... - def add_entry(self, entry: str) -> None: ... - def __contains__(self, dist: Distribution) -> bool: ... - def find(self, req: Requirement) -> Distribution | None: ... - def iter_entry_points(self, group: str, name: str | None = None) -> Generator[EntryPoint, None, None]: ... - def run_script(self, requires: str, script_name: str) -> None: ... - def __iter__(self) -> Iterator[Distribution]: ... - def add(self, dist: Distribution, entry: str | None = None, insert: bool = True, replace: bool = False) -> None: ... - @overload - def resolve( - self, - requirements: Iterable[Requirement], - env: Environment | None, - installer: _StrictInstallerType[_DistributionT], - replace_conflicting: bool = False, - extras: tuple[str, ...] | None = None, - ) -> list[_DistributionT]: ... - @overload - def resolve( - self, - requirements: Iterable[Requirement], - env: Environment | None = None, - *, - installer: _StrictInstallerType[_DistributionT], - replace_conflicting: bool = False, - extras: tuple[str, ...] | None = None, - ) -> list[_DistributionT]: ... - @overload - def resolve( - self, - requirements: Iterable[Requirement], - env: Environment | None = None, - installer: _InstallerType | None = None, - replace_conflicting: bool = False, - extras: tuple[str, ...] | None = None, - ) -> list[Distribution]: ... - @overload - def find_plugins( - self, - plugin_env: Environment, - full_env: Environment | None, - installer: _StrictInstallerType[_DistributionT], - fallback: bool = True, - ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ... - @overload - def find_plugins( - self, - plugin_env: Environment, - full_env: Environment | None = None, - *, - installer: _StrictInstallerType[_DistributionT], - fallback: bool = True, - ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ... - @overload - def find_plugins( - self, - plugin_env: Environment, - full_env: Environment | None = None, - installer: _InstallerType | None = None, - fallback: bool = True, - ) -> tuple[list[Distribution], dict[Distribution, Exception]]: ... - def require(self, *requirements: _NestedStr) -> Sequence[Distribution]: ... - def subscribe(self, callback: Callable[[Distribution], Unused], existing: bool = True) -> None: ... - -class Environment: - def __init__( - self, search_path: Iterable[str] | None = None, platform: str | None = ..., python: str | None = ... - ) -> None: ... - def can_add(self, dist: Distribution) -> bool: ... - def remove(self, dist: Distribution) -> None: ... - def scan(self, search_path: Iterable[str] | None = None) -> None: ... - def __getitem__(self, project_name: str) -> list[Distribution]: ... - def add(self, dist: Distribution) -> None: ... 
- @overload - def best_match( - self, - req: Requirement, - working_set: WorkingSet, - installer: _StrictInstallerType[_DistributionT], - replace_conflicting: bool = False, - ) -> _DistributionT: ... - @overload - def best_match( - self, - req: Requirement, - working_set: WorkingSet, - installer: _InstallerType | None = None, - replace_conflicting: bool = False, - ) -> Distribution | None: ... - @overload - def obtain(self, requirement: Requirement, installer: _StrictInstallerType[_DistributionT]) -> _DistributionT: ... - @overload - def obtain(self, requirement: Requirement, installer: Callable[[Requirement], None] | None = None) -> None: ... - @overload - def obtain(self, requirement: Requirement, installer: _InstallerType | None = None) -> Distribution | None: ... - def __iter__(self) -> Iterator[str]: ... - def __iadd__(self, other: Distribution | Environment) -> Self: ... - def __add__(self, other: Distribution | Environment) -> Self: ... - -AvailableDistributions = Environment - -def parse_requirements(strs: _NestedStr) -> Iterator[Requirement]: ... - -class RequirementParseError(_packaging_requirements.InvalidRequirement): ... - -class Requirement(_packaging_requirements.Requirement): - unsafe_name: str - project_name: str - key: str - # packaging.requirements.Requirement uses a set for its extras. setuptools/pkg_resources uses a variable-length tuple - extras: tuple[str, ...] # type: ignore[assignment] - specs: list[tuple[str, str]] - def __init__(self, requirement_string: str) -> None: ... - def __eq__(self, other: object) -> bool: ... - def __contains__(self, item: Distribution | str | tuple[str, ...]) -> bool: ... - @staticmethod - def parse(s: str | Iterable[str]) -> Requirement: ... - -def load_entry_point(dist: _EPDistType, group: str, name: str) -> _ResolvedEntryPoint: ... -@overload -def get_entry_map(dist: _EPDistType, group: None = None) -> dict[str, dict[str, EntryPoint]]: ... -@overload -def get_entry_map(dist: _EPDistType, group: str) -> dict[str, EntryPoint]: ... -def get_entry_info(dist: _EPDistType, group: str, name: str) -> EntryPoint | None: ... - -class EntryPoint: - name: str - module_name: str - attrs: tuple[str, ...] - extras: tuple[str, ...] - dist: Distribution | None - def __init__( - self, name: str, module_name: str, attrs: Iterable[str] = (), extras: Iterable[str] = (), dist: Distribution | None = None - ) -> None: ... - @overload - def load( - self, require: Literal[True] = True, env: Environment | None = None, installer: _InstallerType | None = None - ) -> _ResolvedEntryPoint: ... - @overload - def load( - self, require: Literal[False], *args: Environment | _InstallerType | None, **kwargs: Environment | _InstallerType | None - ) -> _ResolvedEntryPoint: ... - def resolve(self) -> _ResolvedEntryPoint: ... - def require(self, env: Environment | None = None, installer: _InstallerType | None = None) -> None: ... - pattern: ClassVar[Pattern[str]] - @classmethod - def parse(cls, src: str, dist: Distribution | None = None) -> Self: ... - @classmethod - def parse_group(cls, group: str, lines: _NestedStr, dist: Distribution | None = None) -> dict[str, Self]: ... - @classmethod - def parse_map( - cls, data: str | Iterable[str] | dict[str, str | Iterable[str]], dist: Distribution | None = None - ) -> dict[str, dict[str, Self]]: ... - -def find_distributions(path_item: str, only: bool = False) -> Generator[Distribution, None, None]: ... -def find_eggs_in_zip(importer: zipimport.zipimporter, path_item: str, only: bool = False) -> Iterator[Distribution]: ... 
-def find_nothing(importer: object | None, path_item: str | None, only: bool | None = False) -> tuple[Distribution, ...]: ... -def find_on_path(importer: object | None, path_item: str, only: bool = False) -> Generator[Distribution, None, None]: ... -def dist_factory(path_item: StrPath, entry: str, only: bool) -> Callable[[str], Iterable[Distribution]]: ... - -class NoDists: - def __bool__(self) -> Literal[False]: ... - def __call__(self, fullpath: Unused) -> Iterator[Distribution]: ... - -@overload -def get_distribution(dist: _DistributionT) -> _DistributionT: ... -@overload -def get_distribution(dist: _PkgReqType) -> Distribution: ... - -PY_MAJOR: Final[str] -EGG_DIST: Final = 3 -BINARY_DIST: Final = 2 -SOURCE_DIST: Final = 1 -CHECKOUT_DIST: Final = 0 -DEVELOP_DIST: Final = -1 - -class ResourceManager: - extraction_path: str | None - cached_files: Incomplete - def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... - def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... - def resource_filename(self, package_or_requirement: _PkgReqType, resource_name: str) -> str: ... - def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str) -> IO[bytes]: ... - def resource_string(self, package_or_requirement: _PkgReqType, resource_name: str) -> bytes: ... - def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str) -> list[str]: ... - def extraction_error(self) -> NoReturn: ... - def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()) -> str: ... - def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath) -> None: ... - def set_extraction_path(self, path: str) -> None: ... - def cleanup_resources(self, force: bool = False) -> list[str]: ... - -@overload -def get_provider(moduleOrReq: str) -> IResourceProvider: ... -@overload -def get_provider(moduleOrReq: Requirement) -> Distribution: ... - -class IMetadataProvider(Protocol): - def has_metadata(self, name: str) -> bool: ... - def get_metadata(self, name: str) -> str: ... - def get_metadata_lines(self, name: str) -> Iterator[str]: ... - def metadata_isdir(self, name: str) -> bool: ... - def metadata_listdir(self, name: str) -> list[str]: ... - def run_script(self, script_name: str, namespace: dict[str, Any]) -> None: ... - -class ResolutionError(Exception): ... - -class VersionConflict(ResolutionError): - def __init__(self, dist: Distribution, req: Requirement, /, *args: object) -> None: ... - @property - def dist(self) -> Distribution: ... - @property - def req(self) -> Requirement: ... - def report(self) -> str: ... - def with_context(self, required_by: set[str]) -> Self | ContextualVersionConflict: ... - -class ContextualVersionConflict(VersionConflict): - def __init__(self, dist: Distribution, req: Requirement, required_by: set[str], /, *args: object) -> None: ... - @property - def required_by(self) -> set[str]: ... - -class DistributionNotFound(ResolutionError): - def __init__(self, req: Requirement, requirers: set[str] | None, /, *args: object) -> None: ... - @property - def req(self) -> Requirement: ... - @property - def requirers(self) -> set[str] | None: ... - @property - def requirers_str(self) -> str: ... - def report(self) -> str: ... - -class UnknownExtra(ResolutionError): ... 
- -class ExtractionError(Exception): - manager: ResourceManager - cache_path: str - original_error: BaseException | None - -def register_finder(importer_type: type[_T], distribution_finder: _DistFinderType[_T]) -> None: ... -def register_loader_type(loader_type: type[_ModuleLike], provider_factory: _ProviderFactoryType) -> None: ... -def resolve_egg_link(path: str) -> Iterable[Distribution]: ... -def register_namespace_handler(importer_type: type[_T], namespace_handler: _NSHandlerType[_T]) -> None: ... - -class IResourceProvider(IMetadataProvider, Protocol): - def get_resource_filename(self, manager: ResourceManager, resource_name: str) -> StrPath: ... - def get_resource_stream(self, manager: ResourceManager, resource_name: str) -> _ResourceStream: ... - def get_resource_string(self, manager: ResourceManager, resource_name: str) -> bytes: ... - def has_resource(self, resource_name: str) -> bool: ... - def resource_isdir(self, resource_name: str) -> bool: ... - def resource_listdir(self, resource_name: str) -> list[str]: ... - -def invalid_marker(text: str) -> SyntaxError | Literal[False]: ... -def evaluate_marker(text: str, extra: Incomplete | None = None) -> bool: ... - -class NullProvider: - egg_name: str | None - egg_info: str | None - loader: LoaderProtocol | None - module_path: str - - def __init__(self, module: _ModuleLike) -> None: ... - def get_resource_filename(self, manager: ResourceManager, resource_name: str) -> str: ... - def get_resource_stream(self, manager: ResourceManager, resource_name: str) -> BytesIO: ... - def get_resource_string(self, manager: ResourceManager, resource_name: str) -> bytes: ... - def has_resource(self, resource_name: str) -> bool: ... - def has_metadata(self, name: str) -> bool: ... - def get_metadata(self, name: str) -> str: ... - def get_metadata_lines(self, name: str) -> chain[str]: ... - def resource_isdir(self, resource_name: str) -> bool: ... - def metadata_isdir(self, name: str) -> bool: ... - def resource_listdir(self, resource_name: str) -> list[str]: ... - def metadata_listdir(self, name: str) -> list[str]: ... - def run_script(self, script_name: str, namespace: dict[str, Any]) -> None: ... - -# Doesn't actually extend NullProvider, solves a typing issue in pytype_test.py -class Distribution(NullProvider): - PKG_INFO: ClassVar[str] - project_name: str - py_version: str | None - platform: str | None - location: str | None - precedence: int - def __init__( - self, - location: str | None = None, - metadata: _MetadataType = None, - project_name: str | None = None, - version: str | None = None, - py_version: str | None = ..., - platform: str | None = None, - precedence: int = 3, - ) -> None: ... - @classmethod - def from_location( - cls, location: str, basename: StrPath, metadata: _MetadataType = None, *, precedence: int = 3 - ) -> Distribution: ... - @property - def hashcmp(self) -> tuple[parse_version, int, str, str | None, str, str]: ... - def __hash__(self) -> int: ... - def __lt__(self, other: Distribution) -> bool: ... - def __le__(self, other: Distribution) -> bool: ... - def __gt__(self, other: Distribution) -> bool: ... - def __ge__(self, other: Distribution) -> bool: ... - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... - @property - def key(self) -> str: ... - @property - def parsed_version(self) -> _packaging_version.Version: ... - @property - def version(self) -> str: ... - def requires(self, extras: Iterable[str] = ()) -> list[Requirement]: ... 
- def activate(self, path: list[str] | None = None, replace: bool = False) -> None: ... - def egg_name(self) -> str: ... # type: ignore[override] # supertype's egg_name is a variable, not a method - def __getattr__(self, attr: str) -> Any: ... # Delegate all unrecognized public attributes to .metadata provider - @classmethod - def from_filename(cls, filename: StrPath, metadata: _MetadataType = None, *, precedence: int = 3) -> Distribution: ... - def as_requirement(self) -> Requirement: ... - def load_entry_point(self, group: str, name: str) -> _ResolvedEntryPoint: ... - @overload - def get_entry_map(self, group: None = None) -> dict[str, dict[str, EntryPoint]]: ... - @overload - def get_entry_map(self, group: str) -> dict[str, EntryPoint]: ... - def get_entry_info(self, group: str, name: str) -> EntryPoint | None: ... - def insert_on(self, path: list[str], loc: Incomplete | None = None, replace: bool = False) -> None: ... - def check_version_conflict(self) -> None: ... - def has_version(self) -> bool: ... - def clone(self, **kw: str | int | IResourceProvider | None) -> Requirement: ... - @property - def extras(self) -> list[str]: ... - -class DistInfoDistribution(Distribution): - PKG_INFO: ClassVar[Literal["METADATA"]] - EQEQ: ClassVar[Pattern[str]] - -class EggProvider(NullProvider): - egg_root: str - -class DefaultProvider(EggProvider): ... - -class PathMetadata(DefaultProvider): - egg_info: str - module_path: str - def __init__(self, path: str, egg_info: str) -> None: ... - -class ZipProvider(EggProvider): - eagers: list[str] | None - zip_pre: str - # ZipProvider's loader should always be a zipimporter - loader: zipimport.zipimporter - def __init__(self, module: _ZipLoaderModule) -> None: ... - @property - def zipinfo(self) -> dict[str, ZipInfo]: ... - -class EggMetadata(ZipProvider): - loader: zipimport.zipimporter - module_path: str - def __init__(self, importer: zipimport.zipimporter) -> None: ... - -class EmptyProvider(NullProvider): - # A special case, we don't want all Providers inheriting from NullProvider to have a potentially None module_path - module_path: str | None # type:ignore[assignment] - def __init__(self) -> None: ... - -empty_provider: EmptyProvider - -class ZipManifests(dict[str, MemoizedZipManifests.manifest_mod]): - @classmethod - def build(cls, path: str) -> dict[str, ZipInfo]: ... - load = build - -class MemoizedZipManifests(ZipManifests): - class manifest_mod(NamedTuple): - manifest: dict[str, ZipInfo] - mtime: float - - def load(self, path: str) -> dict[str, ZipInfo]: ... # type: ignore[override] - -class FileMetadata(EmptyProvider): - path: StrPath - def __init__(self, path: StrPath) -> None: ... - -class PEP440Warning(RuntimeWarning): ... - -parse_version = _packaging_version.Version - -def yield_lines(iterable: _NestedStr) -> chain[str]: ... -def split_sections(s: _NestedStr) -> Generator[tuple[str | None, list[str]], None, None]: ... -def safe_name(name: str) -> str: ... -def safe_version(version: str) -> str: ... -def safe_extra(extra: str) -> str: ... -def to_filename(name: str) -> str: ... -def get_build_platform() -> str: ... - -get_platform = get_build_platform - -def get_supported_platform() -> str: ... -def compatible_platforms(provided: str | None, required: str | None) -> bool: ... -def get_default_cache() -> str: ... -def ensure_directory(path: StrOrBytesPath) -> None: ... -@overload -def normalize_path(filename: StrPath) -> str: ... -@overload -def normalize_path(filename: BytesPath) -> bytes: ... 
- -class PkgResourcesDeprecationWarning(Warning): ... - -__resource_manager: ResourceManager # Doesn't exist at runtime -resource_exists = __resource_manager.resource_exists -resource_isdir = __resource_manager.resource_isdir -resource_filename = __resource_manager.resource_filename -resource_stream = __resource_manager.resource_stream -resource_string = __resource_manager.resource_string -resource_listdir = __resource_manager.resource_listdir -set_extraction_path = __resource_manager.set_extraction_path -cleanup_resources = __resource_manager.cleanup_resources - -working_set: WorkingSet -require = working_set.require -iter_entry_points = working_set.iter_entry_points -add_activation_listener = working_set.subscribe -run_script = working_set.run_script -run_main = run_script diff --git a/stubs/setuptools/pkg_resources/_vendored_packaging/__init__.pyi b/stubs/setuptools/pkg_resources/_vendored_packaging/__init__.pyi deleted file mode 100644 index 1981f2d9b273..000000000000 --- a/stubs/setuptools/pkg_resources/_vendored_packaging/__init__.pyi +++ /dev/null @@ -1,8 +0,0 @@ -__title__: str -__summary__: str -__uri__: str -__version__: str -__author__: str -__email__: str -__license__: str -__copyright__: str diff --git a/stubs/setuptools/pkg_resources/_vendored_packaging/markers.pyi b/stubs/setuptools/pkg_resources/_vendored_packaging/markers.pyi deleted file mode 100644 index 0a7acbbf3a75..000000000000 --- a/stubs/setuptools/pkg_resources/_vendored_packaging/markers.pyi +++ /dev/null @@ -1,13 +0,0 @@ -__all__ = ["InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", "Marker", "default_environment"] - -class InvalidMarker(ValueError): ... -class UndefinedComparison(ValueError): ... -class UndefinedEnvironmentName(ValueError): ... - -def default_environment() -> dict[str, str]: ... - -class Marker: - def __init__(self, marker: str) -> None: ... - def __hash__(self) -> int: ... - def __eq__(self, other: object) -> bool: ... - def evaluate(self, environment: dict[str, str] | None = None) -> bool: ... diff --git a/stubs/setuptools/pkg_resources/_vendored_packaging/requirements.pyi b/stubs/setuptools/pkg_resources/_vendored_packaging/requirements.pyi deleted file mode 100644 index 4f909fa75a23..000000000000 --- a/stubs/setuptools/pkg_resources/_vendored_packaging/requirements.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from .markers import Marker -from .specifiers import SpecifierSet - -class InvalidRequirement(ValueError): ... - -class Requirement: - name: str - url: str | None - extras: set[str] - specifier: SpecifierSet - marker: Marker | None - def __init__(self, requirement_str: str) -> None: ... - def __hash__(self) -> int: ... - def __eq__(self, other: object) -> bool: ... diff --git a/stubs/setuptools/pkg_resources/_vendored_packaging/specifiers.pyi b/stubs/setuptools/pkg_resources/_vendored_packaging/specifiers.pyi deleted file mode 100644 index ec86ea306e74..000000000000 --- a/stubs/setuptools/pkg_resources/_vendored_packaging/specifiers.pyi +++ /dev/null @@ -1,66 +0,0 @@ -import abc -from collections.abc import Iterable, Iterator -from typing import TypeVar -from typing_extensions import TypeAlias - -from .version import Version - -# These exist at runtime, hence the public names -UnparsedVersion: TypeAlias = Version | str -UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) # noqa: Y001 - -class InvalidSpecifier(ValueError): ... - -class BaseSpecifier(metaclass=abc.ABCMeta): - @abc.abstractmethod - def __str__(self) -> str: ... 
- @abc.abstractmethod - def __hash__(self) -> int: ... - @abc.abstractmethod - def __eq__(self, other: object) -> bool: ... - @property - @abc.abstractmethod - def prereleases(self) -> bool | None: ... - @prereleases.setter - def prereleases(self, value: bool) -> None: ... - @abc.abstractmethod - def contains(self, item: str, prereleases: bool | None = None) -> bool: ... - @abc.abstractmethod - def filter(self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None) -> Iterator[UnparsedVersionVar]: ... - -class Specifier(BaseSpecifier): - def __init__(self, spec: str = "", prereleases: bool | None = None) -> None: ... - @property # type: ignore[override] - def prereleases(self) -> bool: ... - @prereleases.setter - def prereleases(self, value: bool) -> None: ... - @property - def operator(self) -> str: ... - @property - def version(self) -> str: ... - def __str__(self) -> str: ... # noqa: Y029 # needed as it's abstract on the superclass - def __hash__(self) -> int: ... - def __eq__(self, other: object) -> bool: ... - def __contains__(self, item: Version | str) -> bool: ... - def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool: ... - def filter(self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None) -> Iterator[UnparsedVersionVar]: ... - -class SpecifierSet(BaseSpecifier): - def __init__(self, spec: str = "", prereleases: bool | None = None) -> None: ... - @property - def prereleases(self) -> bool | None: ... - @prereleases.setter - def prereleases(self, value: bool) -> None: ... - @property - def operator(self) -> str: ... - @property - def version(self) -> str: ... - def __str__(self) -> str: ... # noqa: Y029 # needed as it's abstract on the superclass - def __hash__(self) -> int: ... - def __and__(self, other: SpecifierSet | str) -> SpecifierSet: ... - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[Specifier]: ... - def __eq__(self, other: object) -> bool: ... - def __contains__(self, item: UnparsedVersion) -> bool: ... - def contains(self, item: UnparsedVersion, prereleases: bool | None = None, installed: bool | None = None) -> bool: ... - def filter(self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None) -> Iterator[UnparsedVersionVar]: ... diff --git a/stubs/setuptools/pkg_resources/_vendored_packaging/version.pyi b/stubs/setuptools/pkg_resources/_vendored_packaging/version.pyi deleted file mode 100644 index 05e45eea17c7..000000000000 --- a/stubs/setuptools/pkg_resources/_vendored_packaging/version.pyi +++ /dev/null @@ -1,49 +0,0 @@ -from typing import Final - -__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"] - -def parse(version: str) -> Version: ... - -class InvalidVersion(ValueError): ... - -VERSION_PATTERN: Final[str] - -class _BaseVersion: - def __hash__(self) -> int: ... - def __lt__(self, other: _BaseVersion) -> bool: ... - def __le__(self, other: _BaseVersion) -> bool: ... - def __ge__(self, other: _BaseVersion) -> bool: ... - def __gt__(self, other: _BaseVersion) -> bool: ... - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... - -class Version(_BaseVersion): - def __init__(self, version: str) -> None: ... - @property - def epoch(self) -> int: ... - @property - def release(self) -> tuple[int, ...]: ... - @property - def pre(self) -> tuple[str, int] | None: ... - @property - def post(self) -> int | None: ... - @property - def dev(self) -> int | None: ... - @property - def local(self) -> str | None: ... 
- @property - def public(self) -> str: ... - @property - def base_version(self) -> str: ... - @property - def is_prerelease(self) -> bool: ... - @property - def is_postrelease(self) -> bool: ... - @property - def is_devrelease(self) -> bool: ... - @property - def major(self) -> int: ... - @property - def minor(self) -> int: ... - @property - def micro(self) -> int: ... From 670c3b10a9393ff23d347bcd19287b456653ad41 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 4 Mar 2025 06:11:32 -0500 Subject: [PATCH 048/388] Enable Ruff PLE (Pylint Error) (#13305) --- pyproject.toml | 1 + stubs/gevent/gevent/hub.pyi | 2 +- stubs/regex/regex/regex.pyi | 6 +++--- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 00cb7501b847..2d6e8f2ea013 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,7 @@ select = [ "N", # pep8-naming "PGH", # pygrep-hooks "PLC", # Pylint Convention + "PLE", # Pylint Error "PLR", # Pylint Refactor "RUF", # Ruff-specific and unused-noqa "TRY", # tryceratops diff --git a/stubs/gevent/gevent/hub.pyi b/stubs/gevent/gevent/hub.pyi index b949c9817ae0..44a8282ae316 100644 --- a/stubs/gevent/gevent/hub.pyi +++ b/stubs/gevent/gevent/hub.pyi @@ -29,7 +29,7 @@ class _DefaultReturnProperty(Protocol[_T]): @overload def __get__(self, obj: object, owner: type[object] | None = None) -> _T: ... def __set__(self, obj: object, value: _T | None) -> None: ... - def __del__(self, obj: object) -> None: ... + def __del__(self) -> None: ... def spawn_raw(function: Callable[..., object], *args: object, **kwargs: object) -> greenlet.greenlet: ... def sleep(seconds: float = 0, ref: bool = True) -> None: ... diff --git a/stubs/regex/regex/regex.pyi b/stubs/regex/regex/regex.pyi index 8b35a370ad8c..07b11b145dbb 100644 --- a/stubs/regex/regex/regex.pyi +++ b/stubs/regex/regex/regex.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer +from _typeshed import ReadableBuffer, Unused from collections.abc import Callable, Mapping from typing import Any, AnyStr, Generic, Literal, TypeVar, final, overload from typing_extensions import Self @@ -568,7 +568,7 @@ class Pattern(Generic[AnyStr]): timeout: float | None = None, ) -> _regex.Scanner[bytes]: ... def __copy__(self) -> Self: ... - def __deepcopy__(self) -> Self: ... + def __deepcopy__(self, memo: Unused, /) -> Self: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @@ -647,6 +647,6 @@ class Match(Generic[AnyStr]): @overload def __getitem__(self, key: int | str, /) -> AnyStr | Any: ... def __copy__(self) -> Self: ... - def __deepcopy__(self) -> Self: ... + def __deepcopy__(self, memo: Unused, /) -> Self: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... From 8fbce07e33f471505d16c04258a19a6575840427 Mon Sep 17 00:00:00 2001 From: "Philipp A." Date: Tue, 4 Mar 2025 14:52:07 +0100 Subject: [PATCH 049/388] Fix singledispatch register signature (#13578) --- stdlib/functools.pyi | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/stdlib/functools.pyi b/stdlib/functools.pyi index 10563e654b37..f786167e322d 100644 --- a/stdlib/functools.pyi +++ b/stdlib/functools.pyi @@ -151,20 +151,25 @@ class partialmethod(Generic[_T]): if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
+if sys.version_info >= (3, 11): + _RegType: TypeAlias = type[Any] | types.UnionType +else: + _RegType: TypeAlias = type[Any] + class _SingleDispatchCallable(Generic[_T]): registry: types.MappingProxyType[Any, Callable[..., _T]] def dispatch(self, cls: Any) -> Callable[..., _T]: ... # @fun.register(complex) # def _(arg, verbose=False): ... @overload - def register(self, cls: type[Any], func: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def register(self, cls: _RegType, func: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... # @fun.register # def _(arg: int, verbose=False): @overload def register(self, cls: Callable[..., _T], func: None = None) -> Callable[..., _T]: ... # fun.register(int, lambda x: x) @overload - def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... + def register(self, cls: _RegType, func: Callable[..., _T]) -> Callable[..., _T]: ... def _clear_cache(self) -> None: ... def __call__(self, /, *args: Any, **kwargs: Any) -> _T: ... @@ -177,11 +182,11 @@ class singledispatchmethod(Generic[_T]): @property def __isabstractmethod__(self) -> bool: ... @overload - def register(self, cls: type[Any], method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def register(self, cls: _RegType, method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... @overload def register(self, cls: Callable[..., _T], method: None = None) -> Callable[..., _T]: ... @overload - def register(self, cls: type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... + def register(self, cls: _RegType, method: Callable[..., _T]) -> Callable[..., _T]: ... def __get__(self, obj: _S, cls: type[_S] | None = None) -> Callable[..., _T]: ... class cached_property(Generic[_T_co]): From 8f8496f0558e236d48bb7a8aea009161c7252d20 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 5 Mar 2025 07:42:04 +0100 Subject: [PATCH 050/388] [stubsabot] Bump zstd to 1.5.6.6 (#13584) Release: https://pypi.org/pypi/zstd/1.5.6.6 Homepage: https://github.com/sergey-dryabzhinsky/python-zstd Repository: https://github.com/sergey-dryabzhinsky/python-zstd Typeshed stubs: https://github.com/python/typeshed/tree/main/stubs/zstd Diff: https://github.com/sergey-dryabzhinsky/python-zstd/compare/v1.5.6.5...v1.5.6.6 Stubsabot analysis of the diff between the two releases: - Total lines of Python code added: 12. - Total lines of Python code deleted: 11. 
If stubtest fails for this PR: - Leave this PR open (as a reminder, and to prevent stubsabot from opening another PR) - Fix stubtest failures in another PR, then close this PR Note that you will need to close and re-open the PR in order to trigger CI Co-authored-by: stubsabot <> --- stubs/zstd/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/zstd/METADATA.toml b/stubs/zstd/METADATA.toml index d98e417ca6ea..1797040d3e9d 100644 --- a/stubs/zstd/METADATA.toml +++ b/stubs/zstd/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.5.6.5" +version = "1.5.6.6" upstream_repository = "https://github.com/sergey-dryabzhinsky/python-zstd" From aa158a8c9506be9c0049641f80a0fb20a6b1f3e8 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 5 Mar 2025 02:54:48 -0500 Subject: [PATCH 051/388] `shutil.which` cannot return `PathLike`, and fails with `cmd: PathLike` on Windows Python < 3.12 (#13580) --- stdlib/shutil.pyi | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/stdlib/shutil.pyi b/stdlib/shutil.pyi index 4a19a96a306c..0fe560fd9b6a 100644 --- a/stdlib/shutil.pyi +++ b/stdlib/shutil.pyi @@ -3,7 +3,7 @@ import sys from _typeshed import BytesPath, ExcInfo, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite from collections.abc import Callable, Iterable, Sequence from tarfile import _TarfileFilter -from typing import Any, AnyStr, NamedTuple, Protocol, TypeVar, overload +from typing import Any, AnyStr, NamedTuple, NoReturn, Protocol, TypeVar, overload from typing_extensions import TypeAlias, deprecated __all__ = [ @@ -36,7 +36,6 @@ __all__ = [ ] _StrOrBytesPathT = TypeVar("_StrOrBytesPathT", bound=StrOrBytesPath) -_StrPathT = TypeVar("_StrPathT", bound=StrPath) # Return value of some functions that may either return a path-like object that was passed in or # a string _PathReturn: TypeAlias = Any @@ -185,8 +184,13 @@ else: @overload def chown(path: FileDescriptorOrPath, user: str | int, group: str | int) -> None: ... +if sys.platform == "win32" and sys.version_info < (3, 12): + @overload + @deprecated("On Windows before Python 3.12, using a PathLike as `cmd` would always fail or return `None`.") + def which(cmd: os.PathLike[str], mode: int = 1, path: StrPath | None = None) -> NoReturn: ... + @overload -def which(cmd: _StrPathT, mode: int = 1, path: StrPath | None = None) -> str | _StrPathT | None: ... +def which(cmd: StrPath, mode: int = 1, path: StrPath | None = None) -> str | None: ... @overload def which(cmd: bytes, mode: int = 1, path: StrPath | None = None) -> bytes | None: ... 
def make_archive( From 98f8564e7b1b8d0b695412caf61d6c46a1c95187 Mon Sep 17 00:00:00 2001 From: sobolevn Date: Wed, 5 Mar 2025 10:57:35 +0300 Subject: [PATCH 052/388] Bump mock to 5.2.* (#13581) --- stubs/mock/METADATA.toml | 2 +- stubs/mock/mock/backports.pyi | 7 ++++++- stubs/mock/mock/mock.pyi | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/stubs/mock/METADATA.toml b/stubs/mock/METADATA.toml index e6a45ed4581f..84a697269e35 100644 --- a/stubs/mock/METADATA.toml +++ b/stubs/mock/METADATA.toml @@ -1,2 +1,2 @@ -version = "5.1.*" +version = "5.2.*" upstream_repository = "https://github.com/testing-cabal/mock" diff --git a/stubs/mock/mock/backports.pyi b/stubs/mock/mock/backports.pyi index b612302a29fe..fd968db5d6cb 100644 --- a/stubs/mock/mock/backports.pyi +++ b/stubs/mock/mock/backports.pyi @@ -1,2 +1,7 @@ -from asyncio import iscoroutinefunction as iscoroutinefunction +import sys from unittest import IsolatedAsyncioTestCase as IsolatedAsyncioTestCase + +if sys.version_info >= (3, 10): + from inspect import iscoroutinefunction as iscoroutinefunction +else: + from asyncio import iscoroutinefunction as iscoroutinefunction diff --git a/stubs/mock/mock/mock.pyi b/stubs/mock/mock/mock.pyi index b0098457f192..132f1ffde4ee 100644 --- a/stubs/mock/mock/mock.pyi +++ b/stubs/mock/mock/mock.pyi @@ -117,7 +117,7 @@ class NonCallableMock(Base, Any): **kwargs: Any, ) -> None: ... def __getattr__(self, name: str) -> Any: ... - def _calls_repr(self, prefix: str = "Calls") -> str: ... + def _calls_repr(self) -> str: ... def assert_called_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... def assert_not_called(_mock_self) -> None: ... def assert_called_once_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... From 7553603fbd8cd250377fb3461084ec872d2db7d9 Mon Sep 17 00:00:00 2001 From: sobolevn Date: Wed, 5 Mar 2025 14:00:36 +0300 Subject: [PATCH 053/388] Bump pynput to 1.8.* (#13583) --- stubs/pynput/METADATA.toml | 2 +- stubs/pynput/pynput/keyboard/_base.pyi | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/stubs/pynput/METADATA.toml b/stubs/pynput/METADATA.toml index dc7e2e94f366..4955690e2e4a 100644 --- a/stubs/pynput/METADATA.toml +++ b/stubs/pynput/METADATA.toml @@ -1,4 +1,4 @@ -version = "1.7.8" +version = "1.8.*" upstream_repository = "https://github.com/moses-palmer/pynput" [tool.stubtest] diff --git a/stubs/pynput/pynput/keyboard/_base.pyi b/stubs/pynput/pynput/keyboard/_base.pyi index 8574e597ed4d..59922d1900e6 100644 --- a/stubs/pynput/pynput/keyboard/_base.pyi +++ b/stubs/pynput/pynput/keyboard/_base.pyi @@ -79,11 +79,14 @@ class Key(enum.Enum): tab = cast(KeyCode, ...) up = cast(KeyCode, ...) media_play_pause = cast(KeyCode, ...) + media_stop = cast(KeyCode, ...) media_volume_mute = cast(KeyCode, ...) media_volume_down = cast(KeyCode, ...) media_volume_up = cast(KeyCode, ...) media_previous = cast(KeyCode, ...) media_next = cast(KeyCode, ...) + if sys.platform == "darwin": + media_eject = cast(KeyCode, ...) insert = cast(KeyCode, ...) menu = cast(KeyCode, ...) num_lock = cast(KeyCode, ...) 
From fd466864bc6b79dc7bddfb83809bd6d80a419300 Mon Sep 17 00:00:00 2001 From: sobolevn Date: Wed, 5 Mar 2025 16:24:56 +0300 Subject: [PATCH 054/388] Update tools versions in `stubtest` workflow (#13582) --- .github/workflows/daily.yml | 12 +++--------- .github/workflows/stubtest_stdlib.yml | 4 +--- .github/workflows/stubtest_third_party.yml | 8 ++------ stubs/atheris/METADATA.toml | 4 ++++ stubs/gdb/METADATA.toml | 3 +++ stubs/openpyxl/@tests/stubtest_allowlist.txt | 4 ---- stubs/requests/@tests/stubtest_allowlist.txt | 3 --- 7 files changed, 13 insertions(+), 25 deletions(-) diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index c015c7c6bbb3..7dac40f12878 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -34,9 +34,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - # As of 2024-10-18, ubuntu-latest can refer to different Ubuntu versions, - # which can can cause problems with os module constants. - os: ["ubuntu-24.04", "windows-latest", "macos-latest"] + os: ["ubuntu-latest", "windows-latest", "macos-latest"] python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] fail-fast: false @@ -61,18 +59,14 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - # As of 2024-10-18, ubuntu-latest can refer to different Ubuntu versions, - # which causes problems when testing gdb. - os: ["ubuntu-24.04", "windows-latest", "macos-latest"] + os: ["ubuntu-latest", "windows-latest", "macos-latest"] shard-index: [0, 1, 2, 3] fail-fast: false steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - # TODO: Use Python 3.12. As of 2024-03-08, several third-party - # packages fail to install with Python 3.12. - python-version: "3.11" + python-version: "3.12" cache: pip cache-dependency-path: | requirements-tests.txt diff --git a/.github/workflows/stubtest_stdlib.yml b/.github/workflows/stubtest_stdlib.yml index ca2081809150..a13d747c2504 100644 --- a/.github/workflows/stubtest_stdlib.yml +++ b/.github/workflows/stubtest_stdlib.yml @@ -30,9 +30,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - # As of 2024-10-18, ubuntu-latest can refer to different Ubuntu versions, - # which can can cause problems with os module constants. - os: ["ubuntu-24.04", "windows-latest", "macos-latest"] + os: ["ubuntu-latest", "windows-latest", "macos-latest"] python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] fail-fast: false diff --git a/.github/workflows/stubtest_third_party.yml b/.github/workflows/stubtest_third_party.yml index 91eb3c51b1cb..4b4b1dd0e47b 100644 --- a/.github/workflows/stubtest_third_party.yml +++ b/.github/workflows/stubtest_third_party.yml @@ -32,9 +32,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - # As of 2024-10-18, ubuntu-latest can refer to different Ubuntu versions, - # which causes problems when testing gdb. - os: ["ubuntu-24.04", "windows-latest", "macos-latest"] + os: ["ubuntu-latest", "windows-latest", "macos-latest"] fail-fast: false steps: @@ -43,9 +41,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v5 with: - # TODO: Use Python 3.12. As of 2024-03-08, several third-party - # packages fail to install with Python 3.12. 
- python-version: "3.11" + python-version: "3.12" cache: pip cache-dependency-path: | requirements-tests.txt diff --git a/stubs/atheris/METADATA.toml b/stubs/atheris/METADATA.toml index 5a014d0c8f63..2fe88957c58a 100644 --- a/stubs/atheris/METADATA.toml +++ b/stubs/atheris/METADATA.toml @@ -4,3 +4,7 @@ partial_stub = true [tool.stubtest] ignore_missing_stub = true +# TODO (2025-03-05): unskip once `atheris` can be installed on `ubuntu-24.04`, +# see https://github.com/python/typeshed/pull/13582 and +# https://github.com/google/atheris/issues/82 +skip = true diff --git a/stubs/gdb/METADATA.toml b/stubs/gdb/METADATA.toml index 475788246998..00a3b24159b8 100644 --- a/stubs/gdb/METADATA.toml +++ b/stubs/gdb/METADATA.toml @@ -13,3 +13,6 @@ extra_description = """\ [tool.stubtest] platforms = ["linux"] apt_dependencies = ["gdb"] +# TODO (2025-03-05): unskip once `gdb` can be installed on `ubuntu-24.04`, +# see https://github.com/python/typeshed/pull/13582 +skip = true diff --git a/stubs/openpyxl/@tests/stubtest_allowlist.txt b/stubs/openpyxl/@tests/stubtest_allowlist.txt index 1a9bcaf7b121..d548945f4d0c 100644 --- a/stubs/openpyxl/@tests/stubtest_allowlist.txt +++ b/stubs/openpyxl/@tests/stubtest_allowlist.txt @@ -195,7 +195,3 @@ openpyxl.worksheet.smart_tag.CellSmartTagPr.__init__ openpyxl.worksheet.smart_tag.CellSmartTags.__init__ openpyxl.worksheet.table.TableColumn.__init__ openpyxl.worksheet.table.XMLColumnProps.__init__ - -# Inherited from the stdlib, where we lie about the presence -# of this method in order to mark it as deprecated. -openpyxl.xml.functions.Element.__bool__ diff --git a/stubs/requests/@tests/stubtest_allowlist.txt b/stubs/requests/@tests/stubtest_allowlist.txt index 667b6b8ae327..3bfd86463261 100644 --- a/stubs/requests/@tests/stubtest_allowlist.txt +++ b/stubs/requests/@tests/stubtest_allowlist.txt @@ -2,6 +2,3 @@ requests.packages.mod requests.packages.package requests.packages.target - -# Alias for builtins.bytes -requests.compat.bytes.__buffer__ From b9ef514b90f332e1ef9f7c0854b2b6959f3f3fd7 Mon Sep 17 00:00:00 2001 From: Kanishk Pachauri Date: Thu, 6 Mar 2025 01:24:47 +0530 Subject: [PATCH 055/388] Fix: OAuth1 type hints to include realm parameter (#13579) --- stubs/requests-oauthlib/requests_oauthlib/oauth1_auth.pyi | 3 +++ 1 file changed, 3 insertions(+) diff --git a/stubs/requests-oauthlib/requests_oauthlib/oauth1_auth.pyi b/stubs/requests-oauthlib/requests_oauthlib/oauth1_auth.pyi index d5fcd8ace105..494dd12a4193 100644 --- a/stubs/requests-oauthlib/requests_oauthlib/oauth1_auth.pyi +++ b/stubs/requests-oauthlib/requests_oauthlib/oauth1_auth.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete from logging import Logger +from typing import Any from oauthlib.oauth1 import Client from requests.auth import AuthBase @@ -27,7 +28,9 @@ class OAuth1(AuthBase): client_class: type[Client] | None = None, force_include_body: bool = False, *, + realm: Incomplete | None = None, encoding: str = "utf-8", nonce: Incomplete | None = None, timestamp: Incomplete | None = None, + **kwargs: Any, # passed to client_class's __init__ ) -> None: ... From 8f12268b10a8196c819002f250ac1aaeab4503f4 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 6 Mar 2025 10:39:56 +0100 Subject: [PATCH 056/388] Fix tarfile.open overloads (#13441) * Allow `compresslevel` argument for modes `w|gz` and `w|bz2`. * Remove `preset` argument from modes where it's not allowed. 
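For illustration, a minimal usage sketch of what the adjusted overloads are meant to accept (the file names and compression level below are made up; passing `compresslevel` to the stream modes is assumed to be supported on the Python versions these stubs target — `preset` stays limited to the xz modes):

```python
import tarfile

# Classic compressed archive: "w:gz"/"w:bz2" have always taken compresslevel.
with tarfile.open("example.tar.gz", mode="w:gz", compresslevel=6) as tar:
    tar.add("pyproject.toml")

# Stream mode: the stubs now also accept compresslevel for "w|gz" and "w|bz2".
with open("example-stream.tar.gz", "wb") as fileobj:
    with tarfile.open(fileobj=fileobj, mode="w|gz", compresslevel=6) as tar:
        tar.add("pyproject.toml")
```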
Closes: #13440 --- stdlib/tarfile.pyi | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/stdlib/tarfile.pyi b/stdlib/tarfile.pyi index 009aa9070aa8..2584eb93c9fe 100644 --- a/stdlib/tarfile.pyi +++ b/stdlib/tarfile.pyi @@ -242,13 +242,12 @@ def open( pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - preset: int | None = ..., ) -> TarFile: ... @overload def open( name: StrOrBytesPath | WriteableBuffer | None = None, *, - mode: Literal["w|", "w|gz", "w|bz2", "w|xz"], + mode: Literal["w|", "w|xz"], fileobj: IO[bytes] | None = None, bufsize: int = 10240, format: int | None = ..., @@ -260,7 +259,24 @@ def open( pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - preset: int | None = ..., +) -> TarFile: ... +@overload +def open( + name: StrOrBytesPath | WriteableBuffer | None = None, + *, + mode: Literal["w|gz", "w|bz2"], + fileobj: IO[bytes] | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, ) -> TarFile: ... class ExFileObject(io.BufferedReader): From f0aae2166a1e493a904fccfd6116985e37f4b258 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 6 Mar 2025 10:46:48 +0100 Subject: [PATCH 057/388] [stubsabot] Bump fanstatic to 1.5.* (#13586) --- stubs/fanstatic/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/fanstatic/METADATA.toml b/stubs/fanstatic/METADATA.toml index f0e10ed21df4..563c927555fc 100644 --- a/stubs/fanstatic/METADATA.toml +++ b/stubs/fanstatic/METADATA.toml @@ -1,3 +1,3 @@ -version = "1.4.*" +version = "1.5.*" upstream_repository = "https://github.com/zopefoundation/fanstatic" requires = ["types-setuptools", "types-WebOb"] From 91dd6abac3e3a684c07802d469a568aaff5ca5d0 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 6 Mar 2025 13:54:35 +0100 Subject: [PATCH 058/388] Rework tarfile.open/TarFile.open (#13177) * Copy overloads of `open()` to `TarFile.open()`. * Replace remaining instances of `IO` with `_Fileobj`. * Replace `open()` with alias to `TarFile.open()` to match implementation. --- stdlib/tarfile.pyi | 359 +++++++++++++++++++++++---------------------- 1 file changed, 181 insertions(+), 178 deletions(-) diff --git a/stdlib/tarfile.pyi b/stdlib/tarfile.pyi index 2584eb93c9fe..6a00e070aee9 100644 --- a/stdlib/tarfile.pyi +++ b/stdlib/tarfile.pyi @@ -103,182 +103,6 @@ PAX_NAME_FIELDS: set[str] ENCODING: str -@overload -def open( - name: StrOrBytesPath | None = None, - mode: Literal["r", "r:*", "r:", "r:gz", "r:bz2", "r:xz"] = "r", - fileobj: IO[bytes] | None = None, - bufsize: int = 10240, - *, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., -) -> TarFile: ... 
-@overload -def open( - name: StrOrBytesPath | None, - mode: Literal["x", "x:", "a", "a:", "w", "w:", "w:tar"], - fileobj: _Fileobj | None = None, - bufsize: int = 10240, - *, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., -) -> TarFile: ... -@overload -def open( - name: StrOrBytesPath | None = None, - *, - mode: Literal["x", "x:", "a", "a:", "w", "w:", "w:tar"], - fileobj: _Fileobj | None = None, - bufsize: int = 10240, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., -) -> TarFile: ... -@overload -def open( - name: StrOrBytesPath | None, - mode: Literal["x:gz", "x:bz2", "w:gz", "w:bz2"], - fileobj: _Fileobj | None = None, - bufsize: int = 10240, - *, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., - compresslevel: int = 9, -) -> TarFile: ... -@overload -def open( - name: StrOrBytesPath | None = None, - *, - mode: Literal["x:gz", "x:bz2", "w:gz", "w:bz2"], - fileobj: _Fileobj | None = None, - bufsize: int = 10240, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., - compresslevel: int = 9, -) -> TarFile: ... -@overload -def open( - name: StrOrBytesPath | None, - mode: Literal["x:xz", "w:xz"], - fileobj: _Fileobj | None = None, - bufsize: int = 10240, - *, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., - preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ..., -) -> TarFile: ... -@overload -def open( - name: StrOrBytesPath | None = None, - *, - mode: Literal["x:xz", "w:xz"], - fileobj: _Fileobj | None = None, - bufsize: int = 10240, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., - preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ..., -) -> TarFile: ... 
-@overload -def open( - name: StrOrBytesPath | ReadableBuffer | None = None, - *, - mode: Literal["r|*", "r|", "r|gz", "r|bz2", "r|xz"], - fileobj: IO[bytes] | None = None, - bufsize: int = 10240, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., -) -> TarFile: ... -@overload -def open( - name: StrOrBytesPath | WriteableBuffer | None = None, - *, - mode: Literal["w|", "w|xz"], - fileobj: IO[bytes] | None = None, - bufsize: int = 10240, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., -) -> TarFile: ... -@overload -def open( - name: StrOrBytesPath | WriteableBuffer | None = None, - *, - mode: Literal["w|gz", "w|bz2"], - fileobj: IO[bytes] | None = None, - bufsize: int = 10240, - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., - compresslevel: int = 9, -) -> TarFile: ... - class ExFileObject(io.BufferedReader): def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ... @@ -341,14 +165,152 @@ class TarFile: self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def __iter__(self) -> Iterator[TarInfo]: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None = None, + mode: Literal["r", "r:*", "r:", "r:gz", "r:bz2", "r:xz"] = "r", + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None, + mode: Literal["x", "x:", "a", "a:", "w", "w:", "w:tar"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None = None, + *, + mode: Literal["x", "x:", "a", "a:", "w", "w:", "w:tar"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... 
+ @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None, + mode: Literal["x:gz", "x:bz2", "w:gz", "w:bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None = None, + *, + mode: Literal["x:gz", "x:bz2", "w:gz", "w:bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | None, + mode: Literal["x:xz", "w:xz"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ..., + ) -> Self: ... + @overload @classmethod def open( cls, name: StrOrBytesPath | None = None, - mode: str = "r", - fileobj: IO[bytes] | None = None, # depends on mode + *, + mode: Literal["x:xz", "w:xz"], + fileobj: _Fileobj | None = None, bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ..., + ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | ReadableBuffer | None = None, *, + mode: Literal["r|*", "r|", "r|gz", "r|bz2", "r|xz"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., @@ -359,6 +321,45 @@ class TarFile: debug: int | None = ..., errorlevel: int | None = ..., ) -> Self: ... + @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | WriteableBuffer | None = None, + *, + mode: Literal["w|", "w|xz"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... 
+ @overload + @classmethod + def open( + cls, + name: StrOrBytesPath | WriteableBuffer | None = None, + *, + mode: Literal["w|gz", "w|bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, + ) -> Self: ... @classmethod def taropen( cls, @@ -517,6 +518,8 @@ class TarFile: ) -> TarInfo: ... def close(self) -> None: ... +open = TarFile.open + if sys.version_info >= (3, 9): def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: ... From a02a75d2129642ccc1023372565cac439000a89e Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 6 Mar 2025 17:02:48 +0100 Subject: [PATCH 059/388] Remove redundant version_info checks (#13588) `zipfile._path` was split into a separate module in Python 3.12. Originally, we just copied the definition for `CompleteDirs` and `Path` from `zipfile.pyi` to `zipfile/_path/__init__.pyi` and left the now defunct version_info branches. This removes the unnecessary branches from the respective stub files. --- stdlib/zipfile/__init__.pyi | 8 --- stdlib/zipfile/_path/__init__.pyi | 84 ++++++++++++------------------- 2 files changed, 32 insertions(+), 60 deletions(-) diff --git a/stdlib/zipfile/__init__.pyi b/stdlib/zipfile/__init__.pyi index 5b8f02f61bce..91bc051df686 100644 --- a/stdlib/zipfile/__init__.pyi +++ b/stdlib/zipfile/__init__.pyi @@ -362,14 +362,6 @@ else: def joinpath(self, *other: StrPath) -> Path: ... else: def joinpath(self, add: StrPath) -> Path: ... # undocumented - if sys.version_info >= (3, 12): - def glob(self, pattern: str) -> Iterator[Self]: ... - def rglob(self, pattern: str) -> Iterator[Self]: ... - def is_symlink(self) -> Literal[False]: ... - def relative_to(self, other: Path, *extra: StrPath) -> str: ... - def match(self, path_pattern: str) -> bool: ... - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... def __truediv__(self, add: StrPath) -> Path: ... diff --git a/stdlib/zipfile/_path/__init__.pyi b/stdlib/zipfile/_path/__init__.pyi index a7248ba7ab72..4c7b39ec4c6c 100644 --- a/stdlib/zipfile/_path/__init__.pyi +++ b/stdlib/zipfile/_path/__init__.pyi @@ -4,11 +4,9 @@ from collections.abc import Iterator, Sequence from io import TextIOWrapper from os import PathLike from typing import IO, Literal, TypeVar, overload -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from zipfile import ZipFile -_ReadWriteBinaryMode: TypeAlias = Literal["r", "w", "rb", "wb"] - _ZF = TypeVar("_ZF", bound=ZipFile) if sys.version_info >= (3, 12): @@ -39,42 +37,29 @@ if sys.version_info >= (3, 12): def name(self) -> str: ... @property def parent(self) -> PathLike[str]: ... # undocumented - if sys.version_info >= (3, 10): - @property - def filename(self) -> PathLike[str]: ... # undocumented - if sys.version_info >= (3, 11): - @property - def suffix(self) -> str: ... - @property - def suffixes(self) -> list[str]: ... - @property - def stem(self) -> str: ... - - if sys.version_info >= (3, 9): - @overload - def open( - self, - mode: Literal["r", "w"] = "r", - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - line_buffering: bool = ..., - write_through: bool = ..., - *, - pwd: bytes | None = None, - ) -> TextIOWrapper: ... 
- @overload - def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... - else: - def open( - self, mode: _ReadWriteBinaryMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False - ) -> IO[bytes]: ... - - if sys.version_info >= (3, 10): - def iterdir(self) -> Iterator[Self]: ... - else: - def iterdir(self) -> Iterator[Path]: ... - + @property + def filename(self) -> PathLike[str]: ... # undocumented + @property + def suffix(self) -> str: ... + @property + def suffixes(self) -> list[str]: ... + @property + def stem(self) -> str: ... + @overload + def open( + self, + mode: Literal["r", "w"] = "r", + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool = ..., + write_through: bool = ..., + *, + pwd: bytes | None = None, + ) -> TextIOWrapper: ... + @overload + def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... + def iterdir(self) -> Iterator[Self]: ... def is_dir(self) -> bool: ... def is_file(self) -> bool: ... def exists(self) -> bool: ... @@ -87,17 +72,12 @@ if sys.version_info >= (3, 12): write_through: bool = ..., ) -> str: ... def read_bytes(self) -> bytes: ... - if sys.version_info >= (3, 10): - def joinpath(self, *other: StrPath) -> Path: ... - else: - def joinpath(self, add: StrPath) -> Path: ... # undocumented - if sys.version_info >= (3, 12): - def glob(self, pattern: str) -> Iterator[Self]: ... - def rglob(self, pattern: str) -> Iterator[Self]: ... - def is_symlink(self) -> Literal[False]: ... - def relative_to(self, other: Path, *extra: StrPath) -> str: ... - def match(self, path_pattern: str) -> bool: ... - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - + def joinpath(self, *other: StrPath) -> Path: ... + def glob(self, pattern: str) -> Iterator[Self]: ... + def rglob(self, pattern: str) -> Iterator[Self]: ... + def is_symlink(self) -> Literal[False]: ... + def relative_to(self, other: Path, *extra: StrPath) -> str: ... + def match(self, path_pattern: str) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... def __truediv__(self, add: StrPath) -> Path: ... From 957a35c1e3c31a931581f19ef5a2116cb5ef86a6 Mon Sep 17 00:00:00 2001 From: Kanishk Pachauri Date: Thu, 6 Mar 2025 23:33:01 +0530 Subject: [PATCH 060/388] Fix parameter type annotations in `_cffi_backend` stub. (#13590) --- stubs/cffi/_cffi_backend.pyi | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/stubs/cffi/_cffi_backend.pyi b/stubs/cffi/_cffi_backend.pyi index 8acbc0c50493..8fb8cbe6d6f5 100644 --- a/stubs/cffi/_cffi_backend.pyi +++ b/stubs/cffi/_cffi_backend.pyi @@ -127,13 +127,13 @@ class FFI: def __init__( self, - module_name: str = ..., + module_name: bytes = ..., _version: int = ..., - _types: str = ..., - _globals: tuple[str | int, ...] = ..., - _struct_unions: tuple[tuple[str, ...], ...] = ..., - _enums: tuple[str, ...] = ..., - _typenames: tuple[str, ...] = ..., + _types: bytes = ..., + _globals: tuple[bytes | int, ...] = ..., + _struct_unions: tuple[tuple[bytes, ...], ...] = ..., + _enums: tuple[bytes, ...] = ..., + _typenames: tuple[bytes, ...] = ..., _includes: tuple[FFI, ...] = ..., ) -> None: ... 
@overload From d8611923ad49e437364a51c1fec9b0ef720b178b Mon Sep 17 00:00:00 2001 From: Joakim Soderlund Date: Fri, 7 Mar 2025 11:30:50 +0100 Subject: [PATCH 061/388] Mark database parameters in Peewee as optional (#13442) Peewee injects database arguments using a decorator whenever a model or query has a bound connection. Passing the argument is therefore not required, even for some function parameters without default values. --- stubs/peewee/@tests/stubtest_allowlist.txt | 10 ++++++ stubs/peewee/peewee.pyi | 38 +++++++++++----------- 2 files changed, 29 insertions(+), 19 deletions(-) diff --git a/stubs/peewee/@tests/stubtest_allowlist.txt b/stubs/peewee/@tests/stubtest_allowlist.txt index 8ef4f207162a..8e93c781b09a 100644 --- a/stubs/peewee/@tests/stubtest_allowlist.txt +++ b/stubs/peewee/@tests/stubtest_allowlist.txt @@ -11,6 +11,16 @@ peewee.DQ.__invert__ peewee.Window.as_groups peewee.Window.as_range peewee.Window.as_rows +# Wrapped with @database_required which sometimes injects the database argument +peewee.BaseQuery.execute +peewee.CompoundSelectQuery.exists +peewee.SelectBase.count +peewee.SelectBase.exists +peewee.SelectBase.first +peewee.SelectBase.get +peewee.SelectBase.peek +peewee.SelectBase.scalar +peewee.SelectBase.scalars # Ignore missing playhouse modules and names we don't currently provide playhouse\.\w+? diff --git a/stubs/peewee/peewee.pyi b/stubs/peewee/peewee.pyi index b562ba5a00c1..6ea807eefc5b 100644 --- a/stubs/peewee/peewee.pyi +++ b/stubs/peewee/peewee.pyi @@ -190,11 +190,11 @@ class Table(_HashableSource, BaseTable): # type: ignore[misc] schema: str | None = None, alias: Incomplete | None = ..., _model: Incomplete | None = ..., - _database: Incomplete | None = ..., + _database: Incomplete | None = None, ) -> None: ... def clone(self): ... - def bind(self, database: Incomplete | None = ...): ... - def bind_ctx(self, database: Incomplete | None = ...): ... + def bind(self, database: Incomplete | None = None): ... + def bind_ctx(self, database: Incomplete | None = None): ... def select(self, *columns): ... def insert(self, insert: Incomplete | None = ..., columns: Incomplete | None = ..., **kwargs): ... def replace(self, insert: Incomplete | None = ..., columns: Incomplete | None = ..., **kwargs): ... @@ -517,8 +517,8 @@ class OnConflict(Node): class BaseQuery(Node): default_row_type: Incomplete - def __init__(self, _database: Incomplete | None = ..., **kwargs) -> None: ... - def bind(self, database: Incomplete | None = ...): ... + def __init__(self, _database: Incomplete | None = None, **kwargs) -> None: ... + def bind(self, database: Incomplete | None = None): ... def clone(self): ... def dicts(self, as_dict: bool = ...) -> Self: ... def tuples(self, as_tuple: bool = ...) -> Self: ... @@ -526,8 +526,8 @@ class BaseQuery(Node): def objects(self, constructor: Incomplete | None = ...) -> Self: ... def __sql__(self, ctx) -> None: ... def sql(self): ... - def execute(self, database): ... - def iterator(self, database: Incomplete | None = ...): ... + def execute(self, database: Incomplete | None = None): ... + def iterator(self, database: Incomplete | None = None): ... def __iter__(self): ... def __getitem__(self, value): ... def __len__(self) -> int: ... @@ -571,20 +571,20 @@ class SelectQuery(Query): def select_from(self, *columns): ... class SelectBase(_HashableSource, Source, SelectQuery): # type: ignore[misc] - def peek(self, database, n: int = ...): ... - def first(self, database, n: int = ...): ... 
- def scalar(self, database, as_tuple: bool = ..., as_dict: bool = ...): ... - def scalars(self, database) -> Generator[Incomplete, None, None]: ... - def count(self, database, clear_limit: bool = ...): ... - def exists(self, database): ... - def get(self, database): ... + def peek(self, database: Incomplete | None = None, n: int = ...): ... + def first(self, database: Incomplete | None = None, n: int = ...): ... + def scalar(self, database: Incomplete | None = None, as_tuple: bool = ..., as_dict: bool = ...): ... + def scalars(self, database: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... + def count(self, database: Incomplete | None = None, clear_limit: bool = ...): ... + def exists(self, database: Incomplete | None = None): ... + def get(self, database: Incomplete | None = None): ... class CompoundSelectQuery(SelectBase): lhs: Incomplete op: Incomplete rhs: Incomplete def __init__(self, lhs, op, rhs) -> None: ... - def exists(self, database): ... + def exists(self, database: Incomplete | None = None): ... def __sql__(self, ctx): ... class Select(SelectBase): @@ -1495,7 +1495,7 @@ class _SortedFieldList: class SchemaManager: model: Incomplete context_options: Incomplete - def __init__(self, model, database: Incomplete | None = ..., **context_options) -> None: ... + def __init__(self, model, database: Incomplete | None = None, **context_options) -> None: ... @property def database(self): ... @database.setter @@ -1546,7 +1546,7 @@ class Metadata: def __init__( self, model, - database: Incomplete | None = ..., + database: Incomplete | None = None, table_name: Incomplete | None = ..., indexes: Incomplete | None = ..., primary_key: Incomplete | None = ..., @@ -1744,8 +1744,8 @@ class BaseModelSelect(_ModelQueryHelper): __sub__: Incomplete def __iter__(self): ... def prefetch(self, *subqueries): ... - def get(self, database: Incomplete | None = ...): ... - def get_or_none(self, database: Incomplete | None = ...): ... + def get(self, database: Incomplete | None = None): ... + def get_or_none(self, database: Incomplete | None = None): ... def group_by(self, *columns) -> Self: ... 
class ModelCompoundSelectQuery(BaseModelSelect, CompoundSelectQuery): # type: ignore[misc] From 4c9ed8444f8ccbbc4c9fe9bca22d33a23b24c638 Mon Sep 17 00:00:00 2001 From: sobolevn Date: Fri, 7 Mar 2025 13:33:42 +0300 Subject: [PATCH 062/388] Bump braintree to 4.34.* (#13592) --- stubs/braintree/METADATA.toml | 2 +- stubs/braintree/braintree/__init__.pyi | 1 + .../braintree/braintree/braintree_gateway.pyi | 2 + stubs/braintree/braintree/credit_card.pyi | 1 + stubs/braintree/braintree/error_codes.pyi | 40 +++++++++++++++++++ .../braintree/paypal_payment_resource.pyi | 10 +++++ .../paypal_payment_resource_gateway.pyi | 10 +++++ .../braintree/test/credit_card_numbers.pyi | 1 + stubs/braintree/braintree/test/nonces.pyi | 1 + 9 files changed, 67 insertions(+), 1 deletion(-) create mode 100644 stubs/braintree/braintree/paypal_payment_resource.pyi create mode 100644 stubs/braintree/braintree/paypal_payment_resource_gateway.pyi diff --git a/stubs/braintree/METADATA.toml b/stubs/braintree/METADATA.toml index 5f9b367ae344..1fea0e751c75 100644 --- a/stubs/braintree/METADATA.toml +++ b/stubs/braintree/METADATA.toml @@ -1,2 +1,2 @@ -version = "4.33.*" +version = "4.34.*" upstream_repository = "https://github.com/braintree/braintree_python" diff --git a/stubs/braintree/braintree/__init__.pyi b/stubs/braintree/braintree/__init__.pyi index f242635c0a75..30ed226bba2e 100644 --- a/stubs/braintree/braintree/__init__.pyi +++ b/stubs/braintree/braintree/__init__.pyi @@ -49,6 +49,7 @@ from braintree.payment_method import PaymentMethod as PaymentMethod from braintree.payment_method_nonce import PaymentMethodNonce as PaymentMethodNonce from braintree.payment_method_parser import parse_payment_method as parse_payment_method from braintree.paypal_account import PayPalAccount as PayPalAccount +from braintree.paypal_payment_resource import PayPalPaymentResource as PayPalPaymentResource from braintree.plan import Plan as Plan from braintree.plan_gateway import PlanGateway as PlanGateway from braintree.processor_response_types import ProcessorResponseTypes as ProcessorResponseTypes diff --git a/stubs/braintree/braintree/braintree_gateway.pyi b/stubs/braintree/braintree/braintree_gateway.pyi index 39e20d7e6fc8..5afb253f8639 100644 --- a/stubs/braintree/braintree/braintree_gateway.pyi +++ b/stubs/braintree/braintree/braintree_gateway.pyi @@ -18,6 +18,7 @@ from braintree.oauth_gateway import OAuthGateway from braintree.payment_method_gateway import PaymentMethodGateway from braintree.payment_method_nonce_gateway import PaymentMethodNonceGateway from braintree.paypal_account_gateway import PayPalAccountGateway +from braintree.paypal_payment_resource_gateway import PayPalPaymentResourceGateway from braintree.plan_gateway import PlanGateway from braintree.sepa_direct_debit_account_gateway import SepaDirectDebitAccountGateway from braintree.settlement_batch_summary_gateway import SettlementBatchSummaryGateway @@ -49,6 +50,7 @@ class BraintreeGateway: payment_method: PaymentMethodGateway payment_method_nonce: PaymentMethodNonceGateway paypal_account: PayPalAccountGateway + paypal_payment_resource: PayPalPaymentResourceGateway plan: PlanGateway sepa_direct_debit_account: SepaDirectDebitAccountGateway settlement_batch_summary: SettlementBatchSummaryGateway diff --git a/stubs/braintree/braintree/credit_card.pyi b/stubs/braintree/braintree/credit_card.pyi index b70edba8baed..a419ef74d736 100644 --- a/stubs/braintree/braintree/credit_card.pyi +++ b/stubs/braintree/braintree/credit_card.pyi @@ -54,6 +54,7 @@ class CreditCard(Resource): 
Payroll: type[CardTypeIndicator] Prepaid: type[CardTypeIndicator] ProductId: type[CardTypeIndicator] + PrepaidReloadable: type[CardTypeIndicator] @staticmethod def create(params: Incomplete | None = None): ... @staticmethod diff --git a/stubs/braintree/braintree/error_codes.pyi b/stubs/braintree/braintree/error_codes.pyi index b4436b69c6b6..4f1fffcbc1e4 100644 --- a/stubs/braintree/braintree/error_codes.pyi +++ b/stubs/braintree/braintree/error_codes.pyi @@ -401,6 +401,46 @@ class ErrorCodes: PaymentMethodNonceUnknown: Final = "92908" TokenIsInUse: Final = "92906" + class PayPalPaymentResource: + NonceExpired: Final = "97301" + IdNotSupported: Final = "97302" + NonceRequired: Final = "97303" + InvalidEmail: Final = "97304" + EmailTooLong: Final = "97305" + ExpectedLineItemCollection: Final = "97306" + ExpectedLineItemHash: Final = "97307" + ExpectedLineItemDebit: Final = "97308" + InvalidUnitAmount: Final = "97309" + InvalidUnitTaxAmount: Final = "97310" + IsoCodeRequired: Final = "97311" + IsoCodeUnsupported: Final = "97312" + ShippingFieldsMissing: Final = "97313" + InvalidAmountBreakdown: Final = "97314" + ExpectedShippingOptionCollection: Final = "97315" + ShippingOptionsRequired: Final = "97316" + ShippingOptionFieldsMissing: Final = "97317" + InvalidShippingOptionType: Final = "97318" + ShippingOptionIdReused: Final = "97319" + TooManyShippingOptionsSelected: Final = "97320" + ShippingOptionMustMatchBreakdown: Final = "97321" + LineItemsShouldMatchTotal: Final = "97322" + LineItemsTaxShouldMatchTotal: Final = "97323" + PatchCallFailed: Final = "97324" + InvalidAmount: Final = "97325" + ShippingIdTooLong: Final = "97326" + ShippingLabelTooLong: Final = "97327" + ShippingFullNameTooLong: Final = "97328" + ShippingAddressTooLong: Final = "97329" + ShippingExtendedAddressTooLong: Final = "97330" + ShippingLocalityTooLong: Final = "97331" + ShippingRegionTooLong: Final = "97332" + CountryCodeTooLong: Final = "97333" + NationalNumberTooLong: Final = "97334" + PostalCodeTooLong: Final = "97335" + DescriptionTooLong: Final = "97336" + CustomFieldTooLong: Final = "97337" + OrderIdTooLong: Final = "97338" + class SettlementBatchSummary: CustomFieldIsInvalid: Final = "82303" SettlementDateIsInvalid: Final = "82302" diff --git a/stubs/braintree/braintree/paypal_payment_resource.pyi b/stubs/braintree/braintree/paypal_payment_resource.pyi new file mode 100644 index 000000000000..4c2297db78e2 --- /dev/null +++ b/stubs/braintree/braintree/paypal_payment_resource.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +from braintree.resource import Resource + +class PayPalPaymentResource(Resource): + def __init__(self, gateway, attributes) -> None: ... + @staticmethod + def update(request): ... + @staticmethod + def update_signature() -> list[Incomplete]: ... diff --git a/stubs/braintree/braintree/paypal_payment_resource_gateway.pyi b/stubs/braintree/braintree/paypal_payment_resource_gateway.pyi new file mode 100644 index 000000000000..10eb5fd462a1 --- /dev/null +++ b/stubs/braintree/braintree/paypal_payment_resource_gateway.pyi @@ -0,0 +1,10 @@ +from braintree.braintree_gateway import BraintreeGateway +from braintree.configuration import Configuration +from braintree.error_result import ErrorResult +from braintree.successful_result import SuccessfulResult + +class PayPalPaymentResourceGateway: + config: Configuration + gateway: BraintreeGateway + def __init__(self, gateway: BraintreeGateway) -> None: ... + def update(self, params) -> SuccessfulResult | ErrorResult: ... 
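The two new stub files above only surface to users through `BraintreeGateway.paypal_payment_resource`. Below is a minimal sketch, written in the style of the test cases elsewhere in this series, of what the added annotations let a type checker verify; the payload dict handed to `update` is an arbitrary placeholder, since `params` is deliberately left untyped in the stub.

```python
# Exercises only names declared in the stubs added above; nothing here is taken
# from the braintree runtime documentation.
from typing import Union

from typing_extensions import assert_type

from braintree.braintree_gateway import BraintreeGateway
from braintree.error_result import ErrorResult
from braintree.successful_result import SuccessfulResult


def check_paypal_payment_resource(gateway: BraintreeGateway) -> None:
    # The gateway attribute and its update() method come straight from the diff;
    # the dict literal is a placeholder because `params` is untyped.
    result = gateway.paypal_payment_resource.update({"paypal_payment_resource": {}})
    assert_type(result, Union[SuccessfulResult, ErrorResult])
```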
diff --git a/stubs/braintree/braintree/test/credit_card_numbers.pyi b/stubs/braintree/braintree/test/credit_card_numbers.pyi index 0bf82686842e..08559159054f 100644 --- a/stubs/braintree/braintree/test/credit_card_numbers.pyi +++ b/stubs/braintree/braintree/test/credit_card_numbers.pyi @@ -8,6 +8,7 @@ class CreditCardNumbers: Healthcare: Final = "4111111510101010" Payroll: Final = "4111111114101010" Prepaid: Final = "4111111111111210" + PrepaidReloadable: Final = "4229989900000002" IssuingBank: Final = "4111111141010101" CountryOfIssuance: Final = "4111111111121102" No: Final = "4111111111310101" diff --git a/stubs/braintree/braintree/test/nonces.pyi b/stubs/braintree/braintree/test/nonces.pyi index 448e3ef09631..c59ffa78a285 100644 --- a/stubs/braintree/braintree/test/nonces.pyi +++ b/stubs/braintree/braintree/test/nonces.pyi @@ -72,6 +72,7 @@ class Nonces: TransactablePayroll: Final = "fake-valid-payroll-nonce" TransactablePinlessDebitVisa: Final = "fake-pinless-debit-visa-nonce" TransactablePrepaid: Final = "fake-valid-prepaid-nonce" + TransactablePrepaidReloadable: Final = "fake-valid-prepaid-reloadable-nonce" TransactableUnknownIndicators: Final = "fake-valid-unknown-indicators-nonce" TransactableVisa: Final = "fake-valid-visa-nonce" VenmoAccount: Final = "fake-venmo-account-nonce" From 6df7d084911536acc7ebacbfecd037daafdef69d Mon Sep 17 00:00:00 2001 From: Kanishk Pachauri Date: Fri, 7 Mar 2025 23:35:24 +0530 Subject: [PATCH 063/388] Update `_socket.pyi` to include `tuple[int, bytes]` address format (#13596) --- stdlib/_socket.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stdlib/_socket.pyi b/stdlib/_socket.pyi index 9be0c3f2e669..649728257c1a 100644 --- a/stdlib/_socket.pyi +++ b/stdlib/_socket.pyi @@ -812,12 +812,12 @@ def getaddrinfo( type: int = ..., proto: int = ..., flags: int = ..., -) -> list[tuple[int, int, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... +) -> list[tuple[int, int, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: ... def gethostbyname(hostname: str, /) -> str: ... def gethostbyname_ex(hostname: str, /) -> tuple[str, list[str], list[str]]: ... def gethostname() -> str: ... def gethostbyaddr(ip_address: str, /) -> tuple[str, list[str], list[str]]: ... -def getnameinfo(sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int, /) -> tuple[str, str]: ... +def getnameinfo(sockaddr: tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], flags: int, /) -> tuple[str, str]: ... def getprotobyname(protocolname: str, /) -> int: ... def getservbyname(servicename: str, protocolname: str = ..., /) -> int: ... def getservbyport(port: int, protocolname: str = ..., /) -> str: ... 
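For readers wondering where the extra `tuple[int, bytes]` member comes from: it appears to be CPython's fallback representation (raw address-family number plus raw `sa_data` bytes) for address families the interpreter does not decode into a richer tuple — that reading is an inference from the change, not something stated in the patch. A small runtime sketch of consuming the widened union:

```python
# Runtime sketch of handling the widened sockaddr union returned by
# socket.getaddrinfo(); the (int, bytes) branch is assumed to be the raw
# fallback form described above.
import socket

for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo("localhost", 80):
    if isinstance(sockaddr[0], int):
        fam_no, raw = sockaddr  # opaque family: (address family number, raw bytes)
        print(f"unhandled address family {fam_no}: {len(raw)} raw bytes")
    elif len(sockaddr) == 2:
        print(socket.getnameinfo(sockaddr, 0))  # IPv4: (host, port)
    else:
        # IPv6: (host, port, flowinfo, scope_id)
        print(socket.getnameinfo(sockaddr, socket.NI_NUMERICHOST))
```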
From b4b281dd2113011b34c91c46880b0adbb507a3c9 Mon Sep 17 00:00:00 2001 From: Stephen Morton Date: Fri, 7 Mar 2025 10:08:27 -0800 Subject: [PATCH 064/388] Decouple types.DynamicClassAttribute from property (#13276) --- stdlib/@tests/stubtest_allowlists/py310.txt | 2 -- stdlib/@tests/stubtest_allowlists/py311.txt | 2 -- stdlib/@tests/stubtest_allowlists/py312.txt | 2 -- stdlib/@tests/stubtest_allowlists/py313.txt | 2 -- stdlib/@tests/test_cases/check_types.py | 19 +++++++++++++++++ stdlib/types.pyi | 23 +++++++++++++++++++-- 6 files changed, 40 insertions(+), 10 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py310.txt b/stdlib/@tests/stubtest_allowlists/py310.txt index b9117eba1d2e..d809618c6df0 100644 --- a/stdlib/@tests/stubtest_allowlists/py310.txt +++ b/stdlib/@tests/stubtest_allowlists/py310.txt @@ -25,8 +25,6 @@ posixpath.join ntpath.join os.path.join -types.DynamicClassAttribute..* # In the stub we pretend it's an alias for property, but it has positional-only differences - # typing.IO uses positional-or-keyword arguments, but in the stubs we prefer # to mark these as positional-only for compatibility with existing sub-classes. typing(_extensions)?\.BinaryIO\.write diff --git a/stdlib/@tests/stubtest_allowlists/py311.txt b/stdlib/@tests/stubtest_allowlists/py311.txt index aa5ab8aaf936..26f846f339a3 100644 --- a/stdlib/@tests/stubtest_allowlists/py311.txt +++ b/stdlib/@tests/stubtest_allowlists/py311.txt @@ -46,8 +46,6 @@ posixpath.join ntpath.join os.path.join -types.DynamicClassAttribute..* # In the stub we pretend it's an alias for property, but it has positional-only differences - # typing.IO uses positional-or-keyword arguments, but in the stubs we prefer # to mark these as positional-only for compatibility with existing sub-classes. typing(_extensions)?\.BinaryIO\.write diff --git a/stdlib/@tests/stubtest_allowlists/py312.txt b/stdlib/@tests/stubtest_allowlists/py312.txt index 9d3ed5487085..1e48d68093a0 100644 --- a/stdlib/@tests/stubtest_allowlists/py312.txt +++ b/stdlib/@tests/stubtest_allowlists/py312.txt @@ -45,8 +45,6 @@ posixpath.join ntpath.join os.path.join -types.DynamicClassAttribute..* # In the stub we pretend it's an alias for property, but it has positional-only differences - # typing.IO uses positional-or-keyword arguments, but in the stubs we prefer # to mark these as positional-only for compatibility with existing sub-classes. typing(_extensions)?\.BinaryIO\.write diff --git a/stdlib/@tests/stubtest_allowlists/py313.txt b/stdlib/@tests/stubtest_allowlists/py313.txt index 2aa1f8eaa79b..25f56e73e2bc 100644 --- a/stdlib/@tests/stubtest_allowlists/py313.txt +++ b/stdlib/@tests/stubtest_allowlists/py313.txt @@ -45,8 +45,6 @@ posixpath.join ntpath.join os.path.join -types.DynamicClassAttribute..* # In the stub we pretend it's an alias for property, but it has positional-only differences - # typing.IO uses positional-or-keyword arguments, but in the stubs we prefer # to mark these as positional-only for compatibility with existing sub-classes. typing(_extensions)?\.BinaryIO\.write diff --git a/stdlib/@tests/test_cases/check_types.py b/stdlib/@tests/test_cases/check_types.py index 7dcf31923bec..8ae5b1641abb 100644 --- a/stdlib/@tests/test_cases/check_types.py +++ b/stdlib/@tests/test_cases/check_types.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import types from collections import UserDict @@ -39,3 +41,20 @@ assert_type(item_3, Union[int, str]) # Default isn't accepted as a keyword argument. 
mp.get(4, default="default") # type: ignore + + +# test: `types.DynamicClassAttribute` +class DCAtest: + _value: int | None = None + + @types.DynamicClassAttribute + def foo(self) -> int | None: + return self._value + + @foo.setter + def foo(self, value: int) -> None: + self._value = value + + @foo.deleter + def foo(self) -> None: + self._value = None diff --git a/stdlib/types.pyi b/stdlib/types.pyi index d41ca0d1c367..57d1ec935a76 100644 --- a/stdlib/types.pyi +++ b/stdlib/types.pyi @@ -615,8 +615,27 @@ def prepare_class( if sys.version_info >= (3, 12): def get_original_bases(cls: type, /) -> tuple[Any, ...]: ... -# Actually a different type, but `property` is special and we want that too. -DynamicClassAttribute = property +# Does not actually inherit from property, but saying it does makes sure that +# pyright handles this class correctly. +class DynamicClassAttribute(property): + fget: Callable[[Any], Any] | None + fset: Callable[[Any, Any], object] | None # type: ignore[assignment] + fdel: Callable[[Any], object] | None # type: ignore[assignment] + overwrite_doc: bool + __isabstractmethod__: bool + def __init__( + self, + fget: Callable[[Any], Any] | None = None, + fset: Callable[[Any, Any], object] | None = None, + fdel: Callable[[Any], object] | None = None, + doc: str | None = None, + ) -> None: ... + def __get__(self, instance: Any, ownerclass: type | None = None) -> Any: ... + def __set__(self, instance: Any, value: Any) -> None: ... + def __delete__(self, instance: Any) -> None: ... + def getter(self, fget: Callable[[Any], Any]) -> DynamicClassAttribute: ... + def setter(self, fset: Callable[[Any, Any], object]) -> DynamicClassAttribute: ... + def deleter(self, fdel: Callable[[Any], object]) -> DynamicClassAttribute: ... _Fn = TypeVar("_Fn", bound=Callable[..., object]) _R = TypeVar("_R") From 068c72bab4b3951419329798593cbcbd12981988 Mon Sep 17 00:00:00 2001 From: Sabfo Date: Fri, 7 Mar 2025 23:32:22 +0200 Subject: [PATCH 065/388] PyInstaller: Complete types in splash.pyi (#13568) --------- Co-authored-by: Avasam --- .../PyInstaller/building/splash.pyi | 34 ++++++++++--------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/stubs/pyinstaller/PyInstaller/building/splash.pyi b/stubs/pyinstaller/PyInstaller/building/splash.pyi index 1caae6c82b49..ffdbf395670e 100644 --- a/stubs/pyinstaller/PyInstaller/building/splash.pyi +++ b/stubs/pyinstaller/PyInstaller/building/splash.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, StrPath +from _typeshed import StrPath from PyInstaller.building.datastruct import Target, _TOCTuple @@ -8,20 +8,22 @@ splash_requirements: list[str] # Not to be imported during runtime, but is the type reference for spec files which are executed as python code class Splash(Target): image_file: str - full_tk: Incomplete - name: Incomplete - script_name: Incomplete - minify_script: Incomplete - max_img_size: Incomplete - text_pos: Incomplete - text_size: Incomplete - text_font: Incomplete - text_color: Incomplete - text_default: Incomplete - always_on_top: Incomplete - uses_tkinter: Incomplete - script: Incomplete - splash_requirements: Incomplete + full_tk: bool + tcl_lib: str + tk_lib: str + name: str + script_name: StrPath + minify_script: bool + max_img_size: tuple[int, int] + text_pos: tuple[int, int] | None + text_size: int + text_font: str + text_color: str + text_default: str + always_on_top: bool + uses_tkinter: bool + script: str + splash_requirements: set[str] binaries: list[_TOCTuple] def __init__( self, @@ -37,7 +39,7 @@ class 
Splash(Target): full_tk: bool = False, minify_script: bool = True, name: str = ..., - script_name: str = ..., + script_name: StrPath = ..., max_img_size: tuple[int, int] | None = (760, 480), always_on_top: bool = True, ) -> None: ... From 078a0d7c2cf930bfc0bf161367efdd72a38c49b5 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Sat, 8 Mar 2025 00:09:47 +0100 Subject: [PATCH 066/388] Remove LD_LIBRARY_PATH before running gdb (#13594) Suggestion by @peace-maker --- stubs/gdb/METADATA.toml | 3 --- tests/stubtest_third_party.py | 3 +++ 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/stubs/gdb/METADATA.toml b/stubs/gdb/METADATA.toml index 00a3b24159b8..475788246998 100644 --- a/stubs/gdb/METADATA.toml +++ b/stubs/gdb/METADATA.toml @@ -13,6 +13,3 @@ extra_description = """\ [tool.stubtest] platforms = ["linux"] apt_dependencies = ["gdb"] -# TODO (2025-03-05): unskip once `gdb` can be installed on `ubuntu-24.04`, -# see https://github.com/python/typeshed/pull/13582 -skip = true diff --git a/tests/stubtest_third_party.py b/tests/stubtest_third_party.py index bc1db5a413d2..a83d4fbc572f 100755 --- a/tests/stubtest_third_party.py +++ b/tests/stubtest_third_party.py @@ -252,6 +252,9 @@ def setup_gdb_stubtest_command(venv_dir: Path, stubtest_cmd: list[str]) -> bool: import sys stubtest_env = os.environ | {{"STUBTEST_ARGS": json.dumps(sys.argv)}} + # With LD_LIBRARY_PATH set, some GitHub action runners look in the wrong + # location for gdb, causing stubtest to fail. + stubtest_env.pop("LD_LIBRARY_PATH", None) gdb_cmd = [ "gdb", "--quiet", From 7e3e022a6923c95b868e61b6461d18ee113be337 Mon Sep 17 00:00:00 2001 From: Danny Yang Date: Fri, 7 Mar 2025 23:40:31 -0500 Subject: [PATCH 067/388] Update pycurl to 7.45.6 (#13600) --- stubs/pycurl/METADATA.toml | 2 +- stubs/pycurl/pycurl.pyi | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/stubs/pycurl/METADATA.toml b/stubs/pycurl/METADATA.toml index 09ac8a43ea91..d6f49acb07b0 100644 --- a/stubs/pycurl/METADATA.toml +++ b/stubs/pycurl/METADATA.toml @@ -1,4 +1,4 @@ -version = "7.45.4" +version = "7.45.6" upstream_repository = "https://github.com/pycurl/pycurl" [tool.stubtest] diff --git a/stubs/pycurl/pycurl.pyi b/stubs/pycurl/pycurl.pyi index 197912f92e9f..867e2cb6cd80 100644 --- a/stubs/pycurl/pycurl.pyi +++ b/stubs/pycurl/pycurl.pyi @@ -53,6 +53,24 @@ class CurlShare: def close(self) -> None: ... def setopt(self, option: int, value): ... +APPCONNECT_TIME_T: Final[int] = ... +CONNECT_TIME_T: Final[int] = ... +CONTENT_LENGTH_DOWNLOAD_T: Final[int] = ... +CONTENT_LENGTH_UPLOAD_T: Final[int] = ... +EARLYDATA_SENT_T: Final[int] = ... +FILETIME_T: Final[int] = ... +NAMELOOKUP_TIME_T: Final[int] = ... +POSTTRANSFER_TIME_T: Final[int] = ... +PRETRANSFER_TIME_T: Final[int] = ... +QUEUE_TIME_T: Final[int] = ... +REDIRECT_TIME_T: Final[int] = ... +SIZE_DOWNLOAD_T: Final[int] = ... +SIZE_UPLOAD_T: Final[int] = ... +SPEED_DOWNLOAD_T: Final[int] = ... +SPEED_UPLOAD_T: Final[int] = ... +STARTTRANSFER_TIME_T: Final[int] = ... +TOTAL_TIME_T: Final[int] = ... 
+ ACCEPTTIMEOUT_MS: Final = 212 ACCEPT_ENCODING: Final = 10102 ADDRESS_SCOPE: Final = 171 From c3cb95c7e52bcdd6fed9022e0f37aace591e4868 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sat, 8 Mar 2025 04:59:07 -0500 Subject: [PATCH 068/388] Mention uv for the initial install in CONTRIBUTING.md (#13595) --- CONTRIBUTING.md | 80 +++++++++++++++++++++++++++++++++---------------- 1 file changed, 55 insertions(+), 25 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e4f95ca4ed1c..4cf190229822 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,37 +50,67 @@ please refer to this Note that some tests require extra setup steps to install the required dependencies. -### Linux/Mac OS/WSL + + + + + + + + + + + + + + + +
Linux / macOS / WSL + + On Linux and macOS, you will be able to run the full test suite on Python + 3.9-3.12. + To install the necessary requirements, run the following commands from a + terminal window: + + ```bash + $ python3 -m venv .venv + $ source .venv/bin/activate + (.venv)$ pip install -U pip + (.venv)$ pip install -r requirements-tests.txt + ``` + +
Windows + + Run the following commands from a Windows terminal to install all requirements: + + ```powershell + > python -m venv .venv + > .venv\Scripts\activate + (.venv) > pip install -U pip + (.venv) > pip install -r requirements-tests.txt + ``` + + To be able to run pytype tests, you'll also need to install it manually +as it's currently excluded from the requirements file: -On Linux and Mac OS, you will be able to run the full test suite on Python -3.9-3.12. -To install the necessary requirements, run the following commands from a -terminal window: + ```powershell + (.venv) > pip install -U pytype + ``` -```bash -$ python3 -m venv .venv -$ source .venv/bin/activate -(.venv)$ pip install -U pip -(.venv)$ pip install -r requirements-tests.txt -``` +
Using uv -### Windows + If you already have [uv](https://docs.astral.sh/uv/getting-started/installation/) installed, you can simply replace the commands above with: -Run the following commands from a Windows terminal to install all requirements: + ```shell + uv venv + uv pip install -r requirements-tests.txt + ``` -```powershell -> python -m venv .venv -> .venv\Scripts\activate -(.venv) > pip install -U pip -(.venv) > pip install -r "requirements-tests.txt" -``` + ```shell + uv pip install -U pytype + ``` -To be able to run pytype tests, you'll also need to install it manually -as it's currently excluded from the requirements file: - -```powershell -(.venv) > pip install -U pytype -``` +
## Code formatting From 79e41eab4a3d1c20dfaa156d0c054e28c92c5557 Mon Sep 17 00:00:00 2001 From: Stephen Morton Date: Sun, 9 Mar 2025 05:44:27 -0700 Subject: [PATCH 069/388] Fix default of `dict.get` (#13222) --- stdlib/@tests/stubtest_allowlists/common.txt | 1 - stdlib/@tests/test_cases/builtins/check_dict.py | 13 +++++++------ stdlib/builtins.pyi | 2 +- stdlib/importlib/metadata/__init__.pyi | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/common.txt b/stdlib/@tests/stubtest_allowlists/common.txt index 451875d795f1..f3cb0b2b52e7 100644 --- a/stdlib/@tests/stubtest_allowlists/common.txt +++ b/stdlib/@tests/stubtest_allowlists/common.txt @@ -4,7 +4,6 @@ # Please keep sorted alphabetically -builtins.dict.get collections\.ChainMap\.fromkeys # https://github.com/python/mypy/issues/17023 http.client.HTTPConnection.response_class # the actual type at runtime is abc.ABCMeta importlib.abc.Loader.exec_module # See Lib/importlib/_abc.py. Might be defined for backwards compatibility diff --git a/stdlib/@tests/test_cases/builtins/check_dict.py b/stdlib/@tests/test_cases/builtins/check_dict.py index dd4569eccbe5..d89c3a27d489 100644 --- a/stdlib/@tests/test_cases/builtins/check_dict.py +++ b/stdlib/@tests/test_cases/builtins/check_dict.py @@ -67,7 +67,7 @@ def test_iterable_tuple_overload(x: Iterable[tuple[int, str]]) -> dict[int, str] assert_type(d_any["key"], Any) assert_type(d_any.get("key"), Union[Any, None]) -assert_type(d_any.get("key", None), Any) +assert_type(d_any.get("key", None), Union[Any, None]) assert_type(d_any.get("key", any_value), Any) assert_type(d_any.get("key", str_value), Any) assert_type(d_any.get("key", int_value), Any) @@ -84,15 +84,16 @@ def test_iterable_tuple_overload(x: Iterable[tuple[int, str]]) -> dict[int, str] result: str result = d_any["key"] result = d_any.get("key") # type: ignore[assignment] -result = d_any.get("key", None) +result = d_any.get("key", None) # type: ignore[assignment] result = d_any.get("key", any_value) result = d_any.get("key", str_value) result = d_any.get("key", int_value) result = d_str["key"] result = d_str.get("key") # type: ignore[assignment] -result = d_str.get("key", None) # type: ignore[arg-type] -result = d_str.get("key", any_value) +result = d_str.get("key", None) # type: ignore[assignment] +# Pyright has str | None here, see https://github.com/microsoft/pyright/discussions/9570 +result = d_str.get("key", any_value) # pyright: ignore[reportAssignmentType] result = d_str.get("key", str_value) result = d_str.get("key", int_value) # type: ignore[arg-type] @@ -134,11 +135,11 @@ def test8() -> str: def test9() -> str: - return d_str.get("key", None) # type: ignore[arg-type] + return d_str.get("key", None) # type: ignore[return-value] def test10() -> str: - return d_str.get("key", any_value) + return d_str.get("key", any_value) # type: ignore[no-any-return] def test11() -> str: diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index b9a1f3c9e456..4e2484509c1d 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -1143,7 +1143,7 @@ class dict(MutableMapping[_KT, _VT]): def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> dict[_T, _S]: ... # Positional-only in dict, but not in MutableMapping @overload # type: ignore[override] - def get(self, key: _KT, /) -> _VT | None: ... + def get(self, key: _KT, default: None = None, /) -> _VT | None: ... @overload def get(self, key: _KT, default: _VT, /) -> _VT: ... 
@overload diff --git a/stdlib/importlib/metadata/__init__.pyi b/stdlib/importlib/metadata/__init__.pyi index 5e26f8987277..8ab7a0c4a9e8 100644 --- a/stdlib/importlib/metadata/__init__.pyi +++ b/stdlib/importlib/metadata/__init__.pyi @@ -139,7 +139,7 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): class Deprecated(Generic[_KT, _VT]): def __getitem__(self, name: _KT) -> _VT: ... @overload - def get(self, name: _KT) -> _VT | None: ... + def get(self, name: _KT, default: None = None) -> _VT | None: ... @overload def get(self, name: _KT, default: _T) -> _VT | _T: ... def __iter__(self) -> Iterator[_KT]: ... From 88066fdfaa07e7912a7dfd7cdadd6e8a17cad3b7 Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Sun, 9 Mar 2025 13:47:43 +0100 Subject: [PATCH 070/388] Annotate `unittest.TestCase.__init_subclass__` (#13602) --- stdlib/@tests/stubtest_allowlists/py310.txt | 2 -- stdlib/@tests/stubtest_allowlists/py311.txt | 2 -- stdlib/@tests/stubtest_allowlists/py312.txt | 2 -- stdlib/@tests/stubtest_allowlists/py313.txt | 2 -- stdlib/unittest/case.pyi | 6 +++++- 5 files changed, 5 insertions(+), 9 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py310.txt b/stdlib/@tests/stubtest_allowlists/py310.txt index d809618c6df0..aa3af5ccc4b8 100644 --- a/stdlib/@tests/stubtest_allowlists/py310.txt +++ b/stdlib/@tests/stubtest_allowlists/py310.txt @@ -164,8 +164,6 @@ builtins.property.__set_name__ # Doesn't actually exist collections\.UserList\.index # ignoring pos-or-keyword parameter dataclasses.KW_ONLY # white lies around defaults importlib.metadata._meta.SimplePath.joinpath # Runtime definition of protocol is incorrect -unittest.TestCase.__init_subclass__ # Runtime has *args, **kwargs, but will error if any are supplied -unittest.case.TestCase.__init_subclass__ # Runtime has *args, **kwargs, but will error if any are supplied # =============================================================== diff --git a/stdlib/@tests/stubtest_allowlists/py311.txt b/stdlib/@tests/stubtest_allowlists/py311.txt index 26f846f339a3..07c67b0584bb 100644 --- a/stdlib/@tests/stubtest_allowlists/py311.txt +++ b/stdlib/@tests/stubtest_allowlists/py311.txt @@ -145,8 +145,6 @@ builtins.property.__set_name__ # Doesn't actually exist collections\.UserList\.index # ignoring pos-or-keyword parameter dataclasses.KW_ONLY # white lies around defaults importlib.metadata._meta.SimplePath.joinpath # Runtime definition of protocol is incorrect -unittest.TestCase.__init_subclass__ # Runtime has *args, **kwargs, but will error if any are supplied -unittest.case.TestCase.__init_subclass__ # Runtime has *args, **kwargs, but will error if any are supplied # =============================================================== diff --git a/stdlib/@tests/stubtest_allowlists/py312.txt b/stdlib/@tests/stubtest_allowlists/py312.txt index 1e48d68093a0..51f169f91088 100644 --- a/stdlib/@tests/stubtest_allowlists/py312.txt +++ b/stdlib/@tests/stubtest_allowlists/py312.txt @@ -157,8 +157,6 @@ builtins.property.__set_name__ # Doesn't actually exist collections\.UserList\.index # ignoring pos-or-keyword parameter dataclasses.KW_ONLY # white lies around defaults importlib.metadata._meta.SimplePath.joinpath # Runtime definition of protocol is incorrect -unittest.TestCase.__init_subclass__ # Runtime has *args, **kwargs, but will error if any are supplied -unittest.case.TestCase.__init_subclass__ # Runtime has *args, **kwargs, but will error if any are supplied # =============================================================== diff --git 
a/stdlib/@tests/stubtest_allowlists/py313.txt b/stdlib/@tests/stubtest_allowlists/py313.txt index 25f56e73e2bc..86faeafc4f65 100644 --- a/stdlib/@tests/stubtest_allowlists/py313.txt +++ b/stdlib/@tests/stubtest_allowlists/py313.txt @@ -147,5 +147,3 @@ builtins.property.__set_name__ # Doesn't actually exist collections\.UserList\.index # ignoring pos-or-keyword parameter dataclasses.KW_ONLY # white lies around defaults importlib.metadata._meta.SimplePath.joinpath # Runtime definition of protocol is incorrect -unittest.TestCase.__init_subclass__ # Runtime has *args, **kwargs, but will error if any are supplied -unittest.case.TestCase.__init_subclass__ # Runtime has *args, **kwargs, but will error if any are supplied diff --git a/stdlib/unittest/case.pyi b/stdlib/unittest/case.pyi index a92f03f9745f..33cd556d2e3b 100644 --- a/stdlib/unittest/case.pyi +++ b/stdlib/unittest/case.pyi @@ -20,7 +20,7 @@ from typing import ( TypeVar, overload, ) -from typing_extensions import ParamSpec, Self, TypeAlias +from typing_extensions import Never, ParamSpec, Self, TypeAlias from warnings import WarningMessage if sys.version_info >= (3, 9): @@ -323,6 +323,10 @@ class TestCase: self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None ) -> None: ... + if sys.version_info >= (3, 10): + # Runtime has *args, **kwargs, but will error if any are supplied + def __init_subclass__(cls, *args: Never, **kwargs: Never) -> None: ... + class FunctionTestCase(TestCase): def __init__( self, From f5b66cd61aa46d6011b681bf863f129adf1f0135 Mon Sep 17 00:00:00 2001 From: Daksh2000 <44369829+Daksh2000@users.noreply.github.com> Date: Sun, 9 Mar 2025 18:55:56 +0530 Subject: [PATCH 071/388] Fix override issue in GeoDataFrame.astype return type (#13606) --- stubs/geopandas/geopandas/geodataframe.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/geopandas/geopandas/geodataframe.pyi b/stubs/geopandas/geopandas/geodataframe.pyi index 4fe4e468998f..7f30a113b0ab 100644 --- a/stubs/geopandas/geopandas/geodataframe.pyi +++ b/stubs/geopandas/geopandas/geodataframe.pyi @@ -315,7 +315,7 @@ class GeoDataFrame(GeoPandasBase, pd.DataFrame): # type: ignore[misc] dtype: AstypeArg | Mapping[Any, Dtype] | pd.Series[Any], # any because of mapping invariance and series typevar bounds copy: bool | None = None, errors: Literal["ignore", "raise"] = "raise", - ) -> GeoDataFrame | pd.DataFrame: ... + ) -> GeoDataFrame: ... 
def to_postgis( self, name: str, From 2e8ea88bc7841157a62cb88d1449ef76a7ab078e Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 10 Mar 2025 01:34:42 -0400 Subject: [PATCH 072/388] Bump pywin32 to 309 (#13607) --- .../@tests/stubtest_allowlist_win32.txt | 5 + stubs/pywin32/METADATA.toml | 2 +- stubs/pywin32/pythoncom.pyi | 21 +- stubs/pywin32/win32/lib/win32con.pyi | 9659 ++++++++-------- stubs/pywin32/win32/lib/win32timezone.pyi | 123 +- stubs/pywin32/win32/lib/winerror.pyi | 9832 ++++++++++++----- stubs/pywin32/win32/servicemanager.pyi | 2 +- stubs/pywin32/win32/win32trace.pyi | 2 +- stubs/pywin32/win32/win32ts.pyi | 1 + stubs/pywin32/win32com/client/__init__.pyi | 4 +- 10 files changed, 12265 insertions(+), 7386 deletions(-) diff --git a/stubs/pywin32/@tests/stubtest_allowlist_win32.txt b/stubs/pywin32/@tests/stubtest_allowlist_win32.txt index 2fd035cff467..e29b00349dc2 100644 --- a/stubs/pywin32/@tests/stubtest_allowlist_win32.txt +++ b/stubs/pywin32/@tests/stubtest_allowlist_win32.txt @@ -61,3 +61,8 @@ win32com(ext)?.axdebug.stackframe # failed to import, ImportError: cannot import name 'axdebug' from 'win32com.axdebug' win32com(ext)?.axdebug.gateways win32com(ext)?.axscript.client.debug + +# These missing is likely an issue with the upstream build's WINVER target on 309. +# Keep them in stubs as there will likely be a patch soon. +# https://github.com/mhammond/pywin32/issues/2486 +win32com(ext)?\.shell\.shell\.\w+? diff --git a/stubs/pywin32/METADATA.toml b/stubs/pywin32/METADATA.toml index b3bb0bad8e4b..d6fb6753ea1d 100644 --- a/stubs/pywin32/METADATA.toml +++ b/stubs/pywin32/METADATA.toml @@ -1,4 +1,4 @@ -version = "308.*" +version = "309.*" upstream_repository = "https://github.com/mhammond/pywin32" [tool.stubtest] diff --git a/stubs/pywin32/pythoncom.pyi b/stubs/pywin32/pythoncom.pyi index 70e020953adc..0e0c3bc0147c 100644 --- a/stubs/pywin32/pythoncom.pyi +++ b/stubs/pywin32/pythoncom.pyi @@ -1,6 +1,7 @@ from _typeshed import Incomplete, Unused +from abc import abstractmethod from collections.abc import Sequence -from typing import SupportsInt, overload +from typing import ClassVar, SupportsInt, overload from typing_extensions import TypeAlias, deprecated import _win32typing @@ -10,6 +11,15 @@ error: TypeAlias = com_error # noqa: Y042 class internal_error(Exception): ... +class com_record: + @abstractmethod + def __init__(self, /, *args, **kwargs) -> None: ... + TLBID: ClassVar[str] + MJVER: ClassVar[int] + MNVER: ClassVar[int] + LCID: ClassVar[int] + GUID: ClassVar[str] + def CoCreateFreeThreadedMarshaler(unk: _win32typing.PyIUnknown, /) -> _win32typing.PyIUnknown: ... def CoCreateInstanceEx( clsid: _win32typing.PyIID, @@ -60,7 +70,13 @@ def GetActiveObject(cls, /) -> _win32typing.PyIUnknown: ... def GetClassFile(fileName, /) -> _win32typing.PyIID: ... def GetFacilityString(scode, /) -> str: ... def GetRecordFromGuids( - iid: _win32typing.PyIID, verMajor, verMinor, lcid, infoIID: _win32typing.PyIID, data: Incomplete | None = ..., / + iid: str | _win32typing.PyIID, + verMajor: int, + verMinor: int, + lcid: int, + infoIID: str | _win32typing.PyIID, + data: Incomplete | None = ..., + /, ): ... def GetRecordFromTypeInfo(TypeInfo: _win32typing.PyITypeInfo, /): ... def GetRunningObjectTable(reserved: int = ..., /) -> _win32typing.PyIRunningObjectTable: ... 
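One user-visible effect of the `pythoncom` hunk above is that `GetRecordFromGuids` now accepts plain GUID strings as well as `PyIID` objects for its `iid`/`infoIID` parameters. The following is a hedged sketch of the call shape only: the GUIDs are placeholders rather than a real registered type library, so the call would only succeed on a machine where the corresponding library and record are actually registered.

```python
# Illustrates the updated GetRecordFromGuids signature from the stub above.
# Both GUID strings are hypothetical placeholders; on a real system they would
# identify a registered type library and a record (struct) defined in it.
import pythoncom

TYPELIB_GUID = "{00000000-0000-0000-0000-000000000000}"  # placeholder
RECORD_GUID = "{00000000-0000-0000-0000-000000000001}"   # placeholder

record = pythoncom.GetRecordFromGuids(TYPELIB_GUID, 1, 0, 0, RECORD_GUID)

# Per the new stub, record instances share the abstract base pythoncom.com_record,
# which declares TLBID/MJVER/MNVER/LCID/GUID class attributes.
assert isinstance(record, pythoncom.com_record)
```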
@@ -399,6 +415,7 @@ TYPEFLAG_FPREDECLID: int TYPEFLAG_FREPLACEABLE: int TYPEFLAG_FRESTRICTED: int TYPEFLAG_FREVERSEBIND: int +RecordClasses: dict[str, com_record] TypeIIDs: dict[_win32typing.PyIID, type] URL_MK_LEGACY: int URL_MK_UNIFORM: int diff --git a/stubs/pywin32/win32/lib/win32con.pyi b/stubs/pywin32/win32/lib/win32con.pyi index 56ca7d1f66b1..a64e7d157553 100644 --- a/stubs/pywin32/win32/lib/win32con.pyi +++ b/stubs/pywin32/win32/lib/win32con.pyi @@ -1,4780 +1,4911 @@ -WINVER: int -WM_USER: int -PY_0U: int -OFN_READONLY: int -OFN_OVERWRITEPROMPT: int -OFN_HIDEREADONLY: int -OFN_NOCHANGEDIR: int -OFN_SHOWHELP: int -OFN_ENABLEHOOK: int -OFN_ENABLETEMPLATE: int -OFN_ENABLETEMPLATEHANDLE: int -OFN_NOVALIDATE: int -OFN_ALLOWMULTISELECT: int -OFN_EXTENSIONDIFFERENT: int -OFN_PATHMUSTEXIST: int -OFN_FILEMUSTEXIST: int -OFN_CREATEPROMPT: int -OFN_SHAREAWARE: int -OFN_NOREADONLYRETURN: int -OFN_NOTESTFILECREATE: int -OFN_NONETWORKBUTTON: int -OFN_NOLONGNAMES: int -OFN_EXPLORER: int -OFN_NODEREFERENCELINKS: int -OFN_LONGNAMES: int -OFN_ENABLEINCLUDENOTIFY: int -OFN_ENABLESIZING: int -OFN_DONTADDTORECENT: int -OFN_FORCESHOWHIDDEN: int -OFN_EX_NOPLACESBAR: int -OFN_SHAREFALLTHROUGH: int -OFN_SHARENOWARN: int -OFN_SHAREWARN: int -CDN_FIRST: int -CDN_LAST: int -CDN_INITDONE: int -CDN_SELCHANGE: int -CDN_FOLDERCHANGE: int -CDN_SHAREVIOLATION: int -CDN_HELP: int -CDN_FILEOK: int -CDN_TYPECHANGE: int -CDN_INCLUDEITEM: int -CDM_FIRST: int -CDM_LAST: int -CDM_GETSPEC: int -CDM_GETFILEPATH: int -CDM_GETFOLDERPATH: int -CDM_SETCONTROLTEXT: int -CDM_HIDECONTROL: int -CDM_SETDEFEXT: int -CC_RGBINIT: int -CC_FULLOPEN: int -CC_PREVENTFULLOPEN: int -CC_SHOWHELP: int -CC_ENABLEHOOK: int -CC_ENABLETEMPLATE: int -CC_ENABLETEMPLATEHANDLE: int -CC_SOLIDCOLOR: int -CC_ANYCOLOR: int -FR_DOWN: int -FR_WHOLEWORD: int -FR_MATCHCASE: int -FR_FINDNEXT: int -FR_REPLACE: int -FR_REPLACEALL: int -FR_DIALOGTERM: int -FR_SHOWHELP: int -FR_ENABLEHOOK: int -FR_ENABLETEMPLATE: int -FR_NOUPDOWN: int -FR_NOMATCHCASE: int -FR_NOWHOLEWORD: int -FR_ENABLETEMPLATEHANDLE: int -FR_HIDEUPDOWN: int -FR_HIDEMATCHCASE: int -FR_HIDEWHOLEWORD: int -CF_SCREENFONTS: int -CF_PRINTERFONTS: int -CF_BOTH: int -CF_SHOWHELP: int -CF_ENABLEHOOK: int -CF_ENABLETEMPLATE: int -CF_ENABLETEMPLATEHANDLE: int -CF_INITTOLOGFONTSTRUCT: int -CF_USESTYLE: int -CF_EFFECTS: int -CF_APPLY: int -CF_ANSIONLY: int -CF_SCRIPTSONLY: int -CF_NOVECTORFONTS: int -CF_NOOEMFONTS: int -CF_NOSIMULATIONS: int -CF_LIMITSIZE: int -CF_FIXEDPITCHONLY: int -CF_WYSIWYG: int -CF_FORCEFONTEXIST: int -CF_SCALABLEONLY: int -CF_TTONLY: int -CF_NOFACESEL: int -CF_NOSTYLESEL: int -CF_NOSIZESEL: int -CF_SELECTSCRIPT: int -CF_NOSCRIPTSEL: int -CF_NOVERTFONTS: int -SIMULATED_FONTTYPE: int -PRINTER_FONTTYPE: int -SCREEN_FONTTYPE: int -BOLD_FONTTYPE: int -ITALIC_FONTTYPE: int -REGULAR_FONTTYPE: int -OPENTYPE_FONTTYPE: int -TYPE1_FONTTYPE: int -DSIG_FONTTYPE: int -WM_CHOOSEFONT_GETLOGFONT: int -WM_CHOOSEFONT_SETLOGFONT: int -WM_CHOOSEFONT_SETFLAGS: int -LBSELCHSTRINGA: str -SHAREVISTRINGA: str -FILEOKSTRINGA: str -COLOROKSTRINGA: str -SETRGBSTRINGA: str -HELPMSGSTRINGA: str -FINDMSGSTRINGA: str -LBSELCHSTRING: str -SHAREVISTRING: str -FILEOKSTRING: str -COLOROKSTRING: str -SETRGBSTRING: str -HELPMSGSTRING: str -FINDMSGSTRING: str -CD_LBSELNOITEMS: int -CD_LBSELCHANGE: int -CD_LBSELSUB: int -CD_LBSELADD: int -PD_ALLPAGES: int -PD_SELECTION: int -PD_PAGENUMS: int -PD_NOSELECTION: int -PD_NOPAGENUMS: int -PD_COLLATE: int -PD_PRINTTOFILE: int -PD_PRINTSETUP: int -PD_NOWARNING: int -PD_RETURNDC: int 
-PD_RETURNIC: int -PD_RETURNDEFAULT: int -PD_SHOWHELP: int -PD_ENABLEPRINTHOOK: int -PD_ENABLESETUPHOOK: int -PD_ENABLEPRINTTEMPLATE: int -PD_ENABLESETUPTEMPLATE: int -PD_ENABLEPRINTTEMPLATEHANDLE: int -PD_ENABLESETUPTEMPLATEHANDLE: int -PD_USEDEVMODECOPIES: int -PD_DISABLEPRINTTOFILE: int -PD_HIDEPRINTTOFILE: int -PD_NONETWORKBUTTON: int -DN_DEFAULTPRN: int -WM_PSD_PAGESETUPDLG: int -WM_PSD_FULLPAGERECT: int -WM_PSD_MINMARGINRECT: int -WM_PSD_MARGINRECT: int -WM_PSD_GREEKTEXTRECT: int -WM_PSD_ENVSTAMPRECT: int -WM_PSD_YAFULLPAGERECT: int -PSD_DEFAULTMINMARGINS: int -PSD_INWININIINTLMEASURE: int -PSD_MINMARGINS: int -PSD_MARGINS: int -PSD_INTHOUSANDTHSOFINCHES: int -PSD_INHUNDREDTHSOFMILLIMETERS: int -PSD_DISABLEMARGINS: int -PSD_DISABLEPRINTER: int -PSD_NOWARNING: int -PSD_DISABLEORIENTATION: int -PSD_RETURNDEFAULT: int -PSD_DISABLEPAPER: int -PSD_SHOWHELP: int -PSD_ENABLEPAGESETUPHOOK: int -PSD_ENABLEPAGESETUPTEMPLATE: int -PSD_ENABLEPAGESETUPTEMPLATEHANDLE: int -PSD_ENABLEPAGEPAINTHOOK: int -PSD_DISABLEPAGEPAINTING: int -PSD_NONETWORKBUTTON: int -HKEY_CLASSES_ROOT: int -HKEY_CURRENT_USER: int -HKEY_LOCAL_MACHINE: int -HKEY_USERS: int -HKEY_PERFORMANCE_DATA: int -HKEY_CURRENT_CONFIG: int -HKEY_DYN_DATA: int -HKEY_PERFORMANCE_TEXT: int -HKEY_PERFORMANCE_NLSTEXT: int -HWND_BROADCAST: int -HWND_DESKTOP: int -HWND_TOP: int -HWND_BOTTOM: int -HWND_TOPMOST: int -HWND_NOTOPMOST: int -HWND_MESSAGE: int -SM_CXSCREEN: int -SM_CYSCREEN: int -SM_CXVSCROLL: int -SM_CYHSCROLL: int -SM_CYCAPTION: int -SM_CXBORDER: int -SM_CYBORDER: int -SM_CXDLGFRAME: int -SM_CYDLGFRAME: int -SM_CYVTHUMB: int -SM_CXHTHUMB: int -SM_CXICON: int -SM_CYICON: int -SM_CXCURSOR: int -SM_CYCURSOR: int -SM_CYMENU: int -SM_CXFULLSCREEN: int -SM_CYFULLSCREEN: int -SM_CYKANJIWINDOW: int -SM_MOUSEPRESENT: int -SM_CYVSCROLL: int -SM_CXHSCROLL: int -SM_DEBUG: int -SM_SWAPBUTTON: int -SM_RESERVED1: int -SM_RESERVED2: int -SM_RESERVED3: int -SM_RESERVED4: int -SM_CXMIN: int -SM_CYMIN: int -SM_CXSIZE: int -SM_CYSIZE: int -SM_CXFRAME: int -SM_CYFRAME: int -SM_CXMINTRACK: int -SM_CYMINTRACK: int -SM_CXDOUBLECLK: int -SM_CYDOUBLECLK: int -SM_CXICONSPACING: int -SM_CYICONSPACING: int -SM_MENUDROPALIGNMENT: int -SM_PENWINDOWS: int -SM_DBCSENABLED: int -SM_CMOUSEBUTTONS: int -SM_CXFIXEDFRAME: int -SM_CYFIXEDFRAME: int -SM_CXSIZEFRAME: int -SM_CYSIZEFRAME: int -SM_SECURE: int -SM_CXEDGE: int -SM_CYEDGE: int -SM_CXMINSPACING: int -SM_CYMINSPACING: int -SM_CXSMICON: int -SM_CYSMICON: int -SM_CYSMCAPTION: int -SM_CXSMSIZE: int -SM_CYSMSIZE: int -SM_CXMENUSIZE: int -SM_CYMENUSIZE: int -SM_ARRANGE: int -SM_CXMINIMIZED: int -SM_CYMINIMIZED: int -SM_CXMAXTRACK: int -SM_CYMAXTRACK: int -SM_CXMAXIMIZED: int -SM_CYMAXIMIZED: int -SM_NETWORK: int -SM_CLEANBOOT: int -SM_CXDRAG: int -SM_CYDRAG: int -SM_SHOWSOUNDS: int -SM_CXMENUCHECK: int -SM_CYMENUCHECK: int -SM_SLOWMACHINE: int -SM_MIDEASTENABLED: int -SM_MOUSEWHEELPRESENT: int -SM_XVIRTUALSCREEN: int -SM_YVIRTUALSCREEN: int -SM_CXVIRTUALSCREEN: int -SM_CYVIRTUALSCREEN: int -SM_CMONITORS: int -SM_SAMEDISPLAYFORMAT: int -SM_CMETRICS: int -MNC_IGNORE: int -MNC_CLOSE: int -MNC_EXECUTE: int -MNC_SELECT: int -MNS_NOCHECK: int -MNS_MODELESS: int -MNS_DRAGDROP: int -MNS_AUTODISMISS: int -MNS_NOTIFYBYPOS: int -MNS_CHECKORBMP: int -MIM_MAXHEIGHT: int -MIM_BACKGROUND: int -MIM_HELPID: int -MIM_MENUDATA: int -MIM_STYLE: int -MIM_APPLYTOSUBMENUS: int -MND_CONTINUE: int -MND_ENDMENU: int -MNGOF_GAP: int -MNGO_NOINTERFACE: int -MNGO_NOERROR: int -MIIM_STATE: int -MIIM_ID: int -MIIM_SUBMENU: int -MIIM_CHECKMARKS: int 
-MIIM_TYPE: int -MIIM_DATA: int -MIIM_STRING: int -MIIM_BITMAP: int -MIIM_FTYPE: int -HBMMENU_CALLBACK: int -HBMMENU_SYSTEM: int -HBMMENU_MBAR_RESTORE: int -HBMMENU_MBAR_MINIMIZE: int -HBMMENU_MBAR_CLOSE: int -HBMMENU_MBAR_CLOSE_D: int -HBMMENU_MBAR_MINIMIZE_D: int -HBMMENU_POPUP_CLOSE: int -HBMMENU_POPUP_RESTORE: int -HBMMENU_POPUP_MAXIMIZE: int -HBMMENU_POPUP_MINIMIZE: int -GMDI_USEDISABLED: int -GMDI_GOINTOPOPUPS: int -TPM_LEFTBUTTON: int -TPM_RIGHTBUTTON: int -TPM_LEFTALIGN: int -TPM_CENTERALIGN: int -TPM_RIGHTALIGN: int -TPM_TOPALIGN: int -TPM_VCENTERALIGN: int -TPM_BOTTOMALIGN: int -TPM_HORIZONTAL: int -TPM_VERTICAL: int -TPM_NONOTIFY: int -TPM_RETURNCMD: int -TPM_RECURSE: int -DOF_EXECUTABLE: int -DOF_DOCUMENT: int -DOF_DIRECTORY: int -DOF_MULTIPLE: int -DOF_PROGMAN: int -DOF_SHELLDATA: int -DO_DROPFILE: int -DO_PRINTFILE: int -DT_TOP: int -DT_LEFT: int -DT_CENTER: int -DT_RIGHT: int -DT_VCENTER: int -DT_BOTTOM: int -DT_WORDBREAK: int -DT_SINGLELINE: int -DT_EXPANDTABS: int -DT_TABSTOP: int -DT_NOCLIP: int -DT_EXTERNALLEADING: int -DT_CALCRECT: int -DT_NOPREFIX: int -DT_INTERNAL: int -DT_EDITCONTROL: int -DT_PATH_ELLIPSIS: int -DT_END_ELLIPSIS: int -DT_MODIFYSTRING: int -DT_RTLREADING: int -DT_WORD_ELLIPSIS: int -DST_COMPLEX: int -DST_TEXT: int -DST_PREFIXTEXT: int -DST_ICON: int -DST_BITMAP: int -DSS_NORMAL: int -DSS_UNION: int -DSS_DISABLED: int -DSS_MONO: int -DSS_RIGHT: int -DCX_WINDOW: int -DCX_CACHE: int -DCX_NORESETATTRS: int -DCX_CLIPCHILDREN: int -DCX_CLIPSIBLINGS: int -DCX_PARENTCLIP: int -DCX_EXCLUDERGN: int -DCX_INTERSECTRGN: int -DCX_EXCLUDEUPDATE: int -DCX_INTERSECTUPDATE: int -DCX_LOCKWINDOWUPDATE: int -DCX_VALIDATE: int -CUDR_NORMAL: int -CUDR_NOSNAPTOGRID: int -CUDR_NORESOLVEPOSITIONS: int -CUDR_NOCLOSEGAPS: int -CUDR_NEGATIVECOORDS: int -CUDR_NOPRIMARY: int -RDW_INVALIDATE: int -RDW_INTERNALPAINT: int -RDW_ERASE: int -RDW_VALIDATE: int -RDW_NOINTERNALPAINT: int -RDW_NOERASE: int -RDW_NOCHILDREN: int -RDW_ALLCHILDREN: int -RDW_UPDATENOW: int -RDW_ERASENOW: int -RDW_FRAME: int -RDW_NOFRAME: int -SW_SCROLLCHILDREN: int -SW_INVALIDATE: int -SW_ERASE: int -SW_SMOOTHSCROLL: int -ESB_ENABLE_BOTH: int -ESB_DISABLE_BOTH: int -ESB_DISABLE_LEFT: int -ESB_DISABLE_RIGHT: int -ESB_DISABLE_UP: int -ESB_DISABLE_DOWN: int -ESB_DISABLE_LTUP: int -ESB_DISABLE_RTDN: int -HELPINFO_WINDOW: int -HELPINFO_MENUITEM: int -MB_OK: int -MB_OKCANCEL: int -MB_ABORTRETRYIGNORE: int -MB_YESNOCANCEL: int -MB_YESNO: int -MB_RETRYCANCEL: int -MB_ICONHAND: int -MB_ICONQUESTION: int -MB_ICONEXCLAMATION: int -MB_ICONASTERISK: int -MB_ICONWARNING: int -MB_ICONERROR: int -MB_ICONINFORMATION: int -MB_ICONSTOP: int -MB_DEFBUTTON1: int -MB_DEFBUTTON2: int -MB_DEFBUTTON3: int -MB_DEFBUTTON4: int -MB_APPLMODAL: int -MB_SYSTEMMODAL: int -MB_TASKMODAL: int -MB_HELP: int -MB_NOFOCUS: int -MB_SETFOREGROUND: int -MB_DEFAULT_DESKTOP_ONLY: int -MB_TOPMOST: int -MB_RIGHT: int -MB_RTLREADING: int -MB_SERVICE_NOTIFICATION: int -MB_TYPEMASK: int -MB_USERICON: int -MB_ICONMASK: int -MB_DEFMASK: int -MB_MODEMASK: int -MB_MISCMASK: int -CWP_ALL: int -CWP_SKIPINVISIBLE: int -CWP_SKIPDISABLED: int -CWP_SKIPTRANSPARENT: int -CTLCOLOR_MSGBOX: int -CTLCOLOR_EDIT: int -CTLCOLOR_BTN: int -CTLCOLOR_DLG: int -CTLCOLOR_SCROLLBAR: int -CTLCOLOR_STATIC: int -CTLCOLOR_MAX: int -COLOR_SCROLLBAR: int -COLOR_BACKGROUND: int -COLOR_ACTIVECAPTION: int -COLOR_INACTIVECAPTION: int -COLOR_MENU: int -COLOR_WINDOW: int -COLOR_WINDOWFRAME: int -COLOR_MENUTEXT: int -COLOR_WINDOWTEXT: int -COLOR_CAPTIONTEXT: int -COLOR_ACTIVEBORDER: int 
-COLOR_INACTIVEBORDER: int -COLOR_APPWORKSPACE: int -COLOR_HIGHLIGHT: int -COLOR_HIGHLIGHTTEXT: int -COLOR_BTNFACE: int -COLOR_BTNSHADOW: int -COLOR_GRAYTEXT: int -COLOR_BTNTEXT: int -COLOR_INACTIVECAPTIONTEXT: int -COLOR_BTNHIGHLIGHT: int -COLOR_3DDKSHADOW: int -COLOR_3DLIGHT: int -COLOR_INFOTEXT: int -COLOR_INFOBK: int -COLOR_HOTLIGHT: int -COLOR_GRADIENTACTIVECAPTION: int -COLOR_GRADIENTINACTIVECAPTION: int -COLOR_DESKTOP: int -COLOR_3DFACE: int -COLOR_3DSHADOW: int -COLOR_3DHIGHLIGHT: int -COLOR_3DHILIGHT: int -COLOR_BTNHILIGHT: int -GW_HWNDFIRST: int -GW_HWNDLAST: int -GW_HWNDNEXT: int -GW_HWNDPREV: int -GW_OWNER: int -GW_CHILD: int -GW_ENABLEDPOPUP: int -GW_MAX: int -MF_INSERT: int -MF_CHANGE: int -MF_APPEND: int -MF_DELETE: int -MF_REMOVE: int -MF_BYCOMMAND: int -MF_BYPOSITION: int -MF_SEPARATOR: int -MF_ENABLED: int -MF_GRAYED: int -MF_DISABLED: int -MF_UNCHECKED: int -MF_CHECKED: int -MF_USECHECKBITMAPS: int -MF_STRING: int -MF_BITMAP: int -MF_OWNERDRAW: int -MF_POPUP: int -MF_MENUBARBREAK: int -MF_MENUBREAK: int -MF_UNHILITE: int -MF_HILITE: int -MF_DEFAULT: int -MF_SYSMENU: int -MF_HELP: int -MF_RIGHTJUSTIFY: int -MF_MOUSESELECT: int -MF_END: int -MFT_STRING: int -MFT_BITMAP: int -MFT_MENUBARBREAK: int -MFT_MENUBREAK: int -MFT_OWNERDRAW: int -MFT_RADIOCHECK: int -MFT_SEPARATOR: int -MFT_RIGHTORDER: int -MFT_RIGHTJUSTIFY: int -MFS_GRAYED: int -MFS_DISABLED: int -MFS_CHECKED: int -MFS_HILITE: int -MFS_ENABLED: int -MFS_UNCHECKED: int -MFS_UNHILITE: int -MFS_DEFAULT: int -MFS_MASK: int -MFS_HOTTRACKDRAWN: int -MFS_CACHEDBMP: int -MFS_BOTTOMGAPDROP: int -MFS_TOPGAPDROP: int -MFS_GAPDROP: int -SC_SIZE: int -SC_MOVE: int -SC_MINIMIZE: int -SC_MAXIMIZE: int -SC_NEXTWINDOW: int -SC_PREVWINDOW: int -SC_CLOSE: int -SC_VSCROLL: int -SC_HSCROLL: int -SC_MOUSEMENU: int -SC_KEYMENU: int -SC_ARRANGE: int -SC_RESTORE: int -SC_SCREENSAVE: int -SC_HOTKEY: int -SC_DEFAULT: int -SC_MONITORPOWER: int -SC_CONTEXTHELP: int -SC_SEPARATOR: int -SC_ICON: int -SC_ZOOM: int -IDC_ARROW: int -IDC_IBEAM: int -IDC_WAIT: int -IDC_CROSS: int -IDC_UPARROW: int -IDC_SIZE: int -IDC_ICON: int -IDC_SIZENWSE: int -IDC_SIZENESW: int -IDC_SIZEWE: int -IDC_SIZENS: int -IDC_SIZEALL: int -IDC_NO: int -IDC_HAND: int -IDC_APPSTARTING: int -IDC_HELP: int -IMAGE_BITMAP: int -IMAGE_ICON: int -IMAGE_CURSOR: int -IMAGE_ENHMETAFILE: int -LR_DEFAULTCOLOR: int -LR_MONOCHROME: int -LR_COLOR: int -LR_COPYRETURNORG: int -LR_COPYDELETEORG: int -LR_LOADFROMFILE: int -LR_LOADTRANSPARENT: int -LR_DEFAULTSIZE: int -LR_LOADREALSIZE: int -LR_LOADMAP3DCOLORS: int -LR_CREATEDIBSECTION: int -LR_COPYFROMRESOURCE: int -LR_SHARED: int -DI_MASK: int -DI_IMAGE: int -DI_NORMAL: int -DI_COMPAT: int -DI_DEFAULTSIZE: int -RES_ICON: int -RES_CURSOR: int -OBM_CLOSE: int -OBM_UPARROW: int -OBM_DNARROW: int -OBM_RGARROW: int -OBM_LFARROW: int -OBM_REDUCE: int -OBM_ZOOM: int -OBM_RESTORE: int -OBM_REDUCED: int -OBM_ZOOMD: int -OBM_RESTORED: int -OBM_UPARROWD: int -OBM_DNARROWD: int -OBM_RGARROWD: int -OBM_LFARROWD: int -OBM_MNARROW: int -OBM_COMBO: int -OBM_UPARROWI: int -OBM_DNARROWI: int -OBM_RGARROWI: int -OBM_LFARROWI: int -OBM_OLD_CLOSE: int -OBM_SIZE: int -OBM_OLD_UPARROW: int -OBM_OLD_DNARROW: int -OBM_OLD_RGARROW: int -OBM_OLD_LFARROW: int -OBM_BTSIZE: int -OBM_CHECK: int -OBM_CHECKBOXES: int -OBM_BTNCORNERS: int -OBM_OLD_REDUCE: int -OBM_OLD_ZOOM: int -OBM_OLD_RESTORE: int -OCR_NORMAL: int -OCR_IBEAM: int -OCR_WAIT: int -OCR_CROSS: int -OCR_UP: int -OCR_SIZE: int -OCR_ICON: int -OCR_SIZENWSE: int -OCR_SIZENESW: int -OCR_SIZEWE: int -OCR_SIZENS: int 
-OCR_SIZEALL: int -OCR_ICOCUR: int -OCR_NO: int -OCR_HAND: int -OCR_APPSTARTING: int -OIC_SAMPLE: int -OIC_HAND: int -OIC_QUES: int -OIC_BANG: int -OIC_NOTE: int -OIC_WINLOGO: int -OIC_WARNING: int -OIC_ERROR: int -OIC_INFORMATION: int -ORD_LANGDRIVER: int -IDI_APPLICATION: int -IDI_HAND: int -IDI_QUESTION: int -IDI_EXCLAMATION: int -IDI_ASTERISK: int -IDI_WINLOGO: int -IDI_WARNING: int -IDI_ERROR: int -IDI_INFORMATION: int -IDOK: int -IDCANCEL: int -IDABORT: int -IDRETRY: int -IDIGNORE: int -IDYES: int -IDNO: int -IDCLOSE: int -IDHELP: int -ES_LEFT: int -ES_CENTER: int -ES_RIGHT: int -ES_MULTILINE: int -ES_UPPERCASE: int -ES_LOWERCASE: int -ES_PASSWORD: int -ES_AUTOVSCROLL: int -ES_AUTOHSCROLL: int -ES_NOHIDESEL: int -ES_OEMCONVERT: int -ES_READONLY: int -ES_WANTRETURN: int -ES_NUMBER: int -EN_SETFOCUS: int -EN_KILLFOCUS: int -EN_CHANGE: int -EN_UPDATE: int -EN_ERRSPACE: int -EN_MAXTEXT: int -EN_HSCROLL: int -EN_VSCROLL: int -EC_LEFTMARGIN: int -EC_RIGHTMARGIN: int -EC_USEFONTINFO: int -EMSIS_COMPOSITIONSTRING: int -EIMES_GETCOMPSTRATONCE: int -EIMES_CANCELCOMPSTRINFOCUS: int -EIMES_COMPLETECOMPSTRKILLFOCUS: int -EM_GETSEL: int -EM_SETSEL: int -EM_GETRECT: int -EM_SETRECT: int -EM_SETRECTNP: int -EM_SCROLL: int -EM_LINESCROLL: int -EM_SCROLLCARET: int -EM_GETMODIFY: int -EM_SETMODIFY: int -EM_GETLINECOUNT: int -EM_LINEINDEX: int -EM_SETHANDLE: int -EM_GETHANDLE: int -EM_GETTHUMB: int -EM_LINELENGTH: int -EM_REPLACESEL: int -EM_GETLINE: int -EM_LIMITTEXT: int -EM_CANUNDO: int -EM_UNDO: int -EM_FMTLINES: int -EM_LINEFROMCHAR: int -EM_SETTABSTOPS: int -EM_SETPASSWORDCHAR: int -EM_EMPTYUNDOBUFFER: int -EM_GETFIRSTVISIBLELINE: int -EM_SETREADONLY: int -EM_SETWORDBREAKPROC: int -EM_GETWORDBREAKPROC: int -EM_GETPASSWORDCHAR: int -EM_SETMARGINS: int -EM_GETMARGINS: int -EM_SETLIMITTEXT: int -EM_GETLIMITTEXT: int -EM_POSFROMCHAR: int -EM_CHARFROMPOS: int -EM_SETIMESTATUS: int -EM_GETIMESTATUS: int -WB_LEFT: int -WB_RIGHT: int -WB_ISDELIMITER: int -BS_PUSHBUTTON: int -BS_DEFPUSHBUTTON: int -BS_CHECKBOX: int -BS_AUTOCHECKBOX: int -BS_RADIOBUTTON: int -BS_3STATE: int -BS_AUTO3STATE: int -BS_GROUPBOX: int -BS_USERBUTTON: int -BS_AUTORADIOBUTTON: int -BS_OWNERDRAW: int -BS_LEFTTEXT: int -BS_TEXT: int -BS_ICON: int -BS_BITMAP: int -BS_LEFT: int -BS_RIGHT: int -BS_CENTER: int -BS_TOP: int -BS_BOTTOM: int -BS_VCENTER: int -BS_PUSHLIKE: int -BS_MULTILINE: int -BS_NOTIFY: int -BS_FLAT: int -BS_RIGHTBUTTON: int -BN_CLICKED: int -BN_PAINT: int -BN_HILITE: int -BN_UNHILITE: int -BN_DISABLE: int -BN_DOUBLECLICKED: int -BN_PUSHED: int -BN_UNPUSHED: int -BN_DBLCLK: int -BN_SETFOCUS: int -BN_KILLFOCUS: int -BM_GETCHECK: int -BM_SETCHECK: int -BM_GETSTATE: int -BM_SETSTATE: int -BM_SETSTYLE: int -BM_CLICK: int -BM_GETIMAGE: int -BM_SETIMAGE: int -BST_UNCHECKED: int -BST_CHECKED: int -BST_INDETERMINATE: int -BST_PUSHED: int -BST_FOCUS: int -SS_LEFT: int -SS_CENTER: int -SS_RIGHT: int -SS_ICON: int -SS_BLACKRECT: int -SS_GRAYRECT: int -SS_WHITERECT: int -SS_BLACKFRAME: int -SS_GRAYFRAME: int -SS_WHITEFRAME: int -SS_USERITEM: int -SS_SIMPLE: int -SS_LEFTNOWORDWRAP: int -SS_BITMAP: int -SS_OWNERDRAW: int -SS_ENHMETAFILE: int -SS_ETCHEDHORZ: int -SS_ETCHEDVERT: int -SS_ETCHEDFRAME: int -SS_TYPEMASK: int -SS_NOPREFIX: int -SS_NOTIFY: int -SS_CENTERIMAGE: int -SS_RIGHTJUST: int -SS_REALSIZEIMAGE: int -SS_SUNKEN: int -SS_ENDELLIPSIS: int -SS_PATHELLIPSIS: int -SS_WORDELLIPSIS: int -SS_ELLIPSISMASK: int -STM_SETICON: int -STM_GETICON: int -STM_SETIMAGE: int -STM_GETIMAGE: int -STN_CLICKED: int -STN_DBLCLK: int -STN_ENABLE: 
int -STN_DISABLE: int -STM_MSGMAX: int -DWL_MSGRESULT: int -DWL_DLGPROC: int -DWL_USER: int -DDL_READWRITE: int -DDL_READONLY: int -DDL_HIDDEN: int -DDL_SYSTEM: int -DDL_DIRECTORY: int -DDL_ARCHIVE: int -DDL_POSTMSGS: int -DDL_DRIVES: int -DDL_EXCLUSIVE: int -RT_CURSOR: int -RT_BITMAP: int -RT_ICON: int -RT_MENU: int -RT_DIALOG: int -RT_STRING: int -RT_FONTDIR: int -RT_FONT: int -RT_ACCELERATOR: int -RT_RCDATA: int -RT_MESSAGETABLE: int -DIFFERENCE: int -RT_GROUP_CURSOR: int -RT_GROUP_ICON: int -RT_VERSION: int -RT_DLGINCLUDE: int -RT_PLUGPLAY: int -RT_VXD: int -RT_ANICURSOR: int -RT_ANIICON: int -RT_HTML: int -SB_HORZ: int -SB_VERT: int -SB_CTL: int -SB_BOTH: int -SB_LINEUP: int -SB_LINELEFT: int -SB_LINEDOWN: int -SB_LINERIGHT: int -SB_PAGEUP: int -SB_PAGELEFT: int -SB_PAGEDOWN: int -SB_PAGERIGHT: int -SB_THUMBPOSITION: int -SB_THUMBTRACK: int -SB_TOP: int -SB_LEFT: int -SB_BOTTOM: int -SB_RIGHT: int -SB_ENDSCROLL: int -SW_HIDE: int -SW_SHOWNORMAL: int -SW_NORMAL: int -SW_SHOWMINIMIZED: int -SW_SHOWMAXIMIZED: int -SW_MAXIMIZE: int -SW_SHOWNOACTIVATE: int -SW_SHOW: int -SW_MINIMIZE: int -SW_SHOWMINNOACTIVE: int -SW_SHOWNA: int -SW_RESTORE: int -SW_SHOWDEFAULT: int -SW_FORCEMINIMIZE: int -SW_MAX: int -HIDE_WINDOW: int -SHOW_OPENWINDOW: int -SHOW_ICONWINDOW: int -SHOW_FULLSCREEN: int -SHOW_OPENNOACTIVATE: int -SW_PARENTCLOSING: int -SW_OTHERZOOM: int -SW_PARENTOPENING: int -SW_OTHERUNZOOM: int -AW_HOR_POSITIVE: int -AW_HOR_NEGATIVE: int -AW_VER_POSITIVE: int -AW_VER_NEGATIVE: int -AW_CENTER: int -AW_HIDE: int -AW_ACTIVATE: int -AW_SLIDE: int -AW_BLEND: int -KF_EXTENDED: int -KF_DLGMODE: int -KF_MENUMODE: int -KF_ALTDOWN: int -KF_REPEAT: int -KF_UP: int -VK_LBUTTON: int -VK_RBUTTON: int -VK_CANCEL: int -VK_MBUTTON: int -VK_BACK: int -VK_TAB: int -VK_CLEAR: int -VK_RETURN: int -VK_SHIFT: int -VK_CONTROL: int -VK_MENU: int -VK_PAUSE: int -VK_CAPITAL: int -VK_KANA: int -VK_HANGEUL: int -VK_HANGUL: int -VK_JUNJA: int -VK_FINAL: int -VK_HANJA: int -VK_KANJI: int -VK_ESCAPE: int -VK_CONVERT: int -VK_NONCONVERT: int -VK_ACCEPT: int -VK_MODECHANGE: int -VK_SPACE: int -VK_PRIOR: int -VK_NEXT: int -VK_END: int -VK_HOME: int -VK_LEFT: int -VK_UP: int -VK_RIGHT: int -VK_DOWN: int -VK_SELECT: int -VK_PRINT: int -VK_EXECUTE: int -VK_SNAPSHOT: int -VK_INSERT: int -VK_DELETE: int -VK_HELP: int -VK_LWIN: int -VK_RWIN: int -VK_APPS: int -VK_NUMPAD0: int -VK_NUMPAD1: int -VK_NUMPAD2: int -VK_NUMPAD3: int -VK_NUMPAD4: int -VK_NUMPAD5: int -VK_NUMPAD6: int -VK_NUMPAD7: int -VK_NUMPAD8: int -VK_NUMPAD9: int -VK_MULTIPLY: int -VK_ADD: int -VK_SEPARATOR: int -VK_SUBTRACT: int -VK_DECIMAL: int -VK_DIVIDE: int -VK_F1: int -VK_F2: int -VK_F3: int -VK_F4: int -VK_F5: int -VK_F6: int -VK_F7: int -VK_F8: int -VK_F9: int -VK_F10: int -VK_F11: int -VK_F12: int -VK_F13: int -VK_F14: int -VK_F15: int -VK_F16: int -VK_F17: int -VK_F18: int -VK_F19: int -VK_F20: int -VK_F21: int -VK_F22: int -VK_F23: int -VK_F24: int -VK_NUMLOCK: int -VK_SCROLL: int -VK_LSHIFT: int -VK_RSHIFT: int -VK_LCONTROL: int -VK_RCONTROL: int -VK_LMENU: int -VK_RMENU: int -VK_PROCESSKEY: int -VK_ATTN: int -VK_CRSEL: int -VK_EXSEL: int -VK_EREOF: int -VK_PLAY: int -VK_ZOOM: int -VK_NONAME: int -VK_PA1: int -VK_OEM_CLEAR: int -MOUSEEVENTF_XDOWN: int -MOUSEEVENTF_XUP: int -MOUSEEVENTF_WHEEL: int -VK_XBUTTON1: int -VK_XBUTTON2: int -VK_VOLUME_MUTE: int -VK_VOLUME_DOWN: int -VK_VOLUME_UP: int -VK_MEDIA_NEXT_TRACK: int -VK_MEDIA_PREV_TRACK: int -VK_MEDIA_PLAY_PAUSE: int -VK_BROWSER_BACK: int -VK_BROWSER_FORWARD: int -WH_MIN: int -WH_MSGFILTER: int 
-WH_JOURNALRECORD: int -WH_JOURNALPLAYBACK: int -WH_KEYBOARD: int -WH_GETMESSAGE: int -WH_CALLWNDPROC: int -WH_CBT: int -WH_SYSMSGFILTER: int -WH_MOUSE: int -WH_HARDWARE: int -WH_DEBUG: int -WH_SHELL: int -WH_FOREGROUNDIDLE: int -WH_CALLWNDPROCRET: int -WH_KEYBOARD_LL: int -WH_MOUSE_LL: int -WH_MAX: int -WH_MINHOOK: int -WH_MAXHOOK: int -HC_ACTION: int -HC_GETNEXT: int -HC_SKIP: int -HC_NOREMOVE: int -HC_NOREM: int -HC_SYSMODALON: int -HC_SYSMODALOFF: int -HCBT_MOVESIZE: int -HCBT_MINMAX: int -HCBT_QS: int -HCBT_CREATEWND: int -HCBT_DESTROYWND: int -HCBT_ACTIVATE: int -HCBT_CLICKSKIPPED: int -HCBT_KEYSKIPPED: int -HCBT_SYSCOMMAND: int -HCBT_SETFOCUS: int -MSGF_DIALOGBOX: int -MSGF_MESSAGEBOX: int -MSGF_MENU: int -MSGF_SCROLLBAR: int -MSGF_NEXTWINDOW: int -MSGF_MAX: int -MSGF_USER: int -HSHELL_WINDOWCREATED: int -HSHELL_WINDOWDESTROYED: int -HSHELL_ACTIVATESHELLWINDOW: int -HSHELL_WINDOWACTIVATED: int -HSHELL_GETMINRECT: int -HSHELL_REDRAW: int -HSHELL_TASKMAN: int -HSHELL_LANGUAGE: int -HSHELL_ACCESSIBILITYSTATE: int -ACCESS_STICKYKEYS: int -ACCESS_FILTERKEYS: int -ACCESS_MOUSEKEYS: int -LLKHF_EXTENDED: int -LLKHF_INJECTED: int -LLKHF_ALTDOWN: int -LLKHF_UP: int -LLKHF_LOWER_IL_INJECTED: int -LLMHF_INJECTED: int -LLMHF_LOWER_IL_INJECTED: int -HKL_PREV: int -HKL_NEXT: int -KLF_ACTIVATE: int -KLF_SUBSTITUTE_OK: int -KLF_UNLOADPREVIOUS: int -KLF_REORDER: int -KLF_REPLACELANG: int -KLF_NOTELLSHELL: int -KLF_SETFORPROCESS: int -KL_NAMELENGTH: int -DESKTOP_READOBJECTS: int -DESKTOP_CREATEWINDOW: int -DESKTOP_CREATEMENU: int -DESKTOP_HOOKCONTROL: int -DESKTOP_JOURNALRECORD: int -DESKTOP_JOURNALPLAYBACK: int -DESKTOP_ENUMERATE: int -DESKTOP_WRITEOBJECTS: int -DESKTOP_SWITCHDESKTOP: int -DF_ALLOWOTHERACCOUNTHOOK: int -WINSTA_ENUMDESKTOPS: int -WINSTA_READATTRIBUTES: int -WINSTA_ACCESSCLIPBOARD: int -WINSTA_CREATEDESKTOP: int -WINSTA_WRITEATTRIBUTES: int -WINSTA_ACCESSGLOBALATOMS: int -WINSTA_EXITWINDOWS: int -WINSTA_ENUMERATE: int -WINSTA_READSCREEN: int -WSF_VISIBLE: int -UOI_FLAGS: int -UOI_NAME: int -UOI_TYPE: int -UOI_USER_SID: int -GWL_WNDPROC: int -GWL_HINSTANCE: int -GWL_HWNDPARENT: int -GWL_STYLE: int -GWL_EXSTYLE: int -GWL_USERDATA: int -GWL_ID: int -GCL_MENUNAME: int -GCL_HBRBACKGROUND: int -GCL_HCURSOR: int -GCL_HICON: int -GCL_HMODULE: int -GCL_CBWNDEXTRA: int -GCL_CBCLSEXTRA: int -GCL_WNDPROC: int -GCL_STYLE: int -GCW_ATOM: int -GCL_HICONSM: int -WM_NULL: int -WM_CREATE: int -WM_DESTROY: int -WM_MOVE: int -WM_SIZE: int -WM_ACTIVATE: int -WA_INACTIVE: int -WA_ACTIVE: int -WA_CLICKACTIVE: int -WM_SETFOCUS: int -WM_KILLFOCUS: int -WM_ENABLE: int -WM_SETREDRAW: int -WM_SETTEXT: int -WM_GETTEXT: int -WM_GETTEXTLENGTH: int -WM_PAINT: int -WM_CLOSE: int -WM_QUERYENDSESSION: int -WM_QUIT: int -WM_QUERYOPEN: int -WM_ERASEBKGND: int -WM_SYSCOLORCHANGE: int -WM_ENDSESSION: int -WM_SHOWWINDOW: int -WM_WININICHANGE: int -WM_SETTINGCHANGE: int -WM_DEVMODECHANGE: int -WM_ACTIVATEAPP: int -WM_FONTCHANGE: int -WM_TIMECHANGE: int -WM_CANCELMODE: int -WM_SETCURSOR: int -WM_MOUSEACTIVATE: int -WM_CHILDACTIVATE: int -WM_QUEUESYNC: int -WM_GETMINMAXINFO: int -WM_PAINTICON: int -WM_ICONERASEBKGND: int -WM_NEXTDLGCTL: int -WM_SPOOLERSTATUS: int -WM_DRAWITEM: int -WM_MEASUREITEM: int -WM_DELETEITEM: int -WM_VKEYTOITEM: int -WM_CHARTOITEM: int -WM_SETFONT: int -WM_GETFONT: int -WM_SETHOTKEY: int -WM_GETHOTKEY: int -WM_QUERYDRAGICON: int -WM_COMPAREITEM: int -WM_GETOBJECT: int -WM_COMPACTING: int -WM_COMMNOTIFY: int -WM_WINDOWPOSCHANGING: int -WM_WINDOWPOSCHANGED: int -WM_POWER: int -PWR_OK: int -PWR_FAIL: int 
-PWR_SUSPENDREQUEST: int -PWR_SUSPENDRESUME: int -PWR_CRITICALRESUME: int -WM_COPYDATA: int -WM_CANCELJOURNAL: int -WM_NOTIFY: int -WM_INPUTLANGCHANGEREQUEST: int -WM_INPUTLANGCHANGE: int -WM_TCARD: int -WM_HELP: int -WM_USERCHANGED: int -WM_NOTIFYFORMAT: int -NFR_ANSI: int -NFR_UNICODE: int -NF_QUERY: int -NF_REQUERY: int -WM_CONTEXTMENU: int -WM_STYLECHANGING: int -WM_STYLECHANGED: int -WM_DISPLAYCHANGE: int -WM_GETICON: int -WM_SETICON: int -WM_NCCREATE: int -WM_NCDESTROY: int -WM_NCCALCSIZE: int -WM_NCHITTEST: int -WM_NCPAINT: int -WM_NCACTIVATE: int -WM_GETDLGCODE: int -WM_SYNCPAINT: int -WM_NCMOUSEMOVE: int -WM_NCLBUTTONDOWN: int -WM_NCLBUTTONUP: int -WM_NCLBUTTONDBLCLK: int -WM_NCRBUTTONDOWN: int -WM_NCRBUTTONUP: int -WM_NCRBUTTONDBLCLK: int -WM_NCMBUTTONDOWN: int -WM_NCMBUTTONUP: int -WM_NCMBUTTONDBLCLK: int -WM_KEYFIRST: int -WM_KEYDOWN: int -WM_KEYUP: int -WM_CHAR: int -WM_DEADCHAR: int -WM_SYSKEYDOWN: int -WM_SYSKEYUP: int -WM_SYSCHAR: int -WM_SYSDEADCHAR: int -WM_KEYLAST: int -WM_IME_STARTCOMPOSITION: int -WM_IME_ENDCOMPOSITION: int -WM_IME_COMPOSITION: int -WM_IME_KEYLAST: int -WM_INITDIALOG: int -WM_COMMAND: int -WM_SYSCOMMAND: int -WM_TIMER: int -WM_HSCROLL: int -WM_VSCROLL: int -WM_INITMENU: int -WM_INITMENUPOPUP: int -WM_MENUSELECT: int -WM_MENUCHAR: int -WM_ENTERIDLE: int -WM_MENURBUTTONUP: int -WM_MENUDRAG: int -WM_MENUGETOBJECT: int -WM_UNINITMENUPOPUP: int -WM_MENUCOMMAND: int -WM_CTLCOLORMSGBOX: int -WM_CTLCOLOREDIT: int -WM_CTLCOLORBTN: int -WM_CTLCOLORDLG: int -WM_CTLCOLORSCROLLBAR: int -WM_CTLCOLORSTATIC: int -WM_MOUSEFIRST: int -WM_MOUSEMOVE: int -WM_LBUTTONDOWN: int -WM_LBUTTONUP: int -WM_LBUTTONDBLCLK: int -WM_RBUTTONDOWN: int -WM_RBUTTONUP: int -WM_RBUTTONDBLCLK: int -WM_MBUTTONDOWN: int -WM_MBUTTONUP: int -WM_MBUTTONDBLCLK: int -WM_MOUSEWHEEL: int -WM_MOUSELAST: int -WHEEL_DELTA: int -WHEEL_PAGESCROLL: int -WM_PARENTNOTIFY: int -MENULOOP_WINDOW: int -MENULOOP_POPUP: int -WM_ENTERMENULOOP: int -WM_EXITMENULOOP: int -WM_NEXTMENU: int -WM_SIZING: int -WM_CAPTURECHANGED: int -WM_MOVING: int -WM_POWERBROADCAST: int -PBT_APMQUERYSUSPEND: int -PBT_APMQUERYSTANDBY: int -PBT_APMQUERYSUSPENDFAILED: int -PBT_APMQUERYSTANDBYFAILED: int -PBT_APMSUSPEND: int -PBT_APMSTANDBY: int -PBT_APMRESUMECRITICAL: int -PBT_APMRESUMESUSPEND: int -PBT_APMRESUMESTANDBY: int -PBTF_APMRESUMEFROMFAILURE: int -PBT_APMBATTERYLOW: int -PBT_APMPOWERSTATUSCHANGE: int -PBT_APMOEMEVENT: int -PBT_APMRESUMEAUTOMATIC: int -WM_DEVICECHANGE: int -WM_MDICREATE: int -WM_MDIDESTROY: int -WM_MDIACTIVATE: int -WM_MDIRESTORE: int -WM_MDINEXT: int -WM_MDIMAXIMIZE: int -WM_MDITILE: int -WM_MDICASCADE: int -WM_MDIICONARRANGE: int -WM_MDIGETACTIVE: int -WM_MDISETMENU: int -WM_ENTERSIZEMOVE: int -WM_EXITSIZEMOVE: int -WM_DROPFILES: int -WM_MDIREFRESHMENU: int -WM_IME_SETCONTEXT: int -WM_IME_NOTIFY: int -WM_IME_CONTROL: int -WM_IME_COMPOSITIONFULL: int -WM_IME_SELECT: int -WM_IME_CHAR: int -WM_IME_REQUEST: int -WM_IME_KEYDOWN: int -WM_IME_KEYUP: int -WM_MOUSEHOVER: int -WM_MOUSELEAVE: int -WM_CUT: int -WM_COPY: int -WM_PASTE: int -WM_CLEAR: int -WM_UNDO: int -WM_RENDERFORMAT: int -WM_RENDERALLFORMATS: int -WM_DESTROYCLIPBOARD: int -WM_DRAWCLIPBOARD: int -WM_PAINTCLIPBOARD: int -WM_VSCROLLCLIPBOARD: int -WM_SIZECLIPBOARD: int -WM_ASKCBFORMATNAME: int -WM_CHANGECBCHAIN: int -WM_HSCROLLCLIPBOARD: int -WM_QUERYNEWPALETTE: int -WM_PALETTEISCHANGING: int -WM_PALETTECHANGED: int -WM_HOTKEY: int -WM_PRINT: int -WM_PRINTCLIENT: int -WM_HANDHELDFIRST: int -WM_HANDHELDLAST: int -WM_AFXFIRST: int -WM_AFXLAST: int 
-WM_PENWINFIRST: int -WM_PENWINLAST: int -WM_APP: int -WMSZ_LEFT: int -WMSZ_RIGHT: int -WMSZ_TOP: int -WMSZ_TOPLEFT: int -WMSZ_TOPRIGHT: int -WMSZ_BOTTOM: int -WMSZ_BOTTOMLEFT: int -WMSZ_BOTTOMRIGHT: int -HTERROR: int -HTTRANSPARENT: int -HTNOWHERE: int -HTCLIENT: int -HTCAPTION: int -HTSYSMENU: int -HTGROWBOX: int -HTSIZE: int -HTMENU: int -HTHSCROLL: int -HTVSCROLL: int -HTMINBUTTON: int -HTMAXBUTTON: int -HTLEFT: int -HTRIGHT: int -HTTOP: int -HTTOPLEFT: int -HTTOPRIGHT: int -HTBOTTOM: int -HTBOTTOMLEFT: int -HTBOTTOMRIGHT: int -HTBORDER: int -HTREDUCE: int -HTZOOM: int -HTSIZEFIRST: int -HTSIZELAST: int -HTOBJECT: int -HTCLOSE: int -HTHELP: int -SMTO_NORMAL: int -SMTO_BLOCK: int -SMTO_ABORTIFHUNG: int -SMTO_NOTIMEOUTIFNOTHUNG: int -MA_ACTIVATE: int -MA_ACTIVATEANDEAT: int -MA_NOACTIVATE: int -MA_NOACTIVATEANDEAT: int -ICON_SMALL: int -ICON_BIG: int -SIZE_RESTORED: int -SIZE_MINIMIZED: int -SIZE_MAXIMIZED: int -SIZE_MAXSHOW: int -SIZE_MAXHIDE: int -SIZENORMAL: int -SIZEICONIC: int -SIZEFULLSCREEN: int -SIZEZOOMSHOW: int -SIZEZOOMHIDE: int -WVR_ALIGNTOP: int -WVR_ALIGNLEFT: int -WVR_ALIGNBOTTOM: int -WVR_ALIGNRIGHT: int -WVR_HREDRAW: int -WVR_VREDRAW: int -WVR_REDRAW: int -WVR_VALIDRECTS: int -MK_LBUTTON: int -MK_RBUTTON: int -MK_SHIFT: int -MK_CONTROL: int -MK_MBUTTON: int -TME_HOVER: int -TME_LEAVE: int -TME_QUERY: int -TME_CANCEL: int -HOVER_DEFAULT: int -WS_OVERLAPPED: int -WS_POPUP: int -WS_CHILD: int -WS_MINIMIZE: int -WS_VISIBLE: int -WS_DISABLED: int -WS_CLIPSIBLINGS: int -WS_CLIPCHILDREN: int -WS_MAXIMIZE: int -WS_CAPTION: int -WS_BORDER: int -WS_DLGFRAME: int -WS_VSCROLL: int -WS_HSCROLL: int -WS_SYSMENU: int -WS_THICKFRAME: int -WS_GROUP: int -WS_TABSTOP: int -WS_MINIMIZEBOX: int -WS_MAXIMIZEBOX: int -WS_TILED: int -WS_ICONIC: int -WS_SIZEBOX: int -WS_OVERLAPPEDWINDOW: int -WS_POPUPWINDOW: int -WS_CHILDWINDOW: int -WS_TILEDWINDOW: int -WS_EX_DLGMODALFRAME: int -WS_EX_NOPARENTNOTIFY: int -WS_EX_TOPMOST: int -WS_EX_ACCEPTFILES: int -WS_EX_TRANSPARENT: int -WS_EX_MDICHILD: int -WS_EX_TOOLWINDOW: int -WS_EX_WINDOWEDGE: int -WS_EX_CLIENTEDGE: int -WS_EX_CONTEXTHELP: int -WS_EX_RIGHT: int -WS_EX_LEFT: int -WS_EX_RTLREADING: int -WS_EX_LTRREADING: int -WS_EX_LEFTSCROLLBAR: int -WS_EX_RIGHTSCROLLBAR: int -WS_EX_CONTROLPARENT: int -WS_EX_STATICEDGE: int -WS_EX_APPWINDOW: int -WS_EX_OVERLAPPEDWINDOW: int -WS_EX_PALETTEWINDOW: int -WS_EX_LAYERED: int -WS_EX_NOINHERITLAYOUT: int -WS_EX_LAYOUTRTL: int -WS_EX_COMPOSITED: int -WS_EX_NOACTIVATE: int -CS_VREDRAW: int -CS_HREDRAW: int -CS_DBLCLKS: int -CS_OWNDC: int -CS_CLASSDC: int -CS_PARENTDC: int -CS_NOCLOSE: int -CS_SAVEBITS: int -CS_BYTEALIGNCLIENT: int -CS_BYTEALIGNWINDOW: int -CS_GLOBALCLASS: int -CS_IME: int -PRF_CHECKVISIBLE: int -PRF_NONCLIENT: int -PRF_CLIENT: int -PRF_ERASEBKGND: int -PRF_CHILDREN: int -PRF_OWNED: int -BDR_RAISEDOUTER: int -BDR_SUNKENOUTER: int -BDR_RAISEDINNER: int -BDR_SUNKENINNER: int -BDR_OUTER: int -BDR_INNER: int -EDGE_RAISED: int -EDGE_SUNKEN: int -EDGE_ETCHED: int -EDGE_BUMP: int -ISMEX_NOSEND: int -ISMEX_SEND: int -ISMEX_NOTIFY: int -ISMEX_CALLBACK: int -ISMEX_REPLIED: int -CW_USEDEFAULT: int -FLASHW_STOP: int -FLASHW_CAPTION: int -FLASHW_TRAY: int -FLASHW_ALL: int -FLASHW_TIMER: int -FLASHW_TIMERNOFG: int -DS_ABSALIGN: int -DS_SYSMODAL: int -DS_LOCALEDIT: int -DS_SETFONT: int -DS_MODALFRAME: int -DS_NOIDLEMSG: int -DS_SETFOREGROUND: int -DS_3DLOOK: int -DS_FIXEDSYS: int -DS_NOFAILCREATE: int -DS_CONTROL: int -DS_CENTER: int -DS_CENTERMOUSE: int -DS_CONTEXTHELP: int -DM_GETDEFID: int -DM_SETDEFID: int 
-DM_REPOSITION: int -DC_HASDEFID: int -DLGC_WANTARROWS: int -DLGC_WANTTAB: int -DLGC_WANTALLKEYS: int -DLGC_WANTMESSAGE: int -DLGC_HASSETSEL: int -DLGC_DEFPUSHBUTTON: int -DLGC_UNDEFPUSHBUTTON: int -DLGC_RADIOBUTTON: int -DLGC_WANTCHARS: int -DLGC_STATIC: int -DLGC_BUTTON: int -LB_CTLCODE: int -LB_OKAY: int -LB_ERR: int -LB_ERRSPACE: int -LBN_ERRSPACE: int -LBN_SELCHANGE: int -LBN_DBLCLK: int -LBN_SELCANCEL: int -LBN_SETFOCUS: int -LBN_KILLFOCUS: int -LB_ADDSTRING: int -LB_INSERTSTRING: int -LB_DELETESTRING: int -LB_SELITEMRANGEEX: int -LB_RESETCONTENT: int -LB_SETSEL: int -LB_SETCURSEL: int -LB_GETSEL: int -LB_GETCURSEL: int -LB_GETTEXT: int -LB_GETTEXTLEN: int -LB_GETCOUNT: int -LB_SELECTSTRING: int -LB_DIR: int -LB_GETTOPINDEX: int -LB_FINDSTRING: int -LB_GETSELCOUNT: int -LB_GETSELITEMS: int -LB_SETTABSTOPS: int -LB_GETHORIZONTALEXTENT: int -LB_SETHORIZONTALEXTENT: int -LB_SETCOLUMNWIDTH: int -LB_ADDFILE: int -LB_SETTOPINDEX: int -LB_GETITEMRECT: int -LB_GETITEMDATA: int -LB_SETITEMDATA: int -LB_SELITEMRANGE: int -LB_SETANCHORINDEX: int -LB_GETANCHORINDEX: int -LB_SETCARETINDEX: int -LB_GETCARETINDEX: int -LB_SETITEMHEIGHT: int -LB_GETITEMHEIGHT: int -LB_FINDSTRINGEXACT: int -LB_SETLOCALE: int -LB_GETLOCALE: int -LB_SETCOUNT: int -LB_INITSTORAGE: int -LB_ITEMFROMPOINT: int -LB_MSGMAX: int -LBS_NOTIFY: int -LBS_SORT: int -LBS_NOREDRAW: int -LBS_MULTIPLESEL: int -LBS_OWNERDRAWFIXED: int -LBS_OWNERDRAWVARIABLE: int -LBS_HASSTRINGS: int -LBS_USETABSTOPS: int -LBS_NOINTEGRALHEIGHT: int -LBS_MULTICOLUMN: int -LBS_WANTKEYBOARDINPUT: int -LBS_EXTENDEDSEL: int -LBS_DISABLENOSCROLL: int -LBS_NODATA: int -LBS_NOSEL: int -LBS_STANDARD: int -CB_OKAY: int -CB_ERR: int -CB_ERRSPACE: int -CBN_ERRSPACE: int -CBN_SELCHANGE: int -CBN_DBLCLK: int -CBN_SETFOCUS: int -CBN_KILLFOCUS: int -CBN_EDITCHANGE: int -CBN_EDITUPDATE: int -CBN_DROPDOWN: int -CBN_CLOSEUP: int -CBN_SELENDOK: int -CBN_SELENDCANCEL: int -CBS_SIMPLE: int -CBS_DROPDOWN: int -CBS_OWNERDRAWFIXED: int -CBS_OWNERDRAWVARIABLE: int -CBS_AUTOHSCROLL: int -CBS_OEMCONVERT: int -CBS_SORT: int -CBS_HASSTRINGS: int -CBS_NOINTEGRALHEIGHT: int -CBS_DISABLENOSCROLL: int -CBS_UPPERCASE: int -CBS_LOWERCASE: int -CB_GETEDITSEL: int -CB_LIMITTEXT: int -CB_SETEDITSEL: int -CB_ADDSTRING: int -CB_DELETESTRING: int -CB_DIR: int -CB_GETCOUNT: int -CB_GETCURSEL: int -CB_GETLBTEXT: int -CB_GETLBTEXTLEN: int -CB_INSERTSTRING: int -CB_RESETCONTENT: int -CB_FINDSTRING: int -CB_SELECTSTRING: int -CB_SETCURSEL: int -CB_SHOWDROPDOWN: int -CB_GETITEMDATA: int -CB_SETITEMDATA: int -CB_GETDROPPEDCONTROLRECT: int -CB_SETITEMHEIGHT: int -CB_GETITEMHEIGHT: int -CB_SETEXTENDEDUI: int -CB_GETEXTENDEDUI: int -CB_GETDROPPEDSTATE: int -CB_FINDSTRINGEXACT: int -CB_SETLOCALE: int -CB_GETLOCALE: int -CB_GETTOPINDEX: int -CB_SETTOPINDEX: int -CB_GETHORIZONTALEXTENT: int -CB_SETHORIZONTALEXTENT: int -CB_GETDROPPEDWIDTH: int -CB_SETDROPPEDWIDTH: int -CB_INITSTORAGE: int -CB_MSGMAX: int -SBS_HORZ: int -SBS_VERT: int -SBS_TOPALIGN: int -SBS_LEFTALIGN: int -SBS_BOTTOMALIGN: int -SBS_RIGHTALIGN: int -SBS_SIZEBOXTOPLEFTALIGN: int -SBS_SIZEBOXBOTTOMRIGHTALIGN: int -SBS_SIZEBOX: int -SBS_SIZEGRIP: int -SBM_SETPOS: int -SBM_GETPOS: int -SBM_SETRANGE: int -SBM_SETRANGEREDRAW: int -SBM_GETRANGE: int -SBM_ENABLE_ARROWS: int -SBM_SETSCROLLINFO: int -SBM_GETSCROLLINFO: int -SIF_RANGE: int -SIF_PAGE: int -SIF_POS: int -SIF_DISABLENOSCROLL: int -SIF_TRACKPOS: int -SIF_ALL: int -MDIS_ALLCHILDSTYLES: int -MDITILE_VERTICAL: int -MDITILE_HORIZONTAL: int -MDITILE_SKIPDISABLED: int -MDITILE_ZORDER: int 
-IMC_GETCANDIDATEPOS: int -IMC_SETCANDIDATEPOS: int -IMC_GETCOMPOSITIONFONT: int -IMC_SETCOMPOSITIONFONT: int -IMC_GETCOMPOSITIONWINDOW: int -IMC_SETCOMPOSITIONWINDOW: int -IMC_GETSTATUSWINDOWPOS: int -IMC_SETSTATUSWINDOWPOS: int -IMC_CLOSESTATUSWINDOW: int -IMC_OPENSTATUSWINDOW: int -DELETE: int -READ_CONTROL: int -WRITE_DAC: int -WRITE_OWNER: int -SYNCHRONIZE: int -STANDARD_RIGHTS_REQUIRED: int -STANDARD_RIGHTS_READ: int -STANDARD_RIGHTS_WRITE: int -STANDARD_RIGHTS_EXECUTE: int -STANDARD_RIGHTS_ALL: int -SPECIFIC_RIGHTS_ALL: int -ACCESS_SYSTEM_SECURITY: int -MAXIMUM_ALLOWED: int -GENERIC_READ: int -GENERIC_WRITE: int -GENERIC_EXECUTE: int -GENERIC_ALL: int -SERVICE_KERNEL_DRIVER: int -SERVICE_FILE_SYSTEM_DRIVER: int -SERVICE_ADAPTER: int -SERVICE_RECOGNIZER_DRIVER: int -SERVICE_DRIVER: int -SERVICE_WIN32_OWN_PROCESS: int -SERVICE_WIN32_SHARE_PROCESS: int -SERVICE_WIN32: int -SERVICE_INTERACTIVE_PROCESS: int -SERVICE_TYPE_ALL: int -SERVICE_BOOT_START: int -SERVICE_SYSTEM_START: int -SERVICE_AUTO_START: int -SERVICE_DEMAND_START: int -SERVICE_DISABLED: int -SERVICE_ERROR_IGNORE: int -SERVICE_ERROR_NORMAL: int -SERVICE_ERROR_SEVERE: int -SERVICE_ERROR_CRITICAL: int -TAPE_ERASE_SHORT: int -TAPE_ERASE_LONG: int -TAPE_LOAD: int -TAPE_UNLOAD: int -TAPE_TENSION: int -TAPE_LOCK: int -TAPE_UNLOCK: int -TAPE_FORMAT: int -TAPE_SETMARKS: int -TAPE_FILEMARKS: int -TAPE_SHORT_FILEMARKS: int -TAPE_LONG_FILEMARKS: int -TAPE_ABSOLUTE_POSITION: int -TAPE_LOGICAL_POSITION: int -TAPE_PSEUDO_LOGICAL_POSITION: int -TAPE_REWIND: int -TAPE_ABSOLUTE_BLOCK: int -TAPE_LOGICAL_BLOCK: int -TAPE_PSEUDO_LOGICAL_BLOCK: int -TAPE_SPACE_END_OF_DATA: int -TAPE_SPACE_RELATIVE_BLOCKS: int -TAPE_SPACE_FILEMARKS: int -TAPE_SPACE_SEQUENTIAL_FMKS: int -TAPE_SPACE_SETMARKS: int -TAPE_SPACE_SEQUENTIAL_SMKS: int -TAPE_DRIVE_FIXED: int -TAPE_DRIVE_SELECT: int -TAPE_DRIVE_INITIATOR: int -TAPE_DRIVE_ERASE_SHORT: int -TAPE_DRIVE_ERASE_LONG: int -TAPE_DRIVE_ERASE_BOP_ONLY: int -TAPE_DRIVE_ERASE_IMMEDIATE: int -TAPE_DRIVE_TAPE_CAPACITY: int -TAPE_DRIVE_TAPE_REMAINING: int -TAPE_DRIVE_FIXED_BLOCK: int -TAPE_DRIVE_VARIABLE_BLOCK: int -TAPE_DRIVE_WRITE_PROTECT: int -TAPE_DRIVE_EOT_WZ_SIZE: int -TAPE_DRIVE_ECC: int -TAPE_DRIVE_COMPRESSION: int -TAPE_DRIVE_PADDING: int -TAPE_DRIVE_REPORT_SMKS: int -TAPE_DRIVE_GET_ABSOLUTE_BLK: int -TAPE_DRIVE_GET_LOGICAL_BLK: int -TAPE_DRIVE_SET_EOT_WZ_SIZE: int -TAPE_DRIVE_LOAD_UNLOAD: int -TAPE_DRIVE_TENSION: int -TAPE_DRIVE_LOCK_UNLOCK: int -TAPE_DRIVE_REWIND_IMMEDIATE: int -TAPE_DRIVE_SET_BLOCK_SIZE: int -TAPE_DRIVE_LOAD_UNLD_IMMED: int -TAPE_DRIVE_TENSION_IMMED: int -TAPE_DRIVE_LOCK_UNLK_IMMED: int -TAPE_DRIVE_SET_ECC: int -TAPE_DRIVE_SET_COMPRESSION: int -TAPE_DRIVE_SET_PADDING: int -TAPE_DRIVE_SET_REPORT_SMKS: int -TAPE_DRIVE_ABSOLUTE_BLK: int -TAPE_DRIVE_ABS_BLK_IMMED: int -TAPE_DRIVE_LOGICAL_BLK: int -TAPE_DRIVE_LOG_BLK_IMMED: int -TAPE_DRIVE_END_OF_DATA: int -TAPE_DRIVE_RELATIVE_BLKS: int -TAPE_DRIVE_FILEMARKS: int -TAPE_DRIVE_SEQUENTIAL_FMKS: int -TAPE_DRIVE_SETMARKS: int -TAPE_DRIVE_SEQUENTIAL_SMKS: int -TAPE_DRIVE_REVERSE_POSITION: int -TAPE_DRIVE_SPACE_IMMEDIATE: int -TAPE_DRIVE_WRITE_SETMARKS: int -TAPE_DRIVE_WRITE_FILEMARKS: int -TAPE_DRIVE_WRITE_SHORT_FMKS: int -TAPE_DRIVE_WRITE_LONG_FMKS: int -TAPE_DRIVE_WRITE_MARK_IMMED: int -TAPE_DRIVE_FORMAT: int -TAPE_DRIVE_FORMAT_IMMEDIATE: int -TAPE_FIXED_PARTITIONS: int -TAPE_SELECT_PARTITIONS: int -TAPE_INITIATOR_PARTITIONS: int -APPLICATION_ERROR_MASK: int -ERROR_SEVERITY_SUCCESS: int -ERROR_SEVERITY_INFORMATIONAL: int -ERROR_SEVERITY_WARNING: int 
-ERROR_SEVERITY_ERROR: int -MINCHAR: int -MAXCHAR: int -MINSHORT: int -MAXSHORT: int -MINLONG: int -MAXLONG: int -MAXBYTE: int -MAXWORD: int -MAXDWORD: int -LANG_NEUTRAL: int -LANG_BULGARIAN: int -LANG_CHINESE: int -LANG_CROATIAN: int -LANG_CZECH: int -LANG_DANISH: int -LANG_DUTCH: int -LANG_ENGLISH: int -LANG_FINNISH: int -LANG_FRENCH: int -LANG_GERMAN: int -LANG_GREEK: int -LANG_HUNGARIAN: int -LANG_ICELANDIC: int -LANG_ITALIAN: int -LANG_JAPANESE: int -LANG_KOREAN: int -LANG_NORWEGIAN: int -LANG_POLISH: int -LANG_PORTUGUESE: int -LANG_ROMANIAN: int -LANG_RUSSIAN: int -LANG_SLOVAK: int -LANG_SLOVENIAN: int -LANG_SPANISH: int -LANG_SWEDISH: int -LANG_TURKISH: int -SUBLANG_NEUTRAL: int -SUBLANG_DEFAULT: int -SUBLANG_SYS_DEFAULT: int -SUBLANG_CHINESE_TRADITIONAL: int -SUBLANG_CHINESE_SIMPLIFIED: int -SUBLANG_CHINESE_HONGKONG: int -SUBLANG_CHINESE_SINGAPORE: int -SUBLANG_DUTCH: int -SUBLANG_DUTCH_BELGIAN: int -SUBLANG_ENGLISH_US: int -SUBLANG_ENGLISH_UK: int -SUBLANG_ENGLISH_AUS: int -SUBLANG_ENGLISH_CAN: int -SUBLANG_ENGLISH_NZ: int -SUBLANG_ENGLISH_EIRE: int -SUBLANG_FRENCH: int -SUBLANG_FRENCH_BELGIAN: int -SUBLANG_FRENCH_CANADIAN: int -SUBLANG_FRENCH_SWISS: int -SUBLANG_GERMAN: int -SUBLANG_GERMAN_SWISS: int -SUBLANG_GERMAN_AUSTRIAN: int -SUBLANG_ITALIAN: int -SUBLANG_ITALIAN_SWISS: int -SUBLANG_NORWEGIAN_BOKMAL: int -SUBLANG_NORWEGIAN_NYNORSK: int -SUBLANG_PORTUGUESE: int -SUBLANG_PORTUGUESE_BRAZILIAN: int -SUBLANG_SPANISH: int -SUBLANG_SPANISH_MEXICAN: int -SUBLANG_SPANISH_MODERN: int -SORT_DEFAULT: int -SORT_JAPANESE_XJIS: int -SORT_JAPANESE_UNICODE: int -SORT_CHINESE_BIG5: int -SORT_CHINESE_UNICODE: int -SORT_KOREAN_KSC: int -SORT_KOREAN_UNICODE: int +from typing import Final + +WINVER: Final = 1280 +WM_USER: Final = 1024 +PY_0U: Final = 0 +OFN_READONLY: Final = 1 +OFN_OVERWRITEPROMPT: Final = 2 +OFN_HIDEREADONLY: Final = 4 +OFN_NOCHANGEDIR: Final = 8 +OFN_SHOWHELP: Final = 16 +OFN_ENABLEHOOK: Final = 32 +OFN_ENABLETEMPLATE: Final = 64 +OFN_ENABLETEMPLATEHANDLE: Final = 128 +OFN_NOVALIDATE: Final = 256 +OFN_ALLOWMULTISELECT: Final = 512 +OFN_EXTENSIONDIFFERENT: Final = 1024 +OFN_PATHMUSTEXIST: Final = 2048 +OFN_FILEMUSTEXIST: Final = 4096 +OFN_CREATEPROMPT: Final = 8192 +OFN_SHAREAWARE: Final = 16384 +OFN_NOREADONLYRETURN: Final = 32768 +OFN_NOTESTFILECREATE: Final = 65536 +OFN_NONETWORKBUTTON: Final = 131072 +OFN_NOLONGNAMES: Final = 262144 +OFN_EXPLORER: Final = 524288 +OFN_NODEREFERENCELINKS: Final = 1048576 +OFN_LONGNAMES: Final = 2097152 +OFN_ENABLEINCLUDENOTIFY: Final = 4194304 +OFN_ENABLESIZING: Final = 8388608 +OFN_DONTADDTORECENT: Final = 33554432 +OFN_FORCESHOWHIDDEN: Final = 268435456 +OFN_EX_NOPLACESBAR: Final = 1 +OFN_SHAREFALLTHROUGH: Final = 2 +OFN_SHARENOWARN: Final = 1 +OFN_SHAREWARN: Final = 0 +CDN_FIRST: Final[int] +CDN_LAST: Final[int] +CDN_INITDONE: Final[int] +CDN_SELCHANGE: Final[int] +CDN_FOLDERCHANGE: Final[int] +CDN_SHAREVIOLATION: Final[int] +CDN_HELP: Final[int] +CDN_FILEOK: Final[int] +CDN_TYPECHANGE: Final[int] +CDN_INCLUDEITEM: Final[int] +CDM_FIRST: Final[int] +CDM_LAST: Final[int] +CDM_GETSPEC: Final[int] +CDM_GETFILEPATH: Final[int] +CDM_GETFOLDERPATH: Final[int] +CDM_GETFOLDERIDLIST: Final[int] +CDM_SETCONTROLTEXT: Final[int] +CDM_HIDECONTROL: Final[int] +CDM_SETDEFEXT: Final[int] +CC_RGBINIT: Final = 1 +CC_FULLOPEN: Final = 2 +CC_PREVENTFULLOPEN: Final = 4 +CC_SHOWHELP: Final = 8 +CC_ENABLEHOOK: Final = 16 +CC_ENABLETEMPLATE: Final = 32 +CC_ENABLETEMPLATEHANDLE: Final = 64 +CC_SOLIDCOLOR: Final = 128 +CC_ANYCOLOR: Final = 256 +FR_DOWN: Final = 1 
+FR_WHOLEWORD: Final = 2
+FR_MATCHCASE: Final = 4
+FR_FINDNEXT: Final = 8
+FR_REPLACE: Final = 16
+FR_REPLACEALL: Final = 32
+FR_DIALOGTERM: Final = 64
+FR_SHOWHELP: Final = 128
+FR_ENABLEHOOK: Final = 256
+FR_ENABLETEMPLATE: Final = 512
+FR_NOUPDOWN: Final = 1024
+FR_NOMATCHCASE: Final = 2048
+FR_NOWHOLEWORD: Final = 4096
+FR_ENABLETEMPLATEHANDLE: Final = 8192
+FR_HIDEUPDOWN: Final = 16384
+FR_HIDEMATCHCASE: Final = 32768
+FR_HIDEWHOLEWORD: Final = 65536
+CF_SCREENFONTS: Final = 1
+CF_PRINTERFONTS: Final = 2
+CF_BOTH: Final[int]
+CF_SHOWHELP: Final = 4
+CF_ENABLEHOOK: Final = 8
+CF_ENABLETEMPLATE: Final = 16
+CF_ENABLETEMPLATEHANDLE: Final = 32
+CF_INITTOLOGFONTSTRUCT: Final = 64
+CF_USESTYLE: Final = 128
+CF_EFFECTS: Final = 256
+CF_APPLY: Final = 512
+CF_ANSIONLY: Final = 1024
+CF_SCRIPTSONLY: Final = CF_ANSIONLY
+CF_NOVECTORFONTS: Final = 2048
+CF_NOOEMFONTS: Final = CF_NOVECTORFONTS
+CF_NOSIMULATIONS: Final = 4096
+CF_LIMITSIZE: Final = 8192
+CF_FIXEDPITCHONLY: Final = 16384
+CF_WYSIWYG: Final = 32768
+CF_FORCEFONTEXIST: Final = 65536
+CF_SCALABLEONLY: Final = 131072
+CF_TTONLY: Final = 262144
+CF_NOFACESEL: Final = 524288
+CF_NOSTYLESEL: Final = 1048576
+CF_NOSIZESEL: Final = 2097152
+CF_SELECTSCRIPT: Final = 4194304
+CF_NOSCRIPTSEL: Final = 8388608
+CF_NOVERTFONTS: Final = 16777216
+SIMULATED_FONTTYPE: Final = 32768
+PRINTER_FONTTYPE: Final = 16384
+SCREEN_FONTTYPE: Final = 8192
+BOLD_FONTTYPE: Final = 256
+ITALIC_FONTTYPE: Final = 512
+REGULAR_FONTTYPE: Final = 1024
+OPENTYPE_FONTTYPE: Final = 65536
+TYPE1_FONTTYPE: Final = 131072
+DSIG_FONTTYPE: Final = 262144
+WM_CHOOSEFONT_GETLOGFONT: Final[int]
+WM_CHOOSEFONT_SETLOGFONT: Final[int]
+WM_CHOOSEFONT_SETFLAGS: Final[int]
+LBSELCHSTRINGA: Final = "commdlg_LBSelChangedNotify"
+SHAREVISTRINGA: Final = "commdlg_ShareViolation"
+FILEOKSTRINGA: Final = "commdlg_FileNameOK"
+COLOROKSTRINGA: Final = "commdlg_ColorOK"
+SETRGBSTRINGA: Final = "commdlg_SetRGBColor"
+HELPMSGSTRINGA: Final = "commdlg_help"
+FINDMSGSTRINGA: Final = "commdlg_FindReplace"
+LBSELCHSTRING: Final = LBSELCHSTRINGA
+SHAREVISTRING: Final = SHAREVISTRINGA
+FILEOKSTRING: Final = FILEOKSTRINGA
+COLOROKSTRING: Final = COLOROKSTRINGA
+SETRGBSTRING: Final = SETRGBSTRINGA
+HELPMSGSTRING: Final = HELPMSGSTRINGA
+FINDMSGSTRING: Final = FINDMSGSTRINGA
+CD_LBSELNOITEMS: Final = -1
+CD_LBSELCHANGE: Final = 0
+CD_LBSELSUB: Final = 1
+CD_LBSELADD: Final = 2
+PD_ALLPAGES: Final = 0
+PD_SELECTION: Final = 1
+PD_PAGENUMS: Final = 2
+PD_NOSELECTION: Final = 4
+PD_NOPAGENUMS: Final = 8
+PD_COLLATE: Final = 16
+PD_PRINTTOFILE: Final = 32
+PD_PRINTSETUP: Final = 64
+PD_NOWARNING: Final = 128
+PD_RETURNDC: Final = 256
+PD_RETURNIC: Final = 512
+PD_RETURNDEFAULT: Final = 1024
+PD_SHOWHELP: Final = 2048
+PD_ENABLEPRINTHOOK: Final = 4096
+PD_ENABLESETUPHOOK: Final = 8192
+PD_ENABLEPRINTTEMPLATE: Final = 16384
+PD_ENABLESETUPTEMPLATE: Final = 32768
+PD_ENABLEPRINTTEMPLATEHANDLE: Final = 65536
+PD_ENABLESETUPTEMPLATEHANDLE: Final = 131072
+PD_USEDEVMODECOPIES: Final = 262144
+PD_DISABLEPRINTTOFILE: Final = 524288
+PD_HIDEPRINTTOFILE: Final = 1048576
+PD_NONETWORKBUTTON: Final = 2097152
+DN_DEFAULTPRN: Final = 1
+WM_PSD_PAGESETUPDLG: Final = WM_USER
+WM_PSD_FULLPAGERECT: Final[int]
+WM_PSD_MINMARGINRECT: Final[int]
+WM_PSD_MARGINRECT: Final[int]
+WM_PSD_GREEKTEXTRECT: Final[int]
+WM_PSD_ENVSTAMPRECT: Final[int]
+WM_PSD_YAFULLPAGERECT: Final[int]
+PSD_DEFAULTMINMARGINS: Final = 0
+PSD_INWININIINTLMEASURE: Final = 0
+PSD_MINMARGINS: Final = 1
+PSD_MARGINS: Final = 2
+PSD_INTHOUSANDTHSOFINCHES: Final = 4
+PSD_INHUNDREDTHSOFMILLIMETERS: Final = 8
+PSD_DISABLEMARGINS: Final = 16
+PSD_DISABLEPRINTER: Final = 32
+PSD_NOWARNING: Final = 128
+PSD_DISABLEORIENTATION: Final = 256
+PSD_RETURNDEFAULT: Final = 1024
+PSD_DISABLEPAPER: Final = 512
+PSD_SHOWHELP: Final = 2048
+PSD_ENABLEPAGESETUPHOOK: Final = 8192
+PSD_ENABLEPAGESETUPTEMPLATE: Final = 32768
+PSD_ENABLEPAGESETUPTEMPLATEHANDLE: Final = 131072
+PSD_ENABLEPAGEPAINTHOOK: Final = 262144
+PSD_DISABLEPAGEPAINTING: Final = 524288
+PSD_NONETWORKBUTTON: Final = 2097152
+
+HKEY_CLASSES_ROOT: Final = -2147483648
+HKEY_CURRENT_USER: Final = -2147483647
+HKEY_LOCAL_MACHINE: Final = -2147483646
+HKEY_USERS: Final = -2147483645
+HKEY_PERFORMANCE_DATA: Final = -2147483644
+HKEY_CURRENT_CONFIG: Final = -2147483643
+HKEY_DYN_DATA: Final = -2147483642
+HKEY_PERFORMANCE_TEXT: Final = -2147483568
+HKEY_PERFORMANCE_NLSTEXT: Final = -2147483552
+
+HWND_BROADCAST: Final = 65535
+HWND_DESKTOP: Final = 0
+HWND_TOP: Final = 0
+HWND_BOTTOM: Final = 1
+HWND_TOPMOST: Final = -1
+HWND_NOTOPMOST: Final = -2
+HWND_MESSAGE: Final = -3
+
+SM_CXSCREEN: Final = 0
+SM_CYSCREEN: Final = 1
+SM_CXVSCROLL: Final = 2
+SM_CYHSCROLL: Final = 3
+SM_CYCAPTION: Final = 4
+SM_CXBORDER: Final = 5
+SM_CYBORDER: Final = 6
+SM_CXDLGFRAME: Final = 7
+SM_CYDLGFRAME: Final = 8
+SM_CYVTHUMB: Final = 9
+SM_CXHTHUMB: Final = 10
+SM_CXICON: Final = 11
+SM_CYICON: Final = 12
+SM_CXCURSOR: Final = 13
+SM_CYCURSOR: Final = 14
+SM_CYMENU: Final = 15
+SM_CXFULLSCREEN: Final = 16
+SM_CYFULLSCREEN: Final = 17
+SM_CYKANJIWINDOW: Final = 18
+SM_MOUSEPRESENT: Final = 19
+SM_CYVSCROLL: Final = 20
+SM_CXHSCROLL: Final = 21
+SM_DEBUG: Final = 22
+SM_SWAPBUTTON: Final = 23
+SM_RESERVED1: Final = 24
+SM_RESERVED2: Final = 25
+SM_RESERVED3: Final = 26
+SM_RESERVED4: Final = 27
+SM_CXMIN: Final = 28
+SM_CYMIN: Final = 29
+SM_CXSIZE: Final = 30
+SM_CYSIZE: Final = 31
+SM_CXFRAME: Final = 32
+SM_CYFRAME: Final = 33
+SM_CXMINTRACK: Final = 34
+SM_CYMINTRACK: Final = 35
+SM_CXDOUBLECLK: Final = 36
+SM_CYDOUBLECLK: Final = 37
+SM_CXICONSPACING: Final = 38
+SM_CYICONSPACING: Final = 39
+SM_MENUDROPALIGNMENT: Final = 40
+SM_PENWINDOWS: Final = 41
+SM_DBCSENABLED: Final = 42
+SM_CMOUSEBUTTONS: Final = 43
+SM_CXFIXEDFRAME: Final = SM_CXDLGFRAME
+SM_CYFIXEDFRAME: Final = SM_CYDLGFRAME
+SM_CXSIZEFRAME: Final = SM_CXFRAME
+SM_CYSIZEFRAME: Final = SM_CYFRAME
+SM_SECURE: Final = 44
+SM_CXEDGE: Final = 45
+SM_CYEDGE: Final = 46
+SM_CXMINSPACING: Final = 47
+SM_CYMINSPACING: Final = 48
+SM_CXSMICON: Final = 49
+SM_CYSMICON: Final = 50
+SM_CYSMCAPTION: Final = 51
+SM_CXSMSIZE: Final = 52
+SM_CYSMSIZE: Final = 53
+SM_CXMENUSIZE: Final = 54
+SM_CYMENUSIZE: Final = 55
+SM_ARRANGE: Final = 56
+SM_CXMINIMIZED: Final = 57
+SM_CYMINIMIZED: Final = 58
+SM_CXMAXTRACK: Final = 59
+SM_CYMAXTRACK: Final = 60
+SM_CXMAXIMIZED: Final = 61
+SM_CYMAXIMIZED: Final = 62
+SM_NETWORK: Final = 63
+SM_CLEANBOOT: Final = 67
+SM_CXDRAG: Final = 68
+SM_CYDRAG: Final = 69
+SM_SHOWSOUNDS: Final = 70
+SM_CXMENUCHECK: Final = 71
+SM_CYMENUCHECK: Final = 72
+SM_SLOWMACHINE: Final = 73
+SM_MIDEASTENABLED: Final = 74
+SM_MOUSEWHEELPRESENT: Final = 75
+SM_XVIRTUALSCREEN: Final = 76
+SM_YVIRTUALSCREEN: Final = 77
+SM_CXVIRTUALSCREEN: Final = 78
+SM_CYVIRTUALSCREEN: Final = 79
+SM_CMONITORS: Final = 80
+SM_SAMEDISPLAYFORMAT: Final = 81
+SM_CMETRICS: Final = 83
+MNC_IGNORE: Final = 0
+MNC_CLOSE: Final = 1
+MNC_EXECUTE: Final = 2
+MNC_SELECT: Final = 3
+MNS_NOCHECK: Final = -2147483648
+MNS_MODELESS: Final = 1073741824
+MNS_DRAGDROP: Final = 536870912
+MNS_AUTODISMISS: Final = 268435456 +MNS_NOTIFYBYPOS: Final = 134217728 +MNS_CHECKORBMP: Final = 67108864 +MIM_MAXHEIGHT: Final = 1 +MIM_BACKGROUND: Final = 2 +MIM_HELPID: Final = 4 +MIM_MENUDATA: Final = 8 +MIM_STYLE: Final = 16 +MIM_APPLYTOSUBMENUS: Final = -2147483648 +MND_CONTINUE: Final = 0 +MND_ENDMENU: Final = 1 +MNGOF_GAP: Final = 3 +MNGO_NOINTERFACE: Final = 0 +MNGO_NOERROR: Final = 1 +MIIM_STATE: Final = 1 +MIIM_ID: Final = 2 +MIIM_SUBMENU: Final = 4 +MIIM_CHECKMARKS: Final = 8 +MIIM_TYPE: Final = 16 +MIIM_DATA: Final = 32 +MIIM_STRING: Final = 64 +MIIM_BITMAP: Final = 128 +MIIM_FTYPE: Final = 256 +HBMMENU_CALLBACK: Final = -1 +HBMMENU_SYSTEM: Final = 1 +HBMMENU_MBAR_RESTORE: Final = 2 +HBMMENU_MBAR_MINIMIZE: Final = 3 +HBMMENU_MBAR_CLOSE: Final = 5 +HBMMENU_MBAR_CLOSE_D: Final = 6 +HBMMENU_MBAR_MINIMIZE_D: Final = 7 +HBMMENU_POPUP_CLOSE: Final = 8 +HBMMENU_POPUP_RESTORE: Final = 9 +HBMMENU_POPUP_MAXIMIZE: Final = 10 +HBMMENU_POPUP_MINIMIZE: Final = 11 +GMDI_USEDISABLED: Final = 1 +GMDI_GOINTOPOPUPS: Final = 2 +TPM_LEFTBUTTON: Final = 0 +TPM_RIGHTBUTTON: Final = 2 +TPM_LEFTALIGN: Final = 0 +TPM_CENTERALIGN: Final = 4 +TPM_RIGHTALIGN: Final = 8 +TPM_TOPALIGN: Final = 0 +TPM_VCENTERALIGN: Final = 16 +TPM_BOTTOMALIGN: Final = 32 +TPM_HORIZONTAL: Final = 0 +TPM_VERTICAL: Final = 64 +TPM_NONOTIFY: Final = 128 +TPM_RETURNCMD: Final = 256 +TPM_RECURSE: Final = 1 +DOF_EXECUTABLE: Final = 32769 +DOF_DOCUMENT: Final = 32770 +DOF_DIRECTORY: Final = 32771 +DOF_MULTIPLE: Final = 32772 +DOF_PROGMAN: Final = 1 +DOF_SHELLDATA: Final = 2 +DO_DROPFILE: Final = 1162627398 +DO_PRINTFILE: Final = 1414419024 +DT_TOP: Final = 0 +DT_LEFT: Final = 0 +DT_CENTER: Final = 1 +DT_RIGHT: Final = 2 +DT_VCENTER: Final = 4 +DT_BOTTOM: Final = 8 +DT_WORDBREAK: Final = 16 +DT_SINGLELINE: Final = 32 +DT_EXPANDTABS: Final = 64 +DT_TABSTOP: Final = 128 +DT_NOCLIP: Final = 256 +DT_EXTERNALLEADING: Final = 512 +DT_CALCRECT: Final = 1024 +DT_NOPREFIX: Final = 2048 +DT_INTERNAL: Final = 4096 +DT_EDITCONTROL: Final = 8192 +DT_PATH_ELLIPSIS: Final = 16384 +DT_END_ELLIPSIS: Final = 32768 +DT_MODIFYSTRING: Final = 65536 +DT_RTLREADING: Final = 131072 +DT_WORD_ELLIPSIS: Final = 262144 +DST_COMPLEX: Final = 0 +DST_TEXT: Final = 1 +DST_PREFIXTEXT: Final = 2 +DST_ICON: Final = 3 +DST_BITMAP: Final = 4 +DSS_NORMAL: Final = 0 +DSS_UNION: Final = 16 +DSS_DISABLED: Final = 32 +DSS_MONO: Final = 128 +DSS_RIGHT: Final = 32768 +DCX_WINDOW: Final = 1 +DCX_CACHE: Final = 2 +DCX_NORESETATTRS: Final = 4 +DCX_CLIPCHILDREN: Final = 8 +DCX_CLIPSIBLINGS: Final = 16 +DCX_PARENTCLIP: Final = 32 +DCX_EXCLUDERGN: Final = 64 +DCX_INTERSECTRGN: Final = 128 +DCX_EXCLUDEUPDATE: Final = 256 +DCX_INTERSECTUPDATE: Final = 512 +DCX_LOCKWINDOWUPDATE: Final = 1024 +DCX_VALIDATE: Final = 2097152 +CUDR_NORMAL: Final = 0 +CUDR_NOSNAPTOGRID: Final = 1 +CUDR_NORESOLVEPOSITIONS: Final = 2 +CUDR_NOCLOSEGAPS: Final = 4 +CUDR_NEGATIVECOORDS: Final = 8 +CUDR_NOPRIMARY: Final = 16 +RDW_INVALIDATE: Final = 1 +RDW_INTERNALPAINT: Final = 2 +RDW_ERASE: Final = 4 +RDW_VALIDATE: Final = 8 +RDW_NOINTERNALPAINT: Final = 16 +RDW_NOERASE: Final = 32 +RDW_NOCHILDREN: Final = 64 +RDW_ALLCHILDREN: Final = 128 +RDW_UPDATENOW: Final = 256 +RDW_ERASENOW: Final = 512 +RDW_FRAME: Final = 1024 +RDW_NOFRAME: Final = 2048 +SW_SCROLLCHILDREN: Final = 1 +SW_INVALIDATE: Final = 2 +SW_ERASE: Final = 4 +SW_SMOOTHSCROLL: Final = 16 +ESB_ENABLE_BOTH: Final = 0 +ESB_DISABLE_BOTH: Final = 3 +ESB_DISABLE_LEFT: Final = 1 +ESB_DISABLE_RIGHT: Final = 2 +ESB_DISABLE_UP: Final = 1 +ESB_DISABLE_DOWN: 
Final = 2 +ESB_DISABLE_LTUP: Final = ESB_DISABLE_LEFT +ESB_DISABLE_RTDN: Final = ESB_DISABLE_RIGHT +HELPINFO_WINDOW: Final = 1 +HELPINFO_MENUITEM: Final = 2 +MB_OK: Final = 0 +MB_OKCANCEL: Final = 1 +MB_ABORTRETRYIGNORE: Final = 2 +MB_YESNOCANCEL: Final = 3 +MB_YESNO: Final = 4 +MB_RETRYCANCEL: Final = 5 +MB_ICONHAND: Final = 16 +MB_ICONQUESTION: Final = 32 +MB_ICONEXCLAMATION: Final = 48 +MB_ICONASTERISK: Final = 64 +MB_ICONWARNING: Final = MB_ICONEXCLAMATION +MB_ICONERROR: Final = MB_ICONHAND +MB_ICONINFORMATION: Final = MB_ICONASTERISK +MB_ICONSTOP: Final = MB_ICONHAND +MB_DEFBUTTON1: Final = 0 +MB_DEFBUTTON2: Final = 256 +MB_DEFBUTTON3: Final = 512 +MB_DEFBUTTON4: Final = 768 +MB_APPLMODAL: Final = 0 +MB_SYSTEMMODAL: Final = 4096 +MB_TASKMODAL: Final = 8192 +MB_HELP: Final = 16384 +MB_NOFOCUS: Final = 32768 +MB_SETFOREGROUND: Final = 65536 +MB_DEFAULT_DESKTOP_ONLY: Final = 131072 +MB_TOPMOST: Final = 262144 +MB_RIGHT: Final = 524288 +MB_RTLREADING: Final = 1048576 +MB_SERVICE_NOTIFICATION: Final = 2097152 +MB_TYPEMASK: Final = 15 +MB_USERICON: Final = 128 +MB_ICONMASK: Final = 240 +MB_DEFMASK: Final = 3840 +MB_MODEMASK: Final = 12288 +MB_MISCMASK: Final = 49152 + +CWP_ALL: Final = 0 +CWP_SKIPINVISIBLE: Final = 1 +CWP_SKIPDISABLED: Final = 2 +CWP_SKIPTRANSPARENT: Final = 4 +CTLCOLOR_MSGBOX: Final = 0 +CTLCOLOR_EDIT: Final = 1 +CTLCOLOR_LISTBOX: Final = 2 +CTLCOLOR_BTN: Final = 3 +CTLCOLOR_DLG: Final = 4 +CTLCOLOR_SCROLLBAR: Final = 5 +CTLCOLOR_STATIC: Final = 6 +CTLCOLOR_MAX: Final = 7 +COLOR_SCROLLBAR: Final = 0 +COLOR_BACKGROUND: Final = 1 +COLOR_ACTIVECAPTION: Final = 2 +COLOR_INACTIVECAPTION: Final = 3 +COLOR_MENU: Final = 4 +COLOR_WINDOW: Final = 5 +COLOR_WINDOWFRAME: Final = 6 +COLOR_MENUTEXT: Final = 7 +COLOR_WINDOWTEXT: Final = 8 +COLOR_CAPTIONTEXT: Final = 9 +COLOR_ACTIVEBORDER: Final = 10 +COLOR_INACTIVEBORDER: Final = 11 +COLOR_APPWORKSPACE: Final = 12 +COLOR_HIGHLIGHT: Final = 13 +COLOR_HIGHLIGHTTEXT: Final = 14 +COLOR_BTNFACE: Final = 15 +COLOR_BTNSHADOW: Final = 16 +COLOR_GRAYTEXT: Final = 17 +COLOR_BTNTEXT: Final = 18 +COLOR_INACTIVECAPTIONTEXT: Final = 19 +COLOR_BTNHIGHLIGHT: Final = 20 +COLOR_3DDKSHADOW: Final = 21 +COLOR_3DLIGHT: Final = 22 +COLOR_INFOTEXT: Final = 23 +COLOR_INFOBK: Final = 24 +COLOR_HOTLIGHT: Final = 26 +COLOR_GRADIENTACTIVECAPTION: Final = 27 +COLOR_GRADIENTINACTIVECAPTION: Final = 28 +COLOR_DESKTOP: Final = COLOR_BACKGROUND +COLOR_3DFACE: Final = COLOR_BTNFACE +COLOR_3DSHADOW: Final = COLOR_BTNSHADOW +COLOR_3DHIGHLIGHT: Final = COLOR_BTNHIGHLIGHT +COLOR_3DHILIGHT: Final = COLOR_BTNHIGHLIGHT +COLOR_BTNHILIGHT: Final = COLOR_BTNHIGHLIGHT +GW_HWNDFIRST: Final = 0 +GW_HWNDLAST: Final = 1 +GW_HWNDNEXT: Final = 2 +GW_HWNDPREV: Final = 3 +GW_OWNER: Final = 4 +GW_CHILD: Final = 5 +GW_ENABLEDPOPUP: Final = 6 +GW_MAX: Final = 6 +MF_INSERT: Final = 0 +MF_CHANGE: Final = 128 +MF_APPEND: Final = 256 +MF_DELETE: Final = 512 +MF_REMOVE: Final = 4096 +MF_BYCOMMAND: Final = 0 +MF_BYPOSITION: Final = 1024 +MF_SEPARATOR: Final = 2048 +MF_ENABLED: Final = 0 +MF_GRAYED: Final = 1 +MF_DISABLED: Final = 2 +MF_UNCHECKED: Final = 0 +MF_CHECKED: Final = 8 +MF_USECHECKBITMAPS: Final = 512 +MF_STRING: Final = 0 +MF_BITMAP: Final = 4 +MF_OWNERDRAW: Final = 256 +MF_POPUP: Final = 16 +MF_MENUBARBREAK: Final = 32 +MF_MENUBREAK: Final = 64 +MF_UNHILITE: Final = 0 +MF_HILITE: Final = 128 +MF_DEFAULT: Final = 4096 +MF_SYSMENU: Final = 8192 +MF_HELP: Final = 16384 +MF_RIGHTJUSTIFY: Final = 16384 +MF_MOUSESELECT: Final = 32768 +MF_END: Final = 128 +MFT_STRING: Final = MF_STRING 
+MFT_BITMAP: Final = MF_BITMAP +MFT_MENUBARBREAK: Final = MF_MENUBARBREAK +MFT_MENUBREAK: Final = MF_MENUBREAK +MFT_OWNERDRAW: Final = MF_OWNERDRAW +MFT_RADIOCHECK: Final = 512 +MFT_SEPARATOR: Final = MF_SEPARATOR +MFT_RIGHTORDER: Final = 8192 +MFT_RIGHTJUSTIFY: Final = MF_RIGHTJUSTIFY +MFS_GRAYED: Final = 3 +MFS_DISABLED: Final = MFS_GRAYED +MFS_CHECKED: Final = MF_CHECKED +MFS_HILITE: Final = MF_HILITE +MFS_ENABLED: Final = MF_ENABLED +MFS_UNCHECKED: Final = MF_UNCHECKED +MFS_UNHILITE: Final = MF_UNHILITE +MFS_DEFAULT: Final = MF_DEFAULT +MFS_MASK: Final = 4235 +MFS_HOTTRACKDRAWN: Final = 268435456 +MFS_CACHEDBMP: Final = 536870912 +MFS_BOTTOMGAPDROP: Final = 1073741824 +MFS_TOPGAPDROP: Final = -2147483648 +MFS_GAPDROP: Final = -1073741824 +SC_SIZE: Final = 61440 +SC_MOVE: Final = 61456 +SC_MINIMIZE: Final = 61472 +SC_MAXIMIZE: Final = 61488 +SC_NEXTWINDOW: Final = 61504 +SC_PREVWINDOW: Final = 61520 +SC_CLOSE: Final = 61536 +SC_VSCROLL: Final = 61552 +SC_HSCROLL: Final = 61568 +SC_MOUSEMENU: Final = 61584 +SC_KEYMENU: Final = 61696 +SC_ARRANGE: Final = 61712 +SC_RESTORE: Final = 61728 +SC_TASKLIST: Final = 61744 +SC_SCREENSAVE: Final = 61760 +SC_HOTKEY: Final = 61776 +SC_DEFAULT: Final = 61792 +SC_MONITORPOWER: Final = 61808 +SC_CONTEXTHELP: Final = 61824 +SC_SEPARATOR: Final = 61455 +SC_ICON: Final = SC_MINIMIZE +SC_ZOOM: Final = SC_MAXIMIZE +IDC_ARROW: Final = 32512 +IDC_IBEAM: Final = 32513 +IDC_WAIT: Final = 32514 +IDC_CROSS: Final = 32515 +IDC_UPARROW: Final = 32516 +IDC_SIZE: Final = 32640 +IDC_ICON: Final = 32641 +IDC_SIZENWSE: Final = 32642 +IDC_SIZENESW: Final = 32643 +IDC_SIZEWE: Final = 32644 +IDC_SIZENS: Final = 32645 +IDC_SIZEALL: Final = 32646 +IDC_NO: Final = 32648 +IDC_HAND: Final = 32649 +IDC_APPSTARTING: Final = 32650 +IDC_HELP: Final = 32651 +IMAGE_BITMAP: Final = 0 +IMAGE_ICON: Final = 1 +IMAGE_CURSOR: Final = 2 +IMAGE_ENHMETAFILE: Final = 3 +LR_DEFAULTCOLOR: Final = 0 +LR_MONOCHROME: Final = 1 +LR_COLOR: Final = 2 +LR_COPYRETURNORG: Final = 4 +LR_COPYDELETEORG: Final = 8 +LR_LOADFROMFILE: Final = 16 +LR_LOADTRANSPARENT: Final = 32 +LR_DEFAULTSIZE: Final = 64 +LR_LOADREALSIZE: Final = 128 +LR_LOADMAP3DCOLORS: Final = 4096 +LR_CREATEDIBSECTION: Final = 8192 +LR_COPYFROMRESOURCE: Final = 16384 +LR_SHARED: Final = 32768 +DI_MASK: Final = 1 +DI_IMAGE: Final = 2 +DI_NORMAL: Final = 3 +DI_COMPAT: Final = 4 +DI_DEFAULTSIZE: Final = 8 +RES_ICON: Final = 1 +RES_CURSOR: Final = 2 +OBM_CLOSE: Final = 32754 +OBM_UPARROW: Final = 32753 +OBM_DNARROW: Final = 32752 +OBM_RGARROW: Final = 32751 +OBM_LFARROW: Final = 32750 +OBM_REDUCE: Final = 32749 +OBM_ZOOM: Final = 32748 +OBM_RESTORE: Final = 32747 +OBM_REDUCED: Final = 32746 +OBM_ZOOMD: Final = 32745 +OBM_RESTORED: Final = 32744 +OBM_UPARROWD: Final = 32743 +OBM_DNARROWD: Final = 32742 +OBM_RGARROWD: Final = 32741 +OBM_LFARROWD: Final = 32740 +OBM_MNARROW: Final = 32739 +OBM_COMBO: Final = 32738 +OBM_UPARROWI: Final = 32737 +OBM_DNARROWI: Final = 32736 +OBM_RGARROWI: Final = 32735 +OBM_LFARROWI: Final = 32734 +OBM_OLD_CLOSE: Final = 32767 +OBM_SIZE: Final = 32766 +OBM_OLD_UPARROW: Final = 32765 +OBM_OLD_DNARROW: Final = 32764 +OBM_OLD_RGARROW: Final = 32763 +OBM_OLD_LFARROW: Final = 32762 +OBM_BTSIZE: Final = 32761 +OBM_CHECK: Final = 32760 +OBM_CHECKBOXES: Final = 32759 +OBM_BTNCORNERS: Final = 32758 +OBM_OLD_REDUCE: Final = 32757 +OBM_OLD_ZOOM: Final = 32756 +OBM_OLD_RESTORE: Final = 32755 +OCR_NORMAL: Final = 32512 +OCR_IBEAM: Final = 32513 +OCR_WAIT: Final = 32514 +OCR_CROSS: Final = 32515 +OCR_UP: Final = 32516 +OCR_SIZE: 
Final = 32640 +OCR_ICON: Final = 32641 +OCR_SIZENWSE: Final = 32642 +OCR_SIZENESW: Final = 32643 +OCR_SIZEWE: Final = 32644 +OCR_SIZENS: Final = 32645 +OCR_SIZEALL: Final = 32646 +OCR_ICOCUR: Final = 32647 +OCR_NO: Final = 32648 +OCR_HAND: Final = 32649 +OCR_APPSTARTING: Final = 32650 + +OIC_SAMPLE: Final = 32512 +OIC_HAND: Final = 32513 +OIC_QUES: Final = 32514 +OIC_BANG: Final = 32515 +OIC_NOTE: Final = 32516 +OIC_WINLOGO: Final = 32517 +OIC_WARNING: Final = OIC_BANG +OIC_ERROR: Final = OIC_HAND +OIC_INFORMATION: Final = OIC_NOTE +ORD_LANGDRIVER: Final = 1 +IDI_APPLICATION: Final = 32512 +IDI_HAND: Final = 32513 +IDI_QUESTION: Final = 32514 +IDI_EXCLAMATION: Final = 32515 +IDI_ASTERISK: Final = 32516 +IDI_WINLOGO: Final = 32517 +IDI_WARNING: Final = IDI_EXCLAMATION +IDI_ERROR: Final = IDI_HAND +IDI_INFORMATION: Final = IDI_ASTERISK +IDOK: Final = 1 +IDCANCEL: Final = 2 +IDABORT: Final = 3 +IDRETRY: Final = 4 +IDIGNORE: Final = 5 +IDYES: Final = 6 +IDNO: Final = 7 +IDCLOSE: Final = 8 +IDHELP: Final = 9 +ES_LEFT: Final = 0 +ES_CENTER: Final = 1 +ES_RIGHT: Final = 2 +ES_MULTILINE: Final = 4 +ES_UPPERCASE: Final = 8 +ES_LOWERCASE: Final = 16 +ES_PASSWORD: Final = 32 +ES_AUTOVSCROLL: Final = 64 +ES_AUTOHSCROLL: Final = 128 +ES_NOHIDESEL: Final = 256 +ES_OEMCONVERT: Final = 1024 +ES_READONLY: Final = 2048 +ES_WANTRETURN: Final = 4096 +ES_NUMBER: Final = 8192 +EN_SETFOCUS: Final = 256 +EN_KILLFOCUS: Final = 512 +EN_CHANGE: Final = 768 +EN_UPDATE: Final = 1024 +EN_ERRSPACE: Final = 1280 +EN_MAXTEXT: Final = 1281 +EN_HSCROLL: Final = 1537 +EN_VSCROLL: Final = 1538 +EC_LEFTMARGIN: Final = 1 +EC_RIGHTMARGIN: Final = 2 +EC_USEFONTINFO: Final = 65535 +EMSIS_COMPOSITIONSTRING: Final = 1 +EIMES_GETCOMPSTRATONCE: Final = 1 +EIMES_CANCELCOMPSTRINFOCUS: Final = 2 +EIMES_COMPLETECOMPSTRKILLFOCUS: Final = 4 +EM_GETSEL: Final = 176 +EM_SETSEL: Final = 177 +EM_GETRECT: Final = 178 +EM_SETRECT: Final = 179 +EM_SETRECTNP: Final = 180 +EM_SCROLL: Final = 181 +EM_LINESCROLL: Final = 182 +EM_SCROLLCARET: Final = 183 +EM_GETMODIFY: Final = 184 +EM_SETMODIFY: Final = 185 +EM_GETLINECOUNT: Final = 186 +EM_LINEINDEX: Final = 187 +EM_SETHANDLE: Final = 188 +EM_GETHANDLE: Final = 189 +EM_GETTHUMB: Final = 190 +EM_LINELENGTH: Final = 193 +EM_REPLACESEL: Final = 194 +EM_GETLINE: Final = 196 +EM_LIMITTEXT: Final = 197 +EM_CANUNDO: Final = 198 +EM_UNDO: Final = 199 +EM_FMTLINES: Final = 200 +EM_LINEFROMCHAR: Final = 201 +EM_SETTABSTOPS: Final = 203 +EM_SETPASSWORDCHAR: Final = 204 +EM_EMPTYUNDOBUFFER: Final = 205 +EM_GETFIRSTVISIBLELINE: Final = 206 +EM_SETREADONLY: Final = 207 +EM_SETWORDBREAKPROC: Final = 208 +EM_GETWORDBREAKPROC: Final = 209 +EM_GETPASSWORDCHAR: Final = 210 +EM_SETMARGINS: Final = 211 +EM_GETMARGINS: Final = 212 +EM_SETLIMITTEXT: Final = EM_LIMITTEXT +EM_GETLIMITTEXT: Final = 213 +EM_POSFROMCHAR: Final = 214 +EM_CHARFROMPOS: Final = 215 +EM_SETIMESTATUS: Final = 216 +EM_GETIMESTATUS: Final = 217 +WB_LEFT: Final = 0 +WB_RIGHT: Final = 1 +WB_ISDELIMITER: Final = 2 +BS_PUSHBUTTON: Final = 0 +BS_DEFPUSHBUTTON: Final = 1 +BS_CHECKBOX: Final = 2 +BS_AUTOCHECKBOX: Final = 3 +BS_RADIOBUTTON: Final = 4 +BS_3STATE: Final = 5 +BS_AUTO3STATE: Final = 6 +BS_GROUPBOX: Final = 7 +BS_USERBUTTON: Final = 8 +BS_AUTORADIOBUTTON: Final = 9 +BS_OWNERDRAW: Final = 11 +BS_LEFTTEXT: Final = 32 +BS_TEXT: Final = 0 +BS_ICON: Final = 64 +BS_BITMAP: Final = 128 +BS_LEFT: Final = 256 +BS_RIGHT: Final = 512 +BS_CENTER: Final = 768 +BS_TOP: Final = 1024 +BS_BOTTOM: Final = 2048 +BS_VCENTER: Final = 3072 +BS_PUSHLIKE: Final = 4096 
+BS_MULTILINE: Final = 8192 +BS_NOTIFY: Final = 16384 +BS_FLAT: Final = 32768 +BS_RIGHTBUTTON: Final = BS_LEFTTEXT +BN_CLICKED: Final = 0 +BN_PAINT: Final = 1 +BN_HILITE: Final = 2 +BN_UNHILITE: Final = 3 +BN_DISABLE: Final = 4 +BN_DOUBLECLICKED: Final = 5 +BN_PUSHED: Final = BN_HILITE +BN_UNPUSHED: Final = BN_UNHILITE +BN_DBLCLK: Final = BN_DOUBLECLICKED +BN_SETFOCUS: Final = 6 +BN_KILLFOCUS: Final = 7 +BM_GETCHECK: Final = 240 +BM_SETCHECK: Final = 241 +BM_GETSTATE: Final = 242 +BM_SETSTATE: Final = 243 +BM_SETSTYLE: Final = 244 +BM_CLICK: Final = 245 +BM_GETIMAGE: Final = 246 +BM_SETIMAGE: Final = 247 +BST_UNCHECKED: Final = 0 +BST_CHECKED: Final = 1 +BST_INDETERMINATE: Final = 2 +BST_PUSHED: Final = 4 +BST_FOCUS: Final = 8 +SS_LEFT: Final = 0 +SS_CENTER: Final = 1 +SS_RIGHT: Final = 2 +SS_ICON: Final = 3 +SS_BLACKRECT: Final = 4 +SS_GRAYRECT: Final = 5 +SS_WHITERECT: Final = 6 +SS_BLACKFRAME: Final = 7 +SS_GRAYFRAME: Final = 8 +SS_WHITEFRAME: Final = 9 +SS_USERITEM: Final = 10 +SS_SIMPLE: Final = 11 +SS_LEFTNOWORDWRAP: Final = 12 +SS_BITMAP: Final = 14 +SS_OWNERDRAW: Final = 13 +SS_ENHMETAFILE: Final = 15 +SS_ETCHEDHORZ: Final = 16 +SS_ETCHEDVERT: Final = 17 +SS_ETCHEDFRAME: Final = 18 +SS_TYPEMASK: Final = 31 +SS_NOPREFIX: Final = 128 +SS_NOTIFY: Final = 256 +SS_CENTERIMAGE: Final = 512 +SS_RIGHTJUST: Final = 1024 +SS_REALSIZEIMAGE: Final = 2048 +SS_SUNKEN: Final = 4096 +SS_ENDELLIPSIS: Final = 16384 +SS_PATHELLIPSIS: Final = 32768 +SS_WORDELLIPSIS: Final = 49152 +SS_ELLIPSISMASK: Final = 49152 +STM_SETICON: Final = 368 +STM_GETICON: Final = 369 +STM_SETIMAGE: Final = 370 +STM_GETIMAGE: Final = 371 +STN_CLICKED: Final = 0 +STN_DBLCLK: Final = 1 +STN_ENABLE: Final = 2 +STN_DISABLE: Final = 3 +STM_MSGMAX: Final = 372 +DWL_MSGRESULT: Final = 0 +DWL_DLGPROC: Final = 4 +DWL_USER: Final = 8 +DDL_READWRITE: Final = 0 +DDL_READONLY: Final = 1 +DDL_HIDDEN: Final = 2 +DDL_SYSTEM: Final = 4 +DDL_DIRECTORY: Final = 16 +DDL_ARCHIVE: Final = 32 +DDL_POSTMSGS: Final = 8192 +DDL_DRIVES: Final = 16384 +DDL_EXCLUSIVE: Final = 32768 + +RT_CURSOR: Final = 1 +RT_BITMAP: Final = 2 +RT_ICON: Final = 3 +RT_MENU: Final = 4 +RT_DIALOG: Final = 5 +RT_STRING: Final = 6 +RT_FONTDIR: Final = 7 +RT_FONT: Final = 8 +RT_ACCELERATOR: Final = 9 +RT_RCDATA: Final = 10 +RT_MESSAGETABLE: Final = 11 +DIFFERENCE: Final = 11 +RT_GROUP_CURSOR: Final[int] +RT_GROUP_ICON: Final[int] +RT_VERSION: Final = 16 +RT_DLGINCLUDE: Final = 17 +RT_PLUGPLAY: Final = 19 +RT_VXD: Final = 20 +RT_ANICURSOR: Final = 21 +RT_ANIICON: Final = 22 +RT_HTML: Final = 23 + +SB_HORZ: Final = 0 +SB_VERT: Final = 1 +SB_CTL: Final = 2 +SB_BOTH: Final = 3 +SB_LINEUP: Final = 0 +SB_LINELEFT: Final = 0 +SB_LINEDOWN: Final = 1 +SB_LINERIGHT: Final = 1 +SB_PAGEUP: Final = 2 +SB_PAGELEFT: Final = 2 +SB_PAGEDOWN: Final = 3 +SB_PAGERIGHT: Final = 3 +SB_THUMBPOSITION: Final = 4 +SB_THUMBTRACK: Final = 5 +SB_TOP: Final = 6 +SB_LEFT: Final = 6 +SB_BOTTOM: Final = 7 +SB_RIGHT: Final = 7 +SB_ENDSCROLL: Final = 8 +SW_HIDE: Final = 0 +SW_SHOWNORMAL: Final = 1 +SW_NORMAL: Final = 1 +SW_SHOWMINIMIZED: Final = 2 +SW_SHOWMAXIMIZED: Final = 3 +SW_MAXIMIZE: Final = 3 +SW_SHOWNOACTIVATE: Final = 4 +SW_SHOW: Final = 5 +SW_MINIMIZE: Final = 6 +SW_SHOWMINNOACTIVE: Final = 7 +SW_SHOWNA: Final = 8 +SW_RESTORE: Final = 9 +SW_SHOWDEFAULT: Final = 10 +SW_FORCEMINIMIZE: Final = 11 +SW_MAX: Final = 11 +HIDE_WINDOW: Final = 0 +SHOW_OPENWINDOW: Final = 1 +SHOW_ICONWINDOW: Final = 2 +SHOW_FULLSCREEN: Final = 3 +SHOW_OPENNOACTIVATE: Final = 4 +SW_PARENTCLOSING: Final = 1 +SW_OTHERZOOM: Final 
= 2 +SW_PARENTOPENING: Final = 3 +SW_OTHERUNZOOM: Final = 4 +AW_HOR_POSITIVE: Final = 1 +AW_HOR_NEGATIVE: Final = 2 +AW_VER_POSITIVE: Final = 4 +AW_VER_NEGATIVE: Final = 8 +AW_CENTER: Final = 16 +AW_HIDE: Final = 65536 +AW_ACTIVATE: Final = 131072 +AW_SLIDE: Final = 262144 +AW_BLEND: Final = 524288 +KF_EXTENDED: Final = 256 +KF_DLGMODE: Final = 2048 +KF_MENUMODE: Final = 4096 +KF_ALTDOWN: Final = 8192 +KF_REPEAT: Final = 16384 +KF_UP: Final = 32768 +VK_LBUTTON: Final = 1 +VK_RBUTTON: Final = 2 +VK_CANCEL: Final = 3 +VK_MBUTTON: Final = 4 +VK_BACK: Final = 8 +VK_TAB: Final = 9 +VK_CLEAR: Final = 12 +VK_RETURN: Final = 13 +VK_SHIFT: Final = 16 +VK_CONTROL: Final = 17 +VK_MENU: Final = 18 +VK_PAUSE: Final = 19 +VK_CAPITAL: Final = 20 +VK_KANA: Final = 21 +VK_HANGEUL: Final = 21 +VK_HANGUL: Final = 21 +VK_JUNJA: Final = 23 +VK_FINAL: Final = 24 +VK_HANJA: Final = 25 +VK_KANJI: Final = 25 +VK_ESCAPE: Final = 27 +VK_CONVERT: Final = 28 +VK_NONCONVERT: Final = 29 +VK_ACCEPT: Final = 30 +VK_MODECHANGE: Final = 31 +VK_SPACE: Final = 32 +VK_PRIOR: Final = 33 +VK_NEXT: Final = 34 +VK_END: Final = 35 +VK_HOME: Final = 36 +VK_LEFT: Final = 37 +VK_UP: Final = 38 +VK_RIGHT: Final = 39 +VK_DOWN: Final = 40 +VK_SELECT: Final = 41 +VK_PRINT: Final = 42 +VK_EXECUTE: Final = 43 +VK_SNAPSHOT: Final = 44 +VK_INSERT: Final = 45 +VK_DELETE: Final = 46 +VK_HELP: Final = 47 +VK_LWIN: Final = 91 +VK_RWIN: Final = 92 +VK_APPS: Final = 93 +VK_NUMPAD0: Final = 96 +VK_NUMPAD1: Final = 97 +VK_NUMPAD2: Final = 98 +VK_NUMPAD3: Final = 99 +VK_NUMPAD4: Final = 100 +VK_NUMPAD5: Final = 101 +VK_NUMPAD6: Final = 102 +VK_NUMPAD7: Final = 103 +VK_NUMPAD8: Final = 104 +VK_NUMPAD9: Final = 105 +VK_MULTIPLY: Final = 106 +VK_ADD: Final = 107 +VK_SEPARATOR: Final = 108 +VK_SUBTRACT: Final = 109 +VK_DECIMAL: Final = 110 +VK_DIVIDE: Final = 111 +VK_F1: Final = 112 +VK_F2: Final = 113 +VK_F3: Final = 114 +VK_F4: Final = 115 +VK_F5: Final = 116 +VK_F6: Final = 117 +VK_F7: Final = 118 +VK_F8: Final = 119 +VK_F9: Final = 120 +VK_F10: Final = 121 +VK_F11: Final = 122 +VK_F12: Final = 123 +VK_F13: Final = 124 +VK_F14: Final = 125 +VK_F15: Final = 126 +VK_F16: Final = 127 +VK_F17: Final = 128 +VK_F18: Final = 129 +VK_F19: Final = 130 +VK_F20: Final = 131 +VK_F21: Final = 132 +VK_F22: Final = 133 +VK_F23: Final = 134 +VK_F24: Final = 135 +VK_NUMLOCK: Final = 144 +VK_SCROLL: Final = 145 +VK_LSHIFT: Final = 160 +VK_RSHIFT: Final = 161 +VK_LCONTROL: Final = 162 +VK_RCONTROL: Final = 163 +VK_LMENU: Final = 164 +VK_RMENU: Final = 165 +VK_PROCESSKEY: Final = 229 +VK_ATTN: Final = 246 +VK_CRSEL: Final = 247 +VK_EXSEL: Final = 248 +VK_EREOF: Final = 249 +VK_PLAY: Final = 250 +VK_ZOOM: Final = 251 +VK_NONAME: Final = 252 +VK_PA1: Final = 253 +VK_OEM_CLEAR: Final = 254 + +VK_XBUTTON1: Final = 0x05 +VK_XBUTTON2: Final = 0x06 +VK_VOLUME_MUTE: Final = 0xAD +VK_VOLUME_DOWN: Final = 0xAE +VK_VOLUME_UP: Final = 0xAF +VK_MEDIA_NEXT_TRACK: Final = 0xB0 +VK_MEDIA_PREV_TRACK: Final = 0xB1 +VK_MEDIA_PLAY_PAUSE: Final = 0xB3 +VK_BROWSER_BACK: Final = 0xA6 +VK_BROWSER_FORWARD: Final = 0xA7 +WH_MIN: Final = -1 +WH_MSGFILTER: Final = -1 +WH_JOURNALRECORD: Final = 0 +WH_JOURNALPLAYBACK: Final = 1 +WH_KEYBOARD: Final = 2 +WH_GETMESSAGE: Final = 3 +WH_CALLWNDPROC: Final = 4 +WH_CBT: Final = 5 +WH_SYSMSGFILTER: Final = 6 +WH_MOUSE: Final = 7 +WH_HARDWARE: Final = 8 +WH_DEBUG: Final = 9 +WH_SHELL: Final = 10 +WH_FOREGROUNDIDLE: Final = 11 +WH_CALLWNDPROCRET: Final = 12 +WH_KEYBOARD_LL: Final = 13 +WH_MOUSE_LL: Final = 14 +WH_MAX: Final = 14 +WH_MINHOOK: Final = WH_MIN 
+WH_MAXHOOK: Final = WH_MAX +HC_ACTION: Final = 0 +HC_GETNEXT: Final = 1 +HC_SKIP: Final = 2 +HC_NOREMOVE: Final = 3 +HC_NOREM: Final = HC_NOREMOVE +HC_SYSMODALON: Final = 4 +HC_SYSMODALOFF: Final = 5 +HCBT_MOVESIZE: Final = 0 +HCBT_MINMAX: Final = 1 +HCBT_QS: Final = 2 +HCBT_CREATEWND: Final = 3 +HCBT_DESTROYWND: Final = 4 +HCBT_ACTIVATE: Final = 5 +HCBT_CLICKSKIPPED: Final = 6 +HCBT_KEYSKIPPED: Final = 7 +HCBT_SYSCOMMAND: Final = 8 +HCBT_SETFOCUS: Final = 9 +MSGF_DIALOGBOX: Final = 0 +MSGF_MESSAGEBOX: Final = 1 +MSGF_MENU: Final = 2 + +MSGF_SCROLLBAR: Final = 5 +MSGF_NEXTWINDOW: Final = 6 + +MSGF_MAX: Final = 8 +MSGF_USER: Final = 4096 +HSHELL_WINDOWCREATED: Final = 1 +HSHELL_WINDOWDESTROYED: Final = 2 +HSHELL_ACTIVATESHELLWINDOW: Final = 3 +HSHELL_WINDOWACTIVATED: Final = 4 +HSHELL_GETMINRECT: Final = 5 +HSHELL_REDRAW: Final = 6 +HSHELL_TASKMAN: Final = 7 +HSHELL_LANGUAGE: Final = 8 +HSHELL_ACCESSIBILITYSTATE: Final = 11 +ACCESS_STICKYKEYS: Final = 1 +ACCESS_FILTERKEYS: Final = 2 +ACCESS_MOUSEKEYS: Final = 3 + +LLKHF_EXTENDED: Final = 1 +LLKHF_INJECTED: Final = 16 +LLKHF_ALTDOWN: Final = 32 +LLKHF_UP: Final = 128 +LLKHF_LOWER_IL_INJECTED: Final = 2 +LLMHF_INJECTED: Final = 1 +LLMHF_LOWER_IL_INJECTED: Final = 2 + +HKL_PREV: Final = 0 +HKL_NEXT: Final = 1 +KLF_ACTIVATE: Final = 1 +KLF_SUBSTITUTE_OK: Final = 2 +KLF_UNLOADPREVIOUS: Final = 4 +KLF_REORDER: Final = 8 +KLF_REPLACELANG: Final = 16 +KLF_NOTELLSHELL: Final = 128 +KLF_SETFORPROCESS: Final = 256 +KL_NAMELENGTH: Final = 9 +DESKTOP_READOBJECTS: Final = 1 +DESKTOP_CREATEWINDOW: Final = 2 +DESKTOP_CREATEMENU: Final = 4 +DESKTOP_HOOKCONTROL: Final = 8 +DESKTOP_JOURNALRECORD: Final = 16 +DESKTOP_JOURNALPLAYBACK: Final = 32 +DESKTOP_ENUMERATE: Final = 64 +DESKTOP_WRITEOBJECTS: Final = 128 +DESKTOP_SWITCHDESKTOP: Final = 256 +DF_ALLOWOTHERACCOUNTHOOK: Final = 1 +WINSTA_ENUMDESKTOPS: Final = 1 +WINSTA_READATTRIBUTES: Final = 2 +WINSTA_ACCESSCLIPBOARD: Final = 4 +WINSTA_CREATEDESKTOP: Final = 8 +WINSTA_WRITEATTRIBUTES: Final = 16 +WINSTA_ACCESSGLOBALATOMS: Final = 32 +WINSTA_EXITWINDOWS: Final = 64 +WINSTA_ENUMERATE: Final = 256 +WINSTA_READSCREEN: Final = 512 +WSF_VISIBLE: Final = 1 +UOI_FLAGS: Final = 1 +UOI_NAME: Final = 2 +UOI_TYPE: Final = 3 +UOI_USER_SID: Final = 4 +GWL_WNDPROC: Final = -4 +GWL_HINSTANCE: Final = -6 +GWL_HWNDPARENT: Final = -8 +GWL_STYLE: Final = -16 +GWL_EXSTYLE: Final = -20 +GWL_USERDATA: Final = -21 +GWL_ID: Final = -12 +GCL_MENUNAME: Final = -8 +GCL_HBRBACKGROUND: Final = -10 +GCL_HCURSOR: Final = -12 +GCL_HICON: Final = -14 +GCL_HMODULE: Final = -16 +GCL_CBWNDEXTRA: Final = -18 +GCL_CBCLSEXTRA: Final = -20 +GCL_WNDPROC: Final = -24 +GCL_STYLE: Final = -26 +GCW_ATOM: Final = -32 +GCL_HICONSM: Final = -34 + +WM_NULL: Final = 0 +WM_CREATE: Final = 1 +WM_DESTROY: Final = 2 +WM_MOVE: Final = 3 +WM_SIZE: Final = 5 +WM_ACTIVATE: Final = 6 +WA_INACTIVE: Final = 0 +WA_ACTIVE: Final = 1 +WA_CLICKACTIVE: Final = 2 +WM_SETFOCUS: Final = 7 +WM_KILLFOCUS: Final = 8 +WM_ENABLE: Final = 10 +WM_SETREDRAW: Final = 11 +WM_SETTEXT: Final = 12 +WM_GETTEXT: Final = 13 +WM_GETTEXTLENGTH: Final = 14 +WM_PAINT: Final = 15 +WM_CLOSE: Final = 16 +WM_QUERYENDSESSION: Final = 17 +WM_QUIT: Final = 18 +WM_QUERYOPEN: Final = 19 +WM_ERASEBKGND: Final = 20 +WM_SYSCOLORCHANGE: Final = 21 +WM_ENDSESSION: Final = 22 +WM_SHOWWINDOW: Final = 24 +WM_WININICHANGE: Final = 26 +WM_SETTINGCHANGE: Final = WM_WININICHANGE +WM_DEVMODECHANGE: Final = 27 +WM_ACTIVATEAPP: Final = 28 +WM_FONTCHANGE: Final = 29 +WM_TIMECHANGE: Final = 30 +WM_CANCELMODE: Final = 31 
+WM_SETCURSOR: Final = 32 +WM_MOUSEACTIVATE: Final = 33 +WM_CHILDACTIVATE: Final = 34 +WM_QUEUESYNC: Final = 35 +WM_GETMINMAXINFO: Final = 36 +WM_PAINTICON: Final = 38 +WM_ICONERASEBKGND: Final = 39 +WM_NEXTDLGCTL: Final = 40 +WM_SPOOLERSTATUS: Final = 42 +WM_DRAWITEM: Final = 43 +WM_MEASUREITEM: Final = 44 +WM_DELETEITEM: Final = 45 +WM_VKEYTOITEM: Final = 46 +WM_CHARTOITEM: Final = 47 +WM_SETFONT: Final = 48 +WM_GETFONT: Final = 49 +WM_SETHOTKEY: Final = 50 +WM_GETHOTKEY: Final = 51 +WM_QUERYDRAGICON: Final = 55 +WM_COMPAREITEM: Final = 57 +WM_GETOBJECT: Final = 61 +WM_COMPACTING: Final = 65 +WM_COMMNOTIFY: Final = 68 +WM_WINDOWPOSCHANGING: Final = 70 +WM_WINDOWPOSCHANGED: Final = 71 +WM_POWER: Final = 72 +PWR_OK: Final = 1 +PWR_FAIL: Final = -1 +PWR_SUSPENDREQUEST: Final = 1 +PWR_SUSPENDRESUME: Final = 2 +PWR_CRITICALRESUME: Final = 3 +WM_COPYDATA: Final = 74 +WM_CANCELJOURNAL: Final = 75 +WM_INPUTLANGCHANGEREQUEST: Final = 80 +WM_INPUTLANGCHANGE: Final = 81 +WM_TCARD: Final = 82 +WM_HELP: Final = 83 +WM_USERCHANGED: Final = 84 +WM_NOTIFYFORMAT: Final = 85 +NFR_ANSI: Final = 1 +NFR_UNICODE: Final = 2 +NF_QUERY: Final = 3 +NF_REQUERY: Final = 4 +WM_STYLECHANGING: Final = 124 +WM_STYLECHANGED: Final = 125 +WM_DISPLAYCHANGE: Final = 126 +WM_GETICON: Final = 127 +WM_SETICON: Final = 128 +WM_NCCREATE: Final = 129 +WM_NCDESTROY: Final = 130 +WM_NCCALCSIZE: Final = 131 +WM_NCHITTEST: Final = 132 +WM_NCPAINT: Final = 133 +WM_NCACTIVATE: Final = 134 +WM_GETDLGCODE: Final = 135 +WM_SYNCPAINT: Final = 136 +WM_NCMOUSEMOVE: Final = 160 +WM_NCLBUTTONDOWN: Final = 161 +WM_NCLBUTTONUP: Final = 162 +WM_NCLBUTTONDBLCLK: Final = 163 +WM_NCRBUTTONDOWN: Final = 164 +WM_NCRBUTTONUP: Final = 165 +WM_NCRBUTTONDBLCLK: Final = 166 +WM_NCMBUTTONDOWN: Final = 167 +WM_NCMBUTTONUP: Final = 168 +WM_NCMBUTTONDBLCLK: Final = 169 +WM_KEYFIRST: Final = 256 +WM_KEYDOWN: Final = 256 +WM_KEYUP: Final = 257 +WM_CHAR: Final = 258 +WM_DEADCHAR: Final = 259 +WM_SYSKEYDOWN: Final = 260 +WM_SYSKEYUP: Final = 261 +WM_SYSCHAR: Final = 262 +WM_SYSDEADCHAR: Final = 263 +WM_KEYLAST: Final = 264 +WM_IME_STARTCOMPOSITION: Final = 269 +WM_IME_ENDCOMPOSITION: Final = 270 +WM_IME_COMPOSITION: Final = 271 +WM_IME_KEYLAST: Final = 271 +WM_INITDIALOG: Final = 272 +WM_COMMAND: Final = 273 +WM_SYSCOMMAND: Final = 274 +WM_TIMER: Final = 275 +WM_HSCROLL: Final = 276 +WM_VSCROLL: Final = 277 +WM_INITMENU: Final = 278 +WM_INITMENUPOPUP: Final = 279 +WM_MENUSELECT: Final = 287 +WM_MENUCHAR: Final = 288 +WM_ENTERIDLE: Final = 289 +WM_MENURBUTTONUP: Final = 290 +WM_MENUDRAG: Final = 291 +WM_MENUGETOBJECT: Final = 292 +WM_UNINITMENUPOPUP: Final = 293 +WM_MENUCOMMAND: Final = 294 +WM_CTLCOLORMSGBOX: Final = 306 +WM_CTLCOLOREDIT: Final = 307 +WM_CTLCOLORLISTBOX: Final = 308 +WM_CTLCOLORBTN: Final = 309 +WM_CTLCOLORDLG: Final = 310 +WM_CTLCOLORSCROLLBAR: Final = 311 +WM_CTLCOLORSTATIC: Final = 312 +WM_MOUSEFIRST: Final = 512 +WM_MOUSEMOVE: Final = 512 +WM_LBUTTONDOWN: Final = 513 +WM_LBUTTONUP: Final = 514 +WM_LBUTTONDBLCLK: Final = 515 +WM_RBUTTONDOWN: Final = 516 +WM_RBUTTONUP: Final = 517 +WM_RBUTTONDBLCLK: Final = 518 +WM_MBUTTONDOWN: Final = 519 +WM_MBUTTONUP: Final = 520 +WM_MBUTTONDBLCLK: Final = 521 +WM_MOUSEWHEEL: Final = 522 +WM_MOUSELAST: Final = 522 +WHEEL_DELTA: Final = 120 +WHEEL_PAGESCROLL: Final = -1 +WM_PARENTNOTIFY: Final = 528 +MENULOOP_WINDOW: Final = 0 +MENULOOP_POPUP: Final = 1 +WM_ENTERMENULOOP: Final = 529 +WM_EXITMENULOOP: Final = 530 +WM_NEXTMENU: Final = 531 +WM_SIZING: Final = 532 +WM_CAPTURECHANGED: Final = 533 +WM_MOVING: 
Final = 534 +WM_POWERBROADCAST: Final = 536 +PBT_APMQUERYSUSPEND: Final = 0 +PBT_APMQUERYSTANDBY: Final = 1 +PBT_APMQUERYSUSPENDFAILED: Final = 2 +PBT_APMQUERYSTANDBYFAILED: Final = 3 +PBT_APMSUSPEND: Final = 4 +PBT_APMSTANDBY: Final = 5 +PBT_APMRESUMECRITICAL: Final = 6 +PBT_APMRESUMESUSPEND: Final = 7 +PBT_APMRESUMESTANDBY: Final = 8 +PBTF_APMRESUMEFROMFAILURE: Final = 1 +PBT_APMBATTERYLOW: Final = 9 +PBT_APMPOWERSTATUSCHANGE: Final = 10 +PBT_APMOEMEVENT: Final = 11 +PBT_APMRESUMEAUTOMATIC: Final = 18 +WM_MDICREATE: Final = 544 +WM_MDIDESTROY: Final = 545 +WM_MDIACTIVATE: Final = 546 +WM_MDIRESTORE: Final = 547 +WM_MDINEXT: Final = 548 +WM_MDIMAXIMIZE: Final = 549 +WM_MDITILE: Final = 550 +WM_MDICASCADE: Final = 551 +WM_MDIICONARRANGE: Final = 552 +WM_MDIGETACTIVE: Final = 553 +WM_MDISETMENU: Final = 560 +WM_ENTERSIZEMOVE: Final = 561 +WM_EXITSIZEMOVE: Final = 562 +WM_DROPFILES: Final = 563 +WM_MDIREFRESHMENU: Final = 564 +WM_IME_SETCONTEXT: Final = 641 +WM_IME_NOTIFY: Final = 642 +WM_IME_CONTROL: Final = 643 +WM_IME_COMPOSITIONFULL: Final = 644 +WM_IME_SELECT: Final = 645 +WM_IME_CHAR: Final = 646 +WM_IME_REQUEST: Final = 648 +WM_IME_KEYDOWN: Final = 656 +WM_IME_KEYUP: Final = 657 +WM_MOUSEHOVER: Final = 673 +WM_MOUSELEAVE: Final = 675 +WM_CUT: Final = 768 +WM_COPY: Final = 769 +WM_PASTE: Final = 770 +WM_CLEAR: Final = 771 +WM_UNDO: Final = 772 +WM_RENDERFORMAT: Final = 773 +WM_RENDERALLFORMATS: Final = 774 +WM_DESTROYCLIPBOARD: Final = 775 +WM_DRAWCLIPBOARD: Final = 776 +WM_PAINTCLIPBOARD: Final = 777 +WM_VSCROLLCLIPBOARD: Final = 778 +WM_SIZECLIPBOARD: Final = 779 +WM_ASKCBFORMATNAME: Final = 780 +WM_CHANGECBCHAIN: Final = 781 +WM_HSCROLLCLIPBOARD: Final = 782 +WM_QUERYNEWPALETTE: Final = 783 +WM_PALETTEISCHANGING: Final = 784 +WM_PALETTECHANGED: Final = 785 +WM_HOTKEY: Final = 786 +WM_PRINT: Final = 791 +WM_HANDHELDFIRST: Final = 856 +WM_HANDHELDLAST: Final = 863 +WM_AFXFIRST: Final = 864 +WM_AFXLAST: Final = 895 +WM_PENWINFIRST: Final = 896 +WM_PENWINLAST: Final = 911 +WM_APP: Final = 32768 +WMSZ_LEFT: Final = 1 +WMSZ_RIGHT: Final = 2 +WMSZ_TOP: Final = 3 +WMSZ_TOPLEFT: Final = 4 +WMSZ_TOPRIGHT: Final = 5 +WMSZ_BOTTOM: Final = 6 +WMSZ_BOTTOMLEFT: Final = 7 +WMSZ_BOTTOMRIGHT: Final = 8 + +HTERROR: Final = -2 +HTTRANSPARENT: Final = -1 +HTNOWHERE: Final = 0 +HTCLIENT: Final = 1 +HTCAPTION: Final = 2 +HTSYSMENU: Final = 3 +HTGROWBOX: Final = 4 +HTSIZE: Final = HTGROWBOX +HTMENU: Final = 5 +HTHSCROLL: Final = 6 +HTVSCROLL: Final = 7 +HTMINBUTTON: Final = 8 +HTMAXBUTTON: Final = 9 +HTLEFT: Final = 10 +HTRIGHT: Final = 11 +HTTOP: Final = 12 +HTTOPLEFT: Final = 13 +HTTOPRIGHT: Final = 14 +HTBOTTOM: Final = 15 +HTBOTTOMLEFT: Final = 16 +HTBOTTOMRIGHT: Final = 17 +HTBORDER: Final = 18 +HTREDUCE: Final = HTMINBUTTON +HTZOOM: Final = HTMAXBUTTON +HTSIZEFIRST: Final = HTLEFT +HTSIZELAST: Final = HTBOTTOMRIGHT +HTOBJECT: Final = 19 +HTCLOSE: Final = 20 +HTHELP: Final = 21 +SMTO_NORMAL: Final = 0 +SMTO_BLOCK: Final = 1 +SMTO_ABORTIFHUNG: Final = 2 +SMTO_NOTIMEOUTIFNOTHUNG: Final = 8 +MA_ACTIVATE: Final = 1 +MA_ACTIVATEANDEAT: Final = 2 +MA_NOACTIVATE: Final = 3 +MA_NOACTIVATEANDEAT: Final = 4 +ICON_SMALL: Final = 0 +ICON_BIG: Final = 1 +SIZE_RESTORED: Final = 0 +SIZE_MINIMIZED: Final = 1 +SIZE_MAXIMIZED: Final = 2 +SIZE_MAXSHOW: Final = 3 +SIZE_MAXHIDE: Final = 4 +SIZENORMAL: Final = SIZE_RESTORED +SIZEICONIC: Final = SIZE_MINIMIZED +SIZEFULLSCREEN: Final = SIZE_MAXIMIZED +SIZEZOOMSHOW: Final = SIZE_MAXSHOW +SIZEZOOMHIDE: Final = SIZE_MAXHIDE +WVR_ALIGNTOP: Final = 16 +WVR_ALIGNLEFT: Final = 32 
+WVR_ALIGNBOTTOM: Final = 64 +WVR_ALIGNRIGHT: Final = 128 +WVR_HREDRAW: Final = 256 +WVR_VREDRAW: Final = 512 +WVR_REDRAW: Final[int] +WVR_VALIDRECTS: Final = 1024 +MK_LBUTTON: Final = 1 +MK_RBUTTON: Final = 2 +MK_SHIFT: Final = 4 +MK_CONTROL: Final = 8 +MK_MBUTTON: Final = 16 +TME_HOVER: Final = 1 +TME_LEAVE: Final = 2 +TME_QUERY: Final = 1073741824 +TME_CANCEL: Final = -2147483648 +HOVER_DEFAULT: Final = -1 +WS_OVERLAPPED: Final = 0 +WS_POPUP: Final = -2147483648 +WS_CHILD: Final = 1073741824 +WS_MINIMIZE: Final = 536870912 +WS_VISIBLE: Final = 268435456 +WS_DISABLED: Final = 134217728 +WS_CLIPSIBLINGS: Final = 67108864 +WS_CLIPCHILDREN: Final = 33554432 +WS_MAXIMIZE: Final = 16777216 +WS_CAPTION: Final = 12582912 +WS_BORDER: Final = 8388608 +WS_DLGFRAME: Final = 4194304 +WS_VSCROLL: Final = 2097152 +WS_HSCROLL: Final = 1048576 +WS_SYSMENU: Final = 524288 +WS_THICKFRAME: Final = 262144 +WS_GROUP: Final = 131072 +WS_TABSTOP: Final = 65536 +WS_MINIMIZEBOX: Final = 131072 +WS_MAXIMIZEBOX: Final = 65536 +WS_TILED: Final = WS_OVERLAPPED +WS_ICONIC: Final = WS_MINIMIZE +WS_SIZEBOX: Final = WS_THICKFRAME +WS_OVERLAPPEDWINDOW: Final[int] +WS_POPUPWINDOW: Final[int] +WS_CHILDWINDOW: Final = WS_CHILD +WS_TILEDWINDOW: Final = WS_OVERLAPPEDWINDOW +WS_EX_DLGMODALFRAME: Final = 1 +WS_EX_NOPARENTNOTIFY: Final = 4 +WS_EX_TOPMOST: Final = 8 +WS_EX_ACCEPTFILES: Final = 16 +WS_EX_TRANSPARENT: Final = 32 +WS_EX_MDICHILD: Final = 64 +WS_EX_TOOLWINDOW: Final = 128 +WS_EX_WINDOWEDGE: Final = 256 +WS_EX_CLIENTEDGE: Final = 512 +WS_EX_CONTEXTHELP: Final = 1024 +WS_EX_RIGHT: Final = 4096 +WS_EX_LEFT: Final = 0 +WS_EX_RTLREADING: Final = 8192 +WS_EX_LTRREADING: Final = 0 +WS_EX_LEFTSCROLLBAR: Final = 16384 +WS_EX_RIGHTSCROLLBAR: Final = 0 +WS_EX_CONTROLPARENT: Final = 65536 +WS_EX_STATICEDGE: Final = 131072 +WS_EX_APPWINDOW: Final = 262144 +WS_EX_OVERLAPPEDWINDOW: Final[int] +WS_EX_PALETTEWINDOW: Final[int] +WS_EX_LAYERED: Final = 0x00080000 +WS_EX_NOINHERITLAYOUT: Final = 0x00100000 +WS_EX_LAYOUTRTL: Final = 0x00400000 +WS_EX_COMPOSITED: Final = 0x02000000 +WS_EX_NOACTIVATE: Final = 0x08000000 + +CS_VREDRAW: Final = 1 +CS_HREDRAW: Final = 2 + +CS_DBLCLKS: Final = 8 +CS_OWNDC: Final = 32 +CS_CLASSDC: Final = 64 +CS_PARENTDC: Final = 128 + +CS_NOCLOSE: Final = 512 +CS_SAVEBITS: Final = 2048 +CS_BYTEALIGNCLIENT: Final = 4096 +CS_BYTEALIGNWINDOW: Final = 8192 +CS_GLOBALCLASS: Final = 16384 +CS_IME: Final = 65536 +PRF_CHECKVISIBLE: Final = 1 +PRF_NONCLIENT: Final = 2 +PRF_CLIENT: Final = 4 +PRF_ERASEBKGND: Final = 8 +PRF_CHILDREN: Final = 16 +PRF_OWNED: Final = 32 +BDR_RAISEDOUTER: Final = 1 +BDR_SUNKENOUTER: Final = 2 +BDR_RAISEDINNER: Final = 4 +BDR_SUNKENINNER: Final = 8 +BDR_OUTER: Final = 3 +BDR_INNER: Final = 12 + +EDGE_RAISED: Final[int] +EDGE_SUNKEN: Final[int] +EDGE_ETCHED: Final[int] +EDGE_BUMP: Final[int] + +ISMEX_NOSEND: Final = 0 +ISMEX_SEND: Final = 1 +ISMEX_NOTIFY: Final = 2 +ISMEX_CALLBACK: Final = 4 +ISMEX_REPLIED: Final = 8 +CW_USEDEFAULT: Final = -2147483648 +FLASHW_STOP: Final = 0 +FLASHW_CAPTION: Final = 1 +FLASHW_TRAY: Final = 2 +FLASHW_ALL: Final[int] +FLASHW_TIMER: Final = 4 +FLASHW_TIMERNOFG: Final = 12 + +DS_ABSALIGN: Final = 1 +DS_SYSMODAL: Final = 2 +DS_LOCALEDIT: Final = 32 +DS_SETFONT: Final = 64 +DS_MODALFRAME: Final = 128 +DS_NOIDLEMSG: Final = 256 +DS_SETFOREGROUND: Final = 512 +DS_3DLOOK: Final = 4 +DS_FIXEDSYS: Final = 8 +DS_NOFAILCREATE: Final = 16 +DS_CONTROL: Final = 1024 +DS_CENTER: Final = 2048 +DS_CENTERMOUSE: Final = 4096 +DS_CONTEXTHELP: Final = 8192 +DM_GETDEFID: Final[int] 
+DM_SETDEFID: Final[int] +DM_REPOSITION: Final[int] + +DC_HASDEFID: Final = 21323 +DLGC_WANTARROWS: Final = 1 +DLGC_WANTTAB: Final = 2 +DLGC_WANTALLKEYS: Final = 4 +DLGC_WANTMESSAGE: Final = 4 +DLGC_HASSETSEL: Final = 8 +DLGC_DEFPUSHBUTTON: Final = 16 +DLGC_UNDEFPUSHBUTTON: Final = 32 +DLGC_RADIOBUTTON: Final = 64 +DLGC_WANTCHARS: Final = 128 +DLGC_STATIC: Final = 256 +DLGC_BUTTON: Final = 8192 +LB_CTLCODE: Final = 0 +LB_OKAY: Final = 0 +LB_ERR: Final = -1 +LB_ERRSPACE: Final = -2 +LBN_ERRSPACE: Final = -2 +LBN_SELCHANGE: Final = 1 +LBN_DBLCLK: Final = 2 +LBN_SELCANCEL: Final = 3 +LBN_SETFOCUS: Final = 4 +LBN_KILLFOCUS: Final = 5 +LB_ADDSTRING: Final = 384 +LB_INSERTSTRING: Final = 385 +LB_DELETESTRING: Final = 386 +LB_SELITEMRANGEEX: Final = 387 +LB_RESETCONTENT: Final = 388 +LB_SETSEL: Final = 389 +LB_SETCURSEL: Final = 390 +LB_GETSEL: Final = 391 +LB_GETCURSEL: Final = 392 +LB_GETTEXT: Final = 393 +LB_GETTEXTLEN: Final = 394 +LB_GETCOUNT: Final = 395 +LB_SELECTSTRING: Final = 396 +LB_DIR: Final = 397 +LB_GETTOPINDEX: Final = 398 +LB_FINDSTRING: Final = 399 +LB_GETSELCOUNT: Final = 400 +LB_GETSELITEMS: Final = 401 +LB_SETTABSTOPS: Final = 402 +LB_GETHORIZONTALEXTENT: Final = 403 +LB_SETHORIZONTALEXTENT: Final = 404 +LB_SETCOLUMNWIDTH: Final = 405 +LB_ADDFILE: Final = 406 +LB_SETTOPINDEX: Final = 407 +LB_GETITEMRECT: Final = 408 +LB_GETITEMDATA: Final = 409 +LB_SETITEMDATA: Final = 410 +LB_SELITEMRANGE: Final = 411 +LB_SETANCHORINDEX: Final = 412 +LB_GETANCHORINDEX: Final = 413 +LB_SETCARETINDEX: Final = 414 +LB_GETCARETINDEX: Final = 415 +LB_SETITEMHEIGHT: Final = 416 +LB_GETITEMHEIGHT: Final = 417 +LB_FINDSTRINGEXACT: Final = 418 +LB_SETLOCALE: Final = 421 +LB_GETLOCALE: Final = 422 +LB_SETCOUNT: Final = 423 +LB_INITSTORAGE: Final = 424 +LB_ITEMFROMPOINT: Final = 425 +LB_MSGMAX: Final = 432 +LBS_NOTIFY: Final = 1 +LBS_SORT: Final = 2 +LBS_NOREDRAW: Final = 4 +LBS_MULTIPLESEL: Final = 8 +LBS_OWNERDRAWFIXED: Final = 16 +LBS_OWNERDRAWVARIABLE: Final = 32 +LBS_HASSTRINGS: Final = 64 +LBS_USETABSTOPS: Final = 128 +LBS_NOINTEGRALHEIGHT: Final = 256 +LBS_MULTICOLUMN: Final = 512 +LBS_WANTKEYBOARDINPUT: Final = 1024 +LBS_EXTENDEDSEL: Final = 2048 +LBS_DISABLENOSCROLL: Final = 4096 +LBS_NODATA: Final = 8192 +LBS_NOSEL: Final = 16384 +LBS_STANDARD: Final[int] +CB_OKAY: Final = 0 +CB_ERR: Final = -1 +CB_ERRSPACE: Final = -2 +CBN_ERRSPACE: Final = -1 +CBN_SELCHANGE: Final = 1 +CBN_DBLCLK: Final = 2 +CBN_SETFOCUS: Final = 3 +CBN_KILLFOCUS: Final = 4 +CBN_EDITCHANGE: Final = 5 +CBN_EDITUPDATE: Final = 6 +CBN_DROPDOWN: Final = 7 +CBN_CLOSEUP: Final = 8 +CBN_SELENDOK: Final = 9 +CBN_SELENDCANCEL: Final = 10 +CBS_SIMPLE: Final = 1 +CBS_DROPDOWN: Final = 2 +CBS_DROPDOWNLIST: Final = 3 +CBS_OWNERDRAWFIXED: Final = 16 +CBS_OWNERDRAWVARIABLE: Final = 32 +CBS_AUTOHSCROLL: Final = 64 +CBS_OEMCONVERT: Final = 128 +CBS_SORT: Final = 256 +CBS_HASSTRINGS: Final = 512 +CBS_NOINTEGRALHEIGHT: Final = 1024 +CBS_DISABLENOSCROLL: Final = 2048 +CBS_UPPERCASE: Final = 8192 +CBS_LOWERCASE: Final = 16384 +CB_GETEDITSEL: Final = 320 +CB_LIMITTEXT: Final = 321 +CB_SETEDITSEL: Final = 322 +CB_ADDSTRING: Final = 323 +CB_DELETESTRING: Final = 324 +CB_DIR: Final = 325 +CB_GETCOUNT: Final = 326 +CB_GETCURSEL: Final = 327 +CB_GETLBTEXT: Final = 328 +CB_GETLBTEXTLEN: Final = 329 +CB_INSERTSTRING: Final = 330 +CB_RESETCONTENT: Final = 331 +CB_FINDSTRING: Final = 332 +CB_SELECTSTRING: Final = 333 +CB_SETCURSEL: Final = 334 +CB_SHOWDROPDOWN: Final = 335 +CB_GETITEMDATA: Final = 336 +CB_SETITEMDATA: Final = 337 
+CB_GETDROPPEDCONTROLRECT: Final = 338 +CB_SETITEMHEIGHT: Final = 339 +CB_GETITEMHEIGHT: Final = 340 +CB_SETEXTENDEDUI: Final = 341 +CB_GETEXTENDEDUI: Final = 342 +CB_GETDROPPEDSTATE: Final = 343 +CB_FINDSTRINGEXACT: Final = 344 +CB_SETLOCALE: Final = 345 +CB_GETLOCALE: Final = 346 +CB_GETTOPINDEX: Final = 347 +CB_SETTOPINDEX: Final = 348 +CB_GETHORIZONTALEXTENT: Final = 349 +CB_SETHORIZONTALEXTENT: Final = 350 +CB_GETDROPPEDWIDTH: Final = 351 +CB_SETDROPPEDWIDTH: Final = 352 +CB_INITSTORAGE: Final = 353 +CB_MSGMAX: Final = 354 +SBS_HORZ: Final = 0 +SBS_VERT: Final = 1 +SBS_TOPALIGN: Final = 2 +SBS_LEFTALIGN: Final = 2 +SBS_BOTTOMALIGN: Final = 4 +SBS_RIGHTALIGN: Final = 4 +SBS_SIZEBOXTOPLEFTALIGN: Final = 2 +SBS_SIZEBOXBOTTOMRIGHTALIGN: Final = 4 +SBS_SIZEBOX: Final = 8 +SBS_SIZEGRIP: Final = 16 +SBM_SETPOS: Final = 224 +SBM_GETPOS: Final = 225 +SBM_SETRANGE: Final = 226 +SBM_SETRANGEREDRAW: Final = 230 +SBM_GETRANGE: Final = 227 +SBM_ENABLE_ARROWS: Final = 228 +SBM_SETSCROLLINFO: Final = 233 +SBM_GETSCROLLINFO: Final = 234 +SIF_RANGE: Final = 1 +SIF_PAGE: Final = 2 +SIF_POS: Final = 4 +SIF_DISABLENOSCROLL: Final = 8 +SIF_TRACKPOS: Final = 16 +SIF_ALL: Final[int] +MDIS_ALLCHILDSTYLES: Final = 1 +MDITILE_VERTICAL: Final = 0 +MDITILE_HORIZONTAL: Final = 1 +MDITILE_SKIPDISABLED: Final = 2 +MDITILE_ZORDER: Final = 4 + +IMC_GETCANDIDATEPOS: Final = 7 +IMC_SETCANDIDATEPOS: Final = 8 +IMC_GETCOMPOSITIONFONT: Final = 9 +IMC_SETCOMPOSITIONFONT: Final = 10 +IMC_GETCOMPOSITIONWINDOW: Final = 11 +IMC_SETCOMPOSITIONWINDOW: Final = 12 +IMC_GETSTATUSWINDOWPOS: Final = 15 +IMC_SETSTATUSWINDOWPOS: Final = 16 +IMC_CLOSESTATUSWINDOW: Final = 33 +IMC_OPENSTATUSWINDOW: Final = 34 + +DELETE: Final = 65536 +READ_CONTROL: Final = 131072 +WRITE_DAC: Final = 262144 +WRITE_OWNER: Final = 524288 +SYNCHRONIZE: Final = 1048576 +STANDARD_RIGHTS_REQUIRED: Final = 983040 +STANDARD_RIGHTS_READ: Final = READ_CONTROL +STANDARD_RIGHTS_WRITE: Final = READ_CONTROL +STANDARD_RIGHTS_EXECUTE: Final = READ_CONTROL +STANDARD_RIGHTS_ALL: Final = 2031616 +SPECIFIC_RIGHTS_ALL: Final = 65535 +ACCESS_SYSTEM_SECURITY: Final = 16777216 +MAXIMUM_ALLOWED: Final = 33554432 +GENERIC_READ: Final = -2147483648 +GENERIC_WRITE: Final = 1073741824 +GENERIC_EXECUTE: Final = 536870912 +GENERIC_ALL: Final = 268435456 + +SERVICE_KERNEL_DRIVER: Final = 1 +SERVICE_FILE_SYSTEM_DRIVER: Final = 2 +SERVICE_ADAPTER: Final = 4 +SERVICE_RECOGNIZER_DRIVER: Final = 8 +SERVICE_DRIVER: Final[int] +SERVICE_WIN32_OWN_PROCESS: Final = 16 +SERVICE_WIN32_SHARE_PROCESS: Final = 32 +SERVICE_WIN32: Final[int] +SERVICE_INTERACTIVE_PROCESS: Final = 256 +SERVICE_TYPE_ALL: Final[int] +SERVICE_BOOT_START: Final = 0 +SERVICE_SYSTEM_START: Final = 1 +SERVICE_AUTO_START: Final = 2 +SERVICE_DEMAND_START: Final = 3 +SERVICE_DISABLED: Final = 4 +SERVICE_ERROR_IGNORE: Final = 0 +SERVICE_ERROR_NORMAL: Final = 1 +SERVICE_ERROR_SEVERE: Final = 2 +SERVICE_ERROR_CRITICAL: Final = 3 +TAPE_ERASE_SHORT: Final = 0 +TAPE_ERASE_LONG: Final = 1 +TAPE_LOAD: Final = 0 +TAPE_UNLOAD: Final = 1 +TAPE_TENSION: Final = 2 +TAPE_LOCK: Final = 3 +TAPE_UNLOCK: Final = 4 +TAPE_FORMAT: Final = 5 +TAPE_SETMARKS: Final = 0 +TAPE_FILEMARKS: Final = 1 +TAPE_SHORT_FILEMARKS: Final = 2 +TAPE_LONG_FILEMARKS: Final = 3 +TAPE_ABSOLUTE_POSITION: Final = 0 +TAPE_LOGICAL_POSITION: Final = 1 +TAPE_PSEUDO_LOGICAL_POSITION: Final = 2 +TAPE_REWIND: Final = 0 +TAPE_ABSOLUTE_BLOCK: Final = 1 +TAPE_LOGICAL_BLOCK: Final = 2 +TAPE_PSEUDO_LOGICAL_BLOCK: Final = 3 +TAPE_SPACE_END_OF_DATA: Final = 4 +TAPE_SPACE_RELATIVE_BLOCKS: 
Final = 5 +TAPE_SPACE_FILEMARKS: Final = 6 +TAPE_SPACE_SEQUENTIAL_FMKS: Final = 7 +TAPE_SPACE_SETMARKS: Final = 8 +TAPE_SPACE_SEQUENTIAL_SMKS: Final = 9 +TAPE_DRIVE_FIXED: Final = 1 +TAPE_DRIVE_SELECT: Final = 2 +TAPE_DRIVE_INITIATOR: Final = 4 +TAPE_DRIVE_ERASE_SHORT: Final = 16 +TAPE_DRIVE_ERASE_LONG: Final = 32 +TAPE_DRIVE_ERASE_BOP_ONLY: Final = 64 +TAPE_DRIVE_ERASE_IMMEDIATE: Final = 128 +TAPE_DRIVE_TAPE_CAPACITY: Final = 256 +TAPE_DRIVE_TAPE_REMAINING: Final = 512 +TAPE_DRIVE_FIXED_BLOCK: Final = 1024 +TAPE_DRIVE_VARIABLE_BLOCK: Final = 2048 +TAPE_DRIVE_WRITE_PROTECT: Final = 4096 +TAPE_DRIVE_EOT_WZ_SIZE: Final = 8192 +TAPE_DRIVE_ECC: Final = 65536 +TAPE_DRIVE_COMPRESSION: Final = 131072 +TAPE_DRIVE_PADDING: Final = 262144 +TAPE_DRIVE_REPORT_SMKS: Final = 524288 +TAPE_DRIVE_GET_ABSOLUTE_BLK: Final = 1048576 +TAPE_DRIVE_GET_LOGICAL_BLK: Final = 2097152 +TAPE_DRIVE_SET_EOT_WZ_SIZE: Final = 4194304 +TAPE_DRIVE_LOAD_UNLOAD: Final = -2147483647 +TAPE_DRIVE_TENSION: Final = -2147483646 +TAPE_DRIVE_LOCK_UNLOCK: Final = -2147483644 +TAPE_DRIVE_REWIND_IMMEDIATE: Final = -2147483640 +TAPE_DRIVE_SET_BLOCK_SIZE: Final = -2147483632 +TAPE_DRIVE_LOAD_UNLD_IMMED: Final = -2147483616 +TAPE_DRIVE_TENSION_IMMED: Final = -2147483584 +TAPE_DRIVE_LOCK_UNLK_IMMED: Final = -2147483520 +TAPE_DRIVE_SET_ECC: Final = -2147483392 +TAPE_DRIVE_SET_COMPRESSION: Final = -2147483136 +TAPE_DRIVE_SET_PADDING: Final = -2147482624 +TAPE_DRIVE_SET_REPORT_SMKS: Final = -2147481600 +TAPE_DRIVE_ABSOLUTE_BLK: Final = -2147479552 +TAPE_DRIVE_ABS_BLK_IMMED: Final = -2147475456 +TAPE_DRIVE_LOGICAL_BLK: Final = -2147467264 +TAPE_DRIVE_LOG_BLK_IMMED: Final = -2147450880 +TAPE_DRIVE_END_OF_DATA: Final = -2147418112 +TAPE_DRIVE_RELATIVE_BLKS: Final = -2147352576 +TAPE_DRIVE_FILEMARKS: Final = -2147221504 +TAPE_DRIVE_SEQUENTIAL_FMKS: Final = -2146959360 +TAPE_DRIVE_SETMARKS: Final = -2146435072 +TAPE_DRIVE_SEQUENTIAL_SMKS: Final = -2145386496 +TAPE_DRIVE_REVERSE_POSITION: Final = -2143289344 +TAPE_DRIVE_SPACE_IMMEDIATE: Final = -2139095040 +TAPE_DRIVE_WRITE_SETMARKS: Final = -2130706432 +TAPE_DRIVE_WRITE_FILEMARKS: Final = -2113929216 +TAPE_DRIVE_WRITE_SHORT_FMKS: Final = -2080374784 +TAPE_DRIVE_WRITE_LONG_FMKS: Final = -2013265920 +TAPE_DRIVE_WRITE_MARK_IMMED: Final = -1879048192 +TAPE_DRIVE_FORMAT: Final = -1610612736 +TAPE_DRIVE_FORMAT_IMMEDIATE: Final = -1073741824 +TAPE_FIXED_PARTITIONS: Final = 0 +TAPE_SELECT_PARTITIONS: Final = 1 +TAPE_INITIATOR_PARTITIONS: Final = 2 + +APPLICATION_ERROR_MASK: Final = 536870912 +ERROR_SEVERITY_SUCCESS: Final = 0 +ERROR_SEVERITY_INFORMATIONAL: Final = 1073741824 +ERROR_SEVERITY_WARNING: Final = -2147483648 +ERROR_SEVERITY_ERROR: Final = -1073741824 +MINCHAR: Final = 128 +MAXCHAR: Final = 127 +MINSHORT: Final = 32768 +MAXSHORT: Final = 32767 +MINLONG: Final = -2147483648 +MAXLONG: Final = 2147483647 +MAXBYTE: Final = 255 +MAXWORD: Final = 65535 +MAXDWORD: Final = -1 +LANG_NEUTRAL: Final = 0 +LANG_BULGARIAN: Final = 2 +LANG_CHINESE: Final = 4 +LANG_CROATIAN: Final = 26 +LANG_CZECH: Final = 5 +LANG_DANISH: Final = 6 +LANG_DUTCH: Final = 19 +LANG_ENGLISH: Final = 9 +LANG_FINNISH: Final = 11 +LANG_FRENCH: Final = 12 +LANG_GERMAN: Final = 7 +LANG_GREEK: Final = 8 +LANG_HUNGARIAN: Final = 14 +LANG_ICELANDIC: Final = 15 +LANG_ITALIAN: Final = 16 +LANG_JAPANESE: Final = 17 +LANG_KOREAN: Final = 18 +LANG_NORWEGIAN: Final = 20 +LANG_POLISH: Final = 21 +LANG_PORTUGUESE: Final = 22 +LANG_ROMANIAN: Final = 24 +LANG_RUSSIAN: Final = 25 +LANG_SLOVAK: Final = 27 +LANG_SLOVENIAN: Final = 36 +LANG_SPANISH: Final 
= 10 +LANG_SWEDISH: Final = 29 +LANG_TURKISH: Final = 31 +SUBLANG_NEUTRAL: Final = 0 +SUBLANG_DEFAULT: Final = 1 +SUBLANG_SYS_DEFAULT: Final = 2 +SUBLANG_CHINESE_TRADITIONAL: Final = 1 +SUBLANG_CHINESE_SIMPLIFIED: Final = 2 +SUBLANG_CHINESE_HONGKONG: Final = 3 +SUBLANG_CHINESE_SINGAPORE: Final = 4 +SUBLANG_DUTCH: Final = 1 +SUBLANG_DUTCH_BELGIAN: Final = 2 +SUBLANG_ENGLISH_US: Final = 1 +SUBLANG_ENGLISH_UK: Final = 2 +SUBLANG_ENGLISH_AUS: Final = 3 +SUBLANG_ENGLISH_CAN: Final = 4 +SUBLANG_ENGLISH_NZ: Final = 5 +SUBLANG_ENGLISH_EIRE: Final = 6 +SUBLANG_FRENCH: Final = 1 +SUBLANG_FRENCH_BELGIAN: Final = 2 +SUBLANG_FRENCH_CANADIAN: Final = 3 +SUBLANG_FRENCH_SWISS: Final = 4 +SUBLANG_GERMAN: Final = 1 +SUBLANG_GERMAN_SWISS: Final = 2 +SUBLANG_GERMAN_AUSTRIAN: Final = 3 +SUBLANG_ITALIAN: Final = 1 +SUBLANG_ITALIAN_SWISS: Final = 2 +SUBLANG_NORWEGIAN_BOKMAL: Final = 1 +SUBLANG_NORWEGIAN_NYNORSK: Final = 2 +SUBLANG_PORTUGUESE: Final = 2 +SUBLANG_PORTUGUESE_BRAZILIAN: Final = 1 +SUBLANG_SPANISH: Final = 1 +SUBLANG_SPANISH_MEXICAN: Final = 2 +SUBLANG_SPANISH_MODERN: Final = 3 +SORT_DEFAULT: Final = 0 +SORT_JAPANESE_XJIS: Final = 0 +SORT_JAPANESE_UNICODE: Final = 1 +SORT_CHINESE_BIG5: Final = 0 +SORT_CHINESE_UNICODE: Final = 1 +SORT_KOREAN_KSC: Final = 0 +SORT_KOREAN_UNICODE: Final = 1 def PRIMARYLANGID(lgid: int) -> int: ... def SUBLANGID(lgid: int) -> int: ... -NLS_VALID_LOCALE_MASK: int -CONTEXT_PORTABLE_32BIT: int -CONTEXT_ALPHA: int -SIZE_OF_80387_REGISTERS: int -CONTEXT_CONTROL: int -CONTEXT_FLOATING_POINT: int -CONTEXT_INTEGER: int -CONTEXT_FULL: int -PROCESS_TERMINATE: int -PROCESS_CREATE_THREAD: int -PROCESS_VM_OPERATION: int -PROCESS_VM_READ: int -PROCESS_VM_WRITE: int -PROCESS_DUP_HANDLE: int -PROCESS_CREATE_PROCESS: int -PROCESS_SET_QUOTA: int -PROCESS_SET_INFORMATION: int -PROCESS_QUERY_INFORMATION: int -PROCESS_SUSPEND_RESUME: int -PROCESS_QUERY_LIMITED_INFORMATION: int -PROCESS_SET_LIMITED_INFORMATION: int -PROCESS_ALL_ACCESS: int -THREAD_TERMINATE: int -THREAD_SUSPEND_RESUME: int -THREAD_GET_CONTEXT: int -THREAD_SET_CONTEXT: int -THREAD_SET_INFORMATION: int -THREAD_QUERY_INFORMATION: int -THREAD_SET_THREAD_TOKEN: int -THREAD_IMPERSONATE: int -THREAD_DIRECT_IMPERSONATION: int -THREAD_SET_LIMITED_INFORMATION: int -THREAD_QUERY_LIMITED_INFORMATION: int -THREAD_RESUME: int -TLS_MINIMUM_AVAILABLE: int -EVENT_MODIFY_STATE: int -MUTANT_QUERY_STATE: int -SEMAPHORE_MODIFY_STATE: int -TIME_ZONE_ID_UNKNOWN: int -TIME_ZONE_ID_STANDARD: int -TIME_ZONE_ID_DAYLIGHT: int -PROCESSOR_INTEL_386: int -PROCESSOR_INTEL_486: int -PROCESSOR_INTEL_PENTIUM: int -PROCESSOR_INTEL_860: int -PROCESSOR_MIPS_R2000: int -PROCESSOR_MIPS_R3000: int -PROCESSOR_MIPS_R4000: int -PROCESSOR_ALPHA_21064: int -PROCESSOR_PPC_601: int -PROCESSOR_PPC_603: int -PROCESSOR_PPC_604: int -PROCESSOR_PPC_620: int -SECTION_QUERY: int -SECTION_MAP_WRITE: int -SECTION_MAP_READ: int -SECTION_MAP_EXECUTE: int -SECTION_EXTEND_SIZE: int -PAGE_NOACCESS: int -PAGE_READONLY: int -PAGE_READWRITE: int -PAGE_WRITECOPY: int -PAGE_EXECUTE: int -PAGE_EXECUTE_READ: int -PAGE_EXECUTE_READWRITE: int -PAGE_EXECUTE_WRITECOPY: int -PAGE_GUARD: int -PAGE_NOCACHE: int -MEM_COMMIT: int -MEM_RESERVE: int -MEM_DECOMMIT: int -MEM_RELEASE: int -MEM_FREE: int -MEM_PRIVATE: int -MEM_MAPPED: int -MEM_TOP_DOWN: int -SEC_FILE: int -SEC_IMAGE: int -SEC_RESERVE: int -SEC_COMMIT: int -SEC_NOCACHE: int -MEM_IMAGE: int -FILE_SHARE_READ: int -FILE_SHARE_WRITE: int -FILE_SHARE_DELETE: int -FILE_ATTRIBUTE_READONLY: int -FILE_ATTRIBUTE_HIDDEN: int -FILE_ATTRIBUTE_SYSTEM: int 
-FILE_ATTRIBUTE_DIRECTORY: int -FILE_ATTRIBUTE_ARCHIVE: int -FILE_ATTRIBUTE_DEVICE: int -FILE_ATTRIBUTE_NORMAL: int -FILE_ATTRIBUTE_TEMPORARY: int -FILE_ATTRIBUTE_SPARSE_FILE: int -FILE_ATTRIBUTE_REPARSE_POINT: int -FILE_ATTRIBUTE_COMPRESSED: int -FILE_ATTRIBUTE_OFFLINE: int -FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: int -FILE_ATTRIBUTE_ENCRYPTED: int -FILE_ATTRIBUTE_VIRTUAL: int -FILE_ATTRIBUTE_ATOMIC_WRITE: int -FILE_ATTRIBUTE_XACTION_WRITE: int -FILE_NOTIFY_CHANGE_FILE_NAME: int -FILE_NOTIFY_CHANGE_DIR_NAME: int -FILE_NOTIFY_CHANGE_ATTRIBUTES: int -FILE_NOTIFY_CHANGE_SIZE: int -FILE_NOTIFY_CHANGE_LAST_WRITE: int -FILE_NOTIFY_CHANGE_SECURITY: int -FILE_CASE_SENSITIVE_SEARCH: int -FILE_CASE_PRESERVED_NAMES: int -FILE_UNICODE_ON_DISK: int -FILE_PERSISTENT_ACLS: int -FILE_FILE_COMPRESSION: int -FILE_VOLUME_IS_COMPRESSED: int -IO_COMPLETION_MODIFY_STATE: int -DUPLICATE_CLOSE_SOURCE: int -DUPLICATE_SAME_ACCESS: int -SID_MAX_SUB_AUTHORITIES: int -SECURITY_NULL_RID: int -SECURITY_WORLD_RID: int -SECURITY_LOCAL_RID: int -SECURITY_CREATOR_OWNER_RID: int -SECURITY_CREATOR_GROUP_RID: int -SECURITY_DIALUP_RID: int -SECURITY_NETWORK_RID: int -SECURITY_BATCH_RID: int -SECURITY_INTERACTIVE_RID: int -SECURITY_SERVICE_RID: int -SECURITY_ANONYMOUS_LOGON_RID: int -SECURITY_LOGON_IDS_RID: int -SECURITY_LOGON_IDS_RID_COUNT: int -SECURITY_LOCAL_SYSTEM_RID: int -SECURITY_NT_NON_UNIQUE: int -SECURITY_BUILTIN_DOMAIN_RID: int -DOMAIN_USER_RID_ADMIN: int -DOMAIN_USER_RID_GUEST: int -DOMAIN_GROUP_RID_ADMINS: int -DOMAIN_GROUP_RID_USERS: int -DOMAIN_GROUP_RID_GUESTS: int -DOMAIN_ALIAS_RID_ADMINS: int -DOMAIN_ALIAS_RID_USERS: int -DOMAIN_ALIAS_RID_GUESTS: int -DOMAIN_ALIAS_RID_POWER_USERS: int -DOMAIN_ALIAS_RID_ACCOUNT_OPS: int -DOMAIN_ALIAS_RID_SYSTEM_OPS: int -DOMAIN_ALIAS_RID_PRINT_OPS: int -DOMAIN_ALIAS_RID_BACKUP_OPS: int -DOMAIN_ALIAS_RID_REPLICATOR: int -SE_GROUP_MANDATORY: int -SE_GROUP_ENABLED_BY_DEFAULT: int -SE_GROUP_ENABLED: int -SE_GROUP_OWNER: int -SE_GROUP_LOGON_ID: int -ACL_REVISION: int -ACL_REVISION1: int -ACL_REVISION2: int -ACCESS_ALLOWED_ACE_TYPE: int -ACCESS_DENIED_ACE_TYPE: int -SYSTEM_AUDIT_ACE_TYPE: int -SYSTEM_ALARM_ACE_TYPE: int -OBJECT_INHERIT_ACE: int -CONTAINER_INHERIT_ACE: int -NO_PROPAGATE_INHERIT_ACE: int -INHERIT_ONLY_ACE: int -VALID_INHERIT_FLAGS: int -SUCCESSFUL_ACCESS_ACE_FLAG: int -FAILED_ACCESS_ACE_FLAG: int -SECURITY_DESCRIPTOR_REVISION: int -SECURITY_DESCRIPTOR_REVISION1: int -SECURITY_DESCRIPTOR_MIN_LENGTH: int -SE_OWNER_DEFAULTED: int -SE_GROUP_DEFAULTED: int -SE_DACL_PRESENT: int -SE_DACL_DEFAULTED: int -SE_SACL_PRESENT: int -SE_SACL_DEFAULTED: int -SE_SELF_RELATIVE: int -SE_PRIVILEGE_ENABLED_BY_DEFAULT: int -SE_PRIVILEGE_ENABLED: int -SE_PRIVILEGE_USED_FOR_ACCESS: int -PRIVILEGE_SET_ALL_NECESSARY: int -SE_CREATE_TOKEN_NAME: str -SE_ASSIGNPRIMARYTOKEN_NAME: str -SE_LOCK_MEMORY_NAME: str -SE_INCREASE_QUOTA_NAME: str -SE_UNSOLICITED_INPUT_NAME: str -SE_MACHINE_ACCOUNT_NAME: str -SE_TCB_NAME: str -SE_SECURITY_NAME: str -SE_TAKE_OWNERSHIP_NAME: str -SE_LOAD_DRIVER_NAME: str -SE_SYSTEM_PROFILE_NAME: str -SE_SYSTEMTIME_NAME: str -SE_PROF_SINGLE_PROCESS_NAME: str -SE_INC_BASE_PRIORITY_NAME: str -SE_CREATE_PAGEFILE_NAME: str -SE_CREATE_PERMANENT_NAME: str -SE_BACKUP_NAME: str -SE_RESTORE_NAME: str -SE_SHUTDOWN_NAME: str -SE_DEBUG_NAME: str -SE_AUDIT_NAME: str -SE_SYSTEM_ENVIRONMENT_NAME: str -SE_CHANGE_NOTIFY_NAME: str -SE_REMOTE_SHUTDOWN_NAME: str -TOKEN_ASSIGN_PRIMARY: int -TOKEN_DUPLICATE: int -TOKEN_IMPERSONATE: int -TOKEN_QUERY: int -TOKEN_QUERY_SOURCE: int -TOKEN_ADJUST_PRIVILEGES: 
int -TOKEN_ADJUST_GROUPS: int -TOKEN_ADJUST_DEFAULT: int -TOKEN_ADJUST_SESSIONID: int -TOKEN_ALL_ACCESS: int -TOKEN_READ: int -TOKEN_WRITE: int -TOKEN_EXECUTE: int -TOKEN_SOURCE_LENGTH: int -KEY_QUERY_VALUE: int -KEY_SET_VALUE: int -KEY_CREATE_SUB_KEY: int -KEY_ENUMERATE_SUB_KEYS: int -KEY_NOTIFY: int -KEY_CREATE_LINK: int -KEY_WOW64_32KEY: int -KEY_WOW64_64KEY: int -KEY_WOW64_RES: int -KEY_READ: int -KEY_WRITE: int -KEY_EXECUTE: int -KEY_ALL_ACCESS: int -REG_NOTIFY_CHANGE_ATTRIBUTES: int -REG_NOTIFY_CHANGE_SECURITY: int -REG_NONE: int -REG_SZ: int -REG_EXPAND_SZ: int -REG_BINARY: int -REG_DWORD: int -REG_DWORD_LITTLE_ENDIAN: int -REG_DWORD_BIG_ENDIAN: int -REG_LINK: int -REG_MULTI_SZ: int -REG_FULL_RESOURCE_DESCRIPTOR: int -REG_QWORD: int -REG_QWORD_LITTLE_ENDIAN: int -NULL: int -HEAP_NO_SERIALIZE: int -HEAP_GROWABLE: int -HEAP_GENERATE_EXCEPTIONS: int -HEAP_ZERO_MEMORY: int -HEAP_REALLOC_IN_PLACE_ONLY: int -HEAP_TAIL_CHECKING_ENABLED: int -HEAP_FREE_CHECKING_ENABLED: int -HEAP_DISABLE_COALESCE_ON_FREE: int -IS_TEXT_UNICODE_ASCII16: int -IS_TEXT_UNICODE_REVERSE_ASCII16: int -IS_TEXT_UNICODE_STATISTICS: int -IS_TEXT_UNICODE_REVERSE_STATISTICS: int -IS_TEXT_UNICODE_CONTROLS: int -IS_TEXT_UNICODE_REVERSE_CONTROLS: int -IS_TEXT_UNICODE_SIGNATURE: int -IS_TEXT_UNICODE_REVERSE_SIGNATURE: int -IS_TEXT_UNICODE_ILLEGAL_CHARS: int -IS_TEXT_UNICODE_ODD_LENGTH: int -IS_TEXT_UNICODE_DBCS_LEADBYTE: int -IS_TEXT_UNICODE_NULL_BYTES: int -IS_TEXT_UNICODE_UNICODE_MASK: int -IS_TEXT_UNICODE_REVERSE_MASK: int -IS_TEXT_UNICODE_NOT_UNICODE_MASK: int -IS_TEXT_UNICODE_NOT_ASCII_MASK: int -COMPRESSION_FORMAT_NONE: int -COMPRESSION_FORMAT_DEFAULT: int -COMPRESSION_FORMAT_LZNT1: int -COMPRESSION_ENGINE_STANDARD: int -COMPRESSION_ENGINE_MAXIMUM: int -MESSAGE_RESOURCE_UNICODE: int -RTL_CRITSECT_TYPE: int -RTL_RESOURCE_TYPE: int -DLL_PROCESS_ATTACH: int -DLL_THREAD_ATTACH: int -DLL_THREAD_DETACH: int -DLL_PROCESS_DETACH: int -EVENTLOG_SEQUENTIAL_READ: int -EVENTLOG_SEEK_READ: int -EVENTLOG_FORWARDS_READ: int -EVENTLOG_BACKWARDS_READ: int -EVENTLOG_SUCCESS: int -EVENTLOG_ERROR_TYPE: int -EVENTLOG_WARNING_TYPE: int -EVENTLOG_INFORMATION_TYPE: int -EVENTLOG_AUDIT_SUCCESS: int -EVENTLOG_AUDIT_FAILURE: int -EVENTLOG_START_PAIRED_EVENT: int -EVENTLOG_END_PAIRED_EVENT: int -EVENTLOG_END_ALL_PAIRED_EVENTS: int -EVENTLOG_PAIRED_EVENT_ACTIVE: int -EVENTLOG_PAIRED_EVENT_INACTIVE: int -OWNER_SECURITY_INFORMATION: int -GROUP_SECURITY_INFORMATION: int -DACL_SECURITY_INFORMATION: int -SACL_SECURITY_INFORMATION: int -IMAGE_SIZEOF_FILE_HEADER: int -IMAGE_FILE_MACHINE_UNKNOWN: int -IMAGE_NUMBEROF_DIRECTORY_ENTRIES: int -IMAGE_SIZEOF_ROM_OPTIONAL_HEADER: int -IMAGE_SIZEOF_STD_OPTIONAL_HEADER: int -IMAGE_SIZEOF_NT_OPTIONAL_HEADER: int -IMAGE_NT_OPTIONAL_HDR_MAGIC: int -IMAGE_ROM_OPTIONAL_HDR_MAGIC: int -IMAGE_SIZEOF_SHORT_NAME: int -IMAGE_SIZEOF_SECTION_HEADER: int -IMAGE_SIZEOF_SYMBOL: int -IMAGE_SYM_CLASS_NULL: int -IMAGE_SYM_CLASS_AUTOMATIC: int -IMAGE_SYM_CLASS_EXTERNAL: int -IMAGE_SYM_CLASS_STATIC: int -IMAGE_SYM_CLASS_REGISTER: int -IMAGE_SYM_CLASS_EXTERNAL_DEF: int -IMAGE_SYM_CLASS_LABEL: int -IMAGE_SYM_CLASS_UNDEFINED_LABEL: int -IMAGE_SYM_CLASS_MEMBER_OF_STRUCT: int -IMAGE_SYM_CLASS_ARGUMENT: int -IMAGE_SYM_CLASS_STRUCT_TAG: int -IMAGE_SYM_CLASS_MEMBER_OF_UNION: int -IMAGE_SYM_CLASS_UNION_TAG: int -IMAGE_SYM_CLASS_TYPE_DEFINITION: int -IMAGE_SYM_CLASS_UNDEFINED_STATIC: int -IMAGE_SYM_CLASS_ENUM_TAG: int -IMAGE_SYM_CLASS_MEMBER_OF_ENUM: int -IMAGE_SYM_CLASS_REGISTER_PARAM: int -IMAGE_SYM_CLASS_BIT_FIELD: int 
-IMAGE_SYM_CLASS_BLOCK: int -IMAGE_SYM_CLASS_FUNCTION: int -IMAGE_SYM_CLASS_END_OF_STRUCT: int -IMAGE_SYM_CLASS_FILE: int -IMAGE_SYM_CLASS_SECTION: int -IMAGE_SYM_CLASS_WEAK_EXTERNAL: int -N_BTMASK: int -N_TMASK: int -N_TMASK1: int -N_TMASK2: int -N_BTSHFT: int -N_TSHIFT: int -IMAGE_SIZEOF_AUX_SYMBOL: int -IMAGE_COMDAT_SELECT_NODUPLICATES: int -IMAGE_COMDAT_SELECT_ANY: int -IMAGE_COMDAT_SELECT_SAME_SIZE: int -IMAGE_COMDAT_SELECT_EXACT_MATCH: int -IMAGE_COMDAT_SELECT_ASSOCIATIVE: int -IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY: int -IMAGE_WEAK_EXTERN_SEARCH_LIBRARY: int -IMAGE_WEAK_EXTERN_SEARCH_ALIAS: int -IMAGE_SIZEOF_RELOCATION: int -IMAGE_REL_I386_SECTION: int -IMAGE_REL_I386_SECREL: int -IMAGE_REL_MIPS_REFHALF: int -IMAGE_REL_MIPS_REFWORD: int -IMAGE_REL_MIPS_JMPADDR: int -IMAGE_REL_MIPS_REFHI: int -IMAGE_REL_MIPS_REFLO: int -IMAGE_REL_MIPS_GPREL: int -IMAGE_REL_MIPS_LITERAL: int -IMAGE_REL_MIPS_SECTION: int -IMAGE_REL_MIPS_SECREL: int -IMAGE_REL_MIPS_REFWORDNB: int -IMAGE_REL_MIPS_PAIR: int -IMAGE_REL_ALPHA_ABSOLUTE: int -IMAGE_REL_ALPHA_REFLONG: int -IMAGE_REL_ALPHA_REFQUAD: int -IMAGE_REL_ALPHA_GPREL32: int -IMAGE_REL_ALPHA_LITERAL: int -IMAGE_REL_ALPHA_LITUSE: int -IMAGE_REL_ALPHA_GPDISP: int -IMAGE_REL_ALPHA_BRADDR: int -IMAGE_REL_ALPHA_HINT: int -IMAGE_REL_ALPHA_INLINE_REFLONG: int -IMAGE_REL_ALPHA_REFHI: int -IMAGE_REL_ALPHA_REFLO: int -IMAGE_REL_ALPHA_PAIR: int -IMAGE_REL_ALPHA_MATCH: int -IMAGE_REL_ALPHA_SECTION: int -IMAGE_REL_ALPHA_SECREL: int -IMAGE_REL_ALPHA_REFLONGNB: int -IMAGE_SIZEOF_BASE_RELOCATION: int -IMAGE_REL_BASED_ABSOLUTE: int -IMAGE_REL_BASED_HIGH: int -IMAGE_REL_BASED_LOW: int -IMAGE_REL_BASED_HIGHLOW: int -IMAGE_REL_BASED_HIGHADJ: int -IMAGE_REL_BASED_MIPS_JMPADDR: int -IMAGE_SIZEOF_LINENUMBER: int -IMAGE_ARCHIVE_START_SIZE: int -IMAGE_ARCHIVE_START: str -IMAGE_ARCHIVE_END: str -IMAGE_ARCHIVE_PAD: str -IMAGE_ARCHIVE_LINKER_MEMBER: str -IMAGE_ARCHIVE_LONGNAMES_MEMBER: str -IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR: int -IMAGE_ORDINAL_FLAG: int +NLS_VALID_LOCALE_MASK: Final = 1048575 +CONTEXT_PORTABLE_32BIT: Final = 1048576 +CONTEXT_ALPHA: Final = 131072 +SIZE_OF_80387_REGISTERS: Final = 80 +CONTEXT_CONTROL: Final = 1 +CONTEXT_FLOATING_POINT: Final = 2 +CONTEXT_INTEGER: Final = 4 +CONTEXT_FULL: Final[int] +PROCESS_TERMINATE: Final = 1 +PROCESS_CREATE_THREAD: Final = 2 +PROCESS_VM_OPERATION: Final = 8 +PROCESS_VM_READ: Final = 16 +PROCESS_VM_WRITE: Final = 32 +PROCESS_DUP_HANDLE: Final = 64 +PROCESS_CREATE_PROCESS: Final = 128 +PROCESS_SET_QUOTA: Final = 256 +PROCESS_SET_INFORMATION: Final = 512 +PROCESS_QUERY_INFORMATION: Final = 1024 +PROCESS_SUSPEND_RESUME: Final = 2048 +PROCESS_QUERY_LIMITED_INFORMATION: Final = 4096 +PROCESS_SET_LIMITED_INFORMATION: Final = 8192 +PROCESS_ALL_ACCESS: Final[int] +THREAD_TERMINATE: Final = 1 +THREAD_SUSPEND_RESUME: Final = 2 +THREAD_GET_CONTEXT: Final = 8 +THREAD_SET_CONTEXT: Final = 16 +THREAD_SET_INFORMATION: Final = 32 +THREAD_QUERY_INFORMATION: Final = 64 +THREAD_SET_THREAD_TOKEN: Final = 128 +THREAD_IMPERSONATE: Final = 256 +THREAD_DIRECT_IMPERSONATION: Final = 512 +THREAD_SET_LIMITED_INFORMATION: Final = 1024 +THREAD_QUERY_LIMITED_INFORMATION: Final = 2048 +THREAD_RESUME: Final = 4096 +TLS_MINIMUM_AVAILABLE: Final = 64 +EVENT_MODIFY_STATE: Final = 2 +MUTANT_QUERY_STATE: Final = 1 +SEMAPHORE_MODIFY_STATE: Final = 2 +TIME_ZONE_ID_UNKNOWN: Final = 0 +TIME_ZONE_ID_STANDARD: Final = 1 +TIME_ZONE_ID_DAYLIGHT: Final = 2 +PROCESSOR_INTEL_386: Final = 386 +PROCESSOR_INTEL_486: Final = 486 +PROCESSOR_INTEL_PENTIUM: Final = 586 +PROCESSOR_INTEL_860: 
Final = 860 +PROCESSOR_MIPS_R2000: Final = 2000 +PROCESSOR_MIPS_R3000: Final = 3000 +PROCESSOR_MIPS_R4000: Final = 4000 +PROCESSOR_ALPHA_21064: Final = 21064 +PROCESSOR_PPC_601: Final = 601 +PROCESSOR_PPC_603: Final = 603 +PROCESSOR_PPC_604: Final = 604 +PROCESSOR_PPC_620: Final = 620 +SECTION_QUERY: Final = 1 +SECTION_MAP_WRITE: Final = 2 +SECTION_MAP_READ: Final = 4 +SECTION_MAP_EXECUTE: Final = 8 +SECTION_EXTEND_SIZE: Final = 16 +PAGE_NOACCESS: Final = 1 +PAGE_READONLY: Final = 2 +PAGE_READWRITE: Final = 4 +PAGE_WRITECOPY: Final = 8 +PAGE_EXECUTE: Final = 16 +PAGE_EXECUTE_READ: Final = 32 +PAGE_EXECUTE_READWRITE: Final = 64 +PAGE_EXECUTE_WRITECOPY: Final = 128 +PAGE_GUARD: Final = 256 +PAGE_NOCACHE: Final = 512 +MEM_COMMIT: Final = 4096 +MEM_RESERVE: Final = 8192 +MEM_DECOMMIT: Final = 16384 +MEM_RELEASE: Final = 32768 +MEM_FREE: Final = 65536 +MEM_PRIVATE: Final = 131072 +MEM_MAPPED: Final = 262144 +MEM_TOP_DOWN: Final = 1048576 + +SEC_FILE: Final = 8388608 +SEC_IMAGE: Final = 16777216 +SEC_RESERVE: Final = 67108864 +SEC_COMMIT: Final = 134217728 +SEC_NOCACHE: Final = 268435456 +MEM_IMAGE: Final = SEC_IMAGE +FILE_SHARE_READ: Final = 1 +FILE_SHARE_WRITE: Final = 2 +FILE_SHARE_DELETE: Final = 4 +FILE_ATTRIBUTE_READONLY: Final = 1 +FILE_ATTRIBUTE_HIDDEN: Final = 2 +FILE_ATTRIBUTE_SYSTEM: Final = 4 +FILE_ATTRIBUTE_DIRECTORY: Final = 16 +FILE_ATTRIBUTE_ARCHIVE: Final = 32 +FILE_ATTRIBUTE_DEVICE: Final = 64 +FILE_ATTRIBUTE_NORMAL: Final = 128 +FILE_ATTRIBUTE_TEMPORARY: Final = 256 +FILE_ATTRIBUTE_SPARSE_FILE: Final = 512 +FILE_ATTRIBUTE_REPARSE_POINT: Final = 1024 +FILE_ATTRIBUTE_COMPRESSED: Final = 2048 +FILE_ATTRIBUTE_OFFLINE: Final = 4096 +FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Final = 8192 +FILE_ATTRIBUTE_ENCRYPTED: Final = 16384 +FILE_ATTRIBUTE_VIRTUAL: Final = 65536 + +FILE_ATTRIBUTE_ATOMIC_WRITE: Final = 512 +FILE_ATTRIBUTE_XACTION_WRITE: Final = 1024 + +FILE_NOTIFY_CHANGE_FILE_NAME: Final = 1 +FILE_NOTIFY_CHANGE_DIR_NAME: Final = 2 +FILE_NOTIFY_CHANGE_ATTRIBUTES: Final = 4 +FILE_NOTIFY_CHANGE_SIZE: Final = 8 +FILE_NOTIFY_CHANGE_LAST_WRITE: Final = 16 +FILE_NOTIFY_CHANGE_SECURITY: Final = 256 +FILE_CASE_SENSITIVE_SEARCH: Final = 1 +FILE_CASE_PRESERVED_NAMES: Final = 2 +FILE_FILE_COMPRESSION: Final = 16 +FILE_NAMED_STREAMS: Final = 262144 +FILE_PERSISTENT_ACLS: Final = 0x00000008 +FILE_READ_ONLY_VOLUME: Final = 0x00080000 +FILE_SEQUENTIAL_WRITE_ONCE: Final = 0x00100000 +FILE_SUPPORTS_ENCRYPTION: Final = 0x00020000 +FILE_SUPPORTS_EXTENDED_ATTRIBUTES: Final = 0x00800000 +FILE_SUPPORTS_HARD_LINKS: Final = 0x00400000 +FILE_SUPPORTS_OBJECT_IDS: Final = 0x00010000 +FILE_SUPPORTS_OPEN_BY_FILE_ID: Final = 0x01000000 +FILE_SUPPORTS_REPARSE_POINTS: Final = 0x00000080 +FILE_SUPPORTS_SPARSE_FILES: Final = 0x00000040 +FILE_SUPPORTS_TRANSACTIONS: Final = 0x00200000 +FILE_SUPPORTS_USN_JOURNAL: Final = 0x02000000 +FILE_UNICODE_ON_DISK: Final = 0x00000004 +FILE_VOLUME_QUOTAS: Final = 0x00000020 +FILE_VOLUME_IS_COMPRESSED: Final = 32768 +IO_COMPLETION_MODIFY_STATE: Final = 2 +DUPLICATE_CLOSE_SOURCE: Final = 1 +DUPLICATE_SAME_ACCESS: Final = 2 +SID_MAX_SUB_AUTHORITIES: Final = 15 +SECURITY_NULL_RID: Final = 0 +SECURITY_WORLD_RID: Final = 0 +SECURITY_LOCAL_RID: Final = 0x00000000 +SECURITY_CREATOR_OWNER_RID: Final = 0 +SECURITY_CREATOR_GROUP_RID: Final = 1 +SECURITY_DIALUP_RID: Final = 1 +SECURITY_NETWORK_RID: Final = 2 +SECURITY_BATCH_RID: Final = 3 +SECURITY_INTERACTIVE_RID: Final = 4 +SECURITY_SERVICE_RID: Final = 6 +SECURITY_ANONYMOUS_LOGON_RID: Final = 7 +SECURITY_LOGON_IDS_RID: Final = 5 
+SECURITY_LOGON_IDS_RID_COUNT: Final = 3 +SECURITY_LOCAL_SYSTEM_RID: Final = 18 +SECURITY_NT_NON_UNIQUE: Final = 21 +SECURITY_BUILTIN_DOMAIN_RID: Final = 32 +DOMAIN_USER_RID_ADMIN: Final = 500 +DOMAIN_USER_RID_GUEST: Final = 501 +DOMAIN_GROUP_RID_ADMINS: Final = 512 +DOMAIN_GROUP_RID_USERS: Final = 513 +DOMAIN_GROUP_RID_GUESTS: Final = 514 +DOMAIN_ALIAS_RID_ADMINS: Final = 544 +DOMAIN_ALIAS_RID_USERS: Final = 545 +DOMAIN_ALIAS_RID_GUESTS: Final = 546 +DOMAIN_ALIAS_RID_POWER_USERS: Final = 547 +DOMAIN_ALIAS_RID_ACCOUNT_OPS: Final = 548 +DOMAIN_ALIAS_RID_SYSTEM_OPS: Final = 549 +DOMAIN_ALIAS_RID_PRINT_OPS: Final = 550 +DOMAIN_ALIAS_RID_BACKUP_OPS: Final = 551 +DOMAIN_ALIAS_RID_REPLICATOR: Final = 552 +SE_GROUP_MANDATORY: Final = 1 +SE_GROUP_ENABLED_BY_DEFAULT: Final = 2 +SE_GROUP_ENABLED: Final = 4 +SE_GROUP_OWNER: Final = 8 +SE_GROUP_LOGON_ID: Final = -1073741824 +ACL_REVISION: Final = 2 +ACL_REVISION1: Final = 1 +ACL_REVISION2: Final = 2 +ACCESS_ALLOWED_ACE_TYPE: Final = 0 +ACCESS_DENIED_ACE_TYPE: Final = 1 +SYSTEM_AUDIT_ACE_TYPE: Final = 2 +SYSTEM_ALARM_ACE_TYPE: Final = 3 +OBJECT_INHERIT_ACE: Final = 1 +CONTAINER_INHERIT_ACE: Final = 2 +NO_PROPAGATE_INHERIT_ACE: Final = 4 +INHERIT_ONLY_ACE: Final = 8 +VALID_INHERIT_FLAGS: Final = 15 +SUCCESSFUL_ACCESS_ACE_FLAG: Final = 64 +FAILED_ACCESS_ACE_FLAG: Final = 128 +SECURITY_DESCRIPTOR_REVISION: Final = 1 +SECURITY_DESCRIPTOR_REVISION1: Final = 1 +SECURITY_DESCRIPTOR_MIN_LENGTH: Final = 20 +SE_OWNER_DEFAULTED: Final = 1 +SE_GROUP_DEFAULTED: Final = 2 +SE_DACL_PRESENT: Final = 4 +SE_DACL_DEFAULTED: Final = 8 +SE_SACL_PRESENT: Final = 16 +SE_SACL_DEFAULTED: Final = 32 +SE_SELF_RELATIVE: Final = 32768 +SE_PRIVILEGE_ENABLED_BY_DEFAULT: Final = 1 +SE_PRIVILEGE_ENABLED: Final = 2 +SE_PRIVILEGE_USED_FOR_ACCESS: Final = -2147483648 +PRIVILEGE_SET_ALL_NECESSARY: Final = 1 +SE_CREATE_TOKEN_NAME: Final = "SeCreateTokenPrivilege" +SE_ASSIGNPRIMARYTOKEN_NAME: Final = "SeAssignPrimaryTokenPrivilege" +SE_LOCK_MEMORY_NAME: Final = "SeLockMemoryPrivilege" +SE_INCREASE_QUOTA_NAME: Final = "SeIncreaseQuotaPrivilege" +SE_UNSOLICITED_INPUT_NAME: Final = "SeUnsolicitedInputPrivilege" +SE_MACHINE_ACCOUNT_NAME: Final = "SeMachineAccountPrivilege" +SE_TCB_NAME: Final = "SeTcbPrivilege" +SE_SECURITY_NAME: Final = "SeSecurityPrivilege" +SE_TAKE_OWNERSHIP_NAME: Final = "SeTakeOwnershipPrivilege" +SE_LOAD_DRIVER_NAME: Final = "SeLoadDriverPrivilege" +SE_SYSTEM_PROFILE_NAME: Final = "SeSystemProfilePrivilege" +SE_SYSTEMTIME_NAME: Final = "SeSystemtimePrivilege" +SE_PROF_SINGLE_PROCESS_NAME: Final = "SeProfileSingleProcessPrivilege" +SE_INC_BASE_PRIORITY_NAME: Final = "SeIncreaseBasePriorityPrivilege" +SE_CREATE_PAGEFILE_NAME: Final = "SeCreatePagefilePrivilege" +SE_CREATE_PERMANENT_NAME: Final = "SeCreatePermanentPrivilege" +SE_BACKUP_NAME: Final = "SeBackupPrivilege" +SE_RESTORE_NAME: Final = "SeRestorePrivilege" +SE_SHUTDOWN_NAME: Final = "SeShutdownPrivilege" +SE_DEBUG_NAME: Final = "SeDebugPrivilege" +SE_AUDIT_NAME: Final = "SeAuditPrivilege" +SE_SYSTEM_ENVIRONMENT_NAME: Final = "SeSystemEnvironmentPrivilege" +SE_CHANGE_NOTIFY_NAME: Final = "SeChangeNotifyPrivilege" +SE_REMOTE_SHUTDOWN_NAME: Final = "SeRemoteShutdownPrivilege" + +TOKEN_ASSIGN_PRIMARY: Final = 1 +TOKEN_DUPLICATE: Final = 2 +TOKEN_IMPERSONATE: Final = 4 +TOKEN_QUERY: Final = 8 +TOKEN_QUERY_SOURCE: Final = 16 +TOKEN_ADJUST_PRIVILEGES: Final = 32 +TOKEN_ADJUST_GROUPS: Final = 64 +TOKEN_ADJUST_DEFAULT: Final = 128 +TOKEN_ADJUST_SESSIONID: Final = 256 +TOKEN_ALL_ACCESS: Final[int] +TOKEN_READ: Final[int] 
+TOKEN_WRITE: Final[int] +TOKEN_EXECUTE: Final = STANDARD_RIGHTS_EXECUTE +TOKEN_SOURCE_LENGTH: Final = 8 + +KEY_QUERY_VALUE: Final = 1 +KEY_SET_VALUE: Final = 2 +KEY_CREATE_SUB_KEY: Final = 4 +KEY_ENUMERATE_SUB_KEYS: Final = 8 +KEY_NOTIFY: Final = 16 +KEY_CREATE_LINK: Final = 32 +KEY_WOW64_32KEY: Final = 512 +KEY_WOW64_64KEY: Final = 256 +KEY_WOW64_RES: Final = 768 +KEY_READ: Final[int] +KEY_WRITE: Final[int] +KEY_EXECUTE: Final[int] +KEY_ALL_ACCESS: Final[int] +REG_NOTIFY_CHANGE_ATTRIBUTES: Final = 2 +REG_NOTIFY_CHANGE_SECURITY: Final = 8 +REG_NONE: Final = 0 +REG_SZ: Final = 1 +REG_EXPAND_SZ: Final = 2 + +REG_BINARY: Final = 3 +REG_DWORD: Final = 4 +REG_DWORD_LITTLE_ENDIAN: Final = 4 +REG_DWORD_BIG_ENDIAN: Final = 5 +REG_LINK: Final = 6 +REG_MULTI_SZ: Final = 7 +REG_RESOURCE_LIST: Final = 8 +REG_FULL_RESOURCE_DESCRIPTOR: Final = 9 +REG_RESOURCE_REQUIREMENTS_LIST: Final = 10 +REG_QWORD: Final = 11 +REG_QWORD_LITTLE_ENDIAN: Final = 11 + +_NLSCMPERROR: Final = 2147483647 +NULL: Final = 0 +HEAP_NO_SERIALIZE: Final = 1 +HEAP_GROWABLE: Final = 2 +HEAP_GENERATE_EXCEPTIONS: Final = 4 +HEAP_ZERO_MEMORY: Final = 8 +HEAP_REALLOC_IN_PLACE_ONLY: Final = 16 +HEAP_TAIL_CHECKING_ENABLED: Final = 32 +HEAP_FREE_CHECKING_ENABLED: Final = 64 +HEAP_DISABLE_COALESCE_ON_FREE: Final = 128 +IS_TEXT_UNICODE_ASCII16: Final = 1 +IS_TEXT_UNICODE_REVERSE_ASCII16: Final = 16 +IS_TEXT_UNICODE_STATISTICS: Final = 2 +IS_TEXT_UNICODE_REVERSE_STATISTICS: Final = 32 +IS_TEXT_UNICODE_CONTROLS: Final = 4 +IS_TEXT_UNICODE_REVERSE_CONTROLS: Final = 64 +IS_TEXT_UNICODE_SIGNATURE: Final = 8 +IS_TEXT_UNICODE_REVERSE_SIGNATURE: Final = 128 +IS_TEXT_UNICODE_ILLEGAL_CHARS: Final = 256 +IS_TEXT_UNICODE_ODD_LENGTH: Final = 512 +IS_TEXT_UNICODE_DBCS_LEADBYTE: Final = 1024 +IS_TEXT_UNICODE_NULL_BYTES: Final = 4096 +IS_TEXT_UNICODE_UNICODE_MASK: Final = 15 +IS_TEXT_UNICODE_REVERSE_MASK: Final = 240 +IS_TEXT_UNICODE_NOT_UNICODE_MASK: Final = 3840 +IS_TEXT_UNICODE_NOT_ASCII_MASK: Final = 61440 +COMPRESSION_FORMAT_NONE: Final = 0 +COMPRESSION_FORMAT_DEFAULT: Final = 1 +COMPRESSION_FORMAT_LZNT1: Final = 2 +COMPRESSION_ENGINE_STANDARD: Final = 0 +COMPRESSION_ENGINE_MAXIMUM: Final = 256 +MESSAGE_RESOURCE_UNICODE: Final = 1 +RTL_CRITSECT_TYPE: Final = 0 +RTL_RESOURCE_TYPE: Final = 1 +DLL_PROCESS_ATTACH: Final = 1 +DLL_THREAD_ATTACH: Final = 2 +DLL_THREAD_DETACH: Final = 3 +DLL_PROCESS_DETACH: Final = 0 +EVENTLOG_SEQUENTIAL_READ: Final = 0x0001 +EVENTLOG_SEEK_READ: Final = 0x0002 +EVENTLOG_FORWARDS_READ: Final = 0x0004 +EVENTLOG_BACKWARDS_READ: Final = 0x0008 +EVENTLOG_SUCCESS: Final = 0x0000 +EVENTLOG_ERROR_TYPE: Final = 1 +EVENTLOG_WARNING_TYPE: Final = 2 +EVENTLOG_INFORMATION_TYPE: Final = 4 +EVENTLOG_AUDIT_SUCCESS: Final = 8 +EVENTLOG_AUDIT_FAILURE: Final = 16 +EVENTLOG_START_PAIRED_EVENT: Final = 1 +EVENTLOG_END_PAIRED_EVENT: Final = 2 +EVENTLOG_END_ALL_PAIRED_EVENTS: Final = 4 +EVENTLOG_PAIRED_EVENT_ACTIVE: Final = 8 +EVENTLOG_PAIRED_EVENT_INACTIVE: Final = 16 + +OWNER_SECURITY_INFORMATION: Final = 0x00000001 +GROUP_SECURITY_INFORMATION: Final = 0x00000002 +DACL_SECURITY_INFORMATION: Final = 0x00000004 +SACL_SECURITY_INFORMATION: Final = 0x00000008 +IMAGE_SIZEOF_FILE_HEADER: Final = 20 +IMAGE_FILE_MACHINE_UNKNOWN: Final = 0 +IMAGE_NUMBEROF_DIRECTORY_ENTRIES: Final = 16 +IMAGE_SIZEOF_ROM_OPTIONAL_HEADER: Final = 56 +IMAGE_SIZEOF_STD_OPTIONAL_HEADER: Final = 28 +IMAGE_SIZEOF_NT_OPTIONAL_HEADER: Final = 224 +IMAGE_NT_OPTIONAL_HDR_MAGIC: Final = 267 +IMAGE_ROM_OPTIONAL_HDR_MAGIC: Final = 263 +IMAGE_SIZEOF_SHORT_NAME: Final = 8 
+IMAGE_SIZEOF_SECTION_HEADER: Final = 40 +IMAGE_SIZEOF_SYMBOL: Final = 18 +IMAGE_SYM_CLASS_NULL: Final = 0 +IMAGE_SYM_CLASS_AUTOMATIC: Final = 1 +IMAGE_SYM_CLASS_EXTERNAL: Final = 2 +IMAGE_SYM_CLASS_STATIC: Final = 3 +IMAGE_SYM_CLASS_REGISTER: Final = 4 +IMAGE_SYM_CLASS_EXTERNAL_DEF: Final = 5 +IMAGE_SYM_CLASS_LABEL: Final = 6 +IMAGE_SYM_CLASS_UNDEFINED_LABEL: Final = 7 +IMAGE_SYM_CLASS_MEMBER_OF_STRUCT: Final = 8 +IMAGE_SYM_CLASS_ARGUMENT: Final = 9 +IMAGE_SYM_CLASS_STRUCT_TAG: Final = 10 +IMAGE_SYM_CLASS_MEMBER_OF_UNION: Final = 11 +IMAGE_SYM_CLASS_UNION_TAG: Final = 12 +IMAGE_SYM_CLASS_TYPE_DEFINITION: Final = 13 +IMAGE_SYM_CLASS_UNDEFINED_STATIC: Final = 14 +IMAGE_SYM_CLASS_ENUM_TAG: Final = 15 +IMAGE_SYM_CLASS_MEMBER_OF_ENUM: Final = 16 +IMAGE_SYM_CLASS_REGISTER_PARAM: Final = 17 +IMAGE_SYM_CLASS_BIT_FIELD: Final = 18 +IMAGE_SYM_CLASS_BLOCK: Final = 100 +IMAGE_SYM_CLASS_FUNCTION: Final = 101 +IMAGE_SYM_CLASS_END_OF_STRUCT: Final = 102 +IMAGE_SYM_CLASS_FILE: Final = 103 +IMAGE_SYM_CLASS_SECTION: Final = 104 +IMAGE_SYM_CLASS_WEAK_EXTERNAL: Final = 105 +N_BTMASK: Final = 15 +N_TMASK: Final = 48 +N_TMASK1: Final = 192 +N_TMASK2: Final = 240 +N_BTSHFT: Final = 4 +N_TSHIFT: Final = 2 +IMAGE_SIZEOF_AUX_SYMBOL: Final = 18 +IMAGE_COMDAT_SELECT_NODUPLICATES: Final = 1 +IMAGE_COMDAT_SELECT_ANY: Final = 2 +IMAGE_COMDAT_SELECT_SAME_SIZE: Final = 3 +IMAGE_COMDAT_SELECT_EXACT_MATCH: Final = 4 +IMAGE_COMDAT_SELECT_ASSOCIATIVE: Final = 5 +IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY: Final = 1 +IMAGE_WEAK_EXTERN_SEARCH_LIBRARY: Final = 2 +IMAGE_WEAK_EXTERN_SEARCH_ALIAS: Final = 3 +IMAGE_SIZEOF_RELOCATION: Final = 10 +IMAGE_REL_I386_SECTION: Final = 10 +IMAGE_REL_I386_SECREL: Final = 11 +IMAGE_REL_MIPS_REFHALF: Final = 1 +IMAGE_REL_MIPS_REFWORD: Final = 2 +IMAGE_REL_MIPS_JMPADDR: Final = 3 +IMAGE_REL_MIPS_REFHI: Final = 4 +IMAGE_REL_MIPS_REFLO: Final = 5 +IMAGE_REL_MIPS_GPREL: Final = 6 +IMAGE_REL_MIPS_LITERAL: Final = 7 +IMAGE_REL_MIPS_SECTION: Final = 10 +IMAGE_REL_MIPS_SECREL: Final = 11 +IMAGE_REL_MIPS_REFWORDNB: Final = 34 +IMAGE_REL_MIPS_PAIR: Final = 37 +IMAGE_REL_ALPHA_ABSOLUTE: Final = 0 +IMAGE_REL_ALPHA_REFLONG: Final = 1 +IMAGE_REL_ALPHA_REFQUAD: Final = 2 +IMAGE_REL_ALPHA_GPREL32: Final = 3 +IMAGE_REL_ALPHA_LITERAL: Final = 4 +IMAGE_REL_ALPHA_LITUSE: Final = 5 +IMAGE_REL_ALPHA_GPDISP: Final = 6 +IMAGE_REL_ALPHA_BRADDR: Final = 7 +IMAGE_REL_ALPHA_HINT: Final = 8 +IMAGE_REL_ALPHA_INLINE_REFLONG: Final = 9 +IMAGE_REL_ALPHA_REFHI: Final = 10 +IMAGE_REL_ALPHA_REFLO: Final = 11 +IMAGE_REL_ALPHA_PAIR: Final = 12 +IMAGE_REL_ALPHA_MATCH: Final = 13 +IMAGE_REL_ALPHA_SECTION: Final = 14 +IMAGE_REL_ALPHA_SECREL: Final = 15 +IMAGE_REL_ALPHA_REFLONGNB: Final = 16 +IMAGE_SIZEOF_BASE_RELOCATION: Final = 8 +IMAGE_REL_BASED_ABSOLUTE: Final = 0 +IMAGE_REL_BASED_HIGH: Final = 1 +IMAGE_REL_BASED_LOW: Final = 2 +IMAGE_REL_BASED_HIGHLOW: Final = 3 +IMAGE_REL_BASED_HIGHADJ: Final = 4 +IMAGE_REL_BASED_MIPS_JMPADDR: Final = 5 +IMAGE_SIZEOF_LINENUMBER: Final = 6 +IMAGE_ARCHIVE_START_SIZE: Final = 8 +IMAGE_ARCHIVE_START: Final = "!\n" +IMAGE_ARCHIVE_END: Final = "`\n" +IMAGE_ARCHIVE_PAD: Final = "\n" +IMAGE_ARCHIVE_LINKER_MEMBER: Final = "/ " +IMAGE_ARCHIVE_LONGNAMES_MEMBER: Final = "// " +IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR: Final = 60 +IMAGE_ORDINAL_FLAG: Final = -2147483648 def IMAGE_SNAP_BY_ORDINAL(Ordinal: int) -> bool: ... def IMAGE_ORDINAL(Ordinal: int) -> int: ... 
-IMAGE_RESOURCE_NAME_IS_STRING: int -IMAGE_RESOURCE_DATA_IS_DIRECTORY: int -IMAGE_DEBUG_TYPE_UNKNOWN: int -IMAGE_DEBUG_TYPE_COFF: int -IMAGE_DEBUG_TYPE_CODEVIEW: int -IMAGE_DEBUG_TYPE_FPO: int -IMAGE_DEBUG_TYPE_MISC: int -IMAGE_DEBUG_TYPE_EXCEPTION: int -IMAGE_DEBUG_TYPE_FIXUP: int -IMAGE_DEBUG_TYPE_OMAP_TO_SRC: int -IMAGE_DEBUG_TYPE_OMAP_FROM_SRC: int -FRAME_FPO: int -FRAME_TRAP: int -FRAME_TSS: int -SIZEOF_RFPO_DATA: int -IMAGE_DEBUG_MISC_EXENAME: int -IMAGE_SEPARATE_DEBUG_SIGNATURE: int -NEWFRAME: int -ABORTDOC: int -NEXTBAND: int -SETCOLORTABLE: int -GETCOLORTABLE: int -FLUSHOUTPUT: int -DRAFTMODE: int -QUERYESCSUPPORT: int -SETABORTPROC: int -STARTDOC: int -ENDDOC: int -GETPHYSPAGESIZE: int -GETPRINTINGOFFSET: int -GETSCALINGFACTOR: int -MFCOMMENT: int -GETPENWIDTH: int -SETCOPYCOUNT: int -SELECTPAPERSOURCE: int -DEVICEDATA: int -PASSTHROUGH: int -GETTECHNOLGY: int -GETTECHNOLOGY: int -SETLINECAP: int -SETLINEJOIN: int -SETMITERLIMIT: int -BANDINFO: int -DRAWPATTERNRECT: int -GETVECTORPENSIZE: int -GETVECTORBRUSHSIZE: int -ENABLEDUPLEX: int -GETSETPAPERBINS: int -GETSETPRINTORIENT: int -ENUMPAPERBINS: int -SETDIBSCALING: int -EPSPRINTING: int -ENUMPAPERMETRICS: int -GETSETPAPERMETRICS: int -POSTSCRIPT_DATA: int -POSTSCRIPT_IGNORE: int -MOUSETRAILS: int -GETDEVICEUNITS: int -GETEXTENDEDTEXTMETRICS: int -GETEXTENTTABLE: int -GETPAIRKERNTABLE: int -GETTRACKKERNTABLE: int -EXTTEXTOUT: int -GETFACENAME: int -DOWNLOADFACE: int -ENABLERELATIVEWIDTHS: int -ENABLEPAIRKERNING: int -SETKERNTRACK: int -SETALLJUSTVALUES: int -SETCHARSET: int -STRETCHBLT: int -GETSETSCREENPARAMS: int -BEGIN_PATH: int -CLIP_TO_PATH: int -END_PATH: int -EXT_DEVICE_CAPS: int -RESTORE_CTM: int -SAVE_CTM: int -SET_ARC_DIRECTION: int -SET_BACKGROUND_COLOR: int -SET_POLY_MODE: int -SET_SCREEN_ANGLE: int -SET_SPREAD: int -TRANSFORM_CTM: int -SET_CLIP_BOX: int -SET_BOUNDS: int -SET_MIRROR_MODE: int -OPENCHANNEL: int -DOWNLOADHEADER: int -CLOSECHANNEL: int -POSTSCRIPT_PASSTHROUGH: int -ENCAPSULATED_POSTSCRIPT: int -SP_NOTREPORTED: int -SP_ERROR: int -SP_APPABORT: int -SP_USERABORT: int -SP_OUTOFDISK: int -SP_OUTOFMEMORY: int -PR_JOBSTATUS: int -OBJ_PEN: int -OBJ_BRUSH: int -OBJ_DC: int -OBJ_METADC: int -OBJ_PAL: int -OBJ_FONT: int -OBJ_BITMAP: int -OBJ_REGION: int -OBJ_METAFILE: int -OBJ_MEMDC: int -OBJ_EXTPEN: int -OBJ_ENHMETADC: int -OBJ_ENHMETAFILE: int -OBJ_COLORSPACE: int -MWT_IDENTITY: int -MWT_LEFTMULTIPLY: int -MWT_RIGHTMULTIPLY: int -MWT_MIN: int -MWT_MAX: int -BI_RGB: int -BI_RLE8: int -BI_RLE4: int -BI_BITFIELDS: int -TMPF_FIXED_PITCH: int -TMPF_VECTOR: int -TMPF_DEVICE: int -TMPF_TRUETYPE: int -NTM_REGULAR: int -NTM_BOLD: int -NTM_ITALIC: int -LF_FACESIZE: int -LF_FULLFACESIZE: int -OUT_DEFAULT_PRECIS: int -OUT_STRING_PRECIS: int -OUT_CHARACTER_PRECIS: int -OUT_STROKE_PRECIS: int -OUT_TT_PRECIS: int -OUT_DEVICE_PRECIS: int -OUT_RASTER_PRECIS: int -OUT_TT_ONLY_PRECIS: int -OUT_OUTLINE_PRECIS: int -CLIP_DEFAULT_PRECIS: int -CLIP_CHARACTER_PRECIS: int -CLIP_STROKE_PRECIS: int -CLIP_MASK: int -CLIP_LH_ANGLES: int -CLIP_TT_ALWAYS: int -CLIP_EMBEDDED: int -DEFAULT_QUALITY: int -DRAFT_QUALITY: int -PROOF_QUALITY: int -NONANTIALIASED_QUALITY: int -ANTIALIASED_QUALITY: int -CLEARTYPE_QUALITY: int -CLEARTYPE_NATURAL_QUALITY: int -DEFAULT_PITCH: int -FIXED_PITCH: int -VARIABLE_PITCH: int -ANSI_CHARSET: int -DEFAULT_CHARSET: int -SYMBOL_CHARSET: int -SHIFTJIS_CHARSET: int -HANGEUL_CHARSET: int -CHINESEBIG5_CHARSET: int -OEM_CHARSET: int -JOHAB_CHARSET: int -HEBREW_CHARSET: int -ARABIC_CHARSET: int -GREEK_CHARSET: int 
-TURKISH_CHARSET: int -VIETNAMESE_CHARSET: int -THAI_CHARSET: int -EASTEUROPE_CHARSET: int -RUSSIAN_CHARSET: int -MAC_CHARSET: int -BALTIC_CHARSET: int -FF_DONTCARE: int -FF_ROMAN: int -FF_SWISS: int -FF_MODERN: int -FF_SCRIPT: int -FF_DECORATIVE: int -FW_DONTCARE: int -FW_THIN: int -FW_EXTRALIGHT: int -FW_LIGHT: int -FW_NORMAL: int -FW_MEDIUM: int -FW_SEMIBOLD: int -FW_BOLD: int -FW_EXTRABOLD: int -FW_HEAVY: int -FW_ULTRALIGHT: int -FW_REGULAR: int -FW_DEMIBOLD: int -FW_ULTRABOLD: int -FW_BLACK: int -BS_SOLID: int -BS_NULL: int -BS_HOLLOW: int -BS_HATCHED: int -BS_PATTERN: int -BS_INDEXED: int -BS_DIBPATTERN: int -BS_DIBPATTERNPT: int -BS_PATTERN8X8: int -BS_DIBPATTERN8X8: int -HS_HORIZONTAL: int -HS_VERTICAL: int -HS_FDIAGONAL: int -HS_BDIAGONAL: int -HS_CROSS: int -HS_DIAGCROSS: int -HS_FDIAGONAL1: int -HS_BDIAGONAL1: int -HS_SOLID: int -HS_DENSE1: int -HS_DENSE2: int -HS_DENSE3: int -HS_DENSE4: int -HS_DENSE5: int -HS_DENSE6: int -HS_DENSE7: int -HS_DENSE8: int -HS_NOSHADE: int -HS_HALFTONE: int -HS_SOLIDCLR: int -HS_DITHEREDCLR: int -HS_SOLIDTEXTCLR: int -HS_DITHEREDTEXTCLR: int -HS_SOLIDBKCLR: int -HS_DITHEREDBKCLR: int -HS_API_MAX: int -PS_SOLID: int -PS_DASH: int -PS_DOT: int -PS_DASHDOT: int -PS_DASHDOTDOT: int -PS_NULL: int -PS_INSIDEFRAME: int -PS_USERSTYLE: int -PS_ALTERNATE: int -PS_STYLE_MASK: int -PS_ENDCAP_ROUND: int -PS_ENDCAP_SQUARE: int -PS_ENDCAP_FLAT: int -PS_ENDCAP_MASK: int -PS_JOIN_ROUND: int -PS_JOIN_BEVEL: int -PS_JOIN_MITER: int -PS_JOIN_MASK: int -PS_COSMETIC: int -PS_GEOMETRIC: int -PS_TYPE_MASK: int -AD_COUNTERCLOCKWISE: int -AD_CLOCKWISE: int -DRIVERVERSION: int -TECHNOLOGY: int -HORZSIZE: int -VERTSIZE: int -HORZRES: int -VERTRES: int -BITSPIXEL: int -PLANES: int -NUMBRUSHES: int -NUMPENS: int -NUMMARKERS: int -NUMFONTS: int -NUMCOLORS: int -PDEVICESIZE: int -CURVECAPS: int -LINECAPS: int -POLYGONALCAPS: int -TEXTCAPS: int -CLIPCAPS: int -RASTERCAPS: int -ASPECTX: int -ASPECTY: int -ASPECTXY: int -LOGPIXELSX: int -LOGPIXELSY: int -SIZEPALETTE: int -NUMRESERVED: int -COLORRES: int -PHYSICALWIDTH: int -PHYSICALHEIGHT: int -PHYSICALOFFSETX: int -PHYSICALOFFSETY: int -SCALINGFACTORX: int -SCALINGFACTORY: int -VREFRESH: int -DESKTOPVERTRES: int -DESKTOPHORZRES: int -BLTALIGNMENT: int -SHADEBLENDCAPS: int -COLORMGMTCAPS: int -DT_PLOTTER: int -DT_RASDISPLAY: int -DT_RASPRINTER: int -DT_RASCAMERA: int -DT_CHARSTREAM: int -DT_METAFILE: int -DT_DISPFILE: int -CC_NONE: int -CC_CIRCLES: int -CC_PIE: int -CC_CHORD: int -CC_ELLIPSES: int -CC_WIDE: int -CC_STYLED: int -CC_WIDESTYLED: int -CC_INTERIORS: int -CC_ROUNDRECT: int -LC_NONE: int -LC_POLYLINE: int -LC_MARKER: int -LC_POLYMARKER: int -LC_WIDE: int -LC_STYLED: int -LC_WIDESTYLED: int -LC_INTERIORS: int -PC_NONE: int -PC_POLYGON: int -PC_RECTANGLE: int -PC_WINDPOLYGON: int -PC_TRAPEZOID: int -PC_SCANLINE: int -PC_WIDE: int -PC_STYLED: int -PC_WIDESTYLED: int -PC_INTERIORS: int -CP_NONE: int -CP_RECTANGLE: int -CP_REGION: int -TC_OP_CHARACTER: int -TC_OP_STROKE: int -TC_CP_STROKE: int -TC_CR_90: int -TC_CR_ANY: int -TC_SF_X_YINDEP: int -TC_SA_DOUBLE: int -TC_SA_INTEGER: int -TC_SA_CONTIN: int -TC_EA_DOUBLE: int -TC_IA_ABLE: int -TC_UA_ABLE: int -TC_SO_ABLE: int -TC_RA_ABLE: int -TC_VA_ABLE: int -TC_RESERVED: int -TC_SCROLLBLT: int -RC_BITBLT: int -RC_BANDING: int -RC_SCALING: int -RC_BITMAP64: int -RC_GDI20_OUTPUT: int -RC_GDI20_STATE: int -RC_SAVEBITMAP: int -RC_DI_BITMAP: int -RC_PALETTE: int -RC_DIBTODEV: int -RC_BIGFONT: int -RC_STRETCHBLT: int -RC_FLOODFILL: int -RC_STRETCHDIB: int -RC_OP_DX_OUTPUT: int 
-RC_DEVBITS: int -DIB_RGB_COLORS: int -DIB_PAL_COLORS: int -DIB_PAL_INDICES: int -DIB_PAL_PHYSINDICES: int -DIB_PAL_LOGINDICES: int -SYSPAL_ERROR: int -SYSPAL_STATIC: int -SYSPAL_NOSTATIC: int -CBM_CREATEDIB: int -CBM_INIT: int -FLOODFILLBORDER: int -FLOODFILLSURFACE: int -CCHDEVICENAME: int -CCHFORMNAME: int -DM_SPECVERSION: int -DM_ORIENTATION: int -DM_PAPERSIZE: int -DM_PAPERLENGTH: int -DM_PAPERWIDTH: int -DM_SCALE: int -DM_POSITION: int -DM_NUP: int -DM_DISPLAYORIENTATION: int -DM_COPIES: int -DM_DEFAULTSOURCE: int -DM_PRINTQUALITY: int -DM_COLOR: int -DM_DUPLEX: int -DM_YRESOLUTION: int -DM_TTOPTION: int -DM_COLLATE: int -DM_FORMNAME: int -DM_LOGPIXELS: int -DM_BITSPERPEL: int -DM_PELSWIDTH: int -DM_PELSHEIGHT: int -DM_DISPLAYFLAGS: int -DM_DISPLAYFREQUENCY: int -DM_ICMMETHOD: int -DM_ICMINTENT: int -DM_MEDIATYPE: int -DM_DITHERTYPE: int -DM_PANNINGWIDTH: int -DM_PANNINGHEIGHT: int -DM_DISPLAYFIXEDOUTPUT: int -DMORIENT_PORTRAIT: int -DMORIENT_LANDSCAPE: int -DMDO_DEFAULT: int -DMDO_90: int -DMDO_180: int -DMDO_270: int -DMDFO_DEFAULT: int -DMDFO_STRETCH: int -DMDFO_CENTER: int -DMPAPER_LETTER: int -DMPAPER_LETTERSMALL: int -DMPAPER_TABLOID: int -DMPAPER_LEDGER: int -DMPAPER_LEGAL: int -DMPAPER_STATEMENT: int -DMPAPER_EXECUTIVE: int -DMPAPER_A3: int -DMPAPER_A4: int -DMPAPER_A4SMALL: int -DMPAPER_A5: int -DMPAPER_B4: int -DMPAPER_B5: int -DMPAPER_FOLIO: int -DMPAPER_QUARTO: int -DMPAPER_10X14: int -DMPAPER_11X17: int -DMPAPER_NOTE: int -DMPAPER_ENV_9: int -DMPAPER_ENV_10: int -DMPAPER_ENV_11: int -DMPAPER_ENV_12: int -DMPAPER_ENV_14: int -DMPAPER_CSHEET: int -DMPAPER_DSHEET: int -DMPAPER_ESHEET: int -DMPAPER_ENV_DL: int -DMPAPER_ENV_C5: int -DMPAPER_ENV_C3: int -DMPAPER_ENV_C4: int -DMPAPER_ENV_C6: int -DMPAPER_ENV_C65: int -DMPAPER_ENV_B4: int -DMPAPER_ENV_B5: int -DMPAPER_ENV_B6: int -DMPAPER_ENV_ITALY: int -DMPAPER_ENV_MONARCH: int -DMPAPER_ENV_PERSONAL: int -DMPAPER_FANFOLD_US: int -DMPAPER_FANFOLD_STD_GERMAN: int -DMPAPER_FANFOLD_LGL_GERMAN: int -DMPAPER_ISO_B4: int -DMPAPER_JAPANESE_POSTCARD: int -DMPAPER_9X11: int -DMPAPER_10X11: int -DMPAPER_15X11: int -DMPAPER_ENV_INVITE: int -DMPAPER_RESERVED_48: int -DMPAPER_RESERVED_49: int -DMPAPER_LETTER_EXTRA: int -DMPAPER_LEGAL_EXTRA: int -DMPAPER_TABLOID_EXTRA: int -DMPAPER_A4_EXTRA: int -DMPAPER_LETTER_TRANSVERSE: int -DMPAPER_A4_TRANSVERSE: int -DMPAPER_LETTER_EXTRA_TRANSVERSE: int -DMPAPER_A_PLUS: int -DMPAPER_B_PLUS: int -DMPAPER_LETTER_PLUS: int -DMPAPER_A4_PLUS: int -DMPAPER_A5_TRANSVERSE: int -DMPAPER_B5_TRANSVERSE: int -DMPAPER_A3_EXTRA: int -DMPAPER_A5_EXTRA: int -DMPAPER_B5_EXTRA: int -DMPAPER_A2: int -DMPAPER_A3_TRANSVERSE: int -DMPAPER_A3_EXTRA_TRANSVERSE: int -DMPAPER_DBL_JAPANESE_POSTCARD: int -DMPAPER_A6: int -DMPAPER_JENV_KAKU2: int -DMPAPER_JENV_KAKU3: int -DMPAPER_JENV_CHOU3: int -DMPAPER_JENV_CHOU4: int -DMPAPER_LETTER_ROTATED: int -DMPAPER_A3_ROTATED: int -DMPAPER_A4_ROTATED: int -DMPAPER_A5_ROTATED: int -DMPAPER_B4_JIS_ROTATED: int -DMPAPER_B5_JIS_ROTATED: int -DMPAPER_JAPANESE_POSTCARD_ROTATED: int -DMPAPER_DBL_JAPANESE_POSTCARD_ROTATED: int -DMPAPER_A6_ROTATED: int -DMPAPER_JENV_KAKU2_ROTATED: int -DMPAPER_JENV_KAKU3_ROTATED: int -DMPAPER_JENV_CHOU3_ROTATED: int -DMPAPER_JENV_CHOU4_ROTATED: int -DMPAPER_B6_JIS: int -DMPAPER_B6_JIS_ROTATED: int -DMPAPER_12X11: int -DMPAPER_JENV_YOU4: int -DMPAPER_JENV_YOU4_ROTATED: int -DMPAPER_P16K: int -DMPAPER_P32K: int -DMPAPER_P32KBIG: int -DMPAPER_PENV_1: int -DMPAPER_PENV_2: int -DMPAPER_PENV_3: int -DMPAPER_PENV_4: int -DMPAPER_PENV_5: int -DMPAPER_PENV_6: int 
-DMPAPER_PENV_7: int -DMPAPER_PENV_8: int -DMPAPER_PENV_9: int -DMPAPER_PENV_10: int -DMPAPER_P16K_ROTATED: int -DMPAPER_P32K_ROTATED: int -DMPAPER_P32KBIG_ROTATED: int -DMPAPER_PENV_1_ROTATED: int -DMPAPER_PENV_2_ROTATED: int -DMPAPER_PENV_3_ROTATED: int -DMPAPER_PENV_4_ROTATED: int -DMPAPER_PENV_5_ROTATED: int -DMPAPER_PENV_6_ROTATED: int -DMPAPER_PENV_7_ROTATED: int -DMPAPER_PENV_8_ROTATED: int -DMPAPER_PENV_9_ROTATED: int -DMPAPER_PENV_10_ROTATED: int -DMPAPER_LAST: int -DMPAPER_USER: int -DMBIN_UPPER: int -DMBIN_ONLYONE: int -DMBIN_LOWER: int -DMBIN_MIDDLE: int -DMBIN_MANUAL: int -DMBIN_ENVELOPE: int -DMBIN_ENVMANUAL: int -DMBIN_AUTO: int -DMBIN_TRACTOR: int -DMBIN_SMALLFMT: int -DMBIN_LARGEFMT: int -DMBIN_LARGECAPACITY: int -DMBIN_CASSETTE: int -DMBIN_FORMSOURCE: int -DMBIN_LAST: int -DMBIN_USER: int -DMRES_DRAFT: int -DMRES_LOW: int -DMRES_MEDIUM: int -DMRES_HIGH: int -DMCOLOR_MONOCHROME: int -DMCOLOR_COLOR: int -DMDUP_SIMPLEX: int -DMDUP_VERTICAL: int -DMDUP_HORIZONTAL: int -DMTT_BITMAP: int -DMTT_DOWNLOAD: int -DMTT_SUBDEV: int -DMTT_DOWNLOAD_OUTLINE: int -DMCOLLATE_FALSE: int -DMCOLLATE_TRUE: int -DM_GRAYSCALE: int -DM_INTERLACED: int -DMICMMETHOD_NONE: int -DMICMMETHOD_SYSTEM: int -DMICMMETHOD_DRIVER: int -DMICMMETHOD_DEVICE: int -DMICMMETHOD_USER: int -DMICM_SATURATE: int -DMICM_CONTRAST: int -DMICM_COLORIMETRIC: int -DMICM_ABS_COLORIMETRIC: int -DMICM_USER: int -DMMEDIA_STANDARD: int -DMMEDIA_TRANSPARENCY: int -DMMEDIA_GLOSSY: int -DMMEDIA_USER: int -DMDITHER_NONE: int -DMDITHER_COARSE: int -DMDITHER_FINE: int -DMDITHER_LINEART: int -DMDITHER_ERRORDIFFUSION: int -DMDITHER_RESERVED6: int -DMDITHER_RESERVED7: int -DMDITHER_RESERVED8: int -DMDITHER_RESERVED9: int -DMDITHER_GRAYSCALE: int -DMDITHER_USER: int -DMNUP_SYSTEM: int -DMNUP_ONEUP: int -FEATURESETTING_NUP: int -FEATURESETTING_OUTPUT: int -FEATURESETTING_PSLEVEL: int -FEATURESETTING_CUSTPAPER: int -FEATURESETTING_MIRROR: int -FEATURESETTING_NEGATIVE: int -FEATURESETTING_PROTOCOL: int -FEATURESETTING_PRIVATE_BEGIN: int -FEATURESETTING_PRIVATE_END: int -RDH_RECTANGLES: int -GGO_METRICS: int -GGO_BITMAP: int -GGO_NATIVE: int -TT_POLYGON_TYPE: int -TT_PRIM_LINE: int -TT_PRIM_QSPLINE: int -TT_AVAILABLE: int -TT_ENABLED: int -DM_UPDATE: int -DM_COPY: int -DM_PROMPT: int -DM_MODIFY: int -DM_IN_BUFFER: int -DM_IN_PROMPT: int -DM_OUT_BUFFER: int -DM_OUT_DEFAULT: int -DISPLAY_DEVICE_ATTACHED_TO_DESKTOP: int -DISPLAY_DEVICE_MULTI_DRIVER: int -DISPLAY_DEVICE_PRIMARY_DEVICE: int -DISPLAY_DEVICE_MIRRORING_DRIVER: int -DISPLAY_DEVICE_VGA_COMPATIBLE: int -DISPLAY_DEVICE_REMOVABLE: int -DISPLAY_DEVICE_MODESPRUNED: int -DISPLAY_DEVICE_REMOTE: int -DISPLAY_DEVICE_DISCONNECT: int -DC_FIELDS: int -DC_PAPERS: int -DC_PAPERSIZE: int -DC_MINEXTENT: int -DC_MAXEXTENT: int -DC_BINS: int -DC_DUPLEX: int -DC_SIZE: int -DC_EXTRA: int -DC_VERSION: int -DC_DRIVER: int -DC_BINNAMES: int -DC_ENUMRESOLUTIONS: int -DC_FILEDEPENDENCIES: int -DC_TRUETYPE: int -DC_PAPERNAMES: int -DC_ORIENTATION: int -DC_COPIES: int -DC_BINADJUST: int -DC_EMF_COMPLIANT: int -DC_DATATYPE_PRODUCED: int -DC_COLLATE: int -DC_MANUFACTURER: int -DC_MODEL: int -DC_PERSONALITY: int -DC_PRINTRATE: int -DC_PRINTRATEUNIT: int -DC_PRINTERMEM: int -DC_MEDIAREADY: int -DC_STAPLE: int -DC_PRINTRATEPPM: int -DC_COLORDEVICE: int -DC_NUP: int -DC_MEDIATYPENAMES: int -DC_MEDIATYPES: int -PRINTRATEUNIT_PPM: int -PRINTRATEUNIT_CPS: int -PRINTRATEUNIT_LPM: int -PRINTRATEUNIT_IPM: int -DCTT_BITMAP: int -DCTT_DOWNLOAD: int -DCTT_SUBDEV: int -DCTT_DOWNLOAD_OUTLINE: int -DCBA_FACEUPNONE: int 
-DCBA_FACEUPCENTER: int -DCBA_FACEUPLEFT: int -DCBA_FACEUPRIGHT: int -DCBA_FACEDOWNNONE: int -DCBA_FACEDOWNCENTER: int -DCBA_FACEDOWNLEFT: int -DCBA_FACEDOWNRIGHT: int -CA_NEGATIVE: int -CA_LOG_FILTER: int -ILLUMINANT_DEVICE_DEFAULT: int -ILLUMINANT_A: int -ILLUMINANT_B: int -ILLUMINANT_C: int -ILLUMINANT_D50: int -ILLUMINANT_D55: int -ILLUMINANT_D65: int -ILLUMINANT_D75: int -ILLUMINANT_F2: int -ILLUMINANT_MAX_INDEX: int -ILLUMINANT_TUNGSTEN: int -ILLUMINANT_DAYLIGHT: int -ILLUMINANT_FLUORESCENT: int -ILLUMINANT_NTSC: int -FONTMAPPER_MAX: int -ENHMETA_SIGNATURE: int -ENHMETA_STOCK_OBJECT: int -EMR_HEADER: int -EMR_POLYBEZIER: int -EMR_POLYGON: int -EMR_POLYLINE: int -EMR_POLYBEZIERTO: int -EMR_POLYLINETO: int -EMR_POLYPOLYLINE: int -EMR_POLYPOLYGON: int -EMR_SETWINDOWEXTEX: int -EMR_SETWINDOWORGEX: int -EMR_SETVIEWPORTEXTEX: int -EMR_SETVIEWPORTORGEX: int -EMR_SETBRUSHORGEX: int -EMR_EOF: int -EMR_SETPIXELV: int -EMR_SETMAPPERFLAGS: int -EMR_SETMAPMODE: int -EMR_SETBKMODE: int -EMR_SETPOLYFILLMODE: int -EMR_SETROP2: int -EMR_SETSTRETCHBLTMODE: int -EMR_SETTEXTALIGN: int -EMR_SETCOLORADJUSTMENT: int -EMR_SETTEXTCOLOR: int -EMR_SETBKCOLOR: int -EMR_OFFSETCLIPRGN: int -EMR_MOVETOEX: int -EMR_SETMETARGN: int -EMR_EXCLUDECLIPRECT: int -EMR_INTERSECTCLIPRECT: int -EMR_SCALEVIEWPORTEXTEX: int -EMR_SCALEWINDOWEXTEX: int -EMR_SAVEDC: int -EMR_RESTOREDC: int -EMR_SETWORLDTRANSFORM: int -EMR_MODIFYWORLDTRANSFORM: int -EMR_SELECTOBJECT: int -EMR_CREATEPEN: int -EMR_CREATEBRUSHINDIRECT: int -EMR_DELETEOBJECT: int -EMR_ANGLEARC: int -EMR_ELLIPSE: int -EMR_RECTANGLE: int -EMR_ROUNDRECT: int -EMR_ARC: int -EMR_CHORD: int -EMR_PIE: int -EMR_SELECTPALETTE: int -EMR_CREATEPALETTE: int -EMR_SETPALETTEENTRIES: int -EMR_RESIZEPALETTE: int -EMR_REALIZEPALETTE: int -EMR_EXTFLOODFILL: int -EMR_LINETO: int -EMR_ARCTO: int -EMR_POLYDRAW: int -EMR_SETARCDIRECTION: int -EMR_SETMITERLIMIT: int -EMR_BEGINPATH: int -EMR_ENDPATH: int -EMR_CLOSEFIGURE: int -EMR_FILLPATH: int -EMR_STROKEANDFILLPATH: int -EMR_STROKEPATH: int -EMR_FLATTENPATH: int -EMR_WIDENPATH: int -EMR_SELECTCLIPPATH: int -EMR_ABORTPATH: int -EMR_GDICOMMENT: int -EMR_FILLRGN: int -EMR_FRAMERGN: int -EMR_INVERTRGN: int -EMR_PAINTRGN: int -EMR_EXTSELECTCLIPRGN: int -EMR_BITBLT: int -EMR_STRETCHBLT: int -EMR_MASKBLT: int -EMR_PLGBLT: int -EMR_SETDIBITSTODEVICE: int -EMR_STRETCHDIBITS: int -EMR_EXTCREATEFONTINDIRECTW: int -EMR_EXTTEXTOUTA: int -EMR_EXTTEXTOUTW: int -EMR_POLYBEZIER16: int -EMR_POLYGON16: int -EMR_POLYLINE16: int -EMR_POLYBEZIERTO16: int -EMR_POLYLINETO16: int -EMR_POLYPOLYLINE16: int -EMR_POLYPOLYGON16: int -EMR_POLYDRAW16: int -EMR_CREATEMONOBRUSH: int -EMR_CREATEDIBPATTERNBRUSHPT: int -EMR_EXTCREATEPEN: int -EMR_POLYTEXTOUTA: int -EMR_POLYTEXTOUTW: int -EMR_MIN: int -EMR_MAX: int -PANOSE_COUNT: int -PAN_FAMILYTYPE_INDEX: int -PAN_SERIFSTYLE_INDEX: int -PAN_WEIGHT_INDEX: int -PAN_PROPORTION_INDEX: int -PAN_CONTRAST_INDEX: int -PAN_STROKEVARIATION_INDEX: int -PAN_ARMSTYLE_INDEX: int -PAN_LETTERFORM_INDEX: int -PAN_MIDLINE_INDEX: int -PAN_XHEIGHT_INDEX: int -PAN_CULTURE_LATIN: int -PAN_ANY: int -PAN_NO_FIT: int -PAN_FAMILY_TEXT_DISPLAY: int -PAN_FAMILY_SCRIPT: int -PAN_FAMILY_DECORATIVE: int -PAN_FAMILY_PICTORIAL: int -PAN_SERIF_COVE: int -PAN_SERIF_OBTUSE_COVE: int -PAN_SERIF_SQUARE_COVE: int -PAN_SERIF_OBTUSE_SQUARE_COVE: int -PAN_SERIF_SQUARE: int -PAN_SERIF_THIN: int -PAN_SERIF_BONE: int -PAN_SERIF_EXAGGERATED: int -PAN_SERIF_TRIANGLE: int -PAN_SERIF_NORMAL_SANS: int -PAN_SERIF_OBTUSE_SANS: int -PAN_SERIF_PERP_SANS: int -PAN_SERIF_FLARED: 
int -PAN_SERIF_ROUNDED: int -PAN_WEIGHT_VERY_LIGHT: int -PAN_WEIGHT_LIGHT: int -PAN_WEIGHT_THIN: int -PAN_WEIGHT_BOOK: int -PAN_WEIGHT_MEDIUM: int -PAN_WEIGHT_DEMI: int -PAN_WEIGHT_BOLD: int -PAN_WEIGHT_HEAVY: int -PAN_WEIGHT_BLACK: int -PAN_WEIGHT_NORD: int -PAN_PROP_OLD_STYLE: int -PAN_PROP_MODERN: int -PAN_PROP_EVEN_WIDTH: int -PAN_PROP_EXPANDED: int -PAN_PROP_CONDENSED: int -PAN_PROP_VERY_EXPANDED: int -PAN_PROP_VERY_CONDENSED: int -PAN_PROP_MONOSPACED: int -PAN_CONTRAST_NONE: int -PAN_CONTRAST_VERY_LOW: int -PAN_CONTRAST_LOW: int -PAN_CONTRAST_MEDIUM_LOW: int -PAN_CONTRAST_MEDIUM: int -PAN_CONTRAST_MEDIUM_HIGH: int -PAN_CONTRAST_HIGH: int -PAN_CONTRAST_VERY_HIGH: int -PAN_STROKE_GRADUAL_DIAG: int -PAN_STROKE_GRADUAL_TRAN: int -PAN_STROKE_GRADUAL_VERT: int -PAN_STROKE_GRADUAL_HORZ: int -PAN_STROKE_RAPID_VERT: int -PAN_STROKE_RAPID_HORZ: int -PAN_STROKE_INSTANT_VERT: int -PAN_STRAIGHT_ARMS_HORZ: int -PAN_STRAIGHT_ARMS_WEDGE: int -PAN_STRAIGHT_ARMS_VERT: int -PAN_STRAIGHT_ARMS_SINGLE_SERIF: int -PAN_STRAIGHT_ARMS_DOUBLE_SERIF: int -PAN_BENT_ARMS_HORZ: int -PAN_BENT_ARMS_WEDGE: int -PAN_BENT_ARMS_VERT: int -PAN_BENT_ARMS_SINGLE_SERIF: int -PAN_BENT_ARMS_DOUBLE_SERIF: int -PAN_LETT_NORMAL_CONTACT: int -PAN_LETT_NORMAL_WEIGHTED: int -PAN_LETT_NORMAL_BOXED: int -PAN_LETT_NORMAL_FLATTENED: int -PAN_LETT_NORMAL_ROUNDED: int -PAN_LETT_NORMAL_OFF_CENTER: int -PAN_LETT_NORMAL_SQUARE: int -PAN_LETT_OBLIQUE_CONTACT: int -PAN_LETT_OBLIQUE_WEIGHTED: int -PAN_LETT_OBLIQUE_BOXED: int -PAN_LETT_OBLIQUE_FLATTENED: int -PAN_LETT_OBLIQUE_ROUNDED: int -PAN_LETT_OBLIQUE_OFF_CENTER: int -PAN_LETT_OBLIQUE_SQUARE: int -PAN_MIDLINE_STANDARD_TRIMMED: int -PAN_MIDLINE_STANDARD_POINTED: int -PAN_MIDLINE_STANDARD_SERIFED: int -PAN_MIDLINE_HIGH_TRIMMED: int -PAN_MIDLINE_HIGH_POINTED: int -PAN_MIDLINE_HIGH_SERIFED: int -PAN_MIDLINE_CONSTANT_TRIMMED: int -PAN_MIDLINE_CONSTANT_POINTED: int -PAN_MIDLINE_CONSTANT_SERIFED: int -PAN_MIDLINE_LOW_TRIMMED: int -PAN_MIDLINE_LOW_POINTED: int -PAN_MIDLINE_LOW_SERIFED: int -PAN_XHEIGHT_CONSTANT_SMALL: int -PAN_XHEIGHT_CONSTANT_STD: int -PAN_XHEIGHT_CONSTANT_LARGE: int -PAN_XHEIGHT_DUCKING_SMALL: int -PAN_XHEIGHT_DUCKING_STD: int -PAN_XHEIGHT_DUCKING_LARGE: int -ELF_VENDOR_SIZE: int -ELF_VERSION: int -ELF_CULTURE_LATIN: int -RASTER_FONTTYPE: int -DEVICE_FONTTYPE: int -TRUETYPE_FONTTYPE: int +IMAGE_RESOURCE_NAME_IS_STRING: Final = -2147483648 +IMAGE_RESOURCE_DATA_IS_DIRECTORY: Final = -2147483648 +IMAGE_DEBUG_TYPE_UNKNOWN: Final = 0 +IMAGE_DEBUG_TYPE_COFF: Final = 1 +IMAGE_DEBUG_TYPE_CODEVIEW: Final = 2 +IMAGE_DEBUG_TYPE_FPO: Final = 3 +IMAGE_DEBUG_TYPE_MISC: Final = 4 +IMAGE_DEBUG_TYPE_EXCEPTION: Final = 5 +IMAGE_DEBUG_TYPE_FIXUP: Final = 6 +IMAGE_DEBUG_TYPE_OMAP_TO_SRC: Final = 7 +IMAGE_DEBUG_TYPE_OMAP_FROM_SRC: Final = 8 +FRAME_FPO: Final = 0 +FRAME_TRAP: Final = 1 +FRAME_TSS: Final = 2 +SIZEOF_RFPO_DATA: Final = 16 +IMAGE_DEBUG_MISC_EXENAME: Final = 1 +IMAGE_SEPARATE_DEBUG_SIGNATURE: Final = 18756 + +NEWFRAME: Final = 1 +ABORTDOC: Final = 2 +NEXTBAND: Final = 3 +SETCOLORTABLE: Final = 4 +GETCOLORTABLE: Final = 5 +FLUSHOUTPUT: Final = 6 +DRAFTMODE: Final = 7 +QUERYESCSUPPORT: Final = 8 +SETABORTPROC: Final = 9 +STARTDOC: Final = 10 +ENDDOC: Final = 11 +GETPHYSPAGESIZE: Final = 12 +GETPRINTINGOFFSET: Final = 13 +GETSCALINGFACTOR: Final = 14 +MFCOMMENT: Final = 15 +GETPENWIDTH: Final = 16 +SETCOPYCOUNT: Final = 17 +SELECTPAPERSOURCE: Final = 18 +DEVICEDATA: Final = 19 +PASSTHROUGH: Final = 19 +GETTECHNOLGY: Final = 20 +GETTECHNOLOGY: Final = 20 +SETLINECAP: Final = 21 +SETLINEJOIN: 
Final = 22 +SETMITERLIMIT: Final = 23 +BANDINFO: Final = 24 +DRAWPATTERNRECT: Final = 25 +GETVECTORPENSIZE: Final = 26 +GETVECTORBRUSHSIZE: Final = 27 +ENABLEDUPLEX: Final = 28 +GETSETPAPERBINS: Final = 29 +GETSETPRINTORIENT: Final = 30 +ENUMPAPERBINS: Final = 31 +SETDIBSCALING: Final = 32 +EPSPRINTING: Final = 33 +ENUMPAPERMETRICS: Final = 34 +GETSETPAPERMETRICS: Final = 35 +POSTSCRIPT_DATA: Final = 37 +POSTSCRIPT_IGNORE: Final = 38 +MOUSETRAILS: Final = 39 +GETDEVICEUNITS: Final = 42 +GETEXTENDEDTEXTMETRICS: Final = 256 +GETEXTENTTABLE: Final = 257 +GETPAIRKERNTABLE: Final = 258 +GETTRACKKERNTABLE: Final = 259 +EXTTEXTOUT: Final = 512 +GETFACENAME: Final = 513 +DOWNLOADFACE: Final = 514 +ENABLERELATIVEWIDTHS: Final = 768 +ENABLEPAIRKERNING: Final = 769 +SETKERNTRACK: Final = 770 +SETALLJUSTVALUES: Final = 771 +SETCHARSET: Final = 772 +STRETCHBLT: Final = 2048 +GETSETSCREENPARAMS: Final = 3072 +BEGIN_PATH: Final = 4096 +CLIP_TO_PATH: Final = 4097 +END_PATH: Final = 4098 +EXT_DEVICE_CAPS: Final = 4099 +RESTORE_CTM: Final = 4100 +SAVE_CTM: Final = 4101 +SET_ARC_DIRECTION: Final = 4102 +SET_BACKGROUND_COLOR: Final = 4103 +SET_POLY_MODE: Final = 4104 +SET_SCREEN_ANGLE: Final = 4105 +SET_SPREAD: Final = 4106 +TRANSFORM_CTM: Final = 4107 +SET_CLIP_BOX: Final = 4108 +SET_BOUNDS: Final = 4109 +SET_MIRROR_MODE: Final = 4110 +OPENCHANNEL: Final = 4110 +DOWNLOADHEADER: Final = 4111 +CLOSECHANNEL: Final = 4112 +POSTSCRIPT_PASSTHROUGH: Final = 4115 +ENCAPSULATED_POSTSCRIPT: Final = 4116 +SP_NOTREPORTED: Final = 16384 +SP_ERROR: Final = -1 +SP_APPABORT: Final = -2 +SP_USERABORT: Final = -3 +SP_OUTOFDISK: Final = -4 +SP_OUTOFMEMORY: Final = -5 +PR_JOBSTATUS: Final = 0 + +OBJ_PEN: Final = 1 +OBJ_BRUSH: Final = 2 +OBJ_DC: Final = 3 +OBJ_METADC: Final = 4 +OBJ_PAL: Final = 5 +OBJ_FONT: Final = 6 +OBJ_BITMAP: Final = 7 +OBJ_REGION: Final = 8 +OBJ_METAFILE: Final = 9 +OBJ_MEMDC: Final = 10 +OBJ_EXTPEN: Final = 11 +OBJ_ENHMETADC: Final = 12 +OBJ_ENHMETAFILE: Final = 13 +OBJ_COLORSPACE: Final = 14 + +MWT_IDENTITY: Final = 1 +MWT_LEFTMULTIPLY: Final = 2 +MWT_RIGHTMULTIPLY: Final = 3 +MWT_MIN: Final = MWT_IDENTITY +MWT_MAX: Final = MWT_RIGHTMULTIPLY +BI_RGB: Final = 0 +BI_RLE8: Final = 1 +BI_RLE4: Final = 2 +BI_BITFIELDS: Final = 3 +TMPF_FIXED_PITCH: Final = 1 +TMPF_VECTOR: Final = 2 +TMPF_DEVICE: Final = 8 +TMPF_TRUETYPE: Final = 4 +NTM_REGULAR: Final = 64 +NTM_BOLD: Final = 32 +NTM_ITALIC: Final = 1 +LF_FACESIZE: Final = 32 +LF_FULLFACESIZE: Final = 64 +OUT_DEFAULT_PRECIS: Final = 0 +OUT_STRING_PRECIS: Final = 1 +OUT_CHARACTER_PRECIS: Final = 2 +OUT_STROKE_PRECIS: Final = 3 +OUT_TT_PRECIS: Final = 4 +OUT_DEVICE_PRECIS: Final = 5 +OUT_RASTER_PRECIS: Final = 6 +OUT_TT_ONLY_PRECIS: Final = 7 +OUT_OUTLINE_PRECIS: Final = 8 +CLIP_DEFAULT_PRECIS: Final = 0 +CLIP_CHARACTER_PRECIS: Final = 1 +CLIP_STROKE_PRECIS: Final = 2 +CLIP_MASK: Final = 15 +CLIP_LH_ANGLES: Final[int] +CLIP_TT_ALWAYS: Final[int] +CLIP_EMBEDDED: Final[int] +DEFAULT_QUALITY: Final = 0 +DRAFT_QUALITY: Final = 1 +PROOF_QUALITY: Final = 2 +NONANTIALIASED_QUALITY: Final = 3 +ANTIALIASED_QUALITY: Final = 4 +CLEARTYPE_QUALITY: Final = 5 +CLEARTYPE_NATURAL_QUALITY: Final = 6 +DEFAULT_PITCH: Final = 0 +FIXED_PITCH: Final = 1 +VARIABLE_PITCH: Final = 2 +ANSI_CHARSET: Final = 0 +DEFAULT_CHARSET: Final = 1 +SYMBOL_CHARSET: Final = 2 +SHIFTJIS_CHARSET: Final = 128 +HANGEUL_CHARSET: Final = 129 +CHINESEBIG5_CHARSET: Final = 136 +OEM_CHARSET: Final = 255 +JOHAB_CHARSET: Final = 130 +HEBREW_CHARSET: Final = 177 +ARABIC_CHARSET: Final = 178 +GREEK_CHARSET: Final = 
161 +TURKISH_CHARSET: Final = 162 +VIETNAMESE_CHARSET: Final = 163 +THAI_CHARSET: Final = 222 +EASTEUROPE_CHARSET: Final = 238 +RUSSIAN_CHARSET: Final = 204 +MAC_CHARSET: Final = 77 +BALTIC_CHARSET: Final = 186 +FF_DONTCARE: Final[int] +FF_ROMAN: Final[int] +FF_SWISS: Final[int] +FF_MODERN: Final[int] +FF_SCRIPT: Final[int] +FF_DECORATIVE: Final[int] +FW_DONTCARE: Final = 0 +FW_THIN: Final = 100 +FW_EXTRALIGHT: Final = 200 +FW_LIGHT: Final = 300 +FW_NORMAL: Final = 400 +FW_MEDIUM: Final = 500 +FW_SEMIBOLD: Final = 600 +FW_BOLD: Final = 700 +FW_EXTRABOLD: Final = 800 +FW_HEAVY: Final = 900 +FW_ULTRALIGHT: Final = FW_EXTRALIGHT +FW_REGULAR: Final = FW_NORMAL +FW_DEMIBOLD: Final = FW_SEMIBOLD +FW_ULTRABOLD: Final = FW_EXTRABOLD +FW_BLACK: Final = FW_HEAVY + +BS_SOLID: Final = 0 +BS_NULL: Final = 1 +BS_HOLLOW: Final = BS_NULL +BS_HATCHED: Final = 2 +BS_PATTERN: Final = 3 +BS_INDEXED: Final = 4 +BS_DIBPATTERN: Final = 5 +BS_DIBPATTERNPT: Final = 6 +BS_PATTERN8X8: Final = 7 +BS_DIBPATTERN8X8: Final = 8 +HS_HORIZONTAL: Final = 0 +HS_VERTICAL: Final = 1 +HS_FDIAGONAL: Final = 2 +HS_BDIAGONAL: Final = 3 +HS_CROSS: Final = 4 +HS_DIAGCROSS: Final = 5 +HS_FDIAGONAL1: Final = 6 +HS_BDIAGONAL1: Final = 7 +HS_SOLID: Final = 8 +HS_DENSE1: Final = 9 +HS_DENSE2: Final = 10 +HS_DENSE3: Final = 11 +HS_DENSE4: Final = 12 +HS_DENSE5: Final = 13 +HS_DENSE6: Final = 14 +HS_DENSE7: Final = 15 +HS_DENSE8: Final = 16 +HS_NOSHADE: Final = 17 +HS_HALFTONE: Final = 18 +HS_SOLIDCLR: Final = 19 +HS_DITHEREDCLR: Final = 20 +HS_SOLIDTEXTCLR: Final = 21 +HS_DITHEREDTEXTCLR: Final = 22 +HS_SOLIDBKCLR: Final = 23 +HS_DITHEREDBKCLR: Final = 24 +HS_API_MAX: Final = 25 +PS_SOLID: Final = 0 +PS_DASH: Final = 1 +PS_DOT: Final = 2 +PS_DASHDOT: Final = 3 +PS_DASHDOTDOT: Final = 4 +PS_NULL: Final = 5 +PS_INSIDEFRAME: Final = 6 +PS_USERSTYLE: Final = 7 +PS_ALTERNATE: Final = 8 +PS_STYLE_MASK: Final = 15 +PS_ENDCAP_ROUND: Final = 0 +PS_ENDCAP_SQUARE: Final = 256 +PS_ENDCAP_FLAT: Final = 512 +PS_ENDCAP_MASK: Final = 3840 +PS_JOIN_ROUND: Final = 0 +PS_JOIN_BEVEL: Final = 4096 +PS_JOIN_MITER: Final = 8192 +PS_JOIN_MASK: Final = 61440 +PS_COSMETIC: Final = 0 +PS_GEOMETRIC: Final = 65536 +PS_TYPE_MASK: Final = 983040 +AD_COUNTERCLOCKWISE: Final = 1 +AD_CLOCKWISE: Final = 2 +DRIVERVERSION: Final = 0 +TECHNOLOGY: Final = 2 +HORZSIZE: Final = 4 +VERTSIZE: Final = 6 +HORZRES: Final = 8 +VERTRES: Final = 10 +BITSPIXEL: Final = 12 +PLANES: Final = 14 +NUMBRUSHES: Final = 16 +NUMPENS: Final = 18 +NUMMARKERS: Final = 20 +NUMFONTS: Final = 22 +NUMCOLORS: Final = 24 +PDEVICESIZE: Final = 26 +CURVECAPS: Final = 28 +LINECAPS: Final = 30 +POLYGONALCAPS: Final = 32 +TEXTCAPS: Final = 34 +CLIPCAPS: Final = 36 +RASTERCAPS: Final = 38 +ASPECTX: Final = 40 +ASPECTY: Final = 42 +ASPECTXY: Final = 44 +LOGPIXELSX: Final = 88 +LOGPIXELSY: Final = 90 +SIZEPALETTE: Final = 104 +NUMRESERVED: Final = 106 +COLORRES: Final = 108 + +PHYSICALWIDTH: Final = 110 +PHYSICALHEIGHT: Final = 111 +PHYSICALOFFSETX: Final = 112 +PHYSICALOFFSETY: Final = 113 +SCALINGFACTORX: Final = 114 +SCALINGFACTORY: Final = 115 +VREFRESH: Final = 116 +DESKTOPVERTRES: Final = 117 +DESKTOPHORZRES: Final = 118 +BLTALIGNMENT: Final = 119 +SHADEBLENDCAPS: Final = 120 +COLORMGMTCAPS: Final = 121 + +DT_PLOTTER: Final = 0 +DT_RASDISPLAY: Final = 1 +DT_RASPRINTER: Final = 2 +DT_RASCAMERA: Final = 3 +DT_CHARSTREAM: Final = 4 +DT_METAFILE: Final = 5 +DT_DISPFILE: Final = 6 +CC_NONE: Final = 0 +CC_CIRCLES: Final = 1 +CC_PIE: Final = 2 +CC_CHORD: Final = 4 +CC_ELLIPSES: Final = 8 +CC_WIDE: Final = 16 
+CC_STYLED: Final = 32 +CC_WIDESTYLED: Final = 64 +CC_INTERIORS: Final = 128 +CC_ROUNDRECT: Final = 256 +LC_NONE: Final = 0 +LC_POLYLINE: Final = 2 +LC_MARKER: Final = 4 +LC_POLYMARKER: Final = 8 +LC_WIDE: Final = 16 +LC_STYLED: Final = 32 +LC_WIDESTYLED: Final = 64 +LC_INTERIORS: Final = 128 +PC_NONE: Final = 0 +PC_POLYGON: Final = 1 +PC_RECTANGLE: Final = 2 +PC_WINDPOLYGON: Final = 4 +PC_TRAPEZOID: Final = 4 +PC_SCANLINE: Final = 8 +PC_WIDE: Final = 16 +PC_STYLED: Final = 32 +PC_WIDESTYLED: Final = 64 +PC_INTERIORS: Final = 128 +CP_NONE: Final = 0 +CP_RECTANGLE: Final = 1 +CP_REGION: Final = 2 +TC_OP_CHARACTER: Final = 1 +TC_OP_STROKE: Final = 2 +TC_CP_STROKE: Final = 4 +TC_CR_90: Final = 8 +TC_CR_ANY: Final = 16 +TC_SF_X_YINDEP: Final = 32 +TC_SA_DOUBLE: Final = 64 +TC_SA_INTEGER: Final = 128 +TC_SA_CONTIN: Final = 256 +TC_EA_DOUBLE: Final = 512 +TC_IA_ABLE: Final = 1024 +TC_UA_ABLE: Final = 2048 +TC_SO_ABLE: Final = 4096 +TC_RA_ABLE: Final = 8192 +TC_VA_ABLE: Final = 16384 +TC_RESERVED: Final = 32768 +TC_SCROLLBLT: Final = 65536 +RC_BITBLT: Final = 1 +RC_BANDING: Final = 2 +RC_SCALING: Final = 4 +RC_BITMAP64: Final = 8 +RC_GDI20_OUTPUT: Final = 16 +RC_GDI20_STATE: Final = 32 +RC_SAVEBITMAP: Final = 64 +RC_DI_BITMAP: Final = 128 +RC_PALETTE: Final = 256 +RC_DIBTODEV: Final = 512 +RC_BIGFONT: Final = 1024 +RC_STRETCHBLT: Final = 2048 +RC_FLOODFILL: Final = 4096 +RC_STRETCHDIB: Final = 8192 +RC_OP_DX_OUTPUT: Final = 16384 +RC_DEVBITS: Final = 32768 +DIB_RGB_COLORS: Final = 0 +DIB_PAL_COLORS: Final = 1 +DIB_PAL_INDICES: Final = 2 +DIB_PAL_PHYSINDICES: Final = 2 +DIB_PAL_LOGINDICES: Final = 4 +SYSPAL_ERROR: Final = 0 +SYSPAL_STATIC: Final = 1 +SYSPAL_NOSTATIC: Final = 2 +CBM_CREATEDIB: Final = 2 +CBM_INIT: Final = 4 +FLOODFILLBORDER: Final = 0 +FLOODFILLSURFACE: Final = 1 +CCHFORMNAME: Final = 32 + +DM_SPECVERSION: Final = 800 +DM_ORIENTATION: Final = 1 +DM_PAPERSIZE: Final = 2 +DM_PAPERLENGTH: Final = 4 +DM_PAPERWIDTH: Final = 8 +DM_SCALE: Final = 16 +DM_POSITION: Final = 32 +DM_NUP: Final = 64 +DM_DISPLAYORIENTATION: Final = 128 +DM_COPIES: Final = 256 +DM_DEFAULTSOURCE: Final = 512 +DM_PRINTQUALITY: Final = 1024 +DM_COLOR: Final = 2048 +DM_DUPLEX: Final = 4096 +DM_YRESOLUTION: Final = 8192 +DM_TTOPTION: Final = 16384 +DM_COLLATE: Final = 32768 +DM_FORMNAME: Final = 65536 +DM_LOGPIXELS: Final = 131072 +DM_BITSPERPEL: Final = 262144 +DM_PELSWIDTH: Final = 524288 +DM_PELSHEIGHT: Final = 1048576 +DM_DISPLAYFLAGS: Final = 2097152 +DM_DISPLAYFREQUENCY: Final = 4194304 +DM_ICMMETHOD: Final = 8388608 +DM_ICMINTENT: Final = 16777216 +DM_MEDIATYPE: Final = 33554432 +DM_DITHERTYPE: Final = 67108864 +DM_PANNINGWIDTH: Final = 134217728 +DM_PANNINGHEIGHT: Final = 268435456 +DM_DISPLAYFIXEDOUTPUT: Final = 536870912 + +DMORIENT_PORTRAIT: Final = 1 +DMORIENT_LANDSCAPE: Final = 2 + +DMDO_DEFAULT: Final = 0 +DMDO_90: Final = 1 +DMDO_180: Final = 2 +DMDO_270: Final = 3 + +DMDFO_DEFAULT: Final = 0 +DMDFO_STRETCH: Final = 1 +DMDFO_CENTER: Final = 2 + +DMPAPER_LETTER: Final = 1 +DMPAPER_LETTERSMALL: Final = 2 +DMPAPER_TABLOID: Final = 3 +DMPAPER_LEDGER: Final = 4 +DMPAPER_LEGAL: Final = 5 +DMPAPER_STATEMENT: Final = 6 +DMPAPER_EXECUTIVE: Final = 7 +DMPAPER_A3: Final = 8 +DMPAPER_A4: Final = 9 +DMPAPER_A4SMALL: Final = 10 +DMPAPER_A5: Final = 11 +DMPAPER_B4: Final = 12 +DMPAPER_B5: Final = 13 +DMPAPER_FOLIO: Final = 14 +DMPAPER_QUARTO: Final = 15 +DMPAPER_10X14: Final = 16 +DMPAPER_11X17: Final = 17 +DMPAPER_NOTE: Final = 18 +DMPAPER_ENV_9: Final = 19 +DMPAPER_ENV_10: Final = 20 +DMPAPER_ENV_11: Final = 21 
+DMPAPER_ENV_12: Final = 22 +DMPAPER_ENV_14: Final = 23 +DMPAPER_CSHEET: Final = 24 +DMPAPER_DSHEET: Final = 25 +DMPAPER_ESHEET: Final = 26 +DMPAPER_ENV_DL: Final = 27 +DMPAPER_ENV_C5: Final = 28 +DMPAPER_ENV_C3: Final = 29 +DMPAPER_ENV_C4: Final = 30 +DMPAPER_ENV_C6: Final = 31 +DMPAPER_ENV_C65: Final = 32 +DMPAPER_ENV_B4: Final = 33 +DMPAPER_ENV_B5: Final = 34 +DMPAPER_ENV_B6: Final = 35 +DMPAPER_ENV_ITALY: Final = 36 +DMPAPER_ENV_MONARCH: Final = 37 +DMPAPER_ENV_PERSONAL: Final = 38 +DMPAPER_FANFOLD_US: Final = 39 +DMPAPER_FANFOLD_STD_GERMAN: Final = 40 +DMPAPER_FANFOLD_LGL_GERMAN: Final = 41 +DMPAPER_ISO_B4: Final = 42 +DMPAPER_JAPANESE_POSTCARD: Final = 43 +DMPAPER_9X11: Final = 44 +DMPAPER_10X11: Final = 45 +DMPAPER_15X11: Final = 46 +DMPAPER_ENV_INVITE: Final = 47 +DMPAPER_RESERVED_48: Final = 48 +DMPAPER_RESERVED_49: Final = 49 +DMPAPER_LETTER_EXTRA: Final = 50 +DMPAPER_LEGAL_EXTRA: Final = 51 +DMPAPER_TABLOID_EXTRA: Final = 52 +DMPAPER_A4_EXTRA: Final = 53 +DMPAPER_LETTER_TRANSVERSE: Final = 54 +DMPAPER_A4_TRANSVERSE: Final = 55 +DMPAPER_LETTER_EXTRA_TRANSVERSE: Final = 56 +DMPAPER_A_PLUS: Final = 57 +DMPAPER_B_PLUS: Final = 58 +DMPAPER_LETTER_PLUS: Final = 59 +DMPAPER_A4_PLUS: Final = 60 +DMPAPER_A5_TRANSVERSE: Final = 61 +DMPAPER_B5_TRANSVERSE: Final = 62 +DMPAPER_A3_EXTRA: Final = 63 +DMPAPER_A5_EXTRA: Final = 64 +DMPAPER_B5_EXTRA: Final = 65 +DMPAPER_A2: Final = 66 +DMPAPER_A3_TRANSVERSE: Final = 67 +DMPAPER_A3_EXTRA_TRANSVERSE: Final = 68 +DMPAPER_DBL_JAPANESE_POSTCARD: Final = 69 +DMPAPER_A6: Final = 70 +DMPAPER_JENV_KAKU2: Final = 71 +DMPAPER_JENV_KAKU3: Final = 72 +DMPAPER_JENV_CHOU3: Final = 73 +DMPAPER_JENV_CHOU4: Final = 74 +DMPAPER_LETTER_ROTATED: Final = 75 +DMPAPER_A3_ROTATED: Final = 76 +DMPAPER_A4_ROTATED: Final = 77 +DMPAPER_A5_ROTATED: Final = 78 +DMPAPER_B4_JIS_ROTATED: Final = 79 +DMPAPER_B5_JIS_ROTATED: Final = 80 +DMPAPER_JAPANESE_POSTCARD_ROTATED: Final = 81 +DMPAPER_DBL_JAPANESE_POSTCARD_ROTATED: Final = 82 +DMPAPER_A6_ROTATED: Final = 83 +DMPAPER_JENV_KAKU2_ROTATED: Final = 84 +DMPAPER_JENV_KAKU3_ROTATED: Final = 85 +DMPAPER_JENV_CHOU3_ROTATED: Final = 86 +DMPAPER_JENV_CHOU4_ROTATED: Final = 87 +DMPAPER_B6_JIS: Final = 88 +DMPAPER_B6_JIS_ROTATED: Final = 89 +DMPAPER_12X11: Final = 90 +DMPAPER_JENV_YOU4: Final = 91 +DMPAPER_JENV_YOU4_ROTATED: Final = 92 +DMPAPER_P16K: Final = 93 +DMPAPER_P32K: Final = 94 +DMPAPER_P32KBIG: Final = 95 +DMPAPER_PENV_1: Final = 96 +DMPAPER_PENV_2: Final = 97 +DMPAPER_PENV_3: Final = 98 +DMPAPER_PENV_4: Final = 99 +DMPAPER_PENV_5: Final = 100 +DMPAPER_PENV_6: Final = 101 +DMPAPER_PENV_7: Final = 102 +DMPAPER_PENV_8: Final = 103 +DMPAPER_PENV_9: Final = 104 +DMPAPER_PENV_10: Final = 105 +DMPAPER_P16K_ROTATED: Final = 106 +DMPAPER_P32K_ROTATED: Final = 107 +DMPAPER_P32KBIG_ROTATED: Final = 108 +DMPAPER_PENV_1_ROTATED: Final = 109 +DMPAPER_PENV_2_ROTATED: Final = 110 +DMPAPER_PENV_3_ROTATED: Final = 111 +DMPAPER_PENV_4_ROTATED: Final = 112 +DMPAPER_PENV_5_ROTATED: Final = 113 +DMPAPER_PENV_6_ROTATED: Final = 114 +DMPAPER_PENV_7_ROTATED: Final = 115 +DMPAPER_PENV_8_ROTATED: Final = 116 +DMPAPER_PENV_9_ROTATED: Final = 117 +DMPAPER_PENV_10_ROTATED: Final = 118 +DMPAPER_LAST: Final = DMPAPER_PENV_10_ROTATED +DMPAPER_USER: Final = 256 + +DMBIN_UPPER: Final = 1 +DMBIN_ONLYONE: Final = 1 +DMBIN_LOWER: Final = 2 +DMBIN_MIDDLE: Final = 3 +DMBIN_MANUAL: Final = 4 +DMBIN_ENVELOPE: Final = 5 +DMBIN_ENVMANUAL: Final = 6 +DMBIN_AUTO: Final = 7 +DMBIN_TRACTOR: Final = 8 +DMBIN_SMALLFMT: Final = 9 +DMBIN_LARGEFMT: Final = 10 +DMBIN_LARGECAPACITY: 
Final = 11 +DMBIN_CASSETTE: Final = 14 +DMBIN_FORMSOURCE: Final = 15 +DMBIN_LAST: Final = DMBIN_FORMSOURCE +DMBIN_USER: Final = 256 + +DMRES_DRAFT: Final = -1 +DMRES_LOW: Final = -2 +DMRES_MEDIUM: Final = -3 +DMRES_HIGH: Final = -4 + +DMCOLOR_MONOCHROME: Final = 1 +DMCOLOR_COLOR: Final = 2 + +DMDUP_SIMPLEX: Final = 1 +DMDUP_VERTICAL: Final = 2 +DMDUP_HORIZONTAL: Final = 3 + +DMTT_BITMAP: Final = 1 +DMTT_DOWNLOAD: Final = 2 +DMTT_SUBDEV: Final = 3 +DMTT_DOWNLOAD_OUTLINE: Final = 4 + +DMCOLLATE_FALSE: Final = 0 +DMCOLLATE_TRUE: Final = 1 + +DM_GRAYSCALE: Final = 1 +DM_INTERLACED: Final = 2 + +DMICMMETHOD_NONE: Final = 1 +DMICMMETHOD_SYSTEM: Final = 2 +DMICMMETHOD_DRIVER: Final = 3 +DMICMMETHOD_DEVICE: Final = 4 +DMICMMETHOD_USER: Final = 256 + +DMICM_SATURATE: Final = 1 +DMICM_CONTRAST: Final = 2 +DMICM_COLORIMETRIC: Final = 3 +DMICM_ABS_COLORIMETRIC: Final = 4 +DMICM_USER: Final = 256 + +DMMEDIA_STANDARD: Final = 1 +DMMEDIA_TRANSPARENCY: Final = 2 +DMMEDIA_GLOSSY: Final = 3 +DMMEDIA_USER: Final = 256 + +DMDITHER_NONE: Final = 1 +DMDITHER_COARSE: Final = 2 +DMDITHER_FINE: Final = 3 +DMDITHER_LINEART: Final = 4 +DMDITHER_ERRORDIFFUSION: Final = 5 +DMDITHER_RESERVED6: Final = 6 +DMDITHER_RESERVED7: Final = 7 +DMDITHER_RESERVED8: Final = 8 +DMDITHER_RESERVED9: Final = 9 +DMDITHER_GRAYSCALE: Final = 10 +DMDITHER_USER: Final = 256 + +DMNUP_SYSTEM: Final = 1 +DMNUP_ONEUP: Final = 2 + +FEATURESETTING_NUP: Final = 0 +FEATURESETTING_OUTPUT: Final = 1 +FEATURESETTING_PSLEVEL: Final = 2 +FEATURESETTING_CUSTPAPER: Final = 3 +FEATURESETTING_MIRROR: Final = 4 +FEATURESETTING_NEGATIVE: Final = 5 +FEATURESETTING_PROTOCOL: Final = 6 +FEATURESETTING_PRIVATE_BEGIN: Final = 0x1000 +FEATURESETTING_PRIVATE_END: Final = 0x1FFF + +RDH_RECTANGLES: Final = 1 +GGO_METRICS: Final = 0 +GGO_BITMAP: Final = 1 +GGO_NATIVE: Final = 2 +TT_POLYGON_TYPE: Final = 24 +TT_PRIM_LINE: Final = 1 +TT_PRIM_QSPLINE: Final = 2 +TT_AVAILABLE: Final = 1 +TT_ENABLED: Final = 2 +DM_UPDATE: Final = 1 +DM_COPY: Final = 2 +DM_PROMPT: Final = 4 +DM_MODIFY: Final = 8 +DM_IN_BUFFER: Final = DM_MODIFY +DM_IN_PROMPT: Final = DM_PROMPT +DM_OUT_BUFFER: Final = DM_COPY +DM_OUT_DEFAULT: Final = DM_UPDATE + +DISPLAY_DEVICE_ATTACHED_TO_DESKTOP: Final = 1 +DISPLAY_DEVICE_MULTI_DRIVER: Final = 2 +DISPLAY_DEVICE_PRIMARY_DEVICE: Final = 4 +DISPLAY_DEVICE_MIRRORING_DRIVER: Final = 8 +DISPLAY_DEVICE_VGA_COMPATIBLE: Final = 16 +DISPLAY_DEVICE_REMOVABLE: Final = 32 +DISPLAY_DEVICE_MODESPRUNED: Final = 134217728 +DISPLAY_DEVICE_REMOTE: Final = 67108864 +DISPLAY_DEVICE_DISCONNECT: Final = 33554432 + +DC_FIELDS: Final = 1 +DC_PAPERS: Final = 2 +DC_PAPERSIZE: Final = 3 +DC_MINEXTENT: Final = 4 +DC_MAXEXTENT: Final = 5 +DC_BINS: Final = 6 +DC_DUPLEX: Final = 7 +DC_SIZE: Final = 8 +DC_EXTRA: Final = 9 +DC_VERSION: Final = 10 +DC_DRIVER: Final = 11 +DC_BINNAMES: Final = 12 +DC_ENUMRESOLUTIONS: Final = 13 +DC_FILEDEPENDENCIES: Final = 14 +DC_TRUETYPE: Final = 15 +DC_PAPERNAMES: Final = 16 +DC_ORIENTATION: Final = 17 +DC_COPIES: Final = 18 +DC_BINADJUST: Final = 19 +DC_EMF_COMPLIANT: Final = 20 +DC_DATATYPE_PRODUCED: Final = 21 +DC_COLLATE: Final = 22 +DC_MANUFACTURER: Final = 23 +DC_MODEL: Final = 24 +DC_PERSONALITY: Final = 25 +DC_PRINTRATE: Final = 26 +DC_PRINTRATEUNIT: Final = 27 +DC_PRINTERMEM: Final = 28 +DC_MEDIAREADY: Final = 29 +DC_STAPLE: Final = 30 +DC_PRINTRATEPPM: Final = 31 +DC_COLORDEVICE: Final = 32 +DC_NUP: Final = 33 +DC_MEDIATYPENAMES: Final = 34 +DC_MEDIATYPES: Final = 35 + +PRINTRATEUNIT_PPM: Final = 1 +PRINTRATEUNIT_CPS: Final = 2 
+PRINTRATEUNIT_LPM: Final = 3 +PRINTRATEUNIT_IPM: Final = 4 + +DCTT_BITMAP: Final = 1 +DCTT_DOWNLOAD: Final = 2 +DCTT_SUBDEV: Final = 4 +DCTT_DOWNLOAD_OUTLINE: Final = 8 + +DCBA_FACEUPNONE: Final = 0 +DCBA_FACEUPCENTER: Final = 1 +DCBA_FACEUPLEFT: Final = 2 +DCBA_FACEUPRIGHT: Final = 3 +DCBA_FACEDOWNNONE: Final = 256 +DCBA_FACEDOWNCENTER: Final = 257 +DCBA_FACEDOWNLEFT: Final = 258 +DCBA_FACEDOWNRIGHT: Final = 259 + +CA_NEGATIVE: Final = 1 +CA_LOG_FILTER: Final = 2 +ILLUMINANT_DEVICE_DEFAULT: Final = 0 +ILLUMINANT_A: Final = 1 +ILLUMINANT_B: Final = 2 +ILLUMINANT_C: Final = 3 +ILLUMINANT_D50: Final = 4 +ILLUMINANT_D55: Final = 5 +ILLUMINANT_D65: Final = 6 +ILLUMINANT_D75: Final = 7 +ILLUMINANT_F2: Final = 8 +ILLUMINANT_MAX_INDEX: Final = ILLUMINANT_F2 +ILLUMINANT_TUNGSTEN: Final = ILLUMINANT_A +ILLUMINANT_DAYLIGHT: Final = ILLUMINANT_C +ILLUMINANT_FLUORESCENT: Final = ILLUMINANT_F2 +ILLUMINANT_NTSC: Final = ILLUMINANT_C + +FONTMAPPER_MAX: Final = 10 +ENHMETA_SIGNATURE: Final = 1179469088 +ENHMETA_STOCK_OBJECT: Final = -2147483648 +EMR_HEADER: Final = 1 +EMR_POLYBEZIER: Final = 2 +EMR_POLYGON: Final = 3 +EMR_POLYLINE: Final = 4 +EMR_POLYBEZIERTO: Final = 5 +EMR_POLYLINETO: Final = 6 +EMR_POLYPOLYLINE: Final = 7 +EMR_POLYPOLYGON: Final = 8 +EMR_SETWINDOWEXTEX: Final = 9 +EMR_SETWINDOWORGEX: Final = 10 +EMR_SETVIEWPORTEXTEX: Final = 11 +EMR_SETVIEWPORTORGEX: Final = 12 +EMR_SETBRUSHORGEX: Final = 13 +EMR_EOF: Final = 14 +EMR_SETPIXELV: Final = 15 +EMR_SETMAPPERFLAGS: Final = 16 +EMR_SETMAPMODE: Final = 17 +EMR_SETBKMODE: Final = 18 +EMR_SETPOLYFILLMODE: Final = 19 +EMR_SETROP2: Final = 20 +EMR_SETSTRETCHBLTMODE: Final = 21 +EMR_SETTEXTALIGN: Final = 22 +EMR_SETCOLORADJUSTMENT: Final = 23 +EMR_SETTEXTCOLOR: Final = 24 +EMR_SETBKCOLOR: Final = 25 +EMR_OFFSETCLIPRGN: Final = 26 +EMR_MOVETOEX: Final = 27 +EMR_SETMETARGN: Final = 28 +EMR_EXCLUDECLIPRECT: Final = 29 +EMR_INTERSECTCLIPRECT: Final = 30 +EMR_SCALEVIEWPORTEXTEX: Final = 31 +EMR_SCALEWINDOWEXTEX: Final = 32 +EMR_SAVEDC: Final = 33 +EMR_RESTOREDC: Final = 34 +EMR_SETWORLDTRANSFORM: Final = 35 +EMR_MODIFYWORLDTRANSFORM: Final = 36 +EMR_SELECTOBJECT: Final = 37 +EMR_CREATEPEN: Final = 38 +EMR_CREATEBRUSHINDIRECT: Final = 39 +EMR_DELETEOBJECT: Final = 40 +EMR_ANGLEARC: Final = 41 +EMR_ELLIPSE: Final = 42 +EMR_RECTANGLE: Final = 43 +EMR_ROUNDRECT: Final = 44 +EMR_ARC: Final = 45 +EMR_CHORD: Final = 46 +EMR_PIE: Final = 47 +EMR_SELECTPALETTE: Final = 48 +EMR_CREATEPALETTE: Final = 49 +EMR_SETPALETTEENTRIES: Final = 50 +EMR_RESIZEPALETTE: Final = 51 +EMR_REALIZEPALETTE: Final = 52 +EMR_EXTFLOODFILL: Final = 53 +EMR_LINETO: Final = 54 +EMR_ARCTO: Final = 55 +EMR_POLYDRAW: Final = 56 +EMR_SETARCDIRECTION: Final = 57 +EMR_SETMITERLIMIT: Final = 58 +EMR_BEGINPATH: Final = 59 +EMR_ENDPATH: Final = 60 +EMR_CLOSEFIGURE: Final = 61 +EMR_FILLPATH: Final = 62 +EMR_STROKEANDFILLPATH: Final = 63 +EMR_STROKEPATH: Final = 64 +EMR_FLATTENPATH: Final = 65 +EMR_WIDENPATH: Final = 66 +EMR_SELECTCLIPPATH: Final = 67 +EMR_ABORTPATH: Final = 68 +EMR_GDICOMMENT: Final = 70 +EMR_FILLRGN: Final = 71 +EMR_FRAMERGN: Final = 72 +EMR_INVERTRGN: Final = 73 +EMR_PAINTRGN: Final = 74 +EMR_EXTSELECTCLIPRGN: Final = 75 +EMR_BITBLT: Final = 76 +EMR_STRETCHBLT: Final = 77 +EMR_MASKBLT: Final = 78 +EMR_PLGBLT: Final = 79 +EMR_SETDIBITSTODEVICE: Final = 80 +EMR_STRETCHDIBITS: Final = 81 +EMR_EXTCREATEFONTINDIRECTW: Final = 82 +EMR_EXTTEXTOUTA: Final = 83 +EMR_EXTTEXTOUTW: Final = 84 +EMR_POLYBEZIER16: Final = 85 +EMR_POLYGON16: Final = 86 +EMR_POLYLINE16: Final = 87 
+EMR_POLYBEZIERTO16: Final = 88 +EMR_POLYLINETO16: Final = 89 +EMR_POLYPOLYLINE16: Final = 90 +EMR_POLYPOLYGON16: Final = 91 +EMR_POLYDRAW16: Final = 92 +EMR_CREATEMONOBRUSH: Final = 93 +EMR_CREATEDIBPATTERNBRUSHPT: Final = 94 +EMR_EXTCREATEPEN: Final = 95 +EMR_POLYTEXTOUTA: Final = 96 +EMR_POLYTEXTOUTW: Final = 97 +EMR_MIN: Final = 1 +EMR_MAX: Final = 97 + +PANOSE_COUNT: Final = 10 +PAN_FAMILYTYPE_INDEX: Final = 0 +PAN_SERIFSTYLE_INDEX: Final = 1 +PAN_WEIGHT_INDEX: Final = 2 +PAN_PROPORTION_INDEX: Final = 3 +PAN_CONTRAST_INDEX: Final = 4 +PAN_STROKEVARIATION_INDEX: Final = 5 +PAN_ARMSTYLE_INDEX: Final = 6 +PAN_LETTERFORM_INDEX: Final = 7 +PAN_MIDLINE_INDEX: Final = 8 +PAN_XHEIGHT_INDEX: Final = 9 +PAN_CULTURE_LATIN: Final = 0 +PAN_ANY: Final = 0 +PAN_NO_FIT: Final = 1 +PAN_FAMILY_TEXT_DISPLAY: Final = 2 +PAN_FAMILY_SCRIPT: Final = 3 +PAN_FAMILY_DECORATIVE: Final = 4 +PAN_FAMILY_PICTORIAL: Final = 5 +PAN_SERIF_COVE: Final = 2 +PAN_SERIF_OBTUSE_COVE: Final = 3 +PAN_SERIF_SQUARE_COVE: Final = 4 +PAN_SERIF_OBTUSE_SQUARE_COVE: Final = 5 +PAN_SERIF_SQUARE: Final = 6 +PAN_SERIF_THIN: Final = 7 +PAN_SERIF_BONE: Final = 8 +PAN_SERIF_EXAGGERATED: Final = 9 +PAN_SERIF_TRIANGLE: Final = 10 +PAN_SERIF_NORMAL_SANS: Final = 11 +PAN_SERIF_OBTUSE_SANS: Final = 12 +PAN_SERIF_PERP_SANS: Final = 13 +PAN_SERIF_FLARED: Final = 14 +PAN_SERIF_ROUNDED: Final = 15 +PAN_WEIGHT_VERY_LIGHT: Final = 2 +PAN_WEIGHT_LIGHT: Final = 3 +PAN_WEIGHT_THIN: Final = 4 +PAN_WEIGHT_BOOK: Final = 5 +PAN_WEIGHT_MEDIUM: Final = 6 +PAN_WEIGHT_DEMI: Final = 7 +PAN_WEIGHT_BOLD: Final = 8 +PAN_WEIGHT_HEAVY: Final = 9 +PAN_WEIGHT_BLACK: Final = 10 +PAN_WEIGHT_NORD: Final = 11 +PAN_PROP_OLD_STYLE: Final = 2 +PAN_PROP_MODERN: Final = 3 +PAN_PROP_EVEN_WIDTH: Final = 4 +PAN_PROP_EXPANDED: Final = 5 +PAN_PROP_CONDENSED: Final = 6 +PAN_PROP_VERY_EXPANDED: Final = 7 +PAN_PROP_VERY_CONDENSED: Final = 8 +PAN_PROP_MONOSPACED: Final = 9 +PAN_CONTRAST_NONE: Final = 2 +PAN_CONTRAST_VERY_LOW: Final = 3 +PAN_CONTRAST_LOW: Final = 4 +PAN_CONTRAST_MEDIUM_LOW: Final = 5 +PAN_CONTRAST_MEDIUM: Final = 6 +PAN_CONTRAST_MEDIUM_HIGH: Final = 7 +PAN_CONTRAST_HIGH: Final = 8 +PAN_CONTRAST_VERY_HIGH: Final = 9 +PAN_STROKE_GRADUAL_DIAG: Final = 2 +PAN_STROKE_GRADUAL_TRAN: Final = 3 +PAN_STROKE_GRADUAL_VERT: Final = 4 +PAN_STROKE_GRADUAL_HORZ: Final = 5 +PAN_STROKE_RAPID_VERT: Final = 6 +PAN_STROKE_RAPID_HORZ: Final = 7 +PAN_STROKE_INSTANT_VERT: Final = 8 +PAN_STRAIGHT_ARMS_HORZ: Final = 2 +PAN_STRAIGHT_ARMS_WEDGE: Final = 3 +PAN_STRAIGHT_ARMS_VERT: Final = 4 +PAN_STRAIGHT_ARMS_SINGLE_SERIF: Final = 5 +PAN_STRAIGHT_ARMS_DOUBLE_SERIF: Final = 6 +PAN_BENT_ARMS_HORZ: Final = 7 +PAN_BENT_ARMS_WEDGE: Final = 8 +PAN_BENT_ARMS_VERT: Final = 9 +PAN_BENT_ARMS_SINGLE_SERIF: Final = 10 +PAN_BENT_ARMS_DOUBLE_SERIF: Final = 11 +PAN_LETT_NORMAL_CONTACT: Final = 2 +PAN_LETT_NORMAL_WEIGHTED: Final = 3 +PAN_LETT_NORMAL_BOXED: Final = 4 +PAN_LETT_NORMAL_FLATTENED: Final = 5 +PAN_LETT_NORMAL_ROUNDED: Final = 6 +PAN_LETT_NORMAL_OFF_CENTER: Final = 7 +PAN_LETT_NORMAL_SQUARE: Final = 8 +PAN_LETT_OBLIQUE_CONTACT: Final = 9 +PAN_LETT_OBLIQUE_WEIGHTED: Final = 10 +PAN_LETT_OBLIQUE_BOXED: Final = 11 +PAN_LETT_OBLIQUE_FLATTENED: Final = 12 +PAN_LETT_OBLIQUE_ROUNDED: Final = 13 +PAN_LETT_OBLIQUE_OFF_CENTER: Final = 14 +PAN_LETT_OBLIQUE_SQUARE: Final = 15 +PAN_MIDLINE_STANDARD_TRIMMED: Final = 2 +PAN_MIDLINE_STANDARD_POINTED: Final = 3 +PAN_MIDLINE_STANDARD_SERIFED: Final = 4 +PAN_MIDLINE_HIGH_TRIMMED: Final = 5 +PAN_MIDLINE_HIGH_POINTED: Final = 6 +PAN_MIDLINE_HIGH_SERIFED: Final = 7 
+PAN_MIDLINE_CONSTANT_TRIMMED: Final = 8 +PAN_MIDLINE_CONSTANT_POINTED: Final = 9 +PAN_MIDLINE_CONSTANT_SERIFED: Final = 10 +PAN_MIDLINE_LOW_TRIMMED: Final = 11 +PAN_MIDLINE_LOW_POINTED: Final = 12 +PAN_MIDLINE_LOW_SERIFED: Final = 13 +PAN_XHEIGHT_CONSTANT_SMALL: Final = 2 +PAN_XHEIGHT_CONSTANT_STD: Final = 3 +PAN_XHEIGHT_CONSTANT_LARGE: Final = 4 +PAN_XHEIGHT_DUCKING_SMALL: Final = 5 +PAN_XHEIGHT_DUCKING_STD: Final = 6 +PAN_XHEIGHT_DUCKING_LARGE: Final = 7 +ELF_VENDOR_SIZE: Final = 4 +ELF_VERSION: Final = 0 +ELF_CULTURE_LATIN: Final = 0 +RASTER_FONTTYPE: Final = 1 +DEVICE_FONTTYPE: Final = 2 +TRUETYPE_FONTTYPE: Final = 4 def PALETTEINDEX(i: int) -> int: ... -PC_RESERVED: int -PC_EXPLICIT: int -PC_NOCOLLAPSE: int +PC_RESERVED: Final = 1 +PC_EXPLICIT: Final = 2 +PC_NOCOLLAPSE: Final = 4 def GetRValue(rgb: int) -> int: ... def GetGValue(rgb: int) -> int: ... def GetBValue(rgb: int) -> int: ... -TRANSPARENT: int -OPAQUE: int -BKMODE_LAST: int -GM_COMPATIBLE: int -GM_ADVANCED: int -GM_LAST: int -PT_CLOSEFIGURE: int -PT_LINETO: int -PT_BEZIERTO: int -PT_MOVETO: int -MM_TEXT: int -MM_LOMETRIC: int -MM_HIMETRIC: int -MM_LOENGLISH: int -MM_HIENGLISH: int -MM_TWIPS: int -MM_ISOTROPIC: int -MM_ANISOTROPIC: int -MM_MIN: int -MM_MAX: int -MM_MAX_FIXEDSCALE: int -ABSOLUTE: int -RELATIVE: int -WHITE_BRUSH: int -LTGRAY_BRUSH: int -GRAY_BRUSH: int -DKGRAY_BRUSH: int -BLACK_BRUSH: int -NULL_BRUSH: int -HOLLOW_BRUSH: int -WHITE_PEN: int -BLACK_PEN: int -NULL_PEN: int -OEM_FIXED_FONT: int -ANSI_FIXED_FONT: int -ANSI_VAR_FONT: int -SYSTEM_FONT: int -DEVICE_DEFAULT_FONT: int -DEFAULT_PALETTE: int -SYSTEM_FIXED_FONT: int -STOCK_LAST: int -CLR_INVALID: int -DC_BRUSH: int -DC_PEN: int -STATUS_WAIT_0: int -STATUS_ABANDONED_WAIT_0: int -STATUS_USER_APC: int -STATUS_TIMEOUT: int -STATUS_PENDING: int -STATUS_SEGMENT_NOTIFICATION: int -STATUS_GUARD_PAGE_VIOLATION: int -STATUS_DATATYPE_MISALIGNMENT: int -STATUS_BREAKPOINT: int -STATUS_SINGLE_STEP: int -STATUS_ACCESS_VIOLATION: int -STATUS_IN_PAGE_ERROR: int -STATUS_INVALID_HANDLE: int -STATUS_NO_MEMORY: int -STATUS_ILLEGAL_INSTRUCTION: int -STATUS_NONCONTINUABLE_EXCEPTION: int -STATUS_INVALID_DISPOSITION: int -STATUS_ARRAY_BOUNDS_EXCEEDED: int -STATUS_FLOAT_DENORMAL_OPERAND: int -STATUS_FLOAT_DIVIDE_BY_ZERO: int -STATUS_FLOAT_INEXACT_RESULT: int -STATUS_FLOAT_INVALID_OPERATION: int -STATUS_FLOAT_OVERFLOW: int -STATUS_FLOAT_STACK_CHECK: int -STATUS_FLOAT_UNDERFLOW: int -STATUS_INTEGER_DIVIDE_BY_ZERO: int -STATUS_INTEGER_OVERFLOW: int -STATUS_PRIVILEGED_INSTRUCTION: int -STATUS_STACK_OVERFLOW: int -STATUS_CONTROL_C_EXIT: int -WAIT_FAILED: int -WAIT_OBJECT_0: int -WAIT_ABANDONED: int -WAIT_ABANDONED_0: int -WAIT_TIMEOUT: int -WAIT_IO_COMPLETION: int -STILL_ACTIVE: int -EXCEPTION_ACCESS_VIOLATION: int -EXCEPTION_DATATYPE_MISALIGNMENT: int -EXCEPTION_BREAKPOINT: int -EXCEPTION_SINGLE_STEP: int -EXCEPTION_ARRAY_BOUNDS_EXCEEDED: int -EXCEPTION_FLT_DENORMAL_OPERAND: int -EXCEPTION_FLT_DIVIDE_BY_ZERO: int -EXCEPTION_FLT_INEXACT_RESULT: int -EXCEPTION_FLT_INVALID_OPERATION: int -EXCEPTION_FLT_OVERFLOW: int -EXCEPTION_FLT_STACK_CHECK: int -EXCEPTION_FLT_UNDERFLOW: int -EXCEPTION_INT_DIVIDE_BY_ZERO: int -EXCEPTION_INT_OVERFLOW: int -EXCEPTION_PRIV_INSTRUCTION: int -EXCEPTION_IN_PAGE_ERROR: int -EXCEPTION_ILLEGAL_INSTRUCTION: int -EXCEPTION_NONCONTINUABLE_EXCEPTION: int -EXCEPTION_STACK_OVERFLOW: int -EXCEPTION_INVALID_DISPOSITION: int -EXCEPTION_GUARD_PAGE: int -EXCEPTION_INVALID_HANDLE: int -CONTROL_C_EXIT: int -SPI_GETBEEP: int -SPI_SETBEEP: int -SPI_GETMOUSE: int -SPI_SETMOUSE: 
int -SPI_GETBORDER: int -SPI_SETBORDER: int -SPI_GETKEYBOARDSPEED: int -SPI_SETKEYBOARDSPEED: int -SPI_LANGDRIVER: int -SPI_ICONHORIZONTALSPACING: int -SPI_GETSCREENSAVETIMEOUT: int -SPI_SETSCREENSAVETIMEOUT: int -SPI_GETSCREENSAVEACTIVE: int -SPI_SETSCREENSAVEACTIVE: int -SPI_GETGRIDGRANULARITY: int -SPI_SETGRIDGRANULARITY: int -SPI_SETDESKWALLPAPER: int -SPI_SETDESKPATTERN: int -SPI_GETKEYBOARDDELAY: int -SPI_SETKEYBOARDDELAY: int -SPI_ICONVERTICALSPACING: int -SPI_GETICONTITLEWRAP: int -SPI_SETICONTITLEWRAP: int -SPI_GETMENUDROPALIGNMENT: int -SPI_SETMENUDROPALIGNMENT: int -SPI_SETDOUBLECLKWIDTH: int -SPI_SETDOUBLECLKHEIGHT: int -SPI_GETICONTITLELOGFONT: int -SPI_SETDOUBLECLICKTIME: int -SPI_SETMOUSEBUTTONSWAP: int -SPI_SETICONTITLELOGFONT: int -SPI_GETFASTTASKSWITCH: int -SPI_SETFASTTASKSWITCH: int -SPI_SETDRAGFULLWINDOWS: int -SPI_GETDRAGFULLWINDOWS: int -SPI_GETNONCLIENTMETRICS: int -SPI_SETNONCLIENTMETRICS: int -SPI_GETMINIMIZEDMETRICS: int -SPI_SETMINIMIZEDMETRICS: int -SPI_GETICONMETRICS: int -SPI_SETICONMETRICS: int -SPI_SETWORKAREA: int -SPI_GETWORKAREA: int -SPI_SETPENWINDOWS: int -SPI_GETFILTERKEYS: int -SPI_SETFILTERKEYS: int -SPI_GETTOGGLEKEYS: int -SPI_SETTOGGLEKEYS: int -SPI_GETMOUSEKEYS: int -SPI_SETMOUSEKEYS: int -SPI_GETSHOWSOUNDS: int -SPI_SETSHOWSOUNDS: int -SPI_GETSTICKYKEYS: int -SPI_SETSTICKYKEYS: int -SPI_GETACCESSTIMEOUT: int -SPI_SETACCESSTIMEOUT: int -SPI_GETSERIALKEYS: int -SPI_SETSERIALKEYS: int -SPI_GETSOUNDSENTRY: int -SPI_SETSOUNDSENTRY: int -SPI_GETHIGHCONTRAST: int -SPI_SETHIGHCONTRAST: int -SPI_GETKEYBOARDPREF: int -SPI_SETKEYBOARDPREF: int -SPI_GETSCREENREADER: int -SPI_SETSCREENREADER: int -SPI_GETANIMATION: int -SPI_SETANIMATION: int -SPI_GETFONTSMOOTHING: int -SPI_SETFONTSMOOTHING: int -SPI_SETDRAGWIDTH: int -SPI_SETDRAGHEIGHT: int -SPI_SETHANDHELD: int -SPI_GETLOWPOWERTIMEOUT: int -SPI_GETPOWEROFFTIMEOUT: int -SPI_SETLOWPOWERTIMEOUT: int -SPI_SETPOWEROFFTIMEOUT: int -SPI_GETLOWPOWERACTIVE: int -SPI_GETPOWEROFFACTIVE: int -SPI_SETLOWPOWERACTIVE: int -SPI_SETPOWEROFFACTIVE: int -SPI_SETCURSORS: int -SPI_SETICONS: int -SPI_GETDEFAULTINPUTLANG: int -SPI_SETDEFAULTINPUTLANG: int -SPI_SETLANGTOGGLE: int -SPI_GETWINDOWSEXTENSION: int -SPI_SETMOUSETRAILS: int -SPI_GETMOUSETRAILS: int -SPI_GETSNAPTODEFBUTTON: int -SPI_SETSNAPTODEFBUTTON: int -SPI_SETSCREENSAVERRUNNING: int -SPI_SCREENSAVERRUNNING: int -SPI_GETMOUSEHOVERWIDTH: int -SPI_SETMOUSEHOVERWIDTH: int -SPI_GETMOUSEHOVERHEIGHT: int -SPI_SETMOUSEHOVERHEIGHT: int -SPI_GETMOUSEHOVERTIME: int -SPI_SETMOUSEHOVERTIME: int -SPI_GETWHEELSCROLLLINES: int -SPI_SETWHEELSCROLLLINES: int -SPI_GETMENUSHOWDELAY: int -SPI_SETMENUSHOWDELAY: int -SPI_GETSHOWIMEUI: int -SPI_SETSHOWIMEUI: int -SPI_GETMOUSESPEED: int -SPI_SETMOUSESPEED: int -SPI_GETSCREENSAVERRUNNING: int -SPI_GETDESKWALLPAPER: int -SPI_GETACTIVEWINDOWTRACKING: int -SPI_SETACTIVEWINDOWTRACKING: int -SPI_GETMENUANIMATION: int -SPI_SETMENUANIMATION: int -SPI_GETCOMBOBOXANIMATION: int -SPI_SETCOMBOBOXANIMATION: int -SPI_GETGRADIENTCAPTIONS: int -SPI_SETGRADIENTCAPTIONS: int -SPI_GETKEYBOARDCUES: int -SPI_SETKEYBOARDCUES: int -SPI_GETMENUUNDERLINES: int -SPI_SETMENUUNDERLINES: int -SPI_GETACTIVEWNDTRKZORDER: int -SPI_SETACTIVEWNDTRKZORDER: int -SPI_GETHOTTRACKING: int -SPI_SETHOTTRACKING: int -SPI_GETMENUFADE: int -SPI_SETMENUFADE: int -SPI_GETSELECTIONFADE: int -SPI_SETSELECTIONFADE: int -SPI_GETTOOLTIPANIMATION: int -SPI_SETTOOLTIPANIMATION: int -SPI_GETTOOLTIPFADE: int -SPI_SETTOOLTIPFADE: int -SPI_GETCURSORSHADOW: int -SPI_SETCURSORSHADOW: int 
-SPI_GETMOUSESONAR: int -SPI_SETMOUSESONAR: int -SPI_GETMOUSECLICKLOCK: int -SPI_SETMOUSECLICKLOCK: int -SPI_GETMOUSEVANISH: int -SPI_SETMOUSEVANISH: int -SPI_GETFLATMENU: int -SPI_SETFLATMENU: int -SPI_GETDROPSHADOW: int -SPI_SETDROPSHADOW: int -SPI_GETBLOCKSENDINPUTRESETS: int -SPI_SETBLOCKSENDINPUTRESETS: int -SPI_GETUIEFFECTS: int -SPI_SETUIEFFECTS: int -SPI_GETFOREGROUNDLOCKTIMEOUT: int -SPI_SETFOREGROUNDLOCKTIMEOUT: int -SPI_GETACTIVEWNDTRKTIMEOUT: int -SPI_SETACTIVEWNDTRKTIMEOUT: int -SPI_GETFOREGROUNDFLASHCOUNT: int -SPI_SETFOREGROUNDFLASHCOUNT: int -SPI_GETCARETWIDTH: int -SPI_SETCARETWIDTH: int -SPI_GETMOUSECLICKLOCKTIME: int -SPI_SETMOUSECLICKLOCKTIME: int -SPI_GETFONTSMOOTHINGTYPE: int -SPI_SETFONTSMOOTHINGTYPE: int -SPI_GETFONTSMOOTHINGCONTRAST: int -SPI_SETFONTSMOOTHINGCONTRAST: int -SPI_GETFOCUSBORDERWIDTH: int -SPI_SETFOCUSBORDERWIDTH: int -SPI_GETFOCUSBORDERHEIGHT: int -SPI_SETFOCUSBORDERHEIGHT: int -SPI_GETFONTSMOOTHINGORIENTATION: int -SPI_SETFONTSMOOTHINGORIENTATION: int -SPIF_UPDATEINIFILE: int -SPIF_SENDWININICHANGE: int -SPIF_SENDCHANGE: int -FE_FONTSMOOTHINGSTANDARD: int -FE_FONTSMOOTHINGCLEARTYPE: int -FE_FONTSMOOTHINGDOCKING: int -METRICS_USEDEFAULT: int -ARW_BOTTOMLEFT: int -ARW_BOTTOMRIGHT: int -ARW_TOPLEFT: int -ARW_TOPRIGHT: int -ARW_STARTMASK: int -ARW_STARTRIGHT: int -ARW_STARTTOP: int -ARW_LEFT: int -ARW_RIGHT: int -ARW_UP: int -ARW_DOWN: int -ARW_HIDE: int -SERKF_SERIALKEYSON: int -SERKF_AVAILABLE: int -SERKF_INDICATOR: int -HCF_HIGHCONTRASTON: int -HCF_AVAILABLE: int -HCF_HOTKEYACTIVE: int -HCF_CONFIRMHOTKEY: int -HCF_HOTKEYSOUND: int -HCF_INDICATOR: int -HCF_HOTKEYAVAILABLE: int -CDS_UPDATEREGISTRY: int -CDS_TEST: int -CDS_FULLSCREEN: int -CDS_GLOBAL: int -CDS_SET_PRIMARY: int -CDS_RESET: int -CDS_SETRECT: int -CDS_NORESET: int -DISP_CHANGE_SUCCESSFUL: int -DISP_CHANGE_RESTART: int -DISP_CHANGE_FAILED: int -DISP_CHANGE_BADMODE: int -DISP_CHANGE_NOTUPDATED: int -DISP_CHANGE_BADFLAGS: int -DISP_CHANGE_BADPARAM: int -DISP_CHANGE_BADDUALVIEW: int -ENUM_CURRENT_SETTINGS: int -ENUM_REGISTRY_SETTINGS: int -FKF_FILTERKEYSON: int -FKF_AVAILABLE: int -FKF_HOTKEYACTIVE: int -FKF_CONFIRMHOTKEY: int -FKF_HOTKEYSOUND: int -FKF_INDICATOR: int -FKF_CLICKON: int -SKF_STICKYKEYSON: int -SKF_AVAILABLE: int -SKF_HOTKEYACTIVE: int -SKF_CONFIRMHOTKEY: int -SKF_HOTKEYSOUND: int -SKF_INDICATOR: int -SKF_AUDIBLEFEEDBACK: int -SKF_TRISTATE: int -SKF_TWOKEYSOFF: int -SKF_LALTLATCHED: int -SKF_LCTLLATCHED: int -SKF_LSHIFTLATCHED: int -SKF_RALTLATCHED: int -SKF_RCTLLATCHED: int -SKF_RSHIFTLATCHED: int -SKF_LWINLATCHED: int -SKF_RWINLATCHED: int -SKF_LALTLOCKED: int -SKF_LCTLLOCKED: int -SKF_LSHIFTLOCKED: int -SKF_RALTLOCKED: int -SKF_RCTLLOCKED: int -SKF_RSHIFTLOCKED: int -SKF_LWINLOCKED: int -SKF_RWINLOCKED: int -MKF_MOUSEKEYSON: int -MKF_AVAILABLE: int -MKF_HOTKEYACTIVE: int -MKF_CONFIRMHOTKEY: int -MKF_HOTKEYSOUND: int -MKF_INDICATOR: int -MKF_MODIFIERS: int -MKF_REPLACENUMBERS: int -MKF_LEFTBUTTONSEL: int -MKF_RIGHTBUTTONSEL: int -MKF_LEFTBUTTONDOWN: int -MKF_RIGHTBUTTONDOWN: int -MKF_MOUSEMODE: int -ATF_TIMEOUTON: int -ATF_ONOFFFEEDBACK: int -SSGF_NONE: int -SSGF_DISPLAY: int -SSTF_NONE: int -SSTF_CHARS: int -SSTF_BORDER: int -SSTF_DISPLAY: int -SSWF_NONE: int -SSWF_TITLE: int -SSWF_WINDOW: int -SSWF_DISPLAY: int -SSWF_CUSTOM: int -SSF_SOUNDSENTRYON: int -SSF_AVAILABLE: int -SSF_INDICATOR: int -TKF_TOGGLEKEYSON: int -TKF_AVAILABLE: int -TKF_HOTKEYACTIVE: int -TKF_CONFIRMHOTKEY: int -TKF_HOTKEYSOUND: int -TKF_INDICATOR: int -SLE_ERROR: int -SLE_MINORERROR: int -SLE_WARNING: int 
-MONITOR_DEFAULTTONULL: int -MONITOR_DEFAULTTOPRIMARY: int -MONITOR_DEFAULTTONEAREST: int -MONITORINFOF_PRIMARY: int -CHILDID_SELF: int -INDEXID_OBJECT: int -INDEXID_CONTAINER: int -OBJID_WINDOW: int -OBJID_SYSMENU: int -OBJID_TITLEBAR: int -OBJID_MENU: int -OBJID_CLIENT: int -OBJID_VSCROLL: int -OBJID_HSCROLL: int -OBJID_SIZEGRIP: int -OBJID_CARET: int -OBJID_CURSOR: int -OBJID_ALERT: int -OBJID_SOUND: int -EVENT_MIN: int -EVENT_MAX: int -EVENT_SYSTEM_SOUND: int -EVENT_SYSTEM_ALERT: int -EVENT_SYSTEM_FOREGROUND: int -EVENT_SYSTEM_MENUSTART: int -EVENT_SYSTEM_MENUEND: int -EVENT_SYSTEM_MENUPOPUPSTART: int -EVENT_SYSTEM_MENUPOPUPEND: int -EVENT_SYSTEM_CAPTURESTART: int -EVENT_SYSTEM_CAPTUREEND: int -EVENT_SYSTEM_MOVESIZESTART: int -EVENT_SYSTEM_MOVESIZEEND: int -EVENT_SYSTEM_CONTEXTHELPSTART: int -EVENT_SYSTEM_CONTEXTHELPEND: int -EVENT_SYSTEM_DRAGDROPSTART: int -EVENT_SYSTEM_DRAGDROPEND: int -EVENT_SYSTEM_DIALOGSTART: int -EVENT_SYSTEM_DIALOGEND: int -EVENT_SYSTEM_SCROLLINGSTART: int -EVENT_SYSTEM_SCROLLINGEND: int -EVENT_SYSTEM_SWITCHSTART: int -EVENT_SYSTEM_SWITCHEND: int -EVENT_SYSTEM_MINIMIZESTART: int -EVENT_SYSTEM_MINIMIZEEND: int -EVENT_OBJECT_CREATE: int -EVENT_OBJECT_DESTROY: int -EVENT_OBJECT_SHOW: int -EVENT_OBJECT_HIDE: int -EVENT_OBJECT_REORDER: int -EVENT_OBJECT_FOCUS: int -EVENT_OBJECT_SELECTION: int -EVENT_OBJECT_SELECTIONADD: int -EVENT_OBJECT_SELECTIONREMOVE: int -EVENT_OBJECT_SELECTIONWITHIN: int -EVENT_OBJECT_STATECHANGE: int -EVENT_OBJECT_LOCATIONCHANGE: int -EVENT_OBJECT_NAMECHANGE: int -EVENT_OBJECT_DESCRIPTIONCHANGE: int -EVENT_OBJECT_VALUECHANGE: int -EVENT_OBJECT_PARENTCHANGE: int -EVENT_OBJECT_HELPCHANGE: int -EVENT_OBJECT_DEFACTIONCHANGE: int -EVENT_OBJECT_ACCELERATORCHANGE: int -SOUND_SYSTEM_STARTUP: int -SOUND_SYSTEM_SHUTDOWN: int -SOUND_SYSTEM_BEEP: int -SOUND_SYSTEM_ERROR: int -SOUND_SYSTEM_QUESTION: int -SOUND_SYSTEM_WARNING: int -SOUND_SYSTEM_INFORMATION: int -SOUND_SYSTEM_MAXIMIZE: int -SOUND_SYSTEM_MINIMIZE: int -SOUND_SYSTEM_RESTOREUP: int -SOUND_SYSTEM_RESTOREDOWN: int -SOUND_SYSTEM_APPSTART: int -SOUND_SYSTEM_FAULT: int -SOUND_SYSTEM_APPEND: int -SOUND_SYSTEM_MENUCOMMAND: int -SOUND_SYSTEM_MENUPOPUP: int -CSOUND_SYSTEM: int -ALERT_SYSTEM_INFORMATIONAL: int -ALERT_SYSTEM_WARNING: int -ALERT_SYSTEM_ERROR: int -ALERT_SYSTEM_QUERY: int -ALERT_SYSTEM_CRITICAL: int -CALERT_SYSTEM: int -WINEVENT_OUTOFCONTEXT: int -WINEVENT_SKIPOWNTHREAD: int -WINEVENT_SKIPOWNPROCESS: int -WINEVENT_INCONTEXT: int -GUI_CARETBLINKING: int -GUI_INMOVESIZE: int -GUI_INMENUMODE: int -GUI_SYSTEMMENUMODE: int -GUI_POPUPMENUMODE: int -STATE_SYSTEM_UNAVAILABLE: int -STATE_SYSTEM_SELECTED: int -STATE_SYSTEM_FOCUSED: int -STATE_SYSTEM_PRESSED: int -STATE_SYSTEM_CHECKED: int -STATE_SYSTEM_MIXED: int -STATE_SYSTEM_READONLY: int -STATE_SYSTEM_HOTTRACKED: int -STATE_SYSTEM_DEFAULT: int -STATE_SYSTEM_EXPANDED: int -STATE_SYSTEM_COLLAPSED: int -STATE_SYSTEM_BUSY: int -STATE_SYSTEM_FLOATING: int -STATE_SYSTEM_MARQUEED: int -STATE_SYSTEM_ANIMATED: int -STATE_SYSTEM_INVISIBLE: int -STATE_SYSTEM_OFFSCREEN: int -STATE_SYSTEM_SIZEABLE: int -STATE_SYSTEM_MOVEABLE: int -STATE_SYSTEM_SELFVOICING: int -STATE_SYSTEM_FOCUSABLE: int -STATE_SYSTEM_SELECTABLE: int -STATE_SYSTEM_LINKED: int -STATE_SYSTEM_TRAVERSED: int -STATE_SYSTEM_MULTISELECTABLE: int -STATE_SYSTEM_EXTSELECTABLE: int -STATE_SYSTEM_ALERT_LOW: int -STATE_SYSTEM_ALERT_MEDIUM: int -STATE_SYSTEM_ALERT_HIGH: int -STATE_SYSTEM_VALID: int -CCHILDREN_TITLEBAR: int -CCHILDREN_SCROLLBAR: int -CURSOR_SHOWING: int -WS_ACTIVECAPTION: int -GA_MIC: int 
-GA_PARENT: int -GA_ROOT: int -GA_ROOTOWNER: int -GA_MAC: int -BF_LEFT: int -BF_TOP: int -BF_RIGHT: int -BF_BOTTOM: int -BF_TOPLEFT: int -BF_TOPRIGHT: int -BF_BOTTOMLEFT: int -BF_BOTTOMRIGHT: int -BF_RECT: int -BF_DIAGONAL: int -BF_DIAGONAL_ENDTOPRIGHT: int -BF_DIAGONAL_ENDTOPLEFT: int -BF_DIAGONAL_ENDBOTTOMLEFT: int -BF_DIAGONAL_ENDBOTTOMRIGHT: int -BF_MIDDLE: int -BF_SOFT: int -BF_ADJUST: int -BF_FLAT: int -BF_MONO: int -DFC_CAPTION: int -DFC_MENU: int -DFC_SCROLL: int -DFC_BUTTON: int -DFC_POPUPMENU: int -DFCS_CAPTIONCLOSE: int -DFCS_CAPTIONMIN: int -DFCS_CAPTIONMAX: int -DFCS_CAPTIONRESTORE: int -DFCS_CAPTIONHELP: int -DFCS_MENUARROW: int -DFCS_MENUCHECK: int -DFCS_MENUBULLET: int -DFCS_MENUARROWRIGHT: int -DFCS_SCROLLUP: int -DFCS_SCROLLDOWN: int -DFCS_SCROLLLEFT: int -DFCS_SCROLLRIGHT: int -DFCS_SCROLLCOMBOBOX: int -DFCS_SCROLLSIZEGRIP: int -DFCS_SCROLLSIZEGRIPRIGHT: int -DFCS_BUTTONCHECK: int -DFCS_BUTTONRADIOIMAGE: int -DFCS_BUTTONRADIOMASK: int -DFCS_BUTTONRADIO: int -DFCS_BUTTON3STATE: int -DFCS_BUTTONPUSH: int -DFCS_INACTIVE: int -DFCS_PUSHED: int -DFCS_CHECKED: int -DFCS_TRANSPARENT: int -DFCS_HOT: int -DFCS_ADJUSTRECT: int -DFCS_FLAT: int -DFCS_MONO: int -DC_ACTIVE: int -DC_SMALLCAP: int -DC_ICON: int -DC_TEXT: int -DC_INBUTTON: int -DC_GRADIENT: int -IDANI_OPEN: int -IDANI_CLOSE: int -IDANI_CAPTION: int -CF_TEXT: int -CF_BITMAP: int -CF_METAFILEPICT: int -CF_SYLK: int -CF_DIF: int -CF_TIFF: int -CF_OEMTEXT: int -CF_DIB: int -CF_PALETTE: int -CF_PENDATA: int -CF_RIFF: int -CF_WAVE: int -CF_UNICODETEXT: int -CF_ENHMETAFILE: int -CF_HDROP: int -CF_LOCALE: int -CF_DIBV5: int -CF_MAX: int -CF_OWNERDISPLAY: int -CF_DSPTEXT: int -CF_DSPBITMAP: int -CF_DSPMETAFILEPICT: int -CF_DSPENHMETAFILE: int -CF_PRIVATEFIRST: int -CF_PRIVATELAST: int -CF_GDIOBJFIRST: int -CF_GDIOBJLAST: int -FVIRTKEY: int -FNOINVERT: int -FSHIFT: int -FCONTROL: int -FALT: int -WPF_SETMINPOSITION: int -WPF_RESTORETOMAXIMIZED: int -ODT_MENU: int -ODT_COMBOBOX: int -ODT_BUTTON: int -ODT_STATIC: int -ODA_DRAWENTIRE: int -ODA_SELECT: int -ODA_FOCUS: int -ODS_SELECTED: int -ODS_GRAYED: int -ODS_DISABLED: int -ODS_CHECKED: int -ODS_FOCUS: int -ODS_DEFAULT: int -ODS_COMBOBOXEDIT: int -ODS_HOTLIGHT: int -ODS_INACTIVE: int -PM_NOREMOVE: int -PM_REMOVE: int -PM_NOYIELD: int -MOD_ALT: int -MOD_CONTROL: int -MOD_SHIFT: int -MOD_WIN: int -IDHOT_SNAPWINDOW: int -IDHOT_SNAPDESKTOP: int -ENDSESSION_LOGOFF: int -EWX_LOGOFF: int -EWX_SHUTDOWN: int -EWX_REBOOT: int -EWX_FORCE: int -EWX_POWEROFF: int -EWX_FORCEIFHUNG: int -BSM_ALLCOMPONENTS: int -BSM_VXDS: int -BSM_NETDRIVER: int -BSM_INSTALLABLEDRIVERS: int -BSM_APPLICATIONS: int -BSM_ALLDESKTOPS: int -BSF_QUERY: int -BSF_IGNORECURRENTTASK: int -BSF_FLUSHDISK: int -BSF_NOHANG: int -BSF_POSTMESSAGE: int -BSF_FORCEIFHUNG: int -BSF_NOTIMEOUTIFNOTHUNG: int -BROADCAST_QUERY_DENY: int -DBWF_LPARAMPOINTER: int -SWP_NOSIZE: int -SWP_NOMOVE: int -SWP_NOZORDER: int -SWP_NOREDRAW: int -SWP_NOACTIVATE: int -SWP_FRAMECHANGED: int -SWP_SHOWWINDOW: int -SWP_HIDEWINDOW: int -SWP_NOCOPYBITS: int -SWP_NOOWNERZORDER: int -SWP_NOSENDCHANGING: int -SWP_DRAWFRAME: int -SWP_NOREPOSITION: int -SWP_DEFERERASE: int -SWP_ASYNCWINDOWPOS: int -DLGWINDOWEXTRA: int -KEYEVENTF_EXTENDEDKEY: int -KEYEVENTF_KEYUP: int -MOUSEEVENTF_MOVE: int -MOUSEEVENTF_LEFTDOWN: int -MOUSEEVENTF_LEFTUP: int -MOUSEEVENTF_RIGHTDOWN: int -MOUSEEVENTF_RIGHTUP: int -MOUSEEVENTF_MIDDLEDOWN: int -MOUSEEVENTF_MIDDLEUP: int -MOUSEEVENTF_ABSOLUTE: int -INPUT_MOUSE: int -INPUT_KEYBOARD: int -INPUT_HARDWARE: int -MWMO_WAITALL: int 
-MWMO_ALERTABLE: int -MWMO_INPUTAVAILABLE: int -QS_KEY: int -QS_MOUSEMOVE: int -QS_MOUSEBUTTON: int -QS_POSTMESSAGE: int -QS_TIMER: int -QS_PAINT: int -QS_SENDMESSAGE: int -QS_HOTKEY: int -QS_MOUSE: int -QS_INPUT: int -QS_ALLEVENTS: int -QS_ALLINPUT: int -IMN_CLOSESTATUSWINDOW: int -IMN_OPENSTATUSWINDOW: int -IMN_CHANGECANDIDATE: int -IMN_CLOSECANDIDATE: int -IMN_OPENCANDIDATE: int -IMN_SETCONVERSIONMODE: int -IMN_SETSENTENCEMODE: int -IMN_SETOPENSTATUS: int -IMN_SETCANDIDATEPOS: int -IMN_SETCOMPOSITIONFONT: int -IMN_SETCOMPOSITIONWINDOW: int -IMN_SETSTATUSWINDOWPOS: int -IMN_GUIDELINE: int -IMN_PRIVATE: int -HELP_CONTEXT: int -HELP_QUIT: int -HELP_INDEX: int -HELP_CONTENTS: int -HELP_HELPONHELP: int -HELP_SETINDEX: int -HELP_SETCONTENTS: int -HELP_CONTEXTPOPUP: int -HELP_FORCEFILE: int -HELP_KEY: int -HELP_COMMAND: int -HELP_PARTIALKEY: int -HELP_MULTIKEY: int -HELP_SETWINPOS: int -HELP_CONTEXTMENU: int -HELP_FINDER: int -HELP_WM_HELP: int -HELP_SETPOPUP_POS: int -HELP_TCARD: int -HELP_TCARD_DATA: int -HELP_TCARD_OTHER_CALLER: int -IDH_NO_HELP: int -IDH_MISSING_CONTEXT: int -IDH_GENERIC_HELP_BUTTON: int -IDH_OK: int -IDH_CANCEL: int -IDH_HELP: int -GR_GDIOBJECTS: int -GR_USEROBJECTS: int -SRCCOPY: int -SRCPAINT: int -SRCAND: int -SRCINVERT: int -SRCERASE: int -NOTSRCCOPY: int -NOTSRCERASE: int -MERGECOPY: int -MERGEPAINT: int -PATCOPY: int -PATPAINT: int -PATINVERT: int -DSTINVERT: int -BLACKNESS: int -WHITENESS: int -R2_BLACK: int -R2_NOTMERGEPEN: int -R2_MASKNOTPEN: int -R2_NOTCOPYPEN: int -R2_MASKPENNOT: int -R2_NOT: int -R2_XORPEN: int -R2_NOTMASKPEN: int -R2_MASKPEN: int -R2_NOTXORPEN: int -R2_NOP: int -R2_MERGENOTPEN: int -R2_COPYPEN: int -R2_MERGEPENNOT: int -R2_MERGEPEN: int -R2_WHITE: int -R2_LAST: int -GDI_ERROR: int -ERROR: int -NULLREGION: int -SIMPLEREGION: int -COMPLEXREGION: int -RGN_ERROR: int -RGN_AND: int -RGN_OR: int -RGN_XOR: int -RGN_DIFF: int -RGN_COPY: int -RGN_MIN: int -RGN_MAX: int -BLACKONWHITE: int -WHITEONBLACK: int -COLORONCOLOR: int -HALFTONE: int -MAXSTRETCHBLTMODE: int -STRETCH_ANDSCANS: int -STRETCH_ORSCANS: int -STRETCH_DELETESCANS: int -STRETCH_HALFTONE: int -ALTERNATE: int -WINDING: int -POLYFILL_LAST: int -LAYOUT_RTL: int -LAYOUT_BTT: int -LAYOUT_VBH: int -LAYOUT_ORIENTATIONMASK: int -LAYOUT_BITMAPORIENTATIONPRESERVED: int -TA_NOUPDATECP: int -TA_UPDATECP: int -TA_LEFT: int -TA_RIGHT: int -TA_CENTER: int -TA_TOP: int -TA_BOTTOM: int -TA_BASELINE: int -TA_MASK: int -VTA_BASELINE: int -VTA_LEFT: int -VTA_RIGHT: int -VTA_CENTER: int -VTA_BOTTOM: int -VTA_TOP: int -ETO_GRAYED: int -ETO_OPAQUE: int -ETO_CLIPPED: int -ASPECT_FILTERING: int -DCB_RESET: int -DCB_ACCUMULATE: int -DCB_DIRTY: int -DCB_SET: int -DCB_ENABLE: int -DCB_DISABLE: int -META_SETBKCOLOR: int -META_SETBKMODE: int -META_SETMAPMODE: int -META_SETROP2: int -META_SETRELABS: int -META_SETPOLYFILLMODE: int -META_SETSTRETCHBLTMODE: int -META_SETTEXTCHAREXTRA: int -META_SETTEXTCOLOR: int -META_SETTEXTJUSTIFICATION: int -META_SETWINDOWORG: int -META_SETWINDOWEXT: int -META_SETVIEWPORTORG: int -META_SETVIEWPORTEXT: int -META_OFFSETWINDOWORG: int -META_SCALEWINDOWEXT: int -META_OFFSETVIEWPORTORG: int -META_SCALEVIEWPORTEXT: int -META_LINETO: int -META_MOVETO: int -META_EXCLUDECLIPRECT: int -META_INTERSECTCLIPRECT: int -META_ARC: int -META_ELLIPSE: int -META_FLOODFILL: int -META_PIE: int -META_RECTANGLE: int -META_ROUNDRECT: int -META_PATBLT: int -META_SAVEDC: int -META_SETPIXEL: int -META_OFFSETCLIPRGN: int -META_TEXTOUT: int -META_BITBLT: int -META_STRETCHBLT: int -META_POLYGON: int -META_POLYLINE: 
int -META_ESCAPE: int -META_RESTOREDC: int -META_FILLREGION: int -META_FRAMEREGION: int -META_INVERTREGION: int -META_PAINTREGION: int -META_SELECTCLIPREGION: int -META_SELECTOBJECT: int -META_SETTEXTALIGN: int -META_CHORD: int -META_SETMAPPERFLAGS: int -META_EXTTEXTOUT: int -META_SETDIBTODEV: int -META_SELECTPALETTE: int -META_REALIZEPALETTE: int -META_ANIMATEPALETTE: int -META_SETPALENTRIES: int -META_POLYPOLYGON: int -META_RESIZEPALETTE: int -META_DIBBITBLT: int -META_DIBSTRETCHBLT: int -META_DIBCREATEPATTERNBRUSH: int -META_STRETCHDIB: int -META_EXTFLOODFILL: int -META_DELETEOBJECT: int -META_CREATEPALETTE: int -META_CREATEPATTERNBRUSH: int -META_CREATEPENINDIRECT: int -META_CREATEFONTINDIRECT: int -META_CREATEBRUSHINDIRECT: int -META_CREATEREGION: int -FILE_BEGIN: int -FILE_CURRENT: int -FILE_END: int -FILE_FLAG_WRITE_THROUGH: int -FILE_FLAG_OVERLAPPED: int -FILE_FLAG_NO_BUFFERING: int -FILE_FLAG_RANDOM_ACCESS: int -FILE_FLAG_SEQUENTIAL_SCAN: int -FILE_FLAG_DELETE_ON_CLOSE: int -FILE_FLAG_BACKUP_SEMANTICS: int -FILE_FLAG_POSIX_SEMANTICS: int -CREATE_NEW: int -CREATE_ALWAYS: int -OPEN_EXISTING: int -OPEN_ALWAYS: int -TRUNCATE_EXISTING: int -PIPE_ACCESS_INBOUND: int -PIPE_ACCESS_OUTBOUND: int -PIPE_ACCESS_DUPLEX: int -PIPE_CLIENT_END: int -PIPE_SERVER_END: int -PIPE_WAIT: int -PIPE_NOWAIT: int -PIPE_READMODE_BYTE: int -PIPE_READMODE_MESSAGE: int -PIPE_TYPE_BYTE: int -PIPE_TYPE_MESSAGE: int -PIPE_UNLIMITED_INSTANCES: int -SECURITY_CONTEXT_TRACKING: int -SECURITY_EFFECTIVE_ONLY: int -SECURITY_SQOS_PRESENT: int -SECURITY_VALID_SQOS_FLAGS: int -DTR_CONTROL_DISABLE: int -DTR_CONTROL_ENABLE: int -DTR_CONTROL_HANDSHAKE: int -RTS_CONTROL_DISABLE: int -RTS_CONTROL_ENABLE: int -RTS_CONTROL_HANDSHAKE: int -RTS_CONTROL_TOGGLE: int -GMEM_FIXED: int -GMEM_MOVEABLE: int -GMEM_NOCOMPACT: int -GMEM_NODISCARD: int -GMEM_ZEROINIT: int -GMEM_MODIFY: int -GMEM_DISCARDABLE: int -GMEM_NOT_BANKED: int -GMEM_SHARE: int -GMEM_DDESHARE: int -GMEM_NOTIFY: int -GMEM_LOWER: int -GMEM_VALID_FLAGS: int -GMEM_INVALID_HANDLE: int -GHND: int -GPTR: int -GMEM_DISCARDED: int -GMEM_LOCKCOUNT: int -LMEM_FIXED: int -LMEM_MOVEABLE: int -LMEM_NOCOMPACT: int -LMEM_NODISCARD: int -LMEM_ZEROINIT: int -LMEM_MODIFY: int -LMEM_DISCARDABLE: int -LMEM_VALID_FLAGS: int -LMEM_INVALID_HANDLE: int -LHND: int -LPTR: int -NONZEROLHND: int -NONZEROLPTR: int -LMEM_DISCARDED: int -LMEM_LOCKCOUNT: int -DEBUG_PROCESS: int -DEBUG_ONLY_THIS_PROCESS: int -CREATE_SUSPENDED: int -DETACHED_PROCESS: int -CREATE_NEW_CONSOLE: int -NORMAL_PRIORITY_CLASS: int -IDLE_PRIORITY_CLASS: int -HIGH_PRIORITY_CLASS: int -REALTIME_PRIORITY_CLASS: int -CREATE_NEW_PROCESS_GROUP: int -CREATE_UNICODE_ENVIRONMENT: int -CREATE_SEPARATE_WOW_VDM: int -CREATE_SHARED_WOW_VDM: int -CREATE_DEFAULT_ERROR_MODE: int -CREATE_NO_WINDOW: int -PROFILE_USER: int -PROFILE_KERNEL: int -PROFILE_SERVER: int -THREAD_BASE_PRIORITY_LOWRT: int -THREAD_BASE_PRIORITY_MAX: int -THREAD_BASE_PRIORITY_MIN: int -THREAD_BASE_PRIORITY_IDLE: int -THREAD_PRIORITY_LOWEST: int -THREAD_PRIORITY_BELOW_NORMAL: int -THREAD_PRIORITY_HIGHEST: int -THREAD_PRIORITY_ABOVE_NORMAL: int -THREAD_PRIORITY_ERROR_RETURN: int -THREAD_PRIORITY_TIME_CRITICAL: int -THREAD_PRIORITY_IDLE: int -THREAD_PRIORITY_NORMAL: int -THREAD_MODE_BACKGROUND_BEGIN: int -THREAD_MODE_BACKGROUND_END: int -EXCEPTION_DEBUG_EVENT: int -CREATE_THREAD_DEBUG_EVENT: int -CREATE_PROCESS_DEBUG_EVENT: int -EXIT_THREAD_DEBUG_EVENT: int -EXIT_PROCESS_DEBUG_EVENT: int -LOAD_DLL_DEBUG_EVENT: int -UNLOAD_DLL_DEBUG_EVENT: int -OUTPUT_DEBUG_STRING_EVENT: int 
-RIP_EVENT: int -DRIVE_UNKNOWN: int -DRIVE_NO_ROOT_DIR: int -DRIVE_REMOVABLE: int -DRIVE_FIXED: int -DRIVE_REMOTE: int -DRIVE_CDROM: int -DRIVE_RAMDISK: int -FILE_TYPE_UNKNOWN: int -FILE_TYPE_DISK: int -FILE_TYPE_CHAR: int -FILE_TYPE_PIPE: int -FILE_TYPE_REMOTE: int -NOPARITY: int -ODDPARITY: int -EVENPARITY: int -MARKPARITY: int -SPACEPARITY: int -ONESTOPBIT: int -ONE5STOPBITS: int -TWOSTOPBITS: int -CBR_110: int -CBR_300: int -CBR_600: int -CBR_1200: int -CBR_2400: int -CBR_4800: int -CBR_9600: int -CBR_14400: int -CBR_19200: int -CBR_38400: int -CBR_56000: int -CBR_57600: int -CBR_115200: int -CBR_128000: int -CBR_256000: int -S_QUEUEEMPTY: int -S_THRESHOLD: int -S_ALLTHRESHOLD: int -S_NORMAL: int -S_LEGATO: int -S_STACCATO: int -NMPWAIT_WAIT_FOREVER: int -NMPWAIT_NOWAIT: int -NMPWAIT_USE_DEFAULT_WAIT: int -OF_READ: int -OF_WRITE: int -OF_READWRITE: int -OF_SHARE_COMPAT: int -OF_SHARE_EXCLUSIVE: int -OF_SHARE_DENY_WRITE: int -OF_SHARE_DENY_READ: int -OF_SHARE_DENY_NONE: int -OF_PARSE: int -OF_DELETE: int -OF_VERIFY: int -OF_CANCEL: int -OF_CREATE: int -OF_PROMPT: int -OF_EXIST: int -OF_REOPEN: int -OFS_MAXPATHNAME: int -MAXINTATOM: int -PROCESS_HEAP_REGION: int -PROCESS_HEAP_UNCOMMITTED_RANGE: int -PROCESS_HEAP_ENTRY_BUSY: int -PROCESS_HEAP_ENTRY_MOVEABLE: int -PROCESS_HEAP_ENTRY_DDESHARE: int -SCS_32BIT_BINARY: int -SCS_DOS_BINARY: int -SCS_WOW_BINARY: int -SCS_PIF_BINARY: int -SCS_POSIX_BINARY: int -SCS_OS216_BINARY: int -SEM_FAILCRITICALERRORS: int -SEM_NOGPFAULTERRORBOX: int -SEM_NOALIGNMENTFAULTEXCEPT: int -SEM_NOOPENFILEERRORBOX: int -LOCKFILE_FAIL_IMMEDIATELY: int -LOCKFILE_EXCLUSIVE_LOCK: int -HANDLE_FLAG_INHERIT: int -HANDLE_FLAG_PROTECT_FROM_CLOSE: int -HINSTANCE_ERROR: int -GET_TAPE_MEDIA_INFORMATION: int -GET_TAPE_DRIVE_INFORMATION: int -SET_TAPE_MEDIA_INFORMATION: int -SET_TAPE_DRIVE_INFORMATION: int -FORMAT_MESSAGE_ALLOCATE_BUFFER: int -FORMAT_MESSAGE_IGNORE_INSERTS: int -FORMAT_MESSAGE_FROM_STRING: int -FORMAT_MESSAGE_FROM_HMODULE: int -FORMAT_MESSAGE_FROM_SYSTEM: int -FORMAT_MESSAGE_ARGUMENT_ARRAY: int -FORMAT_MESSAGE_MAX_WIDTH_MASK: int -BACKUP_INVALID: int -BACKUP_DATA: int -BACKUP_EA_DATA: int -BACKUP_SECURITY_DATA: int -BACKUP_ALTERNATE_DATA: int -BACKUP_LINK: int -BACKUP_PROPERTY_DATA: int -BACKUP_OBJECT_ID: int -BACKUP_REPARSE_DATA: int -BACKUP_SPARSE_BLOCK: int -STREAM_NORMAL_ATTRIBUTE: int -STREAM_MODIFIED_WHEN_READ: int -STREAM_CONTAINS_SECURITY: int -STREAM_CONTAINS_PROPERTIES: int -STARTF_USESHOWWINDOW: int -STARTF_USESIZE: int -STARTF_USEPOSITION: int -STARTF_USECOUNTCHARS: int -STARTF_USEFILLATTRIBUTE: int -STARTF_FORCEONFEEDBACK: int -STARTF_FORCEOFFFEEDBACK: int -STARTF_USESTDHANDLES: int -STARTF_USEHOTKEY: int -SHUTDOWN_NORETRY: int -DONT_RESOLVE_DLL_REFERENCES: int -LOAD_LIBRARY_AS_DATAFILE: int -LOAD_WITH_ALTERED_SEARCH_PATH: int -DDD_RAW_TARGET_PATH: int -DDD_REMOVE_DEFINITION: int -DDD_EXACT_MATCH_ON_REMOVE: int -MOVEFILE_REPLACE_EXISTING: int -MOVEFILE_COPY_ALLOWED: int -MOVEFILE_DELAY_UNTIL_REBOOT: int -MAX_COMPUTERNAME_LENGTH: int -LOGON32_LOGON_INTERACTIVE: int -LOGON32_LOGON_NETWORK: int -LOGON32_LOGON_BATCH: int -LOGON32_LOGON_SERVICE: int -LOGON32_LOGON_UNLOCK: int -LOGON32_LOGON_NETWORK_CLEARTEXT: int -LOGON32_LOGON_NEW_CREDENTIALS: int -LOGON32_PROVIDER_DEFAULT: int -LOGON32_PROVIDER_WINNT35: int -LOGON32_PROVIDER_WINNT40: int -LOGON32_PROVIDER_WINNT50: int -VER_PLATFORM_WIN32s: int -VER_PLATFORM_WIN32_WINDOWS: int -VER_PLATFORM_WIN32_NT: int -TC_NORMAL: int -TC_HARDERR: int -TC_GP_TRAP: int -TC_SIGNAL: int -AC_LINE_OFFLINE: int 
-AC_LINE_ONLINE: int -AC_LINE_BACKUP_POWER: int -AC_LINE_UNKNOWN: int -BATTERY_FLAG_HIGH: int -BATTERY_FLAG_LOW: int -BATTERY_FLAG_CRITICAL: int -BATTERY_FLAG_CHARGING: int -BATTERY_FLAG_NO_BATTERY: int -BATTERY_FLAG_UNKNOWN: int -BATTERY_PERCENTAGE_UNKNOWN: int -BATTERY_LIFE_UNKNOWN: int -cchTextLimitDefault: int -EN_MSGFILTER: int -EN_REQUESTRESIZE: int -EN_SELCHANGE: int -EN_DROPFILES: int -EN_PROTECTED: int -EN_CORRECTTEXT: int -EN_STOPNOUNDO: int -EN_IMECHANGE: int -EN_SAVECLIPBOARD: int -EN_OLEOPFAILED: int -ENM_NONE: int -ENM_CHANGE: int -ENM_UPDATE: int -ENM_SCROLL: int -ENM_KEYEVENTS: int -ENM_MOUSEEVENTS: int -ENM_REQUESTRESIZE: int -ENM_SELCHANGE: int -ENM_DROPFILES: int -ENM_PROTECTED: int -ENM_CORRECTTEXT: int -ENM_IMECHANGE: int -ES_SAVESEL: int -ES_SUNKEN: int -ES_DISABLENOSCROLL: int -ES_SELECTIONBAR: int -ES_EX_NOCALLOLEINIT: int -ES_VERTICAL: int -ES_NOIME: int -ES_SELFIME: int -ECO_AUTOWORDSELECTION: int -ECO_AUTOVSCROLL: int -ECO_AUTOHSCROLL: int -ECO_NOHIDESEL: int -ECO_READONLY: int -ECO_WANTRETURN: int -ECO_SAVESEL: int -ECO_SELECTIONBAR: int -ECO_VERTICAL: int -ECOOP_SET: int -ECOOP_OR: int -ECOOP_AND: int -ECOOP_XOR: int -WB_CLASSIFY: int -WB_MOVEWORDLEFT: int -WB_MOVEWORDRIGHT: int -WB_LEFTBREAK: int -WB_RIGHTBREAK: int -WB_MOVEWORDPREV: int -WB_MOVEWORDNEXT: int -WB_PREVBREAK: int -WB_NEXTBREAK: int -PC_FOLLOWING: int -PC_LEADING: int -PC_OVERFLOW: int -PC_DELIMITER: int -WBF_WORDWRAP: int -WBF_WORDBREAK: int -WBF_OVERFLOW: int -WBF_LEVEL1: int -WBF_LEVEL2: int -WBF_CUSTOM: int -CFM_BOLD: int -CFM_ITALIC: int -CFM_UNDERLINE: int -CFM_STRIKEOUT: int -CFM_PROTECTED: int -CFM_SIZE: int -CFM_COLOR: int -CFM_FACE: int -CFM_OFFSET: int -CFM_CHARSET: int -CFE_BOLD: int -CFE_ITALIC: int -CFE_UNDERLINE: int -CFE_STRIKEOUT: int -CFE_PROTECTED: int -CFE_AUTOCOLOR: int -yHeightCharPtsMost: int -SCF_SELECTION: int -SCF_WORD: int -SF_TEXT: int -SF_RTF: int -SF_RTFNOOBJS: int -SF_TEXTIZED: int -SFF_SELECTION: int -SFF_PLAINRTF: int -MAX_TAB_STOPS: int -lDefaultTab: int -PFM_STARTINDENT: int -PFM_RIGHTINDENT: int -PFM_OFFSET: int -PFM_ALIGNMENT: int -PFM_TABSTOPS: int -PFM_NUMBERING: int -PFM_OFFSETINDENT: int -PFN_BULLET: int -PFA_LEFT: int -PFA_RIGHT: int -PFA_CENTER: int -SEL_EMPTY: int -SEL_TEXT: int -SEL_OBJECT: int -SEL_MULTICHAR: int -SEL_MULTIOBJECT: int -OLEOP_DOVERB: int -CF_RTF: str -CF_RTFNOOBJS: str -CF_RETEXTOBJ: str -RIGHT_ALT_PRESSED: int -LEFT_ALT_PRESSED: int -RIGHT_CTRL_PRESSED: int -LEFT_CTRL_PRESSED: int -SHIFT_PRESSED: int -NUMLOCK_ON: int -SCROLLLOCK_ON: int -CAPSLOCK_ON: int -ENHANCED_KEY: int -NLS_DBCSCHAR: int -NLS_ALPHANUMERIC: int -NLS_KATAKANA: int -NLS_HIRAGANA: int -NLS_ROMAN: int -NLS_IME_CONVERSION: int -NLS_IME_DISABLE: int -FROM_LEFT_1ST_BUTTON_PRESSED: int -RIGHTMOST_BUTTON_PRESSED: int -FROM_LEFT_2ND_BUTTON_PRESSED: int -FROM_LEFT_3RD_BUTTON_PRESSED: int -FROM_LEFT_4TH_BUTTON_PRESSED: int -CTRL_C_EVENT: int -CTRL_BREAK_EVENT: int -CTRL_CLOSE_EVENT: int -CTRL_LOGOFF_EVENT: int -CTRL_SHUTDOWN_EVENT: int -MOUSE_MOVED: int -DOUBLE_CLICK: int -MOUSE_WHEELED: int -PSM_SETCURSEL: int -PSM_REMOVEPAGE: int -PSM_ADDPAGE: int -PSM_CHANGED: int -PSM_RESTARTWINDOWS: int -PSM_REBOOTSYSTEM: int -PSM_CANCELTOCLOSE: int -PSM_QUERYSIBLINGS: int -PSM_UNCHANGED: int -PSM_APPLY: int -PSM_SETTITLEA: int -PSM_SETTITLEW: int -PSM_SETWIZBUTTONS: int -PSM_PRESSBUTTON: int -PSM_SETCURSELID: int -PSM_SETFINISHTEXTA: int -PSM_SETFINISHTEXTW: int -PSM_GETTABCONTROL: int -PSM_ISDIALOGMESSAGE: int -PSM_GETCURRENTPAGEHWND: int -PSM_INSERTPAGE: int -PSM_SETHEADERTITLEA: int 
-PSM_SETHEADERTITLEW: int -PSM_SETHEADERSUBTITLEA: int -PSM_SETHEADERSUBTITLEW: int -PSM_HWNDTOINDEX: int -PSM_INDEXTOHWND: int -PSM_PAGETOINDEX: int -PSM_INDEXTOPAGE: int -PSM_IDTOINDEX: int -PSM_INDEXTOID: int -PSM_GETRESULT: int -PSM_RECALCPAGESIZES: int -NameUnknown: int -NameFullyQualifiedDN: int -NameSamCompatible: int -NameDisplay: int -NameUniqueId: int -NameCanonical: int -NameUserPrincipal: int -NameCanonicalEx: int -NameServicePrincipal: int -NameDnsDomain: int -ComputerNameNetBIOS: int -ComputerNameDnsHostname: int -ComputerNameDnsDomain: int -ComputerNameDnsFullyQualified: int -ComputerNamePhysicalNetBIOS: int -ComputerNamePhysicalDnsHostname: int -ComputerNamePhysicalDnsDomain: int -ComputerNamePhysicalDnsFullyQualified: int -LWA_COLORKEY: int -LWA_ALPHA: int -ULW_COLORKEY: int -ULW_ALPHA: int -ULW_OPAQUE: int -TRUE: int -FALSE: int -MAX_PATH: int -AC_SRC_OVER: int -AC_SRC_ALPHA: int -GRADIENT_FILL_RECT_H: int -GRADIENT_FILL_RECT_V: int -GRADIENT_FILL_TRIANGLE: int -GRADIENT_FILL_OP_FLAG: int -MM_WORKING_SET_MAX_HARD_ENABLE: int -MM_WORKING_SET_MAX_HARD_DISABLE: int -MM_WORKING_SET_MIN_HARD_ENABLE: int -MM_WORKING_SET_MIN_HARD_DISABLE: int -VOLUME_NAME_DOS: int -VOLUME_NAME_GUID: int -VOLUME_NAME_NT: int -VOLUME_NAME_NONE: int -FILE_NAME_NORMALIZED: int -FILE_NAME_OPENED: int -DEVICE_NOTIFY_WINDOW_HANDLE: int -DEVICE_NOTIFY_SERVICE_HANDLE: int -BSF_MSGSRV32ISOK: int -BSF_MSGSRV32ISOK_BIT: int -DBT_APPYEND: int -DBT_DEVNODES_CHANGED: int -DBT_QUERYCHANGECONFIG: int -DBT_CONFIGCHANGED: int -DBT_CONFIGCHANGECANCELED: int -DBT_MONITORCHANGE: int -DBT_SHELLLOGGEDON: int -DBT_CONFIGMGAPI32: int -DBT_VXDINITCOMPLETE: int -DBT_VOLLOCKQUERYLOCK: int -DBT_VOLLOCKLOCKTAKEN: int -DBT_VOLLOCKLOCKFAILED: int -DBT_VOLLOCKQUERYUNLOCK: int -DBT_VOLLOCKLOCKRELEASED: int -DBT_VOLLOCKUNLOCKFAILED: int -LOCKP_ALLOW_WRITES: int -LOCKP_FAIL_WRITES: int -LOCKP_FAIL_MEM_MAPPING: int -LOCKP_ALLOW_MEM_MAPPING: int -LOCKP_USER_MASK: int -LOCKP_LOCK_FOR_FORMAT: int -LOCKF_LOGICAL_LOCK: int -LOCKF_PHYSICAL_LOCK: int -DBT_NO_DISK_SPACE: int -DBT_LOW_DISK_SPACE: int -DBT_CONFIGMGPRIVATE: int -DBT_DEVICEARRIVAL: int -DBT_DEVICEQUERYREMOVE: int -DBT_DEVICEQUERYREMOVEFAILED: int -DBT_DEVICEREMOVEPENDING: int -DBT_DEVICEREMOVECOMPLETE: int -DBT_DEVICETYPESPECIFIC: int -DBT_CUSTOMEVENT: int -DBT_DEVTYP_OEM: int -DBT_DEVTYP_DEVNODE: int -DBT_DEVTYP_VOLUME: int -DBT_DEVTYP_PORT: int -DBT_DEVTYP_NET: int -DBT_DEVTYP_DEVICEINTERFACE: int -DBT_DEVTYP_HANDLE: int -DBTF_MEDIA: int -DBTF_NET: int -DBTF_RESOURCE: int -DBTF_XPORT: int -DBTF_SLOWNET: int -DBT_VPOWERDAPI: int -DBT_USERDEFINED: int -CBS_DROPDOWNLIST: int -CDM_GETFOLDERIDLIST: int -CTLCOLOR_LISTBOX: int -DBT_APPYBEGIN: int -FILE_NAMED_STREAMS: int -FILE_READ_ONLY_VOLUME: int -FILE_SEQUENTIAL_WRITE_ONCE: int -FILE_SUPPORTS_ENCRYPTION: int -FILE_SUPPORTS_EXTENDED_ATTRIBUTES: int -FILE_SUPPORTS_HARD_LINKS: int -FILE_SUPPORTS_OBJECT_IDS: int -FILE_SUPPORTS_OPEN_BY_FILE_ID: int -FILE_SUPPORTS_REPARSE_POINTS: int -FILE_SUPPORTS_SPARSE_FILES: int -FILE_SUPPORTS_TRANSACTIONS: int -FILE_SUPPORTS_USN_JOURNAL: int -FILE_VOLUME_QUOTAS: int -KEYEVENTF_SCANCODE: int -KEYEVENTF_UNICODE: int -MOUSEEVENTF_HWHEEL: int -MOUSEEVENTF_MOVE_NOCOALESCE: int -MOUSEEVENTF_VIRTUALDESK: int -ODT_LISTBOX: int -REG_RESOURCE_LIST: int -REG_RESOURCE_REQUIREMENTS_LIST: int -SC_TASKLIST: int -SPI_GETLISTBOXSMOOTHSCROLLING: int -SPI_SETLISTBOXSMOOTHSCROLLING: int -WM_CTLCOLORLISTBOX: int +TRANSPARENT: Final = 1 +OPAQUE: Final = 2 +BKMODE_LAST: Final = 2 +GM_COMPATIBLE: Final = 1 +GM_ADVANCED: 
Final = 2 +GM_LAST: Final = 2 +PT_CLOSEFIGURE: Final = 1 +PT_LINETO: Final = 2 +PT_BEZIERTO: Final = 4 +PT_MOVETO: Final = 6 +MM_TEXT: Final = 1 +MM_LOMETRIC: Final = 2 +MM_HIMETRIC: Final = 3 +MM_LOENGLISH: Final = 4 +MM_HIENGLISH: Final = 5 +MM_TWIPS: Final = 6 +MM_ISOTROPIC: Final = 7 +MM_ANISOTROPIC: Final = 8 +MM_MIN: Final = MM_TEXT +MM_MAX: Final = MM_ANISOTROPIC +MM_MAX_FIXEDSCALE: Final = MM_TWIPS +ABSOLUTE: Final = 1 +RELATIVE: Final = 2 +WHITE_BRUSH: Final = 0 +LTGRAY_BRUSH: Final = 1 +GRAY_BRUSH: Final = 2 +DKGRAY_BRUSH: Final = 3 +BLACK_BRUSH: Final = 4 +NULL_BRUSH: Final = 5 +HOLLOW_BRUSH: Final = NULL_BRUSH +WHITE_PEN: Final = 6 +BLACK_PEN: Final = 7 +NULL_PEN: Final = 8 +OEM_FIXED_FONT: Final = 10 +ANSI_FIXED_FONT: Final = 11 +ANSI_VAR_FONT: Final = 12 +SYSTEM_FONT: Final = 13 +DEVICE_DEFAULT_FONT: Final = 14 +DEFAULT_PALETTE: Final = 15 +SYSTEM_FIXED_FONT: Final = 16 +STOCK_LAST: Final = 16 +CLR_INVALID: Final = -1 + +DC_BRUSH: Final = 18 +DC_PEN: Final = 19 + +STATUS_WAIT_0: Final = 0 +STATUS_ABANDONED_WAIT_0: Final = 128 +STATUS_USER_APC: Final = 192 +STATUS_TIMEOUT: Final = 258 +STATUS_PENDING: Final = 259 +STATUS_SEGMENT_NOTIFICATION: Final = 1073741829 +STATUS_GUARD_PAGE_VIOLATION: Final = -2147483647 +STATUS_DATATYPE_MISALIGNMENT: Final = -2147483646 +STATUS_BREAKPOINT: Final = -2147483645 +STATUS_SINGLE_STEP: Final = -2147483644 +STATUS_ACCESS_VIOLATION: Final = -1073741819 +STATUS_IN_PAGE_ERROR: Final = -1073741818 +STATUS_INVALID_HANDLE: Final = -1073741816 +STATUS_NO_MEMORY: Final = -1073741801 +STATUS_ILLEGAL_INSTRUCTION: Final = -1073741795 +STATUS_NONCONTINUABLE_EXCEPTION: Final = -1073741787 +STATUS_INVALID_DISPOSITION: Final = -1073741786 +STATUS_ARRAY_BOUNDS_EXCEEDED: Final = -1073741684 +STATUS_FLOAT_DENORMAL_OPERAND: Final = -1073741683 +STATUS_FLOAT_DIVIDE_BY_ZERO: Final = -1073741682 +STATUS_FLOAT_INEXACT_RESULT: Final = -1073741681 +STATUS_FLOAT_INVALID_OPERATION: Final = -1073741680 +STATUS_FLOAT_OVERFLOW: Final = -1073741679 +STATUS_FLOAT_STACK_CHECK: Final = -1073741678 +STATUS_FLOAT_UNDERFLOW: Final = -1073741677 +STATUS_INTEGER_DIVIDE_BY_ZERO: Final = -1073741676 +STATUS_INTEGER_OVERFLOW: Final = -1073741675 +STATUS_PRIVILEGED_INSTRUCTION: Final = -1073741674 +STATUS_STACK_OVERFLOW: Final = -1073741571 +STATUS_CONTROL_C_EXIT: Final = -1073741510 + +WAIT_FAILED: Final = -1 +WAIT_OBJECT_0: Final[int] + +WAIT_ABANDONED: Final[int] +WAIT_ABANDONED_0: Final[int] + +WAIT_TIMEOUT: Final = STATUS_TIMEOUT +WAIT_IO_COMPLETION: Final = STATUS_USER_APC +STILL_ACTIVE: Final = STATUS_PENDING +EXCEPTION_ACCESS_VIOLATION: Final = STATUS_ACCESS_VIOLATION +EXCEPTION_DATATYPE_MISALIGNMENT: Final = STATUS_DATATYPE_MISALIGNMENT +EXCEPTION_BREAKPOINT: Final = STATUS_BREAKPOINT +EXCEPTION_SINGLE_STEP: Final = STATUS_SINGLE_STEP +EXCEPTION_ARRAY_BOUNDS_EXCEEDED: Final = STATUS_ARRAY_BOUNDS_EXCEEDED +EXCEPTION_FLT_DENORMAL_OPERAND: Final = STATUS_FLOAT_DENORMAL_OPERAND +EXCEPTION_FLT_DIVIDE_BY_ZERO: Final = STATUS_FLOAT_DIVIDE_BY_ZERO +EXCEPTION_FLT_INEXACT_RESULT: Final = STATUS_FLOAT_INEXACT_RESULT +EXCEPTION_FLT_INVALID_OPERATION: Final = STATUS_FLOAT_INVALID_OPERATION +EXCEPTION_FLT_OVERFLOW: Final = STATUS_FLOAT_OVERFLOW +EXCEPTION_FLT_STACK_CHECK: Final = STATUS_FLOAT_STACK_CHECK +EXCEPTION_FLT_UNDERFLOW: Final = STATUS_FLOAT_UNDERFLOW +EXCEPTION_INT_DIVIDE_BY_ZERO: Final = STATUS_INTEGER_DIVIDE_BY_ZERO +EXCEPTION_INT_OVERFLOW: Final = STATUS_INTEGER_OVERFLOW +EXCEPTION_PRIV_INSTRUCTION: Final = STATUS_PRIVILEGED_INSTRUCTION +EXCEPTION_IN_PAGE_ERROR: Final = 
STATUS_IN_PAGE_ERROR +EXCEPTION_ILLEGAL_INSTRUCTION: Final = STATUS_ILLEGAL_INSTRUCTION +EXCEPTION_NONCONTINUABLE_EXCEPTION: Final = STATUS_NONCONTINUABLE_EXCEPTION +EXCEPTION_STACK_OVERFLOW: Final = STATUS_STACK_OVERFLOW +EXCEPTION_INVALID_DISPOSITION: Final = STATUS_INVALID_DISPOSITION +EXCEPTION_GUARD_PAGE: Final = STATUS_GUARD_PAGE_VIOLATION +EXCEPTION_INVALID_HANDLE: Final = STATUS_INVALID_HANDLE +CONTROL_C_EXIT: Final = STATUS_CONTROL_C_EXIT + +SPI_GETBEEP: Final = 1 +SPI_SETBEEP: Final = 2 +SPI_GETMOUSE: Final = 3 +SPI_SETMOUSE: Final = 4 +SPI_GETBORDER: Final = 5 +SPI_SETBORDER: Final = 6 +SPI_GETKEYBOARDSPEED: Final = 10 +SPI_SETKEYBOARDSPEED: Final = 11 +SPI_LANGDRIVER: Final = 12 +SPI_ICONHORIZONTALSPACING: Final = 13 +SPI_GETSCREENSAVETIMEOUT: Final = 14 +SPI_SETSCREENSAVETIMEOUT: Final = 15 +SPI_GETSCREENSAVEACTIVE: Final = 16 +SPI_SETSCREENSAVEACTIVE: Final = 17 +SPI_GETGRIDGRANULARITY: Final = 18 +SPI_SETGRIDGRANULARITY: Final = 19 +SPI_SETDESKWALLPAPER: Final = 20 +SPI_SETDESKPATTERN: Final = 21 +SPI_GETKEYBOARDDELAY: Final = 22 +SPI_SETKEYBOARDDELAY: Final = 23 +SPI_ICONVERTICALSPACING: Final = 24 +SPI_GETICONTITLEWRAP: Final = 25 +SPI_SETICONTITLEWRAP: Final = 26 +SPI_GETMENUDROPALIGNMENT: Final = 27 +SPI_SETMENUDROPALIGNMENT: Final = 28 +SPI_SETDOUBLECLKWIDTH: Final = 29 +SPI_SETDOUBLECLKHEIGHT: Final = 30 +SPI_GETICONTITLELOGFONT: Final = 31 +SPI_SETDOUBLECLICKTIME: Final = 32 +SPI_SETMOUSEBUTTONSWAP: Final = 33 +SPI_SETICONTITLELOGFONT: Final = 34 +SPI_GETFASTTASKSWITCH: Final = 35 +SPI_SETFASTTASKSWITCH: Final = 36 +SPI_SETDRAGFULLWINDOWS: Final = 37 +SPI_GETDRAGFULLWINDOWS: Final = 38 +SPI_GETNONCLIENTMETRICS: Final = 41 +SPI_SETNONCLIENTMETRICS: Final = 42 +SPI_GETMINIMIZEDMETRICS: Final = 43 +SPI_SETMINIMIZEDMETRICS: Final = 44 +SPI_GETICONMETRICS: Final = 45 +SPI_SETICONMETRICS: Final = 46 +SPI_SETWORKAREA: Final = 47 +SPI_GETWORKAREA: Final = 48 +SPI_SETPENWINDOWS: Final = 49 +SPI_GETFILTERKEYS: Final = 50 +SPI_SETFILTERKEYS: Final = 51 +SPI_GETTOGGLEKEYS: Final = 52 +SPI_SETTOGGLEKEYS: Final = 53 +SPI_GETMOUSEKEYS: Final = 54 +SPI_SETMOUSEKEYS: Final = 55 +SPI_GETSHOWSOUNDS: Final = 56 +SPI_SETSHOWSOUNDS: Final = 57 +SPI_GETSTICKYKEYS: Final = 58 +SPI_SETSTICKYKEYS: Final = 59 +SPI_GETACCESSTIMEOUT: Final = 60 +SPI_SETACCESSTIMEOUT: Final = 61 +SPI_GETSERIALKEYS: Final = 62 +SPI_SETSERIALKEYS: Final = 63 +SPI_GETSOUNDSENTRY: Final = 64 +SPI_SETSOUNDSENTRY: Final = 65 +SPI_GETHIGHCONTRAST: Final = 66 +SPI_SETHIGHCONTRAST: Final = 67 +SPI_GETKEYBOARDPREF: Final = 68 +SPI_SETKEYBOARDPREF: Final = 69 +SPI_GETSCREENREADER: Final = 70 +SPI_SETSCREENREADER: Final = 71 +SPI_GETANIMATION: Final = 72 +SPI_SETANIMATION: Final = 73 +SPI_GETFONTSMOOTHING: Final = 74 +SPI_SETFONTSMOOTHING: Final = 75 +SPI_SETDRAGWIDTH: Final = 76 +SPI_SETDRAGHEIGHT: Final = 77 +SPI_SETHANDHELD: Final = 78 +SPI_GETLOWPOWERTIMEOUT: Final = 79 +SPI_GETPOWEROFFTIMEOUT: Final = 80 +SPI_SETLOWPOWERTIMEOUT: Final = 81 +SPI_SETPOWEROFFTIMEOUT: Final = 82 +SPI_GETLOWPOWERACTIVE: Final = 83 +SPI_GETPOWEROFFACTIVE: Final = 84 +SPI_SETLOWPOWERACTIVE: Final = 85 +SPI_SETPOWEROFFACTIVE: Final = 86 +SPI_SETCURSORS: Final = 87 +SPI_SETICONS: Final = 88 +SPI_GETDEFAULTINPUTLANG: Final = 89 +SPI_SETDEFAULTINPUTLANG: Final = 90 +SPI_SETLANGTOGGLE: Final = 91 +SPI_GETWINDOWSEXTENSION: Final = 92 +SPI_SETMOUSETRAILS: Final = 93 +SPI_GETMOUSETRAILS: Final = 94 +SPI_GETSNAPTODEFBUTTON: Final = 95 +SPI_SETSNAPTODEFBUTTON: Final = 96 +SPI_SETSCREENSAVERRUNNING: Final = 97 +SPI_SCREENSAVERRUNNING: Final = 
SPI_SETSCREENSAVERRUNNING +SPI_GETMOUSEHOVERWIDTH: Final = 98 +SPI_SETMOUSEHOVERWIDTH: Final = 99 +SPI_GETMOUSEHOVERHEIGHT: Final = 100 +SPI_SETMOUSEHOVERHEIGHT: Final = 101 +SPI_GETMOUSEHOVERTIME: Final = 102 +SPI_SETMOUSEHOVERTIME: Final = 103 +SPI_GETWHEELSCROLLLINES: Final = 104 +SPI_SETWHEELSCROLLLINES: Final = 105 +SPI_GETMENUSHOWDELAY: Final = 106 +SPI_SETMENUSHOWDELAY: Final = 107 + +SPI_GETSHOWIMEUI: Final = 110 +SPI_SETSHOWIMEUI: Final = 111 +SPI_GETMOUSESPEED: Final = 112 +SPI_SETMOUSESPEED: Final = 113 +SPI_GETSCREENSAVERRUNNING: Final = 114 +SPI_GETDESKWALLPAPER: Final = 115 + +SPI_GETACTIVEWINDOWTRACKING: Final = 4096 +SPI_SETACTIVEWINDOWTRACKING: Final = 4097 +SPI_GETMENUANIMATION: Final = 4098 +SPI_SETMENUANIMATION: Final = 4099 +SPI_GETCOMBOBOXANIMATION: Final = 4100 +SPI_SETCOMBOBOXANIMATION: Final = 4101 +SPI_GETLISTBOXSMOOTHSCROLLING: Final = 4102 +SPI_SETLISTBOXSMOOTHSCROLLING: Final = 4103 +SPI_GETGRADIENTCAPTIONS: Final = 4104 +SPI_SETGRADIENTCAPTIONS: Final = 4105 +SPI_GETKEYBOARDCUES: Final = 4106 +SPI_SETKEYBOARDCUES: Final = 4107 +SPI_GETMENUUNDERLINES: Final = 4106 +SPI_SETMENUUNDERLINES: Final = 4107 +SPI_GETACTIVEWNDTRKZORDER: Final = 4108 +SPI_SETACTIVEWNDTRKZORDER: Final = 4109 +SPI_GETHOTTRACKING: Final = 4110 +SPI_SETHOTTRACKING: Final = 4111 + +SPI_GETMENUFADE: Final = 4114 +SPI_SETMENUFADE: Final = 4115 +SPI_GETSELECTIONFADE: Final = 4116 +SPI_SETSELECTIONFADE: Final = 4117 +SPI_GETTOOLTIPANIMATION: Final = 4118 +SPI_SETTOOLTIPANIMATION: Final = 4119 +SPI_GETTOOLTIPFADE: Final = 4120 +SPI_SETTOOLTIPFADE: Final = 4121 +SPI_GETCURSORSHADOW: Final = 4122 +SPI_SETCURSORSHADOW: Final = 4123 +SPI_GETMOUSESONAR: Final = 4124 +SPI_SETMOUSESONAR: Final = 4125 +SPI_GETMOUSECLICKLOCK: Final = 4126 +SPI_SETMOUSECLICKLOCK: Final = 4127 +SPI_GETMOUSEVANISH: Final = 4128 +SPI_SETMOUSEVANISH: Final = 4129 +SPI_GETFLATMENU: Final = 4130 +SPI_SETFLATMENU: Final = 4131 +SPI_GETDROPSHADOW: Final = 4132 +SPI_SETDROPSHADOW: Final = 4133 +SPI_GETBLOCKSENDINPUTRESETS: Final = 4134 +SPI_SETBLOCKSENDINPUTRESETS: Final = 4135 +SPI_GETUIEFFECTS: Final = 4158 +SPI_SETUIEFFECTS: Final = 4159 + +SPI_GETFOREGROUNDLOCKTIMEOUT: Final = 8192 +SPI_SETFOREGROUNDLOCKTIMEOUT: Final = 8193 +SPI_GETACTIVEWNDTRKTIMEOUT: Final = 8194 +SPI_SETACTIVEWNDTRKTIMEOUT: Final = 8195 +SPI_GETFOREGROUNDFLASHCOUNT: Final = 8196 +SPI_SETFOREGROUNDFLASHCOUNT: Final = 8197 +SPI_GETCARETWIDTH: Final = 8198 +SPI_SETCARETWIDTH: Final = 8199 +SPI_GETMOUSECLICKLOCKTIME: Final = 8200 +SPI_SETMOUSECLICKLOCKTIME: Final = 8201 +SPI_GETFONTSMOOTHINGTYPE: Final = 8202 +SPI_SETFONTSMOOTHINGTYPE: Final = 8203 +SPI_GETFONTSMOOTHINGCONTRAST: Final = 8204 +SPI_SETFONTSMOOTHINGCONTRAST: Final = 8205 +SPI_GETFOCUSBORDERWIDTH: Final = 8206 +SPI_SETFOCUSBORDERWIDTH: Final = 8207 +SPI_GETFOCUSBORDERHEIGHT: Final = 8208 +SPI_SETFOCUSBORDERHEIGHT: Final = 8209 +SPI_GETFONTSMOOTHINGORIENTATION: Final = 8210 +SPI_SETFONTSMOOTHINGORIENTATION: Final = 8211 + +SPIF_UPDATEINIFILE: Final = 1 +SPIF_SENDWININICHANGE: Final = 2 +SPIF_SENDCHANGE: Final = SPIF_SENDWININICHANGE + +FE_FONTSMOOTHINGSTANDARD: Final = 1 +FE_FONTSMOOTHINGCLEARTYPE: Final = 2 +FE_FONTSMOOTHINGDOCKING: Final = 32768 + +METRICS_USEDEFAULT: Final = -1 +ARW_BOTTOMLEFT: Final = 0 +ARW_BOTTOMRIGHT: Final = 1 +ARW_TOPLEFT: Final = 2 +ARW_TOPRIGHT: Final = 3 +ARW_STARTMASK: Final = 3 +ARW_STARTRIGHT: Final = 1 +ARW_STARTTOP: Final = 2 +ARW_LEFT: Final = 0 +ARW_RIGHT: Final = 0 +ARW_UP: Final = 4 +ARW_DOWN: Final = 4 +ARW_HIDE: Final = 8 + +SERKF_SERIALKEYSON: Final = 1 
+SERKF_AVAILABLE: Final = 2 +SERKF_INDICATOR: Final = 4 +HCF_HIGHCONTRASTON: Final = 1 +HCF_AVAILABLE: Final = 2 +HCF_HOTKEYACTIVE: Final = 4 +HCF_CONFIRMHOTKEY: Final = 8 +HCF_HOTKEYSOUND: Final = 16 +HCF_INDICATOR: Final = 32 +HCF_HOTKEYAVAILABLE: Final = 64 +CDS_UPDATEREGISTRY: Final = 1 +CDS_TEST: Final = 2 +CDS_FULLSCREEN: Final = 4 +CDS_GLOBAL: Final = 8 +CDS_SET_PRIMARY: Final = 16 +CDS_RESET: Final = 1073741824 +CDS_SETRECT: Final = 536870912 +CDS_NORESET: Final = 268435456 + +DISP_CHANGE_SUCCESSFUL: Final = 0 +DISP_CHANGE_RESTART: Final = 1 +DISP_CHANGE_FAILED: Final = -1 +DISP_CHANGE_BADMODE: Final = -2 +DISP_CHANGE_NOTUPDATED: Final = -3 +DISP_CHANGE_BADFLAGS: Final = -4 +DISP_CHANGE_BADPARAM: Final = -5 +DISP_CHANGE_BADDUALVIEW: Final = -6 + +ENUM_CURRENT_SETTINGS: Final = -1 +ENUM_REGISTRY_SETTINGS: Final = -2 +FKF_FILTERKEYSON: Final = 1 +FKF_AVAILABLE: Final = 2 +FKF_HOTKEYACTIVE: Final = 4 +FKF_CONFIRMHOTKEY: Final = 8 +FKF_HOTKEYSOUND: Final = 16 +FKF_INDICATOR: Final = 32 +FKF_CLICKON: Final = 64 +SKF_STICKYKEYSON: Final = 1 +SKF_AVAILABLE: Final = 2 +SKF_HOTKEYACTIVE: Final = 4 +SKF_CONFIRMHOTKEY: Final = 8 +SKF_HOTKEYSOUND: Final = 16 +SKF_INDICATOR: Final = 32 +SKF_AUDIBLEFEEDBACK: Final = 64 +SKF_TRISTATE: Final = 128 +SKF_TWOKEYSOFF: Final = 256 +SKF_LALTLATCHED: Final = 268435456 +SKF_LCTLLATCHED: Final = 67108864 +SKF_LSHIFTLATCHED: Final = 16777216 +SKF_RALTLATCHED: Final = 536870912 +SKF_RCTLLATCHED: Final = 134217728 +SKF_RSHIFTLATCHED: Final = 33554432 +SKF_LWINLATCHED: Final = 1073741824 +SKF_RWINLATCHED: Final = -2147483648 +SKF_LALTLOCKED: Final = 1048576 +SKF_LCTLLOCKED: Final = 262144 +SKF_LSHIFTLOCKED: Final = 65536 +SKF_RALTLOCKED: Final = 2097152 +SKF_RCTLLOCKED: Final = 524288 +SKF_RSHIFTLOCKED: Final = 131072 +SKF_LWINLOCKED: Final = 4194304 +SKF_RWINLOCKED: Final = 8388608 +MKF_MOUSEKEYSON: Final = 1 +MKF_AVAILABLE: Final = 2 +MKF_HOTKEYACTIVE: Final = 4 +MKF_CONFIRMHOTKEY: Final = 8 +MKF_HOTKEYSOUND: Final = 16 +MKF_INDICATOR: Final = 32 +MKF_MODIFIERS: Final = 64 +MKF_REPLACENUMBERS: Final = 128 +MKF_LEFTBUTTONSEL: Final = 268435456 +MKF_RIGHTBUTTONSEL: Final = 536870912 +MKF_LEFTBUTTONDOWN: Final = 16777216 +MKF_RIGHTBUTTONDOWN: Final = 33554432 +MKF_MOUSEMODE: Final = -2147483648 +ATF_TIMEOUTON: Final = 1 +ATF_ONOFFFEEDBACK: Final = 2 +SSGF_NONE: Final = 0 +SSGF_DISPLAY: Final = 3 +SSTF_NONE: Final = 0 +SSTF_CHARS: Final = 1 +SSTF_BORDER: Final = 2 +SSTF_DISPLAY: Final = 3 +SSWF_NONE: Final = 0 +SSWF_TITLE: Final = 1 +SSWF_WINDOW: Final = 2 +SSWF_DISPLAY: Final = 3 +SSWF_CUSTOM: Final = 4 +SSF_SOUNDSENTRYON: Final = 1 +SSF_AVAILABLE: Final = 2 +SSF_INDICATOR: Final = 4 +TKF_TOGGLEKEYSON: Final = 1 +TKF_AVAILABLE: Final = 2 +TKF_HOTKEYACTIVE: Final = 4 +TKF_CONFIRMHOTKEY: Final = 8 +TKF_HOTKEYSOUND: Final = 16 +TKF_INDICATOR: Final = 32 +SLE_ERROR: Final = 1 +SLE_MINORERROR: Final = 2 +SLE_WARNING: Final = 3 +MONITOR_DEFAULTTONULL: Final = 0 +MONITOR_DEFAULTTOPRIMARY: Final = 1 +MONITOR_DEFAULTTONEAREST: Final = 2 +MONITORINFOF_PRIMARY: Final = 1 +CCHDEVICENAME: Final = 32 +CHILDID_SELF: Final = 0 +INDEXID_OBJECT: Final = 0 +INDEXID_CONTAINER: Final = 0 +OBJID_WINDOW: Final = 0 +OBJID_SYSMENU: Final = -1 +OBJID_TITLEBAR: Final = -2 +OBJID_MENU: Final = -3 +OBJID_CLIENT: Final = -4 +OBJID_VSCROLL: Final = -5 +OBJID_HSCROLL: Final = -6 +OBJID_SIZEGRIP: Final = -7 +OBJID_CARET: Final = -8 +OBJID_CURSOR: Final = -9 +OBJID_ALERT: Final = -10 +OBJID_SOUND: Final = -11 +EVENT_MIN: Final = 1 +EVENT_MAX: Final = 2147483647 +EVENT_SYSTEM_SOUND: Final = 1 
+EVENT_SYSTEM_ALERT: Final = 2 +EVENT_SYSTEM_FOREGROUND: Final = 3 +EVENT_SYSTEM_MENUSTART: Final = 4 +EVENT_SYSTEM_MENUEND: Final = 5 +EVENT_SYSTEM_MENUPOPUPSTART: Final = 6 +EVENT_SYSTEM_MENUPOPUPEND: Final = 7 +EVENT_SYSTEM_CAPTURESTART: Final = 8 +EVENT_SYSTEM_CAPTUREEND: Final = 9 +EVENT_SYSTEM_MOVESIZESTART: Final = 10 +EVENT_SYSTEM_MOVESIZEEND: Final = 11 +EVENT_SYSTEM_CONTEXTHELPSTART: Final = 12 +EVENT_SYSTEM_CONTEXTHELPEND: Final = 13 +EVENT_SYSTEM_DRAGDROPSTART: Final = 14 +EVENT_SYSTEM_DRAGDROPEND: Final = 15 +EVENT_SYSTEM_DIALOGSTART: Final = 16 +EVENT_SYSTEM_DIALOGEND: Final = 17 +EVENT_SYSTEM_SCROLLINGSTART: Final = 18 +EVENT_SYSTEM_SCROLLINGEND: Final = 19 +EVENT_SYSTEM_SWITCHSTART: Final = 20 +EVENT_SYSTEM_SWITCHEND: Final = 21 +EVENT_SYSTEM_MINIMIZESTART: Final = 22 +EVENT_SYSTEM_MINIMIZEEND: Final = 23 +EVENT_OBJECT_CREATE: Final = 32768 +EVENT_OBJECT_DESTROY: Final = 32769 +EVENT_OBJECT_SHOW: Final = 32770 +EVENT_OBJECT_HIDE: Final = 32771 +EVENT_OBJECT_REORDER: Final = 32772 +EVENT_OBJECT_FOCUS: Final = 32773 +EVENT_OBJECT_SELECTION: Final = 32774 +EVENT_OBJECT_SELECTIONADD: Final = 32775 +EVENT_OBJECT_SELECTIONREMOVE: Final = 32776 +EVENT_OBJECT_SELECTIONWITHIN: Final = 32777 +EVENT_OBJECT_STATECHANGE: Final = 32778 +EVENT_OBJECT_LOCATIONCHANGE: Final = 32779 +EVENT_OBJECT_NAMECHANGE: Final = 32780 +EVENT_OBJECT_DESCRIPTIONCHANGE: Final = 32781 +EVENT_OBJECT_VALUECHANGE: Final = 32782 +EVENT_OBJECT_PARENTCHANGE: Final = 32783 +EVENT_OBJECT_HELPCHANGE: Final = 32784 +EVENT_OBJECT_DEFACTIONCHANGE: Final = 32785 +EVENT_OBJECT_ACCELERATORCHANGE: Final = 32786 +SOUND_SYSTEM_STARTUP: Final = 1 +SOUND_SYSTEM_SHUTDOWN: Final = 2 +SOUND_SYSTEM_BEEP: Final = 3 +SOUND_SYSTEM_ERROR: Final = 4 +SOUND_SYSTEM_QUESTION: Final = 5 +SOUND_SYSTEM_WARNING: Final = 6 +SOUND_SYSTEM_INFORMATION: Final = 7 +SOUND_SYSTEM_MAXIMIZE: Final = 8 +SOUND_SYSTEM_MINIMIZE: Final = 9 +SOUND_SYSTEM_RESTOREUP: Final = 10 +SOUND_SYSTEM_RESTOREDOWN: Final = 11 +SOUND_SYSTEM_APPSTART: Final = 12 +SOUND_SYSTEM_FAULT: Final = 13 +SOUND_SYSTEM_APPEND: Final = 14 +SOUND_SYSTEM_MENUCOMMAND: Final = 15 +SOUND_SYSTEM_MENUPOPUP: Final = 16 +CSOUND_SYSTEM: Final = 16 +ALERT_SYSTEM_INFORMATIONAL: Final = 1 +ALERT_SYSTEM_WARNING: Final = 2 +ALERT_SYSTEM_ERROR: Final = 3 +ALERT_SYSTEM_QUERY: Final = 4 +ALERT_SYSTEM_CRITICAL: Final = 5 +CALERT_SYSTEM: Final = 6 +WINEVENT_OUTOFCONTEXT: Final = 0 +WINEVENT_SKIPOWNTHREAD: Final = 1 +WINEVENT_SKIPOWNPROCESS: Final = 2 +WINEVENT_INCONTEXT: Final = 4 +GUI_CARETBLINKING: Final = 1 +GUI_INMOVESIZE: Final = 2 +GUI_INMENUMODE: Final = 4 +GUI_SYSTEMMENUMODE: Final = 8 +GUI_POPUPMENUMODE: Final = 16 +STATE_SYSTEM_UNAVAILABLE: Final = 1 +STATE_SYSTEM_SELECTED: Final = 2 +STATE_SYSTEM_FOCUSED: Final = 4 +STATE_SYSTEM_PRESSED: Final = 8 +STATE_SYSTEM_CHECKED: Final = 16 +STATE_SYSTEM_MIXED: Final = 32 +STATE_SYSTEM_READONLY: Final = 64 +STATE_SYSTEM_HOTTRACKED: Final = 128 +STATE_SYSTEM_DEFAULT: Final = 256 +STATE_SYSTEM_EXPANDED: Final = 512 +STATE_SYSTEM_COLLAPSED: Final = 1024 +STATE_SYSTEM_BUSY: Final = 2048 +STATE_SYSTEM_FLOATING: Final = 4096 +STATE_SYSTEM_MARQUEED: Final = 8192 +STATE_SYSTEM_ANIMATED: Final = 16384 +STATE_SYSTEM_INVISIBLE: Final = 32768 +STATE_SYSTEM_OFFSCREEN: Final = 65536 +STATE_SYSTEM_SIZEABLE: Final = 131072 +STATE_SYSTEM_MOVEABLE: Final = 262144 +STATE_SYSTEM_SELFVOICING: Final = 524288 +STATE_SYSTEM_FOCUSABLE: Final = 1048576 +STATE_SYSTEM_SELECTABLE: Final = 2097152 +STATE_SYSTEM_LINKED: Final = 4194304 +STATE_SYSTEM_TRAVERSED: Final = 8388608 
+STATE_SYSTEM_MULTISELECTABLE: Final = 16777216 +STATE_SYSTEM_EXTSELECTABLE: Final = 33554432 +STATE_SYSTEM_ALERT_LOW: Final = 67108864 +STATE_SYSTEM_ALERT_MEDIUM: Final = 134217728 +STATE_SYSTEM_ALERT_HIGH: Final = 268435456 +STATE_SYSTEM_VALID: Final = 536870911 +CCHILDREN_TITLEBAR: Final = 5 +CCHILDREN_SCROLLBAR: Final = 5 +CURSOR_SHOWING: Final = 1 +WS_ACTIVECAPTION: Final = 1 +GA_MIC: Final = 1 +GA_PARENT: Final = 1 +GA_ROOT: Final = 2 +GA_ROOTOWNER: Final = 3 +GA_MAC: Final = 4 + +BF_LEFT: Final = 1 +BF_TOP: Final = 2 +BF_RIGHT: Final = 4 +BF_BOTTOM: Final = 8 +BF_TOPLEFT: Final[int] +BF_TOPRIGHT: Final[int] +BF_BOTTOMLEFT: Final[int] +BF_BOTTOMRIGHT: Final[int] +BF_RECT: Final[int] +BF_DIAGONAL: Final = 16 +BF_DIAGONAL_ENDTOPRIGHT: Final[int] +BF_DIAGONAL_ENDTOPLEFT: Final[int] +BF_DIAGONAL_ENDBOTTOMLEFT: Final[int] +BF_DIAGONAL_ENDBOTTOMRIGHT: Final[int] +BF_MIDDLE: Final = 2048 +BF_SOFT: Final = 4096 +BF_ADJUST: Final = 8192 +BF_FLAT: Final = 16384 +BF_MONO: Final = 32768 +DFC_CAPTION: Final = 1 +DFC_MENU: Final = 2 +DFC_SCROLL: Final = 3 +DFC_BUTTON: Final = 4 +DFC_POPUPMENU: Final = 5 +DFCS_CAPTIONCLOSE: Final = 0 +DFCS_CAPTIONMIN: Final = 1 +DFCS_CAPTIONMAX: Final = 2 +DFCS_CAPTIONRESTORE: Final = 3 +DFCS_CAPTIONHELP: Final = 4 +DFCS_MENUARROW: Final = 0 +DFCS_MENUCHECK: Final = 1 +DFCS_MENUBULLET: Final = 2 +DFCS_MENUARROWRIGHT: Final = 4 +DFCS_SCROLLUP: Final = 0 +DFCS_SCROLLDOWN: Final = 1 +DFCS_SCROLLLEFT: Final = 2 +DFCS_SCROLLRIGHT: Final = 3 +DFCS_SCROLLCOMBOBOX: Final = 5 +DFCS_SCROLLSIZEGRIP: Final = 8 +DFCS_SCROLLSIZEGRIPRIGHT: Final = 16 +DFCS_BUTTONCHECK: Final = 0 +DFCS_BUTTONRADIOIMAGE: Final = 1 +DFCS_BUTTONRADIOMASK: Final = 2 +DFCS_BUTTONRADIO: Final = 4 +DFCS_BUTTON3STATE: Final = 8 +DFCS_BUTTONPUSH: Final = 16 +DFCS_INACTIVE: Final = 256 +DFCS_PUSHED: Final = 512 +DFCS_CHECKED: Final = 1024 +DFCS_TRANSPARENT: Final = 2048 +DFCS_HOT: Final = 4096 +DFCS_ADJUSTRECT: Final = 8192 +DFCS_FLAT: Final = 16384 +DFCS_MONO: Final = 32768 +DC_ACTIVE: Final = 1 +DC_SMALLCAP: Final = 2 +DC_ICON: Final = 4 +DC_TEXT: Final = 8 +DC_INBUTTON: Final = 16 +DC_GRADIENT: Final = 32 +IDANI_OPEN: Final = 1 +IDANI_CLOSE: Final = 2 +IDANI_CAPTION: Final = 3 +CF_TEXT: Final = 1 +CF_BITMAP: Final = 2 +CF_METAFILEPICT: Final = 3 +CF_SYLK: Final = 4 +CF_DIF: Final = 5 +CF_TIFF: Final = 6 +CF_OEMTEXT: Final = 7 +CF_DIB: Final = 8 +CF_PALETTE: Final = 9 +CF_PENDATA: Final = 10 +CF_RIFF: Final = 11 +CF_WAVE: Final = 12 +CF_UNICODETEXT: Final = 13 +CF_ENHMETAFILE: Final = 14 +CF_HDROP: Final = 15 +CF_LOCALE: Final = 16 +CF_DIBV5: Final = 17 +CF_MAX: Final = 18 +CF_OWNERDISPLAY: Final = 128 +CF_DSPTEXT: Final = 129 +CF_DSPBITMAP: Final = 130 +CF_DSPMETAFILEPICT: Final = 131 +CF_DSPENHMETAFILE: Final = 142 +CF_PRIVATEFIRST: Final = 512 +CF_PRIVATELAST: Final = 767 +CF_GDIOBJFIRST: Final = 768 +CF_GDIOBJLAST: Final = 1023 +FVIRTKEY: Final = 1 +FNOINVERT: Final = 2 +FSHIFT: Final = 4 +FCONTROL: Final = 8 +FALT: Final = 16 +WPF_SETMINPOSITION: Final = 1 +WPF_RESTORETOMAXIMIZED: Final = 2 +ODT_MENU: Final = 1 +ODT_LISTBOX: Final = 2 +ODT_COMBOBOX: Final = 3 +ODT_BUTTON: Final = 4 +ODT_STATIC: Final = 5 +ODA_DRAWENTIRE: Final = 1 +ODA_SELECT: Final = 2 +ODA_FOCUS: Final = 4 +ODS_SELECTED: Final = 1 +ODS_GRAYED: Final = 2 +ODS_DISABLED: Final = 4 +ODS_CHECKED: Final = 8 +ODS_FOCUS: Final = 16 +ODS_DEFAULT: Final = 32 +ODS_COMBOBOXEDIT: Final = 4096 +ODS_HOTLIGHT: Final = 64 +ODS_INACTIVE: Final = 128 +PM_NOREMOVE: Final = 0 +PM_REMOVE: Final = 1 +PM_NOYIELD: Final = 2 +MOD_ALT: Final = 1 
+MOD_CONTROL: Final = 2 +MOD_SHIFT: Final = 4 +MOD_WIN: Final = 8 +MOD_NOREPEAT: Final = 16384 +IDHOT_SNAPWINDOW: Final = -1 +IDHOT_SNAPDESKTOP: Final = -2 + +ENDSESSION_LOGOFF: Final = -2147483648 +EWX_LOGOFF: Final = 0 +EWX_SHUTDOWN: Final = 1 +EWX_REBOOT: Final = 2 +EWX_FORCE: Final = 4 +EWX_POWEROFF: Final = 8 +EWX_FORCEIFHUNG: Final = 16 +BSM_ALLDESKTOPS: Final = 16 +BROADCAST_QUERY_DENY: Final = 1112363332 + +DBWF_LPARAMPOINTER: Final = 32768 + +SWP_NOSIZE: Final = 1 +SWP_NOMOVE: Final = 2 +SWP_NOZORDER: Final = 4 +SWP_NOREDRAW: Final = 8 +SWP_NOACTIVATE: Final = 16 +SWP_FRAMECHANGED: Final = 32 +SWP_SHOWWINDOW: Final = 64 +SWP_HIDEWINDOW: Final = 128 +SWP_NOCOPYBITS: Final = 256 +SWP_NOOWNERZORDER: Final = 512 +SWP_NOSENDCHANGING: Final = 1024 +SWP_DRAWFRAME: Final = SWP_FRAMECHANGED +SWP_NOREPOSITION: Final = SWP_NOOWNERZORDER +SWP_DEFERERASE: Final = 8192 +SWP_ASYNCWINDOWPOS: Final = 16384 + +DLGWINDOWEXTRA: Final = 30 + +KEYEVENTF_EXTENDEDKEY: Final = 1 +KEYEVENTF_KEYUP: Final = 2 +KEYEVENTF_UNICODE: Final = 4 +KEYEVENTF_SCANCODE: Final = 8 +MOUSEEVENTF_MOVE: Final = 1 +MOUSEEVENTF_LEFTDOWN: Final = 2 +MOUSEEVENTF_LEFTUP: Final = 4 +MOUSEEVENTF_RIGHTDOWN: Final = 8 +MOUSEEVENTF_RIGHTUP: Final = 16 +MOUSEEVENTF_MIDDLEDOWN: Final = 32 +MOUSEEVENTF_MIDDLEUP: Final = 64 +MOUSEEVENTF_XDOWN: Final = 128 +MOUSEEVENTF_XUP: Final = 256 +MOUSEEVENTF_WHEEL: Final = 2048 +MOUSEEVENTF_HWHEEL: Final = 4096 +MOUSEEVENTF_MOVE_NOCOALESCE: Final = 8192 +MOUSEEVENTF_VIRTUALDESK: Final = 16384 +MOUSEEVENTF_ABSOLUTE: Final = 32768 +INPUT_MOUSE: Final = 0 +INPUT_KEYBOARD: Final = 1 +INPUT_HARDWARE: Final = 2 +MWMO_WAITALL: Final = 1 +MWMO_ALERTABLE: Final = 2 +MWMO_INPUTAVAILABLE: Final = 4 +QS_KEY: Final = 1 +QS_MOUSEMOVE: Final = 2 +QS_MOUSEBUTTON: Final = 4 +QS_POSTMESSAGE: Final = 8 +QS_TIMER: Final = 16 +QS_PAINT: Final = 32 +QS_SENDMESSAGE: Final = 64 +QS_HOTKEY: Final = 128 +QS_MOUSE: Final[int] +QS_INPUT: Final[int] +QS_ALLEVENTS: Final[int] +QS_ALLINPUT: Final[int] + +IMN_CLOSESTATUSWINDOW: Final = 1 +IMN_OPENSTATUSWINDOW: Final = 2 +IMN_CHANGECANDIDATE: Final = 3 +IMN_CLOSECANDIDATE: Final = 4 +IMN_OPENCANDIDATE: Final = 5 +IMN_SETCONVERSIONMODE: Final = 6 +IMN_SETSENTENCEMODE: Final = 7 +IMN_SETOPENSTATUS: Final = 8 +IMN_SETCANDIDATEPOS: Final = 9 +IMN_SETCOMPOSITIONFONT: Final = 10 +IMN_SETCOMPOSITIONWINDOW: Final = 11 +IMN_SETSTATUSWINDOWPOS: Final = 12 +IMN_GUIDELINE: Final = 13 +IMN_PRIVATE: Final = 14 + +HELP_CONTEXT: Final = 1 +HELP_QUIT: Final = 2 +HELP_INDEX: Final = 3 +HELP_CONTENTS: Final = 3 +HELP_HELPONHELP: Final = 4 +HELP_SETINDEX: Final = 5 +HELP_SETCONTENTS: Final = 5 +HELP_CONTEXTPOPUP: Final = 8 +HELP_FORCEFILE: Final = 9 +HELP_KEY: Final = 257 +HELP_COMMAND: Final = 258 +HELP_PARTIALKEY: Final = 261 +HELP_MULTIKEY: Final = 513 +HELP_SETWINPOS: Final = 515 +HELP_CONTEXTMENU: Final = 10 +HELP_FINDER: Final = 11 +HELP_WM_HELP: Final = 12 +HELP_SETPOPUP_POS: Final = 13 +HELP_TCARD: Final = 32768 +HELP_TCARD_DATA: Final = 16 +HELP_TCARD_OTHER_CALLER: Final = 17 +IDH_NO_HELP: Final = 28440 +IDH_MISSING_CONTEXT: Final = 28441 +IDH_GENERIC_HELP_BUTTON: Final = 28442 +IDH_OK: Final = 28443 +IDH_CANCEL: Final = 28444 +IDH_HELP: Final = 28445 +GR_GDIOBJECTS: Final = 0 +GR_USEROBJECTS: Final = 1 + +SRCCOPY: Final = 13369376 +SRCPAINT: Final = 15597702 +SRCAND: Final = 8913094 +SRCINVERT: Final = 6684742 +SRCERASE: Final = 4457256 +NOTSRCCOPY: Final = 3342344 +NOTSRCERASE: Final = 1114278 +MERGECOPY: Final = 12583114 +MERGEPAINT: Final = 12255782 +PATCOPY: Final = 15728673 +PATPAINT: 
Final = 16452105 +PATINVERT: Final = 5898313 +DSTINVERT: Final = 5570569 +BLACKNESS: Final = 66 +WHITENESS: Final = 16711778 + +R2_BLACK: Final = 1 +R2_NOTMERGEPEN: Final = 2 +R2_MASKNOTPEN: Final = 3 +R2_NOTCOPYPEN: Final = 4 +R2_MASKPENNOT: Final = 5 +R2_NOT: Final = 6 +R2_XORPEN: Final = 7 +R2_NOTMASKPEN: Final = 8 +R2_MASKPEN: Final = 9 +R2_NOTXORPEN: Final = 10 +R2_NOP: Final = 11 +R2_MERGENOTPEN: Final = 12 +R2_COPYPEN: Final = 13 +R2_MERGEPENNOT: Final = 14 +R2_MERGEPEN: Final = 15 +R2_WHITE: Final = 16 +R2_LAST: Final = 16 +GDI_ERROR: Final = -1 +ERROR: Final = 0 +NULLREGION: Final = 1 +SIMPLEREGION: Final = 2 +COMPLEXREGION: Final = 3 +RGN_ERROR: Final = ERROR +RGN_AND: Final = 1 +RGN_OR: Final = 2 +RGN_XOR: Final = 3 +RGN_DIFF: Final = 4 +RGN_COPY: Final = 5 +RGN_MIN: Final = RGN_AND +RGN_MAX: Final = RGN_COPY + +BLACKONWHITE: Final = 1 +WHITEONBLACK: Final = 2 +COLORONCOLOR: Final = 3 +HALFTONE: Final = 4 +MAXSTRETCHBLTMODE: Final = 4 +STRETCH_ANDSCANS: Final = BLACKONWHITE +STRETCH_ORSCANS: Final = WHITEONBLACK +STRETCH_DELETESCANS: Final = COLORONCOLOR +STRETCH_HALFTONE: Final = HALFTONE + +ALTERNATE: Final = 1 +WINDING: Final = 2 +POLYFILL_LAST: Final = 2 + +LAYOUT_RTL: Final = 1 +LAYOUT_BTT: Final = 2 +LAYOUT_VBH: Final = 4 +LAYOUT_ORIENTATIONMASK: Final[int] +LAYOUT_BITMAPORIENTATIONPRESERVED: Final = 8 + +TA_NOUPDATECP: Final = 0 +TA_UPDATECP: Final = 1 +TA_LEFT: Final = 0 +TA_RIGHT: Final = 2 +TA_CENTER: Final = 6 +TA_TOP: Final = 0 +TA_BOTTOM: Final = 8 +TA_BASELINE: Final = 24 +TA_MASK: Final[int] +VTA_BASELINE: Final = TA_BASELINE +VTA_LEFT: Final = TA_BOTTOM +VTA_RIGHT: Final = TA_TOP +VTA_CENTER: Final = TA_CENTER +VTA_BOTTOM: Final = TA_RIGHT +VTA_TOP: Final = TA_LEFT +ETO_GRAYED: Final = 1 +ETO_OPAQUE: Final = 2 +ETO_CLIPPED: Final = 4 +ASPECT_FILTERING: Final = 1 +DCB_RESET: Final = 1 +DCB_ACCUMULATE: Final = 2 +DCB_DIRTY: Final = DCB_ACCUMULATE +DCB_SET: Final[int] +DCB_ENABLE: Final = 4 +DCB_DISABLE: Final = 8 +META_SETBKCOLOR: Final = 513 +META_SETBKMODE: Final = 258 +META_SETMAPMODE: Final = 259 +META_SETROP2: Final = 260 +META_SETRELABS: Final = 261 +META_SETPOLYFILLMODE: Final = 262 +META_SETSTRETCHBLTMODE: Final = 263 +META_SETTEXTCHAREXTRA: Final = 264 +META_SETTEXTCOLOR: Final = 521 +META_SETTEXTJUSTIFICATION: Final = 522 +META_SETWINDOWORG: Final = 523 +META_SETWINDOWEXT: Final = 524 +META_SETVIEWPORTORG: Final = 525 +META_SETVIEWPORTEXT: Final = 526 +META_OFFSETWINDOWORG: Final = 527 +META_SCALEWINDOWEXT: Final = 1040 +META_OFFSETVIEWPORTORG: Final = 529 +META_SCALEVIEWPORTEXT: Final = 1042 +META_LINETO: Final = 531 +META_MOVETO: Final = 532 +META_EXCLUDECLIPRECT: Final = 1045 +META_INTERSECTCLIPRECT: Final = 1046 +META_ARC: Final = 2071 +META_ELLIPSE: Final = 1048 +META_FLOODFILL: Final = 1049 +META_PIE: Final = 2074 +META_RECTANGLE: Final = 1051 +META_ROUNDRECT: Final = 1564 +META_PATBLT: Final = 1565 +META_SAVEDC: Final = 30 +META_SETPIXEL: Final = 1055 +META_OFFSETCLIPRGN: Final = 544 +META_TEXTOUT: Final = 1313 +META_BITBLT: Final = 2338 +META_STRETCHBLT: Final = 2851 +META_POLYGON: Final = 804 +META_POLYLINE: Final = 805 +META_ESCAPE: Final = 1574 +META_RESTOREDC: Final = 295 +META_FILLREGION: Final = 552 +META_FRAMEREGION: Final = 1065 +META_INVERTREGION: Final = 298 +META_PAINTREGION: Final = 299 +META_SELECTCLIPREGION: Final = 300 +META_SELECTOBJECT: Final = 301 +META_SETTEXTALIGN: Final = 302 +META_CHORD: Final = 2096 +META_SETMAPPERFLAGS: Final = 561 +META_EXTTEXTOUT: Final = 2610 +META_SETDIBTODEV: Final = 3379 +META_SELECTPALETTE: Final = 
564 +META_REALIZEPALETTE: Final = 53 +META_ANIMATEPALETTE: Final = 1078 +META_SETPALENTRIES: Final = 55 +META_POLYPOLYGON: Final = 1336 +META_RESIZEPALETTE: Final = 313 +META_DIBBITBLT: Final = 2368 +META_DIBSTRETCHBLT: Final = 2881 +META_DIBCREATEPATTERNBRUSH: Final = 322 +META_STRETCHDIB: Final = 3907 +META_EXTFLOODFILL: Final = 1352 +META_DELETEOBJECT: Final = 496 +META_CREATEPALETTE: Final = 247 +META_CREATEPATTERNBRUSH: Final = 505 +META_CREATEPENINDIRECT: Final = 762 +META_CREATEFONTINDIRECT: Final = 763 +META_CREATEBRUSHINDIRECT: Final = 764 +META_CREATEREGION: Final = 1791 +FILE_BEGIN: Final = 0 +FILE_CURRENT: Final = 1 +FILE_END: Final = 2 +FILE_FLAG_WRITE_THROUGH: Final = -2147483648 +FILE_FLAG_OVERLAPPED: Final = 1073741824 +FILE_FLAG_NO_BUFFERING: Final = 536870912 +FILE_FLAG_RANDOM_ACCESS: Final = 268435456 +FILE_FLAG_SEQUENTIAL_SCAN: Final = 134217728 +FILE_FLAG_DELETE_ON_CLOSE: Final = 67108864 +FILE_FLAG_BACKUP_SEMANTICS: Final = 33554432 +FILE_FLAG_POSIX_SEMANTICS: Final = 16777216 +CREATE_NEW: Final = 1 +CREATE_ALWAYS: Final = 2 +OPEN_EXISTING: Final = 3 +OPEN_ALWAYS: Final = 4 +TRUNCATE_EXISTING: Final = 5 +PIPE_ACCESS_INBOUND: Final = 1 +PIPE_ACCESS_OUTBOUND: Final = 2 +PIPE_ACCESS_DUPLEX: Final = 3 +PIPE_CLIENT_END: Final = 0 +PIPE_SERVER_END: Final = 1 +PIPE_WAIT: Final = 0 +PIPE_NOWAIT: Final = 1 +PIPE_READMODE_BYTE: Final = 0 +PIPE_READMODE_MESSAGE: Final = 2 +PIPE_TYPE_BYTE: Final = 0 +PIPE_TYPE_MESSAGE: Final = 4 +PIPE_UNLIMITED_INSTANCES: Final = 255 +SECURITY_CONTEXT_TRACKING: Final = 262144 +SECURITY_EFFECTIVE_ONLY: Final = 524288 +SECURITY_SQOS_PRESENT: Final = 1048576 +SECURITY_VALID_SQOS_FLAGS: Final = 2031616 +DTR_CONTROL_DISABLE: Final = 0 +DTR_CONTROL_ENABLE: Final = 1 +DTR_CONTROL_HANDSHAKE: Final = 2 +RTS_CONTROL_DISABLE: Final = 0 +RTS_CONTROL_ENABLE: Final = 1 +RTS_CONTROL_HANDSHAKE: Final = 2 +RTS_CONTROL_TOGGLE: Final = 3 +GMEM_FIXED: Final = 0 +GMEM_MOVEABLE: Final = 2 +GMEM_NOCOMPACT: Final = 16 +GMEM_NODISCARD: Final = 32 +GMEM_ZEROINIT: Final = 64 +GMEM_MODIFY: Final = 128 +GMEM_DISCARDABLE: Final = 256 +GMEM_NOT_BANKED: Final = 4096 +GMEM_SHARE: Final = 8192 +GMEM_DDESHARE: Final = 8192 +GMEM_NOTIFY: Final = 16384 +GMEM_LOWER: Final = GMEM_NOT_BANKED +GMEM_VALID_FLAGS: Final = 32626 +GMEM_INVALID_HANDLE: Final = 32768 +GHND: Final[int] +GPTR: Final[int] +GMEM_DISCARDED: Final = 16384 +GMEM_LOCKCOUNT: Final = 255 +LMEM_FIXED: Final = 0 +LMEM_MOVEABLE: Final = 2 +LMEM_NOCOMPACT: Final = 16 +LMEM_NODISCARD: Final = 32 +LMEM_ZEROINIT: Final = 64 +LMEM_MODIFY: Final = 128 +LMEM_DISCARDABLE: Final = 3840 +LMEM_VALID_FLAGS: Final = 3954 +LMEM_INVALID_HANDLE: Final = 32768 +LHND: Final[int] +LPTR: Final[int] +NONZEROLHND: Final = LMEM_MOVEABLE +NONZEROLPTR: Final = LMEM_FIXED +LMEM_DISCARDED: Final = 16384 +LMEM_LOCKCOUNT: Final = 255 +DEBUG_PROCESS: Final = 1 +DEBUG_ONLY_THIS_PROCESS: Final = 2 +CREATE_SUSPENDED: Final = 4 +DETACHED_PROCESS: Final = 8 +CREATE_NEW_CONSOLE: Final = 16 +NORMAL_PRIORITY_CLASS: Final = 32 +IDLE_PRIORITY_CLASS: Final = 64 +HIGH_PRIORITY_CLASS: Final = 128 +REALTIME_PRIORITY_CLASS: Final = 256 +CREATE_NEW_PROCESS_GROUP: Final = 512 +CREATE_UNICODE_ENVIRONMENT: Final = 1024 +CREATE_SEPARATE_WOW_VDM: Final = 2048 +CREATE_SHARED_WOW_VDM: Final = 4096 +CREATE_DEFAULT_ERROR_MODE: Final = 67108864 +CREATE_NO_WINDOW: Final = 134217728 +PROFILE_USER: Final = 268435456 +PROFILE_KERNEL: Final = 536870912 +PROFILE_SERVER: Final = 1073741824 +THREAD_BASE_PRIORITY_LOWRT: Final = 15 +THREAD_BASE_PRIORITY_MAX: Final = 2 
+THREAD_BASE_PRIORITY_MIN: Final = -2 +THREAD_BASE_PRIORITY_IDLE: Final = -15 +THREAD_PRIORITY_LOWEST: Final = THREAD_BASE_PRIORITY_MIN +THREAD_PRIORITY_BELOW_NORMAL: Final[int] +THREAD_PRIORITY_HIGHEST: Final = THREAD_BASE_PRIORITY_MAX +THREAD_PRIORITY_ABOVE_NORMAL: Final[int] +THREAD_PRIORITY_ERROR_RETURN: Final = MAXLONG +THREAD_PRIORITY_TIME_CRITICAL: Final = THREAD_BASE_PRIORITY_LOWRT +THREAD_PRIORITY_IDLE: Final = THREAD_BASE_PRIORITY_IDLE +THREAD_PRIORITY_NORMAL: Final = 0 +THREAD_MODE_BACKGROUND_BEGIN: Final = 0x00010000 +THREAD_MODE_BACKGROUND_END: Final = 0x00020000 + +EXCEPTION_DEBUG_EVENT: Final = 1 +CREATE_THREAD_DEBUG_EVENT: Final = 2 +CREATE_PROCESS_DEBUG_EVENT: Final = 3 +EXIT_THREAD_DEBUG_EVENT: Final = 4 +EXIT_PROCESS_DEBUG_EVENT: Final = 5 +LOAD_DLL_DEBUG_EVENT: Final = 6 +UNLOAD_DLL_DEBUG_EVENT: Final = 7 +OUTPUT_DEBUG_STRING_EVENT: Final = 8 +RIP_EVENT: Final = 9 +DRIVE_UNKNOWN: Final = 0 +DRIVE_NO_ROOT_DIR: Final = 1 +DRIVE_REMOVABLE: Final = 2 +DRIVE_FIXED: Final = 3 +DRIVE_REMOTE: Final = 4 +DRIVE_CDROM: Final = 5 +DRIVE_RAMDISK: Final = 6 +FILE_TYPE_UNKNOWN: Final = 0 +FILE_TYPE_DISK: Final = 1 +FILE_TYPE_CHAR: Final = 2 +FILE_TYPE_PIPE: Final = 3 +FILE_TYPE_REMOTE: Final = 32768 +NOPARITY: Final = 0 +ODDPARITY: Final = 1 +EVENPARITY: Final = 2 +MARKPARITY: Final = 3 +SPACEPARITY: Final = 4 +ONESTOPBIT: Final = 0 +ONE5STOPBITS: Final = 1 +TWOSTOPBITS: Final = 2 +CBR_110: Final = 110 +CBR_300: Final = 300 +CBR_600: Final = 600 +CBR_1200: Final = 1200 +CBR_2400: Final = 2400 +CBR_4800: Final = 4800 +CBR_9600: Final = 9600 +CBR_14400: Final = 14400 +CBR_19200: Final = 19200 +CBR_38400: Final = 38400 +CBR_56000: Final = 56000 +CBR_57600: Final = 57600 +CBR_115200: Final = 115200 +CBR_128000: Final = 128000 +CBR_256000: Final = 256000 +S_QUEUEEMPTY: Final = 0 +S_THRESHOLD: Final = 1 +S_ALLTHRESHOLD: Final = 2 +S_NORMAL: Final = 0 +S_LEGATO: Final = 1 +S_STACCATO: Final = 2 +NMPWAIT_WAIT_FOREVER: Final = -1 +NMPWAIT_NOWAIT: Final = 1 +NMPWAIT_USE_DEFAULT_WAIT: Final = 0 +OF_READ: Final = 0 +OF_WRITE: Final = 1 +OF_READWRITE: Final = 2 +OF_SHARE_COMPAT: Final = 0 +OF_SHARE_EXCLUSIVE: Final = 16 +OF_SHARE_DENY_WRITE: Final = 32 +OF_SHARE_DENY_READ: Final = 48 +OF_SHARE_DENY_NONE: Final = 64 +OF_PARSE: Final = 256 +OF_DELETE: Final = 512 +OF_VERIFY: Final = 1024 +OF_CANCEL: Final = 2048 +OF_CREATE: Final = 4096 +OF_PROMPT: Final = 8192 +OF_EXIST: Final = 16384 +OF_REOPEN: Final = 32768 +OFS_MAXPATHNAME: Final = 128 +MAXINTATOM: Final = 49152 + +PROCESS_HEAP_REGION: Final = 1 +PROCESS_HEAP_UNCOMMITTED_RANGE: Final = 2 +PROCESS_HEAP_ENTRY_BUSY: Final = 4 +PROCESS_HEAP_ENTRY_MOVEABLE: Final = 16 +PROCESS_HEAP_ENTRY_DDESHARE: Final = 32 +SCS_32BIT_BINARY: Final = 0 +SCS_DOS_BINARY: Final = 1 +SCS_WOW_BINARY: Final = 2 +SCS_PIF_BINARY: Final = 3 +SCS_POSIX_BINARY: Final = 4 +SCS_OS216_BINARY: Final = 5 +SEM_FAILCRITICALERRORS: Final = 1 +SEM_NOGPFAULTERRORBOX: Final = 2 +SEM_NOALIGNMENTFAULTEXCEPT: Final = 4 +SEM_NOOPENFILEERRORBOX: Final = 32768 +LOCKFILE_FAIL_IMMEDIATELY: Final = 1 +LOCKFILE_EXCLUSIVE_LOCK: Final = 2 +HANDLE_FLAG_INHERIT: Final = 1 +HANDLE_FLAG_PROTECT_FROM_CLOSE: Final = 2 +HINSTANCE_ERROR: Final = 32 +GET_TAPE_MEDIA_INFORMATION: Final = 0 +GET_TAPE_DRIVE_INFORMATION: Final = 1 +SET_TAPE_MEDIA_INFORMATION: Final = 0 +SET_TAPE_DRIVE_INFORMATION: Final = 1 +FORMAT_MESSAGE_ALLOCATE_BUFFER: Final = 256 +FORMAT_MESSAGE_IGNORE_INSERTS: Final = 512 +FORMAT_MESSAGE_FROM_STRING: Final = 1024 +FORMAT_MESSAGE_FROM_HMODULE: Final = 2048 +FORMAT_MESSAGE_FROM_SYSTEM: Final 
= 4096 +FORMAT_MESSAGE_ARGUMENT_ARRAY: Final = 8192 +FORMAT_MESSAGE_MAX_WIDTH_MASK: Final = 255 +BACKUP_INVALID: Final = 0 +BACKUP_DATA: Final = 1 +BACKUP_EA_DATA: Final = 2 +BACKUP_SECURITY_DATA: Final = 3 +BACKUP_ALTERNATE_DATA: Final = 4 +BACKUP_LINK: Final = 5 +BACKUP_PROPERTY_DATA: Final = 6 +BACKUP_OBJECT_ID: Final = 7 +BACKUP_REPARSE_DATA: Final = 8 +BACKUP_SPARSE_BLOCK: Final = 9 + +STREAM_NORMAL_ATTRIBUTE: Final = 0 +STREAM_MODIFIED_WHEN_READ: Final = 1 +STREAM_CONTAINS_SECURITY: Final = 2 +STREAM_CONTAINS_PROPERTIES: Final = 4 +STARTF_USESHOWWINDOW: Final = 1 +STARTF_USESIZE: Final = 2 +STARTF_USEPOSITION: Final = 4 +STARTF_USECOUNTCHARS: Final = 8 +STARTF_USEFILLATTRIBUTE: Final = 16 +STARTF_FORCEONFEEDBACK: Final = 64 +STARTF_FORCEOFFFEEDBACK: Final = 128 +STARTF_USESTDHANDLES: Final = 256 +STARTF_USEHOTKEY: Final = 512 +SHUTDOWN_NORETRY: Final = 1 +DONT_RESOLVE_DLL_REFERENCES: Final = 1 +LOAD_LIBRARY_AS_DATAFILE: Final = 2 +LOAD_WITH_ALTERED_SEARCH_PATH: Final = 8 +DDD_RAW_TARGET_PATH: Final = 1 +DDD_REMOVE_DEFINITION: Final = 2 +DDD_EXACT_MATCH_ON_REMOVE: Final = 4 +MOVEFILE_REPLACE_EXISTING: Final = 1 +MOVEFILE_COPY_ALLOWED: Final = 2 +MOVEFILE_DELAY_UNTIL_REBOOT: Final = 4 +MAX_COMPUTERNAME_LENGTH: Final = 15 +LOGON32_LOGON_INTERACTIVE: Final = 2 +LOGON32_LOGON_NETWORK: Final = 3 +LOGON32_LOGON_BATCH: Final = 4 +LOGON32_LOGON_SERVICE: Final = 5 +LOGON32_LOGON_UNLOCK: Final = 7 +LOGON32_LOGON_NETWORK_CLEARTEXT: Final = 8 +LOGON32_LOGON_NEW_CREDENTIALS: Final = 9 +LOGON32_PROVIDER_DEFAULT: Final = 0 +LOGON32_PROVIDER_WINNT35: Final = 1 +LOGON32_PROVIDER_WINNT40: Final = 2 +LOGON32_PROVIDER_WINNT50: Final = 3 +VER_PLATFORM_WIN32s: Final = 0 +VER_PLATFORM_WIN32_WINDOWS: Final = 1 +VER_PLATFORM_WIN32_NT: Final = 2 +TC_NORMAL: Final = 0 +TC_HARDERR: Final = 1 +TC_GP_TRAP: Final = 2 +TC_SIGNAL: Final = 3 +AC_LINE_OFFLINE: Final = 0 +AC_LINE_ONLINE: Final = 1 +AC_LINE_BACKUP_POWER: Final = 2 +AC_LINE_UNKNOWN: Final = 255 +BATTERY_FLAG_HIGH: Final = 1 +BATTERY_FLAG_LOW: Final = 2 +BATTERY_FLAG_CRITICAL: Final = 4 +BATTERY_FLAG_CHARGING: Final = 8 +BATTERY_FLAG_NO_BATTERY: Final = 128 +BATTERY_FLAG_UNKNOWN: Final = 255 +BATTERY_PERCENTAGE_UNKNOWN: Final = 255 +BATTERY_LIFE_UNKNOWN: Final = -1 + +cchTextLimitDefault: Final = 32767 +WM_CONTEXTMENU: Final = 123 +WM_PRINTCLIENT: Final = 792 +EN_MSGFILTER: Final = 1792 +EN_REQUESTRESIZE: Final = 1793 +EN_SELCHANGE: Final = 1794 +EN_DROPFILES: Final = 1795 +EN_PROTECTED: Final = 1796 +EN_CORRECTTEXT: Final = 1797 +EN_STOPNOUNDO: Final = 1798 +EN_IMECHANGE: Final = 1799 +EN_SAVECLIPBOARD: Final = 1800 +EN_OLEOPFAILED: Final = 1801 +ENM_NONE: Final = 0 +ENM_CHANGE: Final = 1 +ENM_UPDATE: Final = 2 +ENM_SCROLL: Final = 4 +ENM_KEYEVENTS: Final = 65536 +ENM_MOUSEEVENTS: Final = 131072 +ENM_REQUESTRESIZE: Final = 262144 +ENM_SELCHANGE: Final = 524288 +ENM_DROPFILES: Final = 1048576 +ENM_PROTECTED: Final = 2097152 +ENM_CORRECTTEXT: Final = 4194304 +ENM_IMECHANGE: Final = 8388608 +ES_SAVESEL: Final = 32768 +ES_SUNKEN: Final = 16384 +ES_DISABLENOSCROLL: Final = 8192 +ES_SELECTIONBAR: Final = 16777216 +ES_EX_NOCALLOLEINIT: Final = 16777216 +ES_VERTICAL: Final = 4194304 +ES_NOIME: Final = 524288 +ES_SELFIME: Final = 262144 +ECO_AUTOWORDSELECTION: Final = 1 +ECO_AUTOVSCROLL: Final = 64 +ECO_AUTOHSCROLL: Final = 128 +ECO_NOHIDESEL: Final = 256 +ECO_READONLY: Final = 2048 +ECO_WANTRETURN: Final = 4096 +ECO_SAVESEL: Final = 32768 +ECO_SELECTIONBAR: Final = 16777216 +ECO_VERTICAL: Final = 4194304 +ECOOP_SET: Final = 1 +ECOOP_OR: Final = 2 +ECOOP_AND: 
Final = 3 +ECOOP_XOR: Final = 4 +WB_CLASSIFY: Final = 3 +WB_MOVEWORDLEFT: Final = 4 +WB_MOVEWORDRIGHT: Final = 5 +WB_LEFTBREAK: Final = 6 +WB_RIGHTBREAK: Final = 7 +WB_MOVEWORDPREV: Final = 4 +WB_MOVEWORDNEXT: Final = 5 +WB_PREVBREAK: Final = 6 +WB_NEXTBREAK: Final = 7 +PC_FOLLOWING: Final = 1 +PC_LEADING: Final = 2 +PC_OVERFLOW: Final = 3 +PC_DELIMITER: Final = 4 +WBF_WORDWRAP: Final = 16 +WBF_WORDBREAK: Final = 32 +WBF_OVERFLOW: Final = 64 +WBF_LEVEL1: Final = 128 +WBF_LEVEL2: Final = 256 +WBF_CUSTOM: Final = 512 +CFM_BOLD: Final = 1 +CFM_ITALIC: Final = 2 +CFM_UNDERLINE: Final = 4 +CFM_STRIKEOUT: Final = 8 +CFM_PROTECTED: Final = 16 +CFM_SIZE: Final = -2147483648 +CFM_COLOR: Final = 1073741824 +CFM_FACE: Final = 536870912 +CFM_OFFSET: Final = 268435456 +CFM_CHARSET: Final = 134217728 +CFE_BOLD: Final = 1 +CFE_ITALIC: Final = 2 +CFE_UNDERLINE: Final = 4 +CFE_STRIKEOUT: Final = 8 +CFE_PROTECTED: Final = 16 +CFE_AUTOCOLOR: Final = 1073741824 +yHeightCharPtsMost: Final = 1638 +SCF_SELECTION: Final = 1 +SCF_WORD: Final = 2 +SF_TEXT: Final = 1 +SF_RTF: Final = 2 +SF_RTFNOOBJS: Final = 3 +SF_TEXTIZED: Final = 4 +SFF_SELECTION: Final = 32768 +SFF_PLAINRTF: Final = 16384 +MAX_TAB_STOPS: Final = 32 +lDefaultTab: Final = 720 +PFM_STARTINDENT: Final = 1 +PFM_RIGHTINDENT: Final = 2 +PFM_OFFSET: Final = 4 +PFM_ALIGNMENT: Final = 8 +PFM_TABSTOPS: Final = 16 +PFM_NUMBERING: Final = 32 +PFM_OFFSETINDENT: Final = -2147483648 +PFN_BULLET: Final = 1 +PFA_LEFT: Final = 1 +PFA_RIGHT: Final = 2 +PFA_CENTER: Final = 3 +WM_NOTIFY: Final = 78 +SEL_EMPTY: Final = 0 +SEL_TEXT: Final = 1 +SEL_OBJECT: Final = 2 +SEL_MULTICHAR: Final = 4 +SEL_MULTIOBJECT: Final = 8 +OLEOP_DOVERB: Final = 1 +CF_RTF: Final = "Rich Text Format" +CF_RTFNOOBJS: Final = "Rich Text Format Without Objects" +CF_RETEXTOBJ: Final = "RichEdit Text and Objects" + +RIGHT_ALT_PRESSED: Final = 1 +LEFT_ALT_PRESSED: Final = 2 +RIGHT_CTRL_PRESSED: Final = 4 +LEFT_CTRL_PRESSED: Final = 8 +SHIFT_PRESSED: Final = 16 +NUMLOCK_ON: Final = 32 +SCROLLLOCK_ON: Final = 64 +CAPSLOCK_ON: Final = 128 +ENHANCED_KEY: Final = 256 +NLS_DBCSCHAR: Final = 65536 +NLS_ALPHANUMERIC: Final = 0 +NLS_KATAKANA: Final = 131072 +NLS_HIRAGANA: Final = 262144 +NLS_ROMAN: Final = 4194304 +NLS_IME_CONVERSION: Final = 8388608 +NLS_IME_DISABLE: Final = 536870912 + +FROM_LEFT_1ST_BUTTON_PRESSED: Final = 1 +RIGHTMOST_BUTTON_PRESSED: Final = 2 +FROM_LEFT_2ND_BUTTON_PRESSED: Final = 4 +FROM_LEFT_3RD_BUTTON_PRESSED: Final = 8 +FROM_LEFT_4TH_BUTTON_PRESSED: Final = 16 + +CTRL_C_EVENT: Final = 0 +CTRL_BREAK_EVENT: Final = 1 +CTRL_CLOSE_EVENT: Final = 2 +CTRL_LOGOFF_EVENT: Final = 5 +CTRL_SHUTDOWN_EVENT: Final = 6 + +MOUSE_MOVED: Final = 1 +DOUBLE_CLICK: Final = 2 +MOUSE_WHEELED: Final = 4 + +PSM_SETCURSEL: Final[int] +PSM_REMOVEPAGE: Final[int] +PSM_ADDPAGE: Final[int] +PSM_CHANGED: Final[int] +PSM_RESTARTWINDOWS: Final[int] +PSM_REBOOTSYSTEM: Final[int] +PSM_CANCELTOCLOSE: Final[int] +PSM_QUERYSIBLINGS: Final[int] +PSM_UNCHANGED: Final[int] +PSM_APPLY: Final[int] +PSM_SETTITLEA: Final[int] +PSM_SETTITLEW: Final[int] +PSM_SETWIZBUTTONS: Final[int] +PSM_PRESSBUTTON: Final[int] +PSM_SETCURSELID: Final[int] +PSM_SETFINISHTEXTA: Final[int] +PSM_SETFINISHTEXTW: Final[int] +PSM_GETTABCONTROL: Final[int] +PSM_ISDIALOGMESSAGE: Final[int] +PSM_GETCURRENTPAGEHWND: Final[int] +PSM_INSERTPAGE: Final[int] +PSM_SETHEADERTITLEA: Final[int] +PSM_SETHEADERTITLEW: Final[int] +PSM_SETHEADERSUBTITLEA: Final[int] +PSM_SETHEADERSUBTITLEW: Final[int] +PSM_HWNDTOINDEX: Final[int] +PSM_INDEXTOHWND: Final[int] 
+PSM_PAGETOINDEX: Final[int] +PSM_INDEXTOPAGE: Final[int] +PSM_IDTOINDEX: Final[int] +PSM_INDEXTOID: Final[int] +PSM_GETRESULT: Final[int] +PSM_RECALCPAGESIZES: Final[int] + +NameUnknown: Final = 0 +NameFullyQualifiedDN: Final = 1 +NameSamCompatible: Final = 2 +NameDisplay: Final = 3 +NameUniqueId: Final = 6 +NameCanonical: Final = 7 +NameUserPrincipal: Final = 8 +NameCanonicalEx: Final = 9 +NameServicePrincipal: Final = 10 +NameDnsDomain: Final = 12 + +ComputerNameNetBIOS: Final = 0 +ComputerNameDnsHostname: Final = 1 +ComputerNameDnsDomain: Final = 2 +ComputerNameDnsFullyQualified: Final = 3 +ComputerNamePhysicalNetBIOS: Final = 4 +ComputerNamePhysicalDnsHostname: Final = 5 +ComputerNamePhysicalDnsDomain: Final = 6 +ComputerNamePhysicalDnsFullyQualified: Final = 7 + +LWA_COLORKEY: Final = 0x00000001 +LWA_ALPHA: Final = 0x00000002 +ULW_COLORKEY: Final = 0x00000001 +ULW_ALPHA: Final = 0x00000002 +ULW_OPAQUE: Final = 0x00000004 + +TRUE: Final = 1 +FALSE: Final = 0 +MAX_PATH: Final = 260 + +AC_SRC_OVER: Final = 0 +AC_SRC_ALPHA: Final = 1 +GRADIENT_FILL_RECT_H: Final = 0 +GRADIENT_FILL_RECT_V: Final = 1 +GRADIENT_FILL_TRIANGLE: Final = 2 +GRADIENT_FILL_OP_FLAG: Final = 255 + +MM_WORKING_SET_MAX_HARD_ENABLE: Final = 1 +MM_WORKING_SET_MAX_HARD_DISABLE: Final = 2 +MM_WORKING_SET_MIN_HARD_ENABLE: Final = 4 +MM_WORKING_SET_MIN_HARD_DISABLE: Final = 8 + +VOLUME_NAME_DOS: Final = 0 +VOLUME_NAME_GUID: Final = 1 +VOLUME_NAME_NT: Final = 2 +VOLUME_NAME_NONE: Final = 4 +FILE_NAME_NORMALIZED: Final = 0 +FILE_NAME_OPENED: Final = 8 + +DEVICE_NOTIFY_WINDOW_HANDLE: Final = 0x00000000 +DEVICE_NOTIFY_SERVICE_HANDLE: Final = 0x00000001 + +WM_DEVICECHANGE: Final = 0x0219 +BSF_QUERY: Final = 0x00000001 +BSF_IGNORECURRENTTASK: Final = 0x00000002 +BSF_FLUSHDISK: Final = 0x00000004 +BSF_NOHANG: Final = 0x00000008 +BSF_POSTMESSAGE: Final = 0x00000010 +BSF_FORCEIFHUNG: Final = 0x00000020 +BSF_NOTIMEOUTIFNOTHUNG: Final = 0x00000040 +BSF_MSGSRV32ISOK: Final = -2147483648 +BSF_MSGSRV32ISOK_BIT: Final = 31 +BSM_ALLCOMPONENTS: Final = 0x00000000 +BSM_VXDS: Final = 0x00000001 +BSM_NETDRIVER: Final = 0x00000002 +BSM_INSTALLABLEDRIVERS: Final = 0x00000004 +BSM_APPLICATIONS: Final = 0x00000008 +DBT_APPYBEGIN: Final = 0x0000 +DBT_APPYEND: Final = 0x0001 +DBT_DEVNODES_CHANGED: Final = 0x0007 +DBT_QUERYCHANGECONFIG: Final = 0x0017 +DBT_CONFIGCHANGED: Final = 0x0018 +DBT_CONFIGCHANGECANCELED: Final = 0x0019 +DBT_MONITORCHANGE: Final = 0x001B +DBT_SHELLLOGGEDON: Final = 0x0020 +DBT_CONFIGMGAPI32: Final = 0x0022 +DBT_VXDINITCOMPLETE: Final = 0x0023 +DBT_VOLLOCKQUERYLOCK: Final = 0x8041 +DBT_VOLLOCKLOCKTAKEN: Final = 0x8042 +DBT_VOLLOCKLOCKFAILED: Final = 0x8043 +DBT_VOLLOCKQUERYUNLOCK: Final = 0x8044 +DBT_VOLLOCKLOCKRELEASED: Final = 0x8045 +DBT_VOLLOCKUNLOCKFAILED: Final = 0x8046 +LOCKP_ALLOW_WRITES: Final = 0x01 +LOCKP_FAIL_WRITES: Final = 0x00 +LOCKP_FAIL_MEM_MAPPING: Final = 0x02 +LOCKP_ALLOW_MEM_MAPPING: Final = 0x00 +LOCKP_USER_MASK: Final = 0x03 +LOCKP_LOCK_FOR_FORMAT: Final = 0x04 +LOCKF_LOGICAL_LOCK: Final = 0x00 +LOCKF_PHYSICAL_LOCK: Final = 0x01 +DBT_NO_DISK_SPACE: Final = 0x0047 +DBT_LOW_DISK_SPACE: Final = 0x0048 +DBT_CONFIGMGPRIVATE: Final = 0x7FFF +DBT_DEVICEARRIVAL: Final = 0x8000 +DBT_DEVICEQUERYREMOVE: Final = 0x8001 +DBT_DEVICEQUERYREMOVEFAILED: Final = 0x8002 +DBT_DEVICEREMOVEPENDING: Final = 0x8003 +DBT_DEVICEREMOVECOMPLETE: Final = 0x8004 +DBT_DEVICETYPESPECIFIC: Final = 0x8005 +DBT_CUSTOMEVENT: Final = 0x8006 +DBT_DEVTYP_OEM: Final = 0x00000000 +DBT_DEVTYP_DEVNODE: Final = 0x00000001 +DBT_DEVTYP_VOLUME: Final 
= 0x00000002
+DBT_DEVTYP_PORT: Final = 0x00000003
+DBT_DEVTYP_NET: Final = 0x00000004
+DBT_DEVTYP_DEVICEINTERFACE: Final = 0x00000005
+DBT_DEVTYP_HANDLE: Final = 0x00000006
+DBTF_MEDIA: Final = 0x0001
+DBTF_NET: Final = 0x0002
+DBTF_RESOURCE: Final = 0x00000001
+DBTF_XPORT: Final = 0x00000002
+DBTF_SLOWNET: Final = 0x00000004
+DBT_VPOWERDAPI: Final = 0x8100
+DBT_USERDEFINED: Final = 0xFFFF
+
+IME_CMODE_ALPHANUMERIC: Final = 0x0000
+IME_CMODE_NATIVE: Final = 0x0001
+IME_CMODE_CHINESE: Final = IME_CMODE_NATIVE
+IME_CMODE_HANGUL: Final = IME_CMODE_NATIVE
+IME_CMODE_JAPANESE: Final = IME_CMODE_NATIVE
+IME_CMODE_KATAKANA: Final = 0x0002
+IME_CMODE_LANGUAGE: Final = 0x0003
+IME_CMODE_FULLSHAPE: Final = 0x0008
+IME_CMODE_ROMAN: Final = 0x0010
+IME_CMODE_CHARCODE: Final = 0x0020
+IME_CMODE_HANJACONVERT: Final = 0x0040
+IME_CMODE_NATIVESYMBOL: Final = 0x0080
diff --git a/stubs/pywin32/win32/lib/win32timezone.pyi b/stubs/pywin32/win32/lib/win32timezone.pyi
index 2b604323aa4a..2d254c0ce43f 100644
--- a/stubs/pywin32/win32/lib/win32timezone.pyi
+++ b/stubs/pywin32/win32/lib/win32timezone.pyi
@@ -1,13 +1,23 @@
 import datetime
-from _typeshed import Incomplete
+from _operator import _SupportsComparison
+from _typeshed import Incomplete, SupportsKeysAndGetItem
+from collections.abc import Callable, Iterable, Mapping
+from logging import Logger
+from typing import ClassVar, TypeVar, overload, type_check_only
+from typing_extensions import Self
 
-log: Incomplete
+_RangeMapKT = TypeVar("_RangeMapKT", bound=_SupportsComparison)
+
+_T = TypeVar("_T")
+_VT = TypeVar("_VT")
+
+log: Logger
 
 class _SimpleStruct:
     def __init__(self, *args, **kw) -> None: ...
-    def field_names(self): ...
-    def __eq__(self, other): ...
-    def __ne__(self, other): ...
+    def field_names(self) -> list[str]: ...
+    def __eq__(self, other: object) -> bool: ...
+    def __ne__(self, other: object) -> bool: ...
 
 class SYSTEMTIME(_SimpleStruct): ...
 class TIME_ZONE_INFORMATION(_SimpleStruct): ...
@@ -15,59 +25,88 @@ class DYNAMIC_TIME_ZONE_INFORMATION(_SimpleStruct): ...
 
 class TimeZoneDefinition(DYNAMIC_TIME_ZONE_INFORMATION):
     def __init__(self, *args, **kwargs) -> None: ...
+    # TIME_ZONE_INFORMATION fields as obtained by __getattribute__
+    bias: datetime.timedelta
+    standard_name: str
+    standard_start: SYSTEMTIME
+    standard_bias: datetime.timedelta
+    daylight_name: str
+    daylight_start: SYSTEMTIME
+    daylight_bias: datetime.timedelta
     def __getattribute__(self, attr: str): ...
     @classmethod
-    def current(cls): ...
+    def current(cls) -> tuple[int, Self]: ...
     def set(self) -> None: ...
-    def copy(self): ...
-    def locate_daylight_start(self, year): ...
-    def locate_standard_start(self, year): ...
+    def copy(self) -> Self: ...
+    def locate_daylight_start(self, year) -> datetime.datetime: ...
+    def locate_standard_start(self, year) -> datetime.datetime: ...
 
 class TimeZoneInfo(datetime.tzinfo):
-    tzRegKey: str
-    timeZoneName: Incomplete
-    fixedStandardTime: Incomplete
-    def __init__(self, param: Incomplete | None = ..., fix_standard_time: bool = ...) -> None: ...
-    def tzname(self, dt): ...
-    def getWinInfo(self, targetYear): ...
-    def utcoffset(self, dt): ...
-    def dst(self, dt): ...
-    def GetDSTStartTime(self, year): ...
-    def GetDSTEndTime(self, year): ...
-    def __le__(self, other) -> bool: ...
-    def __eq__(self, other) -> bool: ...
-    def __ne__(self, other) -> bool: ...
+    tzRegKey: ClassVar[str]
+    timeZoneName: str
+    fixedStandardTime: bool
+    def __init__(self, param: str | TimeZoneDefinition, fix_standard_time: bool = False) -> None: ...
+    @overload  # type: ignore[override]  # Split definition into overrides
+    def tzname(self, dt: datetime.datetime) -> str: ...
+    @overload
+    def tzname(self, dt: None) -> None: ...
+    def getWinInfo(self, targetYear: int) -> TimeZoneDefinition: ...
+    @overload  # type: ignore[override]  # False-positive, our overload covers all base types
+    def utcoffset(self, dt: None) -> None: ...
+    @overload
+    def utcoffset(self, dt: datetime.datetime) -> datetime.timedelta: ...
+    @overload  # type: ignore[override]  # False-positive, our overload covers all base types
+    def dst(self, dt: None) -> None: ...
+    @overload
+    def dst(self, dt: datetime.datetime) -> datetime.timedelta: ...
+    def GetDSTStartTime(self, year: int) -> datetime.datetime: ...
+    def GetDSTEndTime(self, year: int) -> datetime.datetime: ...
+    def __eq__(self, other: object) -> bool: ...
+    def __ne__(self, other: object) -> bool: ...
     @classmethod
-    def local(cls): ...
+    def local(cls) -> Self: ...
     @classmethod
-    def utc(cls): ...
+    def utc(cls) -> Self: ...
    @staticmethod
-    def get_sorted_time_zone_names(): ...
+    def get_sorted_time_zone_names() -> list[str]: ...
     @staticmethod
-    def get_all_time_zones(): ...
+    def get_all_time_zones() -> list[TimeZoneInfo]: ...
     @staticmethod
     def get_sorted_time_zones(key: Incomplete | None = ...): ...
 
-def utcnow(): ...
-def now(): ...
-def GetTZCapabilities(): ...
+def utcnow() -> datetime.datetime: ...
+def now() -> datetime.datetime: ...
+def GetTZCapabilities() -> dict[str, bool]: ...
 
 class DLLHandleCache:
-    def __getitem__(self, filename): ...
+    def __getitem__(self, filename: str) -> int: ...
 
-DLLCache: Incomplete
+DLLCache: DLLHandleCache
 
-def resolveMUITimeZone(spec): ...
+def resolveMUITimeZone(spec: str) -> str | None: ...
+
+class RangeMap(dict[_RangeMapKT, _VT]):
+    sort_params: Mapping[str, Incomplete]
+    match: Callable[[_RangeMapKT, _RangeMapKT], bool]
+    def __init__(
+        self,
+        source: SupportsKeysAndGetItem[_RangeMapKT, _VT] | Iterable[tuple[_RangeMapKT, _VT]],
+        sort_params: Mapping[str, Incomplete] = {},
+        key_match_comparator: Callable[[_RangeMapKT, _RangeMapKT], bool] = ...,
+    ) -> None: ...
+    @classmethod
+    def left(cls, source: SupportsKeysAndGetItem[_RangeMapKT, _VT] | Iterable[tuple[_RangeMapKT, _VT]]) -> Self: ...
+    def __getitem__(self, item: _RangeMapKT) -> _VT: ...
+    @overload  # type: ignore[override]  # Signature simplified over dict and Mapping
+    def get(self, key: _RangeMapKT, default: _T) -> _VT | _T: ...
+    @overload
+    def get(self, key: _RangeMapKT, default: None = None) -> _VT | None: ...
+    def bounds(self) -> tuple[_RangeMapKT, _RangeMapKT]: ...
+    @type_check_only
+    class RangeValueUndefined: ...
 
-class RangeMap(dict[int, str]):
-    sort_params: Incomplete
-    match: Incomplete
-    def __init__(self, source, sort_params=..., key_match_comparator=...) -> None: ...
-    def __getitem__(self, item): ...
-    def get(self, key, default: Incomplete | None = ...): ...
-    def bounds(self): ...
-    undefined_value: Incomplete
+    undefined_value: RangeValueUndefined
 
     class Item(int): ...
- first_item: Incomplete - last_item: Incomplete + first_item: Item + last_item: Item diff --git a/stubs/pywin32/win32/lib/winerror.pyi b/stubs/pywin32/win32/lib/winerror.pyi index b586d0f7acec..a40ca554ca0b 100644 --- a/stubs/pywin32/win32/lib/winerror.pyi +++ b/stubs/pywin32/win32/lib/winerror.pyi @@ -1,2586 +1,7270 @@ -TRUST_E_PROVIDER_UNKNOWN: int -TRUST_E_ACTION_UNKNOWN: int -TRUST_E_SUBJECT_FORM_UNKNOWN: int -TRUST_E_SUBJECT_NOT_TRUSTED: int -FACILITY_WINRM: int -FACILITY_WINDOWSUPDATE: int -FACILITY_WINDOWS_DEFENDER: int -FACILITY_WINDOWS_CE: int -FACILITY_WINDOWS: int -FACILITY_URT: int -FACILITY_UMI: int -FACILITY_TPM_SOFTWARE: int -FACILITY_TPM_SERVICES: int -FACILITY_SXS: int -FACILITY_STORAGE: int -FACILITY_STATE_MANAGEMENT: int -FACILITY_SSPI: int -FACILITY_SCARD: int -FACILITY_SHELL: int -FACILITY_SETUPAPI: int -FACILITY_SECURITY: int -FACILITY_RPC: int -FACILITY_PLA: int -FACILITY_WIN32: int -FACILITY_CONTROL: int -FACILITY_NULL: int -FACILITY_NDIS: int -FACILITY_METADIRECTORY: int -FACILITY_MSMQ: int -FACILITY_MEDIASERVER: int -FACILITY_INTERNET: int -FACILITY_ITF: int -FACILITY_USERMODE_HYPERVISOR: int -FACILITY_HTTP: int -FACILITY_GRAPHICS: int -FACILITY_FWP: int -FACILITY_FVE: int -FACILITY_USERMODE_FILTER_MANAGER: int -FACILITY_DPLAY: int -FACILITY_DISPATCH: int -FACILITY_DIRECTORYSERVICE: int -FACILITY_CONFIGURATION: int -FACILITY_COMPLUS: int -FACILITY_USERMODE_COMMONLOG: int -FACILITY_CMI: int -FACILITY_CERT: int -FACILITY_BACKGROUNDCOPY: int -FACILITY_ACS: int -FACILITY_AAF: int -ERROR_SUCCESS: int -NO_ERROR: int -S_OK: int -S_FALSE: int -ERROR_INVALID_FUNCTION: int -ERROR_FILE_NOT_FOUND: int -ERROR_PATH_NOT_FOUND: int -ERROR_TOO_MANY_OPEN_FILES: int -ERROR_ACCESS_DENIED: int -ERROR_INVALID_HANDLE: int -ERROR_ARENA_TRASHED: int -ERROR_NOT_ENOUGH_MEMORY: int -ERROR_INVALID_BLOCK: int -ERROR_BAD_ENVIRONMENT: int -ERROR_BAD_FORMAT: int -ERROR_INVALID_ACCESS: int -ERROR_INVALID_DATA: int -ERROR_OUTOFMEMORY: int -ERROR_INVALID_DRIVE: int -ERROR_CURRENT_DIRECTORY: int -ERROR_NOT_SAME_DEVICE: int -ERROR_NO_MORE_FILES: int -ERROR_WRITE_PROTECT: int -ERROR_BAD_UNIT: int -ERROR_NOT_READY: int -ERROR_BAD_COMMAND: int -ERROR_CRC: int -ERROR_BAD_LENGTH: int -ERROR_SEEK: int -ERROR_NOT_DOS_DISK: int -ERROR_SECTOR_NOT_FOUND: int -ERROR_OUT_OF_PAPER: int -ERROR_WRITE_FAULT: int -ERROR_READ_FAULT: int -ERROR_GEN_FAILURE: int -ERROR_SHARING_VIOLATION: int -ERROR_LOCK_VIOLATION: int -ERROR_WRONG_DISK: int -ERROR_SHARING_BUFFER_EXCEEDED: int -ERROR_HANDLE_EOF: int -ERROR_HANDLE_DISK_FULL: int -ERROR_NOT_SUPPORTED: int -ERROR_REM_NOT_LIST: int -ERROR_DUP_NAME: int -ERROR_BAD_NETPATH: int -ERROR_NETWORK_BUSY: int -ERROR_DEV_NOT_EXIST: int -ERROR_TOO_MANY_CMDS: int -ERROR_ADAP_HDW_ERR: int -ERROR_BAD_NET_RESP: int -ERROR_UNEXP_NET_ERR: int -ERROR_BAD_REM_ADAP: int -ERROR_PRINTQ_FULL: int -ERROR_NO_SPOOL_SPACE: int -ERROR_PRINT_CANCELLED: int -ERROR_NETNAME_DELETED: int -ERROR_NETWORK_ACCESS_DENIED: int -ERROR_BAD_DEV_TYPE: int -ERROR_BAD_NET_NAME: int -ERROR_TOO_MANY_NAMES: int -ERROR_TOO_MANY_SESS: int -ERROR_SHARING_PAUSED: int -ERROR_REQ_NOT_ACCEP: int -ERROR_REDIR_PAUSED: int -ERROR_FILE_EXISTS: int -ERROR_CANNOT_MAKE: int -ERROR_FAIL_I24: int -ERROR_OUT_OF_STRUCTURES: int -ERROR_ALREADY_ASSIGNED: int -ERROR_INVALID_PASSWORD: int -ERROR_INVALID_PARAMETER: int -ERROR_NET_WRITE_FAULT: int -ERROR_NO_PROC_SLOTS: int -ERROR_TOO_MANY_SEMAPHORES: int -ERROR_EXCL_SEM_ALREADY_OWNED: int -ERROR_SEM_IS_SET: int -ERROR_TOO_MANY_SEM_REQUESTS: int -ERROR_INVALID_AT_INTERRUPT_TIME: int 
-ERROR_SEM_OWNER_DIED: int -ERROR_SEM_USER_LIMIT: int -ERROR_DISK_CHANGE: int -ERROR_DRIVE_LOCKED: int -ERROR_BROKEN_PIPE: int -ERROR_OPEN_FAILED: int -ERROR_BUFFER_OVERFLOW: int -ERROR_DISK_FULL: int -ERROR_NO_MORE_SEARCH_HANDLES: int -ERROR_INVALID_TARGET_HANDLE: int -ERROR_INVALID_CATEGORY: int -ERROR_INVALID_VERIFY_SWITCH: int -ERROR_BAD_DRIVER_LEVEL: int -ERROR_CALL_NOT_IMPLEMENTED: int -ERROR_SEM_TIMEOUT: int -ERROR_INSUFFICIENT_BUFFER: int -ERROR_INVALID_NAME: int -ERROR_INVALID_LEVEL: int -ERROR_NO_VOLUME_LABEL: int -ERROR_MOD_NOT_FOUND: int -ERROR_PROC_NOT_FOUND: int -ERROR_WAIT_NO_CHILDREN: int -ERROR_CHILD_NOT_COMPLETE: int -ERROR_DIRECT_ACCESS_HANDLE: int -ERROR_NEGATIVE_SEEK: int -ERROR_SEEK_ON_DEVICE: int -ERROR_IS_JOIN_TARGET: int -ERROR_IS_JOINED: int -ERROR_IS_SUBSTED: int -ERROR_NOT_JOINED: int -ERROR_NOT_SUBSTED: int -ERROR_JOIN_TO_JOIN: int -ERROR_SUBST_TO_SUBST: int -ERROR_JOIN_TO_SUBST: int -ERROR_SUBST_TO_JOIN: int -ERROR_BUSY_DRIVE: int -ERROR_SAME_DRIVE: int -ERROR_DIR_NOT_ROOT: int -ERROR_DIR_NOT_EMPTY: int -ERROR_IS_SUBST_PATH: int -ERROR_IS_JOIN_PATH: int -ERROR_PATH_BUSY: int -ERROR_IS_SUBST_TARGET: int -ERROR_SYSTEM_TRACE: int -ERROR_INVALID_EVENT_COUNT: int -ERROR_TOO_MANY_MUXWAITERS: int -ERROR_INVALID_LIST_FORMAT: int -ERROR_LABEL_TOO_LONG: int -ERROR_TOO_MANY_TCBS: int -ERROR_SIGNAL_REFUSED: int -ERROR_DISCARDED: int -ERROR_NOT_LOCKED: int -ERROR_BAD_THREADID_ADDR: int -ERROR_BAD_ARGUMENTS: int -ERROR_BAD_PATHNAME: int -ERROR_SIGNAL_PENDING: int -ERROR_MAX_THRDS_REACHED: int -ERROR_LOCK_FAILED: int -ERROR_BUSY: int -ERROR_CANCEL_VIOLATION: int -ERROR_ATOMIC_LOCKS_NOT_SUPPORTED: int -ERROR_INVALID_SEGMENT_NUMBER: int -ERROR_INVALID_ORDINAL: int -ERROR_ALREADY_EXISTS: int -ERROR_INVALID_FLAG_NUMBER: int -ERROR_SEM_NOT_FOUND: int -ERROR_INVALID_STARTING_CODESEG: int -ERROR_INVALID_STACKSEG: int -ERROR_INVALID_MODULETYPE: int -ERROR_INVALID_EXE_SIGNATURE: int -ERROR_EXE_MARKED_INVALID: int -ERROR_BAD_EXE_FORMAT: int -ERROR_ITERATED_DATA_EXCEEDS_64k: int -ERROR_INVALID_MINALLOCSIZE: int -ERROR_DYNLINK_FROM_INVALID_RING: int -ERROR_IOPL_NOT_ENABLED: int -ERROR_INVALID_SEGDPL: int -ERROR_AUTODATASEG_EXCEEDS_64k: int -ERROR_RING2SEG_MUST_BE_MOVABLE: int -ERROR_RELOC_CHAIN_XEEDS_SEGLIM: int -ERROR_INFLOOP_IN_RELOC_CHAIN: int -ERROR_ENVVAR_NOT_FOUND: int -ERROR_NO_SIGNAL_SENT: int -ERROR_FILENAME_EXCED_RANGE: int -ERROR_RING2_STACK_IN_USE: int -ERROR_META_EXPANSION_TOO_LONG: int -ERROR_INVALID_SIGNAL_NUMBER: int -ERROR_THREAD_1_INACTIVE: int -ERROR_LOCKED: int -ERROR_TOO_MANY_MODULES: int -ERROR_NESTING_NOT_ALLOWED: int -ERROR_EXE_MACHINE_TYPE_MISMATCH: int -ERROR_EXE_CANNOT_MODIFY_SIGNED_BINARY: int -ERROR_EXE_CANNOT_MODIFY_STRONG_SIGNED_BINARY: int -ERROR_FILE_CHECKED_OUT: int -ERROR_CHECKOUT_REQUIRED: int -ERROR_BAD_FILE_TYPE: int -ERROR_FILE_TOO_LARGE: int -ERROR_FORMS_AUTH_REQUIRED: int -ERROR_VIRUS_INFECTED: int -ERROR_VIRUS_DELETED: int -ERROR_PIPE_LOCAL: int -ERROR_BAD_PIPE: int -ERROR_PIPE_BUSY: int -ERROR_NO_DATA: int -ERROR_PIPE_NOT_CONNECTED: int -ERROR_MORE_DATA: int -ERROR_VC_DISCONNECTED: int -ERROR_INVALID_EA_NAME: int -ERROR_EA_LIST_INCONSISTENT: int -WAIT_TIMEOUT: int -ERROR_NO_MORE_ITEMS: int -ERROR_CANNOT_COPY: int -ERROR_DIRECTORY: int -ERROR_EAS_DIDNT_FIT: int -ERROR_EA_FILE_CORRUPT: int -ERROR_EA_TABLE_FULL: int -ERROR_INVALID_EA_HANDLE: int -ERROR_EAS_NOT_SUPPORTED: int -ERROR_NOT_OWNER: int -ERROR_TOO_MANY_POSTS: int -ERROR_PARTIAL_COPY: int -ERROR_OPLOCK_NOT_GRANTED: int -ERROR_INVALID_OPLOCK_PROTOCOL: int -ERROR_DISK_TOO_FRAGMENTED: int 
-ERROR_DELETE_PENDING: int -ERROR_MR_MID_NOT_FOUND: int -ERROR_SCOPE_NOT_FOUND: int -ERROR_FAIL_NOACTION_REBOOT: int -ERROR_FAIL_SHUTDOWN: int -ERROR_FAIL_RESTART: int -ERROR_MAX_SESSIONS_REACHED: int -ERROR_THREAD_MODE_ALREADY_BACKGROUND: int -ERROR_THREAD_MODE_NOT_BACKGROUND: int -ERROR_PROCESS_MODE_ALREADY_BACKGROUND: int -ERROR_PROCESS_MODE_NOT_BACKGROUND: int -ERROR_INVALID_ADDRESS: int -ERROR_USER_PROFILE_LOAD: int -ERROR_ARITHMETIC_OVERFLOW: int -ERROR_PIPE_CONNECTED: int -ERROR_PIPE_LISTENING: int -ERROR_VERIFIER_STOP: int -ERROR_ABIOS_ERROR: int -ERROR_WX86_WARNING: int -ERROR_WX86_ERROR: int -ERROR_TIMER_NOT_CANCELED: int -ERROR_UNWIND: int -ERROR_BAD_STACK: int -ERROR_INVALID_UNWIND_TARGET: int -ERROR_INVALID_PORT_ATTRIBUTES: int -ERROR_PORT_MESSAGE_TOO_LONG: int -ERROR_INVALID_QUOTA_LOWER: int -ERROR_DEVICE_ALREADY_ATTACHED: int -ERROR_INSTRUCTION_MISALIGNMENT: int -ERROR_PROFILING_NOT_STARTED: int -ERROR_PROFILING_NOT_STOPPED: int -ERROR_COULD_NOT_INTERPRET: int -ERROR_PROFILING_AT_LIMIT: int -ERROR_CANT_WAIT: int -ERROR_CANT_TERMINATE_SELF: int -ERROR_UNEXPECTED_MM_CREATE_ERR: int -ERROR_UNEXPECTED_MM_MAP_ERROR: int -ERROR_UNEXPECTED_MM_EXTEND_ERR: int -ERROR_BAD_FUNCTION_TABLE: int -ERROR_NO_GUID_TRANSLATION: int -ERROR_INVALID_LDT_SIZE: int -ERROR_INVALID_LDT_OFFSET: int -ERROR_INVALID_LDT_DESCRIPTOR: int -ERROR_TOO_MANY_THREADS: int -ERROR_THREAD_NOT_IN_PROCESS: int -ERROR_PAGEFILE_QUOTA_EXCEEDED: int -ERROR_LOGON_SERVER_CONFLICT: int -ERROR_SYNCHRONIZATION_REQUIRED: int -ERROR_NET_OPEN_FAILED: int -ERROR_IO_PRIVILEGE_FAILED: int -ERROR_CONTROL_C_EXIT: int -ERROR_MISSING_SYSTEMFILE: int -ERROR_UNHANDLED_EXCEPTION: int -ERROR_APP_INIT_FAILURE: int -ERROR_PAGEFILE_CREATE_FAILED: int -ERROR_INVALID_IMAGE_HASH: int -ERROR_NO_PAGEFILE: int -ERROR_ILLEGAL_FLOAT_CONTEXT: int -ERROR_NO_EVENT_PAIR: int -ERROR_DOMAIN_CTRLR_CONFIG_ERROR: int -ERROR_ILLEGAL_CHARACTER: int -ERROR_UNDEFINED_CHARACTER: int -ERROR_FLOPPY_VOLUME: int -ERROR_BIOS_FAILED_TO_CONNECT_INTERRUPT: int -ERROR_BACKUP_CONTROLLER: int -ERROR_MUTANT_LIMIT_EXCEEDED: int -ERROR_FS_DRIVER_REQUIRED: int -ERROR_CANNOT_LOAD_REGISTRY_FILE: int -ERROR_DEBUG_ATTACH_FAILED: int -ERROR_SYSTEM_PROCESS_TERMINATED: int -ERROR_DATA_NOT_ACCEPTED: int -ERROR_VDM_HARD_ERROR: int -ERROR_DRIVER_CANCEL_TIMEOUT: int -ERROR_REPLY_MESSAGE_MISMATCH: int -ERROR_LOST_WRITEBEHIND_DATA: int -ERROR_CLIENT_SERVER_PARAMETERS_INVALID: int -ERROR_NOT_TINY_STREAM: int -ERROR_STACK_OVERFLOW_READ: int -ERROR_CONVERT_TO_LARGE: int -ERROR_FOUND_OUT_OF_SCOPE: int -ERROR_ALLOCATE_BUCKET: int -ERROR_MARSHALL_OVERFLOW: int -ERROR_INVALID_VARIANT: int -ERROR_BAD_COMPRESSION_BUFFER: int -ERROR_AUDIT_FAILED: int -ERROR_TIMER_RESOLUTION_NOT_SET: int -ERROR_INSUFFICIENT_LOGON_INFO: int -ERROR_BAD_DLL_ENTRYPOINT: int -ERROR_BAD_SERVICE_ENTRYPOINT: int -ERROR_IP_ADDRESS_CONFLICT1: int -ERROR_IP_ADDRESS_CONFLICT2: int -ERROR_REGISTRY_QUOTA_LIMIT: int -ERROR_NO_CALLBACK_ACTIVE: int -ERROR_PWD_TOO_SHORT: int -ERROR_PWD_TOO_RECENT: int -ERROR_PWD_HISTORY_CONFLICT: int -ERROR_UNSUPPORTED_COMPRESSION: int -ERROR_INVALID_HW_PROFILE: int -ERROR_INVALID_PLUGPLAY_DEVICE_PATH: int -ERROR_QUOTA_LIST_INCONSISTENT: int -ERROR_EVALUATION_EXPIRATION: int -ERROR_ILLEGAL_DLL_RELOCATION: int -ERROR_DLL_INIT_FAILED_LOGOFF: int -ERROR_VALIDATE_CONTINUE: int -ERROR_NO_MORE_MATCHES: int -ERROR_RANGE_LIST_CONFLICT: int -ERROR_SERVER_SID_MISMATCH: int -ERROR_CANT_ENABLE_DENY_ONLY: int -ERROR_FLOAT_MULTIPLE_FAULTS: int -ERROR_FLOAT_MULTIPLE_TRAPS: int -ERROR_NOINTERFACE: int 
-ERROR_DRIVER_FAILED_SLEEP: int -ERROR_CORRUPT_SYSTEM_FILE: int -ERROR_COMMITMENT_MINIMUM: int -ERROR_PNP_RESTART_ENUMERATION: int -ERROR_SYSTEM_IMAGE_BAD_SIGNATURE: int -ERROR_PNP_REBOOT_REQUIRED: int -ERROR_INSUFFICIENT_POWER: int -ERROR_MULTIPLE_FAULT_VIOLATION: int -ERROR_SYSTEM_SHUTDOWN: int -ERROR_PORT_NOT_SET: int -ERROR_DS_VERSION_CHECK_FAILURE: int -ERROR_RANGE_NOT_FOUND: int -ERROR_NOT_SAFE_MODE_DRIVER: int -ERROR_FAILED_DRIVER_ENTRY: int -ERROR_DEVICE_ENUMERATION_ERROR: int -ERROR_MOUNT_POINT_NOT_RESOLVED: int -ERROR_INVALID_DEVICE_OBJECT_PARAMETER: int -ERROR_MCA_OCCURED: int -ERROR_DRIVER_DATABASE_ERROR: int -ERROR_SYSTEM_HIVE_TOO_LARGE: int -ERROR_DRIVER_FAILED_PRIOR_UNLOAD: int -ERROR_VOLSNAP_PREPARE_HIBERNATE: int -ERROR_HIBERNATION_FAILURE: int -ERROR_FILE_SYSTEM_LIMITATION: int -ERROR_ASSERTION_FAILURE: int -ERROR_ACPI_ERROR: int -ERROR_WOW_ASSERTION: int -ERROR_PNP_BAD_MPS_TABLE: int -ERROR_PNP_TRANSLATION_FAILED: int -ERROR_PNP_IRQ_TRANSLATION_FAILED: int -ERROR_PNP_INVALID_ID: int -ERROR_WAKE_SYSTEM_DEBUGGER: int -ERROR_HANDLES_CLOSED: int -ERROR_EXTRANEOUS_INFORMATION: int -ERROR_RXACT_COMMIT_NECESSARY: int -ERROR_MEDIA_CHECK: int -ERROR_GUID_SUBSTITUTION_MADE: int -ERROR_STOPPED_ON_SYMLINK: int -ERROR_LONGJUMP: int -ERROR_PLUGPLAY_QUERY_VETOED: int -ERROR_UNWIND_CONSOLIDATE: int -ERROR_REGISTRY_HIVE_RECOVERED: int -ERROR_DLL_MIGHT_BE_INSECURE: int -ERROR_DLL_MIGHT_BE_INCOMPATIBLE: int -ERROR_DBG_EXCEPTION_NOT_HANDLED: int -ERROR_DBG_REPLY_LATER: int -ERROR_DBG_UNABLE_TO_PROVIDE_HANDLE: int -ERROR_DBG_TERMINATE_THREAD: int -ERROR_DBG_TERMINATE_PROCESS: int -ERROR_DBG_CONTROL_C: int -ERROR_DBG_PRINTEXCEPTION_C: int -ERROR_DBG_RIPEXCEPTION: int -ERROR_DBG_CONTROL_BREAK: int -ERROR_DBG_COMMAND_EXCEPTION: int -ERROR_OBJECT_NAME_EXISTS: int -ERROR_THREAD_WAS_SUSPENDED: int -ERROR_IMAGE_NOT_AT_BASE: int -ERROR_RXACT_STATE_CREATED: int -ERROR_SEGMENT_NOTIFICATION: int -ERROR_BAD_CURRENT_DIRECTORY: int -ERROR_FT_READ_RECOVERY_FROM_BACKUP: int -ERROR_FT_WRITE_RECOVERY: int -ERROR_IMAGE_MACHINE_TYPE_MISMATCH: int -ERROR_RECEIVE_PARTIAL: int -ERROR_RECEIVE_EXPEDITED: int -ERROR_RECEIVE_PARTIAL_EXPEDITED: int -ERROR_EVENT_DONE: int -ERROR_EVENT_PENDING: int -ERROR_CHECKING_FILE_SYSTEM: int -ERROR_FATAL_APP_EXIT: int -ERROR_PREDEFINED_HANDLE: int -ERROR_WAS_UNLOCKED: int -ERROR_SERVICE_NOTIFICATION: int -ERROR_WAS_LOCKED: int -ERROR_LOG_HARD_ERROR: int -ERROR_ALREADY_WIN32: int -ERROR_IMAGE_MACHINE_TYPE_MISMATCH_EXE: int -ERROR_NO_YIELD_PERFORMED: int -ERROR_TIMER_RESUME_IGNORED: int -ERROR_ARBITRATION_UNHANDLED: int -ERROR_CARDBUS_NOT_SUPPORTED: int -ERROR_MP_PROCESSOR_MISMATCH: int -ERROR_HIBERNATED: int -ERROR_RESUME_HIBERNATION: int -ERROR_FIRMWARE_UPDATED: int -ERROR_DRIVERS_LEAKING_LOCKED_PAGES: int -ERROR_WAKE_SYSTEM: int -ERROR_WAIT_1: int -ERROR_WAIT_2: int -ERROR_WAIT_3: int -ERROR_WAIT_63: int -ERROR_ABANDONED_WAIT_0: int -ERROR_ABANDONED_WAIT_63: int -ERROR_USER_APC: int -ERROR_KERNEL_APC: int -ERROR_ALERTED: int -ERROR_ELEVATION_REQUIRED: int -ERROR_REPARSE: int -ERROR_OPLOCK_BREAK_IN_PROGRESS: int -ERROR_VOLUME_MOUNTED: int -ERROR_RXACT_COMMITTED: int -ERROR_NOTIFY_CLEANUP: int -ERROR_PRIMARY_TRANSPORT_CONNECT_FAILED: int -ERROR_PAGE_FAULT_TRANSITION: int -ERROR_PAGE_FAULT_DEMAND_ZERO: int -ERROR_PAGE_FAULT_COPY_ON_WRITE: int -ERROR_PAGE_FAULT_GUARD_PAGE: int -ERROR_PAGE_FAULT_PAGING_FILE: int -ERROR_CACHE_PAGE_LOCKED: int -ERROR_CRASH_DUMP: int -ERROR_BUFFER_ALL_ZEROS: int -ERROR_REPARSE_OBJECT: int -ERROR_RESOURCE_REQUIREMENTS_CHANGED: int 
-ERROR_TRANSLATION_COMPLETE: int -ERROR_NOTHING_TO_TERMINATE: int -ERROR_PROCESS_NOT_IN_JOB: int -ERROR_PROCESS_IN_JOB: int -ERROR_VOLSNAP_HIBERNATE_READY: int -ERROR_FSFILTER_OP_COMPLETED_SUCCESSFULLY: int -ERROR_INTERRUPT_VECTOR_ALREADY_CONNECTED: int -ERROR_INTERRUPT_STILL_CONNECTED: int -ERROR_WAIT_FOR_OPLOCK: int -ERROR_DBG_EXCEPTION_HANDLED: int -ERROR_DBG_CONTINUE: int -ERROR_CALLBACK_POP_STACK: int -ERROR_COMPRESSION_DISABLED: int -ERROR_CANTFETCHBACKWARDS: int -ERROR_CANTSCROLLBACKWARDS: int -ERROR_ROWSNOTRELEASED: int -ERROR_BAD_ACCESSOR_FLAGS: int -ERROR_ERRORS_ENCOUNTERED: int -ERROR_NOT_CAPABLE: int -ERROR_REQUEST_OUT_OF_SEQUENCE: int -ERROR_VERSION_PARSE_ERROR: int -ERROR_BADSTARTPOSITION: int -ERROR_MEMORY_HARDWARE: int -ERROR_DISK_REPAIR_DISABLED: int -ERROR_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE: int -ERROR_SYSTEM_POWERSTATE_TRANSITION: int -ERROR_SYSTEM_POWERSTATE_COMPLEX_TRANSITION: int -ERROR_MCA_EXCEPTION: int -ERROR_ACCESS_AUDIT_BY_POLICY: int -ERROR_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY: int -ERROR_ABANDON_HIBERFILE: int -ERROR_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED: int -ERROR_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR: int -ERROR_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR: int -ERROR_BAD_MCFG_TABLE: int -ERROR_EA_ACCESS_DENIED: int -ERROR_OPERATION_ABORTED: int -ERROR_IO_INCOMPLETE: int -ERROR_IO_PENDING: int -ERROR_NOACCESS: int -ERROR_SWAPERROR: int -ERROR_STACK_OVERFLOW: int -ERROR_INVALID_MESSAGE: int -ERROR_CAN_NOT_COMPLETE: int -ERROR_INVALID_FLAGS: int -ERROR_UNRECOGNIZED_VOLUME: int -ERROR_FILE_INVALID: int -ERROR_FULLSCREEN_MODE: int -ERROR_NO_TOKEN: int -ERROR_BADDB: int -ERROR_BADKEY: int -ERROR_CANTOPEN: int -ERROR_CANTREAD: int -ERROR_CANTWRITE: int -ERROR_REGISTRY_RECOVERED: int -ERROR_REGISTRY_CORRUPT: int -ERROR_REGISTRY_IO_FAILED: int -ERROR_NOT_REGISTRY_FILE: int -ERROR_KEY_DELETED: int -ERROR_NO_LOG_SPACE: int -ERROR_KEY_HAS_CHILDREN: int -ERROR_CHILD_MUST_BE_VOLATILE: int -ERROR_NOTIFY_ENUM_DIR: int -ERROR_DEPENDENT_SERVICES_RUNNING: int -ERROR_INVALID_SERVICE_CONTROL: int -ERROR_SERVICE_REQUEST_TIMEOUT: int -ERROR_SERVICE_NO_THREAD: int -ERROR_SERVICE_DATABASE_LOCKED: int -ERROR_SERVICE_ALREADY_RUNNING: int -ERROR_INVALID_SERVICE_ACCOUNT: int -ERROR_SERVICE_DISABLED: int -ERROR_CIRCULAR_DEPENDENCY: int -ERROR_SERVICE_DOES_NOT_EXIST: int -ERROR_SERVICE_CANNOT_ACCEPT_CTRL: int -ERROR_SERVICE_NOT_ACTIVE: int -ERROR_FAILED_SERVICE_CONTROLLER_CONNECT: int -ERROR_EXCEPTION_IN_SERVICE: int -ERROR_DATABASE_DOES_NOT_EXIST: int -ERROR_SERVICE_SPECIFIC_ERROR: int -ERROR_PROCESS_ABORTED: int -ERROR_SERVICE_DEPENDENCY_FAIL: int -ERROR_SERVICE_LOGON_FAILED: int -ERROR_SERVICE_START_HANG: int -ERROR_INVALID_SERVICE_LOCK: int -ERROR_SERVICE_MARKED_FOR_DELETE: int -ERROR_SERVICE_EXISTS: int -ERROR_ALREADY_RUNNING_LKG: int -ERROR_SERVICE_DEPENDENCY_DELETED: int -ERROR_BOOT_ALREADY_ACCEPTED: int -ERROR_SERVICE_NEVER_STARTED: int -ERROR_DUPLICATE_SERVICE_NAME: int -ERROR_DIFFERENT_SERVICE_ACCOUNT: int -ERROR_CANNOT_DETECT_DRIVER_FAILURE: int -ERROR_CANNOT_DETECT_PROCESS_ABORT: int -ERROR_NO_RECOVERY_PROGRAM: int -ERROR_SERVICE_NOT_IN_EXE: int -ERROR_NOT_SAFEBOOT_SERVICE: int -ERROR_END_OF_MEDIA: int -ERROR_FILEMARK_DETECTED: int -ERROR_BEGINNING_OF_MEDIA: int -ERROR_SETMARK_DETECTED: int -ERROR_NO_DATA_DETECTED: int -ERROR_PARTITION_FAILURE: int -ERROR_INVALID_BLOCK_LENGTH: int -ERROR_DEVICE_NOT_PARTITIONED: int -ERROR_UNABLE_TO_LOCK_MEDIA: int -ERROR_UNABLE_TO_UNLOAD_MEDIA: int -ERROR_MEDIA_CHANGED: int -ERROR_BUS_RESET: int 
-ERROR_NO_MEDIA_IN_DRIVE: int -ERROR_NO_UNICODE_TRANSLATION: int -ERROR_DLL_INIT_FAILED: int -ERROR_SHUTDOWN_IN_PROGRESS: int -ERROR_NO_SHUTDOWN_IN_PROGRESS: int -ERROR_IO_DEVICE: int -ERROR_SERIAL_NO_DEVICE: int -ERROR_IRQ_BUSY: int -ERROR_MORE_WRITES: int -ERROR_COUNTER_TIMEOUT: int -ERROR_FLOPPY_ID_MARK_NOT_FOUND: int -ERROR_FLOPPY_WRONG_CYLINDER: int -ERROR_FLOPPY_UNKNOWN_ERROR: int -ERROR_FLOPPY_BAD_REGISTERS: int -ERROR_DISK_RECALIBRATE_FAILED: int -ERROR_DISK_OPERATION_FAILED: int -ERROR_DISK_RESET_FAILED: int -ERROR_EOM_OVERFLOW: int -ERROR_NOT_ENOUGH_SERVER_MEMORY: int -ERROR_POSSIBLE_DEADLOCK: int -ERROR_MAPPED_ALIGNMENT: int -ERROR_SET_POWER_STATE_VETOED: int -ERROR_SET_POWER_STATE_FAILED: int -ERROR_TOO_MANY_LINKS: int -ERROR_OLD_WIN_VERSION: int -ERROR_APP_WRONG_OS: int -ERROR_SINGLE_INSTANCE_APP: int -ERROR_RMODE_APP: int -ERROR_INVALID_DLL: int -ERROR_NO_ASSOCIATION: int -ERROR_DDE_FAIL: int -ERROR_DLL_NOT_FOUND: int -ERROR_NO_MORE_USER_HANDLES: int -ERROR_MESSAGE_SYNC_ONLY: int -ERROR_SOURCE_ELEMENT_EMPTY: int -ERROR_DESTINATION_ELEMENT_FULL: int -ERROR_ILLEGAL_ELEMENT_ADDRESS: int -ERROR_MAGAZINE_NOT_PRESENT: int -ERROR_DEVICE_REINITIALIZATION_NEEDED: int -ERROR_DEVICE_REQUIRES_CLEANING: int -ERROR_DEVICE_DOOR_OPEN: int -ERROR_DEVICE_NOT_CONNECTED: int -ERROR_NOT_FOUND: int -ERROR_NO_MATCH: int -ERROR_SET_NOT_FOUND: int -ERROR_POINT_NOT_FOUND: int -ERROR_NO_TRACKING_SERVICE: int -ERROR_NO_VOLUME_ID: int -ERROR_CONNECTED_OTHER_PASSWORD: int -ERROR_BAD_USERNAME: int -ERROR_NOT_CONNECTED: int -ERROR_OPEN_FILES: int -ERROR_ACTIVE_CONNECTIONS: int -ERROR_DEVICE_IN_USE: int -ERROR_BAD_DEVICE: int -ERROR_CONNECTION_UNAVAIL: int -ERROR_DEVICE_ALREADY_REMEMBERED: int -ERROR_NO_NET_OR_BAD_PATH: int -ERROR_BAD_PROVIDER: int -ERROR_CANNOT_OPEN_PROFILE: int -ERROR_BAD_PROFILE: int -ERROR_NOT_CONTAINER: int -ERROR_EXTENDED_ERROR: int -ERROR_INVALID_GROUPNAME: int -ERROR_INVALID_COMPUTERNAME: int -ERROR_INVALID_EVENTNAME: int -ERROR_INVALID_DOMAINNAME: int -ERROR_INVALID_SERVICENAME: int -ERROR_INVALID_NETNAME: int -ERROR_INVALID_SHARENAME: int -ERROR_INVALID_PASSWORDNAME: int -ERROR_INVALID_MESSAGENAME: int -ERROR_INVALID_MESSAGEDEST: int -ERROR_SESSION_CREDENTIAL_CONFLICT: int -ERROR_REMOTE_SESSION_LIMIT_EXCEEDED: int -ERROR_DUP_DOMAINNAME: int -ERROR_NO_NETWORK: int -ERROR_CANCELLED: int -ERROR_USER_MAPPED_FILE: int -ERROR_CONNECTION_REFUSED: int -ERROR_GRACEFUL_DISCONNECT: int -ERROR_ADDRESS_ALREADY_ASSOCIATED: int -ERROR_ADDRESS_NOT_ASSOCIATED: int -ERROR_CONNECTION_INVALID: int -ERROR_CONNECTION_ACTIVE: int -ERROR_NETWORK_UNREACHABLE: int -ERROR_HOST_UNREACHABLE: int -ERROR_PROTOCOL_UNREACHABLE: int -ERROR_PORT_UNREACHABLE: int -ERROR_REQUEST_ABORTED: int -ERROR_CONNECTION_ABORTED: int -ERROR_RETRY: int -ERROR_CONNECTION_COUNT_LIMIT: int -ERROR_LOGIN_TIME_RESTRICTION: int -ERROR_LOGIN_WKSTA_RESTRICTION: int -ERROR_INCORRECT_ADDRESS: int -ERROR_ALREADY_REGISTERED: int -ERROR_SERVICE_NOT_FOUND: int -ERROR_NOT_AUTHENTICATED: int -ERROR_NOT_LOGGED_ON: int -ERROR_CONTINUE: int -ERROR_ALREADY_INITIALIZED: int -ERROR_NO_MORE_DEVICES: int -ERROR_NO_SUCH_SITE: int -ERROR_DOMAIN_CONTROLLER_EXISTS: int -ERROR_DS_NOT_INSTALLED: int -ERROR_NOT_ALL_ASSIGNED: int -ERROR_SOME_NOT_MAPPED: int -ERROR_NO_QUOTAS_FOR_ACCOUNT: int -ERROR_LOCAL_USER_SESSION_KEY: int -ERROR_NULL_LM_PASSWORD: int -ERROR_UNKNOWN_REVISION: int -ERROR_REVISION_MISMATCH: int -ERROR_INVALID_OWNER: int -ERROR_INVALID_PRIMARY_GROUP: int -ERROR_NO_IMPERSONATION_TOKEN: int -ERROR_CANT_DISABLE_MANDATORY: int -ERROR_NO_LOGON_SERVERS: 
int -ERROR_NO_SUCH_LOGON_SESSION: int -ERROR_NO_SUCH_PRIVILEGE: int -ERROR_PRIVILEGE_NOT_HELD: int -ERROR_INVALID_ACCOUNT_NAME: int -ERROR_USER_EXISTS: int -ERROR_NO_SUCH_USER: int -ERROR_GROUP_EXISTS: int -ERROR_NO_SUCH_GROUP: int -ERROR_MEMBER_IN_GROUP: int -ERROR_MEMBER_NOT_IN_GROUP: int -ERROR_LAST_ADMIN: int -ERROR_WRONG_PASSWORD: int -ERROR_ILL_FORMED_PASSWORD: int -ERROR_PASSWORD_RESTRICTION: int -ERROR_LOGON_FAILURE: int -ERROR_ACCOUNT_RESTRICTION: int -ERROR_INVALID_LOGON_HOURS: int -ERROR_INVALID_WORKSTATION: int -ERROR_PASSWORD_EXPIRED: int -ERROR_ACCOUNT_DISABLED: int -ERROR_NONE_MAPPED: int -ERROR_TOO_MANY_LUIDS_REQUESTED: int -ERROR_LUIDS_EXHAUSTED: int -ERROR_INVALID_SUB_AUTHORITY: int -ERROR_INVALID_ACL: int -ERROR_INVALID_SID: int -ERROR_INVALID_SECURITY_DESCR: int -ERROR_BAD_INHERITANCE_ACL: int -ERROR_SERVER_DISABLED: int -ERROR_SERVER_NOT_DISABLED: int -ERROR_INVALID_ID_AUTHORITY: int -ERROR_ALLOTTED_SPACE_EXCEEDED: int -ERROR_INVALID_GROUP_ATTRIBUTES: int -ERROR_BAD_IMPERSONATION_LEVEL: int -ERROR_CANT_OPEN_ANONYMOUS: int -ERROR_BAD_VALIDATION_CLASS: int -ERROR_BAD_TOKEN_TYPE: int -ERROR_NO_SECURITY_ON_OBJECT: int -ERROR_CANT_ACCESS_DOMAIN_INFO: int -ERROR_INVALID_SERVER_STATE: int -ERROR_INVALID_DOMAIN_STATE: int -ERROR_INVALID_DOMAIN_ROLE: int -ERROR_NO_SUCH_DOMAIN: int -ERROR_DOMAIN_EXISTS: int -ERROR_DOMAIN_LIMIT_EXCEEDED: int -ERROR_INTERNAL_DB_CORRUPTION: int -ERROR_INTERNAL_ERROR: int -ERROR_GENERIC_NOT_MAPPED: int -ERROR_BAD_DESCRIPTOR_FORMAT: int -ERROR_NOT_LOGON_PROCESS: int -ERROR_LOGON_SESSION_EXISTS: int -ERROR_NO_SUCH_PACKAGE: int -ERROR_BAD_LOGON_SESSION_STATE: int -ERROR_LOGON_SESSION_COLLISION: int -ERROR_INVALID_LOGON_TYPE: int -ERROR_CANNOT_IMPERSONATE: int -ERROR_RXACT_INVALID_STATE: int -ERROR_RXACT_COMMIT_FAILURE: int -ERROR_SPECIAL_ACCOUNT: int -ERROR_SPECIAL_GROUP: int -ERROR_SPECIAL_USER: int -ERROR_MEMBERS_PRIMARY_GROUP: int -ERROR_TOKEN_ALREADY_IN_USE: int -ERROR_NO_SUCH_ALIAS: int -ERROR_MEMBER_NOT_IN_ALIAS: int -ERROR_MEMBER_IN_ALIAS: int -ERROR_ALIAS_EXISTS: int -ERROR_LOGON_NOT_GRANTED: int -ERROR_TOO_MANY_SECRETS: int -ERROR_SECRET_TOO_LONG: int -ERROR_INTERNAL_DB_ERROR: int -ERROR_TOO_MANY_CONTEXT_IDS: int -ERROR_LOGON_TYPE_NOT_GRANTED: int -ERROR_NT_CROSS_ENCRYPTION_REQUIRED: int -ERROR_NO_SUCH_MEMBER: int -ERROR_INVALID_MEMBER: int -ERROR_TOO_MANY_SIDS: int -ERROR_LM_CROSS_ENCRYPTION_REQUIRED: int -ERROR_NO_INHERITANCE: int -ERROR_FILE_CORRUPT: int -ERROR_DISK_CORRUPT: int -ERROR_NO_USER_SESSION_KEY: int -ERROR_LICENSE_QUOTA_EXCEEDED: int -ERROR_INVALID_WINDOW_HANDLE: int -ERROR_INVALID_MENU_HANDLE: int -ERROR_INVALID_CURSOR_HANDLE: int -ERROR_INVALID_ACCEL_HANDLE: int -ERROR_INVALID_HOOK_HANDLE: int -ERROR_INVALID_DWP_HANDLE: int -ERROR_TLW_WITH_WSCHILD: int -ERROR_CANNOT_FIND_WND_CLASS: int -ERROR_WINDOW_OF_OTHER_THREAD: int -ERROR_HOTKEY_ALREADY_REGISTERED: int -ERROR_CLASS_ALREADY_EXISTS: int -ERROR_CLASS_DOES_NOT_EXIST: int -ERROR_CLASS_HAS_WINDOWS: int -ERROR_INVALID_INDEX: int -ERROR_INVALID_ICON_HANDLE: int -ERROR_PRIVATE_DIALOG_INDEX: int -ERROR_LISTBOX_ID_NOT_FOUND: int -ERROR_NO_WILDCARD_CHARACTERS: int -ERROR_CLIPBOARD_NOT_OPEN: int -ERROR_HOTKEY_NOT_REGISTERED: int -ERROR_WINDOW_NOT_DIALOG: int -ERROR_CONTROL_ID_NOT_FOUND: int -ERROR_INVALID_COMBOBOX_MESSAGE: int -ERROR_WINDOW_NOT_COMBOBOX: int -ERROR_INVALID_EDIT_HEIGHT: int -ERROR_DC_NOT_FOUND: int -ERROR_INVALID_HOOK_FILTER: int -ERROR_INVALID_FILTER_PROC: int -ERROR_HOOK_NEEDS_HMOD: int -ERROR_GLOBAL_ONLY_HOOK: int -ERROR_JOURNAL_HOOK_SET: int 
-ERROR_HOOK_NOT_INSTALLED: int -ERROR_INVALID_LB_MESSAGE: int -ERROR_SETCOUNT_ON_BAD_LB: int -ERROR_LB_WITHOUT_TABSTOPS: int -ERROR_DESTROY_OBJECT_OF_OTHER_THREAD: int -ERROR_CHILD_WINDOW_MENU: int -ERROR_NO_SYSTEM_MENU: int -ERROR_INVALID_MSGBOX_STYLE: int -ERROR_INVALID_SPI_VALUE: int -ERROR_SCREEN_ALREADY_LOCKED: int -ERROR_HWNDS_HAVE_DIFF_PARENT: int -ERROR_NOT_CHILD_WINDOW: int -ERROR_INVALID_GW_COMMAND: int -ERROR_INVALID_THREAD_ID: int -ERROR_NON_MDICHILD_WINDOW: int -ERROR_POPUP_ALREADY_ACTIVE: int -ERROR_NO_SCROLLBARS: int -ERROR_INVALID_SCROLLBAR_RANGE: int -ERROR_INVALID_SHOWWIN_COMMAND: int -ERROR_NO_SYSTEM_RESOURCES: int -ERROR_NONPAGED_SYSTEM_RESOURCES: int -ERROR_PAGED_SYSTEM_RESOURCES: int -ERROR_WORKING_SET_QUOTA: int -ERROR_PAGEFILE_QUOTA: int -ERROR_COMMITMENT_LIMIT: int -ERROR_MENU_ITEM_NOT_FOUND: int -ERROR_INVALID_KEYBOARD_HANDLE: int -ERROR_HOOK_TYPE_NOT_ALLOWED: int -ERROR_REQUIRES_INTERACTIVE_WINDOWSTATION: int -ERROR_TIMEOUT: int -ERROR_INVALID_MONITOR_HANDLE: int -ERROR_INCORRECT_SIZE: int -ERROR_SYMLINK_CLASS_DISABLED: int -ERROR_SYMLINK_NOT_SUPPORTED: int -ERROR_XML_PARSE_ERROR: int -ERROR_XMLDSIG_ERROR: int -ERROR_RESTART_APPLICATION: int -ERROR_WRONG_COMPARTMENT: int -ERROR_AUTHIP_FAILURE: int -ERROR_EVENTLOG_FILE_CORRUPT: int -ERROR_EVENTLOG_CANT_START: int -ERROR_LOG_FILE_FULL: int -ERROR_EVENTLOG_FILE_CHANGED: int -ERROR_INSTALL_SERVICE: int -ERROR_INSTALL_USEREXIT: int -ERROR_INSTALL_FAILURE: int -ERROR_INSTALL_SUSPEND: int -ERROR_UNKNOWN_PRODUCT: int -ERROR_UNKNOWN_FEATURE: int -ERROR_UNKNOWN_COMPONENT: int -ERROR_UNKNOWN_PROPERTY: int -ERROR_INVALID_HANDLE_STATE: int -ERROR_BAD_CONFIGURATION: int -ERROR_INDEX_ABSENT: int -ERROR_INSTALL_SOURCE_ABSENT: int -ERROR_BAD_DATABASE_VERSION: int -ERROR_PRODUCT_UNINSTALLED: int -ERROR_BAD_QUERY_SYNTAX: int -ERROR_INVALID_FIELD: int -ERROR_DEVICE_REMOVED: int -ERROR_INSTALL_ALREADY_RUNNING: int -ERROR_INSTALL_PACKAGE_OPEN_FAILED: int -ERROR_INSTALL_PACKAGE_INVALID: int -ERROR_INSTALL_UI_FAILURE: int -ERROR_INSTALL_LOG_FAILURE: int -ERROR_INSTALL_LANGUAGE_UNSUPPORTED: int -ERROR_INSTALL_TRANSFORM_FAILURE: int -ERROR_INSTALL_PACKAGE_REJECTED: int -ERROR_FUNCTION_NOT_CALLED: int -ERROR_FUNCTION_FAILED: int -ERROR_INVALID_TABLE: int -ERROR_DATATYPE_MISMATCH: int -ERROR_UNSUPPORTED_TYPE: int -ERROR_CREATE_FAILED: int -ERROR_INSTALL_TEMP_UNWRITABLE: int -ERROR_INSTALL_PLATFORM_UNSUPPORTED: int -ERROR_INSTALL_NOTUSED: int -ERROR_PATCH_PACKAGE_OPEN_FAILED: int -ERROR_PATCH_PACKAGE_INVALID: int -ERROR_PATCH_PACKAGE_UNSUPPORTED: int -ERROR_PRODUCT_VERSION: int -ERROR_INVALID_COMMAND_LINE: int -ERROR_INSTALL_REMOTE_DISALLOWED: int -ERROR_SUCCESS_REBOOT_INITIATED: int -ERROR_PATCH_TARGET_NOT_FOUND: int -ERROR_PATCH_PACKAGE_REJECTED: int -ERROR_INSTALL_TRANSFORM_REJECTED: int -ERROR_INSTALL_REMOTE_PROHIBITED: int -ERROR_PATCH_REMOVAL_UNSUPPORTED: int -ERROR_UNKNOWN_PATCH: int -ERROR_PATCH_NO_SEQUENCE: int -ERROR_PATCH_REMOVAL_DISALLOWED: int -ERROR_INVALID_PATCH_XML: int -ERROR_PATCH_MANAGED_ADVERTISED_PRODUCT: int -ERROR_INSTALL_SERVICE_SAFEBOOT: int -RPC_S_INVALID_STRING_BINDING: int -RPC_S_WRONG_KIND_OF_BINDING: int -RPC_S_INVALID_BINDING: int -RPC_S_PROTSEQ_NOT_SUPPORTED: int -RPC_S_INVALID_RPC_PROTSEQ: int -RPC_S_INVALID_STRING_UUID: int -RPC_S_INVALID_ENDPOINT_FORMAT: int -RPC_S_INVALID_NET_ADDR: int -RPC_S_NO_ENDPOINT_FOUND: int -RPC_S_INVALID_TIMEOUT: int -RPC_S_OBJECT_NOT_FOUND: int -RPC_S_ALREADY_REGISTERED: int -RPC_S_TYPE_ALREADY_REGISTERED: int -RPC_S_ALREADY_LISTENING: int -RPC_S_NO_PROTSEQS_REGISTERED: int 
-RPC_S_NOT_LISTENING: int -RPC_S_UNKNOWN_MGR_TYPE: int -RPC_S_UNKNOWN_IF: int -RPC_S_NO_BINDINGS: int -RPC_S_NO_PROTSEQS: int -RPC_S_CANT_CREATE_ENDPOINT: int -RPC_S_OUT_OF_RESOURCES: int -RPC_S_SERVER_UNAVAILABLE: int -RPC_S_SERVER_TOO_BUSY: int -RPC_S_INVALID_NETWORK_OPTIONS: int -RPC_S_NO_CALL_ACTIVE: int -RPC_S_CALL_FAILED: int -RPC_S_CALL_FAILED_DNE: int -RPC_S_PROTOCOL_ERROR: int -RPC_S_PROXY_ACCESS_DENIED: int -RPC_S_UNSUPPORTED_TRANS_SYN: int -RPC_S_UNSUPPORTED_TYPE: int -RPC_S_INVALID_TAG: int -RPC_S_INVALID_BOUND: int -RPC_S_NO_ENTRY_NAME: int -RPC_S_INVALID_NAME_SYNTAX: int -RPC_S_UNSUPPORTED_NAME_SYNTAX: int -RPC_S_UUID_NO_ADDRESS: int -RPC_S_DUPLICATE_ENDPOINT: int -RPC_S_UNKNOWN_AUTHN_TYPE: int -RPC_S_MAX_CALLS_TOO_SMALL: int -RPC_S_STRING_TOO_LONG: int -RPC_S_PROTSEQ_NOT_FOUND: int -RPC_S_PROCNUM_OUT_OF_RANGE: int -RPC_S_BINDING_HAS_NO_AUTH: int -RPC_S_UNKNOWN_AUTHN_SERVICE: int -RPC_S_UNKNOWN_AUTHN_LEVEL: int -RPC_S_INVALID_AUTH_IDENTITY: int -RPC_S_UNKNOWN_AUTHZ_SERVICE: int -EPT_S_INVALID_ENTRY: int -EPT_S_CANT_PERFORM_OP: int -EPT_S_NOT_REGISTERED: int -RPC_S_NOTHING_TO_EXPORT: int -RPC_S_INCOMPLETE_NAME: int -RPC_S_INVALID_VERS_OPTION: int -RPC_S_NO_MORE_MEMBERS: int -RPC_S_NOT_ALL_OBJS_UNEXPORTED: int -RPC_S_INTERFACE_NOT_FOUND: int -RPC_S_ENTRY_ALREADY_EXISTS: int -RPC_S_ENTRY_NOT_FOUND: int -RPC_S_NAME_SERVICE_UNAVAILABLE: int -RPC_S_INVALID_NAF_ID: int -RPC_S_CANNOT_SUPPORT: int -RPC_S_NO_CONTEXT_AVAILABLE: int -RPC_S_INTERNAL_ERROR: int -RPC_S_ZERO_DIVIDE: int -RPC_S_ADDRESS_ERROR: int -RPC_S_FP_DIV_ZERO: int -RPC_S_FP_UNDERFLOW: int -RPC_S_FP_OVERFLOW: int -RPC_X_NO_MORE_ENTRIES: int -RPC_X_SS_CHAR_TRANS_OPEN_FAIL: int -RPC_X_SS_CHAR_TRANS_SHORT_FILE: int -RPC_X_SS_IN_NULL_CONTEXT: int -RPC_X_SS_CONTEXT_DAMAGED: int -RPC_X_SS_HANDLES_MISMATCH: int -RPC_X_SS_CANNOT_GET_CALL_HANDLE: int -RPC_X_NULL_REF_POINTER: int -RPC_X_ENUM_VALUE_OUT_OF_RANGE: int -RPC_X_BYTE_COUNT_TOO_SMALL: int -RPC_X_BAD_STUB_DATA: int -ERROR_INVALID_USER_BUFFER: int -ERROR_UNRECOGNIZED_MEDIA: int -ERROR_NO_TRUST_LSA_SECRET: int -ERROR_NO_TRUST_SAM_ACCOUNT: int -ERROR_TRUSTED_DOMAIN_FAILURE: int -ERROR_TRUSTED_RELATIONSHIP_FAILURE: int -ERROR_TRUST_FAILURE: int -RPC_S_CALL_IN_PROGRESS: int -ERROR_NETLOGON_NOT_STARTED: int -ERROR_ACCOUNT_EXPIRED: int -ERROR_REDIRECTOR_HAS_OPEN_HANDLES: int -ERROR_PRINTER_DRIVER_ALREADY_INSTALLED: int -ERROR_UNKNOWN_PORT: int -ERROR_UNKNOWN_PRINTER_DRIVER: int -ERROR_UNKNOWN_PRINTPROCESSOR: int -ERROR_INVALID_SEPARATOR_FILE: int -ERROR_INVALID_PRIORITY: int -ERROR_INVALID_PRINTER_NAME: int -ERROR_PRINTER_ALREADY_EXISTS: int -ERROR_INVALID_PRINTER_COMMAND: int -ERROR_INVALID_DATATYPE: int -ERROR_INVALID_ENVIRONMENT: int -RPC_S_NO_MORE_BINDINGS: int -ERROR_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT: int -ERROR_NOLOGON_WORKSTATION_TRUST_ACCOUNT: int -ERROR_NOLOGON_SERVER_TRUST_ACCOUNT: int -ERROR_DOMAIN_TRUST_INCONSISTENT: int -ERROR_SERVER_HAS_OPEN_HANDLES: int -ERROR_RESOURCE_DATA_NOT_FOUND: int -ERROR_RESOURCE_TYPE_NOT_FOUND: int -ERROR_RESOURCE_NAME_NOT_FOUND: int -ERROR_RESOURCE_LANG_NOT_FOUND: int -ERROR_NOT_ENOUGH_QUOTA: int -RPC_S_NO_INTERFACES: int -RPC_S_CALL_CANCELLED: int -RPC_S_BINDING_INCOMPLETE: int -RPC_S_COMM_FAILURE: int -RPC_S_UNSUPPORTED_AUTHN_LEVEL: int -RPC_S_NO_PRINC_NAME: int -RPC_S_NOT_RPC_ERROR: int -RPC_S_UUID_LOCAL_ONLY: int -RPC_S_SEC_PKG_ERROR: int -RPC_S_NOT_CANCELLED: int -RPC_X_INVALID_ES_ACTION: int -RPC_X_WRONG_ES_VERSION: int -RPC_X_WRONG_STUB_VERSION: int -RPC_X_INVALID_PIPE_OBJECT: int -RPC_X_WRONG_PIPE_ORDER: int 
-RPC_X_WRONG_PIPE_VERSION: int -RPC_S_GROUP_MEMBER_NOT_FOUND: int -EPT_S_CANT_CREATE: int -RPC_S_INVALID_OBJECT: int -ERROR_INVALID_TIME: int -ERROR_INVALID_FORM_NAME: int -ERROR_INVALID_FORM_SIZE: int -ERROR_ALREADY_WAITING: int -ERROR_PRINTER_DELETED: int -ERROR_INVALID_PRINTER_STATE: int -ERROR_PASSWORD_MUST_CHANGE: int -ERROR_DOMAIN_CONTROLLER_NOT_FOUND: int -ERROR_ACCOUNT_LOCKED_OUT: int -OR_INVALID_OXID: int -OR_INVALID_OID: int -OR_INVALID_SET: int -RPC_S_SEND_INCOMPLETE: int -RPC_S_INVALID_ASYNC_HANDLE: int -RPC_S_INVALID_ASYNC_CALL: int -RPC_X_PIPE_CLOSED: int -RPC_X_PIPE_DISCIPLINE_ERROR: int -RPC_X_PIPE_EMPTY: int -ERROR_NO_SITENAME: int -ERROR_CANT_ACCESS_FILE: int -ERROR_CANT_RESOLVE_FILENAME: int -RPC_S_ENTRY_TYPE_MISMATCH: int -RPC_S_NOT_ALL_OBJS_EXPORTED: int -RPC_S_INTERFACE_NOT_EXPORTED: int -RPC_S_PROFILE_NOT_ADDED: int -RPC_S_PRF_ELT_NOT_ADDED: int -RPC_S_PRF_ELT_NOT_REMOVED: int -RPC_S_GRP_ELT_NOT_ADDED: int -RPC_S_GRP_ELT_NOT_REMOVED: int -ERROR_KM_DRIVER_BLOCKED: int -ERROR_CONTEXT_EXPIRED: int -ERROR_PER_USER_TRUST_QUOTA_EXCEEDED: int -ERROR_ALL_USER_TRUST_QUOTA_EXCEEDED: int -ERROR_USER_DELETE_TRUST_QUOTA_EXCEEDED: int -ERROR_AUTHENTICATION_FIREWALL_FAILED: int -ERROR_REMOTE_PRINT_CONNECTIONS_BLOCKED: int -ERROR_NTLM_BLOCKED: int -ERROR_INVALID_PIXEL_FORMAT: int -ERROR_BAD_DRIVER: int -ERROR_INVALID_WINDOW_STYLE: int -ERROR_METAFILE_NOT_SUPPORTED: int -ERROR_TRANSFORM_NOT_SUPPORTED: int -ERROR_CLIPPING_NOT_SUPPORTED: int -ERROR_INVALID_CMM: int -ERROR_INVALID_PROFILE: int -ERROR_TAG_NOT_FOUND: int -ERROR_TAG_NOT_PRESENT: int -ERROR_DUPLICATE_TAG: int -ERROR_PROFILE_NOT_ASSOCIATED_WITH_DEVICE: int -ERROR_PROFILE_NOT_FOUND: int -ERROR_INVALID_COLORSPACE: int -ERROR_ICM_NOT_ENABLED: int -ERROR_DELETING_ICM_XFORM: int -ERROR_INVALID_TRANSFORM: int -ERROR_COLORSPACE_MISMATCH: int -ERROR_INVALID_COLORINDEX: int -ERROR_PROFILE_DOES_NOT_MATCH_DEVICE: int -ERROR_CONNECTED_OTHER_PASSWORD_DEFAULT: int -ERROR_UNKNOWN_PRINT_MONITOR: int -ERROR_PRINTER_DRIVER_IN_USE: int -ERROR_SPOOL_FILE_NOT_FOUND: int -ERROR_SPL_NO_STARTDOC: int -ERROR_SPL_NO_ADDJOB: int -ERROR_PRINT_PROCESSOR_ALREADY_INSTALLED: int -ERROR_PRINT_MONITOR_ALREADY_INSTALLED: int -ERROR_INVALID_PRINT_MONITOR: int -ERROR_PRINT_MONITOR_IN_USE: int -ERROR_PRINTER_HAS_JOBS_QUEUED: int -ERROR_SUCCESS_REBOOT_REQUIRED: int -ERROR_SUCCESS_RESTART_REQUIRED: int -ERROR_PRINTER_NOT_FOUND: int -ERROR_PRINTER_DRIVER_WARNED: int -ERROR_PRINTER_DRIVER_BLOCKED: int -ERROR_PRINTER_DRIVER_PACKAGE_IN_USE: int -ERROR_CORE_DRIVER_PACKAGE_NOT_FOUND: int -ERROR_FAIL_REBOOT_REQUIRED: int -ERROR_FAIL_REBOOT_INITIATED: int -ERROR_PRINTER_DRIVER_DOWNLOAD_NEEDED: int -ERROR_PRINT_JOB_RESTART_REQUIRED: int -ERROR_IO_REISSUE_AS_CACHED: int -ERROR_WINS_INTERNAL: int -ERROR_CAN_NOT_DEL_LOCAL_WINS: int -ERROR_STATIC_INIT: int -ERROR_INC_BACKUP: int -ERROR_FULL_BACKUP: int -ERROR_REC_NON_EXISTENT: int -ERROR_RPL_NOT_ALLOWED: int -ERROR_DHCP_ADDRESS_CONFLICT: int -ERROR_WMI_GUID_NOT_FOUND: int -ERROR_WMI_INSTANCE_NOT_FOUND: int -ERROR_WMI_ITEMID_NOT_FOUND: int -ERROR_WMI_TRY_AGAIN: int -ERROR_WMI_DP_NOT_FOUND: int -ERROR_WMI_UNRESOLVED_INSTANCE_REF: int -ERROR_WMI_ALREADY_ENABLED: int -ERROR_WMI_GUID_DISCONNECTED: int -ERROR_WMI_SERVER_UNAVAILABLE: int -ERROR_WMI_DP_FAILED: int -ERROR_WMI_INVALID_MOF: int -ERROR_WMI_INVALID_REGINFO: int -ERROR_WMI_ALREADY_DISABLED: int -ERROR_WMI_READ_ONLY: int -ERROR_WMI_SET_FAILURE: int -ERROR_INVALID_MEDIA: int -ERROR_INVALID_LIBRARY: int -ERROR_INVALID_MEDIA_POOL: int -ERROR_DRIVE_MEDIA_MISMATCH: int 
-ERROR_MEDIA_OFFLINE: int -ERROR_LIBRARY_OFFLINE: int -ERROR_EMPTY: int -ERROR_NOT_EMPTY: int -ERROR_MEDIA_UNAVAILABLE: int -ERROR_RESOURCE_DISABLED: int -ERROR_INVALID_CLEANER: int -ERROR_UNABLE_TO_CLEAN: int -ERROR_OBJECT_NOT_FOUND: int -ERROR_DATABASE_FAILURE: int -ERROR_DATABASE_FULL: int -ERROR_MEDIA_INCOMPATIBLE: int -ERROR_RESOURCE_NOT_PRESENT: int -ERROR_INVALID_OPERATION: int -ERROR_MEDIA_NOT_AVAILABLE: int -ERROR_DEVICE_NOT_AVAILABLE: int -ERROR_REQUEST_REFUSED: int -ERROR_INVALID_DRIVE_OBJECT: int -ERROR_LIBRARY_FULL: int -ERROR_MEDIUM_NOT_ACCESSIBLE: int -ERROR_UNABLE_TO_LOAD_MEDIUM: int -ERROR_UNABLE_TO_INVENTORY_DRIVE: int -ERROR_UNABLE_TO_INVENTORY_SLOT: int -ERROR_UNABLE_TO_INVENTORY_TRANSPORT: int -ERROR_TRANSPORT_FULL: int -ERROR_CONTROLLING_IEPORT: int -ERROR_UNABLE_TO_EJECT_MOUNTED_MEDIA: int -ERROR_CLEANER_SLOT_SET: int -ERROR_CLEANER_SLOT_NOT_SET: int -ERROR_CLEANER_CARTRIDGE_SPENT: int -ERROR_UNEXPECTED_OMID: int -ERROR_CANT_DELETE_LAST_ITEM: int -ERROR_MESSAGE_EXCEEDS_MAX_SIZE: int -ERROR_VOLUME_CONTAINS_SYS_FILES: int -ERROR_INDIGENOUS_TYPE: int -ERROR_NO_SUPPORTING_DRIVES: int -ERROR_CLEANER_CARTRIDGE_INSTALLED: int -ERROR_IEPORT_FULL: int -ERROR_FILE_OFFLINE: int -ERROR_REMOTE_STORAGE_NOT_ACTIVE: int -ERROR_REMOTE_STORAGE_MEDIA_ERROR: int -ERROR_NOT_A_REPARSE_POINT: int -ERROR_REPARSE_ATTRIBUTE_CONFLICT: int -ERROR_INVALID_REPARSE_DATA: int -ERROR_REPARSE_TAG_INVALID: int -ERROR_REPARSE_TAG_MISMATCH: int -ERROR_VOLUME_NOT_SIS_ENABLED: int -ERROR_DEPENDENT_RESOURCE_EXISTS: int -ERROR_DEPENDENCY_NOT_FOUND: int -ERROR_DEPENDENCY_ALREADY_EXISTS: int -ERROR_RESOURCE_NOT_ONLINE: int -ERROR_HOST_NODE_NOT_AVAILABLE: int -ERROR_RESOURCE_NOT_AVAILABLE: int -ERROR_RESOURCE_NOT_FOUND: int -ERROR_SHUTDOWN_CLUSTER: int -ERROR_CANT_EVICT_ACTIVE_NODE: int -ERROR_OBJECT_ALREADY_EXISTS: int -ERROR_OBJECT_IN_LIST: int -ERROR_GROUP_NOT_AVAILABLE: int -ERROR_GROUP_NOT_FOUND: int -ERROR_GROUP_NOT_ONLINE: int -ERROR_HOST_NODE_NOT_RESOURCE_OWNER: int -ERROR_HOST_NODE_NOT_GROUP_OWNER: int -ERROR_RESMON_CREATE_FAILED: int -ERROR_RESMON_ONLINE_FAILED: int -ERROR_RESOURCE_ONLINE: int -ERROR_QUORUM_RESOURCE: int -ERROR_NOT_QUORUM_CAPABLE: int -ERROR_CLUSTER_SHUTTING_DOWN: int -ERROR_INVALID_STATE: int -ERROR_RESOURCE_PROPERTIES_STORED: int -ERROR_NOT_QUORUM_CLASS: int -ERROR_CORE_RESOURCE: int -ERROR_QUORUM_RESOURCE_ONLINE_FAILED: int -ERROR_QUORUMLOG_OPEN_FAILED: int -ERROR_CLUSTERLOG_CORRUPT: int -ERROR_CLUSTERLOG_RECORD_EXCEEDS_MAXSIZE: int -ERROR_CLUSTERLOG_EXCEEDS_MAXSIZE: int -ERROR_CLUSTERLOG_CHKPOINT_NOT_FOUND: int -ERROR_CLUSTERLOG_NOT_ENOUGH_SPACE: int -ERROR_QUORUM_OWNER_ALIVE: int -ERROR_NETWORK_NOT_AVAILABLE: int -ERROR_NODE_NOT_AVAILABLE: int -ERROR_ALL_NODES_NOT_AVAILABLE: int -ERROR_RESOURCE_FAILED: int -ERROR_CLUSTER_INVALID_NODE: int -ERROR_CLUSTER_NODE_EXISTS: int -ERROR_CLUSTER_JOIN_IN_PROGRESS: int -ERROR_CLUSTER_NODE_NOT_FOUND: int -ERROR_CLUSTER_LOCAL_NODE_NOT_FOUND: int -ERROR_CLUSTER_NETWORK_EXISTS: int -ERROR_CLUSTER_NETWORK_NOT_FOUND: int -ERROR_CLUSTER_NETINTERFACE_EXISTS: int -ERROR_CLUSTER_NETINTERFACE_NOT_FOUND: int -ERROR_CLUSTER_INVALID_REQUEST: int -ERROR_CLUSTER_INVALID_NETWORK_PROVIDER: int -ERROR_CLUSTER_NODE_DOWN: int -ERROR_CLUSTER_NODE_UNREACHABLE: int -ERROR_CLUSTER_NODE_NOT_MEMBER: int -ERROR_CLUSTER_JOIN_NOT_IN_PROGRESS: int -ERROR_CLUSTER_INVALID_NETWORK: int -ERROR_CLUSTER_NODE_UP: int -ERROR_CLUSTER_IPADDR_IN_USE: int -ERROR_CLUSTER_NODE_NOT_PAUSED: int -ERROR_CLUSTER_NO_SECURITY_CONTEXT: int -ERROR_CLUSTER_NETWORK_NOT_INTERNAL: int 
-ERROR_CLUSTER_NODE_ALREADY_UP: int -ERROR_CLUSTER_NODE_ALREADY_DOWN: int -ERROR_CLUSTER_NETWORK_ALREADY_ONLINE: int -ERROR_CLUSTER_NETWORK_ALREADY_OFFLINE: int -ERROR_CLUSTER_NODE_ALREADY_MEMBER: int -ERROR_CLUSTER_LAST_INTERNAL_NETWORK: int -ERROR_CLUSTER_NETWORK_HAS_DEPENDENTS: int -ERROR_INVALID_OPERATION_ON_QUORUM: int -ERROR_DEPENDENCY_NOT_ALLOWED: int -ERROR_CLUSTER_NODE_PAUSED: int -ERROR_NODE_CANT_HOST_RESOURCE: int -ERROR_CLUSTER_NODE_NOT_READY: int -ERROR_CLUSTER_NODE_SHUTTING_DOWN: int -ERROR_CLUSTER_JOIN_ABORTED: int -ERROR_CLUSTER_INCOMPATIBLE_VERSIONS: int -ERROR_CLUSTER_MAXNUM_OF_RESOURCES_EXCEEDED: int -ERROR_CLUSTER_SYSTEM_CONFIG_CHANGED: int -ERROR_CLUSTER_RESOURCE_TYPE_NOT_FOUND: int -ERROR_CLUSTER_RESTYPE_NOT_SUPPORTED: int -ERROR_CLUSTER_RESNAME_NOT_FOUND: int -ERROR_CLUSTER_NO_RPC_PACKAGES_REGISTERED: int -ERROR_CLUSTER_OWNER_NOT_IN_PREFLIST: int -ERROR_CLUSTER_DATABASE_SEQMISMATCH: int -ERROR_RESMON_INVALID_STATE: int -ERROR_CLUSTER_GUM_NOT_LOCKER: int -ERROR_QUORUM_DISK_NOT_FOUND: int -ERROR_DATABASE_BACKUP_CORRUPT: int -ERROR_CLUSTER_NODE_ALREADY_HAS_DFS_ROOT: int -ERROR_RESOURCE_PROPERTY_UNCHANGEABLE: int -ERROR_CLUSTER_MEMBERSHIP_INVALID_STATE: int -ERROR_CLUSTER_QUORUMLOG_NOT_FOUND: int -ERROR_CLUSTER_MEMBERSHIP_HALT: int -ERROR_CLUSTER_INSTANCE_ID_MISMATCH: int -ERROR_CLUSTER_NETWORK_NOT_FOUND_FOR_IP: int -ERROR_CLUSTER_PROPERTY_DATA_TYPE_MISMATCH: int -ERROR_CLUSTER_EVICT_WITHOUT_CLEANUP: int -ERROR_CLUSTER_PARAMETER_MISMATCH: int -ERROR_NODE_CANNOT_BE_CLUSTERED: int -ERROR_CLUSTER_WRONG_OS_VERSION: int -ERROR_CLUSTER_CANT_CREATE_DUP_CLUSTER_NAME: int -ERROR_CLUSCFG_ALREADY_COMMITTED: int -ERROR_CLUSCFG_ROLLBACK_FAILED: int -ERROR_CLUSCFG_SYSTEM_DISK_DRIVE_LETTER_CONFLICT: int -ERROR_CLUSTER_OLD_VERSION: int -ERROR_CLUSTER_MISMATCHED_COMPUTER_ACCT_NAME: int -ERROR_CLUSTER_NO_NET_ADAPTERS: int -ERROR_CLUSTER_POISONED: int -ERROR_CLUSTER_GROUP_MOVING: int -ERROR_CLUSTER_RESOURCE_TYPE_BUSY: int -ERROR_RESOURCE_CALL_TIMED_OUT: int -ERROR_INVALID_CLUSTER_IPV6_ADDRESS: int -ERROR_CLUSTER_INTERNAL_INVALID_FUNCTION: int -ERROR_CLUSTER_PARAMETER_OUT_OF_BOUNDS: int -ERROR_CLUSTER_PARTIAL_SEND: int -ERROR_CLUSTER_REGISTRY_INVALID_FUNCTION: int -ERROR_CLUSTER_INVALID_STRING_TERMINATION: int -ERROR_CLUSTER_INVALID_STRING_FORMAT: int -ERROR_CLUSTER_DATABASE_TRANSACTION_IN_PROGRESS: int -ERROR_CLUSTER_DATABASE_TRANSACTION_NOT_IN_PROGRESS: int -ERROR_CLUSTER_NULL_DATA: int -ERROR_CLUSTER_PARTIAL_READ: int -ERROR_CLUSTER_PARTIAL_WRITE: int -ERROR_CLUSTER_CANT_DESERIALIZE_DATA: int -ERROR_DEPENDENT_RESOURCE_PROPERTY_CONFLICT: int -ERROR_CLUSTER_NO_QUORUM: int -ERROR_CLUSTER_INVALID_IPV6_NETWORK: int -ERROR_CLUSTER_INVALID_IPV6_TUNNEL_NETWORK: int -ERROR_QUORUM_NOT_ALLOWED_IN_THIS_GROUP: int -ERROR_DEPENDENCY_TREE_TOO_COMPLEX: int -ERROR_EXCEPTION_IN_RESOURCE_CALL: int -ERROR_CLUSTER_RHS_FAILED_INITIALIZATION: int -ERROR_CLUSTER_NOT_INSTALLED: int -ERROR_CLUSTER_RESOURCES_MUST_BE_ONLINE_ON_THE_SAME_NODE: int -ERROR_ENCRYPTION_FAILED: int -ERROR_DECRYPTION_FAILED: int -ERROR_FILE_ENCRYPTED: int -ERROR_NO_RECOVERY_POLICY: int -ERROR_NO_EFS: int -ERROR_WRONG_EFS: int -ERROR_NO_USER_KEYS: int -ERROR_FILE_NOT_ENCRYPTED: int -ERROR_NOT_EXPORT_FORMAT: int -ERROR_FILE_READ_ONLY: int -ERROR_DIR_EFS_DISALLOWED: int -ERROR_EFS_SERVER_NOT_TRUSTED: int -ERROR_BAD_RECOVERY_POLICY: int -ERROR_EFS_ALG_BLOB_TOO_BIG: int -ERROR_VOLUME_NOT_SUPPORT_EFS: int -ERROR_EFS_DISABLED: int -ERROR_EFS_VERSION_NOT_SUPPORT: int -ERROR_CS_ENCRYPTION_INVALID_SERVER_RESPONSE: int 
-ERROR_CS_ENCRYPTION_UNSUPPORTED_SERVER: int -ERROR_CS_ENCRYPTION_EXISTING_ENCRYPTED_FILE: int -ERROR_CS_ENCRYPTION_NEW_ENCRYPTED_FILE: int -ERROR_CS_ENCRYPTION_FILE_NOT_CSE: int -ERROR_NO_BROWSER_SERVERS_FOUND: int -ERROR_LOG_SECTOR_INVALID: int -ERROR_LOG_SECTOR_PARITY_INVALID: int -ERROR_LOG_SECTOR_REMAPPED: int -ERROR_LOG_BLOCK_INCOMPLETE: int -ERROR_LOG_INVALID_RANGE: int -ERROR_LOG_BLOCKS_EXHAUSTED: int -ERROR_LOG_READ_CONTEXT_INVALID: int -ERROR_LOG_RESTART_INVALID: int -ERROR_LOG_BLOCK_VERSION: int -ERROR_LOG_BLOCK_INVALID: int -ERROR_LOG_READ_MODE_INVALID: int -ERROR_LOG_NO_RESTART: int -ERROR_LOG_METADATA_CORRUPT: int -ERROR_LOG_METADATA_INVALID: int -ERROR_LOG_METADATA_INCONSISTENT: int -ERROR_LOG_RESERVATION_INVALID: int -ERROR_LOG_CANT_DELETE: int -ERROR_LOG_CONTAINER_LIMIT_EXCEEDED: int -ERROR_LOG_START_OF_LOG: int -ERROR_LOG_POLICY_ALREADY_INSTALLED: int -ERROR_LOG_POLICY_NOT_INSTALLED: int -ERROR_LOG_POLICY_INVALID: int -ERROR_LOG_POLICY_CONFLICT: int -ERROR_LOG_PINNED_ARCHIVE_TAIL: int -ERROR_LOG_RECORD_NONEXISTENT: int -ERROR_LOG_RECORDS_RESERVED_INVALID: int -ERROR_LOG_SPACE_RESERVED_INVALID: int -ERROR_LOG_TAIL_INVALID: int -ERROR_LOG_FULL: int -ERROR_COULD_NOT_RESIZE_LOG: int -ERROR_LOG_MULTIPLEXED: int -ERROR_LOG_DEDICATED: int -ERROR_LOG_ARCHIVE_NOT_IN_PROGRESS: int -ERROR_LOG_ARCHIVE_IN_PROGRESS: int -ERROR_LOG_EPHEMERAL: int -ERROR_LOG_NOT_ENOUGH_CONTAINERS: int -ERROR_LOG_CLIENT_ALREADY_REGISTERED: int -ERROR_LOG_CLIENT_NOT_REGISTERED: int -ERROR_LOG_FULL_HANDLER_IN_PROGRESS: int -ERROR_LOG_CONTAINER_READ_FAILED: int -ERROR_LOG_CONTAINER_WRITE_FAILED: int -ERROR_LOG_CONTAINER_OPEN_FAILED: int -ERROR_LOG_CONTAINER_STATE_INVALID: int -ERROR_LOG_STATE_INVALID: int -ERROR_LOG_PINNED: int -ERROR_LOG_METADATA_FLUSH_FAILED: int -ERROR_LOG_INCONSISTENT_SECURITY: int -ERROR_LOG_APPENDED_FLUSH_FAILED: int -ERROR_LOG_PINNED_RESERVATION: int -ERROR_INVALID_TRANSACTION: int -ERROR_TRANSACTION_NOT_ACTIVE: int -ERROR_TRANSACTION_REQUEST_NOT_VALID: int -ERROR_TRANSACTION_NOT_REQUESTED: int -ERROR_TRANSACTION_ALREADY_ABORTED: int -ERROR_TRANSACTION_ALREADY_COMMITTED: int -ERROR_TM_INITIALIZATION_FAILED: int -ERROR_RESOURCEMANAGER_READ_ONLY: int -ERROR_TRANSACTION_NOT_JOINED: int -ERROR_TRANSACTION_SUPERIOR_EXISTS: int -ERROR_CRM_PROTOCOL_ALREADY_EXISTS: int -ERROR_TRANSACTION_PROPAGATION_FAILED: int -ERROR_CRM_PROTOCOL_NOT_FOUND: int -ERROR_TRANSACTION_INVALID_MARSHALL_BUFFER: int -ERROR_CURRENT_TRANSACTION_NOT_VALID: int -ERROR_TRANSACTION_NOT_FOUND: int -ERROR_RESOURCEMANAGER_NOT_FOUND: int -ERROR_ENLISTMENT_NOT_FOUND: int -ERROR_TRANSACTIONMANAGER_NOT_FOUND: int -ERROR_TRANSACTIONMANAGER_NOT_ONLINE: int -ERROR_TRANSACTIONMANAGER_RECOVERY_NAME_COLLISION: int -ERROR_TRANSACTION_NOT_ROOT: int -ERROR_TRANSACTION_OBJECT_EXPIRED: int -ERROR_TRANSACTION_RESPONSE_NOT_ENLISTED: int -ERROR_TRANSACTION_RECORD_TOO_LONG: int -ERROR_IMPLICIT_TRANSACTION_NOT_SUPPORTED: int -ERROR_TRANSACTION_INTEGRITY_VIOLATED: int -ERROR_TRANSACTIONAL_CONFLICT: int -ERROR_RM_NOT_ACTIVE: int -ERROR_RM_METADATA_CORRUPT: int -ERROR_DIRECTORY_NOT_RM: int -ERROR_TRANSACTIONS_UNSUPPORTED_REMOTE: int -ERROR_LOG_RESIZE_INVALID_SIZE: int -ERROR_OBJECT_NO_LONGER_EXISTS: int -ERROR_STREAM_MINIVERSION_NOT_FOUND: int -ERROR_STREAM_MINIVERSION_NOT_VALID: int -ERROR_MINIVERSION_INACCESSIBLE_FROM_SPECIFIED_TRANSACTION: int -ERROR_CANT_OPEN_MINIVERSION_WITH_MODIFY_INTENT: int -ERROR_CANT_CREATE_MORE_STREAM_MINIVERSIONS: int -ERROR_REMOTE_FILE_VERSION_MISMATCH: int -ERROR_HANDLE_NO_LONGER_VALID: int -ERROR_NO_TXF_METADATA: 
int -ERROR_LOG_CORRUPTION_DETECTED: int -ERROR_CANT_RECOVER_WITH_HANDLE_OPEN: int -ERROR_RM_DISCONNECTED: int -ERROR_ENLISTMENT_NOT_SUPERIOR: int -ERROR_RECOVERY_NOT_NEEDED: int -ERROR_RM_ALREADY_STARTED: int -ERROR_FILE_IDENTITY_NOT_PERSISTENT: int -ERROR_CANT_BREAK_TRANSACTIONAL_DEPENDENCY: int -ERROR_CANT_CROSS_RM_BOUNDARY: int -ERROR_TXF_DIR_NOT_EMPTY: int -ERROR_INDOUBT_TRANSACTIONS_EXIST: int -ERROR_TM_VOLATILE: int -ERROR_ROLLBACK_TIMER_EXPIRED: int -ERROR_TXF_ATTRIBUTE_CORRUPT: int -ERROR_EFS_NOT_ALLOWED_IN_TRANSACTION: int -ERROR_TRANSACTIONAL_OPEN_NOT_ALLOWED: int -ERROR_LOG_GROWTH_FAILED: int -ERROR_TRANSACTED_MAPPING_UNSUPPORTED_REMOTE: int -ERROR_TXF_METADATA_ALREADY_PRESENT: int -ERROR_TRANSACTION_SCOPE_CALLBACKS_NOT_SET: int -ERROR_TRANSACTION_REQUIRED_PROMOTION: int -ERROR_CANNOT_EXECUTE_FILE_IN_TRANSACTION: int -ERROR_TRANSACTIONS_NOT_FROZEN: int -ERROR_TRANSACTION_FREEZE_IN_PROGRESS: int -ERROR_NOT_SNAPSHOT_VOLUME: int -ERROR_NO_SAVEPOINT_WITH_OPEN_FILES: int -ERROR_DATA_LOST_REPAIR: int -ERROR_SPARSE_NOT_ALLOWED_IN_TRANSACTION: int -ERROR_TM_IDENTITY_MISMATCH: int -ERROR_FLOATED_SECTION: int -ERROR_CANNOT_ACCEPT_TRANSACTED_WORK: int -ERROR_CANNOT_ABORT_TRANSACTIONS: int -ERROR_BAD_CLUSTERS: int -ERROR_COMPRESSION_NOT_ALLOWED_IN_TRANSACTION: int -ERROR_VOLUME_DIRTY: int -ERROR_NO_LINK_TRACKING_IN_TRANSACTION: int -ERROR_OPERATION_NOT_SUPPORTED_IN_TRANSACTION: int -ERROR_CTX_WINSTATION_NAME_INVALID: int -ERROR_CTX_INVALID_PD: int -ERROR_CTX_PD_NOT_FOUND: int -ERROR_CTX_WD_NOT_FOUND: int -ERROR_CTX_CANNOT_MAKE_EVENTLOG_ENTRY: int -ERROR_CTX_SERVICE_NAME_COLLISION: int -ERROR_CTX_CLOSE_PENDING: int -ERROR_CTX_NO_OUTBUF: int -ERROR_CTX_MODEM_INF_NOT_FOUND: int -ERROR_CTX_INVALID_MODEMNAME: int -ERROR_CTX_MODEM_RESPONSE_ERROR: int -ERROR_CTX_MODEM_RESPONSE_TIMEOUT: int -ERROR_CTX_MODEM_RESPONSE_NO_CARRIER: int -ERROR_CTX_MODEM_RESPONSE_NO_DIALTONE: int -ERROR_CTX_MODEM_RESPONSE_BUSY: int -ERROR_CTX_MODEM_RESPONSE_VOICE: int -ERROR_CTX_TD_ERROR: int -ERROR_CTX_WINSTATION_NOT_FOUND: int -ERROR_CTX_WINSTATION_ALREADY_EXISTS: int -ERROR_CTX_WINSTATION_BUSY: int -ERROR_CTX_BAD_VIDEO_MODE: int -ERROR_CTX_GRAPHICS_INVALID: int -ERROR_CTX_LOGON_DISABLED: int -ERROR_CTX_NOT_CONSOLE: int -ERROR_CTX_CLIENT_QUERY_TIMEOUT: int -ERROR_CTX_CONSOLE_DISCONNECT: int -ERROR_CTX_CONSOLE_CONNECT: int -ERROR_CTX_SHADOW_DENIED: int -ERROR_CTX_WINSTATION_ACCESS_DENIED: int -ERROR_CTX_INVALID_WD: int -ERROR_CTX_SHADOW_INVALID: int -ERROR_CTX_SHADOW_DISABLED: int -ERROR_CTX_CLIENT_LICENSE_IN_USE: int -ERROR_CTX_CLIENT_LICENSE_NOT_SET: int -ERROR_CTX_LICENSE_NOT_AVAILABLE: int -ERROR_CTX_LICENSE_CLIENT_INVALID: int -ERROR_CTX_LICENSE_EXPIRED: int -ERROR_CTX_SHADOW_NOT_RUNNING: int -ERROR_CTX_SHADOW_ENDED_BY_MODE_CHANGE: int -ERROR_ACTIVATION_COUNT_EXCEEDED: int -ERROR_CTX_WINSTATIONS_DISABLED: int -ERROR_CTX_ENCRYPTION_LEVEL_REQUIRED: int -ERROR_CTX_SESSION_IN_USE: int -ERROR_CTX_NO_FORCE_LOGOFF: int -ERROR_CTX_ACCOUNT_RESTRICTION: int -ERROR_RDP_PROTOCOL_ERROR: int -ERROR_CTX_CDM_CONNECT: int -ERROR_CTX_CDM_DISCONNECT: int -ERROR_CTX_SECURITY_LAYER_ERROR: int -ERROR_TS_INCOMPATIBLE_SESSIONS: int -FRS_ERR_INVALID_API_SEQUENCE: int -FRS_ERR_STARTING_SERVICE: int -FRS_ERR_STOPPING_SERVICE: int -FRS_ERR_INTERNAL_API: int -FRS_ERR_INTERNAL: int -FRS_ERR_SERVICE_COMM: int -FRS_ERR_INSUFFICIENT_PRIV: int -FRS_ERR_AUTHENTICATION: int -FRS_ERR_PARENT_INSUFFICIENT_PRIV: int -FRS_ERR_PARENT_AUTHENTICATION: int -FRS_ERR_CHILD_TO_PARENT_COMM: int -FRS_ERR_PARENT_TO_CHILD_COMM: int -FRS_ERR_SYSVOL_POPULATE: int 
-FRS_ERR_SYSVOL_POPULATE_TIMEOUT: int -FRS_ERR_SYSVOL_IS_BUSY: int -FRS_ERR_SYSVOL_DEMOTE: int -FRS_ERR_INVALID_SERVICE_PARAMETER: int -DS_S_SUCCESS: int -ERROR_DS_MEMBERSHIP_EVALUATED_LOCALLY: int -ERROR_DS_NO_ATTRIBUTE_OR_VALUE: int -ERROR_DS_INVALID_ATTRIBUTE_SYNTAX: int -ERROR_DS_ATTRIBUTE_TYPE_UNDEFINED: int -ERROR_DS_ATTRIBUTE_OR_VALUE_EXISTS: int -ERROR_DS_BUSY: int -ERROR_DS_UNAVAILABLE: int -ERROR_DS_NO_RIDS_ALLOCATED: int -ERROR_DS_NO_MORE_RIDS: int -ERROR_DS_INCORRECT_ROLE_OWNER: int -ERROR_DS_RIDMGR_INIT_ERROR: int -ERROR_DS_OBJ_CLASS_VIOLATION: int -ERROR_DS_CANT_ON_NON_LEAF: int -ERROR_DS_CANT_ON_RDN: int -ERROR_DS_CANT_MOD_OBJ_CLASS: int -ERROR_DS_CROSS_DOM_MOVE_ERROR: int -ERROR_DS_GC_NOT_AVAILABLE: int -ERROR_SHARED_POLICY: int -ERROR_POLICY_OBJECT_NOT_FOUND: int -ERROR_POLICY_ONLY_IN_DS: int -ERROR_PROMOTION_ACTIVE: int -ERROR_NO_PROMOTION_ACTIVE: int -ERROR_DS_OPERATIONS_ERROR: int -ERROR_DS_PROTOCOL_ERROR: int -ERROR_DS_TIMELIMIT_EXCEEDED: int -ERROR_DS_SIZELIMIT_EXCEEDED: int -ERROR_DS_ADMIN_LIMIT_EXCEEDED: int -ERROR_DS_COMPARE_FALSE: int -ERROR_DS_COMPARE_TRUE: int -ERROR_DS_AUTH_METHOD_NOT_SUPPORTED: int -ERROR_DS_STRONG_AUTH_REQUIRED: int -ERROR_DS_INAPPROPRIATE_AUTH: int -ERROR_DS_AUTH_UNKNOWN: int -ERROR_DS_REFERRAL: int -ERROR_DS_UNAVAILABLE_CRIT_EXTENSION: int -ERROR_DS_CONFIDENTIALITY_REQUIRED: int -ERROR_DS_INAPPROPRIATE_MATCHING: int -ERROR_DS_CONSTRAINT_VIOLATION: int -ERROR_DS_NO_SUCH_OBJECT: int -ERROR_DS_ALIAS_PROBLEM: int -ERROR_DS_INVALID_DN_SYNTAX: int -ERROR_DS_IS_LEAF: int -ERROR_DS_ALIAS_DEREF_PROBLEM: int -ERROR_DS_UNWILLING_TO_PERFORM: int -ERROR_DS_LOOP_DETECT: int -ERROR_DS_NAMING_VIOLATION: int -ERROR_DS_OBJECT_RESULTS_TOO_LARGE: int -ERROR_DS_AFFECTS_MULTIPLE_DSAS: int -ERROR_DS_SERVER_DOWN: int -ERROR_DS_LOCAL_ERROR: int -ERROR_DS_ENCODING_ERROR: int -ERROR_DS_DECODING_ERROR: int -ERROR_DS_FILTER_UNKNOWN: int -ERROR_DS_PARAM_ERROR: int -ERROR_DS_NOT_SUPPORTED: int -ERROR_DS_NO_RESULTS_RETURNED: int -ERROR_DS_CONTROL_NOT_FOUND: int -ERROR_DS_CLIENT_LOOP: int -ERROR_DS_REFERRAL_LIMIT_EXCEEDED: int -ERROR_DS_SORT_CONTROL_MISSING: int -ERROR_DS_OFFSET_RANGE_ERROR: int -ERROR_DS_ROOT_MUST_BE_NC: int -ERROR_DS_ADD_REPLICA_INHIBITED: int -ERROR_DS_ATT_NOT_DEF_IN_SCHEMA: int -ERROR_DS_MAX_OBJ_SIZE_EXCEEDED: int -ERROR_DS_OBJ_STRING_NAME_EXISTS: int -ERROR_DS_NO_RDN_DEFINED_IN_SCHEMA: int -ERROR_DS_RDN_DOESNT_MATCH_SCHEMA: int -ERROR_DS_NO_REQUESTED_ATTS_FOUND: int -ERROR_DS_USER_BUFFER_TO_SMALL: int -ERROR_DS_ATT_IS_NOT_ON_OBJ: int -ERROR_DS_ILLEGAL_MOD_OPERATION: int -ERROR_DS_OBJ_TOO_LARGE: int -ERROR_DS_BAD_INSTANCE_TYPE: int -ERROR_DS_MASTERDSA_REQUIRED: int -ERROR_DS_OBJECT_CLASS_REQUIRED: int -ERROR_DS_MISSING_REQUIRED_ATT: int -ERROR_DS_ATT_NOT_DEF_FOR_CLASS: int -ERROR_DS_ATT_ALREADY_EXISTS: int -ERROR_DS_CANT_ADD_ATT_VALUES: int -ERROR_DS_SINGLE_VALUE_CONSTRAINT: int -ERROR_DS_RANGE_CONSTRAINT: int -ERROR_DS_ATT_VAL_ALREADY_EXISTS: int -ERROR_DS_CANT_REM_MISSING_ATT: int -ERROR_DS_CANT_REM_MISSING_ATT_VAL: int -ERROR_DS_ROOT_CANT_BE_SUBREF: int -ERROR_DS_NO_CHAINING: int -ERROR_DS_NO_CHAINED_EVAL: int -ERROR_DS_NO_PARENT_OBJECT: int -ERROR_DS_PARENT_IS_AN_ALIAS: int -ERROR_DS_CANT_MIX_MASTER_AND_REPS: int -ERROR_DS_CHILDREN_EXIST: int -ERROR_DS_OBJ_NOT_FOUND: int -ERROR_DS_ALIASED_OBJ_MISSING: int -ERROR_DS_BAD_NAME_SYNTAX: int -ERROR_DS_ALIAS_POINTS_TO_ALIAS: int -ERROR_DS_CANT_DEREF_ALIAS: int -ERROR_DS_OUT_OF_SCOPE: int -ERROR_DS_OBJECT_BEING_REMOVED: int -ERROR_DS_CANT_DELETE_DSA_OBJ: int -ERROR_DS_GENERIC_ERROR: int 
-ERROR_DS_DSA_MUST_BE_INT_MASTER: int -ERROR_DS_CLASS_NOT_DSA: int -ERROR_DS_INSUFF_ACCESS_RIGHTS: int -ERROR_DS_ILLEGAL_SUPERIOR: int -ERROR_DS_ATTRIBUTE_OWNED_BY_SAM: int -ERROR_DS_NAME_TOO_MANY_PARTS: int -ERROR_DS_NAME_TOO_LONG: int -ERROR_DS_NAME_VALUE_TOO_LONG: int -ERROR_DS_NAME_UNPARSEABLE: int -ERROR_DS_NAME_TYPE_UNKNOWN: int -ERROR_DS_NOT_AN_OBJECT: int -ERROR_DS_SEC_DESC_TOO_SHORT: int -ERROR_DS_SEC_DESC_INVALID: int -ERROR_DS_NO_DELETED_NAME: int -ERROR_DS_SUBREF_MUST_HAVE_PARENT: int -ERROR_DS_NCNAME_MUST_BE_NC: int -ERROR_DS_CANT_ADD_SYSTEM_ONLY: int -ERROR_DS_CLASS_MUST_BE_CONCRETE: int -ERROR_DS_INVALID_DMD: int -ERROR_DS_OBJ_GUID_EXISTS: int -ERROR_DS_NOT_ON_BACKLINK: int -ERROR_DS_NO_CROSSREF_FOR_NC: int -ERROR_DS_SHUTTING_DOWN: int -ERROR_DS_UNKNOWN_OPERATION: int -ERROR_DS_INVALID_ROLE_OWNER: int -ERROR_DS_COULDNT_CONTACT_FSMO: int -ERROR_DS_CROSS_NC_DN_RENAME: int -ERROR_DS_CANT_MOD_SYSTEM_ONLY: int -ERROR_DS_REPLICATOR_ONLY: int -ERROR_DS_OBJ_CLASS_NOT_DEFINED: int -ERROR_DS_OBJ_CLASS_NOT_SUBCLASS: int -ERROR_DS_NAME_REFERENCE_INVALID: int -ERROR_DS_CROSS_REF_EXISTS: int -ERROR_DS_CANT_DEL_MASTER_CROSSREF: int -ERROR_DS_SUBTREE_NOTIFY_NOT_NC_HEAD: int -ERROR_DS_NOTIFY_FILTER_TOO_COMPLEX: int -ERROR_DS_DUP_RDN: int -ERROR_DS_DUP_OID: int -ERROR_DS_DUP_MAPI_ID: int -ERROR_DS_DUP_SCHEMA_ID_GUID: int -ERROR_DS_DUP_LDAP_DISPLAY_NAME: int -ERROR_DS_SEMANTIC_ATT_TEST: int -ERROR_DS_SYNTAX_MISMATCH: int -ERROR_DS_EXISTS_IN_MUST_HAVE: int -ERROR_DS_EXISTS_IN_MAY_HAVE: int -ERROR_DS_NONEXISTENT_MAY_HAVE: int -ERROR_DS_NONEXISTENT_MUST_HAVE: int -ERROR_DS_AUX_CLS_TEST_FAIL: int -ERROR_DS_NONEXISTENT_POSS_SUP: int -ERROR_DS_SUB_CLS_TEST_FAIL: int -ERROR_DS_BAD_RDN_ATT_ID_SYNTAX: int -ERROR_DS_EXISTS_IN_AUX_CLS: int -ERROR_DS_EXISTS_IN_SUB_CLS: int -ERROR_DS_EXISTS_IN_POSS_SUP: int -ERROR_DS_RECALCSCHEMA_FAILED: int -ERROR_DS_TREE_DELETE_NOT_FINISHED: int -ERROR_DS_CANT_DELETE: int -ERROR_DS_ATT_SCHEMA_REQ_ID: int -ERROR_DS_BAD_ATT_SCHEMA_SYNTAX: int -ERROR_DS_CANT_CACHE_ATT: int -ERROR_DS_CANT_CACHE_CLASS: int -ERROR_DS_CANT_REMOVE_ATT_CACHE: int -ERROR_DS_CANT_REMOVE_CLASS_CACHE: int -ERROR_DS_CANT_RETRIEVE_DN: int -ERROR_DS_MISSING_SUPREF: int -ERROR_DS_CANT_RETRIEVE_INSTANCE: int -ERROR_DS_CODE_INCONSISTENCY: int -ERROR_DS_DATABASE_ERROR: int -ERROR_DS_GOVERNSID_MISSING: int -ERROR_DS_MISSING_EXPECTED_ATT: int -ERROR_DS_NCNAME_MISSING_CR_REF: int -ERROR_DS_SECURITY_CHECKING_ERROR: int -ERROR_DS_SCHEMA_NOT_LOADED: int -ERROR_DS_SCHEMA_ALLOC_FAILED: int -ERROR_DS_ATT_SCHEMA_REQ_SYNTAX: int -ERROR_DS_GCVERIFY_ERROR: int -ERROR_DS_DRA_SCHEMA_MISMATCH: int -ERROR_DS_CANT_FIND_DSA_OBJ: int -ERROR_DS_CANT_FIND_EXPECTED_NC: int -ERROR_DS_CANT_FIND_NC_IN_CACHE: int -ERROR_DS_CANT_RETRIEVE_CHILD: int -ERROR_DS_SECURITY_ILLEGAL_MODIFY: int -ERROR_DS_CANT_REPLACE_HIDDEN_REC: int -ERROR_DS_BAD_HIERARCHY_FILE: int -ERROR_DS_BUILD_HIERARCHY_TABLE_FAILED: int -ERROR_DS_CONFIG_PARAM_MISSING: int -ERROR_DS_COUNTING_AB_INDICES_FAILED: int -ERROR_DS_HIERARCHY_TABLE_MALLOC_FAILED: int -ERROR_DS_INTERNAL_FAILURE: int -ERROR_DS_UNKNOWN_ERROR: int -ERROR_DS_ROOT_REQUIRES_CLASS_TOP: int -ERROR_DS_REFUSING_FSMO_ROLES: int -ERROR_DS_MISSING_FSMO_SETTINGS: int -ERROR_DS_UNABLE_TO_SURRENDER_ROLES: int -ERROR_DS_DRA_GENERIC: int -ERROR_DS_DRA_INVALID_PARAMETER: int -ERROR_DS_DRA_BUSY: int -ERROR_DS_DRA_BAD_DN: int -ERROR_DS_DRA_BAD_NC: int -ERROR_DS_DRA_DN_EXISTS: int -ERROR_DS_DRA_INTERNAL_ERROR: int -ERROR_DS_DRA_INCONSISTENT_DIT: int -ERROR_DS_DRA_CONNECTION_FAILED: int -ERROR_DS_DRA_BAD_INSTANCE_TYPE: 
int -ERROR_DS_DRA_OUT_OF_MEM: int -ERROR_DS_DRA_MAIL_PROBLEM: int -ERROR_DS_DRA_REF_ALREADY_EXISTS: int -ERROR_DS_DRA_REF_NOT_FOUND: int -ERROR_DS_DRA_OBJ_IS_REP_SOURCE: int -ERROR_DS_DRA_DB_ERROR: int -ERROR_DS_DRA_NO_REPLICA: int -ERROR_DS_DRA_ACCESS_DENIED: int -ERROR_DS_DRA_NOT_SUPPORTED: int -ERROR_DS_DRA_RPC_CANCELLED: int -ERROR_DS_DRA_SOURCE_DISABLED: int -ERROR_DS_DRA_SINK_DISABLED: int -ERROR_DS_DRA_NAME_COLLISION: int -ERROR_DS_DRA_SOURCE_REINSTALLED: int -ERROR_DS_DRA_MISSING_PARENT: int -ERROR_DS_DRA_PREEMPTED: int -ERROR_DS_DRA_ABANDON_SYNC: int -ERROR_DS_DRA_SHUTDOWN: int -ERROR_DS_DRA_INCOMPATIBLE_PARTIAL_SET: int -ERROR_DS_DRA_SOURCE_IS_PARTIAL_REPLICA: int -ERROR_DS_DRA_EXTN_CONNECTION_FAILED: int -ERROR_DS_INSTALL_SCHEMA_MISMATCH: int -ERROR_DS_DUP_LINK_ID: int -ERROR_DS_NAME_ERROR_RESOLVING: int -ERROR_DS_NAME_ERROR_NOT_FOUND: int -ERROR_DS_NAME_ERROR_NOT_UNIQUE: int -ERROR_DS_NAME_ERROR_NO_MAPPING: int -ERROR_DS_NAME_ERROR_DOMAIN_ONLY: int -ERROR_DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING: int -ERROR_DS_CONSTRUCTED_ATT_MOD: int -ERROR_DS_WRONG_OM_OBJ_CLASS: int -ERROR_DS_DRA_REPL_PENDING: int -ERROR_DS_DS_REQUIRED: int -ERROR_DS_INVALID_LDAP_DISPLAY_NAME: int -ERROR_DS_NON_BASE_SEARCH: int -ERROR_DS_CANT_RETRIEVE_ATTS: int -ERROR_DS_BACKLINK_WITHOUT_LINK: int -ERROR_DS_EPOCH_MISMATCH: int -ERROR_DS_SRC_NAME_MISMATCH: int -ERROR_DS_SRC_AND_DST_NC_IDENTICAL: int -ERROR_DS_DST_NC_MISMATCH: int -ERROR_DS_NOT_AUTHORITIVE_FOR_DST_NC: int -ERROR_DS_SRC_GUID_MISMATCH: int -ERROR_DS_CANT_MOVE_DELETED_OBJECT: int -ERROR_DS_PDC_OPERATION_IN_PROGRESS: int -ERROR_DS_CROSS_DOMAIN_CLEANUP_REQD: int -ERROR_DS_ILLEGAL_XDOM_MOVE_OPERATION: int -ERROR_DS_CANT_WITH_ACCT_GROUP_MEMBERSHPS: int -ERROR_DS_NC_MUST_HAVE_NC_PARENT: int -ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE: int -ERROR_DS_DST_DOMAIN_NOT_NATIVE: int -ERROR_DS_MISSING_INFRASTRUCTURE_CONTAINER: int -ERROR_DS_CANT_MOVE_ACCOUNT_GROUP: int -ERROR_DS_CANT_MOVE_RESOURCE_GROUP: int -ERROR_DS_INVALID_SEARCH_FLAG: int -ERROR_DS_NO_TREE_DELETE_ABOVE_NC: int -ERROR_DS_COULDNT_LOCK_TREE_FOR_DELETE: int -ERROR_DS_COULDNT_IDENTIFY_OBJECTS_FOR_TREE_DELETE: int -ERROR_DS_SAM_INIT_FAILURE: int -ERROR_DS_SENSITIVE_GROUP_VIOLATION: int -ERROR_DS_CANT_MOD_PRIMARYGROUPID: int -ERROR_DS_ILLEGAL_BASE_SCHEMA_MOD: int -ERROR_DS_NONSAFE_SCHEMA_CHANGE: int -ERROR_DS_SCHEMA_UPDATE_DISALLOWED: int -ERROR_DS_CANT_CREATE_UNDER_SCHEMA: int -ERROR_DS_INSTALL_NO_SRC_SCH_VERSION: int -ERROR_DS_INSTALL_NO_SCH_VERSION_IN_INIFILE: int -ERROR_DS_INVALID_GROUP_TYPE: int -ERROR_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN: int -ERROR_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN: int -ERROR_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER: int -ERROR_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER: int -ERROR_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER: int -ERROR_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER: int -ERROR_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER: int -ERROR_DS_HAVE_PRIMARY_MEMBERS: int -ERROR_DS_STRING_SD_CONVERSION_FAILED: int -ERROR_DS_NAMING_MASTER_GC: int -ERROR_DS_DNS_LOOKUP_FAILURE: int -ERROR_DS_COULDNT_UPDATE_SPNS: int -ERROR_DS_CANT_RETRIEVE_SD: int -ERROR_DS_KEY_NOT_UNIQUE: int -ERROR_DS_WRONG_LINKED_ATT_SYNTAX: int -ERROR_DS_SAM_NEED_BOOTKEY_PASSWORD: int -ERROR_DS_SAM_NEED_BOOTKEY_FLOPPY: int -ERROR_DS_CANT_START: int -ERROR_DS_INIT_FAILURE: int -ERROR_DS_NO_PKT_PRIVACY_ON_CONNECTION: int -ERROR_DS_SOURCE_DOMAIN_IN_FOREST: int -ERROR_DS_DESTINATION_DOMAIN_NOT_IN_FOREST: int -ERROR_DS_DESTINATION_AUDITING_NOT_ENABLED: int -ERROR_DS_CANT_FIND_DC_FOR_SRC_DOMAIN: int -ERROR_DS_SRC_OBJ_NOT_GROUP_OR_USER: int 
-ERROR_DS_SRC_SID_EXISTS_IN_FOREST: int
-ERROR_DS_SRC_AND_DST_OBJECT_CLASS_MISMATCH: int
-ERROR_SAM_INIT_FAILURE: int
-ERROR_DS_DRA_SCHEMA_INFO_SHIP: int
-ERROR_DS_DRA_SCHEMA_CONFLICT: int
-ERROR_DS_DRA_EARLIER_SCHEMA_CONFLICT: int
-ERROR_DS_DRA_OBJ_NC_MISMATCH: int
-ERROR_DS_NC_STILL_HAS_DSAS: int
-ERROR_DS_GC_REQUIRED: int
-ERROR_DS_LOCAL_MEMBER_OF_LOCAL_ONLY: int
-ERROR_DS_NO_FPO_IN_UNIVERSAL_GROUPS: int
-ERROR_DS_CANT_ADD_TO_GC: int
-ERROR_DS_NO_CHECKPOINT_WITH_PDC: int
-ERROR_DS_SOURCE_AUDITING_NOT_ENABLED: int
-ERROR_DS_CANT_CREATE_IN_NONDOMAIN_NC: int
-ERROR_DS_INVALID_NAME_FOR_SPN: int
-ERROR_DS_FILTER_USES_CONTRUCTED_ATTRS: int
-ERROR_DS_UNICODEPWD_NOT_IN_QUOTES: int
-ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED: int
-ERROR_DS_MUST_BE_RUN_ON_DST_DC: int
-ERROR_DS_SRC_DC_MUST_BE_SP4_OR_GREATER: int
-ERROR_DS_CANT_TREE_DELETE_CRITICAL_OBJ: int
-ERROR_DS_INIT_FAILURE_CONSOLE: int
-ERROR_DS_SAM_INIT_FAILURE_CONSOLE: int
-ERROR_DS_FOREST_VERSION_TOO_HIGH: int
-ERROR_DS_DOMAIN_VERSION_TOO_HIGH: int
-ERROR_DS_FOREST_VERSION_TOO_LOW: int
-ERROR_DS_DOMAIN_VERSION_TOO_LOW: int
-ERROR_DS_INCOMPATIBLE_VERSION: int
-ERROR_DS_LOW_DSA_VERSION: int
-ERROR_DS_NO_BEHAVIOR_VERSION_IN_MIXEDDOMAIN: int
-ERROR_DS_NOT_SUPPORTED_SORT_ORDER: int
-ERROR_DS_NAME_NOT_UNIQUE: int
-ERROR_DS_MACHINE_ACCOUNT_CREATED_PRENT4: int
-ERROR_DS_OUT_OF_VERSION_STORE: int
-ERROR_DS_INCOMPATIBLE_CONTROLS_USED: int
-ERROR_DS_NO_REF_DOMAIN: int
-ERROR_DS_RESERVED_LINK_ID: int
-ERROR_DS_LINK_ID_NOT_AVAILABLE: int
-ERROR_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER: int
-ERROR_DS_MODIFYDN_DISALLOWED_BY_INSTANCE_TYPE: int
-ERROR_DS_NO_OBJECT_MOVE_IN_SCHEMA_NC: int
-ERROR_DS_MODIFYDN_DISALLOWED_BY_FLAG: int
-ERROR_DS_MODIFYDN_WRONG_GRANDPARENT: int
-ERROR_DS_NAME_ERROR_TRUST_REFERRAL: int
-ERROR_NOT_SUPPORTED_ON_STANDARD_SERVER: int
-ERROR_DS_CANT_ACCESS_REMOTE_PART_OF_AD: int
-ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE_V2: int
-ERROR_DS_THREAD_LIMIT_EXCEEDED: int
-ERROR_DS_NOT_CLOSEST: int
-ERROR_DS_CANT_DERIVE_SPN_WITHOUT_SERVER_REF: int
-ERROR_DS_SINGLE_USER_MODE_FAILED: int
-ERROR_DS_NTDSCRIPT_SYNTAX_ERROR: int
-ERROR_DS_NTDSCRIPT_PROCESS_ERROR: int
-ERROR_DS_DIFFERENT_REPL_EPOCHS: int
-ERROR_DS_DRS_EXTENSIONS_CHANGED: int
-ERROR_DS_REPLICA_SET_CHANGE_NOT_ALLOWED_ON_DISABLED_CR: int
-ERROR_DS_NO_MSDS_INTID: int
-ERROR_DS_DUP_MSDS_INTID: int
-ERROR_DS_EXISTS_IN_RDNATTID: int
-ERROR_DS_AUTHORIZATION_FAILED: int
-ERROR_DS_INVALID_SCRIPT: int
-ERROR_DS_REMOTE_CROSSREF_OP_FAILED: int
-ERROR_DS_CROSS_REF_BUSY: int
-ERROR_DS_CANT_DERIVE_SPN_FOR_DELETED_DOMAIN: int
-ERROR_DS_CANT_DEMOTE_WITH_WRITEABLE_NC: int
-ERROR_DS_DUPLICATE_ID_FOUND: int
-ERROR_DS_INSUFFICIENT_ATTR_TO_CREATE_OBJECT: int
-ERROR_DS_GROUP_CONVERSION_ERROR: int
-ERROR_DS_CANT_MOVE_APP_BASIC_GROUP: int
-ERROR_DS_CANT_MOVE_APP_QUERY_GROUP: int
-ERROR_DS_ROLE_NOT_VERIFIED: int
-ERROR_DS_WKO_CONTAINER_CANNOT_BE_SPECIAL: int
-ERROR_DS_DOMAIN_RENAME_IN_PROGRESS: int
-ERROR_DS_EXISTING_AD_CHILD_NC: int
-ERROR_DS_REPL_LIFETIME_EXCEEDED: int
-ERROR_DS_DISALLOWED_IN_SYSTEM_CONTAINER: int
-ERROR_DS_LDAP_SEND_QUEUE_FULL: int
-ERROR_DS_DRA_OUT_SCHEDULE_WINDOW: int
-ERROR_DS_POLICY_NOT_KNOWN: int
-ERROR_NO_SITE_SETTINGS_OBJECT: int
-ERROR_NO_SECRETS: int
-ERROR_NO_WRITABLE_DC_FOUND: int
-ERROR_DS_NO_SERVER_OBJECT: int
-ERROR_DS_NO_NTDSA_OBJECT: int
-ERROR_DS_NON_ASQ_SEARCH: int
-ERROR_DS_AUDIT_FAILURE: int
-ERROR_DS_INVALID_SEARCH_FLAG_SUBTREE: int
-ERROR_DS_INVALID_SEARCH_FLAG_TUPLE: int
-ERROR_DS_HIERARCHY_TABLE_TOO_DEEP: int
-SEVERITY_SUCCESS: int
-SEVERITY_ERROR: int
+from typing import Final
-def HRESULT_FROM_WIN32(scode): ...
-def SUCCEEDED(Status): ...
-def FAILED(Status): ...
-def HRESULT_CODE(hr: int) -> int: ...
+ERROR_INSTALL_SERVICE: Final = 1601
+ERROR_BAD_DATABASE_VERSION: Final = 1613
+win16_E_NOTIMPL: Final = -2147483647
+win16_E_OUTOFMEMORY: Final = -2147483646
+win16_E_INVALIDARG: Final = -2147483645
+win16_E_NOINTERFACE: Final = -2147483644
+win16_E_POINTER: Final = -2147483643
+win16_E_HANDLE: Final = -2147483642
+win16_E_ABORT: Final = -2147483641
+win16_E_FAIL: Final = -2147483640
+win16_E_ACCESSDENIED: Final = -2147483639
+CERTDB_E_JET_ERROR: Final = -2146873344
+
+FACILITY_NULL: Final = 0
+FACILITY_RPC: Final = 1
+FACILITY_DISPATCH: Final = 2
+FACILITY_STORAGE: Final = 3
+FACILITY_ITF: Final = 4
+FACILITY_WIN32: Final = 7
+FACILITY_WINDOWS: Final = 8
+FACILITY_SSPI: Final = 9
+FACILITY_SECURITY: Final = 9
+FACILITY_CONTROL: Final = 10
+FACILITY_CERT: Final = 11
+FACILITY_INTERNET: Final = 12
+FACILITY_MEDIASERVER: Final = 13
+FACILITY_MSMQ: Final = 14
+FACILITY_SETUPAPI: Final = 15
+FACILITY_SCARD: Final = 16
+FACILITY_COMPLUS: Final = 17
+FACILITY_AAF: Final = 18
+FACILITY_URT: Final = 19
+FACILITY_ACS: Final = 20
+FACILITY_DPLAY: Final = 21
+FACILITY_UMI: Final = 22
+FACILITY_SXS: Final = 23
+FACILITY_WINDOWS_CE: Final = 24
+FACILITY_HTTP: Final = 25
+FACILITY_USERMODE_COMMONLOG: Final = 26
+FACILITY_WER: Final = 27
+FACILITY_USERMODE_FILTER_MANAGER: Final = 31
+FACILITY_BACKGROUNDCOPY: Final = 32
+FACILITY_CONFIGURATION: Final = 33
+FACILITY_WIA: Final = 33
+FACILITY_STATE_MANAGEMENT: Final = 34
+FACILITY_METADIRECTORY: Final = 35
+FACILITY_WINDOWSUPDATE: Final = 36
+FACILITY_DIRECTORYSERVICE: Final = 37
+FACILITY_GRAPHICS: Final = 38
+FACILITY_SHELL: Final = 39
+FACILITY_NAP: Final = 39
+FACILITY_TPM_SERVICES: Final = 40
+FACILITY_TPM_SOFTWARE: Final = 41
+FACILITY_UI: Final = 42
+FACILITY_XAML: Final = 43
+FACILITY_ACTION_QUEUE: Final = 44
+FACILITY_PLA: Final = 48
+FACILITY_WINDOWS_SETUP: Final = 48
+FACILITY_FVE: Final = 49
+FACILITY_FWP: Final = 50
+FACILITY_WINRM: Final = 51
+FACILITY_NDIS: Final = 52
+FACILITY_USERMODE_HYPERVISOR: Final = 53
+FACILITY_CMI: Final = 54
+FACILITY_USERMODE_VIRTUALIZATION: Final = 55
+FACILITY_USERMODE_VOLMGR: Final = 56
+FACILITY_BCD: Final = 57
+FACILITY_USERMODE_VHD: Final = 58
+FACILITY_USERMODE_HNS: Final = 59
+FACILITY_SDIAG: Final = 60
+FACILITY_WEBSERVICES: Final = 61
+FACILITY_WINPE: Final = 61
+FACILITY_WPN: Final = 62
+FACILITY_WINDOWS_STORE: Final = 63
+FACILITY_INPUT: Final = 64
+FACILITY_QUIC: Final = 65
+FACILITY_EAP: Final = 66
+FACILITY_IORING: Final = 70
+FACILITY_WINDOWS_DEFENDER: Final = 80
+FACILITY_OPC: Final = 81
+FACILITY_XPS: Final = 82
+FACILITY_MBN: Final = 84
+FACILITY_POWERSHELL: Final = 84
+FACILITY_RAS: Final = 83
+FACILITY_P2P_INT: Final = 98
+FACILITY_P2P: Final = 99
+FACILITY_DAF: Final = 100
+FACILITY_BLUETOOTH_ATT: Final = 101
+FACILITY_AUDIO: Final = 102
+FACILITY_STATEREPOSITORY: Final = 103
+FACILITY_VISUALCPP: Final = 109
+FACILITY_SCRIPT: Final = 112
+FACILITY_PARSE: Final = 113
+FACILITY_BLB: Final = 120
+FACILITY_BLB_CLI: Final = 121
+FACILITY_WSBAPP: Final = 122
+FACILITY_BLBUI: Final = 128
+FACILITY_USN: Final = 129
+FACILITY_USERMODE_VOLSNAP: Final = 130
+FACILITY_TIERING: Final = 131
+FACILITY_WSB_ONLINE: Final = 133
+FACILITY_ONLINE_ID: Final = 134
+FACILITY_DEVICE_UPDATE_AGENT: Final = 135
+FACILITY_DRVSERVICING: Final = 136
+FACILITY_DLS: Final = 153
+FACILITY_DELIVERY_OPTIMIZATION: Final = 208
+FACILITY_USERMODE_SPACES: Final = 231
+FACILITY_USER_MODE_SECURITY_CORE: Final = 232
+FACILITY_USERMODE_LICENSING: Final = 234
+FACILITY_SOS: Final = 160
+FACILITY_OCP_UPDATE_AGENT: Final = 173
+FACILITY_DEBUGGERS: Final = 176
+FACILITY_SPP: Final = 256
+FACILITY_RESTORE: Final = 256
+FACILITY_DMSERVER: Final = 256
+FACILITY_DEPLOYMENT_SERVICES_SERVER: Final = 257
+FACILITY_DEPLOYMENT_SERVICES_IMAGING: Final = 258
+FACILITY_DEPLOYMENT_SERVICES_MANAGEMENT: Final = 259
+FACILITY_DEPLOYMENT_SERVICES_UTIL: Final = 260
+FACILITY_DEPLOYMENT_SERVICES_BINLSVC: Final = 261
+FACILITY_DEPLOYMENT_SERVICES_PXE: Final = 263
+FACILITY_DEPLOYMENT_SERVICES_TFTP: Final = 264
+FACILITY_DEPLOYMENT_SERVICES_TRANSPORT_MANAGEMENT: Final = 272
+FACILITY_DEPLOYMENT_SERVICES_DRIVER_PROVISIONING: Final = 278
+FACILITY_DEPLOYMENT_SERVICES_MULTICAST_SERVER: Final = 289
+FACILITY_DEPLOYMENT_SERVICES_MULTICAST_CLIENT: Final = 290
+FACILITY_DEPLOYMENT_SERVICES_CONTENT_PROVIDER: Final = 293
+FACILITY_HSP_SERVICES: Final = 296
+FACILITY_HSP_SOFTWARE: Final = 297
+FACILITY_LINGUISTIC_SERVICES: Final = 305
+FACILITY_AUDIOSTREAMING: Final = 1094
+FACILITY_TTD: Final = 1490
+FACILITY_ACCELERATOR: Final = 1536
+FACILITY_WMAAECMA: Final = 1996
+FACILITY_DIRECTMUSIC: Final = 2168
+FACILITY_DIRECT3D10: Final = 2169
+FACILITY_DXGI: Final = 2170
+FACILITY_DXGI_DDI: Final = 2171
+FACILITY_DIRECT3D11: Final = 2172
+FACILITY_DIRECT3D11_DEBUG: Final = 2173
+FACILITY_DIRECT3D12: Final = 2174
+FACILITY_DIRECT3D12_DEBUG: Final = 2175
+FACILITY_DXCORE: Final = 2176
+FACILITY_PRESENTATION: Final = 2177
+FACILITY_LEAP: Final = 2184
+FACILITY_AUDCLNT: Final = 2185
+FACILITY_WINCODEC_DWRITE_DWM: Final = 2200
+FACILITY_WINML: Final = 2192
+FACILITY_DIRECT2D: Final = 2201
+FACILITY_DEFRAG: Final = 2304
+FACILITY_USERMODE_SDBUS: Final = 2305
+FACILITY_JSCRIPT: Final = 2306
+FACILITY_PIDGENX: Final = 2561
+FACILITY_EAS: Final = 85
+FACILITY_WEB: Final = 885
+FACILITY_WEB_SOCKET: Final = 886
+FACILITY_MOBILE: Final = 1793
+FACILITY_SQLITE: Final = 1967
+FACILITY_SERVICE_FABRIC: Final = 1968
+FACILITY_UTC: Final = 1989
+FACILITY_WEP: Final = 2049
+FACILITY_SYNCENGINE: Final = 2050
+FACILITY_XBOX: Final = 2339
+FACILITY_GAME: Final = 2340
+FACILITY_PIX: Final = 2748
+ERROR_SUCCESS: Final = 0
+NO_ERROR: Final = 0
+SEC_E_OK: Final = 0x00000000
+ERROR_INVALID_FUNCTION: Final = 1
+ERROR_FILE_NOT_FOUND: Final = 2
+ERROR_PATH_NOT_FOUND: Final = 3
+ERROR_TOO_MANY_OPEN_FILES: Final = 4
+ERROR_ACCESS_DENIED: Final = 5
+ERROR_INVALID_HANDLE: Final = 6
+ERROR_ARENA_TRASHED: Final = 7
+ERROR_NOT_ENOUGH_MEMORY: Final = 8
+ERROR_INVALID_BLOCK: Final = 9
+ERROR_BAD_ENVIRONMENT: Final = 10
+ERROR_BAD_FORMAT: Final = 11
+ERROR_INVALID_ACCESS: Final = 12
+ERROR_INVALID_DATA: Final = 13
+ERROR_OUTOFMEMORY: Final = 14
+ERROR_INVALID_DRIVE: Final = 15
+ERROR_CURRENT_DIRECTORY: Final = 16
+ERROR_NOT_SAME_DEVICE: Final = 17
+ERROR_NO_MORE_FILES: Final = 18
+ERROR_WRITE_PROTECT: Final = 19
+ERROR_BAD_UNIT: Final = 20
+ERROR_NOT_READY: Final = 21
+ERROR_BAD_COMMAND: Final = 22
+ERROR_CRC: Final = 23
+ERROR_BAD_LENGTH: Final = 24
+ERROR_SEEK: Final = 25
+ERROR_NOT_DOS_DISK: Final = 26
+ERROR_SECTOR_NOT_FOUND: Final = 27
+ERROR_OUT_OF_PAPER: Final = 28
+ERROR_WRITE_FAULT: Final = 29
+ERROR_READ_FAULT: Final = 30
+ERROR_GEN_FAILURE: Final = 31
+ERROR_SHARING_VIOLATION: Final = 32
+ERROR_LOCK_VIOLATION: Final = 33
+ERROR_WRONG_DISK: Final = 34
+ERROR_SHARING_BUFFER_EXCEEDED: Final = 36
+ERROR_HANDLE_EOF: Final = 38
+ERROR_HANDLE_DISK_FULL: Final = 39
+ERROR_NOT_SUPPORTED: Final = 50
+ERROR_REM_NOT_LIST: Final = 51
+ERROR_DUP_NAME: Final = 52
+ERROR_BAD_NETPATH: Final = 53
+ERROR_NETWORK_BUSY: Final = 54
+ERROR_DEV_NOT_EXIST: Final = 55
+ERROR_TOO_MANY_CMDS: Final = 56
+ERROR_ADAP_HDW_ERR: Final = 57
+ERROR_BAD_NET_RESP: Final = 58
+ERROR_UNEXP_NET_ERR: Final = 59
+ERROR_BAD_REM_ADAP: Final = 60
+ERROR_PRINTQ_FULL: Final = 61
+ERROR_NO_SPOOL_SPACE: Final = 62
+ERROR_PRINT_CANCELLED: Final = 63
+ERROR_NETNAME_DELETED: Final = 64
+ERROR_NETWORK_ACCESS_DENIED: Final = 65
+ERROR_BAD_DEV_TYPE: Final = 66
+ERROR_BAD_NET_NAME: Final = 67
+ERROR_TOO_MANY_NAMES: Final = 68
+ERROR_TOO_MANY_SESS: Final = 69
+ERROR_SHARING_PAUSED: Final = 70
+ERROR_REQ_NOT_ACCEP: Final = 71
+ERROR_REDIR_PAUSED: Final = 72
+ERROR_FILE_EXISTS: Final = 80
+ERROR_CANNOT_MAKE: Final = 82
+ERROR_FAIL_I24: Final = 83
+ERROR_OUT_OF_STRUCTURES: Final = 84
+ERROR_ALREADY_ASSIGNED: Final = 85
+ERROR_INVALID_PASSWORD: Final = 86
+ERROR_INVALID_PARAMETER: Final = 87
+ERROR_NET_WRITE_FAULT: Final = 88
+ERROR_NO_PROC_SLOTS: Final = 89
+ERROR_TOO_MANY_SEMAPHORES: Final = 100
+ERROR_EXCL_SEM_ALREADY_OWNED: Final = 101
+ERROR_SEM_IS_SET: Final = 102
+ERROR_TOO_MANY_SEM_REQUESTS: Final = 103
+ERROR_INVALID_AT_INTERRUPT_TIME: Final = 104
+ERROR_SEM_OWNER_DIED: Final = 105
+ERROR_SEM_USER_LIMIT: Final = 106
+ERROR_DISK_CHANGE: Final = 107
+ERROR_DRIVE_LOCKED: Final = 108
+ERROR_BROKEN_PIPE: Final = 109
+ERROR_OPEN_FAILED: Final = 110
+ERROR_BUFFER_OVERFLOW: Final = 111
+ERROR_DISK_FULL: Final = 112
+ERROR_NO_MORE_SEARCH_HANDLES: Final = 113
+ERROR_INVALID_TARGET_HANDLE: Final = 114
+ERROR_INVALID_CATEGORY: Final = 117
+ERROR_INVALID_VERIFY_SWITCH: Final = 118
+ERROR_BAD_DRIVER_LEVEL: Final = 119
+ERROR_CALL_NOT_IMPLEMENTED: Final = 120
+ERROR_SEM_TIMEOUT: Final = 121
+ERROR_INSUFFICIENT_BUFFER: Final = 122
+ERROR_INVALID_NAME: Final = 123
+ERROR_INVALID_LEVEL: Final = 124
+ERROR_NO_VOLUME_LABEL: Final = 125
+ERROR_MOD_NOT_FOUND: Final = 126
+ERROR_PROC_NOT_FOUND: Final = 127
+ERROR_WAIT_NO_CHILDREN: Final = 128
+ERROR_CHILD_NOT_COMPLETE: Final = 129
+ERROR_DIRECT_ACCESS_HANDLE: Final = 130
+ERROR_NEGATIVE_SEEK: Final = 131
+ERROR_SEEK_ON_DEVICE: Final = 132
+ERROR_IS_JOIN_TARGET: Final = 133
+ERROR_IS_JOINED: Final = 134
+ERROR_IS_SUBSTED: Final = 135
+ERROR_NOT_JOINED: Final = 136
+ERROR_NOT_SUBSTED: Final = 137
+ERROR_JOIN_TO_JOIN: Final = 138
+ERROR_SUBST_TO_SUBST: Final = 139
+ERROR_JOIN_TO_SUBST: Final = 140
+ERROR_SUBST_TO_JOIN: Final = 141
+ERROR_BUSY_DRIVE: Final = 142
+ERROR_SAME_DRIVE: Final = 143
+ERROR_DIR_NOT_ROOT: Final = 144
+ERROR_DIR_NOT_EMPTY: Final = 145
+ERROR_IS_SUBST_PATH: Final = 146
+ERROR_IS_JOIN_PATH: Final = 147
+ERROR_PATH_BUSY: Final = 148
+ERROR_IS_SUBST_TARGET: Final = 149
+ERROR_SYSTEM_TRACE: Final = 150
+ERROR_INVALID_EVENT_COUNT: Final = 151
+ERROR_TOO_MANY_MUXWAITERS: Final = 152
+ERROR_INVALID_LIST_FORMAT: Final = 153
+ERROR_LABEL_TOO_LONG: Final = 154
+ERROR_TOO_MANY_TCBS: Final = 155
+ERROR_SIGNAL_REFUSED: Final = 156
+ERROR_DISCARDED: Final = 157
+ERROR_NOT_LOCKED: Final = 158
+ERROR_BAD_THREADID_ADDR: Final = 159
+ERROR_BAD_ARGUMENTS: Final = 160
+ERROR_BAD_PATHNAME: Final = 161
+ERROR_SIGNAL_PENDING: Final = 162
+ERROR_MAX_THRDS_REACHED: Final = 164
+ERROR_LOCK_FAILED: Final = 167
+ERROR_BUSY: Final = 170
+ERROR_DEVICE_SUPPORT_IN_PROGRESS: Final = 171
+ERROR_CANCEL_VIOLATION: Final = 173
+ERROR_ATOMIC_LOCKS_NOT_SUPPORTED: Final = 174
+ERROR_INVALID_SEGMENT_NUMBER: Final = 180
+ERROR_INVALID_ORDINAL: Final = 182
+ERROR_ALREADY_EXISTS: Final = 183
+ERROR_INVALID_FLAG_NUMBER: Final = 186
+ERROR_SEM_NOT_FOUND: Final = 187
+ERROR_INVALID_STARTING_CODESEG: Final = 188
+ERROR_INVALID_STACKSEG: Final = 189
+ERROR_INVALID_MODULETYPE: Final = 190
+ERROR_INVALID_EXE_SIGNATURE: Final = 191
+ERROR_EXE_MARKED_INVALID: Final = 192
+ERROR_BAD_EXE_FORMAT: Final = 193
+ERROR_ITERATED_DATA_EXCEEDS_64k: Final = 194
+ERROR_INVALID_MINALLOCSIZE: Final = 195
+ERROR_DYNLINK_FROM_INVALID_RING: Final = 196
+ERROR_IOPL_NOT_ENABLED: Final = 197
+ERROR_INVALID_SEGDPL: Final = 198
+ERROR_AUTODATASEG_EXCEEDS_64k: Final = 199
+ERROR_RING2SEG_MUST_BE_MOVABLE: Final = 200
+ERROR_RELOC_CHAIN_XEEDS_SEGLIM: Final = 201
+ERROR_INFLOOP_IN_RELOC_CHAIN: Final = 202
+ERROR_ENVVAR_NOT_FOUND: Final = 203
+ERROR_NO_SIGNAL_SENT: Final = 205
+ERROR_FILENAME_EXCED_RANGE: Final = 206
+ERROR_RING2_STACK_IN_USE: Final = 207
+ERROR_META_EXPANSION_TOO_LONG: Final = 208
+ERROR_INVALID_SIGNAL_NUMBER: Final = 209
+ERROR_THREAD_1_INACTIVE: Final = 210
+ERROR_LOCKED: Final = 212
+ERROR_TOO_MANY_MODULES: Final = 214
+ERROR_NESTING_NOT_ALLOWED: Final = 215
+ERROR_EXE_MACHINE_TYPE_MISMATCH: Final = 216
+ERROR_EXE_CANNOT_MODIFY_SIGNED_BINARY: Final = 217
+ERROR_EXE_CANNOT_MODIFY_STRONG_SIGNED_BINARY: Final = 218
+ERROR_FILE_CHECKED_OUT: Final = 220
+ERROR_CHECKOUT_REQUIRED: Final = 221
+ERROR_BAD_FILE_TYPE: Final = 222
+ERROR_FILE_TOO_LARGE: Final = 223
+ERROR_FORMS_AUTH_REQUIRED: Final = 224
+ERROR_VIRUS_INFECTED: Final = 225
+ERROR_VIRUS_DELETED: Final = 226
+ERROR_PIPE_LOCAL: Final = 229
+ERROR_BAD_PIPE: Final = 230
+ERROR_PIPE_BUSY: Final = 231
+ERROR_NO_DATA: Final = 232
+ERROR_PIPE_NOT_CONNECTED: Final = 233
+ERROR_MORE_DATA: Final = 234
+ERROR_NO_WORK_DONE: Final = 235
+ERROR_VC_DISCONNECTED: Final = 240
+ERROR_INVALID_EA_NAME: Final = 254
+ERROR_EA_LIST_INCONSISTENT: Final = 255
+WAIT_TIMEOUT: Final = 258
+ERROR_NO_MORE_ITEMS: Final = 259
+ERROR_CANNOT_COPY: Final = 266
+ERROR_DIRECTORY: Final = 267
+ERROR_EAS_DIDNT_FIT: Final = 275
+ERROR_EA_FILE_CORRUPT: Final = 276
+ERROR_EA_TABLE_FULL: Final = 277
+ERROR_INVALID_EA_HANDLE: Final = 278
+ERROR_EAS_NOT_SUPPORTED: Final = 282
+ERROR_NOT_OWNER: Final = 288
+ERROR_TOO_MANY_POSTS: Final = 298
+ERROR_PARTIAL_COPY: Final = 299
+ERROR_OPLOCK_NOT_GRANTED: Final = 300
+ERROR_INVALID_OPLOCK_PROTOCOL: Final = 301
+ERROR_DISK_TOO_FRAGMENTED: Final = 302
+ERROR_DELETE_PENDING: Final = 303
+ERROR_INCOMPATIBLE_WITH_GLOBAL_SHORT_NAME_REGISTRY_SETTING: Final = 304
+ERROR_SHORT_NAMES_NOT_ENABLED_ON_VOLUME: Final = 305
+ERROR_SECURITY_STREAM_IS_INCONSISTENT: Final = 306
+ERROR_INVALID_LOCK_RANGE: Final = 307
+ERROR_IMAGE_SUBSYSTEM_NOT_PRESENT: Final = 308
+ERROR_NOTIFICATION_GUID_ALREADY_DEFINED: Final = 309
+ERROR_INVALID_EXCEPTION_HANDLER: Final = 310
+ERROR_DUPLICATE_PRIVILEGES: Final = 311
+ERROR_NO_RANGES_PROCESSED: Final = 312
+ERROR_NOT_ALLOWED_ON_SYSTEM_FILE: Final = 313
+ERROR_DISK_RESOURCES_EXHAUSTED: Final = 314
+ERROR_INVALID_TOKEN: Final = 315
+ERROR_DEVICE_FEATURE_NOT_SUPPORTED: Final = 316
+ERROR_MR_MID_NOT_FOUND: Final = 317
+ERROR_SCOPE_NOT_FOUND: Final = 318
+ERROR_UNDEFINED_SCOPE: Final = 319
+ERROR_INVALID_CAP: Final = 320
+ERROR_DEVICE_UNREACHABLE: Final = 321
+ERROR_DEVICE_NO_RESOURCES: Final = 322
+ERROR_DATA_CHECKSUM_ERROR: Final = 323
+ERROR_INTERMIXED_KERNEL_EA_OPERATION: Final = 324
+ERROR_FILE_LEVEL_TRIM_NOT_SUPPORTED: Final = 326
+ERROR_OFFSET_ALIGNMENT_VIOLATION: Final = 327
+ERROR_INVALID_FIELD_IN_PARAMETER_LIST: Final = 328
+ERROR_OPERATION_IN_PROGRESS: Final = 329
+ERROR_BAD_DEVICE_PATH: Final = 330
+ERROR_TOO_MANY_DESCRIPTORS: Final = 331
+ERROR_SCRUB_DATA_DISABLED: Final = 332
+ERROR_NOT_REDUNDANT_STORAGE: Final = 333 +ERROR_RESIDENT_FILE_NOT_SUPPORTED: Final = 334 +ERROR_COMPRESSED_FILE_NOT_SUPPORTED: Final = 335 +ERROR_DIRECTORY_NOT_SUPPORTED: Final = 336 +ERROR_NOT_READ_FROM_COPY: Final = 337 +ERROR_FT_WRITE_FAILURE: Final = 338 +ERROR_FT_DI_SCAN_REQUIRED: Final = 339 +ERROR_INVALID_KERNEL_INFO_VERSION: Final = 340 +ERROR_INVALID_PEP_INFO_VERSION: Final = 341 +ERROR_OBJECT_NOT_EXTERNALLY_BACKED: Final = 342 +ERROR_EXTERNAL_BACKING_PROVIDER_UNKNOWN: Final = 343 +ERROR_COMPRESSION_NOT_BENEFICIAL: Final = 344 +ERROR_STORAGE_TOPOLOGY_ID_MISMATCH: Final = 345 +ERROR_BLOCKED_BY_PARENTAL_CONTROLS: Final = 346 +ERROR_BLOCK_TOO_MANY_REFERENCES: Final = 347 +ERROR_MARKED_TO_DISALLOW_WRITES: Final = 348 +ERROR_ENCLAVE_FAILURE: Final = 349 +ERROR_FAIL_NOACTION_REBOOT: Final = 350 +ERROR_FAIL_SHUTDOWN: Final = 351 +ERROR_FAIL_RESTART: Final = 352 +ERROR_MAX_SESSIONS_REACHED: Final = 353 +ERROR_NETWORK_ACCESS_DENIED_EDP: Final = 354 +ERROR_DEVICE_HINT_NAME_BUFFER_TOO_SMALL: Final = 355 +ERROR_EDP_POLICY_DENIES_OPERATION: Final = 356 +ERROR_EDP_DPL_POLICY_CANT_BE_SATISFIED: Final = 357 +ERROR_CLOUD_FILE_SYNC_ROOT_METADATA_CORRUPT: Final = 358 +ERROR_DEVICE_IN_MAINTENANCE: Final = 359 +ERROR_NOT_SUPPORTED_ON_DAX: Final = 360 +ERROR_DAX_MAPPING_EXISTS: Final = 361 +ERROR_CLOUD_FILE_PROVIDER_NOT_RUNNING: Final = 362 +ERROR_CLOUD_FILE_METADATA_CORRUPT: Final = 363 +ERROR_CLOUD_FILE_METADATA_TOO_LARGE: Final = 364 +ERROR_CLOUD_FILE_PROPERTY_BLOB_TOO_LARGE: Final = 365 +ERROR_CLOUD_FILE_PROPERTY_BLOB_CHECKSUM_MISMATCH: Final = 366 +ERROR_CHILD_PROCESS_BLOCKED: Final = 367 +ERROR_STORAGE_LOST_DATA_PERSISTENCE: Final = 368 +ERROR_FILE_SYSTEM_VIRTUALIZATION_UNAVAILABLE: Final = 369 +ERROR_FILE_SYSTEM_VIRTUALIZATION_METADATA_CORRUPT: Final = 370 +ERROR_FILE_SYSTEM_VIRTUALIZATION_BUSY: Final = 371 +ERROR_FILE_SYSTEM_VIRTUALIZATION_PROVIDER_UNKNOWN: Final = 372 +ERROR_GDI_HANDLE_LEAK: Final = 373 +ERROR_CLOUD_FILE_TOO_MANY_PROPERTY_BLOBS: Final = 374 +ERROR_CLOUD_FILE_PROPERTY_VERSION_NOT_SUPPORTED: Final = 375 +ERROR_NOT_A_CLOUD_FILE: Final = 376 +ERROR_CLOUD_FILE_NOT_IN_SYNC: Final = 377 +ERROR_CLOUD_FILE_ALREADY_CONNECTED: Final = 378 +ERROR_CLOUD_FILE_NOT_SUPPORTED: Final = 379 +ERROR_CLOUD_FILE_INVALID_REQUEST: Final = 380 +ERROR_CLOUD_FILE_READ_ONLY_VOLUME: Final = 381 +ERROR_CLOUD_FILE_CONNECTED_PROVIDER_ONLY: Final = 382 +ERROR_CLOUD_FILE_VALIDATION_FAILED: Final = 383 +ERROR_SMB1_NOT_AVAILABLE: Final = 384 +ERROR_FILE_SYSTEM_VIRTUALIZATION_INVALID_OPERATION: Final = 385 +ERROR_CLOUD_FILE_AUTHENTICATION_FAILED: Final = 386 +ERROR_CLOUD_FILE_INSUFFICIENT_RESOURCES: Final = 387 +ERROR_CLOUD_FILE_NETWORK_UNAVAILABLE: Final = 388 +ERROR_CLOUD_FILE_UNSUCCESSFUL: Final = 389 +ERROR_CLOUD_FILE_NOT_UNDER_SYNC_ROOT: Final = 390 +ERROR_CLOUD_FILE_IN_USE: Final = 391 +ERROR_CLOUD_FILE_PINNED: Final = 392 +ERROR_CLOUD_FILE_REQUEST_ABORTED: Final = 393 +ERROR_CLOUD_FILE_PROPERTY_CORRUPT: Final = 394 +ERROR_CLOUD_FILE_ACCESS_DENIED: Final = 395 +ERROR_CLOUD_FILE_INCOMPATIBLE_HARDLINKS: Final = 396 +ERROR_CLOUD_FILE_PROPERTY_LOCK_CONFLICT: Final = 397 +ERROR_CLOUD_FILE_REQUEST_CANCELED: Final = 398 +ERROR_EXTERNAL_SYSKEY_NOT_SUPPORTED: Final = 399 +ERROR_THREAD_MODE_ALREADY_BACKGROUND: Final = 400 +ERROR_THREAD_MODE_NOT_BACKGROUND: Final = 401 +ERROR_PROCESS_MODE_ALREADY_BACKGROUND: Final = 402 +ERROR_PROCESS_MODE_NOT_BACKGROUND: Final = 403 +ERROR_CLOUD_FILE_PROVIDER_TERMINATED: Final = 404 +ERROR_NOT_A_CLOUD_SYNC_ROOT: Final = 405 +ERROR_FILE_PROTECTED_UNDER_DPL: Final = 406 
+ERROR_VOLUME_NOT_CLUSTER_ALIGNED: Final = 407 +ERROR_NO_PHYSICALLY_ALIGNED_FREE_SPACE_FOUND: Final = 408 +ERROR_APPX_FILE_NOT_ENCRYPTED: Final = 409 +ERROR_RWRAW_ENCRYPTED_FILE_NOT_ENCRYPTED: Final = 410 +ERROR_RWRAW_ENCRYPTED_INVALID_EDATAINFO_FILEOFFSET: Final = 411 +ERROR_RWRAW_ENCRYPTED_INVALID_EDATAINFO_FILERANGE: Final = 412 +ERROR_RWRAW_ENCRYPTED_INVALID_EDATAINFO_PARAMETER: Final = 413 +ERROR_LINUX_SUBSYSTEM_NOT_PRESENT: Final = 414 +ERROR_FT_READ_FAILURE: Final = 415 +ERROR_STORAGE_RESERVE_ID_INVALID: Final = 416 +ERROR_STORAGE_RESERVE_DOES_NOT_EXIST: Final = 417 +ERROR_STORAGE_RESERVE_ALREADY_EXISTS: Final = 418 +ERROR_STORAGE_RESERVE_NOT_EMPTY: Final = 419 +ERROR_NOT_A_DAX_VOLUME: Final = 420 +ERROR_NOT_DAX_MAPPABLE: Final = 421 +ERROR_TIME_SENSITIVE_THREAD: Final = 422 +ERROR_DPL_NOT_SUPPORTED_FOR_USER: Final = 423 +ERROR_CASE_DIFFERING_NAMES_IN_DIR: Final = 424 +ERROR_FILE_NOT_SUPPORTED: Final = 425 +ERROR_CLOUD_FILE_REQUEST_TIMEOUT: Final = 426 +ERROR_NO_TASK_QUEUE: Final = 427 +ERROR_SRC_SRV_DLL_LOAD_FAILED: Final = 428 +ERROR_NOT_SUPPORTED_WITH_BTT: Final = 429 +ERROR_ENCRYPTION_DISABLED: Final = 430 +ERROR_ENCRYPTING_METADATA_DISALLOWED: Final = 431 +ERROR_CANT_CLEAR_ENCRYPTION_FLAG: Final = 432 +ERROR_NO_SUCH_DEVICE: Final = 433 +ERROR_CLOUD_FILE_DEHYDRATION_DISALLOWED: Final = 434 +ERROR_FILE_SNAP_IN_PROGRESS: Final = 435 +ERROR_FILE_SNAP_USER_SECTION_NOT_SUPPORTED: Final = 436 +ERROR_FILE_SNAP_MODIFY_NOT_SUPPORTED: Final = 437 +ERROR_FILE_SNAP_IO_NOT_COORDINATED: Final = 438 +ERROR_FILE_SNAP_UNEXPECTED_ERROR: Final = 439 +ERROR_FILE_SNAP_INVALID_PARAMETER: Final = 440 +ERROR_UNSATISFIED_DEPENDENCIES: Final = 441 +ERROR_CASE_SENSITIVE_PATH: Final = 442 +ERROR_UNEXPECTED_NTCACHEMANAGER_ERROR: Final = 443 +ERROR_LINUX_SUBSYSTEM_UPDATE_REQUIRED: Final = 444 +ERROR_DLP_POLICY_WARNS_AGAINST_OPERATION: Final = 445 +ERROR_DLP_POLICY_DENIES_OPERATION: Final = 446 +ERROR_SECURITY_DENIES_OPERATION: Final = 447 +ERROR_UNTRUSTED_MOUNT_POINT: Final = 448 +ERROR_DLP_POLICY_SILENTLY_FAIL: Final = 449 +ERROR_CAPAUTHZ_NOT_DEVUNLOCKED: Final = 450 +ERROR_CAPAUTHZ_CHANGE_TYPE: Final = 451 +ERROR_CAPAUTHZ_NOT_PROVISIONED: Final = 452 +ERROR_CAPAUTHZ_NOT_AUTHORIZED: Final = 453 +ERROR_CAPAUTHZ_NO_POLICY: Final = 454 +ERROR_CAPAUTHZ_DB_CORRUPTED: Final = 455 +ERROR_CAPAUTHZ_SCCD_INVALID_CATALOG: Final = 456 +ERROR_CAPAUTHZ_SCCD_NO_AUTH_ENTITY: Final = 457 +ERROR_CAPAUTHZ_SCCD_PARSE_ERROR: Final = 458 +ERROR_CAPAUTHZ_SCCD_DEV_MODE_REQUIRED: Final = 459 +ERROR_CAPAUTHZ_SCCD_NO_CAPABILITY_MATCH: Final = 460 +ERROR_CIMFS_IMAGE_CORRUPT: Final = 470 +ERROR_CIMFS_IMAGE_VERSION_NOT_SUPPORTED: Final = 471 +ERROR_STORAGE_STACK_ACCESS_DENIED: Final = 472 +ERROR_INSUFFICIENT_VIRTUAL_ADDR_RESOURCES: Final = 473 +ERROR_INDEX_OUT_OF_BOUNDS: Final = 474 +ERROR_CLOUD_FILE_US_MESSAGE_TIMEOUT: Final = 475 +ERROR_NOT_A_DEV_VOLUME: Final = 476 +ERROR_FS_GUID_MISMATCH: Final = 477 +ERROR_CANT_ATTACH_TO_DEV_VOLUME: Final = 478 +ERROR_INVALID_CONFIG_VALUE: Final = 479 +ERROR_PNP_QUERY_REMOVE_DEVICE_TIMEOUT: Final = 480 +ERROR_PNP_QUERY_REMOVE_RELATED_DEVICE_TIMEOUT: Final = 481 +ERROR_PNP_QUERY_REMOVE_UNRELATED_DEVICE_TIMEOUT: Final = 482 +ERROR_DEVICE_HARDWARE_ERROR: Final = 483 +ERROR_INVALID_ADDRESS: Final = 487 +ERROR_HAS_SYSTEM_CRITICAL_FILES: Final = 488 +ERROR_ENCRYPTED_FILE_NOT_SUPPORTED: Final = 489 +ERROR_SPARSE_FILE_NOT_SUPPORTED: Final = 490 +ERROR_PAGEFILE_NOT_SUPPORTED: Final = 491 +ERROR_VOLUME_NOT_SUPPORTED: Final = 492 +ERROR_NOT_SUPPORTED_WITH_BYPASSIO: Final = 493 
+ERROR_NO_BYPASSIO_DRIVER_SUPPORT: Final = 494
+ERROR_NOT_SUPPORTED_WITH_ENCRYPTION: Final = 495
+ERROR_NOT_SUPPORTED_WITH_COMPRESSION: Final = 496
+ERROR_NOT_SUPPORTED_WITH_REPLICATION: Final = 497
+ERROR_NOT_SUPPORTED_WITH_DEDUPLICATION: Final = 498
+ERROR_NOT_SUPPORTED_WITH_AUDITING: Final = 499
+ERROR_USER_PROFILE_LOAD: Final = 500
+ERROR_SESSION_KEY_TOO_SHORT: Final = 501
+ERROR_ACCESS_DENIED_APPDATA: Final = 502
+ERROR_NOT_SUPPORTED_WITH_MONITORING: Final = 503
+ERROR_NOT_SUPPORTED_WITH_SNAPSHOT: Final = 504
+ERROR_NOT_SUPPORTED_WITH_VIRTUALIZATION: Final = 505
+ERROR_BYPASSIO_FLT_NOT_SUPPORTED: Final = 506
+ERROR_DEVICE_RESET_REQUIRED: Final = 507
+ERROR_VOLUME_WRITE_ACCESS_DENIED: Final = 508
+ERROR_NOT_SUPPORTED_WITH_CACHED_HANDLE: Final = 509
+ERROR_FS_METADATA_INCONSISTENT: Final = 510
+ERROR_BLOCK_WEAK_REFERENCE_INVALID: Final = 511
+ERROR_BLOCK_SOURCE_WEAK_REFERENCE_INVALID: Final = 512
+ERROR_BLOCK_TARGET_WEAK_REFERENCE_INVALID: Final = 513
+ERROR_BLOCK_SHARED: Final = 514
+ERROR_VOLUME_UPGRADE_NOT_NEEDED: Final = 515
+ERROR_VOLUME_UPGRADE_PENDING: Final = 516
+ERROR_VOLUME_UPGRADE_DISABLED: Final = 517
+ERROR_VOLUME_UPGRADE_DISABLED_TILL_OS_DOWNGRADE_EXPIRED: Final = 518
+ERROR_ARITHMETIC_OVERFLOW: Final = 534
+ERROR_PIPE_CONNECTED: Final = 535
+ERROR_PIPE_LISTENING: Final = 536
+ERROR_VERIFIER_STOP: Final = 537
+ERROR_ABIOS_ERROR: Final = 538
+ERROR_WX86_WARNING: Final = 539
+ERROR_WX86_ERROR: Final = 540
+ERROR_TIMER_NOT_CANCELED: Final = 541
+ERROR_UNWIND: Final = 542
+ERROR_BAD_STACK: Final = 543
+ERROR_INVALID_UNWIND_TARGET: Final = 544
+ERROR_INVALID_PORT_ATTRIBUTES: Final = 545
+ERROR_PORT_MESSAGE_TOO_LONG: Final = 546
+ERROR_INVALID_QUOTA_LOWER: Final = 547
+ERROR_DEVICE_ALREADY_ATTACHED: Final = 548
+ERROR_INSTRUCTION_MISALIGNMENT: Final = 549
+ERROR_PROFILING_NOT_STARTED: Final = 550
+ERROR_PROFILING_NOT_STOPPED: Final = 551
+ERROR_COULD_NOT_INTERPRET: Final = 552
+ERROR_PROFILING_AT_LIMIT: Final = 553
+ERROR_CANT_WAIT: Final = 554
+ERROR_CANT_TERMINATE_SELF: Final = 555
+ERROR_UNEXPECTED_MM_CREATE_ERR: Final = 556
+ERROR_UNEXPECTED_MM_MAP_ERROR: Final = 557
+ERROR_UNEXPECTED_MM_EXTEND_ERR: Final = 558
+ERROR_BAD_FUNCTION_TABLE: Final = 559
+ERROR_NO_GUID_TRANSLATION: Final = 560
+ERROR_INVALID_LDT_SIZE: Final = 561
+ERROR_INVALID_LDT_OFFSET: Final = 563
+ERROR_INVALID_LDT_DESCRIPTOR: Final = 564
+ERROR_TOO_MANY_THREADS: Final = 565
+ERROR_THREAD_NOT_IN_PROCESS: Final = 566
+ERROR_PAGEFILE_QUOTA_EXCEEDED: Final = 567
+ERROR_LOGON_SERVER_CONFLICT: Final = 568
+ERROR_SYNCHRONIZATION_REQUIRED: Final = 569
+ERROR_NET_OPEN_FAILED: Final = 570
+ERROR_IO_PRIVILEGE_FAILED: Final = 571
+ERROR_CONTROL_C_EXIT: Final = 572
+ERROR_MISSING_SYSTEMFILE: Final = 573
+ERROR_UNHANDLED_EXCEPTION: Final = 574
+ERROR_APP_INIT_FAILURE: Final = 575
+ERROR_PAGEFILE_CREATE_FAILED: Final = 576
+ERROR_INVALID_IMAGE_HASH: Final = 577
+ERROR_NO_PAGEFILE: Final = 578
+ERROR_ILLEGAL_FLOAT_CONTEXT: Final = 579
+ERROR_NO_EVENT_PAIR: Final = 580
+ERROR_DOMAIN_CTRLR_CONFIG_ERROR: Final = 581
+ERROR_ILLEGAL_CHARACTER: Final = 582
+ERROR_UNDEFINED_CHARACTER: Final = 583
+ERROR_FLOPPY_VOLUME: Final = 584
+ERROR_BIOS_FAILED_TO_CONNECT_INTERRUPT: Final = 585
+ERROR_BACKUP_CONTROLLER: Final = 586
+ERROR_MUTANT_LIMIT_EXCEEDED: Final = 587
+ERROR_FS_DRIVER_REQUIRED: Final = 588
+ERROR_CANNOT_LOAD_REGISTRY_FILE: Final = 589
+ERROR_DEBUG_ATTACH_FAILED: Final = 590
+ERROR_SYSTEM_PROCESS_TERMINATED: Final = 591
+ERROR_DATA_NOT_ACCEPTED: Final = 592
+ERROR_VDM_HARD_ERROR: Final = 593
+ERROR_DRIVER_CANCEL_TIMEOUT: Final = 594
+ERROR_REPLY_MESSAGE_MISMATCH: Final = 595
+ERROR_LOST_WRITEBEHIND_DATA: Final = 596
+ERROR_CLIENT_SERVER_PARAMETERS_INVALID: Final = 597
+ERROR_NOT_TINY_STREAM: Final = 598
+ERROR_STACK_OVERFLOW_READ: Final = 599
+ERROR_CONVERT_TO_LARGE: Final = 600
+ERROR_FOUND_OUT_OF_SCOPE: Final = 601
+ERROR_ALLOCATE_BUCKET: Final = 602
+ERROR_MARSHALL_OVERFLOW: Final = 603
+ERROR_INVALID_VARIANT: Final = 604
+ERROR_BAD_COMPRESSION_BUFFER: Final = 605
+ERROR_AUDIT_FAILED: Final = 606
+ERROR_TIMER_RESOLUTION_NOT_SET: Final = 607
+ERROR_INSUFFICIENT_LOGON_INFO: Final = 608
+ERROR_BAD_DLL_ENTRYPOINT: Final = 609
+ERROR_BAD_SERVICE_ENTRYPOINT: Final = 610
+ERROR_IP_ADDRESS_CONFLICT1: Final = 611
+ERROR_IP_ADDRESS_CONFLICT2: Final = 612
+ERROR_REGISTRY_QUOTA_LIMIT: Final = 613
+ERROR_NO_CALLBACK_ACTIVE: Final = 614
+ERROR_PWD_TOO_SHORT: Final = 615
+ERROR_PWD_TOO_RECENT: Final = 616
+ERROR_PWD_HISTORY_CONFLICT: Final = 617
+ERROR_UNSUPPORTED_COMPRESSION: Final = 618
+ERROR_INVALID_HW_PROFILE: Final = 619
+ERROR_INVALID_PLUGPLAY_DEVICE_PATH: Final = 620
+ERROR_QUOTA_LIST_INCONSISTENT: Final = 621
+ERROR_EVALUATION_EXPIRATION: Final = 622
+ERROR_ILLEGAL_DLL_RELOCATION: Final = 623
+ERROR_DLL_INIT_FAILED_LOGOFF: Final = 624
+ERROR_VALIDATE_CONTINUE: Final = 625
+ERROR_NO_MORE_MATCHES: Final = 626
+ERROR_RANGE_LIST_CONFLICT: Final = 627
+ERROR_SERVER_SID_MISMATCH: Final = 628
+ERROR_CANT_ENABLE_DENY_ONLY: Final = 629
+ERROR_FLOAT_MULTIPLE_FAULTS: Final = 630
+ERROR_FLOAT_MULTIPLE_TRAPS: Final = 631
+ERROR_NOINTERFACE: Final = 632
+ERROR_DRIVER_FAILED_SLEEP: Final = 633
+ERROR_CORRUPT_SYSTEM_FILE: Final = 634
+ERROR_COMMITMENT_MINIMUM: Final = 635
+ERROR_PNP_RESTART_ENUMERATION: Final = 636
+ERROR_SYSTEM_IMAGE_BAD_SIGNATURE: Final = 637
+ERROR_PNP_REBOOT_REQUIRED: Final = 638
+ERROR_INSUFFICIENT_POWER: Final = 639
+ERROR_MULTIPLE_FAULT_VIOLATION: Final = 640
+ERROR_SYSTEM_SHUTDOWN: Final = 641
+ERROR_PORT_NOT_SET: Final = 642
+ERROR_DS_VERSION_CHECK_FAILURE: Final = 643
+ERROR_RANGE_NOT_FOUND: Final = 644
+ERROR_NOT_SAFE_MODE_DRIVER: Final = 646
+ERROR_FAILED_DRIVER_ENTRY: Final = 647
+ERROR_DEVICE_ENUMERATION_ERROR: Final = 648
+ERROR_MOUNT_POINT_NOT_RESOLVED: Final = 649
+ERROR_INVALID_DEVICE_OBJECT_PARAMETER: Final = 650
+ERROR_MCA_OCCURED: Final = 651
+ERROR_DRIVER_DATABASE_ERROR: Final = 652
+ERROR_SYSTEM_HIVE_TOO_LARGE: Final = 653
+ERROR_DRIVER_FAILED_PRIOR_UNLOAD: Final = 654
+ERROR_VOLSNAP_PREPARE_HIBERNATE: Final = 655
+ERROR_HIBERNATION_FAILURE: Final = 656
+ERROR_PWD_TOO_LONG: Final = 657
+ERROR_FILE_SYSTEM_LIMITATION: Final = 665
+ERROR_ASSERTION_FAILURE: Final = 668
+ERROR_ACPI_ERROR: Final = 669
+ERROR_WOW_ASSERTION: Final = 670
+ERROR_PNP_BAD_MPS_TABLE: Final = 671
+ERROR_PNP_TRANSLATION_FAILED: Final = 672
+ERROR_PNP_IRQ_TRANSLATION_FAILED: Final = 673
+ERROR_PNP_INVALID_ID: Final = 674
+ERROR_WAKE_SYSTEM_DEBUGGER: Final = 675
+ERROR_HANDLES_CLOSED: Final = 676
+ERROR_EXTRANEOUS_INFORMATION: Final = 677
+ERROR_RXACT_COMMIT_NECESSARY: Final = 678
+ERROR_MEDIA_CHECK: Final = 679
+ERROR_GUID_SUBSTITUTION_MADE: Final = 680
+ERROR_STOPPED_ON_SYMLINK: Final = 681
+ERROR_LONGJUMP: Final = 682
+ERROR_PLUGPLAY_QUERY_VETOED: Final = 683
+ERROR_UNWIND_CONSOLIDATE: Final = 684
+ERROR_REGISTRY_HIVE_RECOVERED: Final = 685
+ERROR_DLL_MIGHT_BE_INSECURE: Final = 686
+ERROR_DLL_MIGHT_BE_INCOMPATIBLE: Final = 687
+ERROR_DBG_EXCEPTION_NOT_HANDLED: Final = 688
+ERROR_DBG_REPLY_LATER: Final = 689
+ERROR_DBG_UNABLE_TO_PROVIDE_HANDLE: Final = 690
+ERROR_DBG_TERMINATE_THREAD: Final = 691
+ERROR_DBG_TERMINATE_PROCESS: Final = 692 +ERROR_DBG_CONTROL_C: Final = 693 +ERROR_DBG_PRINTEXCEPTION_C: Final = 694 +ERROR_DBG_RIPEXCEPTION: Final = 695 +ERROR_DBG_CONTROL_BREAK: Final = 696 +ERROR_DBG_COMMAND_EXCEPTION: Final = 697 +ERROR_OBJECT_NAME_EXISTS: Final = 698 +ERROR_THREAD_WAS_SUSPENDED: Final = 699 +ERROR_IMAGE_NOT_AT_BASE: Final = 700 +ERROR_RXACT_STATE_CREATED: Final = 701 +ERROR_SEGMENT_NOTIFICATION: Final = 702 +ERROR_BAD_CURRENT_DIRECTORY: Final = 703 +ERROR_FT_READ_RECOVERY_FROM_BACKUP: Final = 704 +ERROR_FT_WRITE_RECOVERY: Final = 705 +ERROR_IMAGE_MACHINE_TYPE_MISMATCH: Final = 706 +ERROR_RECEIVE_PARTIAL: Final = 707 +ERROR_RECEIVE_EXPEDITED: Final = 708 +ERROR_RECEIVE_PARTIAL_EXPEDITED: Final = 709 +ERROR_EVENT_DONE: Final = 710 +ERROR_EVENT_PENDING: Final = 711 +ERROR_CHECKING_FILE_SYSTEM: Final = 712 +ERROR_FATAL_APP_EXIT: Final = 713 +ERROR_PREDEFINED_HANDLE: Final = 714 +ERROR_WAS_UNLOCKED: Final = 715 +ERROR_SERVICE_NOTIFICATION: Final = 716 +ERROR_WAS_LOCKED: Final = 717 +ERROR_LOG_HARD_ERROR: Final = 718 +ERROR_ALREADY_WIN32: Final = 719 +ERROR_IMAGE_MACHINE_TYPE_MISMATCH_EXE: Final = 720 +ERROR_NO_YIELD_PERFORMED: Final = 721 +ERROR_TIMER_RESUME_IGNORED: Final = 722 +ERROR_ARBITRATION_UNHANDLED: Final = 723 +ERROR_CARDBUS_NOT_SUPPORTED: Final = 724 +ERROR_MP_PROCESSOR_MISMATCH: Final = 725 +ERROR_HIBERNATED: Final = 726 +ERROR_RESUME_HIBERNATION: Final = 727 +ERROR_FIRMWARE_UPDATED: Final = 728 +ERROR_DRIVERS_LEAKING_LOCKED_PAGES: Final = 729 +ERROR_WAKE_SYSTEM: Final = 730 +ERROR_WAIT_1: Final = 731 +ERROR_WAIT_2: Final = 732 +ERROR_WAIT_3: Final = 733 +ERROR_WAIT_63: Final = 734 +ERROR_ABANDONED_WAIT_0: Final = 735 +ERROR_ABANDONED_WAIT_63: Final = 736 +ERROR_USER_APC: Final = 737 +ERROR_KERNEL_APC: Final = 738 +ERROR_ALERTED: Final = 739 +ERROR_ELEVATION_REQUIRED: Final = 740 +ERROR_REPARSE: Final = 741 +ERROR_OPLOCK_BREAK_IN_PROGRESS: Final = 742 +ERROR_VOLUME_MOUNTED: Final = 743 +ERROR_RXACT_COMMITTED: Final = 744 +ERROR_NOTIFY_CLEANUP: Final = 745 +ERROR_PRIMARY_TRANSPORT_CONNECT_FAILED: Final = 746 +ERROR_PAGE_FAULT_TRANSITION: Final = 747 +ERROR_PAGE_FAULT_DEMAND_ZERO: Final = 748 +ERROR_PAGE_FAULT_COPY_ON_WRITE: Final = 749 +ERROR_PAGE_FAULT_GUARD_PAGE: Final = 750 +ERROR_PAGE_FAULT_PAGING_FILE: Final = 751 +ERROR_CACHE_PAGE_LOCKED: Final = 752 +ERROR_CRASH_DUMP: Final = 753 +ERROR_BUFFER_ALL_ZEROS: Final = 754 +ERROR_REPARSE_OBJECT: Final = 755 +ERROR_RESOURCE_REQUIREMENTS_CHANGED: Final = 756 +ERROR_TRANSLATION_COMPLETE: Final = 757 +ERROR_NOTHING_TO_TERMINATE: Final = 758 +ERROR_PROCESS_NOT_IN_JOB: Final = 759 +ERROR_PROCESS_IN_JOB: Final = 760 +ERROR_VOLSNAP_HIBERNATE_READY: Final = 761 +ERROR_FSFILTER_OP_COMPLETED_SUCCESSFULLY: Final = 762 +ERROR_INTERRUPT_VECTOR_ALREADY_CONNECTED: Final = 763 +ERROR_INTERRUPT_STILL_CONNECTED: Final = 764 +ERROR_WAIT_FOR_OPLOCK: Final = 765 +ERROR_DBG_EXCEPTION_HANDLED: Final = 766 +ERROR_DBG_CONTINUE: Final = 767 +ERROR_CALLBACK_POP_STACK: Final = 768 +ERROR_COMPRESSION_DISABLED: Final = 769 +ERROR_CANTFETCHBACKWARDS: Final = 770 +ERROR_CANTSCROLLBACKWARDS: Final = 771 +ERROR_ROWSNOTRELEASED: Final = 772 +ERROR_BAD_ACCESSOR_FLAGS: Final = 773 +ERROR_ERRORS_ENCOUNTERED: Final = 774 +ERROR_NOT_CAPABLE: Final = 775 +ERROR_REQUEST_OUT_OF_SEQUENCE: Final = 776 +ERROR_VERSION_PARSE_ERROR: Final = 777 +ERROR_BADSTARTPOSITION: Final = 778 +ERROR_MEMORY_HARDWARE: Final = 779 +ERROR_DISK_REPAIR_DISABLED: Final = 780 +ERROR_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE: Final = 781 
+ERROR_SYSTEM_POWERSTATE_TRANSITION: Final = 782
+ERROR_SYSTEM_POWERSTATE_COMPLEX_TRANSITION: Final = 783
+ERROR_MCA_EXCEPTION: Final = 784
+ERROR_ACCESS_AUDIT_BY_POLICY: Final = 785
+ERROR_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY: Final = 786
+ERROR_ABANDON_HIBERFILE: Final = 787
+ERROR_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED: Final = 788
+ERROR_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR: Final = 789
+ERROR_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR: Final = 790
+ERROR_BAD_MCFG_TABLE: Final = 791
+ERROR_DISK_REPAIR_REDIRECTED: Final = 792
+ERROR_DISK_REPAIR_UNSUCCESSFUL: Final = 793
+ERROR_CORRUPT_LOG_OVERFULL: Final = 794
+ERROR_CORRUPT_LOG_CORRUPTED: Final = 795
+ERROR_CORRUPT_LOG_UNAVAILABLE: Final = 796
+ERROR_CORRUPT_LOG_DELETED_FULL: Final = 797
+ERROR_CORRUPT_LOG_CLEARED: Final = 798
+ERROR_ORPHAN_NAME_EXHAUSTED: Final = 799
+ERROR_OPLOCK_SWITCHED_TO_NEW_HANDLE: Final = 800
+ERROR_CANNOT_GRANT_REQUESTED_OPLOCK: Final = 801
+ERROR_CANNOT_BREAK_OPLOCK: Final = 802
+ERROR_OPLOCK_HANDLE_CLOSED: Final = 803
+ERROR_NO_ACE_CONDITION: Final = 804
+ERROR_INVALID_ACE_CONDITION: Final = 805
+ERROR_FILE_HANDLE_REVOKED: Final = 806
+ERROR_IMAGE_AT_DIFFERENT_BASE: Final = 807
+ERROR_ENCRYPTED_IO_NOT_POSSIBLE: Final = 808
+ERROR_FILE_METADATA_OPTIMIZATION_IN_PROGRESS: Final = 809
+ERROR_QUOTA_ACTIVITY: Final = 810
+ERROR_HANDLE_REVOKED: Final = 811
+ERROR_CALLBACK_INVOKE_INLINE: Final = 812
+ERROR_CPU_SET_INVALID: Final = 813
+ERROR_ENCLAVE_NOT_TERMINATED: Final = 814
+ERROR_ENCLAVE_VIOLATION: Final = 815
+ERROR_SERVER_TRANSPORT_CONFLICT: Final = 816
+ERROR_CERTIFICATE_VALIDATION_PREFERENCE_CONFLICT: Final = 817
+ERROR_FT_READ_FROM_COPY_FAILURE: Final = 818
+ERROR_SECTION_DIRECT_MAP_ONLY: Final = 819
+ERROR_EA_ACCESS_DENIED: Final = 994
+ERROR_OPERATION_ABORTED: Final = 995
+ERROR_IO_INCOMPLETE: Final = 996
+ERROR_IO_PENDING: Final = 997
+ERROR_NOACCESS: Final = 998
+ERROR_SWAPERROR: Final = 999
+ERROR_STACK_OVERFLOW: Final = 1001
+ERROR_INVALID_MESSAGE: Final = 1002
+ERROR_CAN_NOT_COMPLETE: Final = 1003
+ERROR_INVALID_FLAGS: Final = 1004
+ERROR_UNRECOGNIZED_VOLUME: Final = 1005
+ERROR_FILE_INVALID: Final = 1006
+ERROR_FULLSCREEN_MODE: Final = 1007
+ERROR_NO_TOKEN: Final = 1008
+ERROR_BADDB: Final = 1009
+ERROR_BADKEY: Final = 1010
+ERROR_CANTOPEN: Final = 1011
+ERROR_CANTREAD: Final = 1012
+ERROR_CANTWRITE: Final = 1013
+ERROR_REGISTRY_RECOVERED: Final = 1014
+ERROR_REGISTRY_CORRUPT: Final = 1015
+ERROR_REGISTRY_IO_FAILED: Final = 1016
+ERROR_NOT_REGISTRY_FILE: Final = 1017
+ERROR_KEY_DELETED: Final = 1018
+ERROR_NO_LOG_SPACE: Final = 1019
+ERROR_KEY_HAS_CHILDREN: Final = 1020
+ERROR_CHILD_MUST_BE_VOLATILE: Final = 1021
+ERROR_NOTIFY_ENUM_DIR: Final = 1022
+ERROR_DEPENDENT_SERVICES_RUNNING: Final = 1051
+ERROR_INVALID_SERVICE_CONTROL: Final = 1052
+ERROR_SERVICE_REQUEST_TIMEOUT: Final = 1053
+ERROR_SERVICE_NO_THREAD: Final = 1054
+ERROR_SERVICE_DATABASE_LOCKED: Final = 1055
+ERROR_SERVICE_ALREADY_RUNNING: Final = 1056
+ERROR_INVALID_SERVICE_ACCOUNT: Final = 1057
+ERROR_SERVICE_DISABLED: Final = 1058
+ERROR_CIRCULAR_DEPENDENCY: Final = 1059
+ERROR_SERVICE_DOES_NOT_EXIST: Final = 1060
+ERROR_SERVICE_CANNOT_ACCEPT_CTRL: Final = 1061
+ERROR_SERVICE_NOT_ACTIVE: Final = 1062
+ERROR_FAILED_SERVICE_CONTROLLER_CONNECT: Final = 1063
+ERROR_EXCEPTION_IN_SERVICE: Final = 1064
+ERROR_DATABASE_DOES_NOT_EXIST: Final = 1065
+ERROR_SERVICE_SPECIFIC_ERROR: Final = 1066
+ERROR_PROCESS_ABORTED: Final = 1067
+ERROR_SERVICE_DEPENDENCY_FAIL: Final = 1068
+ERROR_SERVICE_LOGON_FAILED: Final = 1069
+ERROR_SERVICE_START_HANG: Final = 1070
+ERROR_INVALID_SERVICE_LOCK: Final = 1071
+ERROR_SERVICE_MARKED_FOR_DELETE: Final = 1072
+ERROR_SERVICE_EXISTS: Final = 1073
+ERROR_ALREADY_RUNNING_LKG: Final = 1074
+ERROR_SERVICE_DEPENDENCY_DELETED: Final = 1075
+ERROR_BOOT_ALREADY_ACCEPTED: Final = 1076
+ERROR_SERVICE_NEVER_STARTED: Final = 1077
+ERROR_DUPLICATE_SERVICE_NAME: Final = 1078
+ERROR_DIFFERENT_SERVICE_ACCOUNT: Final = 1079
+ERROR_CANNOT_DETECT_DRIVER_FAILURE: Final = 1080
+ERROR_CANNOT_DETECT_PROCESS_ABORT: Final = 1081
+ERROR_NO_RECOVERY_PROGRAM: Final = 1082
+ERROR_SERVICE_NOT_IN_EXE: Final = 1083
+ERROR_NOT_SAFEBOOT_SERVICE: Final = 1084
+ERROR_END_OF_MEDIA: Final = 1100
+ERROR_FILEMARK_DETECTED: Final = 1101
+ERROR_BEGINNING_OF_MEDIA: Final = 1102
+ERROR_SETMARK_DETECTED: Final = 1103
+ERROR_NO_DATA_DETECTED: Final = 1104
+ERROR_PARTITION_FAILURE: Final = 1105
+ERROR_INVALID_BLOCK_LENGTH: Final = 1106
+ERROR_DEVICE_NOT_PARTITIONED: Final = 1107
+ERROR_UNABLE_TO_LOCK_MEDIA: Final = 1108
+ERROR_UNABLE_TO_UNLOAD_MEDIA: Final = 1109
+ERROR_MEDIA_CHANGED: Final = 1110
+ERROR_BUS_RESET: Final = 1111
+ERROR_NO_MEDIA_IN_DRIVE: Final = 1112
+ERROR_NO_UNICODE_TRANSLATION: Final = 1113
+ERROR_DLL_INIT_FAILED: Final = 1114
+ERROR_SHUTDOWN_IN_PROGRESS: Final = 1115
+ERROR_NO_SHUTDOWN_IN_PROGRESS: Final = 1116
+ERROR_IO_DEVICE: Final = 1117
+ERROR_SERIAL_NO_DEVICE: Final = 1118
+ERROR_IRQ_BUSY: Final = 1119
+ERROR_MORE_WRITES: Final = 1120
+ERROR_COUNTER_TIMEOUT: Final = 1121
+ERROR_FLOPPY_ID_MARK_NOT_FOUND: Final = 1122
+ERROR_FLOPPY_WRONG_CYLINDER: Final = 1123
+ERROR_FLOPPY_UNKNOWN_ERROR: Final = 1124
+ERROR_FLOPPY_BAD_REGISTERS: Final = 1125
+ERROR_DISK_RECALIBRATE_FAILED: Final = 1126
+ERROR_DISK_OPERATION_FAILED: Final = 1127
+ERROR_DISK_RESET_FAILED: Final = 1128
+ERROR_EOM_OVERFLOW: Final = 1129
+ERROR_NOT_ENOUGH_SERVER_MEMORY: Final = 1130
+ERROR_POSSIBLE_DEADLOCK: Final = 1131
+ERROR_MAPPED_ALIGNMENT: Final = 1132
+ERROR_SET_POWER_STATE_VETOED: Final = 1140
+ERROR_SET_POWER_STATE_FAILED: Final = 1141
+ERROR_TOO_MANY_LINKS: Final = 1142
+ERROR_OLD_WIN_VERSION: Final = 1150
+ERROR_APP_WRONG_OS: Final = 1151
+ERROR_SINGLE_INSTANCE_APP: Final = 1152
+ERROR_RMODE_APP: Final = 1153
+ERROR_INVALID_DLL: Final = 1154
+ERROR_NO_ASSOCIATION: Final = 1155
+ERROR_DDE_FAIL: Final = 1156
+ERROR_DLL_NOT_FOUND: Final = 1157
+ERROR_NO_MORE_USER_HANDLES: Final = 1158
+ERROR_MESSAGE_SYNC_ONLY: Final = 1159
+ERROR_SOURCE_ELEMENT_EMPTY: Final = 1160
+ERROR_DESTINATION_ELEMENT_FULL: Final = 1161
+ERROR_ILLEGAL_ELEMENT_ADDRESS: Final = 1162
+ERROR_MAGAZINE_NOT_PRESENT: Final = 1163
+ERROR_DEVICE_REINITIALIZATION_NEEDED: Final = 1164
+ERROR_DEVICE_REQUIRES_CLEANING: Final = 1165
+ERROR_DEVICE_DOOR_OPEN: Final = 1166
+ERROR_DEVICE_NOT_CONNECTED: Final = 1167
+ERROR_NOT_FOUND: Final = 1168
+ERROR_NO_MATCH: Final = 1169
+ERROR_SET_NOT_FOUND: Final = 1170
+ERROR_POINT_NOT_FOUND: Final = 1171
+ERROR_NO_TRACKING_SERVICE: Final = 1172
+ERROR_NO_VOLUME_ID: Final = 1173
+ERROR_UNABLE_TO_REMOVE_REPLACED: Final = 1175
+ERROR_UNABLE_TO_MOVE_REPLACEMENT: Final = 1176
+ERROR_UNABLE_TO_MOVE_REPLACEMENT_2: Final = 1177
+ERROR_JOURNAL_DELETE_IN_PROGRESS: Final = 1178
+ERROR_JOURNAL_NOT_ACTIVE: Final = 1179
+ERROR_POTENTIAL_FILE_FOUND: Final = 1180
+ERROR_JOURNAL_ENTRY_DELETED: Final = 1181
+ERROR_PARTITION_TERMINATING: Final = 1184
+ERROR_SHUTDOWN_IS_SCHEDULED: Final = 1190
+ERROR_SHUTDOWN_USERS_LOGGED_ON: Final = 1191
+ERROR_SHUTDOWN_DISKS_NOT_IN_MAINTENANCE_MODE: Final = 1192
+ERROR_BAD_DEVICE: Final = 1200
+ERROR_CONNECTION_UNAVAIL: Final = 1201
+ERROR_DEVICE_ALREADY_REMEMBERED: Final = 1202
+ERROR_NO_NET_OR_BAD_PATH: Final = 1203
+ERROR_BAD_PROVIDER: Final = 1204
+ERROR_CANNOT_OPEN_PROFILE: Final = 1205
+ERROR_BAD_PROFILE: Final = 1206
+ERROR_NOT_CONTAINER: Final = 1207
+ERROR_EXTENDED_ERROR: Final = 1208
+ERROR_INVALID_GROUPNAME: Final = 1209
+ERROR_INVALID_COMPUTERNAME: Final = 1210
+ERROR_INVALID_EVENTNAME: Final = 1211
+ERROR_INVALID_DOMAINNAME: Final = 1212
+ERROR_INVALID_SERVICENAME: Final = 1213
+ERROR_INVALID_NETNAME: Final = 1214
+ERROR_INVALID_SHARENAME: Final = 1215
+ERROR_INVALID_PASSWORDNAME: Final = 1216
+ERROR_INVALID_MESSAGENAME: Final = 1217
+ERROR_INVALID_MESSAGEDEST: Final = 1218
+ERROR_SESSION_CREDENTIAL_CONFLICT: Final = 1219
+ERROR_REMOTE_SESSION_LIMIT_EXCEEDED: Final = 1220
+ERROR_DUP_DOMAINNAME: Final = 1221
+ERROR_NO_NETWORK: Final = 1222
+ERROR_CANCELLED: Final = 1223
+ERROR_USER_MAPPED_FILE: Final = 1224
+ERROR_CONNECTION_REFUSED: Final = 1225
+ERROR_GRACEFUL_DISCONNECT: Final = 1226
+ERROR_ADDRESS_ALREADY_ASSOCIATED: Final = 1227
+ERROR_ADDRESS_NOT_ASSOCIATED: Final = 1228
+ERROR_CONNECTION_INVALID: Final = 1229
+ERROR_CONNECTION_ACTIVE: Final = 1230
+ERROR_NETWORK_UNREACHABLE: Final = 1231
+ERROR_HOST_UNREACHABLE: Final = 1232
+ERROR_PROTOCOL_UNREACHABLE: Final = 1233
+ERROR_PORT_UNREACHABLE: Final = 1234
+ERROR_REQUEST_ABORTED: Final = 1235
+ERROR_CONNECTION_ABORTED: Final = 1236
+ERROR_RETRY: Final = 1237
+ERROR_CONNECTION_COUNT_LIMIT: Final = 1238
+ERROR_LOGIN_TIME_RESTRICTION: Final = 1239
+ERROR_LOGIN_WKSTA_RESTRICTION: Final = 1240
+ERROR_INCORRECT_ADDRESS: Final = 1241
+ERROR_ALREADY_REGISTERED: Final = 1242
+ERROR_SERVICE_NOT_FOUND: Final = 1243
+ERROR_NOT_AUTHENTICATED: Final = 1244
+ERROR_NOT_LOGGED_ON: Final = 1245
+ERROR_CONTINUE: Final = 1246
+ERROR_ALREADY_INITIALIZED: Final = 1247
+ERROR_NO_MORE_DEVICES: Final = 1248
+ERROR_NO_SUCH_SITE: Final = 1249
+ERROR_DOMAIN_CONTROLLER_EXISTS: Final = 1250
+ERROR_ONLY_IF_CONNECTED: Final = 1251
+ERROR_OVERRIDE_NOCHANGES: Final = 1252
+ERROR_BAD_USER_PROFILE: Final = 1253
+ERROR_NOT_SUPPORTED_ON_SBS: Final = 1254
+ERROR_SERVER_SHUTDOWN_IN_PROGRESS: Final = 1255
+ERROR_HOST_DOWN: Final = 1256
+ERROR_NON_ACCOUNT_SID: Final = 1257
+ERROR_NON_DOMAIN_SID: Final = 1258
+ERROR_APPHELP_BLOCK: Final = 1259
+ERROR_ACCESS_DISABLED_BY_POLICY: Final = 1260
+ERROR_REG_NAT_CONSUMPTION: Final = 1261
+ERROR_CSCSHARE_OFFLINE: Final = 1262
+ERROR_PKINIT_FAILURE: Final = 1263
+ERROR_SMARTCARD_SUBSYSTEM_FAILURE: Final = 1264
+ERROR_DOWNGRADE_DETECTED: Final = 1265
+ERROR_MACHINE_LOCKED: Final = 1271
+ERROR_SMB_GUEST_LOGON_BLOCKED: Final = 1272
+ERROR_CALLBACK_SUPPLIED_INVALID_DATA: Final = 1273
+ERROR_SYNC_FOREGROUND_REFRESH_REQUIRED: Final = 1274
+ERROR_DRIVER_BLOCKED: Final = 1275
+ERROR_INVALID_IMPORT_OF_NON_DLL: Final = 1276
+ERROR_ACCESS_DISABLED_WEBBLADE: Final = 1277
+ERROR_ACCESS_DISABLED_WEBBLADE_TAMPER: Final = 1278
+ERROR_RECOVERY_FAILURE: Final = 1279
+ERROR_ALREADY_FIBER: Final = 1280
+ERROR_ALREADY_THREAD: Final = 1281
+ERROR_STACK_BUFFER_OVERRUN: Final = 1282
+ERROR_PARAMETER_QUOTA_EXCEEDED: Final = 1283
+ERROR_DEBUGGER_INACTIVE: Final = 1284
+ERROR_DELAY_LOAD_FAILED: Final = 1285
+ERROR_VDM_DISALLOWED: Final = 1286
+ERROR_UNIDENTIFIED_ERROR: Final = 1287
+ERROR_INVALID_CRUNTIME_PARAMETER: Final = 1288
+ERROR_BEYOND_VDL: Final = 1289
+ERROR_INCOMPATIBLE_SERVICE_SID_TYPE: Final = 1290
+ERROR_DRIVER_PROCESS_TERMINATED: Final = 1291
+ERROR_IMPLEMENTATION_LIMIT: Final = 1292
+ERROR_PROCESS_IS_PROTECTED: Final = 1293
+ERROR_SERVICE_NOTIFY_CLIENT_LAGGING: Final = 1294
+ERROR_DISK_QUOTA_EXCEEDED: Final = 1295
+ERROR_CONTENT_BLOCKED: Final = 1296
+ERROR_INCOMPATIBLE_SERVICE_PRIVILEGE: Final = 1297
+ERROR_APP_HANG: Final = 1298
+ERROR_INVALID_LABEL: Final = 1299
+ERROR_NOT_ALL_ASSIGNED: Final = 1300
+ERROR_SOME_NOT_MAPPED: Final = 1301
+ERROR_NO_QUOTAS_FOR_ACCOUNT: Final = 1302
+ERROR_LOCAL_USER_SESSION_KEY: Final = 1303
+ERROR_NULL_LM_PASSWORD: Final = 1304
+ERROR_UNKNOWN_REVISION: Final = 1305
+ERROR_REVISION_MISMATCH: Final = 1306
+ERROR_INVALID_OWNER: Final = 1307
+ERROR_INVALID_PRIMARY_GROUP: Final = 1308
+ERROR_NO_IMPERSONATION_TOKEN: Final = 1309
+ERROR_CANT_DISABLE_MANDATORY: Final = 1310
+ERROR_NO_LOGON_SERVERS: Final = 1311
+ERROR_NO_SUCH_LOGON_SESSION: Final = 1312
+ERROR_NO_SUCH_PRIVILEGE: Final = 1313
+ERROR_PRIVILEGE_NOT_HELD: Final = 1314
+ERROR_INVALID_ACCOUNT_NAME: Final = 1315
+ERROR_USER_EXISTS: Final = 1316
+ERROR_NO_SUCH_USER: Final = 1317
+ERROR_GROUP_EXISTS: Final = 1318
+ERROR_NO_SUCH_GROUP: Final = 1319
+ERROR_MEMBER_IN_GROUP: Final = 1320
+ERROR_MEMBER_NOT_IN_GROUP: Final = 1321
+ERROR_LAST_ADMIN: Final = 1322
+ERROR_WRONG_PASSWORD: Final = 1323
+ERROR_ILL_FORMED_PASSWORD: Final = 1324
+ERROR_PASSWORD_RESTRICTION: Final = 1325
+ERROR_LOGON_FAILURE: Final = 1326
+ERROR_ACCOUNT_RESTRICTION: Final = 1327
+ERROR_INVALID_LOGON_HOURS: Final = 1328
+ERROR_INVALID_WORKSTATION: Final = 1329
+ERROR_PASSWORD_EXPIRED: Final = 1330
+ERROR_ACCOUNT_DISABLED: Final = 1331
+ERROR_NONE_MAPPED: Final = 1332
+ERROR_TOO_MANY_LUIDS_REQUESTED: Final = 1333
+ERROR_LUIDS_EXHAUSTED: Final = 1334
+ERROR_INVALID_SUB_AUTHORITY: Final = 1335
+ERROR_INVALID_ACL: Final = 1336
+ERROR_INVALID_SID: Final = 1337
+ERROR_INVALID_SECURITY_DESCR: Final = 1338
+ERROR_BAD_INHERITANCE_ACL: Final = 1340
+ERROR_SERVER_DISABLED: Final = 1341
+ERROR_SERVER_NOT_DISABLED: Final = 1342
+ERROR_INVALID_ID_AUTHORITY: Final = 1343
+ERROR_ALLOTTED_SPACE_EXCEEDED: Final = 1344
+ERROR_INVALID_GROUP_ATTRIBUTES: Final = 1345
+ERROR_BAD_IMPERSONATION_LEVEL: Final = 1346
+ERROR_CANT_OPEN_ANONYMOUS: Final = 1347
+ERROR_BAD_VALIDATION_CLASS: Final = 1348
+ERROR_BAD_TOKEN_TYPE: Final = 1349
+ERROR_NO_SECURITY_ON_OBJECT: Final = 1350
+ERROR_CANT_ACCESS_DOMAIN_INFO: Final = 1351
+ERROR_INVALID_SERVER_STATE: Final = 1352
+ERROR_INVALID_DOMAIN_STATE: Final = 1353
+ERROR_INVALID_DOMAIN_ROLE: Final = 1354
+ERROR_NO_SUCH_DOMAIN: Final = 1355
+ERROR_DOMAIN_EXISTS: Final = 1356
+ERROR_DOMAIN_LIMIT_EXCEEDED: Final = 1357
+ERROR_INTERNAL_DB_CORRUPTION: Final = 1358
+ERROR_INTERNAL_ERROR: Final = 1359
+ERROR_GENERIC_NOT_MAPPED: Final = 1360
+ERROR_BAD_DESCRIPTOR_FORMAT: Final = 1361
+ERROR_NOT_LOGON_PROCESS: Final = 1362
+ERROR_LOGON_SESSION_EXISTS: Final = 1363
+ERROR_NO_SUCH_PACKAGE: Final = 1364
+ERROR_BAD_LOGON_SESSION_STATE: Final = 1365
+ERROR_LOGON_SESSION_COLLISION: Final = 1366
+ERROR_INVALID_LOGON_TYPE: Final = 1367
+ERROR_CANNOT_IMPERSONATE: Final = 1368
+ERROR_RXACT_INVALID_STATE: Final = 1369
+ERROR_RXACT_COMMIT_FAILURE: Final = 1370
+ERROR_SPECIAL_ACCOUNT: Final = 1371
+ERROR_SPECIAL_GROUP: Final = 1372
+ERROR_SPECIAL_USER: Final = 1373
+ERROR_MEMBERS_PRIMARY_GROUP: Final = 1374
+ERROR_TOKEN_ALREADY_IN_USE: Final = 1375
+ERROR_NO_SUCH_ALIAS: Final = 1376
+ERROR_MEMBER_NOT_IN_ALIAS: Final = 1377
+ERROR_MEMBER_IN_ALIAS: Final = 1378
+ERROR_ALIAS_EXISTS: Final = 1379
+ERROR_LOGON_NOT_GRANTED: Final = 1380
+ERROR_TOO_MANY_SECRETS: Final = 1381
+ERROR_SECRET_TOO_LONG: Final = 1382
+ERROR_INTERNAL_DB_ERROR: Final = 1383
+ERROR_TOO_MANY_CONTEXT_IDS: Final = 1384
+ERROR_LOGON_TYPE_NOT_GRANTED: Final = 1385 +ERROR_NT_CROSS_ENCRYPTION_REQUIRED: Final = 1386 +ERROR_NO_SUCH_MEMBER: Final = 1387 +ERROR_INVALID_MEMBER: Final = 1388 +ERROR_TOO_MANY_SIDS: Final = 1389 +ERROR_LM_CROSS_ENCRYPTION_REQUIRED: Final = 1390 +ERROR_NO_INHERITANCE: Final = 1391 +ERROR_FILE_CORRUPT: Final = 1392 +ERROR_DISK_CORRUPT: Final = 1393 +ERROR_NO_USER_SESSION_KEY: Final = 1394 +ERROR_LICENSE_QUOTA_EXCEEDED: Final = 1395 +ERROR_WRONG_TARGET_NAME: Final = 1396 +ERROR_MUTUAL_AUTH_FAILED: Final = 1397 +ERROR_TIME_SKEW: Final = 1398 +ERROR_CURRENT_DOMAIN_NOT_ALLOWED: Final = 1399 +ERROR_INVALID_WINDOW_HANDLE: Final = 1400 +ERROR_INVALID_MENU_HANDLE: Final = 1401 +ERROR_INVALID_CURSOR_HANDLE: Final = 1402 +ERROR_INVALID_ACCEL_HANDLE: Final = 1403 +ERROR_INVALID_HOOK_HANDLE: Final = 1404 +ERROR_INVALID_DWP_HANDLE: Final = 1405 +ERROR_TLW_WITH_WSCHILD: Final = 1406 +ERROR_CANNOT_FIND_WND_CLASS: Final = 1407 +ERROR_WINDOW_OF_OTHER_THREAD: Final = 1408 +ERROR_HOTKEY_ALREADY_REGISTERED: Final = 1409 +ERROR_CLASS_ALREADY_EXISTS: Final = 1410 +ERROR_CLASS_DOES_NOT_EXIST: Final = 1411 +ERROR_CLASS_HAS_WINDOWS: Final = 1412 +ERROR_INVALID_INDEX: Final = 1413 +ERROR_INVALID_ICON_HANDLE: Final = 1414 +ERROR_PRIVATE_DIALOG_INDEX: Final = 1415 +ERROR_LISTBOX_ID_NOT_FOUND: Final = 1416 +ERROR_NO_WILDCARD_CHARACTERS: Final = 1417 +ERROR_CLIPBOARD_NOT_OPEN: Final = 1418 +ERROR_HOTKEY_NOT_REGISTERED: Final = 1419 +ERROR_WINDOW_NOT_DIALOG: Final = 1420 +ERROR_CONTROL_ID_NOT_FOUND: Final = 1421 +ERROR_INVALID_COMBOBOX_MESSAGE: Final = 1422 +ERROR_WINDOW_NOT_COMBOBOX: Final = 1423 +ERROR_INVALID_EDIT_HEIGHT: Final = 1424 +ERROR_DC_NOT_FOUND: Final = 1425 +ERROR_INVALID_HOOK_FILTER: Final = 1426 +ERROR_INVALID_FILTER_PROC: Final = 1427 +ERROR_HOOK_NEEDS_HMOD: Final = 1428 +ERROR_GLOBAL_ONLY_HOOK: Final = 1429 +ERROR_JOURNAL_HOOK_SET: Final = 1430 +ERROR_HOOK_NOT_INSTALLED: Final = 1431 +ERROR_INVALID_LB_MESSAGE: Final = 1432 +ERROR_SETCOUNT_ON_BAD_LB: Final = 1433 +ERROR_LB_WITHOUT_TABSTOPS: Final = 1434 +ERROR_DESTROY_OBJECT_OF_OTHER_THREAD: Final = 1435 +ERROR_CHILD_WINDOW_MENU: Final = 1436 +ERROR_NO_SYSTEM_MENU: Final = 1437 +ERROR_INVALID_MSGBOX_STYLE: Final = 1438 +ERROR_INVALID_SPI_VALUE: Final = 1439 +ERROR_SCREEN_ALREADY_LOCKED: Final = 1440 +ERROR_HWNDS_HAVE_DIFF_PARENT: Final = 1441 +ERROR_NOT_CHILD_WINDOW: Final = 1442 +ERROR_INVALID_GW_COMMAND: Final = 1443 +ERROR_INVALID_THREAD_ID: Final = 1444 +ERROR_NON_MDICHILD_WINDOW: Final = 1445 +ERROR_POPUP_ALREADY_ACTIVE: Final = 1446 +ERROR_NO_SCROLLBARS: Final = 1447 +ERROR_INVALID_SCROLLBAR_RANGE: Final = 1448 +ERROR_INVALID_SHOWWIN_COMMAND: Final = 1449 +ERROR_NO_SYSTEM_RESOURCES: Final = 1450 +ERROR_NONPAGED_SYSTEM_RESOURCES: Final = 1451 +ERROR_PAGED_SYSTEM_RESOURCES: Final = 1452 +ERROR_WORKING_SET_QUOTA: Final = 1453 +ERROR_PAGEFILE_QUOTA: Final = 1454 +ERROR_COMMITMENT_LIMIT: Final = 1455 +ERROR_MENU_ITEM_NOT_FOUND: Final = 1456 +ERROR_INVALID_KEYBOARD_HANDLE: Final = 1457 +ERROR_HOOK_TYPE_NOT_ALLOWED: Final = 1458 +ERROR_REQUIRES_INTERACTIVE_WINDOWSTATION: Final = 1459 +ERROR_TIMEOUT: Final = 1460 +ERROR_INVALID_MONITOR_HANDLE: Final = 1461 +ERROR_INCORRECT_SIZE: Final = 1462 +ERROR_SYMLINK_CLASS_DISABLED: Final = 1463 +ERROR_SYMLINK_NOT_SUPPORTED: Final = 1464 +ERROR_XML_PARSE_ERROR: Final = 1465 +ERROR_XMLDSIG_ERROR: Final = 1466 +ERROR_RESTART_APPLICATION: Final = 1467 +ERROR_WRONG_COMPARTMENT: Final = 1468 +ERROR_AUTHIP_FAILURE: Final = 1469 +ERROR_NO_NVRAM_RESOURCES: Final = 1470 +ERROR_NOT_GUI_PROCESS: Final = 1471 
+ERROR_EVENTLOG_FILE_CORRUPT: Final = 1500
+ERROR_EVENTLOG_CANT_START: Final = 1501
+ERROR_LOG_FILE_FULL: Final = 1502
+ERROR_EVENTLOG_FILE_CHANGED: Final = 1503
+ERROR_CONTAINER_ASSIGNED: Final = 1504
+ERROR_JOB_NO_CONTAINER: Final = 1505
+ERROR_INVALID_TASK_NAME: Final = 1550
+ERROR_INVALID_TASK_INDEX: Final = 1551
+ERROR_THREAD_ALREADY_IN_TASK: Final = 1552
+ERROR_INSTALL_SERVICE_FAILURE: Final = 1601
+ERROR_INSTALL_USEREXIT: Final = 1602
+ERROR_INSTALL_FAILURE: Final = 1603
+ERROR_INSTALL_SUSPEND: Final = 1604
+ERROR_UNKNOWN_PRODUCT: Final = 1605
+ERROR_UNKNOWN_FEATURE: Final = 1606
+ERROR_UNKNOWN_COMPONENT: Final = 1607
+ERROR_UNKNOWN_PROPERTY: Final = 1608
+ERROR_INVALID_HANDLE_STATE: Final = 1609
+ERROR_BAD_CONFIGURATION: Final = 1610
+ERROR_INDEX_ABSENT: Final = 1611
+ERROR_INSTALL_SOURCE_ABSENT: Final = 1612
+ERROR_INSTALL_PACKAGE_VERSION: Final = 1613
+ERROR_PRODUCT_UNINSTALLED: Final = 1614
+ERROR_BAD_QUERY_SYNTAX: Final = 1615
+ERROR_INVALID_FIELD: Final = 1616
+ERROR_DEVICE_REMOVED: Final = 1617
+ERROR_INSTALL_ALREADY_RUNNING: Final = 1618
+ERROR_INSTALL_PACKAGE_OPEN_FAILED: Final = 1619
+ERROR_INSTALL_PACKAGE_INVALID: Final = 1620
+ERROR_INSTALL_UI_FAILURE: Final = 1621
+ERROR_INSTALL_LOG_FAILURE: Final = 1622
+ERROR_INSTALL_LANGUAGE_UNSUPPORTED: Final = 1623
+ERROR_INSTALL_TRANSFORM_FAILURE: Final = 1624
+ERROR_INSTALL_PACKAGE_REJECTED: Final = 1625
+ERROR_FUNCTION_NOT_CALLED: Final = 1626
+ERROR_FUNCTION_FAILED: Final = 1627
+ERROR_INVALID_TABLE: Final = 1628
+ERROR_DATATYPE_MISMATCH: Final = 1629
+ERROR_UNSUPPORTED_TYPE: Final = 1630
+ERROR_CREATE_FAILED: Final = 1631
+ERROR_INSTALL_TEMP_UNWRITABLE: Final = 1632
+ERROR_INSTALL_PLATFORM_UNSUPPORTED: Final = 1633
+ERROR_INSTALL_NOTUSED: Final = 1634
+ERROR_PATCH_PACKAGE_OPEN_FAILED: Final = 1635
+ERROR_PATCH_PACKAGE_INVALID: Final = 1636
+ERROR_PATCH_PACKAGE_UNSUPPORTED: Final = 1637
+ERROR_PRODUCT_VERSION: Final = 1638
+ERROR_INVALID_COMMAND_LINE: Final = 1639
+ERROR_INSTALL_REMOTE_DISALLOWED: Final = 1640
+ERROR_SUCCESS_REBOOT_INITIATED: Final = 1641
+ERROR_PATCH_TARGET_NOT_FOUND: Final = 1642
+ERROR_PATCH_PACKAGE_REJECTED: Final = 1643
+ERROR_INSTALL_TRANSFORM_REJECTED: Final = 1644
+ERROR_INSTALL_REMOTE_PROHIBITED: Final = 1645
+ERROR_PATCH_REMOVAL_UNSUPPORTED: Final = 1646
+ERROR_UNKNOWN_PATCH: Final = 1647
+ERROR_PATCH_NO_SEQUENCE: Final = 1648
+ERROR_PATCH_REMOVAL_DISALLOWED: Final = 1649
+ERROR_INVALID_PATCH_XML: Final = 1650
+ERROR_PATCH_MANAGED_ADVERTISED_PRODUCT: Final = 1651
+ERROR_INSTALL_SERVICE_SAFEBOOT: Final = 1652
+ERROR_FAIL_FAST_EXCEPTION: Final = 1653
+ERROR_INSTALL_REJECTED: Final = 1654
+ERROR_DYNAMIC_CODE_BLOCKED: Final = 1655
+ERROR_NOT_SAME_OBJECT: Final = 1656
+ERROR_STRICT_CFG_VIOLATION: Final = 1657
+ERROR_SET_CONTEXT_DENIED: Final = 1660
+ERROR_CROSS_PARTITION_VIOLATION: Final = 1661
+ERROR_RETURN_ADDRESS_HIJACK_ATTEMPT: Final = 1662
+RPC_S_INVALID_STRING_BINDING: Final = 1700
+RPC_S_WRONG_KIND_OF_BINDING: Final = 1701
+RPC_S_INVALID_BINDING: Final = 1702
+RPC_S_PROTSEQ_NOT_SUPPORTED: Final = 1703
+RPC_S_INVALID_RPC_PROTSEQ: Final = 1704
+RPC_S_INVALID_STRING_UUID: Final = 1705
+RPC_S_INVALID_ENDPOINT_FORMAT: Final = 1706
+RPC_S_INVALID_NET_ADDR: Final = 1707
+RPC_S_NO_ENDPOINT_FOUND: Final = 1708
+RPC_S_INVALID_TIMEOUT: Final = 1709
+RPC_S_OBJECT_NOT_FOUND: Final = 1710
+RPC_S_ALREADY_REGISTERED: Final = 1711
+RPC_S_TYPE_ALREADY_REGISTERED: Final = 1712
+RPC_S_ALREADY_LISTENING: Final = 1713
+RPC_S_NO_PROTSEQS_REGISTERED: Final = 1714
+RPC_S_NOT_LISTENING: Final = 1715
+RPC_S_UNKNOWN_MGR_TYPE: Final = 1716
+RPC_S_UNKNOWN_IF: Final = 1717
+RPC_S_NO_BINDINGS: Final = 1718
+RPC_S_NO_PROTSEQS: Final = 1719
+RPC_S_CANT_CREATE_ENDPOINT: Final = 1720
+RPC_S_OUT_OF_RESOURCES: Final = 1721
+RPC_S_SERVER_UNAVAILABLE: Final = 1722
+RPC_S_SERVER_TOO_BUSY: Final = 1723
+RPC_S_INVALID_NETWORK_OPTIONS: Final = 1724
+RPC_S_NO_CALL_ACTIVE: Final = 1725
+RPC_S_CALL_FAILED: Final = 1726
+RPC_S_CALL_FAILED_DNE: Final = 1727
+RPC_S_PROTOCOL_ERROR: Final = 1728
+RPC_S_PROXY_ACCESS_DENIED: Final = 1729
+RPC_S_UNSUPPORTED_TRANS_SYN: Final = 1730
+RPC_S_UNSUPPORTED_TYPE: Final = 1732
+RPC_S_INVALID_TAG: Final = 1733
+RPC_S_INVALID_BOUND: Final = 1734
+RPC_S_NO_ENTRY_NAME: Final = 1735
+RPC_S_INVALID_NAME_SYNTAX: Final = 1736
+RPC_S_UNSUPPORTED_NAME_SYNTAX: Final = 1737
+RPC_S_UUID_NO_ADDRESS: Final = 1739
+RPC_S_DUPLICATE_ENDPOINT: Final = 1740
+RPC_S_UNKNOWN_AUTHN_TYPE: Final = 1741
+RPC_S_MAX_CALLS_TOO_SMALL: Final = 1742
+RPC_S_STRING_TOO_LONG: Final = 1743
+RPC_S_PROTSEQ_NOT_FOUND: Final = 1744
+RPC_S_PROCNUM_OUT_OF_RANGE: Final = 1745
+RPC_S_BINDING_HAS_NO_AUTH: Final = 1746
+RPC_S_UNKNOWN_AUTHN_SERVICE: Final = 1747
+RPC_S_UNKNOWN_AUTHN_LEVEL: Final = 1748
+RPC_S_INVALID_AUTH_IDENTITY: Final = 1749
+RPC_S_UNKNOWN_AUTHZ_SERVICE: Final = 1750
+EPT_S_INVALID_ENTRY: Final = 1751
+EPT_S_CANT_PERFORM_OP: Final = 1752
+EPT_S_NOT_REGISTERED: Final = 1753
+RPC_S_NOTHING_TO_EXPORT: Final = 1754
+RPC_S_INCOMPLETE_NAME: Final = 1755
+RPC_S_INVALID_VERS_OPTION: Final = 1756
+RPC_S_NO_MORE_MEMBERS: Final = 1757
+RPC_S_NOT_ALL_OBJS_UNEXPORTED: Final = 1758
+RPC_S_INTERFACE_NOT_FOUND: Final = 1759
+RPC_S_ENTRY_ALREADY_EXISTS: Final = 1760
+RPC_S_ENTRY_NOT_FOUND: Final = 1761
+RPC_S_NAME_SERVICE_UNAVAILABLE: Final = 1762
+RPC_S_INVALID_NAF_ID: Final = 1763
+RPC_S_CANNOT_SUPPORT: Final = 1764
+RPC_S_NO_CONTEXT_AVAILABLE: Final = 1765
+RPC_S_INTERNAL_ERROR: Final = 1766
+RPC_S_ZERO_DIVIDE: Final = 1767
+RPC_S_ADDRESS_ERROR: Final = 1768
+RPC_S_FP_DIV_ZERO: Final = 1769
+RPC_S_FP_UNDERFLOW: Final = 1770
+RPC_S_FP_OVERFLOW: Final = 1771
+RPC_X_NO_MORE_ENTRIES: Final = 1772
+RPC_X_SS_CHAR_TRANS_OPEN_FAIL: Final = 1773
+RPC_X_SS_CHAR_TRANS_SHORT_FILE: Final = 1774
+RPC_X_SS_IN_NULL_CONTEXT: Final = 1775
+RPC_X_SS_CONTEXT_DAMAGED: Final = 1777
+RPC_X_SS_HANDLES_MISMATCH: Final = 1778
+RPC_X_SS_CANNOT_GET_CALL_HANDLE: Final = 1779
+RPC_X_NULL_REF_POINTER: Final = 1780
+RPC_X_ENUM_VALUE_OUT_OF_RANGE: Final = 1781
+RPC_X_BYTE_COUNT_TOO_SMALL: Final = 1782
+RPC_X_BAD_STUB_DATA: Final = 1783
+ERROR_INVALID_USER_BUFFER: Final = 1784
+ERROR_UNRECOGNIZED_MEDIA: Final = 1785
+ERROR_NO_TRUST_LSA_SECRET: Final = 1786
+ERROR_NO_TRUST_SAM_ACCOUNT: Final = 1787
+ERROR_TRUSTED_DOMAIN_FAILURE: Final = 1788
+ERROR_TRUSTED_RELATIONSHIP_FAILURE: Final = 1789
+ERROR_TRUST_FAILURE: Final = 1790
+RPC_S_CALL_IN_PROGRESS: Final = 1791
+ERROR_NETLOGON_NOT_STARTED: Final = 1792
+ERROR_ACCOUNT_EXPIRED: Final = 1793
+ERROR_REDIRECTOR_HAS_OPEN_HANDLES: Final = 1794
+ERROR_PRINTER_DRIVER_ALREADY_INSTALLED: Final = 1795
+ERROR_UNKNOWN_PORT: Final = 1796
+ERROR_UNKNOWN_PRINTER_DRIVER: Final = 1797
+ERROR_UNKNOWN_PRINTPROCESSOR: Final = 1798
+ERROR_INVALID_SEPARATOR_FILE: Final = 1799
+ERROR_INVALID_PRIORITY: Final = 1800
+ERROR_INVALID_PRINTER_NAME: Final = 1801
+ERROR_PRINTER_ALREADY_EXISTS: Final = 1802
+ERROR_INVALID_PRINTER_COMMAND: Final = 1803
+ERROR_INVALID_DATATYPE: Final = 1804
+ERROR_INVALID_ENVIRONMENT: Final = 1805
+RPC_S_NO_MORE_BINDINGS: Final = 1806
+ERROR_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT: Final = 1807
+ERROR_NOLOGON_WORKSTATION_TRUST_ACCOUNT: Final = 1808 +ERROR_NOLOGON_SERVER_TRUST_ACCOUNT: Final = 1809 +ERROR_DOMAIN_TRUST_INCONSISTENT: Final = 1810 +ERROR_SERVER_HAS_OPEN_HANDLES: Final = 1811 +ERROR_RESOURCE_DATA_NOT_FOUND: Final = 1812 +ERROR_RESOURCE_TYPE_NOT_FOUND: Final = 1813 +ERROR_RESOURCE_NAME_NOT_FOUND: Final = 1814 +ERROR_RESOURCE_LANG_NOT_FOUND: Final = 1815 +ERROR_NOT_ENOUGH_QUOTA: Final = 1816 +RPC_S_NO_INTERFACES: Final = 1817 +RPC_S_CALL_CANCELLED: Final = 1818 +RPC_S_BINDING_INCOMPLETE: Final = 1819 +RPC_S_COMM_FAILURE: Final = 1820 +RPC_S_UNSUPPORTED_AUTHN_LEVEL: Final = 1821 +RPC_S_NO_PRINC_NAME: Final = 1822 +RPC_S_NOT_RPC_ERROR: Final = 1823 +RPC_S_UUID_LOCAL_ONLY: Final = 1824 +RPC_S_SEC_PKG_ERROR: Final = 1825 +RPC_S_NOT_CANCELLED: Final = 1826 +RPC_X_INVALID_ES_ACTION: Final = 1827 +RPC_X_WRONG_ES_VERSION: Final = 1828 +RPC_X_WRONG_STUB_VERSION: Final = 1829 +RPC_X_INVALID_PIPE_OBJECT: Final = 1830 +RPC_X_WRONG_PIPE_ORDER: Final = 1831 +RPC_X_WRONG_PIPE_VERSION: Final = 1832 +RPC_S_COOKIE_AUTH_FAILED: Final = 1833 +RPC_S_DO_NOT_DISTURB: Final = 1834 +RPC_S_SYSTEM_HANDLE_COUNT_EXCEEDED: Final = 1835 +RPC_S_SYSTEM_HANDLE_TYPE_MISMATCH: Final = 1836 +RPC_S_GROUP_MEMBER_NOT_FOUND: Final = 1898 +EPT_S_CANT_CREATE: Final = 1899 +RPC_S_INVALID_OBJECT: Final = 1900 +ERROR_INVALID_TIME: Final = 1901 +ERROR_INVALID_FORM_NAME: Final = 1902 +ERROR_INVALID_FORM_SIZE: Final = 1903 +ERROR_ALREADY_WAITING: Final = 1904 +ERROR_PRINTER_DELETED: Final = 1905 +ERROR_INVALID_PRINTER_STATE: Final = 1906 +ERROR_PASSWORD_MUST_CHANGE: Final = 1907 +ERROR_DOMAIN_CONTROLLER_NOT_FOUND: Final = 1908 +ERROR_ACCOUNT_LOCKED_OUT: Final = 1909 +OR_INVALID_OXID: Final = 1910 +OR_INVALID_OID: Final = 1911 +OR_INVALID_SET: Final = 1912 +RPC_S_SEND_INCOMPLETE: Final = 1913 +RPC_S_INVALID_ASYNC_HANDLE: Final = 1914 +RPC_S_INVALID_ASYNC_CALL: Final = 1915 +RPC_X_PIPE_CLOSED: Final = 1916 +RPC_X_PIPE_DISCIPLINE_ERROR: Final = 1917 +RPC_X_PIPE_EMPTY: Final = 1918 +ERROR_NO_SITENAME: Final = 1919 +ERROR_CANT_ACCESS_FILE: Final = 1920 +ERROR_CANT_RESOLVE_FILENAME: Final = 1921 +RPC_S_ENTRY_TYPE_MISMATCH: Final = 1922 +RPC_S_NOT_ALL_OBJS_EXPORTED: Final = 1923 +RPC_S_INTERFACE_NOT_EXPORTED: Final = 1924 +RPC_S_PROFILE_NOT_ADDED: Final = 1925 +RPC_S_PRF_ELT_NOT_ADDED: Final = 1926 +RPC_S_PRF_ELT_NOT_REMOVED: Final = 1927 +RPC_S_GRP_ELT_NOT_ADDED: Final = 1928 +RPC_S_GRP_ELT_NOT_REMOVED: Final = 1929 +ERROR_KM_DRIVER_BLOCKED: Final = 1930 +ERROR_CONTEXT_EXPIRED: Final = 1931 +ERROR_PER_USER_TRUST_QUOTA_EXCEEDED: Final = 1932 +ERROR_ALL_USER_TRUST_QUOTA_EXCEEDED: Final = 1933 +ERROR_USER_DELETE_TRUST_QUOTA_EXCEEDED: Final = 1934 +ERROR_AUTHENTICATION_FIREWALL_FAILED: Final = 1935 +ERROR_REMOTE_PRINT_CONNECTIONS_BLOCKED: Final = 1936 +ERROR_NTLM_BLOCKED: Final = 1937 +ERROR_PASSWORD_CHANGE_REQUIRED: Final = 1938 +ERROR_LOST_MODE_LOGON_RESTRICTION: Final = 1939 +ERROR_INVALID_PIXEL_FORMAT: Final = 2000 +ERROR_BAD_DRIVER: Final = 2001 +ERROR_INVALID_WINDOW_STYLE: Final = 2002 +ERROR_METAFILE_NOT_SUPPORTED: Final = 2003 +ERROR_TRANSFORM_NOT_SUPPORTED: Final = 2004 +ERROR_CLIPPING_NOT_SUPPORTED: Final = 2005 +ERROR_INVALID_CMM: Final = 2010 +ERROR_INVALID_PROFILE: Final = 2011 +ERROR_TAG_NOT_FOUND: Final = 2012 +ERROR_TAG_NOT_PRESENT: Final = 2013 +ERROR_DUPLICATE_TAG: Final = 2014 +ERROR_PROFILE_NOT_ASSOCIATED_WITH_DEVICE: Final = 2015 +ERROR_PROFILE_NOT_FOUND: Final = 2016 +ERROR_INVALID_COLORSPACE: Final = 2017 +ERROR_ICM_NOT_ENABLED: Final = 2018 +ERROR_DELETING_ICM_XFORM: Final = 2019 
+ERROR_INVALID_TRANSFORM: Final = 2020 +ERROR_COLORSPACE_MISMATCH: Final = 2021 +ERROR_INVALID_COLORINDEX: Final = 2022 +ERROR_PROFILE_DOES_NOT_MATCH_DEVICE: Final = 2023 +ERROR_CONNECTED_OTHER_PASSWORD: Final = 2108 +ERROR_CONNECTED_OTHER_PASSWORD_DEFAULT: Final = 2109 +ERROR_BAD_USERNAME: Final = 2202 +ERROR_NOT_CONNECTED: Final = 2250 +ERROR_OPEN_FILES: Final = 2401 +ERROR_ACTIVE_CONNECTIONS: Final = 2402 +ERROR_DEVICE_IN_USE: Final = 2404 +ERROR_UNKNOWN_PRINT_MONITOR: Final = 3000 +ERROR_PRINTER_DRIVER_IN_USE: Final = 3001 +ERROR_SPOOL_FILE_NOT_FOUND: Final = 3002 +ERROR_SPL_NO_STARTDOC: Final = 3003 +ERROR_SPL_NO_ADDJOB: Final = 3004 +ERROR_PRINT_PROCESSOR_ALREADY_INSTALLED: Final = 3005 +ERROR_PRINT_MONITOR_ALREADY_INSTALLED: Final = 3006 +ERROR_INVALID_PRINT_MONITOR: Final = 3007 +ERROR_PRINT_MONITOR_IN_USE: Final = 3008 +ERROR_PRINTER_HAS_JOBS_QUEUED: Final = 3009 +ERROR_SUCCESS_REBOOT_REQUIRED: Final = 3010 +ERROR_SUCCESS_RESTART_REQUIRED: Final = 3011 +ERROR_PRINTER_NOT_FOUND: Final = 3012 +ERROR_PRINTER_DRIVER_WARNED: Final = 3013 +ERROR_PRINTER_DRIVER_BLOCKED: Final = 3014 +ERROR_PRINTER_DRIVER_PACKAGE_IN_USE: Final = 3015 +ERROR_CORE_DRIVER_PACKAGE_NOT_FOUND: Final = 3016 +ERROR_FAIL_REBOOT_REQUIRED: Final = 3017 +ERROR_FAIL_REBOOT_INITIATED: Final = 3018 +ERROR_PRINTER_DRIVER_DOWNLOAD_NEEDED: Final = 3019 +ERROR_PRINT_JOB_RESTART_REQUIRED: Final = 3020 +ERROR_INVALID_PRINTER_DRIVER_MANIFEST: Final = 3021 +ERROR_PRINTER_NOT_SHAREABLE: Final = 3022 +ERROR_SERVER_SERVICE_CALL_REQUIRES_SMB1: Final = 3023 +ERROR_NETWORK_AUTHENTICATION_PROMPT_CANCELED: Final = 3024 +ERROR_REQUEST_PAUSED: Final = 3050 +ERROR_APPEXEC_CONDITION_NOT_SATISFIED: Final = 3060 +ERROR_APPEXEC_HANDLE_INVALIDATED: Final = 3061 +ERROR_APPEXEC_INVALID_HOST_GENERATION: Final = 3062 +ERROR_APPEXEC_UNEXPECTED_PROCESS_REGISTRATION: Final = 3063 +ERROR_APPEXEC_INVALID_HOST_STATE: Final = 3064 +ERROR_APPEXEC_NO_DONOR: Final = 3065 +ERROR_APPEXEC_HOST_ID_MISMATCH: Final = 3066 +ERROR_APPEXEC_UNKNOWN_USER: Final = 3067 +ERROR_APPEXEC_APP_COMPAT_BLOCK: Final = 3068 +ERROR_APPEXEC_CALLER_WAIT_TIMEOUT: Final = 3069 +ERROR_APPEXEC_CALLER_WAIT_TIMEOUT_TERMINATION: Final = 3070 +ERROR_APPEXEC_CALLER_WAIT_TIMEOUT_LICENSING: Final = 3071 +ERROR_APPEXEC_CALLER_WAIT_TIMEOUT_RESOURCES: Final = 3072 +ERROR_VRF_VOLATILE_CFG_AND_IO_ENABLED: Final = 3080 +ERROR_VRF_VOLATILE_NOT_STOPPABLE: Final = 3081 +ERROR_VRF_VOLATILE_SAFE_MODE: Final = 3082 +ERROR_VRF_VOLATILE_NOT_RUNNABLE_SYSTEM: Final = 3083 +ERROR_VRF_VOLATILE_NOT_SUPPORTED_RULECLASS: Final = 3084 +ERROR_VRF_VOLATILE_PROTECTED_DRIVER: Final = 3085 +ERROR_VRF_VOLATILE_NMI_REGISTERED: Final = 3086 +ERROR_VRF_VOLATILE_SETTINGS_CONFLICT: Final = 3087 +ERROR_DIF_IOCALLBACK_NOT_REPLACED: Final = 3190 +ERROR_DIF_LIVEDUMP_LIMIT_EXCEEDED: Final = 3191 +ERROR_DIF_VOLATILE_SECTION_NOT_LOCKED: Final = 3192 +ERROR_DIF_VOLATILE_DRIVER_HOTPATCHED: Final = 3193 +ERROR_DIF_VOLATILE_INVALID_INFO: Final = 3194 +ERROR_DIF_VOLATILE_DRIVER_IS_NOT_RUNNING: Final = 3195 +ERROR_DIF_VOLATILE_PLUGIN_IS_NOT_RUNNING: Final = 3196 +ERROR_DIF_VOLATILE_PLUGIN_CHANGE_NOT_ALLOWED: Final = 3197 +ERROR_DIF_VOLATILE_NOT_ALLOWED: Final = 3198 +ERROR_DIF_BINDING_API_NOT_FOUND: Final = 3199 +ERROR_IO_REISSUE_AS_CACHED: Final = 3950 +ERROR_WINS_INTERNAL: Final = 4000 +ERROR_CAN_NOT_DEL_LOCAL_WINS: Final = 4001 +ERROR_STATIC_INIT: Final = 4002 +ERROR_INC_BACKUP: Final = 4003 +ERROR_FULL_BACKUP: Final = 4004 +ERROR_REC_NON_EXISTENT: Final = 4005 +ERROR_RPL_NOT_ALLOWED: Final = 4006 
+PEERDIST_ERROR_CONTENTINFO_VERSION_UNSUPPORTED: Final = 4050 +PEERDIST_ERROR_CANNOT_PARSE_CONTENTINFO: Final = 4051 +PEERDIST_ERROR_MISSING_DATA: Final = 4052 +PEERDIST_ERROR_NO_MORE: Final = 4053 +PEERDIST_ERROR_NOT_INITIALIZED: Final = 4054 +PEERDIST_ERROR_ALREADY_INITIALIZED: Final = 4055 +PEERDIST_ERROR_SHUTDOWN_IN_PROGRESS: Final = 4056 +PEERDIST_ERROR_INVALIDATED: Final = 4057 +PEERDIST_ERROR_ALREADY_EXISTS: Final = 4058 +PEERDIST_ERROR_OPERATION_NOTFOUND: Final = 4059 +PEERDIST_ERROR_ALREADY_COMPLETED: Final = 4060 +PEERDIST_ERROR_OUT_OF_BOUNDS: Final = 4061 +PEERDIST_ERROR_VERSION_UNSUPPORTED: Final = 4062 +PEERDIST_ERROR_INVALID_CONFIGURATION: Final = 4063 +PEERDIST_ERROR_NOT_LICENSED: Final = 4064 +PEERDIST_ERROR_SERVICE_UNAVAILABLE: Final = 4065 +PEERDIST_ERROR_TRUST_FAILURE: Final = 4066 +ERROR_DHCP_ADDRESS_CONFLICT: Final = 4100 +ERROR_WMI_GUID_NOT_FOUND: Final = 4200 +ERROR_WMI_INSTANCE_NOT_FOUND: Final = 4201 +ERROR_WMI_ITEMID_NOT_FOUND: Final = 4202 +ERROR_WMI_TRY_AGAIN: Final = 4203 +ERROR_WMI_DP_NOT_FOUND: Final = 4204 +ERROR_WMI_UNRESOLVED_INSTANCE_REF: Final = 4205 +ERROR_WMI_ALREADY_ENABLED: Final = 4206 +ERROR_WMI_GUID_DISCONNECTED: Final = 4207 +ERROR_WMI_SERVER_UNAVAILABLE: Final = 4208 +ERROR_WMI_DP_FAILED: Final = 4209 +ERROR_WMI_INVALID_MOF: Final = 4210 +ERROR_WMI_INVALID_REGINFO: Final = 4211 +ERROR_WMI_ALREADY_DISABLED: Final = 4212 +ERROR_WMI_READ_ONLY: Final = 4213 +ERROR_WMI_SET_FAILURE: Final = 4214 +ERROR_NOT_APPCONTAINER: Final = 4250 +ERROR_APPCONTAINER_REQUIRED: Final = 4251 +ERROR_NOT_SUPPORTED_IN_APPCONTAINER: Final = 4252 +ERROR_INVALID_PACKAGE_SID_LENGTH: Final = 4253 +ERROR_INVALID_MEDIA: Final = 4300 +ERROR_INVALID_LIBRARY: Final = 4301 +ERROR_INVALID_MEDIA_POOL: Final = 4302 +ERROR_DRIVE_MEDIA_MISMATCH: Final = 4303 +ERROR_MEDIA_OFFLINE: Final = 4304 +ERROR_LIBRARY_OFFLINE: Final = 4305 +ERROR_EMPTY: Final = 4306 +ERROR_NOT_EMPTY: Final = 4307 +ERROR_MEDIA_UNAVAILABLE: Final = 4308 +ERROR_RESOURCE_DISABLED: Final = 4309 +ERROR_INVALID_CLEANER: Final = 4310 +ERROR_UNABLE_TO_CLEAN: Final = 4311 +ERROR_OBJECT_NOT_FOUND: Final = 4312 +ERROR_DATABASE_FAILURE: Final = 4313 +ERROR_DATABASE_FULL: Final = 4314 +ERROR_MEDIA_INCOMPATIBLE: Final = 4315 +ERROR_RESOURCE_NOT_PRESENT: Final = 4316 +ERROR_INVALID_OPERATION: Final = 4317 +ERROR_MEDIA_NOT_AVAILABLE: Final = 4318 +ERROR_DEVICE_NOT_AVAILABLE: Final = 4319 +ERROR_REQUEST_REFUSED: Final = 4320 +ERROR_INVALID_DRIVE_OBJECT: Final = 4321 +ERROR_LIBRARY_FULL: Final = 4322 +ERROR_MEDIUM_NOT_ACCESSIBLE: Final = 4323 +ERROR_UNABLE_TO_LOAD_MEDIUM: Final = 4324 +ERROR_UNABLE_TO_INVENTORY_DRIVE: Final = 4325 +ERROR_UNABLE_TO_INVENTORY_SLOT: Final = 4326 +ERROR_UNABLE_TO_INVENTORY_TRANSPORT: Final = 4327 +ERROR_TRANSPORT_FULL: Final = 4328 +ERROR_CONTROLLING_IEPORT: Final = 4329 +ERROR_UNABLE_TO_EJECT_MOUNTED_MEDIA: Final = 4330 +ERROR_CLEANER_SLOT_SET: Final = 4331 +ERROR_CLEANER_SLOT_NOT_SET: Final = 4332 +ERROR_CLEANER_CARTRIDGE_SPENT: Final = 4333 +ERROR_UNEXPECTED_OMID: Final = 4334 +ERROR_CANT_DELETE_LAST_ITEM: Final = 4335 +ERROR_MESSAGE_EXCEEDS_MAX_SIZE: Final = 4336 +ERROR_VOLUME_CONTAINS_SYS_FILES: Final = 4337 +ERROR_INDIGENOUS_TYPE: Final = 4338 +ERROR_NO_SUPPORTING_DRIVES: Final = 4339 +ERROR_CLEANER_CARTRIDGE_INSTALLED: Final = 4340 +ERROR_IEPORT_FULL: Final = 4341 +ERROR_FILE_OFFLINE: Final = 4350 +ERROR_REMOTE_STORAGE_NOT_ACTIVE: Final = 4351 +ERROR_REMOTE_STORAGE_MEDIA_ERROR: Final = 4352 +ERROR_NOT_A_REPARSE_POINT: Final = 4390 +ERROR_REPARSE_ATTRIBUTE_CONFLICT: Final = 4391 
+ERROR_INVALID_REPARSE_DATA: Final = 4392
+ERROR_REPARSE_TAG_INVALID: Final = 4393
+ERROR_REPARSE_TAG_MISMATCH: Final = 4394
+ERROR_REPARSE_POINT_ENCOUNTERED: Final = 4395
+ERROR_APP_DATA_NOT_FOUND: Final = 4400
+ERROR_APP_DATA_EXPIRED: Final = 4401
+ERROR_APP_DATA_CORRUPT: Final = 4402
+ERROR_APP_DATA_LIMIT_EXCEEDED: Final = 4403
+ERROR_APP_DATA_REBOOT_REQUIRED: Final = 4404
+ERROR_SECUREBOOT_ROLLBACK_DETECTED: Final = 4420
+ERROR_SECUREBOOT_POLICY_VIOLATION: Final = 4421
+ERROR_SECUREBOOT_INVALID_POLICY: Final = 4422
+ERROR_SECUREBOOT_POLICY_PUBLISHER_NOT_FOUND: Final = 4423
+ERROR_SECUREBOOT_POLICY_NOT_SIGNED: Final = 4424
+ERROR_SECUREBOOT_NOT_ENABLED: Final = 4425
+ERROR_SECUREBOOT_FILE_REPLACED: Final = 4426
+ERROR_SECUREBOOT_POLICY_NOT_AUTHORIZED: Final = 4427
+ERROR_SECUREBOOT_POLICY_UNKNOWN: Final = 4428
+ERROR_SECUREBOOT_POLICY_MISSING_ANTIROLLBACKVERSION: Final = 4429
+ERROR_SECUREBOOT_PLATFORM_ID_MISMATCH: Final = 4430
+ERROR_SECUREBOOT_POLICY_ROLLBACK_DETECTED: Final = 4431
+ERROR_SECUREBOOT_POLICY_UPGRADE_MISMATCH: Final = 4432
+ERROR_SECUREBOOT_REQUIRED_POLICY_FILE_MISSING: Final = 4433
+ERROR_SECUREBOOT_NOT_BASE_POLICY: Final = 4434
+ERROR_SECUREBOOT_NOT_SUPPLEMENTAL_POLICY: Final = 4435
+ERROR_OFFLOAD_READ_FLT_NOT_SUPPORTED: Final = 4440
+ERROR_OFFLOAD_WRITE_FLT_NOT_SUPPORTED: Final = 4441
+ERROR_OFFLOAD_READ_FILE_NOT_SUPPORTED: Final = 4442
+ERROR_OFFLOAD_WRITE_FILE_NOT_SUPPORTED: Final = 4443
+ERROR_ALREADY_HAS_STREAM_ID: Final = 4444
+ERROR_SMR_GARBAGE_COLLECTION_REQUIRED: Final = 4445
+ERROR_WOF_WIM_HEADER_CORRUPT: Final = 4446
+ERROR_WOF_WIM_RESOURCE_TABLE_CORRUPT: Final = 4447
+ERROR_WOF_FILE_RESOURCE_TABLE_CORRUPT: Final = 4448
+ERROR_OBJECT_IS_IMMUTABLE: Final = 4449
+ERROR_VOLUME_NOT_SIS_ENABLED: Final = 4500
+ERROR_SYSTEM_INTEGRITY_ROLLBACK_DETECTED: Final = 4550
+ERROR_SYSTEM_INTEGRITY_POLICY_VIOLATION: Final = 4551
+ERROR_SYSTEM_INTEGRITY_INVALID_POLICY: Final = 4552
+ERROR_SYSTEM_INTEGRITY_POLICY_NOT_SIGNED: Final = 4553
+ERROR_SYSTEM_INTEGRITY_TOO_MANY_POLICIES: Final = 4554
+ERROR_SYSTEM_INTEGRITY_SUPPLEMENTAL_POLICY_NOT_AUTHORIZED: Final = 4555
+ERROR_SYSTEM_INTEGRITY_REPUTATION_MALICIOUS: Final = 4556
+ERROR_SYSTEM_INTEGRITY_REPUTATION_PUA: Final = 4557
+ERROR_SYSTEM_INTEGRITY_REPUTATION_DANGEROUS_EXT: Final = 4558
+ERROR_SYSTEM_INTEGRITY_REPUTATION_OFFLINE: Final = 4559
+ERROR_VSM_NOT_INITIALIZED: Final = 4560
+ERROR_VSM_DMA_PROTECTION_NOT_IN_USE: Final = 4561
+ERROR_PLATFORM_MANIFEST_NOT_AUTHORIZED: Final = 4570
+ERROR_PLATFORM_MANIFEST_INVALID: Final = 4571
+ERROR_PLATFORM_MANIFEST_FILE_NOT_AUTHORIZED: Final = 4572
+ERROR_PLATFORM_MANIFEST_CATALOG_NOT_AUTHORIZED: Final = 4573
+ERROR_PLATFORM_MANIFEST_BINARY_ID_NOT_FOUND: Final = 4574
+ERROR_PLATFORM_MANIFEST_NOT_ACTIVE: Final = 4575
+ERROR_PLATFORM_MANIFEST_NOT_SIGNED: Final = 4576
+ERROR_SYSTEM_INTEGRITY_REPUTATION_UNFRIENDLY_FILE: Final = 4580
+ERROR_SYSTEM_INTEGRITY_REPUTATION_UNATTAINABLE: Final = 4581
+ERROR_SYSTEM_INTEGRITY_REPUTATION_EXPLICIT_DENY_FILE: Final = 4582
+ERROR_DEPENDENT_RESOURCE_EXISTS: Final = 5001
+ERROR_DEPENDENCY_NOT_FOUND: Final = 5002
+ERROR_DEPENDENCY_ALREADY_EXISTS: Final = 5003
+ERROR_RESOURCE_NOT_ONLINE: Final = 5004
+ERROR_HOST_NODE_NOT_AVAILABLE: Final = 5005
+ERROR_RESOURCE_NOT_AVAILABLE: Final = 5006
+ERROR_RESOURCE_NOT_FOUND: Final = 5007
+ERROR_SHUTDOWN_CLUSTER: Final = 5008
+ERROR_CANT_EVICT_ACTIVE_NODE: Final = 5009
+ERROR_OBJECT_ALREADY_EXISTS: Final = 5010
+ERROR_OBJECT_IN_LIST: Final = 5011
+ERROR_GROUP_NOT_AVAILABLE: Final = 5012
+ERROR_GROUP_NOT_FOUND: Final = 5013
+ERROR_GROUP_NOT_ONLINE: Final = 5014
+ERROR_HOST_NODE_NOT_RESOURCE_OWNER: Final = 5015
+ERROR_HOST_NODE_NOT_GROUP_OWNER: Final = 5016
+ERROR_RESMON_CREATE_FAILED: Final = 5017
+ERROR_RESMON_ONLINE_FAILED: Final = 5018
+ERROR_RESOURCE_ONLINE: Final = 5019
+ERROR_QUORUM_RESOURCE: Final = 5020
+ERROR_NOT_QUORUM_CAPABLE: Final = 5021
+ERROR_CLUSTER_SHUTTING_DOWN: Final = 5022
+ERROR_INVALID_STATE: Final = 5023
+ERROR_RESOURCE_PROPERTIES_STORED: Final = 5024
+ERROR_NOT_QUORUM_CLASS: Final = 5025
+ERROR_CORE_RESOURCE: Final = 5026
+ERROR_QUORUM_RESOURCE_ONLINE_FAILED: Final = 5027
+ERROR_QUORUMLOG_OPEN_FAILED: Final = 5028
+ERROR_CLUSTERLOG_CORRUPT: Final = 5029
+ERROR_CLUSTERLOG_RECORD_EXCEEDS_MAXSIZE: Final = 5030
+ERROR_CLUSTERLOG_EXCEEDS_MAXSIZE: Final = 5031
+ERROR_CLUSTERLOG_CHKPOINT_NOT_FOUND: Final = 5032
+ERROR_CLUSTERLOG_NOT_ENOUGH_SPACE: Final = 5033
+ERROR_QUORUM_OWNER_ALIVE: Final = 5034
+ERROR_NETWORK_NOT_AVAILABLE: Final = 5035
+ERROR_NODE_NOT_AVAILABLE: Final = 5036
+ERROR_ALL_NODES_NOT_AVAILABLE: Final = 5037
+ERROR_RESOURCE_FAILED: Final = 5038
+ERROR_CLUSTER_INVALID_NODE: Final = 5039
+ERROR_CLUSTER_NODE_EXISTS: Final = 5040
+ERROR_CLUSTER_JOIN_IN_PROGRESS: Final = 5041
+ERROR_CLUSTER_NODE_NOT_FOUND: Final = 5042
+ERROR_CLUSTER_LOCAL_NODE_NOT_FOUND: Final = 5043
+ERROR_CLUSTER_NETWORK_EXISTS: Final = 5044
+ERROR_CLUSTER_NETWORK_NOT_FOUND: Final = 5045
+ERROR_CLUSTER_NETINTERFACE_EXISTS: Final = 5046
+ERROR_CLUSTER_NETINTERFACE_NOT_FOUND: Final = 5047
+ERROR_CLUSTER_INVALID_REQUEST: Final = 5048
+ERROR_CLUSTER_INVALID_NETWORK_PROVIDER: Final = 5049
+ERROR_CLUSTER_NODE_DOWN: Final = 5050
+ERROR_CLUSTER_NODE_UNREACHABLE: Final = 5051
+ERROR_CLUSTER_NODE_NOT_MEMBER: Final = 5052
+ERROR_CLUSTER_JOIN_NOT_IN_PROGRESS: Final = 5053
+ERROR_CLUSTER_INVALID_NETWORK: Final = 5054
+ERROR_CLUSTER_NODE_UP: Final = 5056
+ERROR_CLUSTER_IPADDR_IN_USE: Final = 5057
+ERROR_CLUSTER_NODE_NOT_PAUSED: Final = 5058
+ERROR_CLUSTER_NO_SECURITY_CONTEXT: Final = 5059
+ERROR_CLUSTER_NETWORK_NOT_INTERNAL: Final = 5060
+ERROR_CLUSTER_NODE_ALREADY_UP: Final = 5061
+ERROR_CLUSTER_NODE_ALREADY_DOWN: Final = 5062
+ERROR_CLUSTER_NETWORK_ALREADY_ONLINE: Final = 5063
+ERROR_CLUSTER_NETWORK_ALREADY_OFFLINE: Final = 5064
+ERROR_CLUSTER_NODE_ALREADY_MEMBER: Final = 5065
+ERROR_CLUSTER_LAST_INTERNAL_NETWORK: Final = 5066
+ERROR_CLUSTER_NETWORK_HAS_DEPENDENTS: Final = 5067
+ERROR_INVALID_OPERATION_ON_QUORUM: Final = 5068
+ERROR_DEPENDENCY_NOT_ALLOWED: Final = 5069
+ERROR_CLUSTER_NODE_PAUSED: Final = 5070
+ERROR_NODE_CANT_HOST_RESOURCE: Final = 5071
+ERROR_CLUSTER_NODE_NOT_READY: Final = 5072
+ERROR_CLUSTER_NODE_SHUTTING_DOWN: Final = 5073
+ERROR_CLUSTER_JOIN_ABORTED: Final = 5074
+ERROR_CLUSTER_INCOMPATIBLE_VERSIONS: Final = 5075
+ERROR_CLUSTER_MAXNUM_OF_RESOURCES_EXCEEDED: Final = 5076
+ERROR_CLUSTER_SYSTEM_CONFIG_CHANGED: Final = 5077
+ERROR_CLUSTER_RESOURCE_TYPE_NOT_FOUND: Final = 5078
+ERROR_CLUSTER_RESTYPE_NOT_SUPPORTED: Final = 5079
+ERROR_CLUSTER_RESNAME_NOT_FOUND: Final = 5080
+ERROR_CLUSTER_NO_RPC_PACKAGES_REGISTERED: Final = 5081
+ERROR_CLUSTER_OWNER_NOT_IN_PREFLIST: Final = 5082
+ERROR_CLUSTER_DATABASE_SEQMISMATCH: Final = 5083
+ERROR_RESMON_INVALID_STATE: Final = 5084
+ERROR_CLUSTER_GUM_NOT_LOCKER: Final = 5085
+ERROR_QUORUM_DISK_NOT_FOUND: Final = 5086
+ERROR_DATABASE_BACKUP_CORRUPT: Final = 5087
+ERROR_CLUSTER_NODE_ALREADY_HAS_DFS_ROOT: Final = 5088
+ERROR_RESOURCE_PROPERTY_UNCHANGEABLE: Final = 5089
+ERROR_NO_ADMIN_ACCESS_POINT: Final = 5090
+ERROR_CLUSTER_MEMBERSHIP_INVALID_STATE: Final = 5890
+ERROR_CLUSTER_QUORUMLOG_NOT_FOUND: Final = 5891 +ERROR_CLUSTER_MEMBERSHIP_HALT: Final = 5892 +ERROR_CLUSTER_INSTANCE_ID_MISMATCH: Final = 5893 +ERROR_CLUSTER_NETWORK_NOT_FOUND_FOR_IP: Final = 5894 +ERROR_CLUSTER_PROPERTY_DATA_TYPE_MISMATCH: Final = 5895 +ERROR_CLUSTER_EVICT_WITHOUT_CLEANUP: Final = 5896 +ERROR_CLUSTER_PARAMETER_MISMATCH: Final = 5897 +ERROR_NODE_CANNOT_BE_CLUSTERED: Final = 5898 +ERROR_CLUSTER_WRONG_OS_VERSION: Final = 5899 +ERROR_CLUSTER_CANT_CREATE_DUP_CLUSTER_NAME: Final = 5900 +ERROR_CLUSCFG_ALREADY_COMMITTED: Final = 5901 +ERROR_CLUSCFG_ROLLBACK_FAILED: Final = 5902 +ERROR_CLUSCFG_SYSTEM_DISK_DRIVE_LETTER_CONFLICT: Final = 5903 +ERROR_CLUSTER_OLD_VERSION: Final = 5904 +ERROR_CLUSTER_MISMATCHED_COMPUTER_ACCT_NAME: Final = 5905 +ERROR_CLUSTER_NO_NET_ADAPTERS: Final = 5906 +ERROR_CLUSTER_POISONED: Final = 5907 +ERROR_CLUSTER_GROUP_MOVING: Final = 5908 +ERROR_CLUSTER_RESOURCE_TYPE_BUSY: Final = 5909 +ERROR_RESOURCE_CALL_TIMED_OUT: Final = 5910 +ERROR_INVALID_CLUSTER_IPV6_ADDRESS: Final = 5911 +ERROR_CLUSTER_INTERNAL_INVALID_FUNCTION: Final = 5912 +ERROR_CLUSTER_PARAMETER_OUT_OF_BOUNDS: Final = 5913 +ERROR_CLUSTER_PARTIAL_SEND: Final = 5914 +ERROR_CLUSTER_REGISTRY_INVALID_FUNCTION: Final = 5915 +ERROR_CLUSTER_INVALID_STRING_TERMINATION: Final = 5916 +ERROR_CLUSTER_INVALID_STRING_FORMAT: Final = 5917 +ERROR_CLUSTER_DATABASE_TRANSACTION_IN_PROGRESS: Final = 5918 +ERROR_CLUSTER_DATABASE_TRANSACTION_NOT_IN_PROGRESS: Final = 5919 +ERROR_CLUSTER_NULL_DATA: Final = 5920 +ERROR_CLUSTER_PARTIAL_READ: Final = 5921 +ERROR_CLUSTER_PARTIAL_WRITE: Final = 5922 +ERROR_CLUSTER_CANT_DESERIALIZE_DATA: Final = 5923 +ERROR_DEPENDENT_RESOURCE_PROPERTY_CONFLICT: Final = 5924 +ERROR_CLUSTER_NO_QUORUM: Final = 5925 +ERROR_CLUSTER_INVALID_IPV6_NETWORK: Final = 5926 +ERROR_CLUSTER_INVALID_IPV6_TUNNEL_NETWORK: Final = 5927 +ERROR_QUORUM_NOT_ALLOWED_IN_THIS_GROUP: Final = 5928 +ERROR_DEPENDENCY_TREE_TOO_COMPLEX: Final = 5929 +ERROR_EXCEPTION_IN_RESOURCE_CALL: Final = 5930 +ERROR_CLUSTER_RHS_FAILED_INITIALIZATION: Final = 5931 +ERROR_CLUSTER_NOT_INSTALLED: Final = 5932 +ERROR_CLUSTER_RESOURCES_MUST_BE_ONLINE_ON_THE_SAME_NODE: Final = 5933 +ERROR_CLUSTER_MAX_NODES_IN_CLUSTER: Final = 5934 +ERROR_CLUSTER_TOO_MANY_NODES: Final = 5935 +ERROR_CLUSTER_OBJECT_ALREADY_USED: Final = 5936 +ERROR_NONCORE_GROUPS_FOUND: Final = 5937 +ERROR_FILE_SHARE_RESOURCE_CONFLICT: Final = 5938 +ERROR_CLUSTER_EVICT_INVALID_REQUEST: Final = 5939 +ERROR_CLUSTER_SINGLETON_RESOURCE: Final = 5940 +ERROR_CLUSTER_GROUP_SINGLETON_RESOURCE: Final = 5941 +ERROR_CLUSTER_RESOURCE_PROVIDER_FAILED: Final = 5942 +ERROR_CLUSTER_RESOURCE_CONFIGURATION_ERROR: Final = 5943 +ERROR_CLUSTER_GROUP_BUSY: Final = 5944 +ERROR_CLUSTER_NOT_SHARED_VOLUME: Final = 5945 +ERROR_CLUSTER_INVALID_SECURITY_DESCRIPTOR: Final = 5946 +ERROR_CLUSTER_SHARED_VOLUMES_IN_USE: Final = 5947 +ERROR_CLUSTER_USE_SHARED_VOLUMES_API: Final = 5948 +ERROR_CLUSTER_BACKUP_IN_PROGRESS: Final = 5949 +ERROR_NON_CSV_PATH: Final = 5950 +ERROR_CSV_VOLUME_NOT_LOCAL: Final = 5951 +ERROR_CLUSTER_WATCHDOG_TERMINATING: Final = 5952 +ERROR_CLUSTER_RESOURCE_VETOED_MOVE_INCOMPATIBLE_NODES: Final = 5953 +ERROR_CLUSTER_INVALID_NODE_WEIGHT: Final = 5954 +ERROR_CLUSTER_RESOURCE_VETOED_CALL: Final = 5955 +ERROR_RESMON_SYSTEM_RESOURCES_LACKING: Final = 5956 +ERROR_CLUSTER_RESOURCE_VETOED_MOVE_NOT_ENOUGH_RESOURCES_ON_DESTINATION: Final = 5957 +ERROR_CLUSTER_RESOURCE_VETOED_MOVE_NOT_ENOUGH_RESOURCES_ON_SOURCE: Final = 5958 +ERROR_CLUSTER_GROUP_QUEUED: Final = 5959 
+ERROR_CLUSTER_RESOURCE_LOCKED_STATUS: Final = 5960 +ERROR_CLUSTER_SHARED_VOLUME_FAILOVER_NOT_ALLOWED: Final = 5961 +ERROR_CLUSTER_NODE_DRAIN_IN_PROGRESS: Final = 5962 +ERROR_CLUSTER_DISK_NOT_CONNECTED: Final = 5963 +ERROR_DISK_NOT_CSV_CAPABLE: Final = 5964 +ERROR_RESOURCE_NOT_IN_AVAILABLE_STORAGE: Final = 5965 +ERROR_CLUSTER_SHARED_VOLUME_REDIRECTED: Final = 5966 +ERROR_CLUSTER_SHARED_VOLUME_NOT_REDIRECTED: Final = 5967 +ERROR_CLUSTER_CANNOT_RETURN_PROPERTIES: Final = 5968 +ERROR_CLUSTER_RESOURCE_CONTAINS_UNSUPPORTED_DIFF_AREA_FOR_SHARED_VOLUMES: Final = 5969 +ERROR_CLUSTER_RESOURCE_IS_IN_MAINTENANCE_MODE: Final = 5970 +ERROR_CLUSTER_AFFINITY_CONFLICT: Final = 5971 +ERROR_CLUSTER_RESOURCE_IS_REPLICA_VIRTUAL_MACHINE: Final = 5972 +ERROR_CLUSTER_UPGRADE_INCOMPATIBLE_VERSIONS: Final = 5973 +ERROR_CLUSTER_UPGRADE_FIX_QUORUM_NOT_SUPPORTED: Final = 5974 +ERROR_CLUSTER_UPGRADE_RESTART_REQUIRED: Final = 5975 +ERROR_CLUSTER_UPGRADE_IN_PROGRESS: Final = 5976 +ERROR_CLUSTER_UPGRADE_INCOMPLETE: Final = 5977 +ERROR_CLUSTER_NODE_IN_GRACE_PERIOD: Final = 5978 +ERROR_CLUSTER_CSV_IO_PAUSE_TIMEOUT: Final = 5979 +ERROR_NODE_NOT_ACTIVE_CLUSTER_MEMBER: Final = 5980 +ERROR_CLUSTER_RESOURCE_NOT_MONITORED: Final = 5981 +ERROR_CLUSTER_RESOURCE_DOES_NOT_SUPPORT_UNMONITORED: Final = 5982 +ERROR_CLUSTER_RESOURCE_IS_REPLICATED: Final = 5983 +ERROR_CLUSTER_NODE_ISOLATED: Final = 5984 +ERROR_CLUSTER_NODE_QUARANTINED: Final = 5985 +ERROR_CLUSTER_DATABASE_UPDATE_CONDITION_FAILED: Final = 5986 +ERROR_CLUSTER_SPACE_DEGRADED: Final = 5987 +ERROR_CLUSTER_TOKEN_DELEGATION_NOT_SUPPORTED: Final = 5988 +ERROR_CLUSTER_CSV_INVALID_HANDLE: Final = 5989 +ERROR_CLUSTER_CSV_SUPPORTED_ONLY_ON_COORDINATOR: Final = 5990 +ERROR_GROUPSET_NOT_AVAILABLE: Final = 5991 +ERROR_GROUPSET_NOT_FOUND: Final = 5992 +ERROR_GROUPSET_CANT_PROVIDE: Final = 5993 +ERROR_CLUSTER_FAULT_DOMAIN_PARENT_NOT_FOUND: Final = 5994 +ERROR_CLUSTER_FAULT_DOMAIN_INVALID_HIERARCHY: Final = 5995 +ERROR_CLUSTER_FAULT_DOMAIN_FAILED_S2D_VALIDATION: Final = 5996 +ERROR_CLUSTER_FAULT_DOMAIN_S2D_CONNECTIVITY_LOSS: Final = 5997 +ERROR_CLUSTER_INVALID_INFRASTRUCTURE_FILESERVER_NAME: Final = 5998 +ERROR_CLUSTERSET_MANAGEMENT_CLUSTER_UNREACHABLE: Final = 5999 +ERROR_ENCRYPTION_FAILED: Final = 6000 +ERROR_DECRYPTION_FAILED: Final = 6001 +ERROR_FILE_ENCRYPTED: Final = 6002 +ERROR_NO_RECOVERY_POLICY: Final = 6003 +ERROR_NO_EFS: Final = 6004 +ERROR_WRONG_EFS: Final = 6005 +ERROR_NO_USER_KEYS: Final = 6006 +ERROR_FILE_NOT_ENCRYPTED: Final = 6007 +ERROR_NOT_EXPORT_FORMAT: Final = 6008 +ERROR_FILE_READ_ONLY: Final = 6009 +ERROR_DIR_EFS_DISALLOWED: Final = 6010 +ERROR_EFS_SERVER_NOT_TRUSTED: Final = 6011 +ERROR_BAD_RECOVERY_POLICY: Final = 6012 +ERROR_EFS_ALG_BLOB_TOO_BIG: Final = 6013 +ERROR_VOLUME_NOT_SUPPORT_EFS: Final = 6014 +ERROR_EFS_DISABLED: Final = 6015 +ERROR_EFS_VERSION_NOT_SUPPORT: Final = 6016 +ERROR_CS_ENCRYPTION_INVALID_SERVER_RESPONSE: Final = 6017 +ERROR_CS_ENCRYPTION_UNSUPPORTED_SERVER: Final = 6018 +ERROR_CS_ENCRYPTION_EXISTING_ENCRYPTED_FILE: Final = 6019 +ERROR_CS_ENCRYPTION_NEW_ENCRYPTED_FILE: Final = 6020 +ERROR_CS_ENCRYPTION_FILE_NOT_CSE: Final = 6021 +ERROR_ENCRYPTION_POLICY_DENIES_OPERATION: Final = 6022 +ERROR_WIP_ENCRYPTION_FAILED: Final = 6023 +ERROR_NO_BROWSER_SERVERS_FOUND: Final = 6118 +SCHED_E_SERVICE_NOT_LOCALSYSTEM: Final = 6200 +ERROR_CLUSTER_OBJECT_IS_CLUSTER_SET_VM: Final = 6250 +ERROR_LOG_SECTOR_INVALID: Final = 6600 +ERROR_LOG_SECTOR_PARITY_INVALID: Final = 6601 +ERROR_LOG_SECTOR_REMAPPED: Final = 6602 +ERROR_LOG_BLOCK_INCOMPLETE: Final = 6603 
+ERROR_LOG_INVALID_RANGE: Final = 6604
+ERROR_LOG_BLOCKS_EXHAUSTED: Final = 6605
+ERROR_LOG_READ_CONTEXT_INVALID: Final = 6606
+ERROR_LOG_RESTART_INVALID: Final = 6607
+ERROR_LOG_BLOCK_VERSION: Final = 6608
+ERROR_LOG_BLOCK_INVALID: Final = 6609
+ERROR_LOG_READ_MODE_INVALID: Final = 6610
+ERROR_LOG_NO_RESTART: Final = 6611
+ERROR_LOG_METADATA_CORRUPT: Final = 6612
+ERROR_LOG_METADATA_INVALID: Final = 6613
+ERROR_LOG_METADATA_INCONSISTENT: Final = 6614
+ERROR_LOG_RESERVATION_INVALID: Final = 6615
+ERROR_LOG_CANT_DELETE: Final = 6616
+ERROR_LOG_CONTAINER_LIMIT_EXCEEDED: Final = 6617
+ERROR_LOG_START_OF_LOG: Final = 6618
+ERROR_LOG_POLICY_ALREADY_INSTALLED: Final = 6619
+ERROR_LOG_POLICY_NOT_INSTALLED: Final = 6620
+ERROR_LOG_POLICY_INVALID: Final = 6621
+ERROR_LOG_POLICY_CONFLICT: Final = 6622
+ERROR_LOG_PINNED_ARCHIVE_TAIL: Final = 6623
+ERROR_LOG_RECORD_NONEXISTENT: Final = 6624
+ERROR_LOG_RECORDS_RESERVED_INVALID: Final = 6625
+ERROR_LOG_SPACE_RESERVED_INVALID: Final = 6626
+ERROR_LOG_TAIL_INVALID: Final = 6627
+ERROR_LOG_FULL: Final = 6628
+ERROR_COULD_NOT_RESIZE_LOG: Final = 6629
+ERROR_LOG_MULTIPLEXED: Final = 6630
+ERROR_LOG_DEDICATED: Final = 6631
+ERROR_LOG_ARCHIVE_NOT_IN_PROGRESS: Final = 6632
+ERROR_LOG_ARCHIVE_IN_PROGRESS: Final = 6633
+ERROR_LOG_EPHEMERAL: Final = 6634
+ERROR_LOG_NOT_ENOUGH_CONTAINERS: Final = 6635
+ERROR_LOG_CLIENT_ALREADY_REGISTERED: Final = 6636
+ERROR_LOG_CLIENT_NOT_REGISTERED: Final = 6637
+ERROR_LOG_FULL_HANDLER_IN_PROGRESS: Final = 6638
+ERROR_LOG_CONTAINER_READ_FAILED: Final = 6639
+ERROR_LOG_CONTAINER_WRITE_FAILED: Final = 6640
+ERROR_LOG_CONTAINER_OPEN_FAILED: Final = 6641
+ERROR_LOG_CONTAINER_STATE_INVALID: Final = 6642
+ERROR_LOG_STATE_INVALID: Final = 6643
+ERROR_LOG_PINNED: Final = 6644
+ERROR_LOG_METADATA_FLUSH_FAILED: Final = 6645
+ERROR_LOG_INCONSISTENT_SECURITY: Final = 6646
+ERROR_LOG_APPENDED_FLUSH_FAILED: Final = 6647
+ERROR_LOG_PINNED_RESERVATION: Final = 6648
+ERROR_INVALID_TRANSACTION: Final = 6700
+ERROR_TRANSACTION_NOT_ACTIVE: Final = 6701
+ERROR_TRANSACTION_REQUEST_NOT_VALID: Final = 6702
+ERROR_TRANSACTION_NOT_REQUESTED: Final = 6703
+ERROR_TRANSACTION_ALREADY_ABORTED: Final = 6704
+ERROR_TRANSACTION_ALREADY_COMMITTED: Final = 6705
+ERROR_TM_INITIALIZATION_FAILED: Final = 6706
+ERROR_RESOURCEMANAGER_READ_ONLY: Final = 6707
+ERROR_TRANSACTION_NOT_JOINED: Final = 6708
+ERROR_TRANSACTION_SUPERIOR_EXISTS: Final = 6709
+ERROR_CRM_PROTOCOL_ALREADY_EXISTS: Final = 6710
+ERROR_TRANSACTION_PROPAGATION_FAILED: Final = 6711
+ERROR_CRM_PROTOCOL_NOT_FOUND: Final = 6712
+ERROR_TRANSACTION_INVALID_MARSHALL_BUFFER: Final = 6713
+ERROR_CURRENT_TRANSACTION_NOT_VALID: Final = 6714
+ERROR_TRANSACTION_NOT_FOUND: Final = 6715
+ERROR_RESOURCEMANAGER_NOT_FOUND: Final = 6716
+ERROR_ENLISTMENT_NOT_FOUND: Final = 6717
+ERROR_TRANSACTIONMANAGER_NOT_FOUND: Final = 6718
+ERROR_TRANSACTIONMANAGER_NOT_ONLINE: Final = 6719
+ERROR_TRANSACTIONMANAGER_RECOVERY_NAME_COLLISION: Final = 6720
+ERROR_TRANSACTION_NOT_ROOT: Final = 6721
+ERROR_TRANSACTION_OBJECT_EXPIRED: Final = 6722
+ERROR_TRANSACTION_RESPONSE_NOT_ENLISTED: Final = 6723
+ERROR_TRANSACTION_RECORD_TOO_LONG: Final = 6724
+ERROR_IMPLICIT_TRANSACTION_NOT_SUPPORTED: Final = 6725
+ERROR_TRANSACTION_INTEGRITY_VIOLATED: Final = 6726
+ERROR_TRANSACTIONMANAGER_IDENTITY_MISMATCH: Final = 6727
+ERROR_RM_CANNOT_BE_FROZEN_FOR_SNAPSHOT: Final = 6728
+ERROR_TRANSACTION_MUST_WRITETHROUGH: Final = 6729
+ERROR_TRANSACTION_NO_SUPERIOR: Final = 6730
+ERROR_HEURISTIC_DAMAGE_POSSIBLE: Final = 6731
+ERROR_TRANSACTIONAL_CONFLICT: Final = 6800
+ERROR_RM_NOT_ACTIVE: Final = 6801
+ERROR_RM_METADATA_CORRUPT: Final = 6802
+ERROR_DIRECTORY_NOT_RM: Final = 6803
+ERROR_TRANSACTIONS_UNSUPPORTED_REMOTE: Final = 6805
+ERROR_LOG_RESIZE_INVALID_SIZE: Final = 6806
+ERROR_OBJECT_NO_LONGER_EXISTS: Final = 6807
+ERROR_STREAM_MINIVERSION_NOT_FOUND: Final = 6808
+ERROR_STREAM_MINIVERSION_NOT_VALID: Final = 6809
+ERROR_MINIVERSION_INACCESSIBLE_FROM_SPECIFIED_TRANSACTION: Final = 6810
+ERROR_CANT_OPEN_MINIVERSION_WITH_MODIFY_INTENT: Final = 6811
+ERROR_CANT_CREATE_MORE_STREAM_MINIVERSIONS: Final = 6812
+ERROR_REMOTE_FILE_VERSION_MISMATCH: Final = 6814
+ERROR_HANDLE_NO_LONGER_VALID: Final = 6815
+ERROR_NO_TXF_METADATA: Final = 6816
+ERROR_LOG_CORRUPTION_DETECTED: Final = 6817
+ERROR_CANT_RECOVER_WITH_HANDLE_OPEN: Final = 6818
+ERROR_RM_DISCONNECTED: Final = 6819
+ERROR_ENLISTMENT_NOT_SUPERIOR: Final = 6820
+ERROR_RECOVERY_NOT_NEEDED: Final = 6821
+ERROR_RM_ALREADY_STARTED: Final = 6822
+ERROR_FILE_IDENTITY_NOT_PERSISTENT: Final = 6823
+ERROR_CANT_BREAK_TRANSACTIONAL_DEPENDENCY: Final = 6824
+ERROR_CANT_CROSS_RM_BOUNDARY: Final = 6825
+ERROR_TXF_DIR_NOT_EMPTY: Final = 6826
+ERROR_INDOUBT_TRANSACTIONS_EXIST: Final = 6827
+ERROR_TM_VOLATILE: Final = 6828
+ERROR_ROLLBACK_TIMER_EXPIRED: Final = 6829
+ERROR_TXF_ATTRIBUTE_CORRUPT: Final = 6830
+ERROR_EFS_NOT_ALLOWED_IN_TRANSACTION: Final = 6831
+ERROR_TRANSACTIONAL_OPEN_NOT_ALLOWED: Final = 6832
+ERROR_LOG_GROWTH_FAILED: Final = 6833
+ERROR_TRANSACTED_MAPPING_UNSUPPORTED_REMOTE: Final = 6834
+ERROR_TXF_METADATA_ALREADY_PRESENT: Final = 6835
+ERROR_TRANSACTION_SCOPE_CALLBACKS_NOT_SET: Final = 6836
+ERROR_TRANSACTION_REQUIRED_PROMOTION: Final = 6837
+ERROR_CANNOT_EXECUTE_FILE_IN_TRANSACTION: Final = 6838
+ERROR_TRANSACTIONS_NOT_FROZEN: Final = 6839
+ERROR_TRANSACTION_FREEZE_IN_PROGRESS: Final = 6840
+ERROR_NOT_SNAPSHOT_VOLUME: Final = 6841
+ERROR_NO_SAVEPOINT_WITH_OPEN_FILES: Final = 6842
+ERROR_DATA_LOST_REPAIR: Final = 6843
+ERROR_SPARSE_NOT_ALLOWED_IN_TRANSACTION: Final = 6844
+ERROR_TM_IDENTITY_MISMATCH: Final = 6845
+ERROR_FLOATED_SECTION: Final = 6846
+ERROR_CANNOT_ACCEPT_TRANSACTED_WORK: Final = 6847
+ERROR_CANNOT_ABORT_TRANSACTIONS: Final = 6848
+ERROR_BAD_CLUSTERS: Final = 6849
+ERROR_COMPRESSION_NOT_ALLOWED_IN_TRANSACTION: Final = 6850
+ERROR_VOLUME_DIRTY: Final = 6851
+ERROR_NO_LINK_TRACKING_IN_TRANSACTION: Final = 6852
+ERROR_OPERATION_NOT_SUPPORTED_IN_TRANSACTION: Final = 6853
+ERROR_EXPIRED_HANDLE: Final = 6854
+ERROR_TRANSACTION_NOT_ENLISTED: Final = 6855
+ERROR_CTX_WINSTATION_NAME_INVALID: Final = 7001
+ERROR_CTX_INVALID_PD: Final = 7002
+ERROR_CTX_PD_NOT_FOUND: Final = 7003
+ERROR_CTX_WD_NOT_FOUND: Final = 7004
+ERROR_CTX_CANNOT_MAKE_EVENTLOG_ENTRY: Final = 7005
+ERROR_CTX_SERVICE_NAME_COLLISION: Final = 7006
+ERROR_CTX_CLOSE_PENDING: Final = 7007
+ERROR_CTX_NO_OUTBUF: Final = 7008
+ERROR_CTX_MODEM_INF_NOT_FOUND: Final = 7009
+ERROR_CTX_INVALID_MODEMNAME: Final = 7010
+ERROR_CTX_MODEM_RESPONSE_ERROR: Final = 7011
+ERROR_CTX_MODEM_RESPONSE_TIMEOUT: Final = 7012
+ERROR_CTX_MODEM_RESPONSE_NO_CARRIER: Final = 7013
+ERROR_CTX_MODEM_RESPONSE_NO_DIALTONE: Final = 7014
+ERROR_CTX_MODEM_RESPONSE_BUSY: Final = 7015
+ERROR_CTX_MODEM_RESPONSE_VOICE: Final = 7016
+ERROR_CTX_TD_ERROR: Final = 7017
+ERROR_CTX_WINSTATION_NOT_FOUND: Final = 7022
+ERROR_CTX_WINSTATION_ALREADY_EXISTS: Final = 7023
+ERROR_CTX_WINSTATION_BUSY: Final = 7024
+ERROR_CTX_BAD_VIDEO_MODE: Final = 7025
+ERROR_CTX_GRAPHICS_INVALID: Final = 7035
+ERROR_CTX_LOGON_DISABLED: Final = 7037
+ERROR_CTX_NOT_CONSOLE: Final = 7038
+ERROR_CTX_CLIENT_QUERY_TIMEOUT: Final = 7040 +ERROR_CTX_CONSOLE_DISCONNECT: Final = 7041 +ERROR_CTX_CONSOLE_CONNECT: Final = 7042 +ERROR_CTX_SHADOW_DENIED: Final = 7044 +ERROR_CTX_WINSTATION_ACCESS_DENIED: Final = 7045 +ERROR_CTX_INVALID_WD: Final = 7049 +ERROR_CTX_SHADOW_INVALID: Final = 7050 +ERROR_CTX_SHADOW_DISABLED: Final = 7051 +ERROR_CTX_CLIENT_LICENSE_IN_USE: Final = 7052 +ERROR_CTX_CLIENT_LICENSE_NOT_SET: Final = 7053 +ERROR_CTX_LICENSE_NOT_AVAILABLE: Final = 7054 +ERROR_CTX_LICENSE_CLIENT_INVALID: Final = 7055 +ERROR_CTX_LICENSE_EXPIRED: Final = 7056 +ERROR_CTX_SHADOW_NOT_RUNNING: Final = 7057 +ERROR_CTX_SHADOW_ENDED_BY_MODE_CHANGE: Final = 7058 +ERROR_ACTIVATION_COUNT_EXCEEDED: Final = 7059 +ERROR_CTX_WINSTATIONS_DISABLED: Final = 7060 +ERROR_CTX_ENCRYPTION_LEVEL_REQUIRED: Final = 7061 +ERROR_CTX_SESSION_IN_USE: Final = 7062 +ERROR_CTX_NO_FORCE_LOGOFF: Final = 7063 +ERROR_CTX_ACCOUNT_RESTRICTION: Final = 7064 +ERROR_RDP_PROTOCOL_ERROR: Final = 7065 +ERROR_CTX_CDM_CONNECT: Final = 7066 +ERROR_CTX_CDM_DISCONNECT: Final = 7067 +ERROR_CTX_SECURITY_LAYER_ERROR: Final = 7068 +ERROR_TS_INCOMPATIBLE_SESSIONS: Final = 7069 +ERROR_TS_VIDEO_SUBSYSTEM_ERROR: Final = 7070 +FRS_ERR_INVALID_API_SEQUENCE: Final = 8001 +FRS_ERR_STARTING_SERVICE: Final = 8002 +FRS_ERR_STOPPING_SERVICE: Final = 8003 +FRS_ERR_INTERNAL_API: Final = 8004 +FRS_ERR_INTERNAL: Final = 8005 +FRS_ERR_SERVICE_COMM: Final = 8006 +FRS_ERR_INSUFFICIENT_PRIV: Final = 8007 +FRS_ERR_AUTHENTICATION: Final = 8008 +FRS_ERR_PARENT_INSUFFICIENT_PRIV: Final = 8009 +FRS_ERR_PARENT_AUTHENTICATION: Final = 8010 +FRS_ERR_CHILD_TO_PARENT_COMM: Final = 8011 +FRS_ERR_PARENT_TO_CHILD_COMM: Final = 8012 +FRS_ERR_SYSVOL_POPULATE: Final = 8013 +FRS_ERR_SYSVOL_POPULATE_TIMEOUT: Final = 8014 +FRS_ERR_SYSVOL_IS_BUSY: Final = 8015 +FRS_ERR_SYSVOL_DEMOTE: Final = 8016 +FRS_ERR_INVALID_SERVICE_PARAMETER: Final = 8017 +DS_S_SUCCESS: Final = NO_ERROR +ERROR_DS_NOT_INSTALLED: Final = 8200 +ERROR_DS_MEMBERSHIP_EVALUATED_LOCALLY: Final = 8201 +ERROR_DS_NO_ATTRIBUTE_OR_VALUE: Final = 8202 +ERROR_DS_INVALID_ATTRIBUTE_SYNTAX: Final = 8203 +ERROR_DS_ATTRIBUTE_TYPE_UNDEFINED: Final = 8204 +ERROR_DS_ATTRIBUTE_OR_VALUE_EXISTS: Final = 8205 +ERROR_DS_BUSY: Final = 8206 +ERROR_DS_UNAVAILABLE: Final = 8207 +ERROR_DS_NO_RIDS_ALLOCATED: Final = 8208 +ERROR_DS_NO_MORE_RIDS: Final = 8209 +ERROR_DS_INCORRECT_ROLE_OWNER: Final = 8210 +ERROR_DS_RIDMGR_INIT_ERROR: Final = 8211 +ERROR_DS_OBJ_CLASS_VIOLATION: Final = 8212 +ERROR_DS_CANT_ON_NON_LEAF: Final = 8213 +ERROR_DS_CANT_ON_RDN: Final = 8214 +ERROR_DS_CANT_MOD_OBJ_CLASS: Final = 8215 +ERROR_DS_CROSS_DOM_MOVE_ERROR: Final = 8216 +ERROR_DS_GC_NOT_AVAILABLE: Final = 8217 +ERROR_SHARED_POLICY: Final = 8218 +ERROR_POLICY_OBJECT_NOT_FOUND: Final = 8219 +ERROR_POLICY_ONLY_IN_DS: Final = 8220 +ERROR_PROMOTION_ACTIVE: Final = 8221 +ERROR_NO_PROMOTION_ACTIVE: Final = 8222 +ERROR_DS_OPERATIONS_ERROR: Final = 8224 +ERROR_DS_PROTOCOL_ERROR: Final = 8225 +ERROR_DS_TIMELIMIT_EXCEEDED: Final = 8226 +ERROR_DS_SIZELIMIT_EXCEEDED: Final = 8227 +ERROR_DS_ADMIN_LIMIT_EXCEEDED: Final = 8228 +ERROR_DS_COMPARE_FALSE: Final = 8229 +ERROR_DS_COMPARE_TRUE: Final = 8230 +ERROR_DS_AUTH_METHOD_NOT_SUPPORTED: Final = 8231 +ERROR_DS_STRONG_AUTH_REQUIRED: Final = 8232 +ERROR_DS_INAPPROPRIATE_AUTH: Final = 8233 +ERROR_DS_AUTH_UNKNOWN: Final = 8234 +ERROR_DS_REFERRAL: Final = 8235 +ERROR_DS_UNAVAILABLE_CRIT_EXTENSION: Final = 8236 +ERROR_DS_CONFIDENTIALITY_REQUIRED: Final = 8237 +ERROR_DS_INAPPROPRIATE_MATCHING: Final = 8238 
+ERROR_DS_CONSTRAINT_VIOLATION: Final = 8239 +ERROR_DS_NO_SUCH_OBJECT: Final = 8240 +ERROR_DS_ALIAS_PROBLEM: Final = 8241 +ERROR_DS_INVALID_DN_SYNTAX: Final = 8242 +ERROR_DS_IS_LEAF: Final = 8243 +ERROR_DS_ALIAS_DEREF_PROBLEM: Final = 8244 +ERROR_DS_UNWILLING_TO_PERFORM: Final = 8245 +ERROR_DS_LOOP_DETECT: Final = 8246 +ERROR_DS_NAMING_VIOLATION: Final = 8247 +ERROR_DS_OBJECT_RESULTS_TOO_LARGE: Final = 8248 +ERROR_DS_AFFECTS_MULTIPLE_DSAS: Final = 8249 +ERROR_DS_SERVER_DOWN: Final = 8250 +ERROR_DS_LOCAL_ERROR: Final = 8251 +ERROR_DS_ENCODING_ERROR: Final = 8252 +ERROR_DS_DECODING_ERROR: Final = 8253 +ERROR_DS_FILTER_UNKNOWN: Final = 8254 +ERROR_DS_PARAM_ERROR: Final = 8255 +ERROR_DS_NOT_SUPPORTED: Final = 8256 +ERROR_DS_NO_RESULTS_RETURNED: Final = 8257 +ERROR_DS_CONTROL_NOT_FOUND: Final = 8258 +ERROR_DS_CLIENT_LOOP: Final = 8259 +ERROR_DS_REFERRAL_LIMIT_EXCEEDED: Final = 8260 +ERROR_DS_SORT_CONTROL_MISSING: Final = 8261 +ERROR_DS_OFFSET_RANGE_ERROR: Final = 8262 +ERROR_DS_RIDMGR_DISABLED: Final = 8263 +ERROR_DS_ROOT_MUST_BE_NC: Final = 8301 +ERROR_DS_ADD_REPLICA_INHIBITED: Final = 8302 +ERROR_DS_ATT_NOT_DEF_IN_SCHEMA: Final = 8303 +ERROR_DS_MAX_OBJ_SIZE_EXCEEDED: Final = 8304 +ERROR_DS_OBJ_STRING_NAME_EXISTS: Final = 8305 +ERROR_DS_NO_RDN_DEFINED_IN_SCHEMA: Final = 8306 +ERROR_DS_RDN_DOESNT_MATCH_SCHEMA: Final = 8307 +ERROR_DS_NO_REQUESTED_ATTS_FOUND: Final = 8308 +ERROR_DS_USER_BUFFER_TO_SMALL: Final = 8309 +ERROR_DS_ATT_IS_NOT_ON_OBJ: Final = 8310 +ERROR_DS_ILLEGAL_MOD_OPERATION: Final = 8311 +ERROR_DS_OBJ_TOO_LARGE: Final = 8312 +ERROR_DS_BAD_INSTANCE_TYPE: Final = 8313 +ERROR_DS_MASTERDSA_REQUIRED: Final = 8314 +ERROR_DS_OBJECT_CLASS_REQUIRED: Final = 8315 +ERROR_DS_MISSING_REQUIRED_ATT: Final = 8316 +ERROR_DS_ATT_NOT_DEF_FOR_CLASS: Final = 8317 +ERROR_DS_ATT_ALREADY_EXISTS: Final = 8318 +ERROR_DS_CANT_ADD_ATT_VALUES: Final = 8320 +ERROR_DS_SINGLE_VALUE_CONSTRAINT: Final = 8321 +ERROR_DS_RANGE_CONSTRAINT: Final = 8322 +ERROR_DS_ATT_VAL_ALREADY_EXISTS: Final = 8323 +ERROR_DS_CANT_REM_MISSING_ATT: Final = 8324 +ERROR_DS_CANT_REM_MISSING_ATT_VAL: Final = 8325 +ERROR_DS_ROOT_CANT_BE_SUBREF: Final = 8326 +ERROR_DS_NO_CHAINING: Final = 8327 +ERROR_DS_NO_CHAINED_EVAL: Final = 8328 +ERROR_DS_NO_PARENT_OBJECT: Final = 8329 +ERROR_DS_PARENT_IS_AN_ALIAS: Final = 8330 +ERROR_DS_CANT_MIX_MASTER_AND_REPS: Final = 8331 +ERROR_DS_CHILDREN_EXIST: Final = 8332 +ERROR_DS_OBJ_NOT_FOUND: Final = 8333 +ERROR_DS_ALIASED_OBJ_MISSING: Final = 8334 +ERROR_DS_BAD_NAME_SYNTAX: Final = 8335 +ERROR_DS_ALIAS_POINTS_TO_ALIAS: Final = 8336 +ERROR_DS_CANT_DEREF_ALIAS: Final = 8337 +ERROR_DS_OUT_OF_SCOPE: Final = 8338 +ERROR_DS_OBJECT_BEING_REMOVED: Final = 8339 +ERROR_DS_CANT_DELETE_DSA_OBJ: Final = 8340 +ERROR_DS_GENERIC_ERROR: Final = 8341 +ERROR_DS_DSA_MUST_BE_INT_MASTER: Final = 8342 +ERROR_DS_CLASS_NOT_DSA: Final = 8343 +ERROR_DS_INSUFF_ACCESS_RIGHTS: Final = 8344 +ERROR_DS_ILLEGAL_SUPERIOR: Final = 8345 +ERROR_DS_ATTRIBUTE_OWNED_BY_SAM: Final = 8346 +ERROR_DS_NAME_TOO_MANY_PARTS: Final = 8347 +ERROR_DS_NAME_TOO_LONG: Final = 8348 +ERROR_DS_NAME_VALUE_TOO_LONG: Final = 8349 +ERROR_DS_NAME_UNPARSEABLE: Final = 8350 +ERROR_DS_NAME_TYPE_UNKNOWN: Final = 8351 +ERROR_DS_NOT_AN_OBJECT: Final = 8352 +ERROR_DS_SEC_DESC_TOO_SHORT: Final = 8353 +ERROR_DS_SEC_DESC_INVALID: Final = 8354 +ERROR_DS_NO_DELETED_NAME: Final = 8355 +ERROR_DS_SUBREF_MUST_HAVE_PARENT: Final = 8356 +ERROR_DS_NCNAME_MUST_BE_NC: Final = 8357 +ERROR_DS_CANT_ADD_SYSTEM_ONLY: Final = 8358 +ERROR_DS_CLASS_MUST_BE_CONCRETE: Final = 8359 
+ERROR_DS_INVALID_DMD: Final = 8360 +ERROR_DS_OBJ_GUID_EXISTS: Final = 8361 +ERROR_DS_NOT_ON_BACKLINK: Final = 8362 +ERROR_DS_NO_CROSSREF_FOR_NC: Final = 8363 +ERROR_DS_SHUTTING_DOWN: Final = 8364 +ERROR_DS_UNKNOWN_OPERATION: Final = 8365 +ERROR_DS_INVALID_ROLE_OWNER: Final = 8366 +ERROR_DS_COULDNT_CONTACT_FSMO: Final = 8367 +ERROR_DS_CROSS_NC_DN_RENAME: Final = 8368 +ERROR_DS_CANT_MOD_SYSTEM_ONLY: Final = 8369 +ERROR_DS_REPLICATOR_ONLY: Final = 8370 +ERROR_DS_OBJ_CLASS_NOT_DEFINED: Final = 8371 +ERROR_DS_OBJ_CLASS_NOT_SUBCLASS: Final = 8372 +ERROR_DS_NAME_REFERENCE_INVALID: Final = 8373 +ERROR_DS_CROSS_REF_EXISTS: Final = 8374 +ERROR_DS_CANT_DEL_MASTER_CROSSREF: Final = 8375 +ERROR_DS_SUBTREE_NOTIFY_NOT_NC_HEAD: Final = 8376 +ERROR_DS_NOTIFY_FILTER_TOO_COMPLEX: Final = 8377 +ERROR_DS_DUP_RDN: Final = 8378 +ERROR_DS_DUP_OID: Final = 8379 +ERROR_DS_DUP_MAPI_ID: Final = 8380 +ERROR_DS_DUP_SCHEMA_ID_GUID: Final = 8381 +ERROR_DS_DUP_LDAP_DISPLAY_NAME: Final = 8382 +ERROR_DS_SEMANTIC_ATT_TEST: Final = 8383 +ERROR_DS_SYNTAX_MISMATCH: Final = 8384 +ERROR_DS_EXISTS_IN_MUST_HAVE: Final = 8385 +ERROR_DS_EXISTS_IN_MAY_HAVE: Final = 8386 +ERROR_DS_NONEXISTENT_MAY_HAVE: Final = 8387 +ERROR_DS_NONEXISTENT_MUST_HAVE: Final = 8388 +ERROR_DS_AUX_CLS_TEST_FAIL: Final = 8389 +ERROR_DS_NONEXISTENT_POSS_SUP: Final = 8390 +ERROR_DS_SUB_CLS_TEST_FAIL: Final = 8391 +ERROR_DS_BAD_RDN_ATT_ID_SYNTAX: Final = 8392 +ERROR_DS_EXISTS_IN_AUX_CLS: Final = 8393 +ERROR_DS_EXISTS_IN_SUB_CLS: Final = 8394 +ERROR_DS_EXISTS_IN_POSS_SUP: Final = 8395 +ERROR_DS_RECALCSCHEMA_FAILED: Final = 8396 +ERROR_DS_TREE_DELETE_NOT_FINISHED: Final = 8397 +ERROR_DS_CANT_DELETE: Final = 8398 +ERROR_DS_ATT_SCHEMA_REQ_ID: Final = 8399 +ERROR_DS_BAD_ATT_SCHEMA_SYNTAX: Final = 8400 +ERROR_DS_CANT_CACHE_ATT: Final = 8401 +ERROR_DS_CANT_CACHE_CLASS: Final = 8402 +ERROR_DS_CANT_REMOVE_ATT_CACHE: Final = 8403 +ERROR_DS_CANT_REMOVE_CLASS_CACHE: Final = 8404 +ERROR_DS_CANT_RETRIEVE_DN: Final = 8405 +ERROR_DS_MISSING_SUPREF: Final = 8406 +ERROR_DS_CANT_RETRIEVE_INSTANCE: Final = 8407 +ERROR_DS_CODE_INCONSISTENCY: Final = 8408 +ERROR_DS_DATABASE_ERROR: Final = 8409 +ERROR_DS_GOVERNSID_MISSING: Final = 8410 +ERROR_DS_MISSING_EXPECTED_ATT: Final = 8411 +ERROR_DS_NCNAME_MISSING_CR_REF: Final = 8412 +ERROR_DS_SECURITY_CHECKING_ERROR: Final = 8413 +ERROR_DS_SCHEMA_NOT_LOADED: Final = 8414 +ERROR_DS_SCHEMA_ALLOC_FAILED: Final = 8415 +ERROR_DS_ATT_SCHEMA_REQ_SYNTAX: Final = 8416 +ERROR_DS_GCVERIFY_ERROR: Final = 8417 +ERROR_DS_DRA_SCHEMA_MISMATCH: Final = 8418 +ERROR_DS_CANT_FIND_DSA_OBJ: Final = 8419 +ERROR_DS_CANT_FIND_EXPECTED_NC: Final = 8420 +ERROR_DS_CANT_FIND_NC_IN_CACHE: Final = 8421 +ERROR_DS_CANT_RETRIEVE_CHILD: Final = 8422 +ERROR_DS_SECURITY_ILLEGAL_MODIFY: Final = 8423 +ERROR_DS_CANT_REPLACE_HIDDEN_REC: Final = 8424 +ERROR_DS_BAD_HIERARCHY_FILE: Final = 8425 +ERROR_DS_BUILD_HIERARCHY_TABLE_FAILED: Final = 8426 +ERROR_DS_CONFIG_PARAM_MISSING: Final = 8427 +ERROR_DS_COUNTING_AB_INDICES_FAILED: Final = 8428 +ERROR_DS_HIERARCHY_TABLE_MALLOC_FAILED: Final = 8429 +ERROR_DS_INTERNAL_FAILURE: Final = 8430 +ERROR_DS_UNKNOWN_ERROR: Final = 8431 +ERROR_DS_ROOT_REQUIRES_CLASS_TOP: Final = 8432 +ERROR_DS_REFUSING_FSMO_ROLES: Final = 8433 +ERROR_DS_MISSING_FSMO_SETTINGS: Final = 8434 +ERROR_DS_UNABLE_TO_SURRENDER_ROLES: Final = 8435 +ERROR_DS_DRA_GENERIC: Final = 8436 +ERROR_DS_DRA_INVALID_PARAMETER: Final = 8437 +ERROR_DS_DRA_BUSY: Final = 8438 +ERROR_DS_DRA_BAD_DN: Final = 8439 +ERROR_DS_DRA_BAD_NC: Final = 8440 +ERROR_DS_DRA_DN_EXISTS: Final = 8441 
+ERROR_DS_DRA_INTERNAL_ERROR: Final = 8442 +ERROR_DS_DRA_INCONSISTENT_DIT: Final = 8443 +ERROR_DS_DRA_CONNECTION_FAILED: Final = 8444 +ERROR_DS_DRA_BAD_INSTANCE_TYPE: Final = 8445 +ERROR_DS_DRA_OUT_OF_MEM: Final = 8446 +ERROR_DS_DRA_MAIL_PROBLEM: Final = 8447 +ERROR_DS_DRA_REF_ALREADY_EXISTS: Final = 8448 +ERROR_DS_DRA_REF_NOT_FOUND: Final = 8449 +ERROR_DS_DRA_OBJ_IS_REP_SOURCE: Final = 8450 +ERROR_DS_DRA_DB_ERROR: Final = 8451 +ERROR_DS_DRA_NO_REPLICA: Final = 8452 +ERROR_DS_DRA_ACCESS_DENIED: Final = 8453 +ERROR_DS_DRA_NOT_SUPPORTED: Final = 8454 +ERROR_DS_DRA_RPC_CANCELLED: Final = 8455 +ERROR_DS_DRA_SOURCE_DISABLED: Final = 8456 +ERROR_DS_DRA_SINK_DISABLED: Final = 8457 +ERROR_DS_DRA_NAME_COLLISION: Final = 8458 +ERROR_DS_DRA_SOURCE_REINSTALLED: Final = 8459 +ERROR_DS_DRA_MISSING_PARENT: Final = 8460 +ERROR_DS_DRA_PREEMPTED: Final = 8461 +ERROR_DS_DRA_ABANDON_SYNC: Final = 8462 +ERROR_DS_DRA_SHUTDOWN: Final = 8463 +ERROR_DS_DRA_INCOMPATIBLE_PARTIAL_SET: Final = 8464 +ERROR_DS_DRA_SOURCE_IS_PARTIAL_REPLICA: Final = 8465 +ERROR_DS_DRA_EXTN_CONNECTION_FAILED: Final = 8466 +ERROR_DS_INSTALL_SCHEMA_MISMATCH: Final = 8467 +ERROR_DS_DUP_LINK_ID: Final = 8468 +ERROR_DS_NAME_ERROR_RESOLVING: Final = 8469 +ERROR_DS_NAME_ERROR_NOT_FOUND: Final = 8470 +ERROR_DS_NAME_ERROR_NOT_UNIQUE: Final = 8471 +ERROR_DS_NAME_ERROR_NO_MAPPING: Final = 8472 +ERROR_DS_NAME_ERROR_DOMAIN_ONLY: Final = 8473 +ERROR_DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING: Final = 8474 +ERROR_DS_CONSTRUCTED_ATT_MOD: Final = 8475 +ERROR_DS_WRONG_OM_OBJ_CLASS: Final = 8476 +ERROR_DS_DRA_REPL_PENDING: Final = 8477 +ERROR_DS_DS_REQUIRED: Final = 8478 +ERROR_DS_INVALID_LDAP_DISPLAY_NAME: Final = 8479 +ERROR_DS_NON_BASE_SEARCH: Final = 8480 +ERROR_DS_CANT_RETRIEVE_ATTS: Final = 8481 +ERROR_DS_BACKLINK_WITHOUT_LINK: Final = 8482 +ERROR_DS_EPOCH_MISMATCH: Final = 8483 +ERROR_DS_SRC_NAME_MISMATCH: Final = 8484 +ERROR_DS_SRC_AND_DST_NC_IDENTICAL: Final = 8485 +ERROR_DS_DST_NC_MISMATCH: Final = 8486 +ERROR_DS_NOT_AUTHORITIVE_FOR_DST_NC: Final = 8487 +ERROR_DS_SRC_GUID_MISMATCH: Final = 8488 +ERROR_DS_CANT_MOVE_DELETED_OBJECT: Final = 8489 +ERROR_DS_PDC_OPERATION_IN_PROGRESS: Final = 8490 +ERROR_DS_CROSS_DOMAIN_CLEANUP_REQD: Final = 8491 +ERROR_DS_ILLEGAL_XDOM_MOVE_OPERATION: Final = 8492 +ERROR_DS_CANT_WITH_ACCT_GROUP_MEMBERSHPS: Final = 8493 +ERROR_DS_NC_MUST_HAVE_NC_PARENT: Final = 8494 +ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE: Final = 8495 +ERROR_DS_DST_DOMAIN_NOT_NATIVE: Final = 8496 +ERROR_DS_MISSING_INFRASTRUCTURE_CONTAINER: Final = 8497 +ERROR_DS_CANT_MOVE_ACCOUNT_GROUP: Final = 8498 +ERROR_DS_CANT_MOVE_RESOURCE_GROUP: Final = 8499 +ERROR_DS_INVALID_SEARCH_FLAG: Final = 8500 +ERROR_DS_NO_TREE_DELETE_ABOVE_NC: Final = 8501 +ERROR_DS_COULDNT_LOCK_TREE_FOR_DELETE: Final = 8502 +ERROR_DS_COULDNT_IDENTIFY_OBJECTS_FOR_TREE_DELETE: Final = 8503 +ERROR_DS_SAM_INIT_FAILURE: Final = 8504 +ERROR_DS_SENSITIVE_GROUP_VIOLATION: Final = 8505 +ERROR_DS_CANT_MOD_PRIMARYGROUPID: Final = 8506 +ERROR_DS_ILLEGAL_BASE_SCHEMA_MOD: Final = 8507 +ERROR_DS_NONSAFE_SCHEMA_CHANGE: Final = 8508 +ERROR_DS_SCHEMA_UPDATE_DISALLOWED: Final = 8509 +ERROR_DS_CANT_CREATE_UNDER_SCHEMA: Final = 8510 +ERROR_DS_INSTALL_NO_SRC_SCH_VERSION: Final = 8511 +ERROR_DS_INSTALL_NO_SCH_VERSION_IN_INIFILE: Final = 8512 +ERROR_DS_INVALID_GROUP_TYPE: Final = 8513 +ERROR_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN: Final = 8514 +ERROR_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN: Final = 8515 +ERROR_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER: Final = 8516 +ERROR_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER: Final = 8517 
+ERROR_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER: Final = 8518 +ERROR_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER: Final = 8519 +ERROR_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER: Final = 8520 +ERROR_DS_HAVE_PRIMARY_MEMBERS: Final = 8521 +ERROR_DS_STRING_SD_CONVERSION_FAILED: Final = 8522 +ERROR_DS_NAMING_MASTER_GC: Final = 8523 +ERROR_DS_DNS_LOOKUP_FAILURE: Final = 8524 +ERROR_DS_COULDNT_UPDATE_SPNS: Final = 8525 +ERROR_DS_CANT_RETRIEVE_SD: Final = 8526 +ERROR_DS_KEY_NOT_UNIQUE: Final = 8527 +ERROR_DS_WRONG_LINKED_ATT_SYNTAX: Final = 8528 +ERROR_DS_SAM_NEED_BOOTKEY_PASSWORD: Final = 8529 +ERROR_DS_SAM_NEED_BOOTKEY_FLOPPY: Final = 8530 +ERROR_DS_CANT_START: Final = 8531 +ERROR_DS_INIT_FAILURE: Final = 8532 +ERROR_DS_NO_PKT_PRIVACY_ON_CONNECTION: Final = 8533 +ERROR_DS_SOURCE_DOMAIN_IN_FOREST: Final = 8534 +ERROR_DS_DESTINATION_DOMAIN_NOT_IN_FOREST: Final = 8535 +ERROR_DS_DESTINATION_AUDITING_NOT_ENABLED: Final = 8536 +ERROR_DS_CANT_FIND_DC_FOR_SRC_DOMAIN: Final = 8537 +ERROR_DS_SRC_OBJ_NOT_GROUP_OR_USER: Final = 8538 +ERROR_DS_SRC_SID_EXISTS_IN_FOREST: Final = 8539 +ERROR_DS_SRC_AND_DST_OBJECT_CLASS_MISMATCH: Final = 8540 +ERROR_SAM_INIT_FAILURE: Final = 8541 +ERROR_DS_DRA_SCHEMA_INFO_SHIP: Final = 8542 +ERROR_DS_DRA_SCHEMA_CONFLICT: Final = 8543 +ERROR_DS_DRA_EARLIER_SCHEMA_CONFLICT: Final = 8544 +ERROR_DS_DRA_OBJ_NC_MISMATCH: Final = 8545 +ERROR_DS_NC_STILL_HAS_DSAS: Final = 8546 +ERROR_DS_GC_REQUIRED: Final = 8547 +ERROR_DS_LOCAL_MEMBER_OF_LOCAL_ONLY: Final = 8548 +ERROR_DS_NO_FPO_IN_UNIVERSAL_GROUPS: Final = 8549 +ERROR_DS_CANT_ADD_TO_GC: Final = 8550 +ERROR_DS_NO_CHECKPOINT_WITH_PDC: Final = 8551 +ERROR_DS_SOURCE_AUDITING_NOT_ENABLED: Final = 8552 +ERROR_DS_CANT_CREATE_IN_NONDOMAIN_NC: Final = 8553 +ERROR_DS_INVALID_NAME_FOR_SPN: Final = 8554 +ERROR_DS_FILTER_USES_CONTRUCTED_ATTRS: Final = 8555 +ERROR_DS_UNICODEPWD_NOT_IN_QUOTES: Final = 8556 +ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED: Final = 8557 +ERROR_DS_MUST_BE_RUN_ON_DST_DC: Final = 8558 +ERROR_DS_SRC_DC_MUST_BE_SP4_OR_GREATER: Final = 8559 +ERROR_DS_CANT_TREE_DELETE_CRITICAL_OBJ: Final = 8560 +ERROR_DS_INIT_FAILURE_CONSOLE: Final = 8561 +ERROR_DS_SAM_INIT_FAILURE_CONSOLE: Final = 8562 +ERROR_DS_FOREST_VERSION_TOO_HIGH: Final = 8563 +ERROR_DS_DOMAIN_VERSION_TOO_HIGH: Final = 8564 +ERROR_DS_FOREST_VERSION_TOO_LOW: Final = 8565 +ERROR_DS_DOMAIN_VERSION_TOO_LOW: Final = 8566 +ERROR_DS_INCOMPATIBLE_VERSION: Final = 8567 +ERROR_DS_LOW_DSA_VERSION: Final = 8568 +ERROR_DS_NO_BEHAVIOR_VERSION_IN_MIXEDDOMAIN: Final = 8569 +ERROR_DS_NOT_SUPPORTED_SORT_ORDER: Final = 8570 +ERROR_DS_NAME_NOT_UNIQUE: Final = 8571 +ERROR_DS_MACHINE_ACCOUNT_CREATED_PRENT4: Final = 8572 +ERROR_DS_OUT_OF_VERSION_STORE: Final = 8573 +ERROR_DS_INCOMPATIBLE_CONTROLS_USED: Final = 8574 +ERROR_DS_NO_REF_DOMAIN: Final = 8575 +ERROR_DS_RESERVED_LINK_ID: Final = 8576 +ERROR_DS_LINK_ID_NOT_AVAILABLE: Final = 8577 +ERROR_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER: Final = 8578 +ERROR_DS_MODIFYDN_DISALLOWED_BY_INSTANCE_TYPE: Final = 8579 +ERROR_DS_NO_OBJECT_MOVE_IN_SCHEMA_NC: Final = 8580 +ERROR_DS_MODIFYDN_DISALLOWED_BY_FLAG: Final = 8581 +ERROR_DS_MODIFYDN_WRONG_GRANDPARENT: Final = 8582 +ERROR_DS_NAME_ERROR_TRUST_REFERRAL: Final = 8583 +ERROR_NOT_SUPPORTED_ON_STANDARD_SERVER: Final = 8584 +ERROR_DS_CANT_ACCESS_REMOTE_PART_OF_AD: Final = 8585 +ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE_V2: Final = 8586 +ERROR_DS_THREAD_LIMIT_EXCEEDED: Final = 8587 +ERROR_DS_NOT_CLOSEST: Final = 8588 +ERROR_DS_CANT_DERIVE_SPN_WITHOUT_SERVER_REF: Final = 8589 +ERROR_DS_SINGLE_USER_MODE_FAILED: Final = 8590 
+ERROR_DS_NTDSCRIPT_SYNTAX_ERROR: Final = 8591 +ERROR_DS_NTDSCRIPT_PROCESS_ERROR: Final = 8592 +ERROR_DS_DIFFERENT_REPL_EPOCHS: Final = 8593 +ERROR_DS_DRS_EXTENSIONS_CHANGED: Final = 8594 +ERROR_DS_REPLICA_SET_CHANGE_NOT_ALLOWED_ON_DISABLED_CR: Final = 8595 +ERROR_DS_NO_MSDS_INTID: Final = 8596 +ERROR_DS_DUP_MSDS_INTID: Final = 8597 +ERROR_DS_EXISTS_IN_RDNATTID: Final = 8598 +ERROR_DS_AUTHORIZATION_FAILED: Final = 8599 +ERROR_DS_INVALID_SCRIPT: Final = 8600 +ERROR_DS_REMOTE_CROSSREF_OP_FAILED: Final = 8601 +ERROR_DS_CROSS_REF_BUSY: Final = 8602 +ERROR_DS_CANT_DERIVE_SPN_FOR_DELETED_DOMAIN: Final = 8603 +ERROR_DS_CANT_DEMOTE_WITH_WRITEABLE_NC: Final = 8604 +ERROR_DS_DUPLICATE_ID_FOUND: Final = 8605 +ERROR_DS_INSUFFICIENT_ATTR_TO_CREATE_OBJECT: Final = 8606 +ERROR_DS_GROUP_CONVERSION_ERROR: Final = 8607 +ERROR_DS_CANT_MOVE_APP_BASIC_GROUP: Final = 8608 +ERROR_DS_CANT_MOVE_APP_QUERY_GROUP: Final = 8609 +ERROR_DS_ROLE_NOT_VERIFIED: Final = 8610 +ERROR_DS_WKO_CONTAINER_CANNOT_BE_SPECIAL: Final = 8611 +ERROR_DS_DOMAIN_RENAME_IN_PROGRESS: Final = 8612 +ERROR_DS_EXISTING_AD_CHILD_NC: Final = 8613 +ERROR_DS_REPL_LIFETIME_EXCEEDED: Final = 8614 +ERROR_DS_DISALLOWED_IN_SYSTEM_CONTAINER: Final = 8615 +ERROR_DS_LDAP_SEND_QUEUE_FULL: Final = 8616 +ERROR_DS_DRA_OUT_SCHEDULE_WINDOW: Final = 8617 +ERROR_DS_POLICY_NOT_KNOWN: Final = 8618 +ERROR_NO_SITE_SETTINGS_OBJECT: Final = 8619 +ERROR_NO_SECRETS: Final = 8620 +ERROR_NO_WRITABLE_DC_FOUND: Final = 8621 +ERROR_DS_NO_SERVER_OBJECT: Final = 8622 +ERROR_DS_NO_NTDSA_OBJECT: Final = 8623 +ERROR_DS_NON_ASQ_SEARCH: Final = 8624 +ERROR_DS_AUDIT_FAILURE: Final = 8625 +ERROR_DS_INVALID_SEARCH_FLAG_SUBTREE: Final = 8626 +ERROR_DS_INVALID_SEARCH_FLAG_TUPLE: Final = 8627 +ERROR_DS_HIERARCHY_TABLE_TOO_DEEP: Final = 8628 +ERROR_DS_DRA_CORRUPT_UTD_VECTOR: Final = 8629 +ERROR_DS_DRA_SECRETS_DENIED: Final = 8630 +ERROR_DS_RESERVED_MAPI_ID: Final = 8631 +ERROR_DS_MAPI_ID_NOT_AVAILABLE: Final = 8632 +ERROR_DS_DRA_MISSING_KRBTGT_SECRET: Final = 8633 +ERROR_DS_DOMAIN_NAME_EXISTS_IN_FOREST: Final = 8634 +ERROR_DS_FLAT_NAME_EXISTS_IN_FOREST: Final = 8635 +ERROR_INVALID_USER_PRINCIPAL_NAME: Final = 8636 +ERROR_DS_OID_MAPPED_GROUP_CANT_HAVE_MEMBERS: Final = 8637 +ERROR_DS_OID_NOT_FOUND: Final = 8638 +ERROR_DS_DRA_RECYCLED_TARGET: Final = 8639 +ERROR_DS_DISALLOWED_NC_REDIRECT: Final = 8640 +ERROR_DS_HIGH_ADLDS_FFL: Final = 8641 +ERROR_DS_HIGH_DSA_VERSION: Final = 8642 +ERROR_DS_LOW_ADLDS_FFL: Final = 8643 +ERROR_DOMAIN_SID_SAME_AS_LOCAL_WORKSTATION: Final = 8644 +ERROR_DS_UNDELETE_SAM_VALIDATION_FAILED: Final = 8645 +ERROR_INCORRECT_ACCOUNT_TYPE: Final = 8646 +ERROR_DS_SPN_VALUE_NOT_UNIQUE_IN_FOREST: Final = 8647 +ERROR_DS_UPN_VALUE_NOT_UNIQUE_IN_FOREST: Final = 8648 +ERROR_DS_MISSING_FOREST_TRUST: Final = 8649 +ERROR_DS_VALUE_KEY_NOT_UNIQUE: Final = 8650 +ERROR_WEAK_WHFBKEY_BLOCKED: Final = 8651 +ERROR_DS_PER_ATTRIBUTE_AUTHZ_FAILED_DURING_ADD: Final = 8652 +ERROR_LOCAL_POLICY_MODIFICATION_NOT_SUPPORTED: Final = 8653 +ERROR_POLICY_CONTROLLED_ACCOUNT: Final = 8654 +ERROR_LAPS_LEGACY_SCHEMA_MISSING: Final = 8655 +ERROR_LAPS_SCHEMA_MISSING: Final = 8656 +ERROR_LAPS_ENCRYPTION_REQUIRES_2016_DFL: Final = 8657 +DNS_ERROR_RESPONSE_CODES_BASE: Final = 9000 +DNS_ERROR_RCODE_NO_ERROR: Final = NO_ERROR +DNS_ERROR_MASK: Final = 0x00002328 +DNS_ERROR_RCODE_FORMAT_ERROR: Final = 9001 +DNS_ERROR_RCODE_SERVER_FAILURE: Final = 9002 +DNS_ERROR_RCODE_NAME_ERROR: Final = 9003 +DNS_ERROR_RCODE_NOT_IMPLEMENTED: Final = 9004 +DNS_ERROR_RCODE_REFUSED: Final = 9005 +DNS_ERROR_RCODE_YXDOMAIN: Final = 9006 
+DNS_ERROR_RCODE_YXRRSET: Final = 9007 +DNS_ERROR_RCODE_NXRRSET: Final = 9008 +DNS_ERROR_RCODE_NOTAUTH: Final = 9009 +DNS_ERROR_RCODE_NOTZONE: Final = 9010 +DNS_ERROR_RCODE_BADSIG: Final = 9016 +DNS_ERROR_RCODE_BADKEY: Final = 9017 +DNS_ERROR_RCODE_BADTIME: Final = 9018 +DNS_ERROR_RCODE_LAST: Final = DNS_ERROR_RCODE_BADTIME +DNS_ERROR_DNSSEC_BASE: Final = 9100 +DNS_ERROR_KEYMASTER_REQUIRED: Final = 9101 +DNS_ERROR_NOT_ALLOWED_ON_SIGNED_ZONE: Final = 9102 +DNS_ERROR_NSEC3_INCOMPATIBLE_WITH_RSA_SHA1: Final = 9103 +DNS_ERROR_NOT_ENOUGH_SIGNING_KEY_DESCRIPTORS: Final = 9104 +DNS_ERROR_UNSUPPORTED_ALGORITHM: Final = 9105 +DNS_ERROR_INVALID_KEY_SIZE: Final = 9106 +DNS_ERROR_SIGNING_KEY_NOT_ACCESSIBLE: Final = 9107 +DNS_ERROR_KSP_DOES_NOT_SUPPORT_PROTECTION: Final = 9108 +DNS_ERROR_UNEXPECTED_DATA_PROTECTION_ERROR: Final = 9109 +DNS_ERROR_UNEXPECTED_CNG_ERROR: Final = 9110 +DNS_ERROR_UNKNOWN_SIGNING_PARAMETER_VERSION: Final = 9111 +DNS_ERROR_KSP_NOT_ACCESSIBLE: Final = 9112 +DNS_ERROR_TOO_MANY_SKDS: Final = 9113 +DNS_ERROR_INVALID_ROLLOVER_PERIOD: Final = 9114 +DNS_ERROR_INVALID_INITIAL_ROLLOVER_OFFSET: Final = 9115 +DNS_ERROR_ROLLOVER_IN_PROGRESS: Final = 9116 +DNS_ERROR_STANDBY_KEY_NOT_PRESENT: Final = 9117 +DNS_ERROR_NOT_ALLOWED_ON_ZSK: Final = 9118 +DNS_ERROR_NOT_ALLOWED_ON_ACTIVE_SKD: Final = 9119 +DNS_ERROR_ROLLOVER_ALREADY_QUEUED: Final = 9120 +DNS_ERROR_NOT_ALLOWED_ON_UNSIGNED_ZONE: Final = 9121 +DNS_ERROR_BAD_KEYMASTER: Final = 9122 +DNS_ERROR_INVALID_SIGNATURE_VALIDITY_PERIOD: Final = 9123 +DNS_ERROR_INVALID_NSEC3_ITERATION_COUNT: Final = 9124 +DNS_ERROR_DNSSEC_IS_DISABLED: Final = 9125 +DNS_ERROR_INVALID_XML: Final = 9126 +DNS_ERROR_NO_VALID_TRUST_ANCHORS: Final = 9127 +DNS_ERROR_ROLLOVER_NOT_POKEABLE: Final = 9128 +DNS_ERROR_NSEC3_NAME_COLLISION: Final = 9129 +DNS_ERROR_NSEC_INCOMPATIBLE_WITH_NSEC3_RSA_SHA1: Final = 9130 +DNS_ERROR_PACKET_FMT_BASE: Final = 9500 +DNS_INFO_NO_RECORDS: Final = 9501 +DNS_ERROR_BAD_PACKET: Final = 9502 +DNS_ERROR_NO_PACKET: Final = 9503 +DNS_ERROR_RCODE: Final = 9504 +DNS_ERROR_UNSECURE_PACKET: Final = 9505 +DNS_STATUS_PACKET_UNSECURE: Final = DNS_ERROR_UNSECURE_PACKET +DNS_REQUEST_PENDING: Final = 9506 +DNS_ERROR_NO_MEMORY: Final = ERROR_OUTOFMEMORY +DNS_ERROR_INVALID_NAME: Final = ERROR_INVALID_NAME +DNS_ERROR_INVALID_DATA: Final = ERROR_INVALID_DATA +DNS_ERROR_GENERAL_API_BASE: Final = 9550 +DNS_ERROR_INVALID_TYPE: Final = 9551 +DNS_ERROR_INVALID_IP_ADDRESS: Final = 9552 +DNS_ERROR_INVALID_PROPERTY: Final = 9553 +DNS_ERROR_TRY_AGAIN_LATER: Final = 9554 +DNS_ERROR_NOT_UNIQUE: Final = 9555 +DNS_ERROR_NON_RFC_NAME: Final = 9556 +DNS_STATUS_FQDN: Final = 9557 +DNS_STATUS_DOTTED_NAME: Final = 9558 +DNS_STATUS_SINGLE_PART_NAME: Final = 9559 +DNS_ERROR_INVALID_NAME_CHAR: Final = 9560 +DNS_ERROR_NUMERIC_NAME: Final = 9561 +DNS_ERROR_NOT_ALLOWED_ON_ROOT_SERVER: Final = 9562 +DNS_ERROR_NOT_ALLOWED_UNDER_DELEGATION: Final = 9563 +DNS_ERROR_CANNOT_FIND_ROOT_HINTS: Final = 9564 +DNS_ERROR_INCONSISTENT_ROOT_HINTS: Final = 9565 +DNS_ERROR_DWORD_VALUE_TOO_SMALL: Final = 9566 +DNS_ERROR_DWORD_VALUE_TOO_LARGE: Final = 9567 +DNS_ERROR_BACKGROUND_LOADING: Final = 9568 +DNS_ERROR_NOT_ALLOWED_ON_RODC: Final = 9569 +DNS_ERROR_NOT_ALLOWED_UNDER_DNAME: Final = 9570 +DNS_ERROR_DELEGATION_REQUIRED: Final = 9571 +DNS_ERROR_INVALID_POLICY_TABLE: Final = 9572 +DNS_ERROR_ADDRESS_REQUIRED: Final = 9573 +DNS_ERROR_ZONE_BASE: Final = 9600 +DNS_ERROR_ZONE_DOES_NOT_EXIST: Final = 9601 +DNS_ERROR_NO_ZONE_INFO: Final = 9602 +DNS_ERROR_INVALID_ZONE_OPERATION: Final = 9603 
+DNS_ERROR_ZONE_CONFIGURATION_ERROR: Final = 9604 +DNS_ERROR_ZONE_HAS_NO_SOA_RECORD: Final = 9605 +DNS_ERROR_ZONE_HAS_NO_NS_RECORDS: Final = 9606 +DNS_ERROR_ZONE_LOCKED: Final = 9607 +DNS_ERROR_ZONE_CREATION_FAILED: Final = 9608 +DNS_ERROR_ZONE_ALREADY_EXISTS: Final = 9609 +DNS_ERROR_AUTOZONE_ALREADY_EXISTS: Final = 9610 +DNS_ERROR_INVALID_ZONE_TYPE: Final = 9611 +DNS_ERROR_SECONDARY_REQUIRES_MASTER_IP: Final = 9612 +DNS_ERROR_ZONE_NOT_SECONDARY: Final = 9613 +DNS_ERROR_NEED_SECONDARY_ADDRESSES: Final = 9614 +DNS_ERROR_WINS_INIT_FAILED: Final = 9615 +DNS_ERROR_NEED_WINS_SERVERS: Final = 9616 +DNS_ERROR_NBSTAT_INIT_FAILED: Final = 9617 +DNS_ERROR_SOA_DELETE_INVALID: Final = 9618 +DNS_ERROR_FORWARDER_ALREADY_EXISTS: Final = 9619 +DNS_ERROR_ZONE_REQUIRES_MASTER_IP: Final = 9620 +DNS_ERROR_ZONE_IS_SHUTDOWN: Final = 9621 +DNS_ERROR_ZONE_LOCKED_FOR_SIGNING: Final = 9622 +DNS_ERROR_DATAFILE_BASE: Final = 9650 +DNS_ERROR_PRIMARY_REQUIRES_DATAFILE: Final = 9651 +DNS_ERROR_INVALID_DATAFILE_NAME: Final = 9652 +DNS_ERROR_DATAFILE_OPEN_FAILURE: Final = 9653 +DNS_ERROR_FILE_WRITEBACK_FAILED: Final = 9654 +DNS_ERROR_DATAFILE_PARSING: Final = 9655 +DNS_ERROR_DATABASE_BASE: Final = 9700 +DNS_ERROR_RECORD_DOES_NOT_EXIST: Final = 9701 +DNS_ERROR_RECORD_FORMAT: Final = 9702 +DNS_ERROR_NODE_CREATION_FAILED: Final = 9703 +DNS_ERROR_UNKNOWN_RECORD_TYPE: Final = 9704 +DNS_ERROR_RECORD_TIMED_OUT: Final = 9705 +DNS_ERROR_NAME_NOT_IN_ZONE: Final = 9706 +DNS_ERROR_CNAME_LOOP: Final = 9707 +DNS_ERROR_NODE_IS_CNAME: Final = 9708 +DNS_ERROR_CNAME_COLLISION: Final = 9709 +DNS_ERROR_RECORD_ONLY_AT_ZONE_ROOT: Final = 9710 +DNS_ERROR_RECORD_ALREADY_EXISTS: Final = 9711 +DNS_ERROR_SECONDARY_DATA: Final = 9712 +DNS_ERROR_NO_CREATE_CACHE_DATA: Final = 9713 +DNS_ERROR_NAME_DOES_NOT_EXIST: Final = 9714 +DNS_WARNING_PTR_CREATE_FAILED: Final = 9715 +DNS_WARNING_DOMAIN_UNDELETED: Final = 9716 +DNS_ERROR_DS_UNAVAILABLE: Final = 9717 +DNS_ERROR_DS_ZONE_ALREADY_EXISTS: Final = 9718 +DNS_ERROR_NO_BOOTFILE_IF_DS_ZONE: Final = 9719 +DNS_ERROR_NODE_IS_DNAME: Final = 9720 +DNS_ERROR_DNAME_COLLISION: Final = 9721 +DNS_ERROR_ALIAS_LOOP: Final = 9722 +DNS_ERROR_OPERATION_BASE: Final = 9750 +DNS_INFO_AXFR_COMPLETE: Final = 9751 +DNS_ERROR_AXFR: Final = 9752 +DNS_INFO_ADDED_LOCAL_WINS: Final = 9753 +DNS_ERROR_SECURE_BASE: Final = 9800 +DNS_STATUS_CONTINUE_NEEDED: Final = 9801 +DNS_ERROR_SETUP_BASE: Final = 9850 +DNS_ERROR_NO_TCPIP: Final = 9851 +DNS_ERROR_NO_DNS_SERVERS: Final = 9852 +DNS_ERROR_DP_BASE: Final = 9900 +DNS_ERROR_DP_DOES_NOT_EXIST: Final = 9901 +DNS_ERROR_DP_ALREADY_EXISTS: Final = 9902 +DNS_ERROR_DP_NOT_ENLISTED: Final = 9903 +DNS_ERROR_DP_ALREADY_ENLISTED: Final = 9904 +DNS_ERROR_DP_NOT_AVAILABLE: Final = 9905 +DNS_ERROR_DP_FSMO_ERROR: Final = 9906 +DNS_ERROR_RRL_NOT_ENABLED: Final = 9911 +DNS_ERROR_RRL_INVALID_WINDOW_SIZE: Final = 9912 +DNS_ERROR_RRL_INVALID_IPV4_PREFIX: Final = 9913 +DNS_ERROR_RRL_INVALID_IPV6_PREFIX: Final = 9914 +DNS_ERROR_RRL_INVALID_TC_RATE: Final = 9915 +DNS_ERROR_RRL_INVALID_LEAK_RATE: Final = 9916 +DNS_ERROR_RRL_LEAK_RATE_LESSTHAN_TC_RATE: Final = 9917 +DNS_ERROR_VIRTUALIZATION_INSTANCE_ALREADY_EXISTS: Final = 9921 +DNS_ERROR_VIRTUALIZATION_INSTANCE_DOES_NOT_EXIST: Final = 9922 +DNS_ERROR_VIRTUALIZATION_TREE_LOCKED: Final = 9923 +DNS_ERROR_INVAILD_VIRTUALIZATION_INSTANCE_NAME: Final = 9924 +DNS_ERROR_DEFAULT_VIRTUALIZATION_INSTANCE: Final = 9925 +DNS_ERROR_ZONESCOPE_ALREADY_EXISTS: Final = 9951 +DNS_ERROR_ZONESCOPE_DOES_NOT_EXIST: Final = 9952 +DNS_ERROR_DEFAULT_ZONESCOPE: Final = 9953 
+DNS_ERROR_INVALID_ZONESCOPE_NAME: Final = 9954 +DNS_ERROR_NOT_ALLOWED_WITH_ZONESCOPES: Final = 9955 +DNS_ERROR_LOAD_ZONESCOPE_FAILED: Final = 9956 +DNS_ERROR_ZONESCOPE_FILE_WRITEBACK_FAILED: Final = 9957 +DNS_ERROR_INVALID_SCOPE_NAME: Final = 9958 +DNS_ERROR_SCOPE_DOES_NOT_EXIST: Final = 9959 +DNS_ERROR_DEFAULT_SCOPE: Final = 9960 +DNS_ERROR_INVALID_SCOPE_OPERATION: Final = 9961 +DNS_ERROR_SCOPE_LOCKED: Final = 9962 +DNS_ERROR_SCOPE_ALREADY_EXISTS: Final = 9963 +DNS_ERROR_POLICY_ALREADY_EXISTS: Final = 9971 +DNS_ERROR_POLICY_DOES_NOT_EXIST: Final = 9972 +DNS_ERROR_POLICY_INVALID_CRITERIA: Final = 9973 +DNS_ERROR_POLICY_INVALID_SETTINGS: Final = 9974 +DNS_ERROR_CLIENT_SUBNET_IS_ACCESSED: Final = 9975 +DNS_ERROR_CLIENT_SUBNET_DOES_NOT_EXIST: Final = 9976 +DNS_ERROR_CLIENT_SUBNET_ALREADY_EXISTS: Final = 9977 +DNS_ERROR_SUBNET_DOES_NOT_EXIST: Final = 9978 +DNS_ERROR_SUBNET_ALREADY_EXISTS: Final = 9979 +DNS_ERROR_POLICY_LOCKED: Final = 9980 +DNS_ERROR_POLICY_INVALID_WEIGHT: Final = 9981 +DNS_ERROR_POLICY_INVALID_NAME: Final = 9982 +DNS_ERROR_POLICY_MISSING_CRITERIA: Final = 9983 +DNS_ERROR_INVALID_CLIENT_SUBNET_NAME: Final = 9984 +DNS_ERROR_POLICY_PROCESSING_ORDER_INVALID: Final = 9985 +DNS_ERROR_POLICY_SCOPE_MISSING: Final = 9986 +DNS_ERROR_POLICY_SCOPE_NOT_ALLOWED: Final = 9987 +DNS_ERROR_SERVERSCOPE_IS_REFERENCED: Final = 9988 +DNS_ERROR_ZONESCOPE_IS_REFERENCED: Final = 9989 +DNS_ERROR_POLICY_INVALID_CRITERIA_CLIENT_SUBNET: Final = 9990 +DNS_ERROR_POLICY_INVALID_CRITERIA_TRANSPORT_PROTOCOL: Final = 9991 +DNS_ERROR_POLICY_INVALID_CRITERIA_NETWORK_PROTOCOL: Final = 9992 +DNS_ERROR_POLICY_INVALID_CRITERIA_INTERFACE: Final = 9993 +DNS_ERROR_POLICY_INVALID_CRITERIA_FQDN: Final = 9994 +DNS_ERROR_POLICY_INVALID_CRITERIA_QUERY_TYPE: Final = 9995 +DNS_ERROR_POLICY_INVALID_CRITERIA_TIME_OF_DAY: Final = 9996 +WSABASEERR: Final = 10000 +WSAEINTR: Final = 10004 +WSAEBADF: Final = 10009 +WSAEACCES: Final = 10013 +WSAEFAULT: Final = 10014 +WSAEINVAL: Final = 10022 +WSAEMFILE: Final = 10024 +WSAEWOULDBLOCK: Final = 10035 +WSAEINPROGRESS: Final = 10036 +WSAEALREADY: Final = 10037 +WSAENOTSOCK: Final = 10038 +WSAEDESTADDRREQ: Final = 10039 +WSAEMSGSIZE: Final = 10040 +WSAEPROTOTYPE: Final = 10041 +WSAENOPROTOOPT: Final = 10042 +WSAEPROTONOSUPPORT: Final = 10043 +WSAESOCKTNOSUPPORT: Final = 10044 +WSAEOPNOTSUPP: Final = 10045 +WSAEPFNOSUPPORT: Final = 10046 +WSAEAFNOSUPPORT: Final = 10047 +WSAEADDRINUSE: Final = 10048 +WSAEADDRNOTAVAIL: Final = 10049 +WSAENETDOWN: Final = 10050 +WSAENETUNREACH: Final = 10051 +WSAENETRESET: Final = 10052 +WSAECONNABORTED: Final = 10053 +WSAECONNRESET: Final = 10054 +WSAENOBUFS: Final = 10055 +WSAEISCONN: Final = 10056 +WSAENOTCONN: Final = 10057 +WSAESHUTDOWN: Final = 10058 +WSAETOOMANYREFS: Final = 10059 +WSAETIMEDOUT: Final = 10060 +WSAECONNREFUSED: Final = 10061 +WSAELOOP: Final = 10062 +WSAENAMETOOLONG: Final = 10063 +WSAEHOSTDOWN: Final = 10064 +WSAEHOSTUNREACH: Final = 10065 +WSAENOTEMPTY: Final = 10066 +WSAEPROCLIM: Final = 10067 +WSAEUSERS: Final = 10068 +WSAEDQUOT: Final = 10069 +WSAESTALE: Final = 10070 +WSAEREMOTE: Final = 10071 +WSASYSNOTREADY: Final = 10091 +WSAVERNOTSUPPORTED: Final = 10092 +WSANOTINITIALISED: Final = 10093 +WSAEDISCON: Final = 10101 +WSAENOMORE: Final = 10102 +WSAECANCELLED: Final = 10103 +WSAEINVALIDPROCTABLE: Final = 10104 +WSAEINVALIDPROVIDER: Final = 10105 +WSAEPROVIDERFAILEDINIT: Final = 10106 +WSASYSCALLFAILURE: Final = 10107 +WSASERVICE_NOT_FOUND: Final = 10108 +WSATYPE_NOT_FOUND: Final = 10109 +WSA_E_NO_MORE: Final = 10110 
+WSA_E_CANCELLED: Final = 10111 +WSAEREFUSED: Final = 10112 +WSAHOST_NOT_FOUND: Final = 11001 +WSATRY_AGAIN: Final = 11002 +WSANO_RECOVERY: Final = 11003 +WSANO_DATA: Final = 11004 +WSA_QOS_RECEIVERS: Final = 11005 +WSA_QOS_SENDERS: Final = 11006 +WSA_QOS_NO_SENDERS: Final = 11007 +WSA_QOS_NO_RECEIVERS: Final = 11008 +WSA_QOS_REQUEST_CONFIRMED: Final = 11009 +WSA_QOS_ADMISSION_FAILURE: Final = 11010 +WSA_QOS_POLICY_FAILURE: Final = 11011 +WSA_QOS_BAD_STYLE: Final = 11012 +WSA_QOS_BAD_OBJECT: Final = 11013 +WSA_QOS_TRAFFIC_CTRL_ERROR: Final = 11014 +WSA_QOS_GENERIC_ERROR: Final = 11015 +WSA_QOS_ESERVICETYPE: Final = 11016 +WSA_QOS_EFLOWSPEC: Final = 11017 +WSA_QOS_EPROVSPECBUF: Final = 11018 +WSA_QOS_EFILTERSTYLE: Final = 11019 +WSA_QOS_EFILTERTYPE: Final = 11020 +WSA_QOS_EFILTERCOUNT: Final = 11021 +WSA_QOS_EOBJLENGTH: Final = 11022 +WSA_QOS_EFLOWCOUNT: Final = 11023 +WSA_QOS_EUNKOWNPSOBJ: Final = 11024 +WSA_QOS_EPOLICYOBJ: Final = 11025 +WSA_QOS_EFLOWDESC: Final = 11026 +WSA_QOS_EPSFLOWSPEC: Final = 11027 +WSA_QOS_EPSFILTERSPEC: Final = 11028 +WSA_QOS_ESDMODEOBJ: Final = 11029 +WSA_QOS_ESHAPERATEOBJ: Final = 11030 +WSA_QOS_RESERVED_PETYPE: Final = 11031 +WSA_SECURE_HOST_NOT_FOUND: Final = 11032 +WSA_IPSEC_NAME_POLICY_ERROR: Final = 11033 +ERROR_IPSEC_QM_POLICY_EXISTS: Final = 13000 +ERROR_IPSEC_QM_POLICY_NOT_FOUND: Final = 13001 +ERROR_IPSEC_QM_POLICY_IN_USE: Final = 13002 +ERROR_IPSEC_MM_POLICY_EXISTS: Final = 13003 +ERROR_IPSEC_MM_POLICY_NOT_FOUND: Final = 13004 +ERROR_IPSEC_MM_POLICY_IN_USE: Final = 13005 +ERROR_IPSEC_MM_FILTER_EXISTS: Final = 13006 +ERROR_IPSEC_MM_FILTER_NOT_FOUND: Final = 13007 +ERROR_IPSEC_TRANSPORT_FILTER_EXISTS: Final = 13008 +ERROR_IPSEC_TRANSPORT_FILTER_NOT_FOUND: Final = 13009 +ERROR_IPSEC_MM_AUTH_EXISTS: Final = 13010 +ERROR_IPSEC_MM_AUTH_NOT_FOUND: Final = 13011 +ERROR_IPSEC_MM_AUTH_IN_USE: Final = 13012 +ERROR_IPSEC_DEFAULT_MM_POLICY_NOT_FOUND: Final = 13013 +ERROR_IPSEC_DEFAULT_MM_AUTH_NOT_FOUND: Final = 13014 +ERROR_IPSEC_DEFAULT_QM_POLICY_NOT_FOUND: Final = 13015 +ERROR_IPSEC_TUNNEL_FILTER_EXISTS: Final = 13016 +ERROR_IPSEC_TUNNEL_FILTER_NOT_FOUND: Final = 13017 +ERROR_IPSEC_MM_FILTER_PENDING_DELETION: Final = 13018 +ERROR_IPSEC_TRANSPORT_FILTER_PENDING_DELETION: Final = 13019 +ERROR_IPSEC_TUNNEL_FILTER_PENDING_DELETION: Final = 13020 +ERROR_IPSEC_MM_POLICY_PENDING_DELETION: Final = 13021 +ERROR_IPSEC_MM_AUTH_PENDING_DELETION: Final = 13022 +ERROR_IPSEC_QM_POLICY_PENDING_DELETION: Final = 13023 +WARNING_IPSEC_MM_POLICY_PRUNED: Final = 13024 +WARNING_IPSEC_QM_POLICY_PRUNED: Final = 13025 +ERROR_IPSEC_IKE_NEG_STATUS_BEGIN: Final = 13800 +ERROR_IPSEC_IKE_AUTH_FAIL: Final = 13801 +ERROR_IPSEC_IKE_ATTRIB_FAIL: Final = 13802 +ERROR_IPSEC_IKE_NEGOTIATION_PENDING: Final = 13803 +ERROR_IPSEC_IKE_GENERAL_PROCESSING_ERROR: Final = 13804 +ERROR_IPSEC_IKE_TIMED_OUT: Final = 13805 +ERROR_IPSEC_IKE_NO_CERT: Final = 13806 +ERROR_IPSEC_IKE_SA_DELETED: Final = 13807 +ERROR_IPSEC_IKE_SA_REAPED: Final = 13808 +ERROR_IPSEC_IKE_MM_ACQUIRE_DROP: Final = 13809 +ERROR_IPSEC_IKE_QM_ACQUIRE_DROP: Final = 13810 +ERROR_IPSEC_IKE_QUEUE_DROP_MM: Final = 13811 +ERROR_IPSEC_IKE_QUEUE_DROP_NO_MM: Final = 13812 +ERROR_IPSEC_IKE_DROP_NO_RESPONSE: Final = 13813 +ERROR_IPSEC_IKE_MM_DELAY_DROP: Final = 13814 +ERROR_IPSEC_IKE_QM_DELAY_DROP: Final = 13815 +ERROR_IPSEC_IKE_ERROR: Final = 13816 +ERROR_IPSEC_IKE_CRL_FAILED: Final = 13817 +ERROR_IPSEC_IKE_INVALID_KEY_USAGE: Final = 13818 +ERROR_IPSEC_IKE_INVALID_CERT_TYPE: Final = 13819 +ERROR_IPSEC_IKE_NO_PRIVATE_KEY: Final = 13820 
+ERROR_IPSEC_IKE_SIMULTANEOUS_REKEY: Final = 13821 +ERROR_IPSEC_IKE_DH_FAIL: Final = 13822 +ERROR_IPSEC_IKE_CRITICAL_PAYLOAD_NOT_RECOGNIZED: Final = 13823 +ERROR_IPSEC_IKE_INVALID_HEADER: Final = 13824 +ERROR_IPSEC_IKE_NO_POLICY: Final = 13825 +ERROR_IPSEC_IKE_INVALID_SIGNATURE: Final = 13826 +ERROR_IPSEC_IKE_KERBEROS_ERROR: Final = 13827 +ERROR_IPSEC_IKE_NO_PUBLIC_KEY: Final = 13828 +ERROR_IPSEC_IKE_PROCESS_ERR: Final = 13829 +ERROR_IPSEC_IKE_PROCESS_ERR_SA: Final = 13830 +ERROR_IPSEC_IKE_PROCESS_ERR_PROP: Final = 13831 +ERROR_IPSEC_IKE_PROCESS_ERR_TRANS: Final = 13832 +ERROR_IPSEC_IKE_PROCESS_ERR_KE: Final = 13833 +ERROR_IPSEC_IKE_PROCESS_ERR_ID: Final = 13834 +ERROR_IPSEC_IKE_PROCESS_ERR_CERT: Final = 13835 +ERROR_IPSEC_IKE_PROCESS_ERR_CERT_REQ: Final = 13836 +ERROR_IPSEC_IKE_PROCESS_ERR_HASH: Final = 13837 +ERROR_IPSEC_IKE_PROCESS_ERR_SIG: Final = 13838 +ERROR_IPSEC_IKE_PROCESS_ERR_NONCE: Final = 13839 +ERROR_IPSEC_IKE_PROCESS_ERR_NOTIFY: Final = 13840 +ERROR_IPSEC_IKE_PROCESS_ERR_DELETE: Final = 13841 +ERROR_IPSEC_IKE_PROCESS_ERR_VENDOR: Final = 13842 +ERROR_IPSEC_IKE_INVALID_PAYLOAD: Final = 13843 +ERROR_IPSEC_IKE_LOAD_SOFT_SA: Final = 13844 +ERROR_IPSEC_IKE_SOFT_SA_TORN_DOWN: Final = 13845 +ERROR_IPSEC_IKE_INVALID_COOKIE: Final = 13846 +ERROR_IPSEC_IKE_NO_PEER_CERT: Final = 13847 +ERROR_IPSEC_IKE_PEER_CRL_FAILED: Final = 13848 +ERROR_IPSEC_IKE_POLICY_CHANGE: Final = 13849 +ERROR_IPSEC_IKE_NO_MM_POLICY: Final = 13850 +ERROR_IPSEC_IKE_NOTCBPRIV: Final = 13851 +ERROR_IPSEC_IKE_SECLOADFAIL: Final = 13852 +ERROR_IPSEC_IKE_FAILSSPINIT: Final = 13853 +ERROR_IPSEC_IKE_FAILQUERYSSP: Final = 13854 +ERROR_IPSEC_IKE_SRVACQFAIL: Final = 13855 +ERROR_IPSEC_IKE_SRVQUERYCRED: Final = 13856 +ERROR_IPSEC_IKE_GETSPIFAIL: Final = 13857 +ERROR_IPSEC_IKE_INVALID_FILTER: Final = 13858 +ERROR_IPSEC_IKE_OUT_OF_MEMORY: Final = 13859 +ERROR_IPSEC_IKE_ADD_UPDATE_KEY_FAILED: Final = 13860 +ERROR_IPSEC_IKE_INVALID_POLICY: Final = 13861 +ERROR_IPSEC_IKE_UNKNOWN_DOI: Final = 13862 +ERROR_IPSEC_IKE_INVALID_SITUATION: Final = 13863 +ERROR_IPSEC_IKE_DH_FAILURE: Final = 13864 +ERROR_IPSEC_IKE_INVALID_GROUP: Final = 13865 +ERROR_IPSEC_IKE_ENCRYPT: Final = 13866 +ERROR_IPSEC_IKE_DECRYPT: Final = 13867 +ERROR_IPSEC_IKE_POLICY_MATCH: Final = 13868 +ERROR_IPSEC_IKE_UNSUPPORTED_ID: Final = 13869 +ERROR_IPSEC_IKE_INVALID_HASH: Final = 13870 +ERROR_IPSEC_IKE_INVALID_HASH_ALG: Final = 13871 +ERROR_IPSEC_IKE_INVALID_HASH_SIZE: Final = 13872 +ERROR_IPSEC_IKE_INVALID_ENCRYPT_ALG: Final = 13873 +ERROR_IPSEC_IKE_INVALID_AUTH_ALG: Final = 13874 +ERROR_IPSEC_IKE_INVALID_SIG: Final = 13875 +ERROR_IPSEC_IKE_LOAD_FAILED: Final = 13876 +ERROR_IPSEC_IKE_RPC_DELETE: Final = 13877 +ERROR_IPSEC_IKE_BENIGN_REINIT: Final = 13878 +ERROR_IPSEC_IKE_INVALID_RESPONDER_LIFETIME_NOTIFY: Final = 13879 +ERROR_IPSEC_IKE_INVALID_MAJOR_VERSION: Final = 13880 +ERROR_IPSEC_IKE_INVALID_CERT_KEYLEN: Final = 13881 +ERROR_IPSEC_IKE_MM_LIMIT: Final = 13882 +ERROR_IPSEC_IKE_NEGOTIATION_DISABLED: Final = 13883 +ERROR_IPSEC_IKE_QM_LIMIT: Final = 13884 +ERROR_IPSEC_IKE_MM_EXPIRED: Final = 13885 +ERROR_IPSEC_IKE_PEER_MM_ASSUMED_INVALID: Final = 13886 +ERROR_IPSEC_IKE_CERT_CHAIN_POLICY_MISMATCH: Final = 13887 +ERROR_IPSEC_IKE_UNEXPECTED_MESSAGE_ID: Final = 13888 +ERROR_IPSEC_IKE_INVALID_AUTH_PAYLOAD: Final = 13889 +ERROR_IPSEC_IKE_DOS_COOKIE_SENT: Final = 13890 +ERROR_IPSEC_IKE_SHUTTING_DOWN: Final = 13891 +ERROR_IPSEC_IKE_CGA_AUTH_FAILED: Final = 13892 +ERROR_IPSEC_IKE_PROCESS_ERR_NATOA: Final = 13893 +ERROR_IPSEC_IKE_INVALID_MM_FOR_QM: Final = 13894 
+ERROR_IPSEC_IKE_QM_EXPIRED: Final = 13895 +ERROR_IPSEC_IKE_TOO_MANY_FILTERS: Final = 13896 +ERROR_IPSEC_IKE_NEG_STATUS_END: Final = 13897 +ERROR_IPSEC_IKE_KILL_DUMMY_NAP_TUNNEL: Final = 13898 +ERROR_IPSEC_IKE_INNER_IP_ASSIGNMENT_FAILURE: Final = 13899 +ERROR_IPSEC_IKE_REQUIRE_CP_PAYLOAD_MISSING: Final = 13900 +ERROR_IPSEC_KEY_MODULE_IMPERSONATION_NEGOTIATION_PENDING: Final = 13901 +ERROR_IPSEC_IKE_COEXISTENCE_SUPPRESS: Final = 13902 +ERROR_IPSEC_IKE_RATELIMIT_DROP: Final = 13903 +ERROR_IPSEC_IKE_PEER_DOESNT_SUPPORT_MOBIKE: Final = 13904 +ERROR_IPSEC_IKE_AUTHORIZATION_FAILURE: Final = 13905 +ERROR_IPSEC_IKE_STRONG_CRED_AUTHORIZATION_FAILURE: Final = 13906 +ERROR_IPSEC_IKE_AUTHORIZATION_FAILURE_WITH_OPTIONAL_RETRY: Final = 13907 +ERROR_IPSEC_IKE_STRONG_CRED_AUTHORIZATION_AND_CERTMAP_FAILURE: Final = 13908 +ERROR_IPSEC_IKE_NEG_STATUS_EXTENDED_END: Final = 13909 +ERROR_IPSEC_BAD_SPI: Final = 13910 +ERROR_IPSEC_SA_LIFETIME_EXPIRED: Final = 13911 +ERROR_IPSEC_WRONG_SA: Final = 13912 +ERROR_IPSEC_REPLAY_CHECK_FAILED: Final = 13913 +ERROR_IPSEC_INVALID_PACKET: Final = 13914 +ERROR_IPSEC_INTEGRITY_CHECK_FAILED: Final = 13915 +ERROR_IPSEC_CLEAR_TEXT_DROP: Final = 13916 +ERROR_IPSEC_AUTH_FIREWALL_DROP: Final = 13917 +ERROR_IPSEC_THROTTLE_DROP: Final = 13918 +ERROR_IPSEC_DOSP_BLOCK: Final = 13925 +ERROR_IPSEC_DOSP_RECEIVED_MULTICAST: Final = 13926 +ERROR_IPSEC_DOSP_INVALID_PACKET: Final = 13927 +ERROR_IPSEC_DOSP_STATE_LOOKUP_FAILED: Final = 13928 +ERROR_IPSEC_DOSP_MAX_ENTRIES: Final = 13929 +ERROR_IPSEC_DOSP_KEYMOD_NOT_ALLOWED: Final = 13930 +ERROR_IPSEC_DOSP_NOT_INSTALLED: Final = 13931 +ERROR_IPSEC_DOSP_MAX_PER_IP_RATELIMIT_QUEUES: Final = 13932 +ERROR_SXS_SECTION_NOT_FOUND: Final = 14000 +ERROR_SXS_CANT_GEN_ACTCTX: Final = 14001 +ERROR_SXS_INVALID_ACTCTXDATA_FORMAT: Final = 14002 +ERROR_SXS_ASSEMBLY_NOT_FOUND: Final = 14003 +ERROR_SXS_MANIFEST_FORMAT_ERROR: Final = 14004 +ERROR_SXS_MANIFEST_PARSE_ERROR: Final = 14005 +ERROR_SXS_ACTIVATION_CONTEXT_DISABLED: Final = 14006 +ERROR_SXS_KEY_NOT_FOUND: Final = 14007 +ERROR_SXS_VERSION_CONFLICT: Final = 14008 +ERROR_SXS_WRONG_SECTION_TYPE: Final = 14009 +ERROR_SXS_THREAD_QUERIES_DISABLED: Final = 14010 +ERROR_SXS_PROCESS_DEFAULT_ALREADY_SET: Final = 14011 +ERROR_SXS_UNKNOWN_ENCODING_GROUP: Final = 14012 +ERROR_SXS_UNKNOWN_ENCODING: Final = 14013 +ERROR_SXS_INVALID_XML_NAMESPACE_URI: Final = 14014 +ERROR_SXS_ROOT_MANIFEST_DEPENDENCY_NOT_INSTALLED: Final = 14015 +ERROR_SXS_LEAF_MANIFEST_DEPENDENCY_NOT_INSTALLED: Final = 14016 +ERROR_SXS_INVALID_ASSEMBLY_IDENTITY_ATTRIBUTE: Final = 14017 +ERROR_SXS_MANIFEST_MISSING_REQUIRED_DEFAULT_NAMESPACE: Final = 14018 +ERROR_SXS_MANIFEST_INVALID_REQUIRED_DEFAULT_NAMESPACE: Final = 14019 +ERROR_SXS_PRIVATE_MANIFEST_CROSS_PATH_WITH_REPARSE_POINT: Final = 14020 +ERROR_SXS_DUPLICATE_DLL_NAME: Final = 14021 +ERROR_SXS_DUPLICATE_WINDOWCLASS_NAME: Final = 14022 +ERROR_SXS_DUPLICATE_CLSID: Final = 14023 +ERROR_SXS_DUPLICATE_IID: Final = 14024 +ERROR_SXS_DUPLICATE_TLBID: Final = 14025 +ERROR_SXS_DUPLICATE_PROGID: Final = 14026 +ERROR_SXS_DUPLICATE_ASSEMBLY_NAME: Final = 14027 +ERROR_SXS_FILE_HASH_MISMATCH: Final = 14028 +ERROR_SXS_POLICY_PARSE_ERROR: Final = 14029 +ERROR_SXS_XML_E_MISSINGQUOTE: Final = 14030 +ERROR_SXS_XML_E_COMMENTSYNTAX: Final = 14031 +ERROR_SXS_XML_E_BADSTARTNAMECHAR: Final = 14032 +ERROR_SXS_XML_E_BADNAMECHAR: Final = 14033 +ERROR_SXS_XML_E_BADCHARINSTRING: Final = 14034 +ERROR_SXS_XML_E_XMLDECLSYNTAX: Final = 14035 +ERROR_SXS_XML_E_BADCHARDATA: Final = 14036 +ERROR_SXS_XML_E_MISSINGWHITESPACE: Final = 14037 
+ERROR_SXS_XML_E_EXPECTINGTAGEND: Final = 14038 +ERROR_SXS_XML_E_MISSINGSEMICOLON: Final = 14039 +ERROR_SXS_XML_E_UNBALANCEDPAREN: Final = 14040 +ERROR_SXS_XML_E_INTERNALERROR: Final = 14041 +ERROR_SXS_XML_E_UNEXPECTED_WHITESPACE: Final = 14042 +ERROR_SXS_XML_E_INCOMPLETE_ENCODING: Final = 14043 +ERROR_SXS_XML_E_MISSING_PAREN: Final = 14044 +ERROR_SXS_XML_E_EXPECTINGCLOSEQUOTE: Final = 14045 +ERROR_SXS_XML_E_MULTIPLE_COLONS: Final = 14046 +ERROR_SXS_XML_E_INVALID_DECIMAL: Final = 14047 +ERROR_SXS_XML_E_INVALID_HEXIDECIMAL: Final = 14048 +ERROR_SXS_XML_E_INVALID_UNICODE: Final = 14049 +ERROR_SXS_XML_E_WHITESPACEORQUESTIONMARK: Final = 14050 +ERROR_SXS_XML_E_UNEXPECTEDENDTAG: Final = 14051 +ERROR_SXS_XML_E_UNCLOSEDTAG: Final = 14052 +ERROR_SXS_XML_E_DUPLICATEATTRIBUTE: Final = 14053 +ERROR_SXS_XML_E_MULTIPLEROOTS: Final = 14054 +ERROR_SXS_XML_E_INVALIDATROOTLEVEL: Final = 14055 +ERROR_SXS_XML_E_BADXMLDECL: Final = 14056 +ERROR_SXS_XML_E_MISSINGROOT: Final = 14057 +ERROR_SXS_XML_E_UNEXPECTEDEOF: Final = 14058 +ERROR_SXS_XML_E_BADPEREFINSUBSET: Final = 14059 +ERROR_SXS_XML_E_UNCLOSEDSTARTTAG: Final = 14060 +ERROR_SXS_XML_E_UNCLOSEDENDTAG: Final = 14061 +ERROR_SXS_XML_E_UNCLOSEDSTRING: Final = 14062 +ERROR_SXS_XML_E_UNCLOSEDCOMMENT: Final = 14063 +ERROR_SXS_XML_E_UNCLOSEDDECL: Final = 14064 +ERROR_SXS_XML_E_UNCLOSEDCDATA: Final = 14065 +ERROR_SXS_XML_E_RESERVEDNAMESPACE: Final = 14066 +ERROR_SXS_XML_E_INVALIDENCODING: Final = 14067 +ERROR_SXS_XML_E_INVALIDSWITCH: Final = 14068 +ERROR_SXS_XML_E_BADXMLCASE: Final = 14069 +ERROR_SXS_XML_E_INVALID_STANDALONE: Final = 14070 +ERROR_SXS_XML_E_UNEXPECTED_STANDALONE: Final = 14071 +ERROR_SXS_XML_E_INVALID_VERSION: Final = 14072 +ERROR_SXS_XML_E_MISSINGEQUALS: Final = 14073 +ERROR_SXS_PROTECTION_RECOVERY_FAILED: Final = 14074 +ERROR_SXS_PROTECTION_PUBLIC_KEY_TOO_SHORT: Final = 14075 +ERROR_SXS_PROTECTION_CATALOG_NOT_VALID: Final = 14076 +ERROR_SXS_UNTRANSLATABLE_HRESULT: Final = 14077 +ERROR_SXS_PROTECTION_CATALOG_FILE_MISSING: Final = 14078 +ERROR_SXS_MISSING_ASSEMBLY_IDENTITY_ATTRIBUTE: Final = 14079 +ERROR_SXS_INVALID_ASSEMBLY_IDENTITY_ATTRIBUTE_NAME: Final = 14080 +ERROR_SXS_ASSEMBLY_MISSING: Final = 14081 +ERROR_SXS_CORRUPT_ACTIVATION_STACK: Final = 14082 +ERROR_SXS_CORRUPTION: Final = 14083 +ERROR_SXS_EARLY_DEACTIVATION: Final = 14084 +ERROR_SXS_INVALID_DEACTIVATION: Final = 14085 +ERROR_SXS_MULTIPLE_DEACTIVATION: Final = 14086 +ERROR_SXS_PROCESS_TERMINATION_REQUESTED: Final = 14087 +ERROR_SXS_RELEASE_ACTIVATION_CONTEXT: Final = 14088 +ERROR_SXS_SYSTEM_DEFAULT_ACTIVATION_CONTEXT_EMPTY: Final = 14089 +ERROR_SXS_INVALID_IDENTITY_ATTRIBUTE_VALUE: Final = 14090 +ERROR_SXS_INVALID_IDENTITY_ATTRIBUTE_NAME: Final = 14091 +ERROR_SXS_IDENTITY_DUPLICATE_ATTRIBUTE: Final = 14092 +ERROR_SXS_IDENTITY_PARSE_ERROR: Final = 14093 +ERROR_MALFORMED_SUBSTITUTION_STRING: Final = 14094 +ERROR_SXS_INCORRECT_PUBLIC_KEY_TOKEN: Final = 14095 +ERROR_UNMAPPED_SUBSTITUTION_STRING: Final = 14096 +ERROR_SXS_ASSEMBLY_NOT_LOCKED: Final = 14097 +ERROR_SXS_COMPONENT_STORE_CORRUPT: Final = 14098 +ERROR_ADVANCED_INSTALLER_FAILED: Final = 14099 +ERROR_XML_ENCODING_MISMATCH: Final = 14100 +ERROR_SXS_MANIFEST_IDENTITY_SAME_BUT_CONTENTS_DIFFERENT: Final = 14101 +ERROR_SXS_IDENTITIES_DIFFERENT: Final = 14102 +ERROR_SXS_ASSEMBLY_IS_NOT_A_DEPLOYMENT: Final = 14103 +ERROR_SXS_FILE_NOT_PART_OF_ASSEMBLY: Final = 14104 +ERROR_SXS_MANIFEST_TOO_BIG: Final = 14105 +ERROR_SXS_SETTING_NOT_REGISTERED: Final = 14106 +ERROR_SXS_TRANSACTION_CLOSURE_INCOMPLETE: Final = 14107 
+ERROR_SMI_PRIMITIVE_INSTALLER_FAILED: Final = 14108 +ERROR_GENERIC_COMMAND_FAILED: Final = 14109 +ERROR_SXS_FILE_HASH_MISSING: Final = 14110 +ERROR_SXS_DUPLICATE_ACTIVATABLE_CLASS: Final = 14111 +ERROR_EVT_INVALID_CHANNEL_PATH: Final = 15000 +ERROR_EVT_INVALID_QUERY: Final = 15001 +ERROR_EVT_PUBLISHER_METADATA_NOT_FOUND: Final = 15002 +ERROR_EVT_EVENT_TEMPLATE_NOT_FOUND: Final = 15003 +ERROR_EVT_INVALID_PUBLISHER_NAME: Final = 15004 +ERROR_EVT_INVALID_EVENT_DATA: Final = 15005 +ERROR_EVT_CHANNEL_NOT_FOUND: Final = 15007 +ERROR_EVT_MALFORMED_XML_TEXT: Final = 15008 +ERROR_EVT_SUBSCRIPTION_TO_DIRECT_CHANNEL: Final = 15009 +ERROR_EVT_CONFIGURATION_ERROR: Final = 15010 +ERROR_EVT_QUERY_RESULT_STALE: Final = 15011 +ERROR_EVT_QUERY_RESULT_INVALID_POSITION: Final = 15012 +ERROR_EVT_NON_VALIDATING_MSXML: Final = 15013 +ERROR_EVT_FILTER_ALREADYSCOPED: Final = 15014 +ERROR_EVT_FILTER_NOTELTSET: Final = 15015 +ERROR_EVT_FILTER_INVARG: Final = 15016 +ERROR_EVT_FILTER_INVTEST: Final = 15017 +ERROR_EVT_FILTER_INVTYPE: Final = 15018 +ERROR_EVT_FILTER_PARSEERR: Final = 15019 +ERROR_EVT_FILTER_UNSUPPORTEDOP: Final = 15020 +ERROR_EVT_FILTER_UNEXPECTEDTOKEN: Final = 15021 +ERROR_EVT_INVALID_OPERATION_OVER_ENABLED_DIRECT_CHANNEL: Final = 15022 +ERROR_EVT_INVALID_CHANNEL_PROPERTY_VALUE: Final = 15023 +ERROR_EVT_INVALID_PUBLISHER_PROPERTY_VALUE: Final = 15024 +ERROR_EVT_CHANNEL_CANNOT_ACTIVATE: Final = 15025 +ERROR_EVT_FILTER_TOO_COMPLEX: Final = 15026 +ERROR_EVT_MESSAGE_NOT_FOUND: Final = 15027 +ERROR_EVT_MESSAGE_ID_NOT_FOUND: Final = 15028 +ERROR_EVT_UNRESOLVED_VALUE_INSERT: Final = 15029 +ERROR_EVT_UNRESOLVED_PARAMETER_INSERT: Final = 15030 +ERROR_EVT_MAX_INSERTS_REACHED: Final = 15031 +ERROR_EVT_EVENT_DEFINITION_NOT_FOUND: Final = 15032 +ERROR_EVT_MESSAGE_LOCALE_NOT_FOUND: Final = 15033 +ERROR_EVT_VERSION_TOO_OLD: Final = 15034 +ERROR_EVT_VERSION_TOO_NEW: Final = 15035 +ERROR_EVT_CANNOT_OPEN_CHANNEL_OF_QUERY: Final = 15036 +ERROR_EVT_PUBLISHER_DISABLED: Final = 15037 +ERROR_EVT_FILTER_OUT_OF_RANGE: Final = 15038 +ERROR_EC_SUBSCRIPTION_CANNOT_ACTIVATE: Final = 15080 +ERROR_EC_LOG_DISABLED: Final = 15081 +ERROR_EC_CIRCULAR_FORWARDING: Final = 15082 +ERROR_EC_CREDSTORE_FULL: Final = 15083 +ERROR_EC_CRED_NOT_FOUND: Final = 15084 +ERROR_EC_NO_ACTIVE_CHANNEL: Final = 15085 +ERROR_MUI_FILE_NOT_FOUND: Final = 15100 +ERROR_MUI_INVALID_FILE: Final = 15101 +ERROR_MUI_INVALID_RC_CONFIG: Final = 15102 +ERROR_MUI_INVALID_LOCALE_NAME: Final = 15103 +ERROR_MUI_INVALID_ULTIMATEFALLBACK_NAME: Final = 15104 +ERROR_MUI_FILE_NOT_LOADED: Final = 15105 +ERROR_RESOURCE_ENUM_USER_STOP: Final = 15106 +ERROR_MUI_INTLSETTINGS_UILANG_NOT_INSTALLED: Final = 15107 +ERROR_MUI_INTLSETTINGS_INVALID_LOCALE_NAME: Final = 15108 +ERROR_MRM_RUNTIME_NO_DEFAULT_OR_NEUTRAL_RESOURCE: Final = 15110 +ERROR_MRM_INVALID_PRICONFIG: Final = 15111 +ERROR_MRM_INVALID_FILE_TYPE: Final = 15112 +ERROR_MRM_UNKNOWN_QUALIFIER: Final = 15113 +ERROR_MRM_INVALID_QUALIFIER_VALUE: Final = 15114 +ERROR_MRM_NO_CANDIDATE: Final = 15115 +ERROR_MRM_NO_MATCH_OR_DEFAULT_CANDIDATE: Final = 15116 +ERROR_MRM_RESOURCE_TYPE_MISMATCH: Final = 15117 +ERROR_MRM_DUPLICATE_MAP_NAME: Final = 15118 +ERROR_MRM_DUPLICATE_ENTRY: Final = 15119 +ERROR_MRM_INVALID_RESOURCE_IDENTIFIER: Final = 15120 +ERROR_MRM_FILEPATH_TOO_LONG: Final = 15121 +ERROR_MRM_UNSUPPORTED_DIRECTORY_TYPE: Final = 15122 +ERROR_MRM_INVALID_PRI_FILE: Final = 15126 +ERROR_MRM_NAMED_RESOURCE_NOT_FOUND: Final = 15127 +ERROR_MRM_MAP_NOT_FOUND: Final = 15135 +ERROR_MRM_UNSUPPORTED_PROFILE_TYPE: Final = 15136 
+ERROR_MRM_INVALID_QUALIFIER_OPERATOR: Final = 15137 +ERROR_MRM_INDETERMINATE_QUALIFIER_VALUE: Final = 15138 +ERROR_MRM_AUTOMERGE_ENABLED: Final = 15139 +ERROR_MRM_TOO_MANY_RESOURCES: Final = 15140 +ERROR_MRM_UNSUPPORTED_FILE_TYPE_FOR_MERGE: Final = 15141 +ERROR_MRM_UNSUPPORTED_FILE_TYPE_FOR_LOAD_UNLOAD_PRI_FILE: Final = 15142 +ERROR_MRM_NO_CURRENT_VIEW_ON_THREAD: Final = 15143 +ERROR_DIFFERENT_PROFILE_RESOURCE_MANAGER_EXIST: Final = 15144 +ERROR_OPERATION_NOT_ALLOWED_FROM_SYSTEM_COMPONENT: Final = 15145 +ERROR_MRM_DIRECT_REF_TO_NON_DEFAULT_RESOURCE: Final = 15146 +ERROR_MRM_GENERATION_COUNT_MISMATCH: Final = 15147 +ERROR_PRI_MERGE_VERSION_MISMATCH: Final = 15148 +ERROR_PRI_MERGE_MISSING_SCHEMA: Final = 15149 +ERROR_PRI_MERGE_LOAD_FILE_FAILED: Final = 15150 +ERROR_PRI_MERGE_ADD_FILE_FAILED: Final = 15151 +ERROR_PRI_MERGE_WRITE_FILE_FAILED: Final = 15152 +ERROR_PRI_MERGE_MULTIPLE_PACKAGE_FAMILIES_NOT_ALLOWED: Final = 15153 +ERROR_PRI_MERGE_MULTIPLE_MAIN_PACKAGES_NOT_ALLOWED: Final = 15154 +ERROR_PRI_MERGE_BUNDLE_PACKAGES_NOT_ALLOWED: Final = 15155 +ERROR_PRI_MERGE_MAIN_PACKAGE_REQUIRED: Final = 15156 +ERROR_PRI_MERGE_RESOURCE_PACKAGE_REQUIRED: Final = 15157 +ERROR_PRI_MERGE_INVALID_FILE_NAME: Final = 15158 +ERROR_MRM_PACKAGE_NOT_FOUND: Final = 15159 +ERROR_MRM_MISSING_DEFAULT_LANGUAGE: Final = 15160 +ERROR_MRM_SCOPE_ITEM_CONFLICT: Final = 15161 +ERROR_MCA_INVALID_CAPABILITIES_STRING: Final = 15200 +ERROR_MCA_INVALID_VCP_VERSION: Final = 15201 +ERROR_MCA_MONITOR_VIOLATES_MCCS_SPECIFICATION: Final = 15202 +ERROR_MCA_MCCS_VERSION_MISMATCH: Final = 15203 +ERROR_MCA_UNSUPPORTED_MCCS_VERSION: Final = 15204 +ERROR_MCA_INTERNAL_ERROR: Final = 15205 +ERROR_MCA_INVALID_TECHNOLOGY_TYPE_RETURNED: Final = 15206 +ERROR_MCA_UNSUPPORTED_COLOR_TEMPERATURE: Final = 15207 +ERROR_AMBIGUOUS_SYSTEM_DEVICE: Final = 15250 +ERROR_SYSTEM_DEVICE_NOT_FOUND: Final = 15299 +ERROR_HASH_NOT_SUPPORTED: Final = 15300 +ERROR_HASH_NOT_PRESENT: Final = 15301 +ERROR_SECONDARY_IC_PROVIDER_NOT_REGISTERED: Final = 15321 +ERROR_GPIO_CLIENT_INFORMATION_INVALID: Final = 15322 +ERROR_GPIO_VERSION_NOT_SUPPORTED: Final = 15323 +ERROR_GPIO_INVALID_REGISTRATION_PACKET: Final = 15324 +ERROR_GPIO_OPERATION_DENIED: Final = 15325 +ERROR_GPIO_INCOMPATIBLE_CONNECT_MODE: Final = 15326 +ERROR_GPIO_INTERRUPT_ALREADY_UNMASKED: Final = 15327 +ERROR_CANNOT_SWITCH_RUNLEVEL: Final = 15400 +ERROR_INVALID_RUNLEVEL_SETTING: Final = 15401 +ERROR_RUNLEVEL_SWITCH_TIMEOUT: Final = 15402 +ERROR_RUNLEVEL_SWITCH_AGENT_TIMEOUT: Final = 15403 +ERROR_RUNLEVEL_SWITCH_IN_PROGRESS: Final = 15404 +ERROR_SERVICES_FAILED_AUTOSTART: Final = 15405 +ERROR_COM_TASK_STOP_PENDING: Final = 15501 +ERROR_INSTALL_OPEN_PACKAGE_FAILED: Final = 15600 +ERROR_INSTALL_PACKAGE_NOT_FOUND: Final = 15601 +ERROR_INSTALL_INVALID_PACKAGE: Final = 15602 +ERROR_INSTALL_RESOLVE_DEPENDENCY_FAILED: Final = 15603 +ERROR_INSTALL_OUT_OF_DISK_SPACE: Final = 15604 +ERROR_INSTALL_NETWORK_FAILURE: Final = 15605 +ERROR_INSTALL_REGISTRATION_FAILURE: Final = 15606 +ERROR_INSTALL_DEREGISTRATION_FAILURE: Final = 15607 +ERROR_INSTALL_CANCEL: Final = 15608 +ERROR_INSTALL_FAILED: Final = 15609 +ERROR_REMOVE_FAILED: Final = 15610 +ERROR_PACKAGE_ALREADY_EXISTS: Final = 15611 +ERROR_NEEDS_REMEDIATION: Final = 15612 +ERROR_INSTALL_PREREQUISITE_FAILED: Final = 15613 +ERROR_PACKAGE_REPOSITORY_CORRUPTED: Final = 15614 +ERROR_INSTALL_POLICY_FAILURE: Final = 15615 +ERROR_PACKAGE_UPDATING: Final = 15616 +ERROR_DEPLOYMENT_BLOCKED_BY_POLICY: Final = 15617 +ERROR_PACKAGES_IN_USE: Final = 15618 +ERROR_RECOVERY_FILE_CORRUPT: 
Final = 15619 +ERROR_INVALID_STAGED_SIGNATURE: Final = 15620 +ERROR_DELETING_EXISTING_APPLICATIONDATA_STORE_FAILED: Final = 15621 +ERROR_INSTALL_PACKAGE_DOWNGRADE: Final = 15622 +ERROR_SYSTEM_NEEDS_REMEDIATION: Final = 15623 +ERROR_APPX_INTEGRITY_FAILURE_CLR_NGEN: Final = 15624 +ERROR_RESILIENCY_FILE_CORRUPT: Final = 15625 +ERROR_INSTALL_FIREWALL_SERVICE_NOT_RUNNING: Final = 15626 +ERROR_PACKAGE_MOVE_FAILED: Final = 15627 +ERROR_INSTALL_VOLUME_NOT_EMPTY: Final = 15628 +ERROR_INSTALL_VOLUME_OFFLINE: Final = 15629 +ERROR_INSTALL_VOLUME_CORRUPT: Final = 15630 +ERROR_NEEDS_REGISTRATION: Final = 15631 +ERROR_INSTALL_WRONG_PROCESSOR_ARCHITECTURE: Final = 15632 +ERROR_DEV_SIDELOAD_LIMIT_EXCEEDED: Final = 15633 +ERROR_INSTALL_OPTIONAL_PACKAGE_REQUIRES_MAIN_PACKAGE: Final = 15634 +ERROR_PACKAGE_NOT_SUPPORTED_ON_FILESYSTEM: Final = 15635 +ERROR_PACKAGE_MOVE_BLOCKED_BY_STREAMING: Final = 15636 +ERROR_INSTALL_OPTIONAL_PACKAGE_APPLICATIONID_NOT_UNIQUE: Final = 15637 +ERROR_PACKAGE_STAGING_ONHOLD: Final = 15638 +ERROR_INSTALL_INVALID_RELATED_SET_UPDATE: Final = 15639 +ERROR_INSTALL_OPTIONAL_PACKAGE_REQUIRES_MAIN_PACKAGE_FULLTRUST_CAPABILITY: Final = 15640 +ERROR_DEPLOYMENT_BLOCKED_BY_USER_LOG_OFF: Final = 15641 +ERROR_PROVISION_OPTIONAL_PACKAGE_REQUIRES_MAIN_PACKAGE_PROVISIONED: Final = 15642 +ERROR_PACKAGES_REPUTATION_CHECK_FAILED: Final = 15643 +ERROR_PACKAGES_REPUTATION_CHECK_TIMEDOUT: Final = 15644 +ERROR_DEPLOYMENT_OPTION_NOT_SUPPORTED: Final = 15645 +ERROR_APPINSTALLER_ACTIVATION_BLOCKED: Final = 15646 +ERROR_REGISTRATION_FROM_REMOTE_DRIVE_NOT_SUPPORTED: Final = 15647 +ERROR_APPX_RAW_DATA_WRITE_FAILED: Final = 15648 +ERROR_DEPLOYMENT_BLOCKED_BY_VOLUME_POLICY_PACKAGE: Final = 15649 +ERROR_DEPLOYMENT_BLOCKED_BY_VOLUME_POLICY_MACHINE: Final = 15650 +ERROR_DEPLOYMENT_BLOCKED_BY_PROFILE_POLICY: Final = 15651 +ERROR_DEPLOYMENT_FAILED_CONFLICTING_MUTABLE_PACKAGE_DIRECTORY: Final = 15652 +ERROR_SINGLETON_RESOURCE_INSTALLED_IN_ACTIVE_USER: Final = 15653 +ERROR_DIFFERENT_VERSION_OF_PACKAGED_SERVICE_INSTALLED: Final = 15654 +ERROR_SERVICE_EXISTS_AS_NON_PACKAGED_SERVICE: Final = 15655 +ERROR_PACKAGED_SERVICE_REQUIRES_ADMIN_PRIVILEGES: Final = 15656 +ERROR_REDIRECTION_TO_DEFAULT_ACCOUNT_NOT_ALLOWED: Final = 15657 +ERROR_PACKAGE_LACKS_CAPABILITY_TO_DEPLOY_ON_HOST: Final = 15658 +ERROR_UNSIGNED_PACKAGE_INVALID_CONTENT: Final = 15659 +ERROR_UNSIGNED_PACKAGE_INVALID_PUBLISHER_NAMESPACE: Final = 15660 +ERROR_SIGNED_PACKAGE_INVALID_PUBLISHER_NAMESPACE: Final = 15661 +ERROR_PACKAGE_EXTERNAL_LOCATION_NOT_ALLOWED: Final = 15662 +ERROR_INSTALL_FULLTRUST_HOSTRUNTIME_REQUIRES_MAIN_PACKAGE_FULLTRUST_CAPABILITY: Final = 15663 +ERROR_PACKAGE_LACKS_CAPABILITY_FOR_MANDATORY_STARTUPTASKS: Final = 15664 +ERROR_INSTALL_RESOLVE_HOSTRUNTIME_DEPENDENCY_FAILED: Final = 15665 +ERROR_MACHINE_SCOPE_NOT_ALLOWED: Final = 15666 +ERROR_CLASSIC_COMPAT_MODE_NOT_ALLOWED: Final = 15667 +ERROR_STAGEFROMUPDATEAGENT_PACKAGE_NOT_APPLICABLE: Final = 15668 +ERROR_PACKAGE_NOT_REGISTERED_FOR_USER: Final = 15669 +ERROR_PACKAGE_NAME_MISMATCH: Final = 15670 +ERROR_APPINSTALLER_URI_IN_USE: Final = 15671 +ERROR_APPINSTALLER_IS_MANAGED_BY_SYSTEM: Final = 15672 +APPMODEL_ERROR_NO_PACKAGE: Final = 15700 +APPMODEL_ERROR_PACKAGE_RUNTIME_CORRUPT: Final = 15701 +APPMODEL_ERROR_PACKAGE_IDENTITY_CORRUPT: Final = 15702 +APPMODEL_ERROR_NO_APPLICATION: Final = 15703 +APPMODEL_ERROR_DYNAMIC_PROPERTY_READ_FAILED: Final = 15704 +APPMODEL_ERROR_DYNAMIC_PROPERTY_INVALID: Final = 15705 +APPMODEL_ERROR_PACKAGE_NOT_AVAILABLE: Final = 15706 +APPMODEL_ERROR_NO_MUTABLE_DIRECTORY: 
Final = 15707
+ERROR_STATE_LOAD_STORE_FAILED: Final = 15800
+ERROR_STATE_GET_VERSION_FAILED: Final = 15801
+ERROR_STATE_SET_VERSION_FAILED: Final = 15802
+ERROR_STATE_STRUCTURED_RESET_FAILED: Final = 15803
+ERROR_STATE_OPEN_CONTAINER_FAILED: Final = 15804
+ERROR_STATE_CREATE_CONTAINER_FAILED: Final = 15805
+ERROR_STATE_DELETE_CONTAINER_FAILED: Final = 15806
+ERROR_STATE_READ_SETTING_FAILED: Final = 15807
+ERROR_STATE_WRITE_SETTING_FAILED: Final = 15808
+ERROR_STATE_DELETE_SETTING_FAILED: Final = 15809
+ERROR_STATE_QUERY_SETTING_FAILED: Final = 15810
+ERROR_STATE_READ_COMPOSITE_SETTING_FAILED: Final = 15811
+ERROR_STATE_WRITE_COMPOSITE_SETTING_FAILED: Final = 15812
+ERROR_STATE_ENUMERATE_CONTAINER_FAILED: Final = 15813
+ERROR_STATE_ENUMERATE_SETTINGS_FAILED: Final = 15814
+ERROR_STATE_COMPOSITE_SETTING_VALUE_SIZE_LIMIT_EXCEEDED: Final = 15815
+ERROR_STATE_SETTING_VALUE_SIZE_LIMIT_EXCEEDED: Final = 15816
+ERROR_STATE_SETTING_NAME_SIZE_LIMIT_EXCEEDED: Final = 15817
+ERROR_STATE_CONTAINER_NAME_SIZE_LIMIT_EXCEEDED: Final = 15818
+ERROR_API_UNAVAILABLE: Final = 15841
+STORE_ERROR_UNLICENSED: Final = 15861
+STORE_ERROR_UNLICENSED_USER: Final = 15862
+STORE_ERROR_PENDING_COM_TRANSACTION: Final = 15863
+STORE_ERROR_LICENSE_REVOKED: Final = 15864
+SEVERITY_SUCCESS: Final = 0
+SEVERITY_ERROR: Final = 1
+
+def SUCCEEDED(hr): ...
+def FAILED(hr): ...
+def HRESULT_CODE(hr): ...
 def SCODE_CODE(sc): ...
 def HRESULT_FACILITY(hr): ...
 def SCODE_FACILITY(sc): ...
 def HRESULT_SEVERITY(hr): ...
 def SCODE_SEVERITY(sc): ...
-FACILITY_NT_BIT: int
+FACILITY_NT_BIT: Final = 0x10000000
+def HRESULT_FROM_WIN32(x): ...
 def HRESULT_FROM_NT(x): ...
 def GetScode(hr): ...
 def ResultFromScode(sc): ...
-NOERROR: int
-E_UNEXPECTED: int
-E_NOTIMPL: int
-E_OUTOFMEMORY: int
-E_INVALIDARG: int
-E_NOINTERFACE: int
-E_POINTER: int
-E_HANDLE: int
-E_ABORT: int
-E_FAIL: int
-E_ACCESSDENIED: int
-win16_E_NOTIMPL: int
-win16_E_OUTOFMEMORY: int
-win16_E_INVALIDARG: int
-win16_E_NOINTERFACE: int
-win16_E_POINTER: int
-win16_E_HANDLE: int
-win16_E_ABORT: int
-win16_E_FAIL: int
-win16_E_ACCESSDENIED: int
-E_PENDING: int
-CO_E_INIT_TLS: int
-CO_E_INIT_SHARED_ALLOCATOR: int
-CO_E_INIT_MEMORY_ALLOCATOR: int
-CO_E_INIT_CLASS_CACHE: int
-CO_E_INIT_RPC_CHANNEL: int
-CO_E_INIT_TLS_SET_CHANNEL_CONTROL: int
-CO_E_INIT_TLS_CHANNEL_CONTROL: int
-CO_E_INIT_UNACCEPTED_USER_ALLOCATOR: int
-CO_E_INIT_SCM_MUTEX_EXISTS: int
-CO_E_INIT_SCM_FILE_MAPPING_EXISTS: int
-CO_E_INIT_SCM_MAP_VIEW_OF_FILE: int
-CO_E_INIT_SCM_EXEC_FAILURE: int
-CO_E_INIT_ONLY_SINGLE_THREADED: int
-CO_E_CANT_REMOTE: int
-CO_E_BAD_SERVER_NAME: int
-CO_E_WRONG_SERVER_IDENTITY: int
-CO_E_OLE1DDE_DISABLED: int
-CO_E_RUNAS_SYNTAX: int
-CO_E_CREATEPROCESS_FAILURE: int
-CO_E_RUNAS_CREATEPROCESS_FAILURE: int
-CO_E_RUNAS_LOGON_FAILURE: int
-CO_E_LAUNCH_PERMSSION_DENIED: int
-CO_E_START_SERVICE_FAILURE: int
-CO_E_REMOTE_COMMUNICATION_FAILURE: int
-CO_E_SERVER_START_TIMEOUT: int
-CO_E_CLSREG_INCONSISTENT: int
-CO_E_IIDREG_INCONSISTENT: int
-CO_E_NOT_SUPPORTED: int
-CO_E_RELOAD_DLL: int
-CO_E_MSI_ERROR: int
-OLE_E_FIRST: int
-OLE_E_LAST: int
-OLE_S_FIRST: int
-OLE_S_LAST: int
-OLE_E_OLEVERB: int
-OLE_E_ADVF: int
-OLE_E_ENUM_NOMORE: int
-OLE_E_ADVISENOTSUPPORTED: int
-OLE_E_NOCONNECTION: int
-OLE_E_NOTRUNNING: int
-OLE_E_NOCACHE: int
-OLE_E_BLANK: int
-OLE_E_CLASSDIFF: int
-OLE_E_CANT_GETMONIKER: int
-OLE_E_CANT_BINDTOSOURCE: int
-OLE_E_STATIC: int
-OLE_E_PROMPTSAVECANCELLED: int
-OLE_E_INVALIDRECT: int
-OLE_E_WRONGCOMPOBJ: int
-OLE_E_INVALIDHWND: int
-OLE_E_NOT_INPLACEACTIVE: int
-OLE_E_CANTCONVERT: int -OLE_E_NOSTORAGE: int -DV_E_FORMATETC: int -DV_E_DVTARGETDEVICE: int -DV_E_STGMEDIUM: int -DV_E_STATDATA: int -DV_E_LINDEX: int -DV_E_TYMED: int -DV_E_CLIPFORMAT: int -DV_E_DVASPECT: int -DV_E_DVTARGETDEVICE_SIZE: int -DV_E_NOIVIEWOBJECT: int -DRAGDROP_E_FIRST: int -DRAGDROP_E_LAST: int -DRAGDROP_S_FIRST: int -DRAGDROP_S_LAST: int -DRAGDROP_E_NOTREGISTERED: int -DRAGDROP_E_ALREADYREGISTERED: int -DRAGDROP_E_INVALIDHWND: int -CLASSFACTORY_E_FIRST: int -CLASSFACTORY_E_LAST: int -CLASSFACTORY_S_FIRST: int -CLASSFACTORY_S_LAST: int -CLASS_E_NOAGGREGATION: int -CLASS_E_CLASSNOTAVAILABLE: int -CLASS_E_NOTLICENSED: int -MARSHAL_E_FIRST: int -MARSHAL_E_LAST: int -MARSHAL_S_FIRST: int -MARSHAL_S_LAST: int -DATA_E_FIRST: int -DATA_E_LAST: int -DATA_S_FIRST: int -DATA_S_LAST: int -VIEW_E_FIRST: int -VIEW_E_LAST: int -VIEW_S_FIRST: int -VIEW_S_LAST: int -VIEW_E_DRAW: int -REGDB_E_FIRST: int -REGDB_E_LAST: int -REGDB_S_FIRST: int -REGDB_S_LAST: int -REGDB_E_READREGDB: int -REGDB_E_WRITEREGDB: int -REGDB_E_KEYMISSING: int -REGDB_E_INVALIDVALUE: int -REGDB_E_CLASSNOTREG: int -REGDB_E_IIDNOTREG: int -CAT_E_FIRST: int -CAT_E_LAST: int -CAT_E_CATIDNOEXIST: int -CAT_E_NODESCRIPTION: int -CS_E_FIRST: int -CS_E_LAST: int -CS_E_PACKAGE_NOTFOUND: int -CS_E_NOT_DELETABLE: int -CS_E_CLASS_NOTFOUND: int -CS_E_INVALID_VERSION: int -CS_E_NO_CLASSSTORE: int -CACHE_E_FIRST: int -CACHE_E_LAST: int -CACHE_S_FIRST: int -CACHE_S_LAST: int -CACHE_E_NOCACHE_UPDATED: int -OLEOBJ_E_FIRST: int -OLEOBJ_E_LAST: int -OLEOBJ_S_FIRST: int -OLEOBJ_S_LAST: int -OLEOBJ_E_NOVERBS: int -OLEOBJ_E_INVALIDVERB: int -CLIENTSITE_E_FIRST: int -CLIENTSITE_E_LAST: int -CLIENTSITE_S_FIRST: int -CLIENTSITE_S_LAST: int -INPLACE_E_NOTUNDOABLE: int -INPLACE_E_NOTOOLSPACE: int -INPLACE_E_FIRST: int -INPLACE_E_LAST: int -INPLACE_S_FIRST: int -INPLACE_S_LAST: int -ENUM_E_FIRST: int -ENUM_E_LAST: int -ENUM_S_FIRST: int -ENUM_S_LAST: int -CONVERT10_E_FIRST: int -CONVERT10_E_LAST: int -CONVERT10_S_FIRST: int -CONVERT10_S_LAST: int -CONVERT10_E_OLESTREAM_GET: int -CONVERT10_E_OLESTREAM_PUT: int -CONVERT10_E_OLESTREAM_FMT: int -CONVERT10_E_OLESTREAM_BITMAP_TO_DIB: int -CONVERT10_E_STG_FMT: int -CONVERT10_E_STG_NO_STD_STREAM: int -CONVERT10_E_STG_DIB_TO_BITMAP: int -CLIPBRD_E_FIRST: int -CLIPBRD_E_LAST: int -CLIPBRD_S_FIRST: int -CLIPBRD_S_LAST: int -CLIPBRD_E_CANT_OPEN: int -CLIPBRD_E_CANT_EMPTY: int -CLIPBRD_E_CANT_SET: int -CLIPBRD_E_BAD_DATA: int -CLIPBRD_E_CANT_CLOSE: int -MK_E_FIRST: int -MK_E_LAST: int -MK_S_FIRST: int -MK_S_LAST: int -MK_E_CONNECTMANUALLY: int -MK_E_EXCEEDEDDEADLINE: int -MK_E_NEEDGENERIC: int -MK_E_UNAVAILABLE: int -MK_E_SYNTAX: int -MK_E_NOOBJECT: int -MK_E_INVALIDEXTENSION: int -MK_E_INTERMEDIATEINTERFACENOTSUPPORTED: int -MK_E_NOTBINDABLE: int -MK_E_NOTBOUND: int -MK_E_CANTOPENFILE: int -MK_E_MUSTBOTHERUSER: int -MK_E_NOINVERSE: int -MK_E_NOSTORAGE: int -MK_E_NOPREFIX: int -MK_E_ENUMERATION_FAILED: int -CO_E_FIRST: int -CO_E_LAST: int -CO_S_FIRST: int -CO_S_LAST: int -CO_E_NOTINITIALIZED: int -CO_E_ALREADYINITIALIZED: int -CO_E_CANTDETERMINECLASS: int -CO_E_CLASSSTRING: int -CO_E_IIDSTRING: int -CO_E_APPNOTFOUND: int -CO_E_APPSINGLEUSE: int -CO_E_ERRORINAPP: int -CO_E_DLLNOTFOUND: int -CO_E_ERRORINDLL: int -CO_E_WRONGOSFORAPP: int -CO_E_OBJNOTREG: int -CO_E_OBJISREG: int -CO_E_OBJNOTCONNECTED: int -CO_E_APPDIDNTREG: int -CO_E_RELEASED: int -CO_E_FAILEDTOIMPERSONATE: int -CO_E_FAILEDTOGETSECCTX: int -CO_E_FAILEDTOOPENTHREADTOKEN: int -CO_E_FAILEDTOGETTOKENINFO: int -CO_E_TRUSTEEDOESNTMATCHCLIENT: int 
-CO_E_FAILEDTOQUERYCLIENTBLANKET: int -CO_E_FAILEDTOSETDACL: int -CO_E_ACCESSCHECKFAILED: int -CO_E_NETACCESSAPIFAILED: int -CO_E_WRONGTRUSTEENAMESYNTAX: int -CO_E_INVALIDSID: int -CO_E_CONVERSIONFAILED: int -CO_E_NOMATCHINGSIDFOUND: int -CO_E_LOOKUPACCSIDFAILED: int -CO_E_NOMATCHINGNAMEFOUND: int -CO_E_LOOKUPACCNAMEFAILED: int -CO_E_SETSERLHNDLFAILED: int -CO_E_FAILEDTOGETWINDIR: int -CO_E_PATHTOOLONG: int -CO_E_FAILEDTOGENUUID: int -CO_E_FAILEDTOCREATEFILE: int -CO_E_FAILEDTOCLOSEHANDLE: int -CO_E_EXCEEDSYSACLLIMIT: int -CO_E_ACESINWRONGORDER: int -CO_E_INCOMPATIBLESTREAMVERSION: int -CO_E_FAILEDTOOPENPROCESSTOKEN: int -CO_E_DECODEFAILED: int -CO_E_ACNOTINITIALIZED: int -OLE_S_USEREG: int -OLE_S_STATIC: int -OLE_S_MAC_CLIPFORMAT: int -DRAGDROP_S_DROP: int -DRAGDROP_S_CANCEL: int -DRAGDROP_S_USEDEFAULTCURSORS: int -DATA_S_SAMEFORMATETC: int -VIEW_S_ALREADY_FROZEN: int -CACHE_S_FORMATETC_NOTSUPPORTED: int -CACHE_S_SAMECACHE: int -CACHE_S_SOMECACHES_NOTUPDATED: int -OLEOBJ_S_INVALIDVERB: int -OLEOBJ_S_CANNOT_DOVERB_NOW: int -OLEOBJ_S_INVALIDHWND: int -INPLACE_S_TRUNCATED: int -CONVERT10_S_NO_PRESENTATION: int -MK_S_REDUCED_TO_SELF: int -MK_S_ME: int -MK_S_HIM: int -MK_S_US: int -MK_S_MONIKERALREADYREGISTERED: int -CO_E_CLASS_CREATE_FAILED: int -CO_E_SCM_ERROR: int -CO_E_SCM_RPC_FAILURE: int -CO_E_BAD_PATH: int -CO_E_SERVER_EXEC_FAILURE: int -CO_E_OBJSRV_RPC_FAILURE: int -MK_E_NO_NORMALIZED: int -CO_E_SERVER_STOPPING: int -MEM_E_INVALID_ROOT: int -MEM_E_INVALID_LINK: int -MEM_E_INVALID_SIZE: int -CO_S_NOTALLINTERFACES: int -DISP_E_UNKNOWNINTERFACE: int -DISP_E_MEMBERNOTFOUND: int -DISP_E_PARAMNOTFOUND: int -DISP_E_TYPEMISMATCH: int -DISP_E_UNKNOWNNAME: int -DISP_E_NONAMEDARGS: int -DISP_E_BADVARTYPE: int -DISP_E_EXCEPTION: int -DISP_E_OVERFLOW: int -DISP_E_BADINDEX: int -DISP_E_UNKNOWNLCID: int -DISP_E_ARRAYISLOCKED: int -DISP_E_BADPARAMCOUNT: int -DISP_E_PARAMNOTOPTIONAL: int -DISP_E_BADCALLEE: int -DISP_E_NOTACOLLECTION: int -DISP_E_DIVBYZERO: int -TYPE_E_BUFFERTOOSMALL: int -TYPE_E_FIELDNOTFOUND: int -TYPE_E_INVDATAREAD: int -TYPE_E_UNSUPFORMAT: int -TYPE_E_REGISTRYACCESS: int -TYPE_E_LIBNOTREGISTERED: int -TYPE_E_UNDEFINEDTYPE: int -TYPE_E_QUALIFIEDNAMEDISALLOWED: int -TYPE_E_INVALIDSTATE: int -TYPE_E_WRONGTYPEKIND: int -TYPE_E_ELEMENTNOTFOUND: int -TYPE_E_AMBIGUOUSNAME: int -TYPE_E_NAMECONFLICT: int -TYPE_E_UNKNOWNLCID: int -TYPE_E_DLLFUNCTIONNOTFOUND: int -TYPE_E_BADMODULEKIND: int -TYPE_E_SIZETOOBIG: int -TYPE_E_DUPLICATEID: int -TYPE_E_INVALIDID: int -TYPE_E_TYPEMISMATCH: int -TYPE_E_OUTOFBOUNDS: int -TYPE_E_IOERROR: int -TYPE_E_CANTCREATETMPFILE: int -TYPE_E_CANTLOADLIBRARY: int -TYPE_E_INCONSISTENTPROPFUNCS: int -TYPE_E_CIRCULARTYPE: int -STG_E_INVALIDFUNCTION: int -STG_E_FILENOTFOUND: int -STG_E_PATHNOTFOUND: int -STG_E_TOOMANYOPENFILES: int -STG_E_ACCESSDENIED: int -STG_E_INVALIDHANDLE: int -STG_E_INSUFFICIENTMEMORY: int -STG_E_INVALIDPOINTER: int -STG_E_NOMOREFILES: int -STG_E_DISKISWRITEPROTECTED: int -STG_E_SEEKERROR: int -STG_E_WRITEFAULT: int -STG_E_READFAULT: int -STG_E_SHAREVIOLATION: int -STG_E_LOCKVIOLATION: int -STG_E_FILEALREADYEXISTS: int -STG_E_INVALIDPARAMETER: int -STG_E_MEDIUMFULL: int -STG_E_PROPSETMISMATCHED: int -STG_E_ABNORMALAPIEXIT: int -STG_E_INVALIDHEADER: int -STG_E_INVALIDNAME: int -STG_E_UNKNOWN: int -STG_E_UNIMPLEMENTEDFUNCTION: int -STG_E_INVALIDFLAG: int -STG_E_INUSE: int -STG_E_NOTCURRENT: int -STG_E_REVERTED: int -STG_E_CANTSAVE: int -STG_E_OLDFORMAT: int -STG_E_OLDDLL: int -STG_E_SHAREREQUIRED: int -STG_E_NOTFILEBASEDSTORAGE: int 
-STG_E_EXTANTMARSHALLINGS: int -STG_E_DOCFILECORRUPT: int -STG_E_BADBASEADDRESS: int -STG_E_INCOMPLETE: int -STG_E_TERMINATED: int -STG_S_CONVERTED: int -STG_S_BLOCK: int -STG_S_RETRYNOW: int -STG_S_MONITORING: int -STG_S_MULTIPLEOPENS: int -STG_S_CONSOLIDATIONFAILED: int -STG_S_CANNOTCONSOLIDATE: int -RPC_E_CALL_REJECTED: int -RPC_E_CALL_CANCELED: int -RPC_E_CANTPOST_INSENDCALL: int -RPC_E_CANTCALLOUT_INASYNCCALL: int -RPC_E_CANTCALLOUT_INEXTERNALCALL: int -RPC_E_CONNECTION_TERMINATED: int -RPC_E_SERVER_DIED: int -RPC_E_CLIENT_DIED: int -RPC_E_INVALID_DATAPACKET: int -RPC_E_CANTTRANSMIT_CALL: int -RPC_E_CLIENT_CANTMARSHAL_DATA: int -RPC_E_CLIENT_CANTUNMARSHAL_DATA: int -RPC_E_SERVER_CANTMARSHAL_DATA: int -RPC_E_SERVER_CANTUNMARSHAL_DATA: int -RPC_E_INVALID_DATA: int -RPC_E_INVALID_PARAMETER: int -RPC_E_CANTCALLOUT_AGAIN: int -RPC_E_SERVER_DIED_DNE: int -RPC_E_SYS_CALL_FAILED: int -RPC_E_OUT_OF_RESOURCES: int -RPC_E_ATTEMPTED_MULTITHREAD: int -RPC_E_NOT_REGISTERED: int -RPC_E_FAULT: int -RPC_E_SERVERFAULT: int -RPC_E_CHANGED_MODE: int -RPC_E_INVALIDMETHOD: int -RPC_E_DISCONNECTED: int -RPC_E_RETRY: int -RPC_E_SERVERCALL_RETRYLATER: int -RPC_E_SERVERCALL_REJECTED: int -RPC_E_INVALID_CALLDATA: int -RPC_E_CANTCALLOUT_ININPUTSYNCCALL: int -RPC_E_WRONG_THREAD: int -RPC_E_THREAD_NOT_INIT: int -RPC_E_VERSION_MISMATCH: int -RPC_E_INVALID_HEADER: int -RPC_E_INVALID_EXTENSION: int -RPC_E_INVALID_IPID: int -RPC_E_INVALID_OBJECT: int -RPC_S_CALLPENDING: int -RPC_S_WAITONTIMER: int -RPC_E_CALL_COMPLETE: int -RPC_E_UNSECURE_CALL: int -RPC_E_TOO_LATE: int -RPC_E_NO_GOOD_SECURITY_PACKAGES: int -RPC_E_ACCESS_DENIED: int -RPC_E_REMOTE_DISABLED: int -RPC_E_INVALID_OBJREF: int -RPC_E_NO_CONTEXT: int -RPC_E_TIMEOUT: int -RPC_E_NO_SYNC: int -RPC_E_UNEXPECTED: int -NTE_BAD_UID: int -NTE_BAD_HASH: int -NTE_BAD_KEY: int -NTE_BAD_LEN: int -NTE_BAD_DATA: int -NTE_BAD_SIGNATURE: int -NTE_BAD_VER: int -NTE_BAD_ALGID: int -NTE_BAD_FLAGS: int -NTE_BAD_TYPE: int -NTE_BAD_KEY_STATE: int -NTE_BAD_HASH_STATE: int -NTE_NO_KEY: int -NTE_NO_MEMORY: int -NTE_EXISTS: int -NTE_PERM: int -NTE_NOT_FOUND: int -NTE_DOUBLE_ENCRYPT: int -NTE_BAD_PROVIDER: int -NTE_BAD_PROV_TYPE: int -NTE_BAD_PUBLIC_KEY: int -NTE_BAD_KEYSET: int -NTE_PROV_TYPE_NOT_DEF: int -NTE_PROV_TYPE_ENTRY_BAD: int -NTE_KEYSET_NOT_DEF: int -NTE_KEYSET_ENTRY_BAD: int -NTE_PROV_TYPE_NO_MATCH: int -NTE_SIGNATURE_FILE_BAD: int -NTE_PROVIDER_DLL_FAIL: int -NTE_PROV_DLL_NOT_FOUND: int -NTE_BAD_KEYSET_PARAM: int -NTE_FAIL: int -NTE_SYS_ERR: int -CRYPT_E_MSG_ERROR: int -CRYPT_E_UNKNOWN_ALGO: int -CRYPT_E_OID_FORMAT: int -CRYPT_E_INVALID_MSG_TYPE: int -CRYPT_E_UNEXPECTED_ENCODING: int -CRYPT_E_AUTH_ATTR_MISSING: int -CRYPT_E_HASH_VALUE: int -CRYPT_E_INVALID_INDEX: int -CRYPT_E_ALREADY_DECRYPTED: int -CRYPT_E_NOT_DECRYPTED: int -CRYPT_E_RECIPIENT_NOT_FOUND: int -CRYPT_E_CONTROL_TYPE: int -CRYPT_E_ISSUER_SERIALNUMBER: int -CRYPT_E_SIGNER_NOT_FOUND: int -CRYPT_E_ATTRIBUTES_MISSING: int -CRYPT_E_STREAM_MSG_NOT_READY: int -CRYPT_E_STREAM_INSUFFICIENT_DATA: int -CRYPT_E_BAD_LEN: int -CRYPT_E_BAD_ENCODE: int -CRYPT_E_FILE_ERROR: int -CRYPT_E_NOT_FOUND: int -CRYPT_E_EXISTS: int -CRYPT_E_NO_PROVIDER: int -CRYPT_E_SELF_SIGNED: int -CRYPT_E_DELETED_PREV: int -CRYPT_E_NO_MATCH: int -CRYPT_E_UNEXPECTED_MSG_TYPE: int -CRYPT_E_NO_KEY_PROPERTY: int -CRYPT_E_NO_DECRYPT_CERT: int -CRYPT_E_BAD_MSG: int -CRYPT_E_NO_SIGNER: int -CRYPT_E_PENDING_CLOSE: int -CRYPT_E_REVOKED: int -CRYPT_E_NO_REVOCATION_DLL: int -CRYPT_E_NO_REVOCATION_CHECK: int -CRYPT_E_REVOCATION_OFFLINE: int 
-CRYPT_E_NOT_IN_REVOCATION_DATABASE: int -CRYPT_E_INVALID_NUMERIC_STRING: int -CRYPT_E_INVALID_PRINTABLE_STRING: int -CRYPT_E_INVALID_IA5_STRING: int -CRYPT_E_INVALID_X500_STRING: int -CRYPT_E_NOT_CHAR_STRING: int -CRYPT_E_FILERESIZED: int -CRYPT_E_SECURITY_SETTINGS: int -CRYPT_E_NO_VERIFY_USAGE_DLL: int -CRYPT_E_NO_VERIFY_USAGE_CHECK: int -CRYPT_E_VERIFY_USAGE_OFFLINE: int -CRYPT_E_NOT_IN_CTL: int -CRYPT_E_NO_TRUSTED_SIGNER: int -CRYPT_E_OSS_ERROR: int -CERTSRV_E_BAD_REQUESTSUBJECT: int -CERTSRV_E_NO_REQUEST: int -CERTSRV_E_BAD_REQUESTSTATUS: int -CERTSRV_E_PROPERTY_EMPTY: int -CERTDB_E_JET_ERROR: int -TRUST_E_SYSTEM_ERROR: int -TRUST_E_NO_SIGNER_CERT: int -TRUST_E_COUNTER_SIGNER: int -TRUST_E_CERT_SIGNATURE: int -TRUST_E_TIME_STAMP: int -TRUST_E_BAD_DIGEST: int -TRUST_E_BASIC_CONSTRAINTS: int -TRUST_E_FINANCIAL_CRITERIA: int -NTE_OP_OK: int -DIGSIG_E_ENCODE: int -DIGSIG_E_DECODE: int -DIGSIG_E_EXTENSIBILITY: int -DIGSIG_E_CRYPTO: int -PERSIST_E_SIZEDEFINITE: int -PERSIST_E_SIZEINDEFINITE: int -PERSIST_E_NOTSELFSIZING: int -TRUST_E_NOSIGNATURE: int -CERT_E_EXPIRED: int -CERT_E_VALIDITYPERIODNESTING: int -CERT_E_ROLE: int -CERT_E_PATHLENCONST: int -CERT_E_CRITICAL: int -CERT_E_PURPOSE: int -CERT_E_ISSUERCHAINING: int -CERT_E_MALFORMED: int -CERT_E_UNTRUSTEDROOT: int -CERT_E_CHAINING: int -TRUST_E_FAIL: int -CERT_E_REVOKED: int -CERT_E_UNTRUSTEDTESTROOT: int -CERT_E_REVOCATION_FAILURE: int -CERT_E_CN_NO_MATCH: int -CERT_E_WRONG_USAGE: int -SPAPI_E_EXPECTED_SECTION_NAME: int -SPAPI_E_BAD_SECTION_NAME_LINE: int -SPAPI_E_SECTION_NAME_TOO_LONG: int -SPAPI_E_GENERAL_SYNTAX: int -SPAPI_E_WRONG_INF_STYLE: int -SPAPI_E_SECTION_NOT_FOUND: int -SPAPI_E_LINE_NOT_FOUND: int -SPAPI_E_NO_ASSOCIATED_CLASS: int -SPAPI_E_CLASS_MISMATCH: int -SPAPI_E_DUPLICATE_FOUND: int -SPAPI_E_NO_DRIVER_SELECTED: int -SPAPI_E_KEY_DOES_NOT_EXIST: int -SPAPI_E_INVALID_DEVINST_NAME: int -SPAPI_E_INVALID_CLASS: int -SPAPI_E_DEVINST_ALREADY_EXISTS: int -SPAPI_E_DEVINFO_NOT_REGISTERED: int -SPAPI_E_INVALID_REG_PROPERTY: int -SPAPI_E_NO_INF: int -SPAPI_E_NO_SUCH_DEVINST: int -SPAPI_E_CANT_LOAD_CLASS_ICON: int -SPAPI_E_INVALID_CLASS_INSTALLER: int -SPAPI_E_DI_DO_DEFAULT: int -SPAPI_E_DI_NOFILECOPY: int -SPAPI_E_INVALID_HWPROFILE: int -SPAPI_E_NO_DEVICE_SELECTED: int -SPAPI_E_DEVINFO_LIST_LOCKED: int -SPAPI_E_DEVINFO_DATA_LOCKED: int -SPAPI_E_DI_BAD_PATH: int -SPAPI_E_NO_CLASSINSTALL_PARAMS: int -SPAPI_E_FILEQUEUE_LOCKED: int -SPAPI_E_BAD_SERVICE_INSTALLSECT: int -SPAPI_E_NO_CLASS_DRIVER_LIST: int -SPAPI_E_NO_ASSOCIATED_SERVICE: int -SPAPI_E_NO_DEFAULT_DEVICE_INTERFACE: int -SPAPI_E_DEVICE_INTERFACE_ACTIVE: int -SPAPI_E_DEVICE_INTERFACE_REMOVED: int -SPAPI_E_BAD_INTERFACE_INSTALLSECT: int -SPAPI_E_NO_SUCH_INTERFACE_CLASS: int -SPAPI_E_INVALID_REFERENCE_STRING: int -SPAPI_E_INVALID_MACHINENAME: int -SPAPI_E_REMOTE_COMM_FAILURE: int -SPAPI_E_MACHINE_UNAVAILABLE: int -SPAPI_E_NO_CONFIGMGR_SERVICES: int -SPAPI_E_INVALID_PROPPAGE_PROVIDER: int -SPAPI_E_NO_SUCH_DEVICE_INTERFACE: int -SPAPI_E_DI_POSTPROCESSING_REQUIRED: int -SPAPI_E_INVALID_COINSTALLER: int -SPAPI_E_NO_COMPAT_DRIVERS: int -SPAPI_E_NO_DEVICE_ICON: int -SPAPI_E_INVALID_INF_LOGCONFIG: int -SPAPI_E_DI_DONT_INSTALL: int -SPAPI_E_INVALID_FILTER_DRIVER: int -SPAPI_E_ERROR_NOT_INSTALLED: int -CDERR_DIALOGFAILURE: int -CDERR_GENERALCODES: int -CDERR_STRUCTSIZE: int -CDERR_INITIALIZATION: int -CDERR_NOTEMPLATE: int -CDERR_NOHINSTANCE: int -CDERR_LOADSTRFAILURE: int -CDERR_FINDRESFAILURE: int -CDERR_LOADRESFAILURE: int -CDERR_LOCKRESFAILURE: int -CDERR_MEMALLOCFAILURE: int 
-CDERR_MEMLOCKFAILURE: int -CDERR_NOHOOK: int -CDERR_REGISTERMSGFAIL: int -PDERR_PRINTERCODES: int -PDERR_SETUPFAILURE: int -PDERR_PARSEFAILURE: int -PDERR_RETDEFFAILURE: int -PDERR_LOADDRVFAILURE: int -PDERR_GETDEVMODEFAIL: int -PDERR_INITFAILURE: int -PDERR_NODEVICES: int -PDERR_NODEFAULTPRN: int -PDERR_DNDMMISMATCH: int -PDERR_CREATEICFAILURE: int -PDERR_PRINTERNOTFOUND: int -PDERR_DEFAULTDIFFERENT: int -CFERR_CHOOSEFONTCODES: int -CFERR_NOFONTS: int -CFERR_MAXLESSTHANMIN: int -FNERR_FILENAMECODES: int -FNERR_SUBCLASSFAILURE: int -FNERR_INVALIDFILENAME: int -FNERR_BUFFERTOOSMALL: int -FRERR_FINDREPLACECODES: int -FRERR_BUFFERLENGTHZERO: int -CCERR_CHOOSECOLORCODES: int +NOERROR: Final = 0 +E_UNEXPECTED: Final = -2147418113 +E_NOTIMPL: Final = -2147467263 +E_OUTOFMEMORY: Final = -2147024882 +E_INVALIDARG: Final = -2147024809 +E_NOINTERFACE: Final = -2147467262 +E_POINTER: Final = -2147467261 +E_HANDLE: Final = -2147024890 +E_ABORT: Final = -2147467260 +E_FAIL: Final = -2147467259 +E_ACCESSDENIED: Final = -2147024891 +E_PENDING: Final = -2147483638 +E_BOUNDS: Final = -2147483637 +E_CHANGED_STATE: Final = -2147483636 +E_ILLEGAL_STATE_CHANGE: Final = -2147483635 +E_ILLEGAL_METHOD_CALL: Final = -2147483634 +RO_E_METADATA_NAME_NOT_FOUND: Final = -2147483633 +RO_E_METADATA_NAME_IS_NAMESPACE: Final = -2147483632 +RO_E_METADATA_INVALID_TYPE_FORMAT: Final = -2147483631 +RO_E_INVALID_METADATA_FILE: Final = -2147483630 +RO_E_CLOSED: Final = -2147483629 +RO_E_EXCLUSIVE_WRITE: Final = -2147483628 +RO_E_CHANGE_NOTIFICATION_IN_PROGRESS: Final = -2147483627 +RO_E_ERROR_STRING_NOT_FOUND: Final = -2147483626 +E_STRING_NOT_NULL_TERMINATED: Final = -2147483625 +E_ILLEGAL_DELEGATE_ASSIGNMENT: Final = -2147483624 +E_ASYNC_OPERATION_NOT_STARTED: Final = -2147483623 +E_APPLICATION_EXITING: Final = -2147483622 +E_APPLICATION_VIEW_EXITING: Final = -2147483621 +RO_E_MUST_BE_AGILE: Final = -2147483620 +RO_E_UNSUPPORTED_FROM_MTA: Final = -2147483619 +RO_E_COMMITTED: Final = -2147483618 +RO_E_BLOCKED_CROSS_ASTA_CALL: Final = -2147483617 +RO_E_CANNOT_ACTIVATE_FULL_TRUST_SERVER: Final = -2147483616 +RO_E_CANNOT_ACTIVATE_UNIVERSAL_APPLICATION_SERVER: Final = -2147483615 +CO_E_INIT_TLS: Final = -2147467258 +CO_E_INIT_SHARED_ALLOCATOR: Final = -2147467257 +CO_E_INIT_MEMORY_ALLOCATOR: Final = -2147467256 +CO_E_INIT_CLASS_CACHE: Final = -2147467255 +CO_E_INIT_RPC_CHANNEL: Final = -2147467254 +CO_E_INIT_TLS_SET_CHANNEL_CONTROL: Final = -2147467253 +CO_E_INIT_TLS_CHANNEL_CONTROL: Final = -2147467252 +CO_E_INIT_UNACCEPTED_USER_ALLOCATOR: Final = -2147467251 +CO_E_INIT_SCM_MUTEX_EXISTS: Final = -2147467250 +CO_E_INIT_SCM_FILE_MAPPING_EXISTS: Final = -2147467249 +CO_E_INIT_SCM_MAP_VIEW_OF_FILE: Final = -2147467248 +CO_E_INIT_SCM_EXEC_FAILURE: Final = -2147467247 +CO_E_INIT_ONLY_SINGLE_THREADED: Final = -2147467246 +CO_E_CANT_REMOTE: Final = -2147467245 +CO_E_BAD_SERVER_NAME: Final = -2147467244 +CO_E_WRONG_SERVER_IDENTITY: Final = -2147467243 +CO_E_OLE1DDE_DISABLED: Final = -2147467242 +CO_E_RUNAS_SYNTAX: Final = -2147467241 +CO_E_CREATEPROCESS_FAILURE: Final = -2147467240 +CO_E_RUNAS_CREATEPROCESS_FAILURE: Final = -2147467239 +CO_E_RUNAS_LOGON_FAILURE: Final = -2147467238 +CO_E_LAUNCH_PERMSSION_DENIED: Final = -2147467237 +CO_E_START_SERVICE_FAILURE: Final = -2147467236 +CO_E_REMOTE_COMMUNICATION_FAILURE: Final = -2147467235 +CO_E_SERVER_START_TIMEOUT: Final = -2147467234 +CO_E_CLSREG_INCONSISTENT: Final = -2147467233 +CO_E_IIDREG_INCONSISTENT: Final = -2147467232 +CO_E_NOT_SUPPORTED: Final = -2147467231 +CO_E_RELOAD_DLL: Final 
= -2147467230 +CO_E_MSI_ERROR: Final = -2147467229 +CO_E_ATTEMPT_TO_CREATE_OUTSIDE_CLIENT_CONTEXT: Final = -2147467228 +CO_E_SERVER_PAUSED: Final = -2147467227 +CO_E_SERVER_NOT_PAUSED: Final = -2147467226 +CO_E_CLASS_DISABLED: Final = -2147467225 +CO_E_CLRNOTAVAILABLE: Final = -2147467224 +CO_E_ASYNC_WORK_REJECTED: Final = -2147467223 +CO_E_SERVER_INIT_TIMEOUT: Final = -2147467222 +CO_E_NO_SECCTX_IN_ACTIVATE: Final = -2147467221 +CO_E_TRACKER_CONFIG: Final = -2147467216 +CO_E_THREADPOOL_CONFIG: Final = -2147467215 +CO_E_SXS_CONFIG: Final = -2147467214 +CO_E_MALFORMED_SPN: Final = -2147467213 +CO_E_UNREVOKED_REGISTRATION_ON_APARTMENT_SHUTDOWN: Final = -2147467212 +CO_E_PREMATURE_STUB_RUNDOWN: Final = -2147467211 +S_OK: Final = 0 +S_FALSE: Final = 1 +OLE_E_FIRST: Final = -2147221504 +OLE_E_LAST: Final = -2147221249 +OLE_S_FIRST: Final = 0x00040000 +OLE_S_LAST: Final = 0x000400FF +OLE_E_OLEVERB: Final = -2147221504 +OLE_E_ADVF: Final = -2147221503 +OLE_E_ENUM_NOMORE: Final = -2147221502 +OLE_E_ADVISENOTSUPPORTED: Final = -2147221501 +OLE_E_NOCONNECTION: Final = -2147221500 +OLE_E_NOTRUNNING: Final = -2147221499 +OLE_E_NOCACHE: Final = -2147221498 +OLE_E_BLANK: Final = -2147221497 +OLE_E_CLASSDIFF: Final = -2147221496 +OLE_E_CANT_GETMONIKER: Final = -2147221495 +OLE_E_CANT_BINDTOSOURCE: Final = -2147221494 +OLE_E_STATIC: Final = -2147221493 +OLE_E_PROMPTSAVECANCELLED: Final = -2147221492 +OLE_E_INVALIDRECT: Final = -2147221491 +OLE_E_WRONGCOMPOBJ: Final = -2147221490 +OLE_E_INVALIDHWND: Final = -2147221489 +OLE_E_NOT_INPLACEACTIVE: Final = -2147221488 +OLE_E_CANTCONVERT: Final = -2147221487 +OLE_E_NOSTORAGE: Final = -2147221486 +DV_E_FORMATETC: Final = -2147221404 +DV_E_DVTARGETDEVICE: Final = -2147221403 +DV_E_STGMEDIUM: Final = -2147221402 +DV_E_STATDATA: Final = -2147221401 +DV_E_LINDEX: Final = -2147221400 +DV_E_TYMED: Final = -2147221399 +DV_E_CLIPFORMAT: Final = -2147221398 +DV_E_DVASPECT: Final = -2147221397 +DV_E_DVTARGETDEVICE_SIZE: Final = -2147221396 +DV_E_NOIVIEWOBJECT: Final = -2147221395 +DRAGDROP_E_FIRST: Final = -2147221248 +DRAGDROP_E_LAST: Final = -2147221233 +DRAGDROP_S_FIRST: Final = 0x00040100 +DRAGDROP_S_LAST: Final = 0x0004010F +DRAGDROP_E_NOTREGISTERED: Final = -2147221248 +DRAGDROP_E_ALREADYREGISTERED: Final = -2147221247 +DRAGDROP_E_INVALIDHWND: Final = -2147221246 +DRAGDROP_E_CONCURRENT_DRAG_ATTEMPTED: Final = -2147221245 +CLASSFACTORY_E_FIRST: Final = -2147221232 +CLASSFACTORY_E_LAST: Final = -2147221217 +CLASSFACTORY_S_FIRST: Final = 0x00040110 +CLASSFACTORY_S_LAST: Final = 0x0004011F +CLASS_E_NOAGGREGATION: Final = -2147221232 +CLASS_E_CLASSNOTAVAILABLE: Final = -2147221231 +CLASS_E_NOTLICENSED: Final = -2147221230 +MARSHAL_E_FIRST: Final = -2147221216 +MARSHAL_E_LAST: Final = -2147221201 +MARSHAL_S_FIRST: Final = 0x00040120 +MARSHAL_S_LAST: Final = 0x0004012F +DATA_E_FIRST: Final = -2147221200 +DATA_E_LAST: Final = -2147221185 +DATA_S_FIRST: Final = 0x00040130 +DATA_S_LAST: Final = 0x0004013F +VIEW_E_FIRST: Final = -2147221184 +VIEW_E_LAST: Final = -2147221169 +VIEW_S_FIRST: Final = 0x00040140 +VIEW_S_LAST: Final = 0x0004014F +VIEW_E_DRAW: Final = -2147221184 +REGDB_E_FIRST: Final = -2147221168 +REGDB_E_LAST: Final = -2147221153 +REGDB_S_FIRST: Final = 0x00040150 +REGDB_S_LAST: Final = 0x0004015F +REGDB_E_READREGDB: Final = -2147221168 +REGDB_E_WRITEREGDB: Final = -2147221167 +REGDB_E_KEYMISSING: Final = -2147221166 +REGDB_E_INVALIDVALUE: Final = -2147221165 +REGDB_E_CLASSNOTREG: Final = -2147221164 +REGDB_E_IIDNOTREG: Final = -2147221163 
+REGDB_E_BADTHREADINGMODEL: Final = -2147221162 +REGDB_E_PACKAGEPOLICYVIOLATION: Final = -2147221161 +CAT_E_FIRST: Final = -2147221152 +CAT_E_LAST: Final = -2147221151 +CAT_E_CATIDNOEXIST: Final = -2147221152 +CAT_E_NODESCRIPTION: Final = -2147221151 +CS_E_FIRST: Final = -2147221148 +CS_E_LAST: Final = -2147221137 +CS_E_PACKAGE_NOTFOUND: Final = -2147221148 +CS_E_NOT_DELETABLE: Final = -2147221147 +CS_E_CLASS_NOTFOUND: Final = -2147221146 +CS_E_INVALID_VERSION: Final = -2147221145 +CS_E_NO_CLASSSTORE: Final = -2147221144 +CS_E_OBJECT_NOTFOUND: Final = -2147221143 +CS_E_OBJECT_ALREADY_EXISTS: Final = -2147221142 +CS_E_INVALID_PATH: Final = -2147221141 +CS_E_NETWORK_ERROR: Final = -2147221140 +CS_E_ADMIN_LIMIT_EXCEEDED: Final = -2147221139 +CS_E_SCHEMA_MISMATCH: Final = -2147221138 +CS_E_INTERNAL_ERROR: Final = -2147221137 +CACHE_E_FIRST: Final = -2147221136 +CACHE_E_LAST: Final = -2147221121 +CACHE_S_FIRST: Final = 0x00040170 +CACHE_S_LAST: Final = 0x0004017F +CACHE_E_NOCACHE_UPDATED: Final = -2147221136 +OLEOBJ_E_FIRST: Final = -2147221120 +OLEOBJ_E_LAST: Final = -2147221105 +OLEOBJ_S_FIRST: Final = 0x00040180 +OLEOBJ_S_LAST: Final = 0x0004018F +OLEOBJ_E_NOVERBS: Final = -2147221120 +OLEOBJ_E_INVALIDVERB: Final = -2147221119 +CLIENTSITE_E_FIRST: Final = -2147221104 +CLIENTSITE_E_LAST: Final = -2147221089 +CLIENTSITE_S_FIRST: Final = 0x00040190 +CLIENTSITE_S_LAST: Final = 0x0004019F +INPLACE_E_NOTUNDOABLE: Final = -2147221088 +INPLACE_E_NOTOOLSPACE: Final = -2147221087 +INPLACE_E_FIRST: Final = -2147221088 +INPLACE_E_LAST: Final = -2147221073 +INPLACE_S_FIRST: Final = 0x000401A0 +INPLACE_S_LAST: Final = 0x000401AF +ENUM_E_FIRST: Final = -2147221072 +ENUM_E_LAST: Final = -2147221057 +ENUM_S_FIRST: Final = 0x000401B0 +ENUM_S_LAST: Final = 0x000401BF +CONVERT10_E_FIRST: Final = -2147221056 +CONVERT10_E_LAST: Final = -2147221041 +CONVERT10_S_FIRST: Final = 0x000401C0 +CONVERT10_S_LAST: Final = 0x000401CF +CONVERT10_E_OLESTREAM_GET: Final = -2147221056 +CONVERT10_E_OLESTREAM_PUT: Final = -2147221055 +CONVERT10_E_OLESTREAM_FMT: Final = -2147221054 +CONVERT10_E_OLESTREAM_BITMAP_TO_DIB: Final = -2147221053 +CONVERT10_E_STG_FMT: Final = -2147221052 +CONVERT10_E_STG_NO_STD_STREAM: Final = -2147221051 +CONVERT10_E_STG_DIB_TO_BITMAP: Final = -2147221050 +CONVERT10_E_OLELINK_DISABLED: Final = -2147221049 +CLIPBRD_E_FIRST: Final = -2147221040 +CLIPBRD_E_LAST: Final = -2147221025 +CLIPBRD_S_FIRST: Final = 0x000401D0 +CLIPBRD_S_LAST: Final = 0x000401DF +CLIPBRD_E_CANT_OPEN: Final = -2147221040 +CLIPBRD_E_CANT_EMPTY: Final = -2147221039 +CLIPBRD_E_CANT_SET: Final = -2147221038 +CLIPBRD_E_BAD_DATA: Final = -2147221037 +CLIPBRD_E_CANT_CLOSE: Final = -2147221036 +MK_E_FIRST: Final = -2147221024 +MK_E_LAST: Final = -2147221009 +MK_S_FIRST: Final = 0x000401E0 +MK_S_LAST: Final = 0x000401EF +MK_E_CONNECTMANUALLY: Final = -2147221024 +MK_E_EXCEEDEDDEADLINE: Final = -2147221023 +MK_E_NEEDGENERIC: Final = -2147221022 +MK_E_UNAVAILABLE: Final = -2147221021 +MK_E_SYNTAX: Final = -2147221020 +MK_E_NOOBJECT: Final = -2147221019 +MK_E_INVALIDEXTENSION: Final = -2147221018 +MK_E_INTERMEDIATEINTERFACENOTSUPPORTED: Final = -2147221017 +MK_E_NOTBINDABLE: Final = -2147221016 +MK_E_NOTBOUND: Final = -2147221015 +MK_E_CANTOPENFILE: Final = -2147221014 +MK_E_MUSTBOTHERUSER: Final = -2147221013 +MK_E_NOINVERSE: Final = -2147221012 +MK_E_NOSTORAGE: Final = -2147221011 +MK_E_NOPREFIX: Final = -2147221010 +MK_E_ENUMERATION_FAILED: Final = -2147221009 +CO_E_FIRST: Final = -2147221008 +CO_E_LAST: Final = -2147220993 +CO_S_FIRST: Final 
= 0x000401F0 +CO_S_LAST: Final = 0x000401FF +CO_E_NOTINITIALIZED: Final = -2147221008 +CO_E_ALREADYINITIALIZED: Final = -2147221007 +CO_E_CANTDETERMINECLASS: Final = -2147221006 +CO_E_CLASSSTRING: Final = -2147221005 +CO_E_IIDSTRING: Final = -2147221004 +CO_E_APPNOTFOUND: Final = -2147221003 +CO_E_APPSINGLEUSE: Final = -2147221002 +CO_E_ERRORINAPP: Final = -2147221001 +CO_E_DLLNOTFOUND: Final = -2147221000 +CO_E_ERRORINDLL: Final = -2147220999 +CO_E_WRONGOSFORAPP: Final = -2147220998 +CO_E_OBJNOTREG: Final = -2147220997 +CO_E_OBJISREG: Final = -2147220996 +CO_E_OBJNOTCONNECTED: Final = -2147220995 +CO_E_APPDIDNTREG: Final = -2147220994 +CO_E_RELEASED: Final = -2147220993 +EVENT_E_FIRST: Final = -2147220992 +EVENT_E_LAST: Final = -2147220961 +EVENT_S_FIRST: Final = 0x00040200 +EVENT_S_LAST: Final = 0x0004021F +EVENT_S_SOME_SUBSCRIBERS_FAILED: Final = 0x00040200 +EVENT_E_ALL_SUBSCRIBERS_FAILED: Final = -2147220991 +EVENT_S_NOSUBSCRIBERS: Final = 0x00040202 +EVENT_E_QUERYSYNTAX: Final = -2147220989 +EVENT_E_QUERYFIELD: Final = -2147220988 +EVENT_E_INTERNALEXCEPTION: Final = -2147220987 +EVENT_E_INTERNALERROR: Final = -2147220986 +EVENT_E_INVALID_PER_USER_SID: Final = -2147220985 +EVENT_E_USER_EXCEPTION: Final = -2147220984 +EVENT_E_TOO_MANY_METHODS: Final = -2147220983 +EVENT_E_MISSING_EVENTCLASS: Final = -2147220982 +EVENT_E_NOT_ALL_REMOVED: Final = -2147220981 +EVENT_E_COMPLUS_NOT_INSTALLED: Final = -2147220980 +EVENT_E_CANT_MODIFY_OR_DELETE_UNCONFIGURED_OBJECT: Final = -2147220979 +EVENT_E_CANT_MODIFY_OR_DELETE_CONFIGURED_OBJECT: Final = -2147220978 +EVENT_E_INVALID_EVENT_CLASS_PARTITION: Final = -2147220977 +EVENT_E_PER_USER_SID_NOT_LOGGED_ON: Final = -2147220976 +TPC_E_INVALID_PROPERTY: Final = -2147220927 +TPC_E_NO_DEFAULT_TABLET: Final = -2147220974 +TPC_E_UNKNOWN_PROPERTY: Final = -2147220965 +TPC_E_INVALID_INPUT_RECT: Final = -2147220967 +TPC_E_INVALID_STROKE: Final = -2147220958 +TPC_E_INITIALIZE_FAIL: Final = -2147220957 +TPC_E_NOT_RELEVANT: Final = -2147220942 +TPC_E_INVALID_PACKET_DESCRIPTION: Final = -2147220941 +TPC_E_RECOGNIZER_NOT_REGISTERED: Final = -2147220939 +TPC_E_INVALID_RIGHTS: Final = -2147220938 +TPC_E_OUT_OF_ORDER_CALL: Final = -2147220937 +TPC_E_QUEUE_FULL: Final = -2147220936 +TPC_E_INVALID_CONFIGURATION: Final = -2147220935 +TPC_E_INVALID_DATA_FROM_RECOGNIZER: Final = -2147220934 +TPC_S_TRUNCATED: Final = 0x00040252 +TPC_S_INTERRUPTED: Final = 0x00040253 +TPC_S_NO_DATA_TO_PROCESS: Final = 0x00040254 +XACT_E_FIRST: Final = -2147168256 +XACT_E_LAST: Final = -2147168213 +XACT_S_FIRST: Final = 0x0004D000 +XACT_S_LAST: Final = 0x0004D010 +XACT_E_ALREADYOTHERSINGLEPHASE: Final = -2147168256 +XACT_E_CANTRETAIN: Final = -2147168255 +XACT_E_COMMITFAILED: Final = -2147168254 +XACT_E_COMMITPREVENTED: Final = -2147168253 +XACT_E_HEURISTICABORT: Final = -2147168252 +XACT_E_HEURISTICCOMMIT: Final = -2147168251 +XACT_E_HEURISTICDAMAGE: Final = -2147168250 +XACT_E_HEURISTICDANGER: Final = -2147168249 +XACT_E_ISOLATIONLEVEL: Final = -2147168248 +XACT_E_NOASYNC: Final = -2147168247 +XACT_E_NOENLIST: Final = -2147168246 +XACT_E_NOISORETAIN: Final = -2147168245 +XACT_E_NORESOURCE: Final = -2147168244 +XACT_E_NOTCURRENT: Final = -2147168243 +XACT_E_NOTRANSACTION: Final = -2147168242 +XACT_E_NOTSUPPORTED: Final = -2147168241 +XACT_E_UNKNOWNRMGRID: Final = -2147168240 +XACT_E_WRONGSTATE: Final = -2147168239 +XACT_E_WRONGUOW: Final = -2147168238 +XACT_E_XTIONEXISTS: Final = -2147168237 +XACT_E_NOIMPORTOBJECT: Final = -2147168236 +XACT_E_INVALIDCOOKIE: Final = -2147168235 
+XACT_E_INDOUBT: Final = -2147168234 +XACT_E_NOTIMEOUT: Final = -2147168233 +XACT_E_ALREADYINPROGRESS: Final = -2147168232 +XACT_E_ABORTED: Final = -2147168231 +XACT_E_LOGFULL: Final = -2147168230 +XACT_E_TMNOTAVAILABLE: Final = -2147168229 +XACT_E_CONNECTION_DOWN: Final = -2147168228 +XACT_E_CONNECTION_DENIED: Final = -2147168227 +XACT_E_REENLISTTIMEOUT: Final = -2147168226 +XACT_E_TIP_CONNECT_FAILED: Final = -2147168225 +XACT_E_TIP_PROTOCOL_ERROR: Final = -2147168224 +XACT_E_TIP_PULL_FAILED: Final = -2147168223 +XACT_E_DEST_TMNOTAVAILABLE: Final = -2147168222 +XACT_E_TIP_DISABLED: Final = -2147168221 +XACT_E_NETWORK_TX_DISABLED: Final = -2147168220 +XACT_E_PARTNER_NETWORK_TX_DISABLED: Final = -2147168219 +XACT_E_XA_TX_DISABLED: Final = -2147168218 +XACT_E_UNABLE_TO_READ_DTC_CONFIG: Final = -2147168217 +XACT_E_UNABLE_TO_LOAD_DTC_PROXY: Final = -2147168216 +XACT_E_ABORTING: Final = -2147168215 +XACT_E_PUSH_COMM_FAILURE: Final = -2147168214 +XACT_E_PULL_COMM_FAILURE: Final = -2147168213 +XACT_E_LU_TX_DISABLED: Final = -2147168212 +XACT_E_CLERKNOTFOUND: Final = -2147168128 +XACT_E_CLERKEXISTS: Final = -2147168127 +XACT_E_RECOVERYINPROGRESS: Final = -2147168126 +XACT_E_TRANSACTIONCLOSED: Final = -2147168125 +XACT_E_INVALIDLSN: Final = -2147168124 +XACT_E_REPLAYREQUEST: Final = -2147168123 +XACT_S_ASYNC: Final = 0x0004D000 +XACT_S_DEFECT: Final = 0x0004D001 +XACT_S_READONLY: Final = 0x0004D002 +XACT_S_SOMENORETAIN: Final = 0x0004D003 +XACT_S_OKINFORM: Final = 0x0004D004 +XACT_S_MADECHANGESCONTENT: Final = 0x0004D005 +XACT_S_MADECHANGESINFORM: Final = 0x0004D006 +XACT_S_ALLNORETAIN: Final = 0x0004D007 +XACT_S_ABORTING: Final = 0x0004D008 +XACT_S_SINGLEPHASE: Final = 0x0004D009 +XACT_S_LOCALLY_OK: Final = 0x0004D00A +XACT_S_LASTRESOURCEMANAGER: Final = 0x0004D010 +CONTEXT_E_FIRST: Final = -2147164160 +CONTEXT_E_LAST: Final = -2147164113 +CONTEXT_S_FIRST: Final = 0x0004E000 +CONTEXT_S_LAST: Final = 0x0004E02F +CONTEXT_E_ABORTED: Final = -2147164158 +CONTEXT_E_ABORTING: Final = -2147164157 +CONTEXT_E_NOCONTEXT: Final = -2147164156 +CONTEXT_E_WOULD_DEADLOCK: Final = -2147164155 +CONTEXT_E_SYNCH_TIMEOUT: Final = -2147164154 +CONTEXT_E_OLDREF: Final = -2147164153 +CONTEXT_E_ROLENOTFOUND: Final = -2147164148 +CONTEXT_E_TMNOTAVAILABLE: Final = -2147164145 +CO_E_ACTIVATIONFAILED: Final = -2147164127 +CO_E_ACTIVATIONFAILED_EVENTLOGGED: Final = -2147164126 +CO_E_ACTIVATIONFAILED_CATALOGERROR: Final = -2147164125 +CO_E_ACTIVATIONFAILED_TIMEOUT: Final = -2147164124 +CO_E_INITIALIZATIONFAILED: Final = -2147164123 +CONTEXT_E_NOJIT: Final = -2147164122 +CONTEXT_E_NOTRANSACTION: Final = -2147164121 +CO_E_THREADINGMODEL_CHANGED: Final = -2147164120 +CO_E_NOIISINTRINSICS: Final = -2147164119 +CO_E_NOCOOKIES: Final = -2147164118 +CO_E_DBERROR: Final = -2147164117 +CO_E_NOTPOOLED: Final = -2147164116 +CO_E_NOTCONSTRUCTED: Final = -2147164115 +CO_E_NOSYNCHRONIZATION: Final = -2147164114 +CO_E_ISOLEVELMISMATCH: Final = -2147164113 +CO_E_CALL_OUT_OF_TX_SCOPE_NOT_ALLOWED: Final = -2147164112 +CO_E_EXIT_TRANSACTION_SCOPE_NOT_CALLED: Final = -2147164111 +OLE_S_USEREG: Final = 0x00040000 +OLE_S_STATIC: Final = 0x00040001 +OLE_S_MAC_CLIPFORMAT: Final = 0x00040002 +DRAGDROP_S_DROP: Final = 0x00040100 +DRAGDROP_S_CANCEL: Final = 0x00040101 +DRAGDROP_S_USEDEFAULTCURSORS: Final = 0x00040102 +DATA_S_SAMEFORMATETC: Final = 0x00040130 +VIEW_S_ALREADY_FROZEN: Final = 0x00040140 +CACHE_S_FORMATETC_NOTSUPPORTED: Final = 0x00040170 +CACHE_S_SAMECACHE: Final = 0x00040171 +CACHE_S_SOMECACHES_NOTUPDATED: Final = 0x00040172 
+OLEOBJ_S_INVALIDVERB: Final = 0x00040180 +OLEOBJ_S_CANNOT_DOVERB_NOW: Final = 0x00040181 +OLEOBJ_S_INVALIDHWND: Final = 0x00040182 +INPLACE_S_TRUNCATED: Final = 0x000401A0 +CONVERT10_S_NO_PRESENTATION: Final = 0x000401C0 +MK_S_REDUCED_TO_SELF: Final = 0x000401E2 +MK_S_ME: Final = 0x000401E4 +MK_S_HIM: Final = 0x000401E5 +MK_S_US: Final = 0x000401E6 +MK_S_MONIKERALREADYREGISTERED: Final = 0x000401E7 +SCHED_S_TASK_READY: Final = 0x00041300 +SCHED_S_TASK_RUNNING: Final = 0x00041301 +SCHED_S_TASK_DISABLED: Final = 0x00041302 +SCHED_S_TASK_HAS_NOT_RUN: Final = 0x00041303 +SCHED_S_TASK_NO_MORE_RUNS: Final = 0x00041304 +SCHED_S_TASK_NOT_SCHEDULED: Final = 0x00041305 +SCHED_S_TASK_TERMINATED: Final = 0x00041306 +SCHED_S_TASK_NO_VALID_TRIGGERS: Final = 0x00041307 +SCHED_S_EVENT_TRIGGER: Final = 0x00041308 +SCHED_E_TRIGGER_NOT_FOUND: Final = -2147216631 +SCHED_E_TASK_NOT_READY: Final = -2147216630 +SCHED_E_TASK_NOT_RUNNING: Final = -2147216629 +SCHED_E_SERVICE_NOT_INSTALLED: Final = -2147216628 +SCHED_E_CANNOT_OPEN_TASK: Final = -2147216627 +SCHED_E_INVALID_TASK: Final = -2147216626 +SCHED_E_ACCOUNT_INFORMATION_NOT_SET: Final = -2147216625 +SCHED_E_ACCOUNT_NAME_NOT_FOUND: Final = -2147216624 +SCHED_E_ACCOUNT_DBASE_CORRUPT: Final = -2147216623 +SCHED_E_NO_SECURITY_SERVICES: Final = -2147216622 +SCHED_E_UNKNOWN_OBJECT_VERSION: Final = -2147216621 +SCHED_E_UNSUPPORTED_ACCOUNT_OPTION: Final = -2147216620 +SCHED_E_SERVICE_NOT_RUNNING: Final = -2147216619 +SCHED_E_UNEXPECTEDNODE: Final = -2147216618 +SCHED_E_NAMESPACE: Final = -2147216617 +SCHED_E_INVALIDVALUE: Final = -2147216616 +SCHED_E_MISSINGNODE: Final = -2147216615 +SCHED_E_MALFORMEDXML: Final = -2147216614 +SCHED_S_SOME_TRIGGERS_FAILED: Final = 0x0004131B +SCHED_S_BATCH_LOGON_PROBLEM: Final = 0x0004131C +SCHED_E_TOO_MANY_NODES: Final = -2147216611 +SCHED_E_PAST_END_BOUNDARY: Final = -2147216610 +SCHED_E_ALREADY_RUNNING: Final = -2147216609 +SCHED_E_USER_NOT_LOGGED_ON: Final = -2147216608 +SCHED_E_INVALID_TASK_HASH: Final = -2147216607 +SCHED_E_SERVICE_NOT_AVAILABLE: Final = -2147216606 +SCHED_E_SERVICE_TOO_BUSY: Final = -2147216605 +SCHED_E_TASK_ATTEMPTED: Final = -2147216604 +SCHED_S_TASK_QUEUED: Final = 0x00041325 +SCHED_E_TASK_DISABLED: Final = -2147216602 +SCHED_E_TASK_NOT_V1_COMPAT: Final = -2147216601 +SCHED_E_START_ON_DEMAND: Final = -2147216600 +SCHED_E_TASK_NOT_UBPM_COMPAT: Final = -2147216599 +SCHED_E_DEPRECATED_FEATURE_USED: Final = -2147216592 +CO_E_CLASS_CREATE_FAILED: Final = -2146959359 +CO_E_SCM_ERROR: Final = -2146959358 +CO_E_SCM_RPC_FAILURE: Final = -2146959357 +CO_E_BAD_PATH: Final = -2146959356 +CO_E_SERVER_EXEC_FAILURE: Final = -2146959355 +CO_E_OBJSRV_RPC_FAILURE: Final = -2146959354 +MK_E_NO_NORMALIZED: Final = -2146959353 +CO_E_SERVER_STOPPING: Final = -2146959352 +MEM_E_INVALID_ROOT: Final = -2146959351 +MEM_E_INVALID_LINK: Final = -2146959344 +MEM_E_INVALID_SIZE: Final = -2146959343 +CO_S_NOTALLINTERFACES: Final = 0x00080012 +CO_S_MACHINENAMENOTFOUND: Final = 0x00080013 +CO_E_MISSING_DISPLAYNAME: Final = -2146959339 +CO_E_RUNAS_VALUE_MUST_BE_AAA: Final = -2146959338 +CO_E_ELEVATION_DISABLED: Final = -2146959337 +APPX_E_PACKAGING_INTERNAL: Final = -2146958848 +APPX_E_INTERLEAVING_NOT_ALLOWED: Final = -2146958847 +APPX_E_RELATIONSHIPS_NOT_ALLOWED: Final = -2146958846 +APPX_E_MISSING_REQUIRED_FILE: Final = -2146958845 +APPX_E_INVALID_MANIFEST: Final = -2146958844 +APPX_E_INVALID_BLOCKMAP: Final = -2146958843 +APPX_E_CORRUPT_CONTENT: Final = -2146958842 +APPX_E_BLOCK_HASH_INVALID: Final = -2146958841 
+APPX_E_REQUESTED_RANGE_TOO_LARGE: Final = -2146958840 +APPX_E_INVALID_SIP_CLIENT_DATA: Final = -2146958839 +APPX_E_INVALID_KEY_INFO: Final = -2146958838 +APPX_E_INVALID_CONTENTGROUPMAP: Final = -2146958837 +APPX_E_INVALID_APPINSTALLER: Final = -2146958836 +APPX_E_DELTA_BASELINE_VERSION_MISMATCH: Final = -2146958835 +APPX_E_DELTA_PACKAGE_MISSING_FILE: Final = -2146958834 +APPX_E_INVALID_DELTA_PACKAGE: Final = -2146958833 +APPX_E_DELTA_APPENDED_PACKAGE_NOT_ALLOWED: Final = -2146958832 +APPX_E_INVALID_PACKAGING_LAYOUT: Final = -2146958831 +APPX_E_INVALID_PACKAGESIGNCONFIG: Final = -2146958830 +APPX_E_RESOURCESPRI_NOT_ALLOWED: Final = -2146958829 +APPX_E_FILE_COMPRESSION_MISMATCH: Final = -2146958828 +APPX_E_INVALID_PAYLOAD_PACKAGE_EXTENSION: Final = -2146958827 +APPX_E_INVALID_ENCRYPTION_EXCLUSION_FILE_LIST: Final = -2146958826 +APPX_E_INVALID_PACKAGE_FOLDER_ACLS: Final = -2146958825 +APPX_E_INVALID_PUBLISHER_BRIDGING: Final = -2146958824 +APPX_E_DIGEST_MISMATCH: Final = -2146958823 +BT_E_SPURIOUS_ACTIVATION: Final = -2146958592 +DISP_E_UNKNOWNINTERFACE: Final = -2147352575 +DISP_E_MEMBERNOTFOUND: Final = -2147352573 +DISP_E_PARAMNOTFOUND: Final = -2147352572 +DISP_E_TYPEMISMATCH: Final = -2147352571 +DISP_E_UNKNOWNNAME: Final = -2147352570 +DISP_E_NONAMEDARGS: Final = -2147352569 +DISP_E_BADVARTYPE: Final = -2147352568 +DISP_E_EXCEPTION: Final = -2147352567 +DISP_E_OVERFLOW: Final = -2147352566 +DISP_E_BADINDEX: Final = -2147352565 +DISP_E_UNKNOWNLCID: Final = -2147352564 +DISP_E_ARRAYISLOCKED: Final = -2147352563 +DISP_E_BADPARAMCOUNT: Final = -2147352562 +DISP_E_PARAMNOTOPTIONAL: Final = -2147352561 +DISP_E_BADCALLEE: Final = -2147352560 +DISP_E_NOTACOLLECTION: Final = -2147352559 +DISP_E_DIVBYZERO: Final = -2147352558 +DISP_E_BUFFERTOOSMALL: Final = -2147352557 +TYPE_E_BUFFERTOOSMALL: Final = -2147319786 +TYPE_E_FIELDNOTFOUND: Final = -2147319785 +TYPE_E_INVDATAREAD: Final = -2147319784 +TYPE_E_UNSUPFORMAT: Final = -2147319783 +TYPE_E_REGISTRYACCESS: Final = -2147319780 +TYPE_E_LIBNOTREGISTERED: Final = -2147319779 +TYPE_E_UNDEFINEDTYPE: Final = -2147319769 +TYPE_E_QUALIFIEDNAMEDISALLOWED: Final = -2147319768 +TYPE_E_INVALIDSTATE: Final = -2147319767 +TYPE_E_WRONGTYPEKIND: Final = -2147319766 +TYPE_E_ELEMENTNOTFOUND: Final = -2147319765 +TYPE_E_AMBIGUOUSNAME: Final = -2147319764 +TYPE_E_NAMECONFLICT: Final = -2147319763 +TYPE_E_UNKNOWNLCID: Final = -2147319762 +TYPE_E_DLLFUNCTIONNOTFOUND: Final = -2147319761 +TYPE_E_BADMODULEKIND: Final = -2147317571 +TYPE_E_SIZETOOBIG: Final = -2147317563 +TYPE_E_DUPLICATEID: Final = -2147317562 +TYPE_E_INVALIDID: Final = -2147317553 +TYPE_E_TYPEMISMATCH: Final = -2147316576 +TYPE_E_OUTOFBOUNDS: Final = -2147316575 +TYPE_E_IOERROR: Final = -2147316574 +TYPE_E_CANTCREATETMPFILE: Final = -2147316573 +TYPE_E_CANTLOADLIBRARY: Final = -2147312566 +TYPE_E_INCONSISTENTPROPFUNCS: Final = -2147312509 +TYPE_E_CIRCULARTYPE: Final = -2147312508 +STG_E_INVALIDFUNCTION: Final = -2147287039 +STG_E_FILENOTFOUND: Final = -2147287038 +STG_E_PATHNOTFOUND: Final = -2147287037 +STG_E_TOOMANYOPENFILES: Final = -2147287036 +STG_E_ACCESSDENIED: Final = -2147287035 +STG_E_INVALIDHANDLE: Final = -2147287034 +STG_E_INSUFFICIENTMEMORY: Final = -2147287032 +STG_E_INVALIDPOINTER: Final = -2147287031 +STG_E_NOMOREFILES: Final = -2147287022 +STG_E_DISKISWRITEPROTECTED: Final = -2147287021 +STG_E_SEEKERROR: Final = -2147287015 +STG_E_WRITEFAULT: Final = -2147287011 +STG_E_READFAULT: Final = -2147287010 +STG_E_SHAREVIOLATION: Final = -2147287008 +STG_E_LOCKVIOLATION: Final = -2147287007 
+STG_E_FILEALREADYEXISTS: Final = -2147286960 +STG_E_INVALIDPARAMETER: Final = -2147286953 +STG_E_MEDIUMFULL: Final = -2147286928 +STG_E_PROPSETMISMATCHED: Final = -2147286800 +STG_E_ABNORMALAPIEXIT: Final = -2147286790 +STG_E_INVALIDHEADER: Final = -2147286789 +STG_E_INVALIDNAME: Final = -2147286788 +STG_E_UNKNOWN: Final = -2147286787 +STG_E_UNIMPLEMENTEDFUNCTION: Final = -2147286786 +STG_E_INVALIDFLAG: Final = -2147286785 +STG_E_INUSE: Final = -2147286784 +STG_E_NOTCURRENT: Final = -2147286783 +STG_E_REVERTED: Final = -2147286782 +STG_E_CANTSAVE: Final = -2147286781 +STG_E_OLDFORMAT: Final = -2147286780 +STG_E_OLDDLL: Final = -2147286779 +STG_E_SHAREREQUIRED: Final = -2147286778 +STG_E_NOTFILEBASEDSTORAGE: Final = -2147286777 +STG_E_EXTANTMARSHALLINGS: Final = -2147286776 +STG_E_DOCFILECORRUPT: Final = -2147286775 +STG_E_BADBASEADDRESS: Final = -2147286768 +STG_E_DOCFILETOOLARGE: Final = -2147286767 +STG_E_NOTSIMPLEFORMAT: Final = -2147286766 +STG_E_INCOMPLETE: Final = -2147286527 +STG_E_TERMINATED: Final = -2147286526 +STG_S_CONVERTED: Final = 0x00030200 +STG_S_BLOCK: Final = 0x00030201 +STG_S_RETRYNOW: Final = 0x00030202 +STG_S_MONITORING: Final = 0x00030203 +STG_S_MULTIPLEOPENS: Final = 0x00030204 +STG_S_CONSOLIDATIONFAILED: Final = 0x00030205 +STG_S_CANNOTCONSOLIDATE: Final = 0x00030206 +STG_S_POWER_CYCLE_REQUIRED: Final = 0x00030207 +STG_E_FIRMWARE_SLOT_INVALID: Final = -2147286520 +STG_E_FIRMWARE_IMAGE_INVALID: Final = -2147286519 +STG_E_DEVICE_UNRESPONSIVE: Final = -2147286518 +STG_E_STATUS_COPY_PROTECTION_FAILURE: Final = -2147286267 +STG_E_CSS_AUTHENTICATION_FAILURE: Final = -2147286266 +STG_E_CSS_KEY_NOT_PRESENT: Final = -2147286265 +STG_E_CSS_KEY_NOT_ESTABLISHED: Final = -2147286264 +STG_E_CSS_SCRAMBLED_SECTOR: Final = -2147286263 +STG_E_CSS_REGION_MISMATCH: Final = -2147286262 +STG_E_RESETS_EXHAUSTED: Final = -2147286261 +RPC_E_CALL_REJECTED: Final = -2147418111 +RPC_E_CALL_CANCELED: Final = -2147418110 +RPC_E_CANTPOST_INSENDCALL: Final = -2147418109 +RPC_E_CANTCALLOUT_INASYNCCALL: Final = -2147418108 +RPC_E_CANTCALLOUT_INEXTERNALCALL: Final = -2147418107 +RPC_E_CONNECTION_TERMINATED: Final = -2147418106 +RPC_E_SERVER_DIED: Final = -2147418105 +RPC_E_CLIENT_DIED: Final = -2147418104 +RPC_E_INVALID_DATAPACKET: Final = -2147418103 +RPC_E_CANTTRANSMIT_CALL: Final = -2147418102 +RPC_E_CLIENT_CANTMARSHAL_DATA: Final = -2147418101 +RPC_E_CLIENT_CANTUNMARSHAL_DATA: Final = -2147418100 +RPC_E_SERVER_CANTMARSHAL_DATA: Final = -2147418099 +RPC_E_SERVER_CANTUNMARSHAL_DATA: Final = -2147418098 +RPC_E_INVALID_DATA: Final = -2147418097 +RPC_E_INVALID_PARAMETER: Final = -2147418096 +RPC_E_CANTCALLOUT_AGAIN: Final = -2147418095 +RPC_E_SERVER_DIED_DNE: Final = -2147418094 +RPC_E_SYS_CALL_FAILED: Final = -2147417856 +RPC_E_OUT_OF_RESOURCES: Final = -2147417855 +RPC_E_ATTEMPTED_MULTITHREAD: Final = -2147417854 +RPC_E_NOT_REGISTERED: Final = -2147417853 +RPC_E_FAULT: Final = -2147417852 +RPC_E_SERVERFAULT: Final = -2147417851 +RPC_E_CHANGED_MODE: Final = -2147417850 +RPC_E_INVALIDMETHOD: Final = -2147417849 +RPC_E_DISCONNECTED: Final = -2147417848 +RPC_E_RETRY: Final = -2147417847 +RPC_E_SERVERCALL_RETRYLATER: Final = -2147417846 +RPC_E_SERVERCALL_REJECTED: Final = -2147417845 +RPC_E_INVALID_CALLDATA: Final = -2147417844 +RPC_E_CANTCALLOUT_ININPUTSYNCCALL: Final = -2147417843 +RPC_E_WRONG_THREAD: Final = -2147417842 +RPC_E_THREAD_NOT_INIT: Final = -2147417841 +RPC_E_VERSION_MISMATCH: Final = -2147417840 +RPC_E_INVALID_HEADER: Final = -2147417839 +RPC_E_INVALID_EXTENSION: Final = -2147417838 
+RPC_E_INVALID_IPID: Final = -2147417837 +RPC_E_INVALID_OBJECT: Final = -2147417836 +RPC_S_CALLPENDING: Final = -2147417835 +RPC_S_WAITONTIMER: Final = -2147417834 +RPC_E_CALL_COMPLETE: Final = -2147417833 +RPC_E_UNSECURE_CALL: Final = -2147417832 +RPC_E_TOO_LATE: Final = -2147417831 +RPC_E_NO_GOOD_SECURITY_PACKAGES: Final = -2147417830 +RPC_E_ACCESS_DENIED: Final = -2147417829 +RPC_E_REMOTE_DISABLED: Final = -2147417828 +RPC_E_INVALID_OBJREF: Final = -2147417827 +RPC_E_NO_CONTEXT: Final = -2147417826 +RPC_E_TIMEOUT: Final = -2147417825 +RPC_E_NO_SYNC: Final = -2147417824 +RPC_E_FULLSIC_REQUIRED: Final = -2147417823 +RPC_E_INVALID_STD_NAME: Final = -2147417822 +CO_E_FAILEDTOIMPERSONATE: Final = -2147417821 +CO_E_FAILEDTOGETSECCTX: Final = -2147417820 +CO_E_FAILEDTOOPENTHREADTOKEN: Final = -2147417819 +CO_E_FAILEDTOGETTOKENINFO: Final = -2147417818 +CO_E_TRUSTEEDOESNTMATCHCLIENT: Final = -2147417817 +CO_E_FAILEDTOQUERYCLIENTBLANKET: Final = -2147417816 +CO_E_FAILEDTOSETDACL: Final = -2147417815 +CO_E_ACCESSCHECKFAILED: Final = -2147417814 +CO_E_NETACCESSAPIFAILED: Final = -2147417813 +CO_E_WRONGTRUSTEENAMESYNTAX: Final = -2147417812 +CO_E_INVALIDSID: Final = -2147417811 +CO_E_CONVERSIONFAILED: Final = -2147417810 +CO_E_NOMATCHINGSIDFOUND: Final = -2147417809 +CO_E_LOOKUPACCSIDFAILED: Final = -2147417808 +CO_E_NOMATCHINGNAMEFOUND: Final = -2147417807 +CO_E_LOOKUPACCNAMEFAILED: Final = -2147417806 +CO_E_SETSERLHNDLFAILED: Final = -2147417805 +CO_E_FAILEDTOGETWINDIR: Final = -2147417804 +CO_E_PATHTOOLONG: Final = -2147417803 +CO_E_FAILEDTOGENUUID: Final = -2147417802 +CO_E_FAILEDTOCREATEFILE: Final = -2147417801 +CO_E_FAILEDTOCLOSEHANDLE: Final = -2147417800 +CO_E_EXCEEDSYSACLLIMIT: Final = -2147417799 +CO_E_ACESINWRONGORDER: Final = -2147417798 +CO_E_INCOMPATIBLESTREAMVERSION: Final = -2147417797 +CO_E_FAILEDTOOPENPROCESSTOKEN: Final = -2147417796 +CO_E_DECODEFAILED: Final = -2147417795 +CO_E_ACNOTINITIALIZED: Final = -2147417793 +CO_E_CANCEL_DISABLED: Final = -2147417792 +RPC_E_UNEXPECTED: Final = -2147352577 +ERROR_AUDITING_DISABLED: Final = -1073151999 +ERROR_ALL_SIDS_FILTERED: Final = -1073151998 +ERROR_BIZRULES_NOT_ENABLED: Final = -1073151997 +NTE_BAD_UID: Final = -2146893823 +NTE_BAD_HASH: Final = -2146893822 +NTE_BAD_KEY: Final = -2146893821 +NTE_BAD_LEN: Final = -2146893820 +NTE_BAD_DATA: Final = -2146893819 +NTE_BAD_SIGNATURE: Final = -2146893818 +NTE_BAD_VER: Final = -2146893817 +NTE_BAD_ALGID: Final = -2146893816 +NTE_BAD_FLAGS: Final = -2146893815 +NTE_BAD_TYPE: Final = -2146893814 +NTE_BAD_KEY_STATE: Final = -2146893813 +NTE_BAD_HASH_STATE: Final = -2146893812 +NTE_NO_KEY: Final = -2146893811 +NTE_NO_MEMORY: Final = -2146893810 +NTE_EXISTS: Final = -2146893809 +NTE_PERM: Final = -2146893808 +NTE_NOT_FOUND: Final = -2146893807 +NTE_DOUBLE_ENCRYPT: Final = -2146893806 +NTE_BAD_PROVIDER: Final = -2146893805 +NTE_BAD_PROV_TYPE: Final = -2146893804 +NTE_BAD_PUBLIC_KEY: Final = -2146893803 +NTE_BAD_KEYSET: Final = -2146893802 +NTE_PROV_TYPE_NOT_DEF: Final = -2146893801 +NTE_PROV_TYPE_ENTRY_BAD: Final = -2146893800 +NTE_KEYSET_NOT_DEF: Final = -2146893799 +NTE_KEYSET_ENTRY_BAD: Final = -2146893798 +NTE_PROV_TYPE_NO_MATCH: Final = -2146893797 +NTE_SIGNATURE_FILE_BAD: Final = -2146893796 +NTE_PROVIDER_DLL_FAIL: Final = -2146893795 +NTE_PROV_DLL_NOT_FOUND: Final = -2146893794 +NTE_BAD_KEYSET_PARAM: Final = -2146893793 +NTE_FAIL: Final = -2146893792 +NTE_SYS_ERR: Final = -2146893791 +NTE_SILENT_CONTEXT: Final = -2146893790 +NTE_TOKEN_KEYSET_STORAGE_FULL: Final = -2146893789 
+NTE_TEMPORARY_PROFILE: Final = -2146893788 +NTE_FIXEDPARAMETER: Final = -2146893787 +NTE_INVALID_HANDLE: Final = -2146893786 +NTE_INVALID_PARAMETER: Final = -2146893785 +NTE_BUFFER_TOO_SMALL: Final = -2146893784 +NTE_NOT_SUPPORTED: Final = -2146893783 +NTE_NO_MORE_ITEMS: Final = -2146893782 +NTE_BUFFERS_OVERLAP: Final = -2146893781 +NTE_DECRYPTION_FAILURE: Final = -2146893780 +NTE_INTERNAL_ERROR: Final = -2146893779 +NTE_UI_REQUIRED: Final = -2146893778 +NTE_HMAC_NOT_SUPPORTED: Final = -2146893777 +NTE_DEVICE_NOT_READY: Final = -2146893776 +NTE_AUTHENTICATION_IGNORED: Final = -2146893775 +NTE_VALIDATION_FAILED: Final = -2146893774 +NTE_INCORRECT_PASSWORD: Final = -2146893773 +NTE_ENCRYPTION_FAILURE: Final = -2146893772 +NTE_DEVICE_NOT_FOUND: Final = -2146893771 +NTE_USER_CANCELLED: Final = -2146893770 +NTE_PASSWORD_CHANGE_REQUIRED: Final = -2146893769 +NTE_NOT_ACTIVE_CONSOLE: Final = -2146893768 +SEC_E_INSUFFICIENT_MEMORY: Final = -2146893056 +SEC_E_INVALID_HANDLE: Final = -2146893055 +SEC_E_UNSUPPORTED_FUNCTION: Final = -2146893054 +SEC_E_TARGET_UNKNOWN: Final = -2146893053 +SEC_E_INTERNAL_ERROR: Final = -2146893052 +SEC_E_SECPKG_NOT_FOUND: Final = -2146893051 +SEC_E_NOT_OWNER: Final = -2146893050 +SEC_E_CANNOT_INSTALL: Final = -2146893049 +SEC_E_INVALID_TOKEN: Final = -2146893048 +SEC_E_CANNOT_PACK: Final = -2146893047 +SEC_E_QOP_NOT_SUPPORTED: Final = -2146893046 +SEC_E_NO_IMPERSONATION: Final = -2146893045 +SEC_E_LOGON_DENIED: Final = -2146893044 +SEC_E_UNKNOWN_CREDENTIALS: Final = -2146893043 +SEC_E_NO_CREDENTIALS: Final = -2146893042 +SEC_E_MESSAGE_ALTERED: Final = -2146893041 +SEC_E_OUT_OF_SEQUENCE: Final = -2146893040 +SEC_E_NO_AUTHENTICATING_AUTHORITY: Final = -2146893039 +SEC_I_CONTINUE_NEEDED: Final = 0x00090312 +SEC_I_COMPLETE_NEEDED: Final = 0x00090313 +SEC_I_COMPLETE_AND_CONTINUE: Final = 0x00090314 +SEC_I_LOCAL_LOGON: Final = 0x00090315 +SEC_I_GENERIC_EXTENSION_RECEIVED: Final = 0x00090316 +SEC_E_BAD_PKGID: Final = -2146893034 +SEC_E_CONTEXT_EXPIRED: Final = -2146893033 +SEC_I_CONTEXT_EXPIRED: Final = 0x00090317 +SEC_E_INCOMPLETE_MESSAGE: Final = -2146893032 +SEC_E_INCOMPLETE_CREDENTIALS: Final = -2146893024 +SEC_E_BUFFER_TOO_SMALL: Final = -2146893023 +SEC_I_INCOMPLETE_CREDENTIALS: Final = 0x00090320 +SEC_I_RENEGOTIATE: Final = 0x00090321 +SEC_E_WRONG_PRINCIPAL: Final = -2146893022 +SEC_I_NO_LSA_CONTEXT: Final = 0x00090323 +SEC_E_TIME_SKEW: Final = -2146893020 +SEC_E_UNTRUSTED_ROOT: Final = -2146893019 +SEC_E_ILLEGAL_MESSAGE: Final = -2146893018 +SEC_E_CERT_UNKNOWN: Final = -2146893017 +SEC_E_CERT_EXPIRED: Final = -2146893016 +SEC_E_ENCRYPT_FAILURE: Final = -2146893015 +SEC_E_DECRYPT_FAILURE: Final = -2146893008 +SEC_E_ALGORITHM_MISMATCH: Final = -2146893007 +SEC_E_SECURITY_QOS_FAILED: Final = -2146893006 +SEC_E_UNFINISHED_CONTEXT_DELETED: Final = -2146893005 +SEC_E_NO_TGT_REPLY: Final = -2146893004 +SEC_E_NO_IP_ADDRESSES: Final = -2146893003 +SEC_E_WRONG_CREDENTIAL_HANDLE: Final = -2146893002 +SEC_E_CRYPTO_SYSTEM_INVALID: Final = -2146893001 +SEC_E_MAX_REFERRALS_EXCEEDED: Final = -2146893000 +SEC_E_MUST_BE_KDC: Final = -2146892999 +SEC_E_STRONG_CRYPTO_NOT_SUPPORTED: Final = -2146892998 +SEC_E_TOO_MANY_PRINCIPALS: Final = -2146892997 +SEC_E_NO_PA_DATA: Final = -2146892996 +SEC_E_PKINIT_NAME_MISMATCH: Final = -2146892995 +SEC_E_SMARTCARD_LOGON_REQUIRED: Final = -2146892994 +SEC_E_SHUTDOWN_IN_PROGRESS: Final = -2146892993 +SEC_E_KDC_INVALID_REQUEST: Final = -2146892992 +SEC_E_KDC_UNABLE_TO_REFER: Final = -2146892991 +SEC_E_KDC_UNKNOWN_ETYPE: Final = -2146892990 
+SEC_E_UNSUPPORTED_PREAUTH: Final = -2146892989 +SEC_E_DELEGATION_REQUIRED: Final = -2146892987 +SEC_E_BAD_BINDINGS: Final = -2146892986 +SEC_E_MULTIPLE_ACCOUNTS: Final = -2146892985 +SEC_E_NO_KERB_KEY: Final = -2146892984 +SEC_E_CERT_WRONG_USAGE: Final = -2146892983 +SEC_E_DOWNGRADE_DETECTED: Final = -2146892976 +SEC_E_SMARTCARD_CERT_REVOKED: Final = -2146892975 +SEC_E_ISSUING_CA_UNTRUSTED: Final = -2146892974 +SEC_E_REVOCATION_OFFLINE_C: Final = -2146892973 +SEC_E_PKINIT_CLIENT_FAILURE: Final = -2146892972 +SEC_E_SMARTCARD_CERT_EXPIRED: Final = -2146892971 +SEC_E_NO_S4U_PROT_SUPPORT: Final = -2146892970 +SEC_E_CROSSREALM_DELEGATION_FAILURE: Final = -2146892969 +SEC_E_REVOCATION_OFFLINE_KDC: Final = -2146892968 +SEC_E_ISSUING_CA_UNTRUSTED_KDC: Final = -2146892967 +SEC_E_KDC_CERT_EXPIRED: Final = -2146892966 +SEC_E_KDC_CERT_REVOKED: Final = -2146892965 +SEC_I_SIGNATURE_NEEDED: Final = 0x0009035C +SEC_E_INVALID_PARAMETER: Final = -2146892963 +SEC_E_DELEGATION_POLICY: Final = -2146892962 +SEC_E_POLICY_NLTM_ONLY: Final = -2146892961 +SEC_I_NO_RENEGOTIATION: Final = 0x00090360 +SEC_E_NO_CONTEXT: Final = -2146892959 +SEC_E_PKU2U_CERT_FAILURE: Final = -2146892958 +SEC_E_MUTUAL_AUTH_FAILED: Final = -2146892957 +SEC_I_MESSAGE_FRAGMENT: Final = 0x00090364 +SEC_E_ONLY_HTTPS_ALLOWED: Final = -2146892955 +SEC_I_CONTINUE_NEEDED_MESSAGE_OK: Final = 0x00090366 +SEC_E_APPLICATION_PROTOCOL_MISMATCH: Final = -2146892953 +SEC_I_ASYNC_CALL_PENDING: Final = 0x00090368 +SEC_E_INVALID_UPN_NAME: Final = -2146892951 +SEC_E_EXT_BUFFER_TOO_SMALL: Final = -2146892950 +SEC_E_INSUFFICIENT_BUFFERS: Final = -2146892949 +SEC_E_NO_SPM: Final = SEC_E_INTERNAL_ERROR +SEC_E_NOT_SUPPORTED: Final = SEC_E_UNSUPPORTED_FUNCTION +CRYPT_E_MSG_ERROR: Final = -2146889727 +CRYPT_E_UNKNOWN_ALGO: Final = -2146889726 +CRYPT_E_OID_FORMAT: Final = -2146889725 +CRYPT_E_INVALID_MSG_TYPE: Final = -2146889724 +CRYPT_E_UNEXPECTED_ENCODING: Final = -2146889723 +CRYPT_E_AUTH_ATTR_MISSING: Final = -2146889722 +CRYPT_E_HASH_VALUE: Final = -2146889721 +CRYPT_E_INVALID_INDEX: Final = -2146889720 +CRYPT_E_ALREADY_DECRYPTED: Final = -2146889719 +CRYPT_E_NOT_DECRYPTED: Final = -2146889718 +CRYPT_E_RECIPIENT_NOT_FOUND: Final = -2146889717 +CRYPT_E_CONTROL_TYPE: Final = -2146889716 +CRYPT_E_ISSUER_SERIALNUMBER: Final = -2146889715 +CRYPT_E_SIGNER_NOT_FOUND: Final = -2146889714 +CRYPT_E_ATTRIBUTES_MISSING: Final = -2146889713 +CRYPT_E_STREAM_MSG_NOT_READY: Final = -2146889712 +CRYPT_E_STREAM_INSUFFICIENT_DATA: Final = -2146889711 +CRYPT_I_NEW_PROTECTION_REQUIRED: Final = 0x00091012 +CRYPT_E_BAD_LEN: Final = -2146885631 +CRYPT_E_BAD_ENCODE: Final = -2146885630 +CRYPT_E_FILE_ERROR: Final = -2146885629 +CRYPT_E_NOT_FOUND: Final = -2146885628 +CRYPT_E_EXISTS: Final = -2146885627 +CRYPT_E_NO_PROVIDER: Final = -2146885626 +CRYPT_E_SELF_SIGNED: Final = -2146885625 +CRYPT_E_DELETED_PREV: Final = -2146885624 +CRYPT_E_NO_MATCH: Final = -2146885623 +CRYPT_E_UNEXPECTED_MSG_TYPE: Final = -2146885622 +CRYPT_E_NO_KEY_PROPERTY: Final = -2146885621 +CRYPT_E_NO_DECRYPT_CERT: Final = -2146885620 +CRYPT_E_BAD_MSG: Final = -2146885619 +CRYPT_E_NO_SIGNER: Final = -2146885618 +CRYPT_E_PENDING_CLOSE: Final = -2146885617 +CRYPT_E_REVOKED: Final = -2146885616 +CRYPT_E_NO_REVOCATION_DLL: Final = -2146885615 +CRYPT_E_NO_REVOCATION_CHECK: Final = -2146885614 +CRYPT_E_REVOCATION_OFFLINE: Final = -2146885613 +CRYPT_E_NOT_IN_REVOCATION_DATABASE: Final = -2146885612 +CRYPT_E_INVALID_NUMERIC_STRING: Final = -2146885600 +CRYPT_E_INVALID_PRINTABLE_STRING: Final = -2146885599 
+CRYPT_E_INVALID_IA5_STRING: Final = -2146885598 +CRYPT_E_INVALID_X500_STRING: Final = -2146885597 +CRYPT_E_NOT_CHAR_STRING: Final = -2146885596 +CRYPT_E_FILERESIZED: Final = -2146885595 +CRYPT_E_SECURITY_SETTINGS: Final = -2146885594 +CRYPT_E_NO_VERIFY_USAGE_DLL: Final = -2146885593 +CRYPT_E_NO_VERIFY_USAGE_CHECK: Final = -2146885592 +CRYPT_E_VERIFY_USAGE_OFFLINE: Final = -2146885591 +CRYPT_E_NOT_IN_CTL: Final = -2146885590 +CRYPT_E_NO_TRUSTED_SIGNER: Final = -2146885589 +CRYPT_E_MISSING_PUBKEY_PARA: Final = -2146885588 +CRYPT_E_OBJECT_LOCATOR_OBJECT_NOT_FOUND: Final = -2146885587 +CRYPT_E_OSS_ERROR: Final = -2146881536 +OSS_MORE_BUF: Final = -2146881535 +OSS_NEGATIVE_UINTEGER: Final = -2146881534 +OSS_PDU_RANGE: Final = -2146881533 +OSS_MORE_INPUT: Final = -2146881532 +OSS_DATA_ERROR: Final = -2146881531 +OSS_BAD_ARG: Final = -2146881530 +OSS_BAD_VERSION: Final = -2146881529 +OSS_OUT_MEMORY: Final = -2146881528 +OSS_PDU_MISMATCH: Final = -2146881527 +OSS_LIMITED: Final = -2146881526 +OSS_BAD_PTR: Final = -2146881525 +OSS_BAD_TIME: Final = -2146881524 +OSS_INDEFINITE_NOT_SUPPORTED: Final = -2146881523 +OSS_MEM_ERROR: Final = -2146881522 +OSS_BAD_TABLE: Final = -2146881521 +OSS_TOO_LONG: Final = -2146881520 +OSS_CONSTRAINT_VIOLATED: Final = -2146881519 +OSS_FATAL_ERROR: Final = -2146881518 +OSS_ACCESS_SERIALIZATION_ERROR: Final = -2146881517 +OSS_NULL_TBL: Final = -2146881516 +OSS_NULL_FCN: Final = -2146881515 +OSS_BAD_ENCRULES: Final = -2146881514 +OSS_UNAVAIL_ENCRULES: Final = -2146881513 +OSS_CANT_OPEN_TRACE_WINDOW: Final = -2146881512 +OSS_UNIMPLEMENTED: Final = -2146881511 +OSS_OID_DLL_NOT_LINKED: Final = -2146881510 +OSS_CANT_OPEN_TRACE_FILE: Final = -2146881509 +OSS_TRACE_FILE_ALREADY_OPEN: Final = -2146881508 +OSS_TABLE_MISMATCH: Final = -2146881507 +OSS_TYPE_NOT_SUPPORTED: Final = -2146881506 +OSS_REAL_DLL_NOT_LINKED: Final = -2146881505 +OSS_REAL_CODE_NOT_LINKED: Final = -2146881504 +OSS_OUT_OF_RANGE: Final = -2146881503 +OSS_COPIER_DLL_NOT_LINKED: Final = -2146881502 +OSS_CONSTRAINT_DLL_NOT_LINKED: Final = -2146881501 +OSS_COMPARATOR_DLL_NOT_LINKED: Final = -2146881500 +OSS_COMPARATOR_CODE_NOT_LINKED: Final = -2146881499 +OSS_MEM_MGR_DLL_NOT_LINKED: Final = -2146881498 +OSS_PDV_DLL_NOT_LINKED: Final = -2146881497 +OSS_PDV_CODE_NOT_LINKED: Final = -2146881496 +OSS_API_DLL_NOT_LINKED: Final = -2146881495 +OSS_BERDER_DLL_NOT_LINKED: Final = -2146881494 +OSS_PER_DLL_NOT_LINKED: Final = -2146881493 +OSS_OPEN_TYPE_ERROR: Final = -2146881492 +OSS_MUTEX_NOT_CREATED: Final = -2146881491 +OSS_CANT_CLOSE_TRACE_FILE: Final = -2146881490 +CRYPT_E_ASN1_ERROR: Final = -2146881280 +CRYPT_E_ASN1_INTERNAL: Final = -2146881279 +CRYPT_E_ASN1_EOD: Final = -2146881278 +CRYPT_E_ASN1_CORRUPT: Final = -2146881277 +CRYPT_E_ASN1_LARGE: Final = -2146881276 +CRYPT_E_ASN1_CONSTRAINT: Final = -2146881275 +CRYPT_E_ASN1_MEMORY: Final = -2146881274 +CRYPT_E_ASN1_OVERFLOW: Final = -2146881273 +CRYPT_E_ASN1_BADPDU: Final = -2146881272 +CRYPT_E_ASN1_BADARGS: Final = -2146881271 +CRYPT_E_ASN1_BADREAL: Final = -2146881270 +CRYPT_E_ASN1_BADTAG: Final = -2146881269 +CRYPT_E_ASN1_CHOICE: Final = -2146881268 +CRYPT_E_ASN1_RULE: Final = -2146881267 +CRYPT_E_ASN1_UTF8: Final = -2146881266 +CRYPT_E_ASN1_PDU_TYPE: Final = -2146881229 +CRYPT_E_ASN1_NYI: Final = -2146881228 +CRYPT_E_ASN1_EXTENDED: Final = -2146881023 +CRYPT_E_ASN1_NOEOD: Final = -2146881022 +CERTSRV_E_BAD_REQUESTSUBJECT: Final = -2146877439 +CERTSRV_E_NO_REQUEST: Final = -2146877438 +CERTSRV_E_BAD_REQUESTSTATUS: Final = -2146877437 +CERTSRV_E_PROPERTY_EMPTY: 
Final = -2146877436 +CERTSRV_E_INVALID_CA_CERTIFICATE: Final = -2146877435 +CERTSRV_E_SERVER_SUSPENDED: Final = -2146877434 +CERTSRV_E_ENCODING_LENGTH: Final = -2146877433 +CERTSRV_E_ROLECONFLICT: Final = -2146877432 +CERTSRV_E_RESTRICTEDOFFICER: Final = -2146877431 +CERTSRV_E_KEY_ARCHIVAL_NOT_CONFIGURED: Final = -2146877430 +CERTSRV_E_NO_VALID_KRA: Final = -2146877429 +CERTSRV_E_BAD_REQUEST_KEY_ARCHIVAL: Final = -2146877428 +CERTSRV_E_NO_CAADMIN_DEFINED: Final = -2146877427 +CERTSRV_E_BAD_RENEWAL_CERT_ATTRIBUTE: Final = -2146877426 +CERTSRV_E_NO_DB_SESSIONS: Final = -2146877425 +CERTSRV_E_ALIGNMENT_FAULT: Final = -2146877424 +CERTSRV_E_ENROLL_DENIED: Final = -2146877423 +CERTSRV_E_TEMPLATE_DENIED: Final = -2146877422 +CERTSRV_E_DOWNLEVEL_DC_SSL_OR_UPGRADE: Final = -2146877421 +CERTSRV_E_ADMIN_DENIED_REQUEST: Final = -2146877420 +CERTSRV_E_NO_POLICY_SERVER: Final = -2146877419 +CERTSRV_E_WEAK_SIGNATURE_OR_KEY: Final = -2146877418 +CERTSRV_E_KEY_ATTESTATION_NOT_SUPPORTED: Final = -2146877417 +CERTSRV_E_ENCRYPTION_CERT_REQUIRED: Final = -2146877416 +CERTSRV_E_UNSUPPORTED_CERT_TYPE: Final = -2146875392 +CERTSRV_E_NO_CERT_TYPE: Final = -2146875391 +CERTSRV_E_TEMPLATE_CONFLICT: Final = -2146875390 +CERTSRV_E_SUBJECT_ALT_NAME_REQUIRED: Final = -2146875389 +CERTSRV_E_ARCHIVED_KEY_REQUIRED: Final = -2146875388 +CERTSRV_E_SMIME_REQUIRED: Final = -2146875387 +CERTSRV_E_BAD_RENEWAL_SUBJECT: Final = -2146875386 +CERTSRV_E_BAD_TEMPLATE_VERSION: Final = -2146875385 +CERTSRV_E_TEMPLATE_POLICY_REQUIRED: Final = -2146875384 +CERTSRV_E_SIGNATURE_POLICY_REQUIRED: Final = -2146875383 +CERTSRV_E_SIGNATURE_COUNT: Final = -2146875382 +CERTSRV_E_SIGNATURE_REJECTED: Final = -2146875381 +CERTSRV_E_ISSUANCE_POLICY_REQUIRED: Final = -2146875380 +CERTSRV_E_SUBJECT_UPN_REQUIRED: Final = -2146875379 +CERTSRV_E_SUBJECT_DIRECTORY_GUID_REQUIRED: Final = -2146875378 +CERTSRV_E_SUBJECT_DNS_REQUIRED: Final = -2146875377 +CERTSRV_E_ARCHIVED_KEY_UNEXPECTED: Final = -2146875376 +CERTSRV_E_KEY_LENGTH: Final = -2146875375 +CERTSRV_E_SUBJECT_EMAIL_REQUIRED: Final = -2146875374 +CERTSRV_E_UNKNOWN_CERT_TYPE: Final = -2146875373 +CERTSRV_E_CERT_TYPE_OVERLAP: Final = -2146875372 +CERTSRV_E_TOO_MANY_SIGNATURES: Final = -2146875371 +CERTSRV_E_RENEWAL_BAD_PUBLIC_KEY: Final = -2146875370 +CERTSRV_E_INVALID_EK: Final = -2146875369 +CERTSRV_E_INVALID_IDBINDING: Final = -2146875368 +CERTSRV_E_INVALID_ATTESTATION: Final = -2146875367 +CERTSRV_E_KEY_ATTESTATION: Final = -2146875366 +CERTSRV_E_CORRUPT_KEY_ATTESTATION: Final = -2146875365 +CERTSRV_E_EXPIRED_CHALLENGE: Final = -2146875364 +CERTSRV_E_INVALID_RESPONSE: Final = -2146875363 +CERTSRV_E_INVALID_REQUESTID: Final = -2146875362 +CERTSRV_E_REQUEST_PRECERTIFICATE_MISMATCH: Final = -2146875361 +CERTSRV_E_PENDING_CLIENT_RESPONSE: Final = -2146875360 +CERTSRV_E_SEC_EXT_DIRECTORY_SID_REQUIRED: Final = -2146875359 +XENROLL_E_KEY_NOT_EXPORTABLE: Final = -2146873344 +XENROLL_E_CANNOT_ADD_ROOT_CERT: Final = -2146873343 +XENROLL_E_RESPONSE_KA_HASH_NOT_FOUND: Final = -2146873342 +XENROLL_E_RESPONSE_UNEXPECTED_KA_HASH: Final = -2146873341 +XENROLL_E_RESPONSE_KA_HASH_MISMATCH: Final = -2146873340 +XENROLL_E_KEYSPEC_SMIME_MISMATCH: Final = -2146873339 +TRUST_E_SYSTEM_ERROR: Final = -2146869247 +TRUST_E_NO_SIGNER_CERT: Final = -2146869246 +TRUST_E_COUNTER_SIGNER: Final = -2146869245 +TRUST_E_CERT_SIGNATURE: Final = -2146869244 +TRUST_E_TIME_STAMP: Final = -2146869243 +TRUST_E_BAD_DIGEST: Final = -2146869232 +TRUST_E_MALFORMED_SIGNATURE: Final = -2146869231 +TRUST_E_BASIC_CONSTRAINTS: Final = -2146869223 
+TRUST_E_FINANCIAL_CRITERIA: Final = -2146869218 +MSSIPOTF_E_OUTOFMEMRANGE: Final = -2146865151 +MSSIPOTF_E_CANTGETOBJECT: Final = -2146865150 +MSSIPOTF_E_NOHEADTABLE: Final = -2146865149 +MSSIPOTF_E_BAD_MAGICNUMBER: Final = -2146865148 +MSSIPOTF_E_BAD_OFFSET_TABLE: Final = -2146865147 +MSSIPOTF_E_TABLE_TAGORDER: Final = -2146865146 +MSSIPOTF_E_TABLE_LONGWORD: Final = -2146865145 +MSSIPOTF_E_BAD_FIRST_TABLE_PLACEMENT: Final = -2146865144 +MSSIPOTF_E_TABLES_OVERLAP: Final = -2146865143 +MSSIPOTF_E_TABLE_PADBYTES: Final = -2146865142 +MSSIPOTF_E_FILETOOSMALL: Final = -2146865141 +MSSIPOTF_E_TABLE_CHECKSUM: Final = -2146865140 +MSSIPOTF_E_FILE_CHECKSUM: Final = -2146865139 +MSSIPOTF_E_FAILED_POLICY: Final = -2146865136 +MSSIPOTF_E_FAILED_HINTS_CHECK: Final = -2146865135 +MSSIPOTF_E_NOT_OPENTYPE: Final = -2146865134 +MSSIPOTF_E_FILE: Final = -2146865133 +MSSIPOTF_E_CRYPT: Final = -2146865132 +MSSIPOTF_E_BADVERSION: Final = -2146865131 +MSSIPOTF_E_DSIG_STRUCTURE: Final = -2146865130 +MSSIPOTF_E_PCONST_CHECK: Final = -2146865129 +MSSIPOTF_E_STRUCTURE: Final = -2146865128 +ERROR_CRED_REQUIRES_CONFIRMATION: Final = -2146865127 +NTE_OP_OK: Final = 0 +TRUST_E_PROVIDER_UNKNOWN: Final = -2146762751 +TRUST_E_ACTION_UNKNOWN: Final = -2146762750 +TRUST_E_SUBJECT_FORM_UNKNOWN: Final = -2146762749 +TRUST_E_SUBJECT_NOT_TRUSTED: Final = -2146762748 +DIGSIG_E_ENCODE: Final = -2146762747 +DIGSIG_E_DECODE: Final = -2146762746 +DIGSIG_E_EXTENSIBILITY: Final = -2146762745 +DIGSIG_E_CRYPTO: Final = -2146762744 +PERSIST_E_SIZEDEFINITE: Final = -2146762743 +PERSIST_E_SIZEINDEFINITE: Final = -2146762742 +PERSIST_E_NOTSELFSIZING: Final = -2146762741 +TRUST_E_NOSIGNATURE: Final = -2146762496 +CERT_E_EXPIRED: Final = -2146762495 +CERT_E_VALIDITYPERIODNESTING: Final = -2146762494 +CERT_E_ROLE: Final = -2146762493 +CERT_E_PATHLENCONST: Final = -2146762492 +CERT_E_CRITICAL: Final = -2146762491 +CERT_E_PURPOSE: Final = -2146762490 +CERT_E_ISSUERCHAINING: Final = -2146762489 +CERT_E_MALFORMED: Final = -2146762488 +CERT_E_UNTRUSTEDROOT: Final = -2146762487 +CERT_E_CHAINING: Final = -2146762486 +TRUST_E_FAIL: Final = -2146762485 +CERT_E_REVOKED: Final = -2146762484 +CERT_E_UNTRUSTEDTESTROOT: Final = -2146762483 +CERT_E_REVOCATION_FAILURE: Final = -2146762482 +CERT_E_CN_NO_MATCH: Final = -2146762481 +CERT_E_WRONG_USAGE: Final = -2146762480 +TRUST_E_EXPLICIT_DISTRUST: Final = -2146762479 +CERT_E_UNTRUSTEDCA: Final = -2146762478 +CERT_E_INVALID_POLICY: Final = -2146762477 +CERT_E_INVALID_NAME: Final = -2146762476 + +def HRESULT_FROM_SETUPAPI(x): ... 
+ +SPAPI_E_EXPECTED_SECTION_NAME: Final = -2146500608 +SPAPI_E_BAD_SECTION_NAME_LINE: Final = -2146500607 +SPAPI_E_SECTION_NAME_TOO_LONG: Final = -2146500606 +SPAPI_E_GENERAL_SYNTAX: Final = -2146500605 +SPAPI_E_WRONG_INF_STYLE: Final = -2146500352 +SPAPI_E_SECTION_NOT_FOUND: Final = -2146500351 +SPAPI_E_LINE_NOT_FOUND: Final = -2146500350 +SPAPI_E_NO_BACKUP: Final = -2146500349 +SPAPI_E_NO_ASSOCIATED_CLASS: Final = -2146500096 +SPAPI_E_CLASS_MISMATCH: Final = -2146500095 +SPAPI_E_DUPLICATE_FOUND: Final = -2146500094 +SPAPI_E_NO_DRIVER_SELECTED: Final = -2146500093 +SPAPI_E_KEY_DOES_NOT_EXIST: Final = -2146500092 +SPAPI_E_INVALID_DEVINST_NAME: Final = -2146500091 +SPAPI_E_INVALID_CLASS: Final = -2146500090 +SPAPI_E_DEVINST_ALREADY_EXISTS: Final = -2146500089 +SPAPI_E_DEVINFO_NOT_REGISTERED: Final = -2146500088 +SPAPI_E_INVALID_REG_PROPERTY: Final = -2146500087 +SPAPI_E_NO_INF: Final = -2146500086 +SPAPI_E_NO_SUCH_DEVINST: Final = -2146500085 +SPAPI_E_CANT_LOAD_CLASS_ICON: Final = -2146500084 +SPAPI_E_INVALID_CLASS_INSTALLER: Final = -2146500083 +SPAPI_E_DI_DO_DEFAULT: Final = -2146500082 +SPAPI_E_DI_NOFILECOPY: Final = -2146500081 +SPAPI_E_INVALID_HWPROFILE: Final = -2146500080 +SPAPI_E_NO_DEVICE_SELECTED: Final = -2146500079 +SPAPI_E_DEVINFO_LIST_LOCKED: Final = -2146500078 +SPAPI_E_DEVINFO_DATA_LOCKED: Final = -2146500077 +SPAPI_E_DI_BAD_PATH: Final = -2146500076 +SPAPI_E_NO_CLASSINSTALL_PARAMS: Final = -2146500075 +SPAPI_E_FILEQUEUE_LOCKED: Final = -2146500074 +SPAPI_E_BAD_SERVICE_INSTALLSECT: Final = -2146500073 +SPAPI_E_NO_CLASS_DRIVER_LIST: Final = -2146500072 +SPAPI_E_NO_ASSOCIATED_SERVICE: Final = -2146500071 +SPAPI_E_NO_DEFAULT_DEVICE_INTERFACE: Final = -2146500070 +SPAPI_E_DEVICE_INTERFACE_ACTIVE: Final = -2146500069 +SPAPI_E_DEVICE_INTERFACE_REMOVED: Final = -2146500068 +SPAPI_E_BAD_INTERFACE_INSTALLSECT: Final = -2146500067 +SPAPI_E_NO_SUCH_INTERFACE_CLASS: Final = -2146500066 +SPAPI_E_INVALID_REFERENCE_STRING: Final = -2146500065 +SPAPI_E_INVALID_MACHINENAME: Final = -2146500064 +SPAPI_E_REMOTE_COMM_FAILURE: Final = -2146500063 +SPAPI_E_MACHINE_UNAVAILABLE: Final = -2146500062 +SPAPI_E_NO_CONFIGMGR_SERVICES: Final = -2146500061 +SPAPI_E_INVALID_PROPPAGE_PROVIDER: Final = -2146500060 +SPAPI_E_NO_SUCH_DEVICE_INTERFACE: Final = -2146500059 +SPAPI_E_DI_POSTPROCESSING_REQUIRED: Final = -2146500058 +SPAPI_E_INVALID_COINSTALLER: Final = -2146500057 +SPAPI_E_NO_COMPAT_DRIVERS: Final = -2146500056 +SPAPI_E_NO_DEVICE_ICON: Final = -2146500055 +SPAPI_E_INVALID_INF_LOGCONFIG: Final = -2146500054 +SPAPI_E_DI_DONT_INSTALL: Final = -2146500053 +SPAPI_E_INVALID_FILTER_DRIVER: Final = -2146500052 +SPAPI_E_NON_WINDOWS_NT_DRIVER: Final = -2146500051 +SPAPI_E_NON_WINDOWS_DRIVER: Final = -2146500050 +SPAPI_E_NO_CATALOG_FOR_OEM_INF: Final = -2146500049 +SPAPI_E_DEVINSTALL_QUEUE_NONNATIVE: Final = -2146500048 +SPAPI_E_NOT_DISABLEABLE: Final = -2146500047 +SPAPI_E_CANT_REMOVE_DEVINST: Final = -2146500046 +SPAPI_E_INVALID_TARGET: Final = -2146500045 +SPAPI_E_DRIVER_NONNATIVE: Final = -2146500044 +SPAPI_E_IN_WOW64: Final = -2146500043 +SPAPI_E_SET_SYSTEM_RESTORE_POINT: Final = -2146500042 +SPAPI_E_INCORRECTLY_COPIED_INF: Final = -2146500041 +SPAPI_E_SCE_DISABLED: Final = -2146500040 +SPAPI_E_UNKNOWN_EXCEPTION: Final = -2146500039 +SPAPI_E_PNP_REGISTRY_ERROR: Final = -2146500038 +SPAPI_E_REMOTE_REQUEST_UNSUPPORTED: Final = -2146500037 +SPAPI_E_NOT_AN_INSTALLED_OEM_INF: Final = -2146500036 +SPAPI_E_INF_IN_USE_BY_DEVICES: Final = -2146500035 +SPAPI_E_DI_FUNCTION_OBSOLETE: Final = -2146500034 
+SPAPI_E_NO_AUTHENTICODE_CATALOG: Final = -2146500033 +SPAPI_E_AUTHENTICODE_DISALLOWED: Final = -2146500032 +SPAPI_E_AUTHENTICODE_TRUSTED_PUBLISHER: Final = -2146500031 +SPAPI_E_AUTHENTICODE_TRUST_NOT_ESTABLISHED: Final = -2146500030 +SPAPI_E_AUTHENTICODE_PUBLISHER_NOT_TRUSTED: Final = -2146500029 +SPAPI_E_SIGNATURE_OSATTRIBUTE_MISMATCH: Final = -2146500028 +SPAPI_E_ONLY_VALIDATE_VIA_AUTHENTICODE: Final = -2146500027 +SPAPI_E_DEVICE_INSTALLER_NOT_READY: Final = -2146500026 +SPAPI_E_DRIVER_STORE_ADD_FAILED: Final = -2146500025 +SPAPI_E_DEVICE_INSTALL_BLOCKED: Final = -2146500024 +SPAPI_E_DRIVER_INSTALL_BLOCKED: Final = -2146500023 +SPAPI_E_WRONG_INF_TYPE: Final = -2146500022 +SPAPI_E_FILE_HASH_NOT_IN_CATALOG: Final = -2146500021 +SPAPI_E_DRIVER_STORE_DELETE_FAILED: Final = -2146500020 +SPAPI_E_UNRECOVERABLE_STACK_OVERFLOW: Final = -2146499840 +SPAPI_E_ERROR_NOT_INSTALLED: Final = -2146496512 +SCARD_S_SUCCESS: Final = NO_ERROR +SCARD_F_INTERNAL_ERROR: Final = -2146435071 +SCARD_E_CANCELLED: Final = -2146435070 +SCARD_E_INVALID_HANDLE: Final = -2146435069 +SCARD_E_INVALID_PARAMETER: Final = -2146435068 +SCARD_E_INVALID_TARGET: Final = -2146435067 +SCARD_E_NO_MEMORY: Final = -2146435066 +SCARD_F_WAITED_TOO_LONG: Final = -2146435065 +SCARD_E_INSUFFICIENT_BUFFER: Final = -2146435064 +SCARD_E_UNKNOWN_READER: Final = -2146435063 +SCARD_E_TIMEOUT: Final = -2146435062 +SCARD_E_SHARING_VIOLATION: Final = -2146435061 +SCARD_E_NO_SMARTCARD: Final = -2146435060 +SCARD_E_UNKNOWN_CARD: Final = -2146435059 +SCARD_E_CANT_DISPOSE: Final = -2146435058 +SCARD_E_PROTO_MISMATCH: Final = -2146435057 +SCARD_E_NOT_READY: Final = -2146435056 +SCARD_E_INVALID_VALUE: Final = -2146435055 +SCARD_E_SYSTEM_CANCELLED: Final = -2146435054 +SCARD_F_COMM_ERROR: Final = -2146435053 +SCARD_F_UNKNOWN_ERROR: Final = -2146435052 +SCARD_E_INVALID_ATR: Final = -2146435051 +SCARD_E_NOT_TRANSACTED: Final = -2146435050 +SCARD_E_READER_UNAVAILABLE: Final = -2146435049 +SCARD_P_SHUTDOWN: Final = -2146435048 +SCARD_E_PCI_TOO_SMALL: Final = -2146435047 +SCARD_E_READER_UNSUPPORTED: Final = -2146435046 +SCARD_E_DUPLICATE_READER: Final = -2146435045 +SCARD_E_CARD_UNSUPPORTED: Final = -2146435044 +SCARD_E_NO_SERVICE: Final = -2146435043 +SCARD_E_SERVICE_STOPPED: Final = -2146435042 +SCARD_E_UNEXPECTED: Final = -2146435041 +SCARD_E_ICC_INSTALLATION: Final = -2146435040 +SCARD_E_ICC_CREATEORDER: Final = -2146435039 +SCARD_E_UNSUPPORTED_FEATURE: Final = -2146435038 +SCARD_E_DIR_NOT_FOUND: Final = -2146435037 +SCARD_E_FILE_NOT_FOUND: Final = -2146435036 +SCARD_E_NO_DIR: Final = -2146435035 +SCARD_E_NO_FILE: Final = -2146435034 +SCARD_E_NO_ACCESS: Final = -2146435033 +SCARD_E_WRITE_TOO_MANY: Final = -2146435032 +SCARD_E_BAD_SEEK: Final = -2146435031 +SCARD_E_INVALID_CHV: Final = -2146435030 +SCARD_E_UNKNOWN_RES_MNG: Final = -2146435029 +SCARD_E_NO_SUCH_CERTIFICATE: Final = -2146435028 +SCARD_E_CERTIFICATE_UNAVAILABLE: Final = -2146435027 +SCARD_E_NO_READERS_AVAILABLE: Final = -2146435026 +SCARD_E_COMM_DATA_LOST: Final = -2146435025 +SCARD_E_NO_KEY_CONTAINER: Final = -2146435024 +SCARD_E_SERVER_TOO_BUSY: Final = -2146435023 +SCARD_E_PIN_CACHE_EXPIRED: Final = -2146435022 +SCARD_E_NO_PIN_CACHE: Final = -2146435021 +SCARD_E_READ_ONLY_CARD: Final = -2146435020 +SCARD_W_UNSUPPORTED_CARD: Final = -2146434971 +SCARD_W_UNRESPONSIVE_CARD: Final = -2146434970 +SCARD_W_UNPOWERED_CARD: Final = -2146434969 +SCARD_W_RESET_CARD: Final = -2146434968 +SCARD_W_REMOVED_CARD: Final = -2146434967 +SCARD_W_SECURITY_VIOLATION: Final = -2146434966 +SCARD_W_WRONG_CHV: 
Final = -2146434965 +SCARD_W_CHV_BLOCKED: Final = -2146434964 +SCARD_W_EOF: Final = -2146434963 +SCARD_W_CANCELLED_BY_USER: Final = -2146434962 +SCARD_W_CARD_NOT_AUTHENTICATED: Final = -2146434961 +SCARD_W_CACHE_ITEM_NOT_FOUND: Final = -2146434960 +SCARD_W_CACHE_ITEM_STALE: Final = -2146434959 +SCARD_W_CACHE_ITEM_TOO_BIG: Final = -2146434958 +COMADMIN_E_OBJECTERRORS: Final = -2146368511 +COMADMIN_E_OBJECTINVALID: Final = -2146368510 +COMADMIN_E_KEYMISSING: Final = -2146368509 +COMADMIN_E_ALREADYINSTALLED: Final = -2146368508 +COMADMIN_E_APP_FILE_WRITEFAIL: Final = -2146368505 +COMADMIN_E_APP_FILE_READFAIL: Final = -2146368504 +COMADMIN_E_APP_FILE_VERSION: Final = -2146368503 +COMADMIN_E_BADPATH: Final = -2146368502 +COMADMIN_E_APPLICATIONEXISTS: Final = -2146368501 +COMADMIN_E_ROLEEXISTS: Final = -2146368500 +COMADMIN_E_CANTCOPYFILE: Final = -2146368499 +COMADMIN_E_NOUSER: Final = -2146368497 +COMADMIN_E_INVALIDUSERIDS: Final = -2146368496 +COMADMIN_E_NOREGISTRYCLSID: Final = -2146368495 +COMADMIN_E_BADREGISTRYPROGID: Final = -2146368494 +COMADMIN_E_AUTHENTICATIONLEVEL: Final = -2146368493 +COMADMIN_E_USERPASSWDNOTVALID: Final = -2146368492 +COMADMIN_E_CLSIDORIIDMISMATCH: Final = -2146368488 +COMADMIN_E_REMOTEINTERFACE: Final = -2146368487 +COMADMIN_E_DLLREGISTERSERVER: Final = -2146368486 +COMADMIN_E_NOSERVERSHARE: Final = -2146368485 +COMADMIN_E_DLLLOADFAILED: Final = -2146368483 +COMADMIN_E_BADREGISTRYLIBID: Final = -2146368482 +COMADMIN_E_APPDIRNOTFOUND: Final = -2146368481 +COMADMIN_E_REGISTRARFAILED: Final = -2146368477 +COMADMIN_E_COMPFILE_DOESNOTEXIST: Final = -2146368476 +COMADMIN_E_COMPFILE_LOADDLLFAIL: Final = -2146368475 +COMADMIN_E_COMPFILE_GETCLASSOBJ: Final = -2146368474 +COMADMIN_E_COMPFILE_CLASSNOTAVAIL: Final = -2146368473 +COMADMIN_E_COMPFILE_BADTLB: Final = -2146368472 +COMADMIN_E_COMPFILE_NOTINSTALLABLE: Final = -2146368471 +COMADMIN_E_NOTCHANGEABLE: Final = -2146368470 +COMADMIN_E_NOTDELETEABLE: Final = -2146368469 +COMADMIN_E_SESSION: Final = -2146368468 +COMADMIN_E_COMP_MOVE_LOCKED: Final = -2146368467 +COMADMIN_E_COMP_MOVE_BAD_DEST: Final = -2146368466 +COMADMIN_E_REGISTERTLB: Final = -2146368464 +COMADMIN_E_SYSTEMAPP: Final = -2146368461 +COMADMIN_E_COMPFILE_NOREGISTRAR: Final = -2146368460 +COMADMIN_E_COREQCOMPINSTALLED: Final = -2146368459 +COMADMIN_E_SERVICENOTINSTALLED: Final = -2146368458 +COMADMIN_E_PROPERTYSAVEFAILED: Final = -2146368457 +COMADMIN_E_OBJECTEXISTS: Final = -2146368456 +COMADMIN_E_COMPONENTEXISTS: Final = -2146368455 +COMADMIN_E_REGFILE_CORRUPT: Final = -2146368453 +COMADMIN_E_PROPERTY_OVERFLOW: Final = -2146368452 +COMADMIN_E_NOTINREGISTRY: Final = -2146368450 +COMADMIN_E_OBJECTNOTPOOLABLE: Final = -2146368449 +COMADMIN_E_APPLID_MATCHES_CLSID: Final = -2146368442 +COMADMIN_E_ROLE_DOES_NOT_EXIST: Final = -2146368441 +COMADMIN_E_START_APP_NEEDS_COMPONENTS: Final = -2146368440 +COMADMIN_E_REQUIRES_DIFFERENT_PLATFORM: Final = -2146368439 +COMADMIN_E_CAN_NOT_EXPORT_APP_PROXY: Final = -2146368438 +COMADMIN_E_CAN_NOT_START_APP: Final = -2146368437 +COMADMIN_E_CAN_NOT_EXPORT_SYS_APP: Final = -2146368436 +COMADMIN_E_CANT_SUBSCRIBE_TO_COMPONENT: Final = -2146368435 +COMADMIN_E_EVENTCLASS_CANT_BE_SUBSCRIBER: Final = -2146368434 +COMADMIN_E_LIB_APP_PROXY_INCOMPATIBLE: Final = -2146368433 +COMADMIN_E_BASE_PARTITION_ONLY: Final = -2146368432 +COMADMIN_E_START_APP_DISABLED: Final = -2146368431 +COMADMIN_E_CAT_DUPLICATE_PARTITION_NAME: Final = -2146368425 +COMADMIN_E_CAT_INVALID_PARTITION_NAME: Final = -2146368424 +COMADMIN_E_CAT_PARTITION_IN_USE: Final = 
-2146368423 +COMADMIN_E_FILE_PARTITION_DUPLICATE_FILES: Final = -2146368422 +COMADMIN_E_CAT_IMPORTED_COMPONENTS_NOT_ALLOWED: Final = -2146368421 +COMADMIN_E_AMBIGUOUS_APPLICATION_NAME: Final = -2146368420 +COMADMIN_E_AMBIGUOUS_PARTITION_NAME: Final = -2146368419 +COMADMIN_E_REGDB_NOTINITIALIZED: Final = -2146368398 +COMADMIN_E_REGDB_NOTOPEN: Final = -2146368397 +COMADMIN_E_REGDB_SYSTEMERR: Final = -2146368396 +COMADMIN_E_REGDB_ALREADYRUNNING: Final = -2146368395 +COMADMIN_E_MIG_VERSIONNOTSUPPORTED: Final = -2146368384 +COMADMIN_E_MIG_SCHEMANOTFOUND: Final = -2146368383 +COMADMIN_E_CAT_BITNESSMISMATCH: Final = -2146368382 +COMADMIN_E_CAT_UNACCEPTABLEBITNESS: Final = -2146368381 +COMADMIN_E_CAT_WRONGAPPBITNESS: Final = -2146368380 +COMADMIN_E_CAT_PAUSE_RESUME_NOT_SUPPORTED: Final = -2146368379 +COMADMIN_E_CAT_SERVERFAULT: Final = -2146368378 +COMQC_E_APPLICATION_NOT_QUEUED: Final = -2146368000 +COMQC_E_NO_QUEUEABLE_INTERFACES: Final = -2146367999 +COMQC_E_QUEUING_SERVICE_NOT_AVAILABLE: Final = -2146367998 +COMQC_E_NO_IPERSISTSTREAM: Final = -2146367997 +COMQC_E_BAD_MESSAGE: Final = -2146367996 +COMQC_E_UNAUTHENTICATED: Final = -2146367995 +COMQC_E_UNTRUSTED_ENQUEUER: Final = -2146367994 +MSDTC_E_DUPLICATE_RESOURCE: Final = -2146367743 +COMADMIN_E_OBJECT_PARENT_MISSING: Final = -2146367480 +COMADMIN_E_OBJECT_DOES_NOT_EXIST: Final = -2146367479 +COMADMIN_E_APP_NOT_RUNNING: Final = -2146367478 +COMADMIN_E_INVALID_PARTITION: Final = -2146367477 +COMADMIN_E_SVCAPP_NOT_POOLABLE_OR_RECYCLABLE: Final = -2146367475 +COMADMIN_E_USER_IN_SET: Final = -2146367474 +COMADMIN_E_CANTRECYCLELIBRARYAPPS: Final = -2146367473 +COMADMIN_E_CANTRECYCLESERVICEAPPS: Final = -2146367471 +COMADMIN_E_PROCESSALREADYRECYCLED: Final = -2146367470 +COMADMIN_E_PAUSEDPROCESSMAYNOTBERECYCLED: Final = -2146367469 +COMADMIN_E_CANTMAKEINPROCSERVICE: Final = -2146367468 +COMADMIN_E_PROGIDINUSEBYCLSID: Final = -2146367467 +COMADMIN_E_DEFAULT_PARTITION_NOT_IN_SET: Final = -2146367466 +COMADMIN_E_RECYCLEDPROCESSMAYNOTBEPAUSED: Final = -2146367465 +COMADMIN_E_PARTITION_ACCESSDENIED: Final = -2146367464 +COMADMIN_E_PARTITION_MSI_ONLY: Final = -2146367463 +COMADMIN_E_LEGACYCOMPS_NOT_ALLOWED_IN_1_0_FORMAT: Final = -2146367462 +COMADMIN_E_LEGACYCOMPS_NOT_ALLOWED_IN_NONBASE_PARTITIONS: Final = -2146367461 +COMADMIN_E_COMP_MOVE_SOURCE: Final = -2146367460 +COMADMIN_E_COMP_MOVE_DEST: Final = -2146367459 +COMADMIN_E_COMP_MOVE_PRIVATE: Final = -2146367458 +COMADMIN_E_BASEPARTITION_REQUIRED_IN_SET: Final = -2146367457 +COMADMIN_E_CANNOT_ALIAS_EVENTCLASS: Final = -2146367456 +COMADMIN_E_PRIVATE_ACCESSDENIED: Final = -2146367455 +COMADMIN_E_SAFERINVALID: Final = -2146367454 +COMADMIN_E_REGISTRY_ACCESSDENIED: Final = -2146367453 +COMADMIN_E_PARTITIONS_DISABLED: Final = -2146367452 +MENROLL_E_DEVICE_MESSAGE_FORMAT_ERROR: Final = -2145910783 +MENROLL_E_DEVICE_AUTHENTICATION_ERROR: Final = -2145910782 +MENROLL_E_DEVICE_AUTHORIZATION_ERROR: Final = -2145910781 +MENROLL_E_DEVICE_CERTIFICATEREQUEST_ERROR: Final = -2145910780 +MENROLL_E_DEVICE_CONFIGMGRSERVER_ERROR: Final = -2145910779 +MENROLL_E_DEVICE_INTERNALSERVICE_ERROR: Final = -2145910778 +MENROLL_E_DEVICE_INVALIDSECURITY_ERROR: Final = -2145910777 +MENROLL_E_DEVICE_UNKNOWN_ERROR: Final = -2145910776 +MENROLL_E_ENROLLMENT_IN_PROGRESS: Final = -2145910775 +MENROLL_E_DEVICE_ALREADY_ENROLLED: Final = -2145910774 +MENROLL_E_DISCOVERY_SEC_CERT_DATE_INVALID: Final = -2145910771 +MENROLL_E_PASSWORD_NEEDED: Final = -2145910770 +MENROLL_E_WAB_ERROR: Final = -2145910769 +MENROLL_E_CONNECTIVITY: Final = 
-2145910768 +MENROLL_S_ENROLLMENT_SUSPENDED: Final = 0x00180011 +MENROLL_E_INVALIDSSLCERT: Final = -2145910766 +MENROLL_E_DEVICECAPREACHED: Final = -2145910765 +MENROLL_E_DEVICENOTSUPPORTED: Final = -2145910764 +MENROLL_E_NOT_SUPPORTED: Final = -2145910763 +MENROLL_E_NOTELIGIBLETORENEW: Final = -2145910762 +MENROLL_E_INMAINTENANCE: Final = -2145910761 +MENROLL_E_USER_LICENSE: Final = -2145910760 +MENROLL_E_ENROLLMENTDATAINVALID: Final = -2145910759 +MENROLL_E_INSECUREREDIRECT: Final = -2145910758 +MENROLL_E_PLATFORM_WRONG_STATE: Final = -2145910757 +MENROLL_E_PLATFORM_LICENSE_ERROR: Final = -2145910756 +MENROLL_E_PLATFORM_UNKNOWN_ERROR: Final = -2145910755 +MENROLL_E_PROV_CSP_CERTSTORE: Final = -2145910754 +MENROLL_E_PROV_CSP_W7: Final = -2145910753 +MENROLL_E_PROV_CSP_DMCLIENT: Final = -2145910752 +MENROLL_E_PROV_CSP_PFW: Final = -2145910751 +MENROLL_E_PROV_CSP_MISC: Final = -2145910750 +MENROLL_E_PROV_UNKNOWN: Final = -2145910749 +MENROLL_E_PROV_SSLCERTNOTFOUND: Final = -2145910748 +MENROLL_E_PROV_CSP_APPMGMT: Final = -2145910747 +MENROLL_E_DEVICE_MANAGEMENT_BLOCKED: Final = -2145910746 +MENROLL_E_CERTPOLICY_PRIVATEKEYCREATION_FAILED: Final = -2145910745 +MENROLL_E_CERTAUTH_FAILED_TO_FIND_CERT: Final = -2145910744 +MENROLL_E_EMPTY_MESSAGE: Final = -2145910743 +MENROLL_E_USER_CANCELLED: Final = -2145910736 +MENROLL_E_MDM_NOT_CONFIGURED: Final = -2145910735 +MENROLL_E_CUSTOMSERVERERROR: Final = -2145910734 +WER_S_REPORT_DEBUG: Final = 0x001B0000 +WER_S_REPORT_UPLOADED: Final = 0x001B0001 +WER_S_REPORT_QUEUED: Final = 0x001B0002 +WER_S_DISABLED: Final = 0x001B0003 +WER_S_SUSPENDED_UPLOAD: Final = 0x001B0004 +WER_S_DISABLED_QUEUE: Final = 0x001B0005 +WER_S_DISABLED_ARCHIVE: Final = 0x001B0006 +WER_S_REPORT_ASYNC: Final = 0x001B0007 +WER_S_IGNORE_ASSERT_INSTANCE: Final = 0x001B0008 +WER_S_IGNORE_ALL_ASSERTS: Final = 0x001B0009 +WER_S_ASSERT_CONTINUE: Final = 0x001B000A +WER_S_THROTTLED: Final = 0x001B000B +WER_S_REPORT_UPLOADED_CAB: Final = 0x001B000C +WER_E_CRASH_FAILURE: Final = -2145681408 +WER_E_CANCELED: Final = -2145681407 +WER_E_NETWORK_FAILURE: Final = -2145681406 +WER_E_NOT_INITIALIZED: Final = -2145681405 +WER_E_ALREADY_REPORTING: Final = -2145681404 +WER_E_DUMP_THROTTLED: Final = -2145681403 +WER_E_INSUFFICIENT_CONSENT: Final = -2145681402 +WER_E_TOO_HEAVY: Final = -2145681401 + +def FILTER_HRESULT_FROM_FLT_NTSTATUS(x: int) -> int: ... 
+ +ERROR_FLT_IO_COMPLETE: Final = 0x001F0001 +ERROR_FLT_NO_HANDLER_DEFINED: Final = -2145452031 +ERROR_FLT_CONTEXT_ALREADY_DEFINED: Final = -2145452030 +ERROR_FLT_INVALID_ASYNCHRONOUS_REQUEST: Final = -2145452029 +ERROR_FLT_DISALLOW_FAST_IO: Final = -2145452028 +ERROR_FLT_INVALID_NAME_REQUEST: Final = -2145452027 +ERROR_FLT_NOT_SAFE_TO_POST_OPERATION: Final = -2145452026 +ERROR_FLT_NOT_INITIALIZED: Final = -2145452025 +ERROR_FLT_FILTER_NOT_READY: Final = -2145452024 +ERROR_FLT_POST_OPERATION_CLEANUP: Final = -2145452023 +ERROR_FLT_INTERNAL_ERROR: Final = -2145452022 +ERROR_FLT_DELETING_OBJECT: Final = -2145452021 +ERROR_FLT_MUST_BE_NONPAGED_POOL: Final = -2145452020 +ERROR_FLT_DUPLICATE_ENTRY: Final = -2145452019 +ERROR_FLT_CBDQ_DISABLED: Final = -2145452018 +ERROR_FLT_DO_NOT_ATTACH: Final = -2145452017 +ERROR_FLT_DO_NOT_DETACH: Final = -2145452016 +ERROR_FLT_INSTANCE_ALTITUDE_COLLISION: Final = -2145452015 +ERROR_FLT_INSTANCE_NAME_COLLISION: Final = -2145452014 +ERROR_FLT_FILTER_NOT_FOUND: Final = -2145452013 +ERROR_FLT_VOLUME_NOT_FOUND: Final = -2145452012 +ERROR_FLT_INSTANCE_NOT_FOUND: Final = -2145452011 +ERROR_FLT_CONTEXT_ALLOCATION_NOT_FOUND: Final = -2145452010 +ERROR_FLT_INVALID_CONTEXT_REGISTRATION: Final = -2145452009 +ERROR_FLT_NAME_CACHE_MISS: Final = -2145452008 +ERROR_FLT_NO_DEVICE_OBJECT: Final = -2145452007 +ERROR_FLT_VOLUME_ALREADY_MOUNTED: Final = -2145452006 +ERROR_FLT_ALREADY_ENLISTED: Final = -2145452005 +ERROR_FLT_CONTEXT_ALREADY_LINKED: Final = -2145452004 +ERROR_FLT_NO_WAITER_FOR_REPLY: Final = -2145452000 +ERROR_FLT_REGISTRATION_BUSY: Final = -2145451997 +ERROR_FLT_WCOS_NOT_SUPPORTED: Final = -2145451996 +ERROR_HUNG_DISPLAY_DRIVER_THREAD: Final = -2144993279 +DWM_E_COMPOSITIONDISABLED: Final = -2144980991 +DWM_E_REMOTING_NOT_SUPPORTED: Final = -2144980990 +DWM_E_NO_REDIRECTION_SURFACE_AVAILABLE: Final = -2144980989 +DWM_E_NOT_QUEUING_PRESENTS: Final = -2144980988 +DWM_E_ADAPTER_NOT_FOUND: Final = -2144980987 +DWM_S_GDI_REDIRECTION_SURFACE: Final = 0x00263005 +DWM_E_TEXTURE_TOO_LARGE: Final = -2144980985 +DWM_S_GDI_REDIRECTION_SURFACE_BLT_VIA_GDI: Final = 0x00263008 +ERROR_MONITOR_NO_DESCRIPTOR: Final = 0x00261001 +ERROR_MONITOR_UNKNOWN_DESCRIPTOR_FORMAT: Final = 0x00261002 +ERROR_MONITOR_INVALID_DESCRIPTOR_CHECKSUM: Final = -1071247357 +ERROR_MONITOR_INVALID_STANDARD_TIMING_BLOCK: Final = -1071247356 +ERROR_MONITOR_WMI_DATABLOCK_REGISTRATION_FAILED: Final = -1071247355 +ERROR_MONITOR_INVALID_SERIAL_NUMBER_MONDSC_BLOCK: Final = -1071247354 +ERROR_MONITOR_INVALID_USER_FRIENDLY_MONDSC_BLOCK: Final = -1071247353 +ERROR_MONITOR_NO_MORE_DESCRIPTOR_DATA: Final = -1071247352 +ERROR_MONITOR_INVALID_DETAILED_TIMING_BLOCK: Final = -1071247351 +ERROR_MONITOR_INVALID_MANUFACTURE_DATE: Final = -1071247350 +ERROR_GRAPHICS_NOT_EXCLUSIVE_MODE_OWNER: Final = -1071243264 +ERROR_GRAPHICS_INSUFFICIENT_DMA_BUFFER: Final = -1071243263 +ERROR_GRAPHICS_INVALID_DISPLAY_ADAPTER: Final = -1071243262 +ERROR_GRAPHICS_ADAPTER_WAS_RESET: Final = -1071243261 +ERROR_GRAPHICS_INVALID_DRIVER_MODEL: Final = -1071243260 +ERROR_GRAPHICS_PRESENT_MODE_CHANGED: Final = -1071243259 +ERROR_GRAPHICS_PRESENT_OCCLUDED: Final = -1071243258 +ERROR_GRAPHICS_PRESENT_DENIED: Final = -1071243257 +ERROR_GRAPHICS_CANNOTCOLORCONVERT: Final = -1071243256 +ERROR_GRAPHICS_DRIVER_MISMATCH: Final = -1071243255 +ERROR_GRAPHICS_PARTIAL_DATA_POPULATED: Final = 0x4026200A +ERROR_GRAPHICS_PRESENT_REDIRECTION_DISABLED: Final = -1071243253 +ERROR_GRAPHICS_PRESENT_UNOCCLUDED: Final = -1071243252 
+ERROR_GRAPHICS_WINDOWDC_NOT_AVAILABLE: Final = -1071243251 +ERROR_GRAPHICS_WINDOWLESS_PRESENT_DISABLED: Final = -1071243250 +ERROR_GRAPHICS_PRESENT_INVALID_WINDOW: Final = -1071243249 +ERROR_GRAPHICS_PRESENT_BUFFER_NOT_BOUND: Final = -1071243248 +ERROR_GRAPHICS_VAIL_STATE_CHANGED: Final = -1071243247 +ERROR_GRAPHICS_INDIRECT_DISPLAY_ABANDON_SWAPCHAIN: Final = -1071243246 +ERROR_GRAPHICS_INDIRECT_DISPLAY_DEVICE_STOPPED: Final = -1071243245 +ERROR_GRAPHICS_VAIL_FAILED_TO_SEND_CREATE_SUPERWETINK_MESSAGE: Final = -1071243244 +ERROR_GRAPHICS_VAIL_FAILED_TO_SEND_DESTROY_SUPERWETINK_MESSAGE: Final = -1071243243 +ERROR_GRAPHICS_VAIL_FAILED_TO_SEND_COMPOSITION_WINDOW_DPI_MESSAGE: Final = -1071243242 +ERROR_GRAPHICS_LINK_CONFIGURATION_IN_PROGRESS: Final = -1071243241 +ERROR_GRAPHICS_MPO_ALLOCATION_UNPINNED: Final = -1071243240 +ERROR_GRAPHICS_NO_VIDEO_MEMORY: Final = -1071243008 +ERROR_GRAPHICS_CANT_LOCK_MEMORY: Final = -1071243007 +ERROR_GRAPHICS_ALLOCATION_BUSY: Final = -1071243006 +ERROR_GRAPHICS_TOO_MANY_REFERENCES: Final = -1071243005 +ERROR_GRAPHICS_TRY_AGAIN_LATER: Final = -1071243004 +ERROR_GRAPHICS_TRY_AGAIN_NOW: Final = -1071243003 +ERROR_GRAPHICS_ALLOCATION_INVALID: Final = -1071243002 +ERROR_GRAPHICS_UNSWIZZLING_APERTURE_UNAVAILABLE: Final = -1071243001 +ERROR_GRAPHICS_UNSWIZZLING_APERTURE_UNSUPPORTED: Final = -1071243000 +ERROR_GRAPHICS_CANT_EVICT_PINNED_ALLOCATION: Final = -1071242999 +ERROR_GRAPHICS_INVALID_ALLOCATION_USAGE: Final = -1071242992 +ERROR_GRAPHICS_CANT_RENDER_LOCKED_ALLOCATION: Final = -1071242991 +ERROR_GRAPHICS_ALLOCATION_CLOSED: Final = -1071242990 +ERROR_GRAPHICS_INVALID_ALLOCATION_INSTANCE: Final = -1071242989 +ERROR_GRAPHICS_INVALID_ALLOCATION_HANDLE: Final = -1071242988 +ERROR_GRAPHICS_WRONG_ALLOCATION_DEVICE: Final = -1071242987 +ERROR_GRAPHICS_ALLOCATION_CONTENT_LOST: Final = -1071242986 +ERROR_GRAPHICS_GPU_EXCEPTION_ON_DEVICE: Final = -1071242752 +ERROR_GRAPHICS_SKIP_ALLOCATION_PREPARATION: Final = 0x40262201 +ERROR_GRAPHICS_INVALID_VIDPN_TOPOLOGY: Final = -1071242496 +ERROR_GRAPHICS_VIDPN_TOPOLOGY_NOT_SUPPORTED: Final = -1071242495 +ERROR_GRAPHICS_VIDPN_TOPOLOGY_CURRENTLY_NOT_SUPPORTED: Final = -1071242494 +ERROR_GRAPHICS_INVALID_VIDPN: Final = -1071242493 +ERROR_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE: Final = -1071242492 +ERROR_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET: Final = -1071242491 +ERROR_GRAPHICS_VIDPN_MODALITY_NOT_SUPPORTED: Final = -1071242490 +ERROR_GRAPHICS_MODE_NOT_PINNED: Final = 0x00262307 +ERROR_GRAPHICS_INVALID_VIDPN_SOURCEMODESET: Final = -1071242488 +ERROR_GRAPHICS_INVALID_VIDPN_TARGETMODESET: Final = -1071242487 +ERROR_GRAPHICS_INVALID_FREQUENCY: Final = -1071242486 +ERROR_GRAPHICS_INVALID_ACTIVE_REGION: Final = -1071242485 +ERROR_GRAPHICS_INVALID_TOTAL_REGION: Final = -1071242484 +ERROR_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE_MODE: Final = -1071242480 +ERROR_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET_MODE: Final = -1071242479 +ERROR_GRAPHICS_PINNED_MODE_MUST_REMAIN_IN_SET: Final = -1071242478 +ERROR_GRAPHICS_PATH_ALREADY_IN_TOPOLOGY: Final = -1071242477 +ERROR_GRAPHICS_MODE_ALREADY_IN_MODESET: Final = -1071242476 +ERROR_GRAPHICS_INVALID_VIDEOPRESENTSOURCESET: Final = -1071242475 +ERROR_GRAPHICS_INVALID_VIDEOPRESENTTARGETSET: Final = -1071242474 +ERROR_GRAPHICS_SOURCE_ALREADY_IN_SET: Final = -1071242473 +ERROR_GRAPHICS_TARGET_ALREADY_IN_SET: Final = -1071242472 +ERROR_GRAPHICS_INVALID_VIDPN_PRESENT_PATH: Final = -1071242471 +ERROR_GRAPHICS_NO_RECOMMENDED_VIDPN_TOPOLOGY: Final = -1071242470 +ERROR_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGESET: Final = 
-1071242469 +ERROR_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE: Final = -1071242468 +ERROR_GRAPHICS_FREQUENCYRANGE_NOT_IN_SET: Final = -1071242467 +ERROR_GRAPHICS_NO_PREFERRED_MODE: Final = 0x0026231E +ERROR_GRAPHICS_FREQUENCYRANGE_ALREADY_IN_SET: Final = -1071242465 +ERROR_GRAPHICS_STALE_MODESET: Final = -1071242464 +ERROR_GRAPHICS_INVALID_MONITOR_SOURCEMODESET: Final = -1071242463 +ERROR_GRAPHICS_INVALID_MONITOR_SOURCE_MODE: Final = -1071242462 +ERROR_GRAPHICS_NO_RECOMMENDED_FUNCTIONAL_VIDPN: Final = -1071242461 +ERROR_GRAPHICS_MODE_ID_MUST_BE_UNIQUE: Final = -1071242460 +ERROR_GRAPHICS_EMPTY_ADAPTER_MONITOR_MODE_SUPPORT_INTERSECTION: Final = -1071242459 +ERROR_GRAPHICS_VIDEO_PRESENT_TARGETS_LESS_THAN_SOURCES: Final = -1071242458 +ERROR_GRAPHICS_PATH_NOT_IN_TOPOLOGY: Final = -1071242457 +ERROR_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_SOURCE: Final = -1071242456 +ERROR_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_TARGET: Final = -1071242455 +ERROR_GRAPHICS_INVALID_MONITORDESCRIPTORSET: Final = -1071242454 +ERROR_GRAPHICS_INVALID_MONITORDESCRIPTOR: Final = -1071242453 +ERROR_GRAPHICS_MONITORDESCRIPTOR_NOT_IN_SET: Final = -1071242452 +ERROR_GRAPHICS_MONITORDESCRIPTOR_ALREADY_IN_SET: Final = -1071242451 +ERROR_GRAPHICS_MONITORDESCRIPTOR_ID_MUST_BE_UNIQUE: Final = -1071242450 +ERROR_GRAPHICS_INVALID_VIDPN_TARGET_SUBSET_TYPE: Final = -1071242449 +ERROR_GRAPHICS_RESOURCES_NOT_RELATED: Final = -1071242448 +ERROR_GRAPHICS_SOURCE_ID_MUST_BE_UNIQUE: Final = -1071242447 +ERROR_GRAPHICS_TARGET_ID_MUST_BE_UNIQUE: Final = -1071242446 +ERROR_GRAPHICS_NO_AVAILABLE_VIDPN_TARGET: Final = -1071242445 +ERROR_GRAPHICS_MONITOR_COULD_NOT_BE_ASSOCIATED_WITH_ADAPTER: Final = -1071242444 +ERROR_GRAPHICS_NO_VIDPNMGR: Final = -1071242443 +ERROR_GRAPHICS_NO_ACTIVE_VIDPN: Final = -1071242442 +ERROR_GRAPHICS_STALE_VIDPN_TOPOLOGY: Final = -1071242441 +ERROR_GRAPHICS_MONITOR_NOT_CONNECTED: Final = -1071242440 +ERROR_GRAPHICS_SOURCE_NOT_IN_TOPOLOGY: Final = -1071242439 +ERROR_GRAPHICS_INVALID_PRIMARYSURFACE_SIZE: Final = -1071242438 +ERROR_GRAPHICS_INVALID_VISIBLEREGION_SIZE: Final = -1071242437 +ERROR_GRAPHICS_INVALID_STRIDE: Final = -1071242436 +ERROR_GRAPHICS_INVALID_PIXELFORMAT: Final = -1071242435 +ERROR_GRAPHICS_INVALID_COLORBASIS: Final = -1071242434 +ERROR_GRAPHICS_INVALID_PIXELVALUEACCESSMODE: Final = -1071242433 +ERROR_GRAPHICS_TARGET_NOT_IN_TOPOLOGY: Final = -1071242432 +ERROR_GRAPHICS_NO_DISPLAY_MODE_MANAGEMENT_SUPPORT: Final = -1071242431 +ERROR_GRAPHICS_VIDPN_SOURCE_IN_USE: Final = -1071242430 +ERROR_GRAPHICS_CANT_ACCESS_ACTIVE_VIDPN: Final = -1071242429 +ERROR_GRAPHICS_INVALID_PATH_IMPORTANCE_ORDINAL: Final = -1071242428 +ERROR_GRAPHICS_INVALID_PATH_CONTENT_GEOMETRY_TRANSFORMATION: Final = -1071242427 +ERROR_GRAPHICS_PATH_CONTENT_GEOMETRY_TRANSFORMATION_NOT_SUPPORTED: Final = -1071242426 +ERROR_GRAPHICS_INVALID_GAMMA_RAMP: Final = -1071242425 +ERROR_GRAPHICS_GAMMA_RAMP_NOT_SUPPORTED: Final = -1071242424 +ERROR_GRAPHICS_MULTISAMPLING_NOT_SUPPORTED: Final = -1071242423 +ERROR_GRAPHICS_MODE_NOT_IN_MODESET: Final = -1071242422 +ERROR_GRAPHICS_DATASET_IS_EMPTY: Final = 0x0026234B +ERROR_GRAPHICS_NO_MORE_ELEMENTS_IN_DATASET: Final = 0x0026234C +ERROR_GRAPHICS_INVALID_VIDPN_TOPOLOGY_RECOMMENDATION_REASON: Final = -1071242419 +ERROR_GRAPHICS_INVALID_PATH_CONTENT_TYPE: Final = -1071242418 +ERROR_GRAPHICS_INVALID_COPYPROTECTION_TYPE: Final = -1071242417 +ERROR_GRAPHICS_UNASSIGNED_MODESET_ALREADY_EXISTS: Final = -1071242416 +ERROR_GRAPHICS_PATH_CONTENT_GEOMETRY_TRANSFORMATION_NOT_PINNED: Final = 0x00262351 
+ERROR_GRAPHICS_INVALID_SCANLINE_ORDERING: Final = -1071242414 +ERROR_GRAPHICS_TOPOLOGY_CHANGES_NOT_ALLOWED: Final = -1071242413 +ERROR_GRAPHICS_NO_AVAILABLE_IMPORTANCE_ORDINALS: Final = -1071242412 +ERROR_GRAPHICS_INCOMPATIBLE_PRIVATE_FORMAT: Final = -1071242411 +ERROR_GRAPHICS_INVALID_MODE_PRUNING_ALGORITHM: Final = -1071242410 +ERROR_GRAPHICS_INVALID_MONITOR_CAPABILITY_ORIGIN: Final = -1071242409 +ERROR_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE_CONSTRAINT: Final = -1071242408 +ERROR_GRAPHICS_MAX_NUM_PATHS_REACHED: Final = -1071242407 +ERROR_GRAPHICS_CANCEL_VIDPN_TOPOLOGY_AUGMENTATION: Final = -1071242406 +ERROR_GRAPHICS_INVALID_CLIENT_TYPE: Final = -1071242405 +ERROR_GRAPHICS_CLIENTVIDPN_NOT_SET: Final = -1071242404 +ERROR_GRAPHICS_SPECIFIED_CHILD_ALREADY_CONNECTED: Final = -1071242240 +ERROR_GRAPHICS_CHILD_DESCRIPTOR_NOT_SUPPORTED: Final = -1071242239 +ERROR_GRAPHICS_UNKNOWN_CHILD_STATUS: Final = 0x4026242F +ERROR_GRAPHICS_NOT_A_LINKED_ADAPTER: Final = -1071242192 +ERROR_GRAPHICS_LEADLINK_NOT_ENUMERATED: Final = -1071242191 +ERROR_GRAPHICS_CHAINLINKS_NOT_ENUMERATED: Final = -1071242190 +ERROR_GRAPHICS_ADAPTER_CHAIN_NOT_READY: Final = -1071242189 +ERROR_GRAPHICS_CHAINLINKS_NOT_STARTED: Final = -1071242188 +ERROR_GRAPHICS_CHAINLINKS_NOT_POWERED_ON: Final = -1071242187 +ERROR_GRAPHICS_INCONSISTENT_DEVICE_LINK_STATE: Final = -1071242186 +ERROR_GRAPHICS_LEADLINK_START_DEFERRED: Final = 0x40262437 +ERROR_GRAPHICS_NOT_POST_DEVICE_DRIVER: Final = -1071242184 +ERROR_GRAPHICS_POLLING_TOO_FREQUENTLY: Final = 0x40262439 +ERROR_GRAPHICS_START_DEFERRED: Final = 0x4026243A +ERROR_GRAPHICS_ADAPTER_ACCESS_NOT_EXCLUDED: Final = -1071242181 +ERROR_GRAPHICS_DEPENDABLE_CHILD_STATUS: Final = 0x4026243C +ERROR_GRAPHICS_OPM_NOT_SUPPORTED: Final = -1071241984 +ERROR_GRAPHICS_COPP_NOT_SUPPORTED: Final = -1071241983 +ERROR_GRAPHICS_UAB_NOT_SUPPORTED: Final = -1071241982 +ERROR_GRAPHICS_OPM_INVALID_ENCRYPTED_PARAMETERS: Final = -1071241981 +ERROR_GRAPHICS_OPM_NO_VIDEO_OUTPUTS_EXIST: Final = -1071241979 +ERROR_GRAPHICS_OPM_INTERNAL_ERROR: Final = -1071241973 +ERROR_GRAPHICS_OPM_INVALID_HANDLE: Final = -1071241972 +ERROR_GRAPHICS_PVP_INVALID_CERTIFICATE_LENGTH: Final = -1071241970 +ERROR_GRAPHICS_OPM_SPANNING_MODE_ENABLED: Final = -1071241969 +ERROR_GRAPHICS_OPM_THEATER_MODE_ENABLED: Final = -1071241968 +ERROR_GRAPHICS_PVP_HFS_FAILED: Final = -1071241967 +ERROR_GRAPHICS_OPM_INVALID_SRM: Final = -1071241966 +ERROR_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_HDCP: Final = -1071241965 +ERROR_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_ACP: Final = -1071241964 +ERROR_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_CGMSA: Final = -1071241963 +ERROR_GRAPHICS_OPM_HDCP_SRM_NEVER_SET: Final = -1071241962 +ERROR_GRAPHICS_OPM_RESOLUTION_TOO_HIGH: Final = -1071241961 +ERROR_GRAPHICS_OPM_ALL_HDCP_HARDWARE_ALREADY_IN_USE: Final = -1071241960 +ERROR_GRAPHICS_OPM_VIDEO_OUTPUT_NO_LONGER_EXISTS: Final = -1071241958 +ERROR_GRAPHICS_OPM_SESSION_TYPE_CHANGE_IN_PROGRESS: Final = -1071241957 +ERROR_GRAPHICS_OPM_VIDEO_OUTPUT_DOES_NOT_HAVE_COPP_SEMANTICS: Final = -1071241956 +ERROR_GRAPHICS_OPM_INVALID_INFORMATION_REQUEST: Final = -1071241955 +ERROR_GRAPHICS_OPM_DRIVER_INTERNAL_ERROR: Final = -1071241954 +ERROR_GRAPHICS_OPM_VIDEO_OUTPUT_DOES_NOT_HAVE_OPM_SEMANTICS: Final = -1071241953 +ERROR_GRAPHICS_OPM_SIGNALING_NOT_SUPPORTED: Final = -1071241952 +ERROR_GRAPHICS_OPM_INVALID_CONFIGURATION_REQUEST: Final = -1071241951 +ERROR_GRAPHICS_I2C_NOT_SUPPORTED: Final = -1071241856 +ERROR_GRAPHICS_I2C_DEVICE_DOES_NOT_EXIST: Final = -1071241855 
+ERROR_GRAPHICS_I2C_ERROR_TRANSMITTING_DATA: Final = -1071241854 +ERROR_GRAPHICS_I2C_ERROR_RECEIVING_DATA: Final = -1071241853 +ERROR_GRAPHICS_DDCCI_VCP_NOT_SUPPORTED: Final = -1071241852 +ERROR_GRAPHICS_DDCCI_INVALID_DATA: Final = -1071241851 +ERROR_GRAPHICS_DDCCI_MONITOR_RETURNED_INVALID_TIMING_STATUS_BYTE: Final = -1071241850 +ERROR_GRAPHICS_MCA_INVALID_CAPABILITIES_STRING: Final = -1071241849 +ERROR_GRAPHICS_MCA_INTERNAL_ERROR: Final = -1071241848 +ERROR_GRAPHICS_DDCCI_INVALID_MESSAGE_COMMAND: Final = -1071241847 +ERROR_GRAPHICS_DDCCI_INVALID_MESSAGE_LENGTH: Final = -1071241846 +ERROR_GRAPHICS_DDCCI_INVALID_MESSAGE_CHECKSUM: Final = -1071241845 +ERROR_GRAPHICS_INVALID_PHYSICAL_MONITOR_HANDLE: Final = -1071241844 +ERROR_GRAPHICS_MONITOR_NO_LONGER_EXISTS: Final = -1071241843 +ERROR_GRAPHICS_DDCCI_CURRENT_CURRENT_VALUE_GREATER_THAN_MAXIMUM_VALUE: Final = -1071241768 +ERROR_GRAPHICS_MCA_INVALID_VCP_VERSION: Final = -1071241767 +ERROR_GRAPHICS_MCA_MONITOR_VIOLATES_MCCS_SPECIFICATION: Final = -1071241766 +ERROR_GRAPHICS_MCA_MCCS_VERSION_MISMATCH: Final = -1071241765 +ERROR_GRAPHICS_MCA_UNSUPPORTED_MCCS_VERSION: Final = -1071241764 +ERROR_GRAPHICS_MCA_INVALID_TECHNOLOGY_TYPE_RETURNED: Final = -1071241762 +ERROR_GRAPHICS_MCA_UNSUPPORTED_COLOR_TEMPERATURE: Final = -1071241761 +ERROR_GRAPHICS_ONLY_CONSOLE_SESSION_SUPPORTED: Final = -1071241760 +ERROR_GRAPHICS_NO_DISPLAY_DEVICE_CORRESPONDS_TO_NAME: Final = -1071241759 +ERROR_GRAPHICS_DISPLAY_DEVICE_NOT_ATTACHED_TO_DESKTOP: Final = -1071241758 +ERROR_GRAPHICS_MIRRORING_DEVICES_NOT_SUPPORTED: Final = -1071241757 +ERROR_GRAPHICS_INVALID_POINTER: Final = -1071241756 +ERROR_GRAPHICS_NO_MONITORS_CORRESPOND_TO_DISPLAY_DEVICE: Final = -1071241755 +ERROR_GRAPHICS_PARAMETER_ARRAY_TOO_SMALL: Final = -1071241754 +ERROR_GRAPHICS_INTERNAL_ERROR: Final = -1071241753 +ERROR_GRAPHICS_SESSION_TYPE_CHANGE_IN_PROGRESS: Final = -1071249944 +NAP_E_INVALID_PACKET: Final = -2144927743 +NAP_E_MISSING_SOH: Final = -2144927742 +NAP_E_CONFLICTING_ID: Final = -2144927741 +NAP_E_NO_CACHED_SOH: Final = -2144927740 +NAP_E_STILL_BOUND: Final = -2144927739 +NAP_E_NOT_REGISTERED: Final = -2144927738 +NAP_E_NOT_INITIALIZED: Final = -2144927737 +NAP_E_MISMATCHED_ID: Final = -2144927736 +NAP_E_NOT_PENDING: Final = -2144927735 +NAP_E_ID_NOT_FOUND: Final = -2144927734 +NAP_E_MAXSIZE_TOO_SMALL: Final = -2144927733 +NAP_E_SERVICE_NOT_RUNNING: Final = -2144927732 +NAP_S_CERT_ALREADY_PRESENT: Final = 0x0027000D +NAP_E_ENTITY_DISABLED: Final = -2144927730 +NAP_E_NETSH_GROUPPOLICY_ERROR: Final = -2144927729 +NAP_E_TOO_MANY_CALLS: Final = -2144927728 +NAP_E_SHV_CONFIG_EXISTED: Final = -2144927727 +NAP_E_SHV_CONFIG_NOT_FOUND: Final = -2144927726 +NAP_E_SHV_TIMEOUT: Final = -2144927725 +TPM_E_ERROR_MASK: Final = -2144862208 +TPM_E_AUTHFAIL: Final = -2144862207 +TPM_E_BADINDEX: Final = -2144862206 +TPM_E_BAD_PARAMETER: Final = -2144862205 +TPM_E_AUDITFAILURE: Final = -2144862204 +TPM_E_CLEAR_DISABLED: Final = -2144862203 +TPM_E_DEACTIVATED: Final = -2144862202 +TPM_E_DISABLED: Final = -2144862201 +TPM_E_DISABLED_CMD: Final = -2144862200 +TPM_E_FAIL: Final = -2144862199 +TPM_E_BAD_ORDINAL: Final = -2144862198 +TPM_E_INSTALL_DISABLED: Final = -2144862197 +TPM_E_INVALID_KEYHANDLE: Final = -2144862196 +TPM_E_KEYNOTFOUND: Final = -2144862195 +TPM_E_INAPPROPRIATE_ENC: Final = -2144862194 +TPM_E_MIGRATEFAIL: Final = -2144862193 +TPM_E_INVALID_PCR_INFO: Final = -2144862192 +TPM_E_NOSPACE: Final = -2144862191 +TPM_E_NOSRK: Final = -2144862190 +TPM_E_NOTSEALED_BLOB: Final = -2144862189 +TPM_E_OWNER_SET: 
Final = -2144862188 +TPM_E_RESOURCES: Final = -2144862187 +TPM_E_SHORTRANDOM: Final = -2144862186 +TPM_E_SIZE: Final = -2144862185 +TPM_E_WRONGPCRVAL: Final = -2144862184 +TPM_E_BAD_PARAM_SIZE: Final = -2144862183 +TPM_E_SHA_THREAD: Final = -2144862182 +TPM_E_SHA_ERROR: Final = -2144862181 +TPM_E_FAILEDSELFTEST: Final = -2144862180 +TPM_E_AUTH2FAIL: Final = -2144862179 +TPM_E_BADTAG: Final = -2144862178 +TPM_E_IOERROR: Final = -2144862177 +TPM_E_ENCRYPT_ERROR: Final = -2144862176 +TPM_E_DECRYPT_ERROR: Final = -2144862175 +TPM_E_INVALID_AUTHHANDLE: Final = -2144862174 +TPM_E_NO_ENDORSEMENT: Final = -2144862173 +TPM_E_INVALID_KEYUSAGE: Final = -2144862172 +TPM_E_WRONG_ENTITYTYPE: Final = -2144862171 +TPM_E_INVALID_POSTINIT: Final = -2144862170 +TPM_E_INAPPROPRIATE_SIG: Final = -2144862169 +TPM_E_BAD_KEY_PROPERTY: Final = -2144862168 +TPM_E_BAD_MIGRATION: Final = -2144862167 +TPM_E_BAD_SCHEME: Final = -2144862166 +TPM_E_BAD_DATASIZE: Final = -2144862165 +TPM_E_BAD_MODE: Final = -2144862164 +TPM_E_BAD_PRESENCE: Final = -2144862163 +TPM_E_BAD_VERSION: Final = -2144862162 +TPM_E_NO_WRAP_TRANSPORT: Final = -2144862161 +TPM_E_AUDITFAIL_UNSUCCESSFUL: Final = -2144862160 +TPM_E_AUDITFAIL_SUCCESSFUL: Final = -2144862159 +TPM_E_NOTRESETABLE: Final = -2144862158 +TPM_E_NOTLOCAL: Final = -2144862157 +TPM_E_BAD_TYPE: Final = -2144862156 +TPM_E_INVALID_RESOURCE: Final = -2144862155 +TPM_E_NOTFIPS: Final = -2144862154 +TPM_E_INVALID_FAMILY: Final = -2144862153 +TPM_E_NO_NV_PERMISSION: Final = -2144862152 +TPM_E_REQUIRES_SIGN: Final = -2144862151 +TPM_E_KEY_NOTSUPPORTED: Final = -2144862150 +TPM_E_AUTH_CONFLICT: Final = -2144862149 +TPM_E_AREA_LOCKED: Final = -2144862148 +TPM_E_BAD_LOCALITY: Final = -2144862147 +TPM_E_READ_ONLY: Final = -2144862146 +TPM_E_PER_NOWRITE: Final = -2144862145 +TPM_E_FAMILYCOUNT: Final = -2144862144 +TPM_E_WRITE_LOCKED: Final = -2144862143 +TPM_E_BAD_ATTRIBUTES: Final = -2144862142 +TPM_E_INVALID_STRUCTURE: Final = -2144862141 +TPM_E_KEY_OWNER_CONTROL: Final = -2144862140 +TPM_E_BAD_COUNTER: Final = -2144862139 +TPM_E_NOT_FULLWRITE: Final = -2144862138 +TPM_E_CONTEXT_GAP: Final = -2144862137 +TPM_E_MAXNVWRITES: Final = -2144862136 +TPM_E_NOOPERATOR: Final = -2144862135 +TPM_E_RESOURCEMISSING: Final = -2144862134 +TPM_E_DELEGATE_LOCK: Final = -2144862133 +TPM_E_DELEGATE_FAMILY: Final = -2144862132 +TPM_E_DELEGATE_ADMIN: Final = -2144862131 +TPM_E_TRANSPORT_NOTEXCLUSIVE: Final = -2144862130 +TPM_E_OWNER_CONTROL: Final = -2144862129 +TPM_E_DAA_RESOURCES: Final = -2144862128 +TPM_E_DAA_INPUT_DATA0: Final = -2144862127 +TPM_E_DAA_INPUT_DATA1: Final = -2144862126 +TPM_E_DAA_ISSUER_SETTINGS: Final = -2144862125 +TPM_E_DAA_TPM_SETTINGS: Final = -2144862124 +TPM_E_DAA_STAGE: Final = -2144862123 +TPM_E_DAA_ISSUER_VALIDITY: Final = -2144862122 +TPM_E_DAA_WRONG_W: Final = -2144862121 +TPM_E_BAD_HANDLE: Final = -2144862120 +TPM_E_BAD_DELEGATE: Final = -2144862119 +TPM_E_BADCONTEXT: Final = -2144862118 +TPM_E_TOOMANYCONTEXTS: Final = -2144862117 +TPM_E_MA_TICKET_SIGNATURE: Final = -2144862116 +TPM_E_MA_DESTINATION: Final = -2144862115 +TPM_E_MA_SOURCE: Final = -2144862114 +TPM_E_MA_AUTHORITY: Final = -2144862113 +TPM_E_PERMANENTEK: Final = -2144862111 +TPM_E_BAD_SIGNATURE: Final = -2144862110 +TPM_E_NOCONTEXTSPACE: Final = -2144862109 +TPM_20_E_ASYMMETRIC: Final = -2144862079 +TPM_20_E_ATTRIBUTES: Final = -2144862078 +TPM_20_E_HASH: Final = -2144862077 +TPM_20_E_VALUE: Final = -2144862076 +TPM_20_E_HIERARCHY: Final = -2144862075 +TPM_20_E_KEY_SIZE: Final = -2144862073 +TPM_20_E_MGF: Final = 
-2144862072 +TPM_20_E_MODE: Final = -2144862071 +TPM_20_E_TYPE: Final = -2144862070 +TPM_20_E_HANDLE: Final = -2144862069 +TPM_20_E_KDF: Final = -2144862068 +TPM_20_E_RANGE: Final = -2144862067 +TPM_20_E_AUTH_FAIL: Final = -2144862066 +TPM_20_E_NONCE: Final = -2144862065 +TPM_20_E_PP: Final = -2144862064 +TPM_20_E_SCHEME: Final = -2144862062 +TPM_20_E_SIZE: Final = -2144862059 +TPM_20_E_SYMMETRIC: Final = -2144862058 +TPM_20_E_TAG: Final = -2144862057 +TPM_20_E_SELECTOR: Final = -2144862056 +TPM_20_E_INSUFFICIENT: Final = -2144862054 +TPM_20_E_SIGNATURE: Final = -2144862053 +TPM_20_E_KEY: Final = -2144862052 +TPM_20_E_POLICY_FAIL: Final = -2144862051 +TPM_20_E_INTEGRITY: Final = -2144862049 +TPM_20_E_TICKET: Final = -2144862048 +TPM_20_E_RESERVED_BITS: Final = -2144862047 +TPM_20_E_BAD_AUTH: Final = -2144862046 +TPM_20_E_EXPIRED: Final = -2144862045 +TPM_20_E_POLICY_CC: Final = -2144862044 +TPM_20_E_BINDING: Final = -2144862043 +TPM_20_E_CURVE: Final = -2144862042 +TPM_20_E_ECC_POINT: Final = -2144862041 +TPM_20_E_INITIALIZE: Final = -2144861952 +TPM_20_E_FAILURE: Final = -2144861951 +TPM_20_E_SEQUENCE: Final = -2144861949 +TPM_20_E_PRIVATE: Final = -2144861941 +TPM_20_E_HMAC: Final = -2144861927 +TPM_20_E_DISABLED: Final = -2144861920 +TPM_20_E_EXCLUSIVE: Final = -2144861919 +TPM_20_E_ECC_CURVE: Final = -2144861917 +TPM_20_E_AUTH_TYPE: Final = -2144861916 +TPM_20_E_AUTH_MISSING: Final = -2144861915 +TPM_20_E_POLICY: Final = -2144861914 +TPM_20_E_PCR: Final = -2144861913 +TPM_20_E_PCR_CHANGED: Final = -2144861912 +TPM_20_E_UPGRADE: Final = -2144861907 +TPM_20_E_TOO_MANY_CONTEXTS: Final = -2144861906 +TPM_20_E_AUTH_UNAVAILABLE: Final = -2144861905 +TPM_20_E_REBOOT: Final = -2144861904 +TPM_20_E_UNBALANCED: Final = -2144861903 +TPM_20_E_COMMAND_SIZE: Final = -2144861886 +TPM_20_E_COMMAND_CODE: Final = -2144861885 +TPM_20_E_AUTHSIZE: Final = -2144861884 +TPM_20_E_AUTH_CONTEXT: Final = -2144861883 +TPM_20_E_NV_RANGE: Final = -2144861882 +TPM_20_E_NV_SIZE: Final = -2144861881 +TPM_20_E_NV_LOCKED: Final = -2144861880 +TPM_20_E_NV_AUTHORIZATION: Final = -2144861879 +TPM_20_E_NV_UNINITIALIZED: Final = -2144861878 +TPM_20_E_NV_SPACE: Final = -2144861877 +TPM_20_E_NV_DEFINED: Final = -2144861876 +TPM_20_E_BAD_CONTEXT: Final = -2144861872 +TPM_20_E_CPHASH: Final = -2144861871 +TPM_20_E_PARENT: Final = -2144861870 +TPM_20_E_NEEDS_TEST: Final = -2144861869 +TPM_20_E_NO_RESULT: Final = -2144861868 +TPM_20_E_SENSITIVE: Final = -2144861867 +TPM_E_COMMAND_BLOCKED: Final = -2144861184 +TPM_E_INVALID_HANDLE: Final = -2144861183 +TPM_E_DUPLICATE_VHANDLE: Final = -2144861182 +TPM_E_EMBEDDED_COMMAND_BLOCKED: Final = -2144861181 +TPM_E_EMBEDDED_COMMAND_UNSUPPORTED: Final = -2144861180 +TPM_E_RETRY: Final = -2144860160 +TPM_E_NEEDS_SELFTEST: Final = -2144860159 +TPM_E_DOING_SELFTEST: Final = -2144860158 +TPM_E_DEFEND_LOCK_RUNNING: Final = -2144860157 +TPM_20_E_CONTEXT_GAP: Final = -2144859903 +TPM_20_E_OBJECT_MEMORY: Final = -2144859902 +TPM_20_E_SESSION_MEMORY: Final = -2144859901 +TPM_20_E_MEMORY: Final = -2144859900 +TPM_20_E_SESSION_HANDLES: Final = -2144859899 +TPM_20_E_OBJECT_HANDLES: Final = -2144859898 +TPM_20_E_LOCALITY: Final = -2144859897 +TPM_20_E_YIELDED: Final = -2144859896 +TPM_20_E_CANCELED: Final = -2144859895 +TPM_20_E_TESTING: Final = -2144859894 +TPM_20_E_NV_RATE: Final = -2144859872 +TPM_20_E_LOCKOUT: Final = -2144859871 +TPM_20_E_RETRY: Final = -2144859870 +TPM_20_E_NV_UNAVAILABLE: Final = -2144859869 +TBS_E_INTERNAL_ERROR: Final = -2144845823 +TBS_E_BAD_PARAMETER: Final = -2144845822 
+TBS_E_INVALID_OUTPUT_POINTER: Final = -2144845821 +TBS_E_INVALID_CONTEXT: Final = -2144845820 +TBS_E_INSUFFICIENT_BUFFER: Final = -2144845819 +TBS_E_IOERROR: Final = -2144845818 +TBS_E_INVALID_CONTEXT_PARAM: Final = -2144845817 +TBS_E_SERVICE_NOT_RUNNING: Final = -2144845816 +TBS_E_TOO_MANY_TBS_CONTEXTS: Final = -2144845815 +TBS_E_TOO_MANY_RESOURCES: Final = -2144845814 +TBS_E_SERVICE_START_PENDING: Final = -2144845813 +TBS_E_PPI_NOT_SUPPORTED: Final = -2144845812 +TBS_E_COMMAND_CANCELED: Final = -2144845811 +TBS_E_BUFFER_TOO_LARGE: Final = -2144845810 +TBS_E_TPM_NOT_FOUND: Final = -2144845809 +TBS_E_SERVICE_DISABLED: Final = -2144845808 +TBS_E_NO_EVENT_LOG: Final = -2144845807 +TBS_E_ACCESS_DENIED: Final = -2144845806 +TBS_E_PROVISIONING_NOT_ALLOWED: Final = -2144845805 +TBS_E_PPI_FUNCTION_UNSUPPORTED: Final = -2144845804 +TBS_E_OWNERAUTH_NOT_FOUND: Final = -2144845803 +TBS_E_PROVISIONING_INCOMPLETE: Final = -2144845802 +TPMAPI_E_INVALID_STATE: Final = -2144796416 +TPMAPI_E_NOT_ENOUGH_DATA: Final = -2144796415 +TPMAPI_E_TOO_MUCH_DATA: Final = -2144796414 +TPMAPI_E_INVALID_OUTPUT_POINTER: Final = -2144796413 +TPMAPI_E_INVALID_PARAMETER: Final = -2144796412 +TPMAPI_E_OUT_OF_MEMORY: Final = -2144796411 +TPMAPI_E_BUFFER_TOO_SMALL: Final = -2144796410 +TPMAPI_E_INTERNAL_ERROR: Final = -2144796409 +TPMAPI_E_ACCESS_DENIED: Final = -2144796408 +TPMAPI_E_AUTHORIZATION_FAILED: Final = -2144796407 +TPMAPI_E_INVALID_CONTEXT_HANDLE: Final = -2144796406 +TPMAPI_E_TBS_COMMUNICATION_ERROR: Final = -2144796405 +TPMAPI_E_TPM_COMMAND_ERROR: Final = -2144796404 +TPMAPI_E_MESSAGE_TOO_LARGE: Final = -2144796403 +TPMAPI_E_INVALID_ENCODING: Final = -2144796402 +TPMAPI_E_INVALID_KEY_SIZE: Final = -2144796401 +TPMAPI_E_ENCRYPTION_FAILED: Final = -2144796400 +TPMAPI_E_INVALID_KEY_PARAMS: Final = -2144796399 +TPMAPI_E_INVALID_MIGRATION_AUTHORIZATION_BLOB: Final = -2144796398 +TPMAPI_E_INVALID_PCR_INDEX: Final = -2144796397 +TPMAPI_E_INVALID_DELEGATE_BLOB: Final = -2144796396 +TPMAPI_E_INVALID_CONTEXT_PARAMS: Final = -2144796395 +TPMAPI_E_INVALID_KEY_BLOB: Final = -2144796394 +TPMAPI_E_INVALID_PCR_DATA: Final = -2144796393 +TPMAPI_E_INVALID_OWNER_AUTH: Final = -2144796392 +TPMAPI_E_FIPS_RNG_CHECK_FAILED: Final = -2144796391 +TPMAPI_E_EMPTY_TCG_LOG: Final = -2144796390 +TPMAPI_E_INVALID_TCG_LOG_ENTRY: Final = -2144796389 +TPMAPI_E_TCG_SEPARATOR_ABSENT: Final = -2144796388 +TPMAPI_E_TCG_INVALID_DIGEST_ENTRY: Final = -2144796387 +TPMAPI_E_POLICY_DENIES_OPERATION: Final = -2144796386 +TPMAPI_E_NV_BITS_NOT_DEFINED: Final = -2144796385 +TPMAPI_E_NV_BITS_NOT_READY: Final = -2144796384 +TPMAPI_E_SEALING_KEY_NOT_AVAILABLE: Final = -2144796383 +TPMAPI_E_NO_AUTHORIZATION_CHAIN_FOUND: Final = -2144796382 +TPMAPI_E_SVN_COUNTER_NOT_AVAILABLE: Final = -2144796381 +TPMAPI_E_OWNER_AUTH_NOT_NULL: Final = -2144796380 +TPMAPI_E_ENDORSEMENT_AUTH_NOT_NULL: Final = -2144796379 +TPMAPI_E_AUTHORIZATION_REVOKED: Final = -2144796378 +TPMAPI_E_MALFORMED_AUTHORIZATION_KEY: Final = -2144796377 +TPMAPI_E_AUTHORIZING_KEY_NOT_SUPPORTED: Final = -2144796376 +TPMAPI_E_INVALID_AUTHORIZATION_SIGNATURE: Final = -2144796375 +TPMAPI_E_MALFORMED_AUTHORIZATION_POLICY: Final = -2144796374 +TPMAPI_E_MALFORMED_AUTHORIZATION_OTHER: Final = -2144796373 +TPMAPI_E_SEALING_KEY_CHANGED: Final = -2144796372 +TPMAPI_E_INVALID_TPM_VERSION: Final = -2144796371 +TPMAPI_E_INVALID_POLICYAUTH_BLOB_TYPE: Final = -2144796370 +TBSIMP_E_BUFFER_TOO_SMALL: Final = -2144796160 +TBSIMP_E_CLEANUP_FAILED: Final = -2144796159 +TBSIMP_E_INVALID_CONTEXT_HANDLE: Final = -2144796158 
+TBSIMP_E_INVALID_CONTEXT_PARAM: Final = -2144796157 +TBSIMP_E_TPM_ERROR: Final = -2144796156 +TBSIMP_E_HASH_BAD_KEY: Final = -2144796155 +TBSIMP_E_DUPLICATE_VHANDLE: Final = -2144796154 +TBSIMP_E_INVALID_OUTPUT_POINTER: Final = -2144796153 +TBSIMP_E_INVALID_PARAMETER: Final = -2144796152 +TBSIMP_E_RPC_INIT_FAILED: Final = -2144796151 +TBSIMP_E_SCHEDULER_NOT_RUNNING: Final = -2144796150 +TBSIMP_E_COMMAND_CANCELED: Final = -2144796149 +TBSIMP_E_OUT_OF_MEMORY: Final = -2144796148 +TBSIMP_E_LIST_NO_MORE_ITEMS: Final = -2144796147 +TBSIMP_E_LIST_NOT_FOUND: Final = -2144796146 +TBSIMP_E_NOT_ENOUGH_SPACE: Final = -2144796145 +TBSIMP_E_NOT_ENOUGH_TPM_CONTEXTS: Final = -2144796144 +TBSIMP_E_COMMAND_FAILED: Final = -2144796143 +TBSIMP_E_UNKNOWN_ORDINAL: Final = -2144796142 +TBSIMP_E_RESOURCE_EXPIRED: Final = -2144796141 +TBSIMP_E_INVALID_RESOURCE: Final = -2144796140 +TBSIMP_E_NOTHING_TO_UNLOAD: Final = -2144796139 +TBSIMP_E_HASH_TABLE_FULL: Final = -2144796138 +TBSIMP_E_TOO_MANY_TBS_CONTEXTS: Final = -2144796137 +TBSIMP_E_TOO_MANY_RESOURCES: Final = -2144796136 +TBSIMP_E_PPI_NOT_SUPPORTED: Final = -2144796135 +TBSIMP_E_TPM_INCOMPATIBLE: Final = -2144796134 +TBSIMP_E_NO_EVENT_LOG: Final = -2144796133 +TPM_E_PPI_ACPI_FAILURE: Final = -2144795904 +TPM_E_PPI_USER_ABORT: Final = -2144795903 +TPM_E_PPI_BIOS_FAILURE: Final = -2144795902 +TPM_E_PPI_NOT_SUPPORTED: Final = -2144795901 +TPM_E_PPI_BLOCKED_IN_BIOS: Final = -2144795900 +TPM_E_PCP_ERROR_MASK: Final = -2144795648 +TPM_E_PCP_DEVICE_NOT_READY: Final = -2144795647 +TPM_E_PCP_INVALID_HANDLE: Final = -2144795646 +TPM_E_PCP_INVALID_PARAMETER: Final = -2144795645 +TPM_E_PCP_FLAG_NOT_SUPPORTED: Final = -2144795644 +TPM_E_PCP_NOT_SUPPORTED: Final = -2144795643 +TPM_E_PCP_BUFFER_TOO_SMALL: Final = -2144795642 +TPM_E_PCP_INTERNAL_ERROR: Final = -2144795641 +TPM_E_PCP_AUTHENTICATION_FAILED: Final = -2144795640 +TPM_E_PCP_AUTHENTICATION_IGNORED: Final = -2144795639 +TPM_E_PCP_POLICY_NOT_FOUND: Final = -2144795638 +TPM_E_PCP_PROFILE_NOT_FOUND: Final = -2144795637 +TPM_E_PCP_VALIDATION_FAILED: Final = -2144795636 +TPM_E_PCP_WRONG_PARENT: Final = -2144795634 +TPM_E_KEY_NOT_LOADED: Final = -2144795633 +TPM_E_NO_KEY_CERTIFICATION: Final = -2144795632 +TPM_E_KEY_NOT_FINALIZED: Final = -2144795631 +TPM_E_ATTESTATION_CHALLENGE_NOT_SET: Final = -2144795630 +TPM_E_NOT_PCR_BOUND: Final = -2144795629 +TPM_E_KEY_ALREADY_FINALIZED: Final = -2144795628 +TPM_E_KEY_USAGE_POLICY_NOT_SUPPORTED: Final = -2144795627 +TPM_E_KEY_USAGE_POLICY_INVALID: Final = -2144795626 +TPM_E_SOFT_KEY_ERROR: Final = -2144795625 +TPM_E_KEY_NOT_AUTHENTICATED: Final = -2144795624 +TPM_E_PCP_KEY_NOT_AIK: Final = -2144795623 +TPM_E_KEY_NOT_SIGNING_KEY: Final = -2144795622 +TPM_E_LOCKED_OUT: Final = -2144795621 +TPM_E_CLAIM_TYPE_NOT_SUPPORTED: Final = -2144795620 +TPM_E_VERSION_NOT_SUPPORTED: Final = -2144795619 +TPM_E_BUFFER_LENGTH_MISMATCH: Final = -2144795618 +TPM_E_PCP_IFX_RSA_KEY_CREATION_BLOCKED: Final = -2144795617 +TPM_E_PCP_TICKET_MISSING: Final = -2144795616 +TPM_E_PCP_RAW_POLICY_NOT_SUPPORTED: Final = -2144795615 +TPM_E_PCP_KEY_HANDLE_INVALIDATED: Final = -2144795614 +TPM_E_PCP_UNSUPPORTED_PSS_SALT: Final = 0x40290423 +TPM_E_PCP_PLATFORM_CLAIM_MAY_BE_OUTDATED: Final = 0x40290424 +TPM_E_PCP_PLATFORM_CLAIM_OUTDATED: Final = 0x40290425 +TPM_E_PCP_PLATFORM_CLAIM_REBOOT: Final = 0x40290426 +TPM_E_ZERO_EXHAUST_ENABLED: Final = -2144795392 +TPM_E_PROVISIONING_INCOMPLETE: Final = -2144795136 +TPM_E_INVALID_OWNER_AUTH: Final = -2144795135 +TPM_E_TOO_MUCH_DATA: Final = -2144795134 
+TPM_E_TPM_GENERATED_EPS: Final = -2144795133 +PLA_E_DCS_NOT_FOUND: Final = -2144337918 +PLA_E_DCS_IN_USE: Final = -2144337750 +PLA_E_TOO_MANY_FOLDERS: Final = -2144337851 +PLA_E_NO_MIN_DISK: Final = -2144337808 +PLA_E_DCS_ALREADY_EXISTS: Final = -2144337737 +PLA_S_PROPERTY_IGNORED: Final = 0x00300100 +PLA_E_PROPERTY_CONFLICT: Final = -2144337663 +PLA_E_DCS_SINGLETON_REQUIRED: Final = -2144337662 +PLA_E_CREDENTIALS_REQUIRED: Final = -2144337661 +PLA_E_DCS_NOT_RUNNING: Final = -2144337660 +PLA_E_CONFLICT_INCL_EXCL_API: Final = -2144337659 +PLA_E_NETWORK_EXE_NOT_VALID: Final = -2144337658 +PLA_E_EXE_ALREADY_CONFIGURED: Final = -2144337657 +PLA_E_EXE_PATH_NOT_VALID: Final = -2144337656 +PLA_E_DC_ALREADY_EXISTS: Final = -2144337655 +PLA_E_DCS_START_WAIT_TIMEOUT: Final = -2144337654 +PLA_E_DC_START_WAIT_TIMEOUT: Final = -2144337653 +PLA_E_REPORT_WAIT_TIMEOUT: Final = -2144337652 +PLA_E_NO_DUPLICATES: Final = -2144337651 +PLA_E_EXE_FULL_PATH_REQUIRED: Final = -2144337650 +PLA_E_INVALID_SESSION_NAME: Final = -2144337649 +PLA_E_PLA_CHANNEL_NOT_ENABLED: Final = -2144337648 +PLA_E_TASKSCHED_CHANNEL_NOT_ENABLED: Final = -2144337647 +PLA_E_RULES_MANAGER_FAILED: Final = -2144337646 +PLA_E_CABAPI_FAILURE: Final = -2144337645 +FVE_E_LOCKED_VOLUME: Final = -2144272384 +FVE_E_NOT_ENCRYPTED: Final = -2144272383 +FVE_E_NO_TPM_BIOS: Final = -2144272382 +FVE_E_NO_MBR_METRIC: Final = -2144272381 +FVE_E_NO_BOOTSECTOR_METRIC: Final = -2144272380 +FVE_E_NO_BOOTMGR_METRIC: Final = -2144272379 +FVE_E_WRONG_BOOTMGR: Final = -2144272378 +FVE_E_SECURE_KEY_REQUIRED: Final = -2144272377 +FVE_E_NOT_ACTIVATED: Final = -2144272376 +FVE_E_ACTION_NOT_ALLOWED: Final = -2144272375 +FVE_E_AD_SCHEMA_NOT_INSTALLED: Final = -2144272374 +FVE_E_AD_INVALID_DATATYPE: Final = -2144272373 +FVE_E_AD_INVALID_DATASIZE: Final = -2144272372 +FVE_E_AD_NO_VALUES: Final = -2144272371 +FVE_E_AD_ATTR_NOT_SET: Final = -2144272370 +FVE_E_AD_GUID_NOT_FOUND: Final = -2144272369 +FVE_E_BAD_INFORMATION: Final = -2144272368 +FVE_E_TOO_SMALL: Final = -2144272367 +FVE_E_SYSTEM_VOLUME: Final = -2144272366 +FVE_E_FAILED_WRONG_FS: Final = -2144272365 +FVE_E_BAD_PARTITION_SIZE: Final = -2144272364 +FVE_E_NOT_SUPPORTED: Final = -2144272363 +FVE_E_BAD_DATA: Final = -2144272362 +FVE_E_VOLUME_NOT_BOUND: Final = -2144272361 +FVE_E_TPM_NOT_OWNED: Final = -2144272360 +FVE_E_NOT_DATA_VOLUME: Final = -2144272359 +FVE_E_AD_INSUFFICIENT_BUFFER: Final = -2144272358 +FVE_E_CONV_READ: Final = -2144272357 +FVE_E_CONV_WRITE: Final = -2144272356 +FVE_E_KEY_REQUIRED: Final = -2144272355 +FVE_E_CLUSTERING_NOT_SUPPORTED: Final = -2144272354 +FVE_E_VOLUME_BOUND_ALREADY: Final = -2144272353 +FVE_E_OS_NOT_PROTECTED: Final = -2144272352 +FVE_E_PROTECTION_DISABLED: Final = -2144272351 +FVE_E_RECOVERY_KEY_REQUIRED: Final = -2144272350 +FVE_E_FOREIGN_VOLUME: Final = -2144272349 +FVE_E_OVERLAPPED_UPDATE: Final = -2144272348 +FVE_E_TPM_SRK_AUTH_NOT_ZERO: Final = -2144272347 +FVE_E_FAILED_SECTOR_SIZE: Final = -2144272346 +FVE_E_FAILED_AUTHENTICATION: Final = -2144272345 +FVE_E_NOT_OS_VOLUME: Final = -2144272344 +FVE_E_AUTOUNLOCK_ENABLED: Final = -2144272343 +FVE_E_WRONG_BOOTSECTOR: Final = -2144272342 +FVE_E_WRONG_SYSTEM_FS: Final = -2144272341 +FVE_E_POLICY_PASSWORD_REQUIRED: Final = -2144272340 +FVE_E_CANNOT_SET_FVEK_ENCRYPTED: Final = -2144272339 +FVE_E_CANNOT_ENCRYPT_NO_KEY: Final = -2144272338 +FVE_E_BOOTABLE_CDDVD: Final = -2144272336 +FVE_E_PROTECTOR_EXISTS: Final = -2144272335 +FVE_E_RELATIVE_PATH: Final = -2144272334 +FVE_E_PROTECTOR_NOT_FOUND: Final = -2144272333 
+FVE_E_INVALID_KEY_FORMAT: Final = -2144272332 +FVE_E_INVALID_PASSWORD_FORMAT: Final = -2144272331 +FVE_E_FIPS_RNG_CHECK_FAILED: Final = -2144272330 +FVE_E_FIPS_PREVENTS_RECOVERY_PASSWORD: Final = -2144272329 +FVE_E_FIPS_PREVENTS_EXTERNAL_KEY_EXPORT: Final = -2144272328 +FVE_E_NOT_DECRYPTED: Final = -2144272327 +FVE_E_INVALID_PROTECTOR_TYPE: Final = -2144272326 +FVE_E_NO_PROTECTORS_TO_TEST: Final = -2144272325 +FVE_E_KEYFILE_NOT_FOUND: Final = -2144272324 +FVE_E_KEYFILE_INVALID: Final = -2144272323 +FVE_E_KEYFILE_NO_VMK: Final = -2144272322 +FVE_E_TPM_DISABLED: Final = -2144272321 +FVE_E_NOT_ALLOWED_IN_SAFE_MODE: Final = -2144272320 +FVE_E_TPM_INVALID_PCR: Final = -2144272319 +FVE_E_TPM_NO_VMK: Final = -2144272318 +FVE_E_PIN_INVALID: Final = -2144272317 +FVE_E_AUTH_INVALID_APPLICATION: Final = -2144272316 +FVE_E_AUTH_INVALID_CONFIG: Final = -2144272315 +FVE_E_FIPS_DISABLE_PROTECTION_NOT_ALLOWED: Final = -2144272314 +FVE_E_FS_NOT_EXTENDED: Final = -2144272313 +FVE_E_FIRMWARE_TYPE_NOT_SUPPORTED: Final = -2144272312 +FVE_E_NO_LICENSE: Final = -2144272311 +FVE_E_NOT_ON_STACK: Final = -2144272310 +FVE_E_FS_MOUNTED: Final = -2144272309 +FVE_E_TOKEN_NOT_IMPERSONATED: Final = -2144272308 +FVE_E_DRY_RUN_FAILED: Final = -2144272307 +FVE_E_REBOOT_REQUIRED: Final = -2144272306 +FVE_E_DEBUGGER_ENABLED: Final = -2144272305 +FVE_E_RAW_ACCESS: Final = -2144272304 +FVE_E_RAW_BLOCKED: Final = -2144272303 +FVE_E_BCD_APPLICATIONS_PATH_INCORRECT: Final = -2144272302 +FVE_E_NOT_ALLOWED_IN_VERSION: Final = -2144272301 +FVE_E_NO_AUTOUNLOCK_MASTER_KEY: Final = -2144272300 +FVE_E_MOR_FAILED: Final = -2144272299 +FVE_E_HIDDEN_VOLUME: Final = -2144272298 +FVE_E_TRANSIENT_STATE: Final = -2144272297 +FVE_E_PUBKEY_NOT_ALLOWED: Final = -2144272296 +FVE_E_VOLUME_HANDLE_OPEN: Final = -2144272295 +FVE_E_NO_FEATURE_LICENSE: Final = -2144272294 +FVE_E_INVALID_STARTUP_OPTIONS: Final = -2144272293 +FVE_E_POLICY_RECOVERY_PASSWORD_NOT_ALLOWED: Final = -2144272292 +FVE_E_POLICY_RECOVERY_PASSWORD_REQUIRED: Final = -2144272291 +FVE_E_POLICY_RECOVERY_KEY_NOT_ALLOWED: Final = -2144272290 +FVE_E_POLICY_RECOVERY_KEY_REQUIRED: Final = -2144272289 +FVE_E_POLICY_STARTUP_PIN_NOT_ALLOWED: Final = -2144272288 +FVE_E_POLICY_STARTUP_PIN_REQUIRED: Final = -2144272287 +FVE_E_POLICY_STARTUP_KEY_NOT_ALLOWED: Final = -2144272286 +FVE_E_POLICY_STARTUP_KEY_REQUIRED: Final = -2144272285 +FVE_E_POLICY_STARTUP_PIN_KEY_NOT_ALLOWED: Final = -2144272284 +FVE_E_POLICY_STARTUP_PIN_KEY_REQUIRED: Final = -2144272283 +FVE_E_POLICY_STARTUP_TPM_NOT_ALLOWED: Final = -2144272282 +FVE_E_POLICY_STARTUP_TPM_REQUIRED: Final = -2144272281 +FVE_E_POLICY_INVALID_PIN_LENGTH: Final = -2144272280 +FVE_E_KEY_PROTECTOR_NOT_SUPPORTED: Final = -2144272279 +FVE_E_POLICY_PASSPHRASE_NOT_ALLOWED: Final = -2144272278 +FVE_E_POLICY_PASSPHRASE_REQUIRED: Final = -2144272277 +FVE_E_FIPS_PREVENTS_PASSPHRASE: Final = -2144272276 +FVE_E_OS_VOLUME_PASSPHRASE_NOT_ALLOWED: Final = -2144272275 +FVE_E_INVALID_BITLOCKER_OID: Final = -2144272274 +FVE_E_VOLUME_TOO_SMALL: Final = -2144272273 +FVE_E_DV_NOT_SUPPORTED_ON_FS: Final = -2144272272 +FVE_E_DV_NOT_ALLOWED_BY_GP: Final = -2144272271 +FVE_E_POLICY_USER_CERTIFICATE_NOT_ALLOWED: Final = -2144272270 +FVE_E_POLICY_USER_CERTIFICATE_REQUIRED: Final = -2144272269 +FVE_E_POLICY_USER_CERT_MUST_BE_HW: Final = -2144272268 +FVE_E_POLICY_USER_CONFIGURE_FDV_AUTOUNLOCK_NOT_ALLOWED: Final = -2144272267 +FVE_E_POLICY_USER_CONFIGURE_RDV_AUTOUNLOCK_NOT_ALLOWED: Final = -2144272266 +FVE_E_POLICY_USER_CONFIGURE_RDV_NOT_ALLOWED: Final = -2144272265 
+FVE_E_POLICY_USER_ENABLE_RDV_NOT_ALLOWED: Final = -2144272264 +FVE_E_POLICY_USER_DISABLE_RDV_NOT_ALLOWED: Final = -2144272263 +FVE_E_POLICY_INVALID_PASSPHRASE_LENGTH: Final = -2144272256 +FVE_E_POLICY_PASSPHRASE_TOO_SIMPLE: Final = -2144272255 +FVE_E_RECOVERY_PARTITION: Final = -2144272254 +FVE_E_POLICY_CONFLICT_FDV_RK_OFF_AUK_ON: Final = -2144272253 +FVE_E_POLICY_CONFLICT_RDV_RK_OFF_AUK_ON: Final = -2144272252 +FVE_E_NON_BITLOCKER_OID: Final = -2144272251 +FVE_E_POLICY_PROHIBITS_SELFSIGNED: Final = -2144272250 +FVE_E_POLICY_CONFLICT_RO_AND_STARTUP_KEY_REQUIRED: Final = -2144272249 +FVE_E_CONV_RECOVERY_FAILED: Final = -2144272248 +FVE_E_VIRTUALIZED_SPACE_TOO_BIG: Final = -2144272247 +FVE_E_POLICY_CONFLICT_OSV_RP_OFF_ADB_ON: Final = -2144272240 +FVE_E_POLICY_CONFLICT_FDV_RP_OFF_ADB_ON: Final = -2144272239 +FVE_E_POLICY_CONFLICT_RDV_RP_OFF_ADB_ON: Final = -2144272238 +FVE_E_NON_BITLOCKER_KU: Final = -2144272237 +FVE_E_PRIVATEKEY_AUTH_FAILED: Final = -2144272236 +FVE_E_REMOVAL_OF_DRA_FAILED: Final = -2144272235 +FVE_E_OPERATION_NOT_SUPPORTED_ON_VISTA_VOLUME: Final = -2144272234 +FVE_E_CANT_LOCK_AUTOUNLOCK_ENABLED_VOLUME: Final = -2144272233 +FVE_E_FIPS_HASH_KDF_NOT_ALLOWED: Final = -2144272232 +FVE_E_ENH_PIN_INVALID: Final = -2144272231 +FVE_E_INVALID_PIN_CHARS: Final = -2144272230 +FVE_E_INVALID_DATUM_TYPE: Final = -2144272229 +FVE_E_EFI_ONLY: Final = -2144272228 +FVE_E_MULTIPLE_NKP_CERTS: Final = -2144272227 +FVE_E_REMOVAL_OF_NKP_FAILED: Final = -2144272226 +FVE_E_INVALID_NKP_CERT: Final = -2144272225 +FVE_E_NO_EXISTING_PIN: Final = -2144272224 +FVE_E_PROTECTOR_CHANGE_PIN_MISMATCH: Final = -2144272223 +FVE_E_PIN_PROTECTOR_CHANGE_BY_STD_USER_DISALLOWED: Final = -2144272222 +FVE_E_PROTECTOR_CHANGE_MAX_PIN_CHANGE_ATTEMPTS_REACHED: Final = -2144272221 +FVE_E_POLICY_PASSPHRASE_REQUIRES_ASCII: Final = -2144272220 +FVE_E_FULL_ENCRYPTION_NOT_ALLOWED_ON_TP_STORAGE: Final = -2144272219 +FVE_E_WIPE_NOT_ALLOWED_ON_TP_STORAGE: Final = -2144272218 +FVE_E_KEY_LENGTH_NOT_SUPPORTED_BY_EDRIVE: Final = -2144272217 +FVE_E_NO_EXISTING_PASSPHRASE: Final = -2144272216 +FVE_E_PROTECTOR_CHANGE_PASSPHRASE_MISMATCH: Final = -2144272215 +FVE_E_PASSPHRASE_TOO_LONG: Final = -2144272214 +FVE_E_NO_PASSPHRASE_WITH_TPM: Final = -2144272213 +FVE_E_NO_TPM_WITH_PASSPHRASE: Final = -2144272212 +FVE_E_NOT_ALLOWED_ON_CSV_STACK: Final = -2144272211 +FVE_E_NOT_ALLOWED_ON_CLUSTER: Final = -2144272210 +FVE_E_EDRIVE_NO_FAILOVER_TO_SW: Final = -2144272209 +FVE_E_EDRIVE_BAND_IN_USE: Final = -2144272208 +FVE_E_EDRIVE_DISALLOWED_BY_GP: Final = -2144272207 +FVE_E_EDRIVE_INCOMPATIBLE_VOLUME: Final = -2144272206 +FVE_E_NOT_ALLOWED_TO_UPGRADE_WHILE_CONVERTING: Final = -2144272205 +FVE_E_EDRIVE_DV_NOT_SUPPORTED: Final = -2144272204 +FVE_E_NO_PREBOOT_KEYBOARD_DETECTED: Final = -2144272203 +FVE_E_NO_PREBOOT_KEYBOARD_OR_WINRE_DETECTED: Final = -2144272202 +FVE_E_POLICY_REQUIRES_STARTUP_PIN_ON_TOUCH_DEVICE: Final = -2144272201 +FVE_E_POLICY_REQUIRES_RECOVERY_PASSWORD_ON_TOUCH_DEVICE: Final = -2144272200 +FVE_E_WIPE_CANCEL_NOT_APPLICABLE: Final = -2144272199 +FVE_E_SECUREBOOT_DISABLED: Final = -2144272198 +FVE_E_SECUREBOOT_CONFIGURATION_INVALID: Final = -2144272197 +FVE_E_EDRIVE_DRY_RUN_FAILED: Final = -2144272196 +FVE_E_SHADOW_COPY_PRESENT: Final = -2144272195 +FVE_E_POLICY_INVALID_ENHANCED_BCD_SETTINGS: Final = -2144272194 +FVE_E_EDRIVE_INCOMPATIBLE_FIRMWARE: Final = -2144272193 +FVE_E_PROTECTOR_CHANGE_MAX_PASSPHRASE_CHANGE_ATTEMPTS_REACHED: Final = -2144272192 +FVE_E_PASSPHRASE_PROTECTOR_CHANGE_BY_STD_USER_DISALLOWED: Final = -2144272191 
+FVE_E_LIVEID_ACCOUNT_SUSPENDED: Final = -2144272190 +FVE_E_LIVEID_ACCOUNT_BLOCKED: Final = -2144272189 +FVE_E_NOT_PROVISIONED_ON_ALL_VOLUMES: Final = -2144272188 +FVE_E_DE_FIXED_DATA_NOT_SUPPORTED: Final = -2144272187 +FVE_E_DE_HARDWARE_NOT_COMPLIANT: Final = -2144272186 +FVE_E_DE_WINRE_NOT_CONFIGURED: Final = -2144272185 +FVE_E_DE_PROTECTION_SUSPENDED: Final = -2144272184 +FVE_E_DE_OS_VOLUME_NOT_PROTECTED: Final = -2144272183 +FVE_E_DE_DEVICE_LOCKEDOUT: Final = -2144272182 +FVE_E_DE_PROTECTION_NOT_YET_ENABLED: Final = -2144272181 +FVE_E_INVALID_PIN_CHARS_DETAILED: Final = -2144272180 +FVE_E_DEVICE_LOCKOUT_COUNTER_UNAVAILABLE: Final = -2144272179 +FVE_E_DEVICELOCKOUT_COUNTER_MISMATCH: Final = -2144272178 +FVE_E_BUFFER_TOO_LARGE: Final = -2144272177 +FVE_E_NO_SUCH_CAPABILITY_ON_TARGET: Final = -2144272176 +FVE_E_DE_PREVENTED_FOR_OS: Final = -2144272175 +FVE_E_DE_VOLUME_OPTED_OUT: Final = -2144272174 +FVE_E_DE_VOLUME_NOT_SUPPORTED: Final = -2144272173 +FVE_E_EOW_NOT_SUPPORTED_IN_VERSION: Final = -2144272172 +FVE_E_ADBACKUP_NOT_ENABLED: Final = -2144272171 +FVE_E_VOLUME_EXTEND_PREVENTS_EOW_DECRYPT: Final = -2144272170 +FVE_E_NOT_DE_VOLUME: Final = -2144272169 +FVE_E_PROTECTION_CANNOT_BE_DISABLED: Final = -2144272168 +FVE_E_OSV_KSR_NOT_ALLOWED: Final = -2144272167 +FVE_E_AD_BACKUP_REQUIRED_POLICY_NOT_SET_OS_DRIVE: Final = -2144272166 +FVE_E_AD_BACKUP_REQUIRED_POLICY_NOT_SET_FIXED_DRIVE: Final = -2144272165 +FVE_E_AD_BACKUP_REQUIRED_POLICY_NOT_SET_REMOVABLE_DRIVE: Final = -2144272164 +FVE_E_KEY_ROTATION_NOT_SUPPORTED: Final = -2144272163 +FVE_E_EXECUTE_REQUEST_SENT_TOO_SOON: Final = -2144272162 +FVE_E_KEY_ROTATION_NOT_ENABLED: Final = -2144272161 +FVE_E_DEVICE_NOT_JOINED: Final = -2144272160 +FVE_E_AAD_ENDPOINT_BUSY: Final = -2144272159 +FVE_E_INVALID_NBP_CERT: Final = -2144272158 +FVE_E_EDRIVE_BAND_ENUMERATION_FAILED: Final = -2144272157 +FVE_E_POLICY_ON_RDV_EXCLUSION_LIST: Final = -2144272156 +FVE_E_PREDICTED_TPM_PROTECTOR_NOT_SUPPORTED: Final = -2144272155 +FVE_E_SETUP_TPM_CALLBACK_NOT_SUPPORTED: Final = -2144272154 +FVE_E_TPM_CONTEXT_SETUP_NOT_SUPPORTED: Final = -2144272153 +FVE_E_UPDATE_INVALID_CONFIG: Final = -2144272152 +FVE_E_AAD_SERVER_FAIL_RETRY_AFTER: Final = -2144272151 +FVE_E_AAD_SERVER_FAIL_BACKOFF: Final = -2144272150 +FVE_E_DATASET_FULL: Final = -2144272149 +FVE_E_METADATA_FULL: Final = -2144272148 +FWP_E_CALLOUT_NOT_FOUND: Final = -2144206847 +FWP_E_CONDITION_NOT_FOUND: Final = -2144206846 +FWP_E_FILTER_NOT_FOUND: Final = -2144206845 +FWP_E_LAYER_NOT_FOUND: Final = -2144206844 +FWP_E_PROVIDER_NOT_FOUND: Final = -2144206843 +FWP_E_PROVIDER_CONTEXT_NOT_FOUND: Final = -2144206842 +FWP_E_SUBLAYER_NOT_FOUND: Final = -2144206841 +FWP_E_NOT_FOUND: Final = -2144206840 +FWP_E_ALREADY_EXISTS: Final = -2144206839 +FWP_E_IN_USE: Final = -2144206838 +FWP_E_DYNAMIC_SESSION_IN_PROGRESS: Final = -2144206837 +FWP_E_WRONG_SESSION: Final = -2144206836 +FWP_E_NO_TXN_IN_PROGRESS: Final = -2144206835 +FWP_E_TXN_IN_PROGRESS: Final = -2144206834 +FWP_E_TXN_ABORTED: Final = -2144206833 +FWP_E_SESSION_ABORTED: Final = -2144206832 +FWP_E_INCOMPATIBLE_TXN: Final = -2144206831 +FWP_E_TIMEOUT: Final = -2144206830 +FWP_E_NET_EVENTS_DISABLED: Final = -2144206829 +FWP_E_INCOMPATIBLE_LAYER: Final = -2144206828 +FWP_E_KM_CLIENTS_ONLY: Final = -2144206827 +FWP_E_LIFETIME_MISMATCH: Final = -2144206826 +FWP_E_BUILTIN_OBJECT: Final = -2144206825 +FWP_E_TOO_MANY_CALLOUTS: Final = -2144206824 +FWP_E_NOTIFICATION_DROPPED: Final = -2144206823 +FWP_E_TRAFFIC_MISMATCH: Final = -2144206822 +FWP_E_INCOMPATIBLE_SA_STATE: 
Final = -2144206821 +FWP_E_NULL_POINTER: Final = -2144206820 +FWP_E_INVALID_ENUMERATOR: Final = -2144206819 +FWP_E_INVALID_FLAGS: Final = -2144206818 +FWP_E_INVALID_NET_MASK: Final = -2144206817 +FWP_E_INVALID_RANGE: Final = -2144206816 +FWP_E_INVALID_INTERVAL: Final = -2144206815 +FWP_E_ZERO_LENGTH_ARRAY: Final = -2144206814 +FWP_E_NULL_DISPLAY_NAME: Final = -2144206813 +FWP_E_INVALID_ACTION_TYPE: Final = -2144206812 +FWP_E_INVALID_WEIGHT: Final = -2144206811 +FWP_E_MATCH_TYPE_MISMATCH: Final = -2144206810 +FWP_E_TYPE_MISMATCH: Final = -2144206809 +FWP_E_OUT_OF_BOUNDS: Final = -2144206808 +FWP_E_RESERVED: Final = -2144206807 +FWP_E_DUPLICATE_CONDITION: Final = -2144206806 +FWP_E_DUPLICATE_KEYMOD: Final = -2144206805 +FWP_E_ACTION_INCOMPATIBLE_WITH_LAYER: Final = -2144206804 +FWP_E_ACTION_INCOMPATIBLE_WITH_SUBLAYER: Final = -2144206803 +FWP_E_CONTEXT_INCOMPATIBLE_WITH_LAYER: Final = -2144206802 +FWP_E_CONTEXT_INCOMPATIBLE_WITH_CALLOUT: Final = -2144206801 +FWP_E_INCOMPATIBLE_AUTH_METHOD: Final = -2144206800 +FWP_E_INCOMPATIBLE_DH_GROUP: Final = -2144206799 +FWP_E_EM_NOT_SUPPORTED: Final = -2144206798 +FWP_E_NEVER_MATCH: Final = -2144206797 +FWP_E_PROVIDER_CONTEXT_MISMATCH: Final = -2144206796 +FWP_E_INVALID_PARAMETER: Final = -2144206795 +FWP_E_TOO_MANY_SUBLAYERS: Final = -2144206794 +FWP_E_CALLOUT_NOTIFICATION_FAILED: Final = -2144206793 +FWP_E_INVALID_AUTH_TRANSFORM: Final = -2144206792 +FWP_E_INVALID_CIPHER_TRANSFORM: Final = -2144206791 +FWP_E_INCOMPATIBLE_CIPHER_TRANSFORM: Final = -2144206790 +FWP_E_INVALID_TRANSFORM_COMBINATION: Final = -2144206789 +FWP_E_DUPLICATE_AUTH_METHOD: Final = -2144206788 +FWP_E_INVALID_TUNNEL_ENDPOINT: Final = -2144206787 +FWP_E_L2_DRIVER_NOT_READY: Final = -2144206786 +FWP_E_KEY_DICTATOR_ALREADY_REGISTERED: Final = -2144206785 +FWP_E_KEY_DICTATION_INVALID_KEYING_MATERIAL: Final = -2144206784 +FWP_E_CONNECTIONS_DISABLED: Final = -2144206783 +FWP_E_INVALID_DNS_NAME: Final = -2144206782 +FWP_E_STILL_ON: Final = -2144206781 +FWP_E_IKEEXT_NOT_RUNNING: Final = -2144206780 +FWP_E_DROP_NOICMP: Final = -2144206588 +WS_S_ASYNC: Final = 0x003D0000 +WS_S_END: Final = 0x003D0001 +WS_E_INVALID_FORMAT: Final = -2143485952 +WS_E_OBJECT_FAULTED: Final = -2143485951 +WS_E_NUMERIC_OVERFLOW: Final = -2143485950 +WS_E_INVALID_OPERATION: Final = -2143485949 +WS_E_OPERATION_ABORTED: Final = -2143485948 +WS_E_ENDPOINT_ACCESS_DENIED: Final = -2143485947 +WS_E_OPERATION_TIMED_OUT: Final = -2143485946 +WS_E_OPERATION_ABANDONED: Final = -2143485945 +WS_E_QUOTA_EXCEEDED: Final = -2143485944 +WS_E_NO_TRANSLATION_AVAILABLE: Final = -2143485943 +WS_E_SECURITY_VERIFICATION_FAILURE: Final = -2143485942 +WS_E_ADDRESS_IN_USE: Final = -2143485941 +WS_E_ADDRESS_NOT_AVAILABLE: Final = -2143485940 +WS_E_ENDPOINT_NOT_FOUND: Final = -2143485939 +WS_E_ENDPOINT_NOT_AVAILABLE: Final = -2143485938 +WS_E_ENDPOINT_FAILURE: Final = -2143485937 +WS_E_ENDPOINT_UNREACHABLE: Final = -2143485936 +WS_E_ENDPOINT_ACTION_NOT_SUPPORTED: Final = -2143485935 +WS_E_ENDPOINT_TOO_BUSY: Final = -2143485934 +WS_E_ENDPOINT_FAULT_RECEIVED: Final = -2143485933 +WS_E_ENDPOINT_DISCONNECTED: Final = -2143485932 +WS_E_PROXY_FAILURE: Final = -2143485931 +WS_E_PROXY_ACCESS_DENIED: Final = -2143485930 +WS_E_NOT_SUPPORTED: Final = -2143485929 +WS_E_PROXY_REQUIRES_BASIC_AUTH: Final = -2143485928 +WS_E_PROXY_REQUIRES_DIGEST_AUTH: Final = -2143485927 +WS_E_PROXY_REQUIRES_NTLM_AUTH: Final = -2143485926 +WS_E_PROXY_REQUIRES_NEGOTIATE_AUTH: Final = -2143485925 +WS_E_SERVER_REQUIRES_BASIC_AUTH: Final = -2143485924 
+WS_E_SERVER_REQUIRES_DIGEST_AUTH: Final = -2143485923 +WS_E_SERVER_REQUIRES_NTLM_AUTH: Final = -2143485922 +WS_E_SERVER_REQUIRES_NEGOTIATE_AUTH: Final = -2143485921 +WS_E_INVALID_ENDPOINT_URL: Final = -2143485920 +WS_E_OTHER: Final = -2143485919 +WS_E_SECURITY_TOKEN_EXPIRED: Final = -2143485918 +WS_E_SECURITY_SYSTEM_FAILURE: Final = -2143485917 + +ERROR_NDIS_INTERFACE_CLOSING: Final = -2144075774 +ERROR_NDIS_BAD_VERSION: Final = -2144075772 +ERROR_NDIS_BAD_CHARACTERISTICS: Final = -2144075771 +ERROR_NDIS_ADAPTER_NOT_FOUND: Final = -2144075770 +ERROR_NDIS_OPEN_FAILED: Final = -2144075769 +ERROR_NDIS_DEVICE_FAILED: Final = -2144075768 +ERROR_NDIS_MULTICAST_FULL: Final = -2144075767 +ERROR_NDIS_MULTICAST_EXISTS: Final = -2144075766 +ERROR_NDIS_MULTICAST_NOT_FOUND: Final = -2144075765 +ERROR_NDIS_REQUEST_ABORTED: Final = -2144075764 +ERROR_NDIS_RESET_IN_PROGRESS: Final = -2144075763 +ERROR_NDIS_NOT_SUPPORTED: Final = -2144075589 +ERROR_NDIS_INVALID_PACKET: Final = -2144075761 +ERROR_NDIS_ADAPTER_NOT_READY: Final = -2144075759 +ERROR_NDIS_INVALID_LENGTH: Final = -2144075756 +ERROR_NDIS_INVALID_DATA: Final = -2144075755 +ERROR_NDIS_BUFFER_TOO_SHORT: Final = -2144075754 +ERROR_NDIS_INVALID_OID: Final = -2144075753 +ERROR_NDIS_ADAPTER_REMOVED: Final = -2144075752 +ERROR_NDIS_UNSUPPORTED_MEDIA: Final = -2144075751 +ERROR_NDIS_GROUP_ADDRESS_IN_USE: Final = -2144075750 +ERROR_NDIS_FILE_NOT_FOUND: Final = -2144075749 +ERROR_NDIS_ERROR_READING_FILE: Final = -2144075748 +ERROR_NDIS_ALREADY_MAPPED: Final = -2144075747 +ERROR_NDIS_RESOURCE_CONFLICT: Final = -2144075746 +ERROR_NDIS_MEDIA_DISCONNECTED: Final = -2144075745 +ERROR_NDIS_INVALID_ADDRESS: Final = -2144075742 +ERROR_NDIS_INVALID_DEVICE_REQUEST: Final = -2144075760 +ERROR_NDIS_PAUSED: Final = -2144075734 +ERROR_NDIS_INTERFACE_NOT_FOUND: Final = -2144075733 +ERROR_NDIS_UNSUPPORTED_REVISION: Final = -2144075732 +ERROR_NDIS_INVALID_PORT: Final = -2144075731 +ERROR_NDIS_INVALID_PORT_STATE: Final = -2144075730 +ERROR_NDIS_LOW_POWER_STATE: Final = -2144075729 +ERROR_NDIS_REINIT_REQUIRED: Final = -2144075728 +ERROR_NDIS_NO_QUEUES: Final = -2144075727 +ERROR_NDIS_DOT11_AUTO_CONFIG_ENABLED: Final = -2144067584 +ERROR_NDIS_DOT11_MEDIA_IN_USE: Final = -2144067583 +ERROR_NDIS_DOT11_POWER_STATE_INVALID: Final = -2144067582 +ERROR_NDIS_PM_WOL_PATTERN_LIST_FULL: Final = -2144067581 +ERROR_NDIS_PM_PROTOCOL_OFFLOAD_LIST_FULL: Final = -2144067580 +ERROR_NDIS_DOT11_AP_CHANNEL_CURRENTLY_NOT_AVAILABLE: Final = -2144067579 +ERROR_NDIS_DOT11_AP_BAND_CURRENTLY_NOT_AVAILABLE: Final = -2144067578 +ERROR_NDIS_DOT11_AP_CHANNEL_NOT_ALLOWED: Final = -2144067577 +ERROR_NDIS_DOT11_AP_BAND_NOT_ALLOWED: Final = -2144067576 +ERROR_NDIS_INDICATION_REQUIRED: Final = 0x00340001 +ERROR_NDIS_OFFLOAD_POLICY: Final = -1070329841 +ERROR_NDIS_OFFLOAD_CONNECTION_REJECTED: Final = -1070329838 +ERROR_NDIS_OFFLOAD_PATH_REJECTED: Final = -1070329837 +ERROR_HV_INVALID_HYPERCALL_CODE: Final = -1070268414 +ERROR_HV_INVALID_HYPERCALL_INPUT: Final = -1070268413 +ERROR_HV_INVALID_ALIGNMENT: Final = -1070268412 +ERROR_HV_INVALID_PARAMETER: Final = -1070268411 +ERROR_HV_ACCESS_DENIED: Final = -1070268410 +ERROR_HV_INVALID_PARTITION_STATE: Final = -1070268409 +ERROR_HV_OPERATION_DENIED: Final = -1070268408 +ERROR_HV_UNKNOWN_PROPERTY: Final = -1070268407 +ERROR_HV_PROPERTY_VALUE_OUT_OF_RANGE: Final = -1070268406 +ERROR_HV_INSUFFICIENT_MEMORY: Final = -1070268405 +ERROR_HV_PARTITION_TOO_DEEP: Final = -1070268404 +ERROR_HV_INVALID_PARTITION_ID: Final = -1070268403 +ERROR_HV_INVALID_VP_INDEX: Final = 
-1070268402 +ERROR_HV_INVALID_PORT_ID: Final = -1070268399 +ERROR_HV_INVALID_CONNECTION_ID: Final = -1070268398 +ERROR_HV_INSUFFICIENT_BUFFERS: Final = -1070268397 +ERROR_HV_NOT_ACKNOWLEDGED: Final = -1070268396 +ERROR_HV_INVALID_VP_STATE: Final = -1070268395 +ERROR_HV_ACKNOWLEDGED: Final = -1070268394 +ERROR_HV_INVALID_SAVE_RESTORE_STATE: Final = -1070268393 +ERROR_HV_INVALID_SYNIC_STATE: Final = -1070268392 +ERROR_HV_OBJECT_IN_USE: Final = -1070268391 +ERROR_HV_INVALID_PROXIMITY_DOMAIN_INFO: Final = -1070268390 +ERROR_HV_NO_DATA: Final = -1070268389 +ERROR_HV_INACTIVE: Final = -1070268388 +ERROR_HV_NO_RESOURCES: Final = -1070268387 +ERROR_HV_FEATURE_UNAVAILABLE: Final = -1070268386 +ERROR_HV_INSUFFICIENT_BUFFER: Final = -1070268365 +ERROR_HV_INSUFFICIENT_DEVICE_DOMAINS: Final = -1070268360 +ERROR_HV_CPUID_FEATURE_VALIDATION: Final = -1070268356 +ERROR_HV_CPUID_XSAVE_FEATURE_VALIDATION: Final = -1070268355 +ERROR_HV_PROCESSOR_STARTUP_TIMEOUT: Final = -1070268354 +ERROR_HV_SMX_ENABLED: Final = -1070268353 +ERROR_HV_INVALID_LP_INDEX: Final = -1070268351 +ERROR_HV_INVALID_REGISTER_VALUE: Final = -1070268336 +ERROR_HV_INVALID_VTL_STATE: Final = -1070268335 +ERROR_HV_NX_NOT_DETECTED: Final = -1070268331 +ERROR_HV_INVALID_DEVICE_ID: Final = -1070268329 +ERROR_HV_INVALID_DEVICE_STATE: Final = -1070268328 +ERROR_HV_PENDING_PAGE_REQUESTS: Final = 0x00350059 +ERROR_HV_PAGE_REQUEST_INVALID: Final = -1070268320 +ERROR_HV_INVALID_CPU_GROUP_ID: Final = -1070268305 +ERROR_HV_INVALID_CPU_GROUP_STATE: Final = -1070268304 +ERROR_HV_OPERATION_FAILED: Final = -1070268303 +ERROR_HV_NOT_ALLOWED_WITH_NESTED_VIRT_ACTIVE: Final = -1070268302 +ERROR_HV_INSUFFICIENT_ROOT_MEMORY: Final = -1070268301 +ERROR_HV_EVENT_BUFFER_ALREADY_FREED: Final = -1070268300 +ERROR_HV_INSUFFICIENT_CONTIGUOUS_MEMORY: Final = -1070268299 +ERROR_HV_DEVICE_NOT_IN_DOMAIN: Final = -1070268298 +ERROR_HV_NESTED_VM_EXIT: Final = -1070268297 +ERROR_HV_MSR_ACCESS_FAILED: Final = -1070268288 +ERROR_HV_INSUFFICIENT_MEMORY_MIRRORING: Final = -1070268287 +ERROR_HV_INSUFFICIENT_CONTIGUOUS_MEMORY_MIRRORING: Final = -1070268286 +ERROR_HV_INSUFFICIENT_CONTIGUOUS_ROOT_MEMORY: Final = -1070268285 +ERROR_HV_INSUFFICIENT_ROOT_MEMORY_MIRRORING: Final = -1070268284 +ERROR_HV_INSUFFICIENT_CONTIGUOUS_ROOT_MEMORY_MIRRORING: Final = -1070268283 +ERROR_HV_NOT_PRESENT: Final = -1070264320 +ERROR_VID_DUPLICATE_HANDLER: Final = -1070137343 +ERROR_VID_TOO_MANY_HANDLERS: Final = -1070137342 +ERROR_VID_QUEUE_FULL: Final = -1070137341 +ERROR_VID_HANDLER_NOT_PRESENT: Final = -1070137340 +ERROR_VID_INVALID_OBJECT_NAME: Final = -1070137339 +ERROR_VID_PARTITION_NAME_TOO_LONG: Final = -1070137338 +ERROR_VID_MESSAGE_QUEUE_NAME_TOO_LONG: Final = -1070137337 +ERROR_VID_PARTITION_ALREADY_EXISTS: Final = -1070137336 +ERROR_VID_PARTITION_DOES_NOT_EXIST: Final = -1070137335 +ERROR_VID_PARTITION_NAME_NOT_FOUND: Final = -1070137334 +ERROR_VID_MESSAGE_QUEUE_ALREADY_EXISTS: Final = -1070137333 +ERROR_VID_EXCEEDED_MBP_ENTRY_MAP_LIMIT: Final = -1070137332 +ERROR_VID_MB_STILL_REFERENCED: Final = -1070137331 +ERROR_VID_CHILD_GPA_PAGE_SET_CORRUPTED: Final = -1070137330 +ERROR_VID_INVALID_NUMA_SETTINGS: Final = -1070137329 +ERROR_VID_INVALID_NUMA_NODE_INDEX: Final = -1070137328 +ERROR_VID_NOTIFICATION_QUEUE_ALREADY_ASSOCIATED: Final = -1070137327 +ERROR_VID_INVALID_MEMORY_BLOCK_HANDLE: Final = -1070137326 +ERROR_VID_PAGE_RANGE_OVERFLOW: Final = -1070137325 +ERROR_VID_INVALID_MESSAGE_QUEUE_HANDLE: Final = -1070137324 +ERROR_VID_INVALID_GPA_RANGE_HANDLE: Final = -1070137323 
+ERROR_VID_NO_MEMORY_BLOCK_NOTIFICATION_QUEUE: Final = -1070137322 +ERROR_VID_MEMORY_BLOCK_LOCK_COUNT_EXCEEDED: Final = -1070137321 +ERROR_VID_INVALID_PPM_HANDLE: Final = -1070137320 +ERROR_VID_MBPS_ARE_LOCKED: Final = -1070137319 +ERROR_VID_MESSAGE_QUEUE_CLOSED: Final = -1070137318 +ERROR_VID_VIRTUAL_PROCESSOR_LIMIT_EXCEEDED: Final = -1070137317 +ERROR_VID_STOP_PENDING: Final = -1070137316 +ERROR_VID_INVALID_PROCESSOR_STATE: Final = -1070137315 +ERROR_VID_EXCEEDED_KM_CONTEXT_COUNT_LIMIT: Final = -1070137314 +ERROR_VID_KM_INTERFACE_ALREADY_INITIALIZED: Final = -1070137313 +ERROR_VID_MB_PROPERTY_ALREADY_SET_RESET: Final = -1070137312 +ERROR_VID_MMIO_RANGE_DESTROYED: Final = -1070137311 +ERROR_VID_INVALID_CHILD_GPA_PAGE_SET: Final = -1070137310 +ERROR_VID_RESERVE_PAGE_SET_IS_BEING_USED: Final = -1070137309 +ERROR_VID_RESERVE_PAGE_SET_TOO_SMALL: Final = -1070137308 +ERROR_VID_MBP_ALREADY_LOCKED_USING_RESERVED_PAGE: Final = -1070137307 +ERROR_VID_MBP_COUNT_EXCEEDED_LIMIT: Final = -1070137306 +ERROR_VID_SAVED_STATE_CORRUPT: Final = -1070137305 +ERROR_VID_SAVED_STATE_UNRECOGNIZED_ITEM: Final = -1070137304 +ERROR_VID_SAVED_STATE_INCOMPATIBLE: Final = -1070137303 +ERROR_VID_VTL_ACCESS_DENIED: Final = -1070137302 +ERROR_VID_INSUFFICIENT_RESOURCES_RESERVE: Final = -1070137301 +ERROR_VID_INSUFFICIENT_RESOURCES_PHYSICAL_BUFFER: Final = -1070137300 +ERROR_VID_INSUFFICIENT_RESOURCES_HV_DEPOSIT: Final = -1070137299 +ERROR_VID_MEMORY_TYPE_NOT_SUPPORTED: Final = -1070137298 +ERROR_VID_INSUFFICIENT_RESOURCES_WITHDRAW: Final = -1070137297 +ERROR_VID_PROCESS_ALREADY_SET: Final = -1070137296 +ERROR_VMCOMPUTE_TERMINATED_DURING_START: Final = -1070137088 +ERROR_VMCOMPUTE_IMAGE_MISMATCH: Final = -1070137087 +ERROR_VMCOMPUTE_HYPERV_NOT_INSTALLED: Final = -1070137086 +ERROR_VMCOMPUTE_OPERATION_PENDING: Final = -1070137085 +ERROR_VMCOMPUTE_TOO_MANY_NOTIFICATIONS: Final = -1070137084 +ERROR_VMCOMPUTE_INVALID_STATE: Final = -1070137083 +ERROR_VMCOMPUTE_UNEXPECTED_EXIT: Final = -1070137082 +ERROR_VMCOMPUTE_TERMINATED: Final = -1070137081 +ERROR_VMCOMPUTE_CONNECT_FAILED: Final = -1070137080 +ERROR_VMCOMPUTE_TIMEOUT: Final = -1070137079 +ERROR_VMCOMPUTE_CONNECTION_CLOSED: Final = -1070137078 +ERROR_VMCOMPUTE_UNKNOWN_MESSAGE: Final = -1070137077 +ERROR_VMCOMPUTE_UNSUPPORTED_PROTOCOL_VERSION: Final = -1070137076 +ERROR_VMCOMPUTE_INVALID_JSON: Final = -1070137075 +ERROR_VMCOMPUTE_SYSTEM_NOT_FOUND: Final = -1070137074 +ERROR_VMCOMPUTE_SYSTEM_ALREADY_EXISTS: Final = -1070137073 +ERROR_VMCOMPUTE_SYSTEM_ALREADY_STOPPED: Final = -1070137072 +ERROR_VMCOMPUTE_PROTOCOL_ERROR: Final = -1070137071 +ERROR_VMCOMPUTE_INVALID_LAYER: Final = -1070137070 +ERROR_VMCOMPUTE_WINDOWS_INSIDER_REQUIRED: Final = -1070137069 +HCS_E_TERMINATED_DURING_START: Final = -2143878912 +HCS_E_IMAGE_MISMATCH: Final = -2143878911 +HCS_E_HYPERV_NOT_INSTALLED: Final = -2143878910 +HCS_E_INVALID_STATE: Final = -2143878907 +HCS_E_UNEXPECTED_EXIT: Final = -2143878906 +HCS_E_TERMINATED: Final = -2143878905 +HCS_E_CONNECT_FAILED: Final = -2143878904 +HCS_E_CONNECTION_TIMEOUT: Final = -2143878903 +HCS_E_CONNECTION_CLOSED: Final = -2143878902 +HCS_E_UNKNOWN_MESSAGE: Final = -2143878901 +HCS_E_UNSUPPORTED_PROTOCOL_VERSION: Final = -2143878900 +HCS_E_INVALID_JSON: Final = -2143878899 +HCS_E_SYSTEM_NOT_FOUND: Final = -2143878898 +HCS_E_SYSTEM_ALREADY_EXISTS: Final = -2143878897 +HCS_E_SYSTEM_ALREADY_STOPPED: Final = -2143878896 +HCS_E_PROTOCOL_ERROR: Final = -2143878895 +HCS_E_INVALID_LAYER: Final = -2143878894 +HCS_E_WINDOWS_INSIDER_REQUIRED: Final = -2143878893 
+HCS_E_SERVICE_NOT_AVAILABLE: Final = -2143878892 +HCS_E_OPERATION_NOT_STARTED: Final = -2143878891 +HCS_E_OPERATION_ALREADY_STARTED: Final = -2143878890 +HCS_E_OPERATION_PENDING: Final = -2143878889 +HCS_E_OPERATION_TIMEOUT: Final = -2143878888 +HCS_E_OPERATION_SYSTEM_CALLBACK_ALREADY_SET: Final = -2143878887 +HCS_E_OPERATION_RESULT_ALLOCATION_FAILED: Final = -2143878886 +HCS_E_ACCESS_DENIED: Final = -2143878885 +HCS_E_GUEST_CRITICAL_ERROR: Final = -2143878884 +HCS_E_PROCESS_INFO_NOT_AVAILABLE: Final = -2143878883 +HCS_E_SERVICE_DISCONNECT: Final = -2143878882 +HCS_E_PROCESS_ALREADY_STOPPED: Final = -2143878881 +HCS_E_SYSTEM_NOT_CONFIGURED_FOR_OPERATION: Final = -2143878880 +HCS_E_OPERATION_ALREADY_CANCELLED: Final = -2143878879 +ERROR_VNET_VIRTUAL_SWITCH_NAME_NOT_FOUND: Final = -1070136832 +ERROR_VID_REMOTE_NODE_PARENT_GPA_PAGES_USED: Final = -2143879167 +WHV_E_UNKNOWN_CAPABILITY: Final = -2143878400 +WHV_E_INSUFFICIENT_BUFFER: Final = -2143878399 +WHV_E_UNKNOWN_PROPERTY: Final = -2143878398 +WHV_E_UNSUPPORTED_HYPERVISOR_CONFIG: Final = -2143878397 +WHV_E_INVALID_PARTITION_CONFIG: Final = -2143878396 +WHV_E_GPA_RANGE_NOT_FOUND: Final = -2143878395 +WHV_E_VP_ALREADY_EXISTS: Final = -2143878394 +WHV_E_VP_DOES_NOT_EXIST: Final = -2143878393 +WHV_E_INVALID_VP_STATE: Final = -2143878392 +WHV_E_INVALID_VP_REGISTER_NAME: Final = -2143878391 +WHV_E_UNSUPPORTED_PROCESSOR_CONFIG: Final = -2143878384 +ERROR_VSMB_SAVED_STATE_FILE_NOT_FOUND: Final = -1070136320 +ERROR_VSMB_SAVED_STATE_CORRUPT: Final = -1070136319 +VM_SAVED_STATE_DUMP_E_PARTITION_STATE_NOT_FOUND: Final = -1070136064 +VM_SAVED_STATE_DUMP_E_GUEST_MEMORY_NOT_FOUND: Final = -1070136063 +VM_SAVED_STATE_DUMP_E_NO_VP_FOUND_IN_PARTITION_STATE: Final = -1070136062 +VM_SAVED_STATE_DUMP_E_NESTED_VIRTUALIZATION_NOT_SUPPORTED: Final = -1070136061 +VM_SAVED_STATE_DUMP_E_WINDOWS_KERNEL_IMAGE_NOT_FOUND: Final = -1070136060 +VM_SAVED_STATE_DUMP_E_VA_NOT_MAPPED: Final = -1070136059 +VM_SAVED_STATE_DUMP_E_INVALID_VP_STATE: Final = -1070136058 +VM_SAVED_STATE_DUMP_E_VP_VTL_NOT_ENABLED: Final = -1070136055 +ERROR_DM_OPERATION_LIMIT_EXCEEDED: Final = -1070135808 +ERROR_VOLMGR_INCOMPLETE_REGENERATION: Final = -2143813631 +ERROR_VOLMGR_INCOMPLETE_DISK_MIGRATION: Final = -2143813630 +ERROR_VOLMGR_DATABASE_FULL: Final = -1070071807 +ERROR_VOLMGR_DISK_CONFIGURATION_CORRUPTED: Final = -1070071806 +ERROR_VOLMGR_DISK_CONFIGURATION_NOT_IN_SYNC: Final = -1070071805 +ERROR_VOLMGR_PACK_CONFIG_UPDATE_FAILED: Final = -1070071804 +ERROR_VOLMGR_DISK_CONTAINS_NON_SIMPLE_VOLUME: Final = -1070071803 +ERROR_VOLMGR_DISK_DUPLICATE: Final = -1070071802 +ERROR_VOLMGR_DISK_DYNAMIC: Final = -1070071801 +ERROR_VOLMGR_DISK_ID_INVALID: Final = -1070071800 +ERROR_VOLMGR_DISK_INVALID: Final = -1070071799 +ERROR_VOLMGR_DISK_LAST_VOTER: Final = -1070071798 +ERROR_VOLMGR_DISK_LAYOUT_INVALID: Final = -1070071797 +ERROR_VOLMGR_DISK_LAYOUT_NON_BASIC_BETWEEN_BASIC_PARTITIONS: Final = -1070071796 +ERROR_VOLMGR_DISK_LAYOUT_NOT_CYLINDER_ALIGNED: Final = -1070071795 +ERROR_VOLMGR_DISK_LAYOUT_PARTITIONS_TOO_SMALL: Final = -1070071794 +ERROR_VOLMGR_DISK_LAYOUT_PRIMARY_BETWEEN_LOGICAL_PARTITIONS: Final = -1070071793 +ERROR_VOLMGR_DISK_LAYOUT_TOO_MANY_PARTITIONS: Final = -1070071792 +ERROR_VOLMGR_DISK_MISSING: Final = -1070071791 +ERROR_VOLMGR_DISK_NOT_EMPTY: Final = -1070071790 +ERROR_VOLMGR_DISK_NOT_ENOUGH_SPACE: Final = -1070071789 +ERROR_VOLMGR_DISK_REVECTORING_FAILED: Final = -1070071788 +ERROR_VOLMGR_DISK_SECTOR_SIZE_INVALID: Final = -1070071787 +ERROR_VOLMGR_DISK_SET_NOT_CONTAINED: Final = 
-1070071786 +ERROR_VOLMGR_DISK_USED_BY_MULTIPLE_MEMBERS: Final = -1070071785 +ERROR_VOLMGR_DISK_USED_BY_MULTIPLE_PLEXES: Final = -1070071784 +ERROR_VOLMGR_DYNAMIC_DISK_NOT_SUPPORTED: Final = -1070071783 +ERROR_VOLMGR_EXTENT_ALREADY_USED: Final = -1070071782 +ERROR_VOLMGR_EXTENT_NOT_CONTIGUOUS: Final = -1070071781 +ERROR_VOLMGR_EXTENT_NOT_IN_PUBLIC_REGION: Final = -1070071780 +ERROR_VOLMGR_EXTENT_NOT_SECTOR_ALIGNED: Final = -1070071779 +ERROR_VOLMGR_EXTENT_OVERLAPS_EBR_PARTITION: Final = -1070071778 +ERROR_VOLMGR_EXTENT_VOLUME_LENGTHS_DO_NOT_MATCH: Final = -1070071777 +ERROR_VOLMGR_FAULT_TOLERANT_NOT_SUPPORTED: Final = -1070071776 +ERROR_VOLMGR_INTERLEAVE_LENGTH_INVALID: Final = -1070071775 +ERROR_VOLMGR_MAXIMUM_REGISTERED_USERS: Final = -1070071774 +ERROR_VOLMGR_MEMBER_IN_SYNC: Final = -1070071773 +ERROR_VOLMGR_MEMBER_INDEX_DUPLICATE: Final = -1070071772 +ERROR_VOLMGR_MEMBER_INDEX_INVALID: Final = -1070071771 +ERROR_VOLMGR_MEMBER_MISSING: Final = -1070071770 +ERROR_VOLMGR_MEMBER_NOT_DETACHED: Final = -1070071769 +ERROR_VOLMGR_MEMBER_REGENERATING: Final = -1070071768 +ERROR_VOLMGR_ALL_DISKS_FAILED: Final = -1070071767 +ERROR_VOLMGR_NO_REGISTERED_USERS: Final = -1070071766 +ERROR_VOLMGR_NO_SUCH_USER: Final = -1070071765 +ERROR_VOLMGR_NOTIFICATION_RESET: Final = -1070071764 +ERROR_VOLMGR_NUMBER_OF_MEMBERS_INVALID: Final = -1070071763 +ERROR_VOLMGR_NUMBER_OF_PLEXES_INVALID: Final = -1070071762 +ERROR_VOLMGR_PACK_DUPLICATE: Final = -1070071761 +ERROR_VOLMGR_PACK_ID_INVALID: Final = -1070071760 +ERROR_VOLMGR_PACK_INVALID: Final = -1070071759 +ERROR_VOLMGR_PACK_NAME_INVALID: Final = -1070071758 +ERROR_VOLMGR_PACK_OFFLINE: Final = -1070071757 +ERROR_VOLMGR_PACK_HAS_QUORUM: Final = -1070071756 +ERROR_VOLMGR_PACK_WITHOUT_QUORUM: Final = -1070071755 +ERROR_VOLMGR_PARTITION_STYLE_INVALID: Final = -1070071754 +ERROR_VOLMGR_PARTITION_UPDATE_FAILED: Final = -1070071753 +ERROR_VOLMGR_PLEX_IN_SYNC: Final = -1070071752 +ERROR_VOLMGR_PLEX_INDEX_DUPLICATE: Final = -1070071751 +ERROR_VOLMGR_PLEX_INDEX_INVALID: Final = -1070071750 +ERROR_VOLMGR_PLEX_LAST_ACTIVE: Final = -1070071749 +ERROR_VOLMGR_PLEX_MISSING: Final = -1070071748 +ERROR_VOLMGR_PLEX_REGENERATING: Final = -1070071747 +ERROR_VOLMGR_PLEX_TYPE_INVALID: Final = -1070071746 +ERROR_VOLMGR_PLEX_NOT_RAID5: Final = -1070071745 +ERROR_VOLMGR_PLEX_NOT_SIMPLE: Final = -1070071744 +ERROR_VOLMGR_STRUCTURE_SIZE_INVALID: Final = -1070071743 +ERROR_VOLMGR_TOO_MANY_NOTIFICATION_REQUESTS: Final = -1070071742 +ERROR_VOLMGR_TRANSACTION_IN_PROGRESS: Final = -1070071741 +ERROR_VOLMGR_UNEXPECTED_DISK_LAYOUT_CHANGE: Final = -1070071740 +ERROR_VOLMGR_VOLUME_CONTAINS_MISSING_DISK: Final = -1070071739 +ERROR_VOLMGR_VOLUME_ID_INVALID: Final = -1070071738 +ERROR_VOLMGR_VOLUME_LENGTH_INVALID: Final = -1070071737 +ERROR_VOLMGR_VOLUME_LENGTH_NOT_SECTOR_SIZE_MULTIPLE: Final = -1070071736 +ERROR_VOLMGR_VOLUME_NOT_MIRRORED: Final = -1070071735 +ERROR_VOLMGR_VOLUME_NOT_RETAINED: Final = -1070071734 +ERROR_VOLMGR_VOLUME_OFFLINE: Final = -1070071733 +ERROR_VOLMGR_VOLUME_RETAINED: Final = -1070071732 +ERROR_VOLMGR_NUMBER_OF_EXTENTS_INVALID: Final = -1070071731 +ERROR_VOLMGR_DIFFERENT_SECTOR_SIZE: Final = -1070071730 +ERROR_VOLMGR_BAD_BOOT_DISK: Final = -1070071729 +ERROR_VOLMGR_PACK_CONFIG_OFFLINE: Final = -1070071728 +ERROR_VOLMGR_PACK_CONFIG_ONLINE: Final = -1070071727 +ERROR_VOLMGR_NOT_PRIMARY_PACK: Final = -1070071726 +ERROR_VOLMGR_PACK_LOG_UPDATE_FAILED: Final = -1070071725 +ERROR_VOLMGR_NUMBER_OF_DISKS_IN_PLEX_INVALID: Final = -1070071724 
+ERROR_VOLMGR_NUMBER_OF_DISKS_IN_MEMBER_INVALID: Final = -1070071723 +ERROR_VOLMGR_VOLUME_MIRRORED: Final = -1070071722 +ERROR_VOLMGR_PLEX_NOT_SIMPLE_SPANNED: Final = -1070071721 +ERROR_VOLMGR_NO_VALID_LOG_COPIES: Final = -1070071720 +ERROR_VOLMGR_PRIMARY_PACK_PRESENT: Final = -1070071719 +ERROR_VOLMGR_NUMBER_OF_DISKS_INVALID: Final = -1070071718 +ERROR_VOLMGR_MIRROR_NOT_SUPPORTED: Final = -1070071717 +ERROR_VOLMGR_RAID5_NOT_SUPPORTED: Final = -1070071716 +ERROR_BCD_NOT_ALL_ENTRIES_IMPORTED: Final = -2143748095 +ERROR_BCD_TOO_MANY_ELEMENTS: Final = -1070006270 +ERROR_BCD_NOT_ALL_ENTRIES_SYNCHRONIZED: Final = -2143748093 +ERROR_VHD_DRIVE_FOOTER_MISSING: Final = -1069940735 +ERROR_VHD_DRIVE_FOOTER_CHECKSUM_MISMATCH: Final = -1069940734 +ERROR_VHD_DRIVE_FOOTER_CORRUPT: Final = -1069940733 +ERROR_VHD_FORMAT_UNKNOWN: Final = -1069940732 +ERROR_VHD_FORMAT_UNSUPPORTED_VERSION: Final = -1069940731 +ERROR_VHD_SPARSE_HEADER_CHECKSUM_MISMATCH: Final = -1069940730 +ERROR_VHD_SPARSE_HEADER_UNSUPPORTED_VERSION: Final = -1069940729 +ERROR_VHD_SPARSE_HEADER_CORRUPT: Final = -1069940728 +ERROR_VHD_BLOCK_ALLOCATION_FAILURE: Final = -1069940727 +ERROR_VHD_BLOCK_ALLOCATION_TABLE_CORRUPT: Final = -1069940726 +ERROR_VHD_INVALID_BLOCK_SIZE: Final = -1069940725 +ERROR_VHD_BITMAP_MISMATCH: Final = -1069940724 +ERROR_VHD_PARENT_VHD_NOT_FOUND: Final = -1069940723 +ERROR_VHD_CHILD_PARENT_ID_MISMATCH: Final = -1069940722 +ERROR_VHD_CHILD_PARENT_TIMESTAMP_MISMATCH: Final = -1069940721 +ERROR_VHD_METADATA_READ_FAILURE: Final = -1069940720 +ERROR_VHD_METADATA_WRITE_FAILURE: Final = -1069940719 +ERROR_VHD_INVALID_SIZE: Final = -1069940718 +ERROR_VHD_INVALID_FILE_SIZE: Final = -1069940717 +ERROR_VIRTDISK_PROVIDER_NOT_FOUND: Final = -1069940716 +ERROR_VIRTDISK_NOT_VIRTUAL_DISK: Final = -1069940715 +ERROR_VHD_PARENT_VHD_ACCESS_DENIED: Final = -1069940714 +ERROR_VHD_CHILD_PARENT_SIZE_MISMATCH: Final = -1069940713 +ERROR_VHD_DIFFERENCING_CHAIN_CYCLE_DETECTED: Final = -1069940712 +ERROR_VHD_DIFFERENCING_CHAIN_ERROR_IN_PARENT: Final = -1069940711 +ERROR_VIRTUAL_DISK_LIMITATION: Final = -1069940710 +ERROR_VHD_INVALID_TYPE: Final = -1069940709 +ERROR_VHD_INVALID_STATE: Final = -1069940708 +ERROR_VIRTDISK_UNSUPPORTED_DISK_SECTOR_SIZE: Final = -1069940707 +ERROR_VIRTDISK_DISK_ALREADY_OWNED: Final = -1069940706 +ERROR_VIRTDISK_DISK_ONLINE_AND_WRITABLE: Final = -1069940705 +ERROR_CTLOG_TRACKING_NOT_INITIALIZED: Final = -1069940704 +ERROR_CTLOG_LOGFILE_SIZE_EXCEEDED_MAXSIZE: Final = -1069940703 +ERROR_CTLOG_VHD_CHANGED_OFFLINE: Final = -1069940702 +ERROR_CTLOG_INVALID_TRACKING_STATE: Final = -1069940701 +ERROR_CTLOG_INCONSISTENT_TRACKING_FILE: Final = -1069940700 +ERROR_VHD_RESIZE_WOULD_TRUNCATE_DATA: Final = -1069940699 +ERROR_VHD_COULD_NOT_COMPUTE_MINIMUM_VIRTUAL_SIZE: Final = -1069940698 +ERROR_VHD_ALREADY_AT_OR_BELOW_MINIMUM_VIRTUAL_SIZE: Final = -1069940697 +ERROR_VHD_METADATA_FULL: Final = -1069940696 +ERROR_VHD_INVALID_CHANGE_TRACKING_ID: Final = -1069940695 +ERROR_VHD_CHANGE_TRACKING_DISABLED: Final = -1069940694 +ERROR_VHD_MISSING_CHANGE_TRACKING_INFORMATION: Final = -1069940688 +ERROR_VHD_UNEXPECTED_ID: Final = -1069940684 +ERROR_QUERY_STORAGE_ERROR: Final = -2143682559 +HCN_E_NETWORK_NOT_FOUND: Final = -2143617023 +HCN_E_ENDPOINT_NOT_FOUND: Final = -2143617022 +HCN_E_LAYER_NOT_FOUND: Final = -2143617021 +HCN_E_SWITCH_NOT_FOUND: Final = -2143617020 +HCN_E_SUBNET_NOT_FOUND: Final = -2143617019 +HCN_E_ADAPTER_NOT_FOUND: Final = -2143617018 +HCN_E_PORT_NOT_FOUND: Final = -2143617017 +HCN_E_POLICY_NOT_FOUND: Final = -2143617016 
+HCN_E_VFP_PORTSETTING_NOT_FOUND: Final = -2143617015 +HCN_E_INVALID_NETWORK: Final = -2143617014 +HCN_E_INVALID_NETWORK_TYPE: Final = -2143617013 +HCN_E_INVALID_ENDPOINT: Final = -2143617012 +HCN_E_INVALID_POLICY: Final = -2143617011 +HCN_E_INVALID_POLICY_TYPE: Final = -2143617010 +HCN_E_INVALID_REMOTE_ENDPOINT_OPERATION: Final = -2143617009 +HCN_E_NETWORK_ALREADY_EXISTS: Final = -2143617008 +HCN_E_LAYER_ALREADY_EXISTS: Final = -2143617007 +HCN_E_POLICY_ALREADY_EXISTS: Final = -2143617006 +HCN_E_PORT_ALREADY_EXISTS: Final = -2143617005 +HCN_E_ENDPOINT_ALREADY_ATTACHED: Final = -2143617004 +HCN_E_REQUEST_UNSUPPORTED: Final = -2143617003 +HCN_E_MAPPING_NOT_SUPPORTED: Final = -2143617002 +HCN_E_DEGRADED_OPERATION: Final = -2143617001 +HCN_E_SHARED_SWITCH_MODIFICATION: Final = -2143617000 +HCN_E_GUID_CONVERSION_FAILURE: Final = -2143616999 +HCN_E_REGKEY_FAILURE: Final = -2143616998 +HCN_E_INVALID_JSON: Final = -2143616997 +HCN_E_INVALID_JSON_REFERENCE: Final = -2143616996 +HCN_E_ENDPOINT_SHARING_DISABLED: Final = -2143616995 +HCN_E_INVALID_IP: Final = -2143616994 +HCN_E_SWITCH_EXTENSION_NOT_FOUND: Final = -2143616993 +HCN_E_MANAGER_STOPPED: Final = -2143616992 +GCN_E_MODULE_NOT_FOUND: Final = -2143616991 +GCN_E_NO_REQUEST_HANDLERS: Final = -2143616990 +GCN_E_REQUEST_UNSUPPORTED: Final = -2143616989 +GCN_E_RUNTIMEKEYS_FAILED: Final = -2143616988 +GCN_E_NETADAPTER_TIMEOUT: Final = -2143616987 +GCN_E_NETADAPTER_NOT_FOUND: Final = -2143616986 +GCN_E_NETCOMPARTMENT_NOT_FOUND: Final = -2143616985 +GCN_E_NETINTERFACE_NOT_FOUND: Final = -2143616984 +GCN_E_DEFAULTNAMESPACE_EXISTS: Final = -2143616983 +HCN_E_ICS_DISABLED: Final = -2143616982 +HCN_E_ENDPOINT_NAMESPACE_ALREADY_EXISTS: Final = -2143616981 +HCN_E_ENTITY_HAS_REFERENCES: Final = -2143616980 +HCN_E_INVALID_INTERNAL_PORT: Final = -2143616979 +HCN_E_NAMESPACE_ATTACH_FAILED: Final = -2143616978 +HCN_E_ADDR_INVALID_OR_RESERVED: Final = -2143616977 +HCN_E_INVALID_PREFIX: Final = -2143616976 +HCN_E_OBJECT_USED_AFTER_UNLOAD: Final = -2143616975 +HCN_E_INVALID_SUBNET: Final = -2143616974 +HCN_E_INVALID_IP_SUBNET: Final = -2143616973 +HCN_E_ENDPOINT_NOT_ATTACHED: Final = -2143616972 +HCN_E_ENDPOINT_NOT_LOCAL: Final = -2143616971 +HCN_INTERFACEPARAMETERS_ALREADY_APPLIED: Final = -2143616970 +HCN_E_VFP_NOT_ALLOWED: Final = -2143616969 +SDIAG_E_CANCELLED: Final = -2143551232 +SDIAG_E_SCRIPT: Final = -2143551231 +SDIAG_E_POWERSHELL: Final = -2143551230 +SDIAG_E_MANAGEDHOST: Final = -2143551229 +SDIAG_E_NOVERIFIER: Final = -2143551228 +SDIAG_S_CANNOTRUN: Final = 0x003C0105 +SDIAG_E_DISABLED: Final = -2143551226 +SDIAG_E_TRUST: Final = -2143551225 +SDIAG_E_CANNOTRUN: Final = -2143551224 +SDIAG_E_VERSION: Final = -2143551223 +SDIAG_E_RESOURCE: Final = -2143551222 +SDIAG_E_ROOTCAUSE: Final = -2143551221 +WPN_E_CHANNEL_CLOSED: Final = -2143420160 +WPN_E_CHANNEL_REQUEST_NOT_COMPLETE: Final = -2143420159 +WPN_E_INVALID_APP: Final = -2143420158 +WPN_E_OUTSTANDING_CHANNEL_REQUEST: Final = -2143420157 +WPN_E_DUPLICATE_CHANNEL: Final = -2143420156 +WPN_E_PLATFORM_UNAVAILABLE: Final = -2143420155 +WPN_E_NOTIFICATION_POSTED: Final = -2143420154 +WPN_E_NOTIFICATION_HIDDEN: Final = -2143420153 +WPN_E_NOTIFICATION_NOT_POSTED: Final = -2143420152 +WPN_E_CLOUD_DISABLED: Final = -2143420151 +WPN_E_CLOUD_INCAPABLE: Final = -2143420144 +WPN_E_CLOUD_AUTH_UNAVAILABLE: Final = -2143420134 +WPN_E_CLOUD_SERVICE_UNAVAILABLE: Final = -2143420133 +WPN_E_FAILED_LOCK_SCREEN_UPDATE_INTIALIZATION: Final = -2143420132 +WPN_E_NOTIFICATION_DISABLED: Final = -2143420143 
+WPN_E_NOTIFICATION_INCAPABLE: Final = -2143420142 +WPN_E_INTERNET_INCAPABLE: Final = -2143420141 +WPN_E_NOTIFICATION_TYPE_DISABLED: Final = -2143420140 +WPN_E_NOTIFICATION_SIZE: Final = -2143420139 +WPN_E_TAG_SIZE: Final = -2143420138 +WPN_E_ACCESS_DENIED: Final = -2143420137 +WPN_E_DUPLICATE_REGISTRATION: Final = -2143420136 +WPN_E_PUSH_NOTIFICATION_INCAPABLE: Final = -2143420135 +WPN_E_DEV_ID_SIZE: Final = -2143420128 +WPN_E_TAG_ALPHANUMERIC: Final = -2143420118 +WPN_E_INVALID_HTTP_STATUS_CODE: Final = -2143420117 +WPN_E_OUT_OF_SESSION: Final = -2143419904 +WPN_E_POWER_SAVE: Final = -2143419903 +WPN_E_IMAGE_NOT_FOUND_IN_CACHE: Final = -2143419902 +WPN_E_ALL_URL_NOT_COMPLETED: Final = -2143419901 +WPN_E_INVALID_CLOUD_IMAGE: Final = -2143419900 +WPN_E_NOTIFICATION_ID_MATCHED: Final = -2143419899 +WPN_E_CALLBACK_ALREADY_REGISTERED: Final = -2143419898 +WPN_E_TOAST_NOTIFICATION_DROPPED: Final = -2143419897 +WPN_E_STORAGE_LOCKED: Final = -2143419896 +WPN_E_GROUP_SIZE: Final = -2143419895 +WPN_E_GROUP_ALPHANUMERIC: Final = -2143419894 +WPN_E_CLOUD_DISABLED_FOR_APP: Final = -2143419893 +E_MBN_CONTEXT_NOT_ACTIVATED: Final = -2141945343 +E_MBN_BAD_SIM: Final = -2141945342 +E_MBN_DATA_CLASS_NOT_AVAILABLE: Final = -2141945341 +E_MBN_INVALID_ACCESS_STRING: Final = -2141945340 +E_MBN_MAX_ACTIVATED_CONTEXTS: Final = -2141945339 +E_MBN_PACKET_SVC_DETACHED: Final = -2141945338 +E_MBN_PROVIDER_NOT_VISIBLE: Final = -2141945337 +E_MBN_RADIO_POWER_OFF: Final = -2141945336 +E_MBN_SERVICE_NOT_ACTIVATED: Final = -2141945335 +E_MBN_SIM_NOT_INSERTED: Final = -2141945334 +E_MBN_VOICE_CALL_IN_PROGRESS: Final = -2141945333 +E_MBN_INVALID_CACHE: Final = -2141945332 +E_MBN_NOT_REGISTERED: Final = -2141945331 +E_MBN_PROVIDERS_NOT_FOUND: Final = -2141945330 +E_MBN_PIN_NOT_SUPPORTED: Final = -2141945329 +E_MBN_PIN_REQUIRED: Final = -2141945328 +E_MBN_PIN_DISABLED: Final = -2141945327 +E_MBN_FAILURE: Final = -2141945326 +E_MBN_INVALID_PROFILE: Final = -2141945320 +E_MBN_DEFAULT_PROFILE_EXIST: Final = -2141945319 +E_MBN_SMS_ENCODING_NOT_SUPPORTED: Final = -2141945312 +E_MBN_SMS_FILTER_NOT_SUPPORTED: Final = -2141945311 +E_MBN_SMS_INVALID_MEMORY_INDEX: Final = -2141945310 +E_MBN_SMS_LANG_NOT_SUPPORTED: Final = -2141945309 +E_MBN_SMS_MEMORY_FAILURE: Final = -2141945308 +E_MBN_SMS_NETWORK_TIMEOUT: Final = -2141945307 +E_MBN_SMS_UNKNOWN_SMSC_ADDRESS: Final = -2141945306 +E_MBN_SMS_FORMAT_NOT_SUPPORTED: Final = -2141945305 +E_MBN_SMS_OPERATION_NOT_ALLOWED: Final = -2141945304 +E_MBN_SMS_MEMORY_FULL: Final = -2141945303 +PEER_E_IPV6_NOT_INSTALLED: Final = -2140995583 +PEER_E_NOT_INITIALIZED: Final = -2140995582 +PEER_E_CANNOT_START_SERVICE: Final = -2140995581 +PEER_E_NOT_LICENSED: Final = -2140995580 +PEER_E_INVALID_GRAPH: Final = -2140995568 +PEER_E_DBNAME_CHANGED: Final = -2140995567 +PEER_E_DUPLICATE_GRAPH: Final = -2140995566 +PEER_E_GRAPH_NOT_READY: Final = -2140995565 +PEER_E_GRAPH_SHUTTING_DOWN: Final = -2140995564 +PEER_E_GRAPH_IN_USE: Final = -2140995563 +PEER_E_INVALID_DATABASE: Final = -2140995562 +PEER_E_TOO_MANY_ATTRIBUTES: Final = -2140995561 +PEER_E_CONNECTION_NOT_FOUND: Final = -2140995325 +PEER_E_CONNECT_SELF: Final = -2140995322 +PEER_E_ALREADY_LISTENING: Final = -2140995321 +PEER_E_NODE_NOT_FOUND: Final = -2140995320 +PEER_E_CONNECTION_FAILED: Final = -2140995319 +PEER_E_CONNECTION_NOT_AUTHENTICATED: Final = -2140995318 +PEER_E_CONNECTION_REFUSED: Final = -2140995317 +PEER_E_CLASSIFIER_TOO_LONG: Final = -2140995071 +PEER_E_TOO_MANY_IDENTITIES: Final = -2140995070 +PEER_E_NO_KEY_ACCESS: Final = -2140995069 
+PEER_E_GROUPS_EXIST: Final = -2140995068 +PEER_E_RECORD_NOT_FOUND: Final = -2140994815 +PEER_E_DATABASE_ACCESSDENIED: Final = -2140994814 +PEER_E_DBINITIALIZATION_FAILED: Final = -2140994813 +PEER_E_MAX_RECORD_SIZE_EXCEEDED: Final = -2140994812 +PEER_E_DATABASE_ALREADY_PRESENT: Final = -2140994811 +PEER_E_DATABASE_NOT_PRESENT: Final = -2140994810 +PEER_E_IDENTITY_NOT_FOUND: Final = -2140994559 +PEER_E_EVENT_HANDLE_NOT_FOUND: Final = -2140994303 +PEER_E_INVALID_SEARCH: Final = -2140994047 +PEER_E_INVALID_ATTRIBUTES: Final = -2140994046 +PEER_E_INVITATION_NOT_TRUSTED: Final = -2140993791 +PEER_E_CHAIN_TOO_LONG: Final = -2140993789 +PEER_E_INVALID_TIME_PERIOD: Final = -2140993787 +PEER_E_CIRCULAR_CHAIN_DETECTED: Final = -2140993786 +PEER_E_CERT_STORE_CORRUPTED: Final = -2140993535 +PEER_E_NO_CLOUD: Final = -2140991487 +PEER_E_CLOUD_NAME_AMBIGUOUS: Final = -2140991483 +PEER_E_INVALID_RECORD: Final = -2140987376 +PEER_E_NOT_AUTHORIZED: Final = -2140987360 +PEER_E_PASSWORD_DOES_NOT_MEET_POLICY: Final = -2140987359 +PEER_E_DEFERRED_VALIDATION: Final = -2140987344 +PEER_E_INVALID_GROUP_PROPERTIES: Final = -2140987328 +PEER_E_INVALID_PEER_NAME: Final = -2140987312 +PEER_E_INVALID_CLASSIFIER: Final = -2140987296 +PEER_E_INVALID_FRIENDLY_NAME: Final = -2140987280 +PEER_E_INVALID_ROLE_PROPERTY: Final = -2140987279 +PEER_E_INVALID_CLASSIFIER_PROPERTY: Final = -2140987278 +PEER_E_INVALID_RECORD_EXPIRATION: Final = -2140987264 +PEER_E_INVALID_CREDENTIAL_INFO: Final = -2140987263 +PEER_E_INVALID_CREDENTIAL: Final = -2140987262 +PEER_E_INVALID_RECORD_SIZE: Final = -2140987261 +PEER_E_UNSUPPORTED_VERSION: Final = -2140987248 +PEER_E_GROUP_NOT_READY: Final = -2140987247 +PEER_E_GROUP_IN_USE: Final = -2140987246 +PEER_E_INVALID_GROUP: Final = -2140987245 +PEER_E_NO_MEMBERS_FOUND: Final = -2140987244 +PEER_E_NO_MEMBER_CONNECTIONS: Final = -2140987243 +PEER_E_UNABLE_TO_LISTEN: Final = -2140987242 +PEER_E_IDENTITY_DELETED: Final = -2140987232 +PEER_E_SERVICE_NOT_AVAILABLE: Final = -2140987231 +PEER_E_CONTACT_NOT_FOUND: Final = -2140971007 +PEER_S_GRAPH_DATA_CREATED: Final = 0x00630001 +PEER_S_NO_EVENT_DATA: Final = 0x00630002 +PEER_S_ALREADY_CONNECTED: Final = 0x00632000 +PEER_S_SUBSCRIPTION_EXISTS: Final = 0x00636000 +PEER_S_NO_CONNECTIVITY: Final = 0x00630005 +PEER_S_ALREADY_A_MEMBER: Final = 0x00630006 +PEER_E_CANNOT_CONVERT_PEER_NAME: Final = -2140979199 +PEER_E_INVALID_PEER_HOST_NAME: Final = -2140979198 +PEER_E_NO_MORE: Final = -2140979197 +PEER_E_PNRP_DUPLICATE_PEER_NAME: Final = -2140979195 +PEER_E_INVITE_CANCELLED: Final = -2140966912 +PEER_E_INVITE_RESPONSE_NOT_AVAILABLE: Final = -2140966911 +PEER_E_NOT_SIGNED_IN: Final = -2140966909 +PEER_E_PRIVACY_DECLINED: Final = -2140966908 +PEER_E_TIMEOUT: Final = -2140966907 +PEER_E_INVALID_ADDRESS: Final = -2140966905 +PEER_E_FW_EXCEPTION_DISABLED: Final = -2140966904 +PEER_E_FW_BLOCKED_BY_POLICY: Final = -2140966903 +PEER_E_FW_BLOCKED_BY_SHIELDS_UP: Final = -2140966902 +PEER_E_FW_DECLINED: Final = -2140966901 +UI_E_CREATE_FAILED: Final = -2144731135 +UI_E_SHUTDOWN_CALLED: Final = -2144731134 +UI_E_ILLEGAL_REENTRANCY: Final = -2144731133 +UI_E_OBJECT_SEALED: Final = -2144731132 +UI_E_VALUE_NOT_SET: Final = -2144731131 +UI_E_VALUE_NOT_DETERMINED: Final = -2144731130 +UI_E_INVALID_OUTPUT: Final = -2144731129 +UI_E_BOOLEAN_EXPECTED: Final = -2144731128 +UI_E_DIFFERENT_OWNER: Final = -2144731127 +UI_E_AMBIGUOUS_MATCH: Final = -2144731126 +UI_E_FP_OVERFLOW: Final = -2144731125 +UI_E_WRONG_THREAD: Final = -2144731124 +UI_E_STORYBOARD_ACTIVE: Final = -2144730879 
+UI_E_STORYBOARD_NOT_PLAYING: Final = -2144730878 +UI_E_START_KEYFRAME_AFTER_END: Final = -2144730877 +UI_E_END_KEYFRAME_NOT_DETERMINED: Final = -2144730876 +UI_E_LOOPS_OVERLAP: Final = -2144730875 +UI_E_TRANSITION_ALREADY_USED: Final = -2144730874 +UI_E_TRANSITION_NOT_IN_STORYBOARD: Final = -2144730873 +UI_E_TRANSITION_ECLIPSED: Final = -2144730872 +UI_E_TIME_BEFORE_LAST_UPDATE: Final = -2144730871 +UI_E_TIMER_CLIENT_ALREADY_CONNECTED: Final = -2144730870 +UI_E_INVALID_DIMENSION: Final = -2144730869 +UI_E_PRIMITIVE_OUT_OF_BOUNDS: Final = -2144730868 +UI_E_WINDOW_CLOSED: Final = -2144730623 +E_BLUETOOTH_ATT_INVALID_HANDLE: Final = -2140864511 +E_BLUETOOTH_ATT_READ_NOT_PERMITTED: Final = -2140864510 +E_BLUETOOTH_ATT_WRITE_NOT_PERMITTED: Final = -2140864509 +E_BLUETOOTH_ATT_INVALID_PDU: Final = -2140864508 +E_BLUETOOTH_ATT_INSUFFICIENT_AUTHENTICATION: Final = -2140864507 +E_BLUETOOTH_ATT_REQUEST_NOT_SUPPORTED: Final = -2140864506 +E_BLUETOOTH_ATT_INVALID_OFFSET: Final = -2140864505 +E_BLUETOOTH_ATT_INSUFFICIENT_AUTHORIZATION: Final = -2140864504 +E_BLUETOOTH_ATT_PREPARE_QUEUE_FULL: Final = -2140864503 +E_BLUETOOTH_ATT_ATTRIBUTE_NOT_FOUND: Final = -2140864502 +E_BLUETOOTH_ATT_ATTRIBUTE_NOT_LONG: Final = -2140864501 +E_BLUETOOTH_ATT_INSUFFICIENT_ENCRYPTION_KEY_SIZE: Final = -2140864500 +E_BLUETOOTH_ATT_INVALID_ATTRIBUTE_VALUE_LENGTH: Final = -2140864499 +E_BLUETOOTH_ATT_UNLIKELY: Final = -2140864498 +E_BLUETOOTH_ATT_INSUFFICIENT_ENCRYPTION: Final = -2140864497 +E_BLUETOOTH_ATT_UNSUPPORTED_GROUP_TYPE: Final = -2140864496 +E_BLUETOOTH_ATT_INSUFFICIENT_RESOURCES: Final = -2140864495 +E_BLUETOOTH_ATT_UNKNOWN_ERROR: Final = -2140860416 +E_AUDIO_ENGINE_NODE_NOT_FOUND: Final = -2140798975 +E_HDAUDIO_EMPTY_CONNECTION_LIST: Final = -2140798974 +E_HDAUDIO_CONNECTION_LIST_NOT_SUPPORTED: Final = -2140798973 +E_HDAUDIO_NO_LOGICAL_DEVICES_CREATED: Final = -2140798972 +E_HDAUDIO_NULL_LINKED_LIST_ENTRY: Final = -2140798971 +STATEREPOSITORY_E_CONCURRENCY_LOCKING_FAILURE: Final = -2140733439 +STATEREPOSITORY_E_STATEMENT_INPROGRESS: Final = -2140733438 +STATEREPOSITORY_E_CONFIGURATION_INVALID: Final = -2140733437 +STATEREPOSITORY_E_UNKNOWN_SCHEMA_VERSION: Final = -2140733436 +STATEREPOSITORY_ERROR_DICTIONARY_CORRUPTED: Final = -2140733435 +STATEREPOSITORY_E_BLOCKED: Final = -2140733434 +STATEREPOSITORY_E_BUSY_RETRY: Final = -2140733433 +STATEREPOSITORY_E_BUSY_RECOVERY_RETRY: Final = -2140733432 +STATEREPOSITORY_E_LOCKED_RETRY: Final = -2140733431 +STATEREPOSITORY_E_LOCKED_SHAREDCACHE_RETRY: Final = -2140733430 +STATEREPOSITORY_E_TRANSACTION_REQUIRED: Final = -2140733429 +STATEREPOSITORY_E_BUSY_TIMEOUT_EXCEEDED: Final = -2140733428 +STATEREPOSITORY_E_BUSY_RECOVERY_TIMEOUT_EXCEEDED: Final = -2140733427 +STATEREPOSITORY_E_LOCKED_TIMEOUT_EXCEEDED: Final = -2140733426 +STATEREPOSITORY_E_LOCKED_SHAREDCACHE_TIMEOUT_EXCEEDED: Final = -2140733425 +STATEREPOSITORY_E_SERVICE_STOP_IN_PROGRESS: Final = -2140733424 +STATEREPOSTORY_E_NESTED_TRANSACTION_NOT_SUPPORTED: Final = -2140733423 +STATEREPOSITORY_ERROR_CACHE_CORRUPTED: Final = -2140733422 +STATEREPOSITORY_TRANSACTION_CALLER_ID_CHANGED: Final = 0x00670013 +STATEREPOSITORY_TRANSACTION_IN_PROGRESS: Final = -2140733420 +STATEREPOSITORY_E_CACHE_NOT_INIITALIZED: Final = -2140733419 +STATEREPOSITORY_E_DEPENDENCY_NOT_RESOLVED: Final = -2140733418 +ERROR_SPACES_POOL_WAS_DELETED: Final = 0x00E70001 +ERROR_SPACES_FAULT_DOMAIN_TYPE_INVALID: Final = -2132344831 +ERROR_SPACES_INTERNAL_ERROR: Final = -2132344830 +ERROR_SPACES_RESILIENCY_TYPE_INVALID: Final = -2132344829 
+ERROR_SPACES_DRIVE_SECTOR_SIZE_INVALID: Final = -2132344828 +ERROR_SPACES_DRIVE_REDUNDANCY_INVALID: Final = -2132344826 +ERROR_SPACES_NUMBER_OF_DATA_COPIES_INVALID: Final = -2132344825 +ERROR_SPACES_PARITY_LAYOUT_INVALID: Final = -2132344824 +ERROR_SPACES_INTERLEAVE_LENGTH_INVALID: Final = -2132344823 +ERROR_SPACES_NUMBER_OF_COLUMNS_INVALID: Final = -2132344822 +ERROR_SPACES_NOT_ENOUGH_DRIVES: Final = -2132344821 +ERROR_SPACES_EXTENDED_ERROR: Final = -2132344820 +ERROR_SPACES_PROVISIONING_TYPE_INVALID: Final = -2132344819 +ERROR_SPACES_ALLOCATION_SIZE_INVALID: Final = -2132344818 +ERROR_SPACES_ENCLOSURE_AWARE_INVALID: Final = -2132344817 +ERROR_SPACES_WRITE_CACHE_SIZE_INVALID: Final = -2132344816 +ERROR_SPACES_NUMBER_OF_GROUPS_INVALID: Final = -2132344815 +ERROR_SPACES_DRIVE_OPERATIONAL_STATE_INVALID: Final = -2132344814 +ERROR_SPACES_ENTRY_INCOMPLETE: Final = -2132344813 +ERROR_SPACES_ENTRY_INVALID: Final = -2132344812 +ERROR_SPACES_UPDATE_COLUMN_STATE: Final = -2132344811 +ERROR_SPACES_MAP_REQUIRED: Final = -2132344810 +ERROR_SPACES_UNSUPPORTED_VERSION: Final = -2132344809 +ERROR_SPACES_CORRUPT_METADATA: Final = -2132344808 +ERROR_SPACES_DRT_FULL: Final = -2132344807 +ERROR_SPACES_INCONSISTENCY: Final = -2132344806 +ERROR_SPACES_LOG_NOT_READY: Final = -2132344805 +ERROR_SPACES_NO_REDUNDANCY: Final = -2132344804 +ERROR_SPACES_DRIVE_NOT_READY: Final = -2132344803 +ERROR_SPACES_DRIVE_SPLIT: Final = -2132344802 +ERROR_SPACES_DRIVE_LOST_DATA: Final = -2132344801 +ERROR_SPACES_MARK_DIRTY: Final = -2132344800 +ERROR_SPACES_FLUSH_METADATA: Final = -2132344795 +ERROR_SPACES_CACHE_FULL: Final = -2132344794 +ERROR_SPACES_REPAIR_IN_PROGRESS: Final = -2132344793 +ERROR_VOLSNAP_BOOTFILE_NOT_VALID: Final = -2138963967 +ERROR_VOLSNAP_ACTIVATION_TIMEOUT: Final = -2138963966 +ERROR_VOLSNAP_NO_BYPASSIO_WITH_SNAPSHOT: Final = -2138963965 +ERROR_TIERING_NOT_SUPPORTED_ON_VOLUME: Final = -2138898431 +ERROR_TIERING_VOLUME_DISMOUNT_IN_PROGRESS: Final = -2138898430 +ERROR_TIERING_STORAGE_TIER_NOT_FOUND: Final = -2138898429 +ERROR_TIERING_INVALID_FILE_ID: Final = -2138898428 +ERROR_TIERING_WRONG_CLUSTER_NODE: Final = -2138898427 +ERROR_TIERING_ALREADY_PROCESSING: Final = -2138898426 +ERROR_TIERING_CANNOT_PIN_OBJECT: Final = -2138898425 +ERROR_TIERING_FILE_IS_NOT_PINNED: Final = -2138898424 +ERROR_NOT_A_TIERED_VOLUME: Final = -2138898423 +ERROR_ATTRIBUTE_NOT_PRESENT: Final = -2138898422 +ERROR_SECCORE_INVALID_COMMAND: Final = -1058537472 +ERROR_NO_APPLICABLE_APP_LICENSES_FOUND: Final = -1058406399 +ERROR_CLIP_LICENSE_NOT_FOUND: Final = -1058406398 +ERROR_CLIP_DEVICE_LICENSE_MISSING: Final = -1058406397 +ERROR_CLIP_LICENSE_INVALID_SIGNATURE: Final = -1058406396 +ERROR_CLIP_KEYHOLDER_LICENSE_MISSING_OR_INVALID: Final = -1058406395 +ERROR_CLIP_LICENSE_EXPIRED: Final = -1058406394 +ERROR_CLIP_LICENSE_SIGNED_BY_UNKNOWN_SOURCE: Final = -1058406393 +ERROR_CLIP_LICENSE_NOT_SIGNED: Final = -1058406392 +ERROR_CLIP_LICENSE_HARDWARE_ID_OUT_OF_TOLERANCE: Final = -1058406391 +ERROR_CLIP_LICENSE_DEVICE_ID_MISMATCH: Final = -1058406390 +DXGI_STATUS_OCCLUDED: Final = 0x087A0001 +DXGI_STATUS_CLIPPED: Final = 0x087A0002 +DXGI_STATUS_NO_REDIRECTION: Final = 0x087A0004 +DXGI_STATUS_NO_DESKTOP_ACCESS: Final = 0x087A0005 +DXGI_STATUS_GRAPHICS_VIDPN_SOURCE_IN_USE: Final = 0x087A0006 +DXGI_STATUS_MODE_CHANGED: Final = 0x087A0007 +DXGI_STATUS_MODE_CHANGE_IN_PROGRESS: Final = 0x087A0008 +DXGI_ERROR_INVALID_CALL: Final = -2005270527 +DXGI_ERROR_NOT_FOUND: Final = -2005270526 +DXGI_ERROR_MORE_DATA: Final = -2005270525 +DXGI_ERROR_UNSUPPORTED: 
Final = -2005270524 +DXGI_ERROR_DEVICE_REMOVED: Final = -2005270523 +DXGI_ERROR_DEVICE_HUNG: Final = -2005270522 +DXGI_ERROR_DEVICE_RESET: Final = -2005270521 +DXGI_ERROR_WAS_STILL_DRAWING: Final = -2005270518 +DXGI_ERROR_FRAME_STATISTICS_DISJOINT: Final = -2005270517 +DXGI_ERROR_GRAPHICS_VIDPN_SOURCE_IN_USE: Final = -2005270516 +DXGI_ERROR_DRIVER_INTERNAL_ERROR: Final = -2005270496 +DXGI_ERROR_NONEXCLUSIVE: Final = -2005270495 +DXGI_ERROR_NOT_CURRENTLY_AVAILABLE: Final = -2005270494 +DXGI_ERROR_REMOTE_CLIENT_DISCONNECTED: Final = -2005270493 +DXGI_ERROR_REMOTE_OUTOFMEMORY: Final = -2005270492 +DXGI_ERROR_ACCESS_LOST: Final = -2005270490 +DXGI_ERROR_WAIT_TIMEOUT: Final = -2005270489 +DXGI_ERROR_SESSION_DISCONNECTED: Final = -2005270488 +DXGI_ERROR_RESTRICT_TO_OUTPUT_STALE: Final = -2005270487 +DXGI_ERROR_CANNOT_PROTECT_CONTENT: Final = -2005270486 +DXGI_ERROR_ACCESS_DENIED: Final = -2005270485 +DXGI_ERROR_NAME_ALREADY_EXISTS: Final = -2005270484 +DXGI_ERROR_SDK_COMPONENT_MISSING: Final = -2005270483 +DXGI_ERROR_NOT_CURRENT: Final = -2005270482 +DXGI_ERROR_HW_PROTECTION_OUTOFMEMORY: Final = -2005270480 +DXGI_ERROR_DYNAMIC_CODE_POLICY_VIOLATION: Final = -2005270479 +DXGI_ERROR_NON_COMPOSITED_UI: Final = -2005270478 +DXCORE_ERROR_EVENT_NOT_UNREGISTERED: Final = -2004877311 +PRESENTATION_ERROR_LOST: Final = -2004811775 +DXGI_STATUS_UNOCCLUDED: Final = 0x087A0009 +DXGI_STATUS_DDA_WAS_STILL_DRAWING: Final = 0x087A000A +DXGI_ERROR_MODE_CHANGE_IN_PROGRESS: Final = -2005270491 +DXGI_STATUS_PRESENT_REQUIRED: Final = 0x087A002F +DXGI_ERROR_CACHE_CORRUPT: Final = -2005270477 +DXGI_ERROR_CACHE_FULL: Final = -2005270476 +DXGI_ERROR_CACHE_HASH_COLLISION: Final = -2005270475 +DXGI_ERROR_ALREADY_EXISTS: Final = -2005270474 +DXGI_ERROR_MPO_UNPINNED: Final = -2005270428 +DXGI_DDI_ERR_WASSTILLDRAWING: Final = -2005204991 +DXGI_DDI_ERR_UNSUPPORTED: Final = -2005204990 +DXGI_DDI_ERR_NONEXCLUSIVE: Final = -2005204989 +D3D10_ERROR_TOO_MANY_UNIQUE_STATE_OBJECTS: Final = -2005336063 +D3D10_ERROR_FILE_NOT_FOUND: Final = -2005336062 +D3D11_ERROR_TOO_MANY_UNIQUE_STATE_OBJECTS: Final = -2005139455 +D3D11_ERROR_FILE_NOT_FOUND: Final = -2005139454 +D3D11_ERROR_TOO_MANY_UNIQUE_VIEW_OBJECTS: Final = -2005139453 +D3D11_ERROR_DEFERRED_CONTEXT_MAP_WITHOUT_INITIAL_DISCARD: Final = -2005139452 +D3D12_ERROR_ADAPTER_NOT_FOUND: Final = -2005008383 +D3D12_ERROR_DRIVER_VERSION_MISMATCH: Final = -2005008382 +D3D12_ERROR_INVALID_REDIST: Final = -2005008381 +D2DERR_WRONG_STATE: Final = -2003238911 +D2DERR_NOT_INITIALIZED: Final = -2003238910 +D2DERR_UNSUPPORTED_OPERATION: Final = -2003238909 +D2DERR_SCANNER_FAILED: Final = -2003238908 +D2DERR_SCREEN_ACCESS_DENIED: Final = -2003238907 +D2DERR_DISPLAY_STATE_INVALID: Final = -2003238906 +D2DERR_ZERO_VECTOR: Final = -2003238905 +D2DERR_INTERNAL_ERROR: Final = -2003238904 +D2DERR_DISPLAY_FORMAT_NOT_SUPPORTED: Final = -2003238903 +D2DERR_INVALID_CALL: Final = -2003238902 +D2DERR_NO_HARDWARE_DEVICE: Final = -2003238901 +D2DERR_RECREATE_TARGET: Final = -2003238900 +D2DERR_TOO_MANY_SHADER_ELEMENTS: Final = -2003238899 +D2DERR_SHADER_COMPILE_FAILED: Final = -2003238898 +D2DERR_MAX_TEXTURE_SIZE_EXCEEDED: Final = -2003238897 +D2DERR_UNSUPPORTED_VERSION: Final = -2003238896 +D2DERR_BAD_NUMBER: Final = -2003238895 +D2DERR_WRONG_FACTORY: Final = -2003238894 +D2DERR_LAYER_ALREADY_IN_USE: Final = -2003238893 +D2DERR_POP_CALL_DID_NOT_MATCH_PUSH: Final = -2003238892 +D2DERR_WRONG_RESOURCE_DOMAIN: Final = -2003238891 +D2DERR_PUSH_POP_UNBALANCED: Final = -2003238890 
+D2DERR_RENDER_TARGET_HAS_LAYER_OR_CLIPRECT: Final = -2003238889 +D2DERR_INCOMPATIBLE_BRUSH_TYPES: Final = -2003238888 +D2DERR_WIN32_ERROR: Final = -2003238887 +D2DERR_TARGET_NOT_GDI_COMPATIBLE: Final = -2003238886 +D2DERR_TEXT_EFFECT_IS_WRONG_TYPE: Final = -2003238885 +D2DERR_TEXT_RENDERER_NOT_RELEASED: Final = -2003238884 +D2DERR_EXCEEDS_MAX_BITMAP_SIZE: Final = -2003238883 +D2DERR_INVALID_GRAPH_CONFIGURATION: Final = -2003238882 +D2DERR_INVALID_INTERNAL_GRAPH_CONFIGURATION: Final = -2003238881 +D2DERR_CYCLIC_GRAPH: Final = -2003238880 +D2DERR_BITMAP_CANNOT_DRAW: Final = -2003238879 +D2DERR_OUTSTANDING_BITMAP_REFERENCES: Final = -2003238878 +D2DERR_ORIGINAL_TARGET_NOT_BOUND: Final = -2003238877 +D2DERR_INVALID_TARGET: Final = -2003238876 +D2DERR_BITMAP_BOUND_AS_TARGET: Final = -2003238875 +D2DERR_INSUFFICIENT_DEVICE_CAPABILITIES: Final = -2003238874 +D2DERR_INTERMEDIATE_TOO_LARGE: Final = -2003238873 +D2DERR_EFFECT_IS_NOT_REGISTERED: Final = -2003238872 +D2DERR_INVALID_PROPERTY: Final = -2003238871 +D2DERR_NO_SUBPROPERTIES: Final = -2003238870 +D2DERR_PRINT_JOB_CLOSED: Final = -2003238869 +D2DERR_PRINT_FORMAT_NOT_SUPPORTED: Final = -2003238868 +D2DERR_TOO_MANY_TRANSFORM_INPUTS: Final = -2003238867 +D2DERR_INVALID_GLYPH_IMAGE: Final = -2003238866 +DWRITE_E_FILEFORMAT: Final = -2003283968 +DWRITE_E_UNEXPECTED: Final = -2003283967 +DWRITE_E_NOFONT: Final = -2003283966 +DWRITE_E_FILENOTFOUND: Final = -2003283965 +DWRITE_E_FILEACCESS: Final = -2003283964 +DWRITE_E_FONTCOLLECTIONOBSOLETE: Final = -2003283963 +DWRITE_E_ALREADYREGISTERED: Final = -2003283962 +DWRITE_E_CACHEFORMAT: Final = -2003283961 +DWRITE_E_CACHEVERSION: Final = -2003283960 +DWRITE_E_UNSUPPORTEDOPERATION: Final = -2003283959 +DWRITE_E_TEXTRENDERERINCOMPATIBLE: Final = -2003283958 +DWRITE_E_FLOWDIRECTIONCONFLICTS: Final = -2003283957 +DWRITE_E_NOCOLOR: Final = -2003283956 +DWRITE_E_REMOTEFONT: Final = -2003283955 +DWRITE_E_DOWNLOADCANCELLED: Final = -2003283954 +DWRITE_E_DOWNLOADFAILED: Final = -2003283953 +DWRITE_E_TOOMANYDOWNLOADS: Final = -2003283952 +WINCODEC_ERR_WRONGSTATE: Final = -2003292412 +WINCODEC_ERR_VALUEOUTOFRANGE: Final = -2003292411 +WINCODEC_ERR_UNKNOWNIMAGEFORMAT: Final = -2003292409 +WINCODEC_ERR_UNSUPPORTEDVERSION: Final = -2003292405 +WINCODEC_ERR_NOTINITIALIZED: Final = -2003292404 +WINCODEC_ERR_ALREADYLOCKED: Final = -2003292403 +WINCODEC_ERR_PROPERTYNOTFOUND: Final = -2003292352 +WINCODEC_ERR_PROPERTYNOTSUPPORTED: Final = -2003292351 +WINCODEC_ERR_PROPERTYSIZE: Final = -2003292350 +WINCODEC_ERR_CODECPRESENT: Final = -2003292349 +WINCODEC_ERR_CODECNOTHUMBNAIL: Final = -2003292348 +WINCODEC_ERR_PALETTEUNAVAILABLE: Final = -2003292347 +WINCODEC_ERR_CODECTOOMANYSCANLINES: Final = -2003292346 +WINCODEC_ERR_INTERNALERROR: Final = -2003292344 +WINCODEC_ERR_SOURCERECTDOESNOTMATCHDIMENSIONS: Final = -2003292343 +WINCODEC_ERR_COMPONENTNOTFOUND: Final = -2003292336 +WINCODEC_ERR_IMAGESIZEOUTOFRANGE: Final = -2003292335 +WINCODEC_ERR_TOOMUCHMETADATA: Final = -2003292334 +WINCODEC_ERR_BADIMAGE: Final = -2003292320 +WINCODEC_ERR_BADHEADER: Final = -2003292319 +WINCODEC_ERR_FRAMEMISSING: Final = -2003292318 +WINCODEC_ERR_BADMETADATAHEADER: Final = -2003292317 +WINCODEC_ERR_BADSTREAMDATA: Final = -2003292304 +WINCODEC_ERR_STREAMWRITE: Final = -2003292303 +WINCODEC_ERR_STREAMREAD: Final = -2003292302 +WINCODEC_ERR_STREAMNOTAVAILABLE: Final = -2003292301 +WINCODEC_ERR_UNSUPPORTEDPIXELFORMAT: Final = -2003292288 +WINCODEC_ERR_UNSUPPORTEDOPERATION: Final = -2003292287 +WINCODEC_ERR_INVALIDREGISTRATION: Final = -2003292278 
+WINCODEC_ERR_COMPONENTINITIALIZEFAILURE: Final = -2003292277 +WINCODEC_ERR_INSUFFICIENTBUFFER: Final = -2003292276 +WINCODEC_ERR_DUPLICATEMETADATAPRESENT: Final = -2003292275 +WINCODEC_ERR_PROPERTYUNEXPECTEDTYPE: Final = -2003292274 +WINCODEC_ERR_UNEXPECTEDSIZE: Final = -2003292273 +WINCODEC_ERR_INVALIDQUERYREQUEST: Final = -2003292272 +WINCODEC_ERR_UNEXPECTEDMETADATATYPE: Final = -2003292271 +WINCODEC_ERR_REQUESTONLYVALIDATMETADATAROOT: Final = -2003292270 +WINCODEC_ERR_INVALIDQUERYCHARACTER: Final = -2003292269 +WINCODEC_ERR_WIN32ERROR: Final = -2003292268 +WINCODEC_ERR_INVALIDPROGRESSIVELEVEL: Final = -2003292267 +WINCODEC_ERR_INVALIDJPEGSCANINDEX: Final = -2003292266 +MILERR_OBJECTBUSY: Final = -2003304447 +MILERR_INSUFFICIENTBUFFER: Final = -2003304446 +MILERR_WIN32ERROR: Final = -2003304445 +MILERR_SCANNER_FAILED: Final = -2003304444 +MILERR_SCREENACCESSDENIED: Final = -2003304443 +MILERR_DISPLAYSTATEINVALID: Final = -2003304442 +MILERR_NONINVERTIBLEMATRIX: Final = -2003304441 +MILERR_ZEROVECTOR: Final = -2003304440 +MILERR_TERMINATED: Final = -2003304439 +MILERR_BADNUMBER: Final = -2003304438 +MILERR_INTERNALERROR: Final = -2003304320 +MILERR_DISPLAYFORMATNOTSUPPORTED: Final = -2003304316 +MILERR_INVALIDCALL: Final = -2003304315 +MILERR_ALREADYLOCKED: Final = -2003304314 +MILERR_NOTLOCKED: Final = -2003304313 +MILERR_DEVICECANNOTRENDERTEXT: Final = -2003304312 +MILERR_GLYPHBITMAPMISSED: Final = -2003304311 +MILERR_MALFORMEDGLYPHCACHE: Final = -2003304310 +MILERR_GENERIC_IGNORE: Final = -2003304309 +MILERR_MALFORMED_GUIDELINE_DATA: Final = -2003304308 +MILERR_NO_HARDWARE_DEVICE: Final = -2003304307 +MILERR_NEED_RECREATE_AND_PRESENT: Final = -2003304306 +MILERR_ALREADY_INITIALIZED: Final = -2003304305 +MILERR_MISMATCHED_SIZE: Final = -2003304304 +MILERR_NO_REDIRECTION_SURFACE_AVAILABLE: Final = -2003304303 +MILERR_REMOTING_NOT_SUPPORTED: Final = -2003304302 +MILERR_QUEUED_PRESENT_NOT_SUPPORTED: Final = -2003304301 +MILERR_NOT_QUEUING_PRESENTS: Final = -2003304300 +MILERR_NO_REDIRECTION_SURFACE_RETRY_LATER: Final = -2003304299 +MILERR_TOOMANYSHADERELEMNTS: Final = -2003304298 +MILERR_MROW_READLOCK_FAILED: Final = -2003304297 +MILERR_MROW_UPDATE_FAILED: Final = -2003304296 +MILERR_SHADER_COMPILE_FAILED: Final = -2003304295 +MILERR_MAX_TEXTURE_SIZE_EXCEEDED: Final = -2003304294 +MILERR_QPC_TIME_WENT_BACKWARD: Final = -2003304293 +MILERR_DXGI_ENUMERATION_OUT_OF_SYNC: Final = -2003304291 +MILERR_ADAPTER_NOT_FOUND: Final = -2003304290 +MILERR_COLORSPACE_NOT_SUPPORTED: Final = -2003304289 +MILERR_PREFILTER_NOT_SUPPORTED: Final = -2003304288 +MILERR_DISPLAYID_ACCESS_DENIED: Final = -2003304287 +UCEERR_INVALIDPACKETHEADER: Final = -2003303424 +UCEERR_UNKNOWNPACKET: Final = -2003303423 +UCEERR_ILLEGALPACKET: Final = -2003303422 +UCEERR_MALFORMEDPACKET: Final = -2003303421 +UCEERR_ILLEGALHANDLE: Final = -2003303420 +UCEERR_HANDLELOOKUPFAILED: Final = -2003303419 +UCEERR_RENDERTHREADFAILURE: Final = -2003303418 +UCEERR_CTXSTACKFRSTTARGETNULL: Final = -2003303417 +UCEERR_CONNECTIONIDLOOKUPFAILED: Final = -2003303416 +UCEERR_BLOCKSFULL: Final = -2003303415 +UCEERR_MEMORYFAILURE: Final = -2003303414 +UCEERR_PACKETRECORDOUTOFRANGE: Final = -2003303413 +UCEERR_ILLEGALRECORDTYPE: Final = -2003303412 +UCEERR_OUTOFHANDLES: Final = -2003303411 +UCEERR_UNCHANGABLE_UPDATE_ATTEMPTED: Final = -2003303410 +UCEERR_NO_MULTIPLE_WORKER_THREADS: Final = -2003303409 +UCEERR_REMOTINGNOTSUPPORTED: Final = -2003303408 +UCEERR_MISSINGENDCOMMAND: Final = -2003303407 +UCEERR_MISSINGBEGINCOMMAND: Final = -2003303406 
+UCEERR_CHANNELSYNCTIMEDOUT: Final = -2003303405 +UCEERR_CHANNELSYNCABANDONED: Final = -2003303404 +UCEERR_UNSUPPORTEDTRANSPORTVERSION: Final = -2003303403 +UCEERR_TRANSPORTUNAVAILABLE: Final = -2003303402 +UCEERR_FEEDBACK_UNSUPPORTED: Final = -2003303401 +UCEERR_COMMANDTRANSPORTDENIED: Final = -2003303400 +UCEERR_GRAPHICSSTREAMUNAVAILABLE: Final = -2003303399 +UCEERR_GRAPHICSSTREAMALREADYOPEN: Final = -2003303392 +UCEERR_TRANSPORTDISCONNECTED: Final = -2003303391 +UCEERR_TRANSPORTOVERLOADED: Final = -2003303390 +UCEERR_PARTITION_ZOMBIED: Final = -2003303389 +MILAVERR_NOCLOCK: Final = -2003303168 +MILAVERR_NOMEDIATYPE: Final = -2003303167 +MILAVERR_NOVIDEOMIXER: Final = -2003303166 +MILAVERR_NOVIDEOPRESENTER: Final = -2003303165 +MILAVERR_NOREADYFRAMES: Final = -2003303164 +MILAVERR_MODULENOTLOADED: Final = -2003303163 +MILAVERR_WMPFACTORYNOTREGISTERED: Final = -2003303162 +MILAVERR_INVALIDWMPVERSION: Final = -2003303161 +MILAVERR_INSUFFICIENTVIDEORESOURCES: Final = -2003303160 +MILAVERR_VIDEOACCELERATIONNOTAVAILABLE: Final = -2003303159 +MILAVERR_REQUESTEDTEXTURETOOBIG: Final = -2003303158 +MILAVERR_SEEKFAILED: Final = -2003303157 +MILAVERR_UNEXPECTEDWMPFAILURE: Final = -2003303156 +MILAVERR_MEDIAPLAYERCLOSED: Final = -2003303155 +MILAVERR_UNKNOWNHARDWAREERROR: Final = -2003303154 +MILEFFECTSERR_UNKNOWNPROPERTY: Final = -2003302898 +MILEFFECTSERR_EFFECTNOTPARTOFGROUP: Final = -2003302897 +MILEFFECTSERR_NOINPUTSOURCEATTACHED: Final = -2003302896 +MILEFFECTSERR_CONNECTORNOTCONNECTED: Final = -2003302895 +MILEFFECTSERR_CONNECTORNOTASSOCIATEDWITHEFFECT: Final = -2003302894 +MILEFFECTSERR_RESERVED: Final = -2003302893 +MILEFFECTSERR_CYCLEDETECTED: Final = -2003302892 +MILEFFECTSERR_EFFECTINMORETHANONEGRAPH: Final = -2003302891 +MILEFFECTSERR_EFFECTALREADYINAGRAPH: Final = -2003302890 +MILEFFECTSERR_EFFECTHASNOCHILDREN: Final = -2003302889 +MILEFFECTSERR_ALREADYATTACHEDTOLISTENER: Final = -2003302888 +MILEFFECTSERR_NOTAFFINETRANSFORM: Final = -2003302887 +MILEFFECTSERR_EMPTYBOUNDS: Final = -2003302886 +MILEFFECTSERR_OUTPUTSIZETOOLARGE: Final = -2003302885 +DWMERR_STATE_TRANSITION_FAILED: Final = -2003302656 +DWMERR_THEME_FAILED: Final = -2003302655 +DWMERR_CATASTROPHIC_FAILURE: Final = -2003302654 +DCOMPOSITION_ERROR_WINDOW_ALREADY_COMPOSED: Final = -2003302400 +DCOMPOSITION_ERROR_SURFACE_BEING_RENDERED: Final = -2003302399 +DCOMPOSITION_ERROR_SURFACE_NOT_BEING_RENDERED: Final = -2003302398 +ONL_E_INVALID_AUTHENTICATION_TARGET: Final = -2138701823 +ONL_E_ACCESS_DENIED_BY_TOU: Final = -2138701822 +ONL_E_INVALID_APPLICATION: Final = -2138701821 +ONL_E_PASSWORD_UPDATE_REQUIRED: Final = -2138701820 +ONL_E_ACCOUNT_UPDATE_REQUIRED: Final = -2138701819 +ONL_E_FORCESIGNIN: Final = -2138701818 +ONL_E_ACCOUNT_LOCKED: Final = -2138701817 +ONL_E_PARENTAL_CONSENT_REQUIRED: Final = -2138701816 +ONL_E_EMAIL_VERIFICATION_REQUIRED: Final = -2138701815 +ONL_E_ACCOUNT_SUSPENDED_COMPROIMISE: Final = -2138701814 +ONL_E_ACCOUNT_SUSPENDED_ABUSE: Final = -2138701813 +ONL_E_ACTION_REQUIRED: Final = -2138701812 +ONL_CONNECTION_COUNT_LIMIT: Final = -2138701811 +ONL_E_CONNECTED_ACCOUNT_CAN_NOT_SIGNOUT: Final = -2138701810 +ONL_E_USER_AUTHENTICATION_REQUIRED: Final = -2138701809 +ONL_E_REQUEST_THROTTLED: Final = -2138701808 +FA_E_MAX_PERSISTED_ITEMS_REACHED: Final = -2144927200 +FA_E_HOMEGROUP_NOT_AVAILABLE: Final = -2144927198 +E_MONITOR_RESOLUTION_TOO_LOW: Final = -2144927152 +E_ELEVATED_ACTIVATION_NOT_SUPPORTED: Final = -2144927151 +E_UAC_DISABLED: Final = -2144927150 +E_FULL_ADMIN_NOT_SUPPORTED: Final = -2144927149 
+E_APPLICATION_NOT_REGISTERED: Final = -2144927148 +E_MULTIPLE_EXTENSIONS_FOR_APPLICATION: Final = -2144927147 +E_MULTIPLE_PACKAGES_FOR_FAMILY: Final = -2144927146 +E_APPLICATION_MANAGER_NOT_RUNNING: Final = -2144927145 +S_STORE_LAUNCHED_FOR_REMEDIATION: Final = 0x00270258 +S_APPLICATION_ACTIVATION_ERROR_HANDLED_BY_DIALOG: Final = 0x00270259 +E_APPLICATION_ACTIVATION_TIMED_OUT: Final = -2144927142 +E_APPLICATION_ACTIVATION_EXEC_FAILURE: Final = -2144927141 +E_APPLICATION_TEMPORARY_LICENSE_ERROR: Final = -2144927140 +E_APPLICATION_TRIAL_LICENSE_EXPIRED: Final = -2144927139 +E_SKYDRIVE_ROOT_TARGET_FILE_SYSTEM_NOT_SUPPORTED: Final = -2144927136 +E_SKYDRIVE_ROOT_TARGET_OVERLAP: Final = -2144927135 +E_SKYDRIVE_ROOT_TARGET_CANNOT_INDEX: Final = -2144927134 +E_SKYDRIVE_FILE_NOT_UPLOADED: Final = -2144927133 +E_SKYDRIVE_UPDATE_AVAILABILITY_FAIL: Final = -2144927132 +E_SKYDRIVE_ROOT_TARGET_VOLUME_ROOT_NOT_SUPPORTED: Final = -2144927131 +E_SYNCENGINE_FILE_SIZE_OVER_LIMIT: Final = -2013089791 +E_SYNCENGINE_FILE_SIZE_EXCEEDS_REMAINING_QUOTA: Final = -2013089790 +E_SYNCENGINE_UNSUPPORTED_FILE_NAME: Final = -2013089789 +E_SYNCENGINE_FOLDER_ITEM_COUNT_LIMIT_EXCEEDED: Final = -2013089788 +E_SYNCENGINE_FILE_SYNC_PARTNER_ERROR: Final = -2013089787 +E_SYNCENGINE_SYNC_PAUSED_BY_SERVICE: Final = -2013089786 +E_SYNCENGINE_FILE_IDENTIFIER_UNKNOWN: Final = -2013085694 +E_SYNCENGINE_SERVICE_AUTHENTICATION_FAILED: Final = -2013085693 +E_SYNCENGINE_UNKNOWN_SERVICE_ERROR: Final = -2013085692 +E_SYNCENGINE_SERVICE_RETURNED_UNEXPECTED_SIZE: Final = -2013085691 +E_SYNCENGINE_REQUEST_BLOCKED_BY_SERVICE: Final = -2013085690 +E_SYNCENGINE_REQUEST_BLOCKED_DUE_TO_CLIENT_ERROR: Final = -2013085689 +E_SYNCENGINE_FOLDER_INACCESSIBLE: Final = -2013081599 +E_SYNCENGINE_UNSUPPORTED_FOLDER_NAME: Final = -2013081598 +E_SYNCENGINE_UNSUPPORTED_MARKET: Final = -2013081597 +E_SYNCENGINE_PATH_LENGTH_LIMIT_EXCEEDED: Final = -2013081596 +E_SYNCENGINE_REMOTE_PATH_LENGTH_LIMIT_EXCEEDED: Final = -2013081595 +E_SYNCENGINE_CLIENT_UPDATE_NEEDED: Final = -2013081594 +E_SYNCENGINE_PROXY_AUTHENTICATION_REQUIRED: Final = -2013081593 +E_SYNCENGINE_STORAGE_SERVICE_PROVISIONING_FAILED: Final = -2013081592 +E_SYNCENGINE_UNSUPPORTED_REPARSE_POINT: Final = -2013081591 +E_SYNCENGINE_STORAGE_SERVICE_BLOCKED: Final = -2013081590 +E_SYNCENGINE_FOLDER_IN_REDIRECTION: Final = -2013081589 +EAS_E_POLICY_NOT_MANAGED_BY_OS: Final = -2141913087 +EAS_E_POLICY_COMPLIANT_WITH_ACTIONS: Final = -2141913086 +EAS_E_REQUESTED_POLICY_NOT_ENFORCEABLE: Final = -2141913085 +EAS_E_CURRENT_USER_HAS_BLANK_PASSWORD: Final = -2141913084 +EAS_E_REQUESTED_POLICY_PASSWORD_EXPIRATION_INCOMPATIBLE: Final = -2141913083 +EAS_E_USER_CANNOT_CHANGE_PASSWORD: Final = -2141913082 +EAS_E_ADMINS_HAVE_BLANK_PASSWORD: Final = -2141913081 +EAS_E_ADMINS_CANNOT_CHANGE_PASSWORD: Final = -2141913080 +EAS_E_LOCAL_CONTROLLED_USERS_CANNOT_CHANGE_PASSWORD: Final = -2141913079 +EAS_E_PASSWORD_POLICY_NOT_ENFORCEABLE_FOR_CONNECTED_ADMINS: Final = -2141913078 +EAS_E_CONNECTED_ADMINS_NEED_TO_CHANGE_PASSWORD: Final = -2141913077 +EAS_E_PASSWORD_POLICY_NOT_ENFORCEABLE_FOR_CURRENT_CONNECTED_USER: Final = -2141913076 +EAS_E_CURRENT_CONNECTED_USER_NEED_TO_CHANGE_PASSWORD: Final = -2141913075 +WEB_E_UNSUPPORTED_FORMAT: Final = -2089484287 +WEB_E_INVALID_XML: Final = -2089484286 +WEB_E_MISSING_REQUIRED_ELEMENT: Final = -2089484285 +WEB_E_MISSING_REQUIRED_ATTRIBUTE: Final = -2089484284 +WEB_E_UNEXPECTED_CONTENT: Final = -2089484283 +WEB_E_RESOURCE_TOO_LARGE: Final = -2089484282 +WEB_E_INVALID_JSON_STRING: Final = 
-2089484281 +WEB_E_INVALID_JSON_NUMBER: Final = -2089484280 +WEB_E_JSON_VALUE_NOT_FOUND: Final = -2089484279 +HTTP_E_STATUS_UNEXPECTED: Final = -2145845247 +HTTP_E_STATUS_UNEXPECTED_REDIRECTION: Final = -2145845245 +HTTP_E_STATUS_UNEXPECTED_CLIENT_ERROR: Final = -2145845244 +HTTP_E_STATUS_UNEXPECTED_SERVER_ERROR: Final = -2145845243 +HTTP_E_STATUS_AMBIGUOUS: Final = -2145844948 +HTTP_E_STATUS_MOVED: Final = -2145844947 +HTTP_E_STATUS_REDIRECT: Final = -2145844946 +HTTP_E_STATUS_REDIRECT_METHOD: Final = -2145844945 +HTTP_E_STATUS_NOT_MODIFIED: Final = -2145844944 +HTTP_E_STATUS_USE_PROXY: Final = -2145844943 +HTTP_E_STATUS_REDIRECT_KEEP_VERB: Final = -2145844941 +HTTP_E_STATUS_BAD_REQUEST: Final = -2145844848 +HTTP_E_STATUS_DENIED: Final = -2145844847 +HTTP_E_STATUS_PAYMENT_REQ: Final = -2145844846 +HTTP_E_STATUS_FORBIDDEN: Final = -2145844845 +HTTP_E_STATUS_NOT_FOUND: Final = -2145844844 +HTTP_E_STATUS_BAD_METHOD: Final = -2145844843 +HTTP_E_STATUS_NONE_ACCEPTABLE: Final = -2145844842 +HTTP_E_STATUS_PROXY_AUTH_REQ: Final = -2145844841 +HTTP_E_STATUS_REQUEST_TIMEOUT: Final = -2145844840 +HTTP_E_STATUS_CONFLICT: Final = -2145844839 +HTTP_E_STATUS_GONE: Final = -2145844838 +HTTP_E_STATUS_LENGTH_REQUIRED: Final = -2145844837 +HTTP_E_STATUS_PRECOND_FAILED: Final = -2145844836 +HTTP_E_STATUS_REQUEST_TOO_LARGE: Final = -2145844835 +HTTP_E_STATUS_URI_TOO_LONG: Final = -2145844834 +HTTP_E_STATUS_UNSUPPORTED_MEDIA: Final = -2145844833 +HTTP_E_STATUS_RANGE_NOT_SATISFIABLE: Final = -2145844832 +HTTP_E_STATUS_EXPECTATION_FAILED: Final = -2145844831 +HTTP_E_STATUS_SERVER_ERROR: Final = -2145844748 +HTTP_E_STATUS_NOT_SUPPORTED: Final = -2145844747 +HTTP_E_STATUS_BAD_GATEWAY: Final = -2145844746 +HTTP_E_STATUS_SERVICE_UNAVAIL: Final = -2145844745 +HTTP_E_STATUS_GATEWAY_TIMEOUT: Final = -2145844744 +HTTP_E_STATUS_VERSION_NOT_SUP: Final = -2145844743 +E_INVALID_PROTOCOL_OPERATION: Final = -2089418751 +E_INVALID_PROTOCOL_FORMAT: Final = -2089418750 +E_PROTOCOL_EXTENSIONS_NOT_SUPPORTED: Final = -2089418749 +E_SUBPROTOCOL_NOT_SUPPORTED: Final = -2089418748 +E_PROTOCOL_VERSION_NOT_SUPPORTED: Final = -2089418747 +INPUT_E_OUT_OF_ORDER: Final = -2143289344 +INPUT_E_REENTRANCY: Final = -2143289343 +INPUT_E_MULTIMODAL: Final = -2143289342 +INPUT_E_PACKET: Final = -2143289341 +INPUT_E_FRAME: Final = -2143289340 +INPUT_E_HISTORY: Final = -2143289339 +INPUT_E_DEVICE_INFO: Final = -2143289338 +INPUT_E_TRANSFORM: Final = -2143289337 +INPUT_E_DEVICE_PROPERTY: Final = -2143289336 +INET_E_INVALID_URL: Final = -2146697214 +INET_E_NO_SESSION: Final = -2146697213 +INET_E_CANNOT_CONNECT: Final = -2146697212 +INET_E_RESOURCE_NOT_FOUND: Final = -2146697211 +INET_E_OBJECT_NOT_FOUND: Final = -2146697210 +INET_E_DATA_NOT_AVAILABLE: Final = -2146697209 +INET_E_DOWNLOAD_FAILURE: Final = -2146697208 +INET_E_AUTHENTICATION_REQUIRED: Final = -2146697207 +INET_E_NO_VALID_MEDIA: Final = -2146697206 +INET_E_CONNECTION_TIMEOUT: Final = -2146697205 +INET_E_INVALID_REQUEST: Final = -2146697204 +INET_E_UNKNOWN_PROTOCOL: Final = -2146697203 +INET_E_SECURITY_PROBLEM: Final = -2146697202 +INET_E_CANNOT_LOAD_DATA: Final = -2146697201 +INET_E_CANNOT_INSTANTIATE_OBJECT: Final = -2146697200 +INET_E_INVALID_CERTIFICATE: Final = -2146697191 +INET_E_REDIRECT_FAILED: Final = -2146697196 +INET_E_REDIRECT_TO_DIR: Final = -2146697195 +ERROR_DBG_CREATE_PROCESS_FAILURE_LOCKDOWN: Final = -2135949311 +ERROR_DBG_ATTACH_PROCESS_FAILURE_LOCKDOWN: Final = -2135949310 +ERROR_DBG_CONNECT_SERVER_FAILURE_LOCKDOWN: Final = -2135949309 
+ERROR_DBG_START_SERVER_FAILURE_LOCKDOWN: Final = -2135949308 +HSP_E_ERROR_MASK: Final = -2128084992 +HSP_E_INTERNAL_ERROR: Final = -2128080897 +HSP_BS_ERROR_MASK: Final = -2128080896 +HSP_BS_INTERNAL_ERROR: Final = -2128080641 +HSP_DRV_ERROR_MASK: Final = -2128019456 +HSP_DRV_INTERNAL_ERROR: Final = -2128019201 +HSP_BASE_ERROR_MASK: Final = -2128019200 +HSP_BASE_INTERNAL_ERROR: Final = -2128018945 +HSP_KSP_ERROR_MASK: Final = -2128018944 +HSP_KSP_DEVICE_NOT_READY: Final = -2128018943 +HSP_KSP_INVALID_PROVIDER_HANDLE: Final = -2128018942 +HSP_KSP_INVALID_KEY_HANDLE: Final = -2128018941 +HSP_KSP_INVALID_PARAMETER: Final = -2128018940 +HSP_KSP_BUFFER_TOO_SMALL: Final = -2128018939 +HSP_KSP_NOT_SUPPORTED: Final = -2128018938 +HSP_KSP_INVALID_DATA: Final = -2128018937 +HSP_KSP_INVALID_FLAGS: Final = -2128018936 +HSP_KSP_ALGORITHM_NOT_SUPPORTED: Final = -2128018935 +HSP_KSP_KEY_ALREADY_FINALIZED: Final = -2128018934 +HSP_KSP_KEY_NOT_FINALIZED: Final = -2128018933 +HSP_KSP_INVALID_KEY_TYPE: Final = -2128018932 +HSP_KSP_NO_MEMORY: Final = -2128018928 +HSP_KSP_PARAMETER_NOT_SET: Final = -2128018927 +HSP_KSP_KEY_EXISTS: Final = -2128018923 +HSP_KSP_KEY_MISSING: Final = -2128018922 +HSP_KSP_KEY_LOAD_FAIL: Final = -2128018921 +HSP_KSP_NO_MORE_ITEMS: Final = -2128018920 +HSP_KSP_INTERNAL_ERROR: Final = -2128018689 +ERROR_IO_PREEMPTED: Final = -1996423167 +JSCRIPT_E_CANTEXECUTE: Final = -1996357631 +WEP_E_NOT_PROVISIONED_ON_ALL_VOLUMES: Final = -2013200383 +WEP_E_FIXED_DATA_NOT_SUPPORTED: Final = -2013200382 +WEP_E_HARDWARE_NOT_COMPLIANT: Final = -2013200381 +WEP_E_LOCK_NOT_CONFIGURED: Final = -2013200380 +WEP_E_PROTECTION_SUSPENDED: Final = -2013200379 +WEP_E_NO_LICENSE: Final = -2013200378 +WEP_E_OS_NOT_PROTECTED: Final = -2013200377 +WEP_E_UNEXPECTED_FAIL: Final = -2013200376 +WEP_E_BUFFER_TOO_LARGE: Final = -2013200375 +ERROR_SVHDX_ERROR_STORED: Final = -1067712512 +ERROR_SVHDX_ERROR_NOT_AVAILABLE: Final = -1067647232 +ERROR_SVHDX_UNIT_ATTENTION_AVAILABLE: Final = -1067647231 +ERROR_SVHDX_UNIT_ATTENTION_CAPACITY_DATA_CHANGED: Final = -1067647230 +ERROR_SVHDX_UNIT_ATTENTION_RESERVATIONS_PREEMPTED: Final = -1067647229 +ERROR_SVHDX_UNIT_ATTENTION_RESERVATIONS_RELEASED: Final = -1067647228 +ERROR_SVHDX_UNIT_ATTENTION_REGISTRATIONS_PREEMPTED: Final = -1067647227 +ERROR_SVHDX_UNIT_ATTENTION_OPERATING_DEFINITION_CHANGED: Final = -1067647226 +ERROR_SVHDX_RESERVATION_CONFLICT: Final = -1067647225 +ERROR_SVHDX_WRONG_FILE_TYPE: Final = -1067647224 +ERROR_SVHDX_VERSION_MISMATCH: Final = -1067647223 +ERROR_VHD_SHARED: Final = -1067647222 +ERROR_SVHDX_NO_INITIATOR: Final = -1067647221 +ERROR_VHDSET_BACKING_STORAGE_NOT_FOUND: Final = -1067647220 +ERROR_SMB_NO_PREAUTH_INTEGRITY_HASH_OVERLAP: Final = -1067646976 +ERROR_SMB_BAD_CLUSTER_DIALECT: Final = -1067646975 +ERROR_SMB_NO_SIGNING_ALGORITHM_OVERLAP: Final = -1067646974 +WININET_E_OUT_OF_HANDLES: Final = -2147012895 +WININET_E_TIMEOUT: Final = -2147012894 +WININET_E_EXTENDED_ERROR: Final = -2147012893 +WININET_E_INTERNAL_ERROR: Final = -2147012892 +WININET_E_INVALID_URL: Final = -2147012891 +WININET_E_UNRECOGNIZED_SCHEME: Final = -2147012890 +WININET_E_NAME_NOT_RESOLVED: Final = -2147012889 +WININET_E_PROTOCOL_NOT_FOUND: Final = -2147012888 +WININET_E_INVALID_OPTION: Final = -2147012887 +WININET_E_BAD_OPTION_LENGTH: Final = -2147012886 +WININET_E_OPTION_NOT_SETTABLE: Final = -2147012885 +WININET_E_SHUTDOWN: Final = -2147012884 +WININET_E_INCORRECT_USER_NAME: Final = -2147012883 +WININET_E_INCORRECT_PASSWORD: Final = -2147012882 +WININET_E_LOGIN_FAILURE: Final = 
-2147012881 +WININET_E_INVALID_OPERATION: Final = -2147012880 +WININET_E_OPERATION_CANCELLED: Final = -2147012879 +WININET_E_INCORRECT_HANDLE_TYPE: Final = -2147012878 +WININET_E_INCORRECT_HANDLE_STATE: Final = -2147012877 +WININET_E_NOT_PROXY_REQUEST: Final = -2147012876 +WININET_E_REGISTRY_VALUE_NOT_FOUND: Final = -2147012875 +WININET_E_BAD_REGISTRY_PARAMETER: Final = -2147012874 +WININET_E_NO_DIRECT_ACCESS: Final = -2147012873 +WININET_E_NO_CONTEXT: Final = -2147012872 +WININET_E_NO_CALLBACK: Final = -2147012871 +WININET_E_REQUEST_PENDING: Final = -2147012870 +WININET_E_INCORRECT_FORMAT: Final = -2147012869 +WININET_E_ITEM_NOT_FOUND: Final = -2147012868 +WININET_E_CANNOT_CONNECT: Final = -2147012867 +WININET_E_CONNECTION_ABORTED: Final = -2147012866 +WININET_E_CONNECTION_RESET: Final = -2147012865 +WININET_E_FORCE_RETRY: Final = -2147012864 +WININET_E_INVALID_PROXY_REQUEST: Final = -2147012863 +WININET_E_NEED_UI: Final = -2147012862 +WININET_E_HANDLE_EXISTS: Final = -2147012860 +WININET_E_SEC_CERT_DATE_INVALID: Final = -2147012859 +WININET_E_SEC_CERT_CN_INVALID: Final = -2147012858 +WININET_E_HTTP_TO_HTTPS_ON_REDIR: Final = -2147012857 +WININET_E_HTTPS_TO_HTTP_ON_REDIR: Final = -2147012856 +WININET_E_MIXED_SECURITY: Final = -2147012855 +WININET_E_CHG_POST_IS_NON_SECURE: Final = -2147012854 +WININET_E_POST_IS_NON_SECURE: Final = -2147012853 +WININET_E_CLIENT_AUTH_CERT_NEEDED: Final = -2147012852 +WININET_E_INVALID_CA: Final = -2147012851 +WININET_E_CLIENT_AUTH_NOT_SETUP: Final = -2147012850 +WININET_E_ASYNC_THREAD_FAILED: Final = -2147012849 +WININET_E_REDIRECT_SCHEME_CHANGE: Final = -2147012848 +WININET_E_DIALOG_PENDING: Final = -2147012847 +WININET_E_RETRY_DIALOG: Final = -2147012846 +WININET_E_NO_NEW_CONTAINERS: Final = -2147012845 +WININET_E_HTTPS_HTTP_SUBMIT_REDIR: Final = -2147012844 +WININET_E_SEC_CERT_ERRORS: Final = -2147012841 +WININET_E_SEC_CERT_REV_FAILED: Final = -2147012839 +WININET_E_HEADER_NOT_FOUND: Final = -2147012746 +WININET_E_DOWNLEVEL_SERVER: Final = -2147012745 +WININET_E_INVALID_SERVER_RESPONSE: Final = -2147012744 +WININET_E_INVALID_HEADER: Final = -2147012743 +WININET_E_INVALID_QUERY_REQUEST: Final = -2147012742 +WININET_E_HEADER_ALREADY_EXISTS: Final = -2147012741 +WININET_E_REDIRECT_FAILED: Final = -2147012740 +WININET_E_SECURITY_CHANNEL_ERROR: Final = -2147012739 +WININET_E_UNABLE_TO_CACHE_FILE: Final = -2147012738 +WININET_E_TCPIP_NOT_INSTALLED: Final = -2147012737 +WININET_E_DISCONNECTED: Final = -2147012733 +WININET_E_SERVER_UNREACHABLE: Final = -2147012732 +WININET_E_PROXY_SERVER_UNREACHABLE: Final = -2147012731 +WININET_E_BAD_AUTO_PROXY_SCRIPT: Final = -2147012730 +WININET_E_UNABLE_TO_DOWNLOAD_SCRIPT: Final = -2147012729 +WININET_E_SEC_INVALID_CERT: Final = -2147012727 +WININET_E_SEC_CERT_REVOKED: Final = -2147012726 +WININET_E_FAILED_DUETOSECURITYCHECK: Final = -2147012725 +WININET_E_NOT_INITIALIZED: Final = -2147012724 +WININET_E_LOGIN_FAILURE_DISPLAY_ENTITY_BODY: Final = -2147012722 +WININET_E_DECODING_FAILED: Final = -2147012721 +WININET_E_NOT_REDIRECTED: Final = -2147012736 +WININET_E_COOKIE_NEEDS_CONFIRMATION: Final = -2147012735 +WININET_E_COOKIE_DECLINED: Final = -2147012734 +WININET_E_REDIRECT_NEEDS_CONFIRMATION: Final = -2147012728 +SQLITE_E_ERROR: Final = -2018574335 +SQLITE_E_INTERNAL: Final = -2018574334 +SQLITE_E_PERM: Final = -2018574333 +SQLITE_E_ABORT: Final = -2018574332 +SQLITE_E_BUSY: Final = -2018574331 +SQLITE_E_LOCKED: Final = -2018574330 +SQLITE_E_NOMEM: Final = -2018574329 +SQLITE_E_READONLY: Final = -2018574328 
+SQLITE_E_INTERRUPT: Final = -2018574327 +SQLITE_E_IOERR: Final = -2018574326 +SQLITE_E_CORRUPT: Final = -2018574325 +SQLITE_E_NOTFOUND: Final = -2018574324 +SQLITE_E_FULL: Final = -2018574323 +SQLITE_E_CANTOPEN: Final = -2018574322 +SQLITE_E_PROTOCOL: Final = -2018574321 +SQLITE_E_EMPTY: Final = -2018574320 +SQLITE_E_SCHEMA: Final = -2018574319 +SQLITE_E_TOOBIG: Final = -2018574318 +SQLITE_E_CONSTRAINT: Final = -2018574317 +SQLITE_E_MISMATCH: Final = -2018574316 +SQLITE_E_MISUSE: Final = -2018574315 +SQLITE_E_NOLFS: Final = -2018574314 +SQLITE_E_AUTH: Final = -2018574313 +SQLITE_E_FORMAT: Final = -2018574312 +SQLITE_E_RANGE: Final = -2018574311 +SQLITE_E_NOTADB: Final = -2018574310 +SQLITE_E_NOTICE: Final = -2018574309 +SQLITE_E_WARNING: Final = -2018574308 +SQLITE_E_ROW: Final = -2018574236 +SQLITE_E_DONE: Final = -2018574235 +SQLITE_E_IOERR_READ: Final = -2018574070 +SQLITE_E_IOERR_SHORT_READ: Final = -2018573814 +SQLITE_E_IOERR_WRITE: Final = -2018573558 +SQLITE_E_IOERR_FSYNC: Final = -2018573302 +SQLITE_E_IOERR_DIR_FSYNC: Final = -2018573046 +SQLITE_E_IOERR_TRUNCATE: Final = -2018572790 +SQLITE_E_IOERR_FSTAT: Final = -2018572534 +SQLITE_E_IOERR_UNLOCK: Final = -2018572278 +SQLITE_E_IOERR_RDLOCK: Final = -2018572022 +SQLITE_E_IOERR_DELETE: Final = -2018571766 +SQLITE_E_IOERR_BLOCKED: Final = -2018571510 +SQLITE_E_IOERR_NOMEM: Final = -2018571254 +SQLITE_E_IOERR_ACCESS: Final = -2018570998 +SQLITE_E_IOERR_CHECKRESERVEDLOCK: Final = -2018570742 +SQLITE_E_IOERR_LOCK: Final = -2018570486 +SQLITE_E_IOERR_CLOSE: Final = -2018570230 +SQLITE_E_IOERR_DIR_CLOSE: Final = -2018569974 +SQLITE_E_IOERR_SHMOPEN: Final = -2018569718 +SQLITE_E_IOERR_SHMSIZE: Final = -2018569462 +SQLITE_E_IOERR_SHMLOCK: Final = -2018569206 +SQLITE_E_IOERR_SHMMAP: Final = -2018568950 +SQLITE_E_IOERR_SEEK: Final = -2018568694 +SQLITE_E_IOERR_DELETE_NOENT: Final = -2018568438 +SQLITE_E_IOERR_MMAP: Final = -2018568182 +SQLITE_E_IOERR_GETTEMPPATH: Final = -2018567926 +SQLITE_E_IOERR_CONVPATH: Final = -2018567670 +SQLITE_E_IOERR_VNODE: Final = -2018567678 +SQLITE_E_IOERR_AUTH: Final = -2018567677 +SQLITE_E_LOCKED_SHAREDCACHE: Final = -2018574074 +SQLITE_E_BUSY_RECOVERY: Final = -2018574075 +SQLITE_E_BUSY_SNAPSHOT: Final = -2018573819 +SQLITE_E_CANTOPEN_NOTEMPDIR: Final = -2018574066 +SQLITE_E_CANTOPEN_ISDIR: Final = -2018573810 +SQLITE_E_CANTOPEN_FULLPATH: Final = -2018573554 +SQLITE_E_CANTOPEN_CONVPATH: Final = -2018573298 +SQLITE_E_CORRUPT_VTAB: Final = -2018574069 +SQLITE_E_READONLY_RECOVERY: Final = -2018574072 +SQLITE_E_READONLY_CANTLOCK: Final = -2018573816 +SQLITE_E_READONLY_ROLLBACK: Final = -2018573560 +SQLITE_E_READONLY_DBMOVED: Final = -2018573304 +SQLITE_E_ABORT_ROLLBACK: Final = -2018573820 +SQLITE_E_CONSTRAINT_CHECK: Final = -2018574061 +SQLITE_E_CONSTRAINT_COMMITHOOK: Final = -2018573805 +SQLITE_E_CONSTRAINT_FOREIGNKEY: Final = -2018573549 +SQLITE_E_CONSTRAINT_FUNCTION: Final = -2018573293 +SQLITE_E_CONSTRAINT_NOTNULL: Final = -2018573037 +SQLITE_E_CONSTRAINT_PRIMARYKEY: Final = -2018572781 +SQLITE_E_CONSTRAINT_TRIGGER: Final = -2018572525 +SQLITE_E_CONSTRAINT_UNIQUE: Final = -2018572269 +SQLITE_E_CONSTRAINT_VTAB: Final = -2018572013 +SQLITE_E_CONSTRAINT_ROWID: Final = -2018571757 +SQLITE_E_NOTICE_RECOVER_WAL: Final = -2018574053 +SQLITE_E_NOTICE_RECOVER_ROLLBACK: Final = -2018573797 +SQLITE_E_WARNING_AUTOINDEX: Final = -2018574052 +UTC_E_TOGGLE_TRACE_STARTED: Final = -2017128447 +UTC_E_ALTERNATIVE_TRACE_CANNOT_PREEMPT: Final = -2017128446 +UTC_E_AOT_NOT_RUNNING: Final = -2017128445 +UTC_E_SCRIPT_TYPE_INVALID: 
Final = -2017128444 +UTC_E_SCENARIODEF_NOT_FOUND: Final = -2017128443 +UTC_E_TRACEPROFILE_NOT_FOUND: Final = -2017128442 +UTC_E_FORWARDER_ALREADY_ENABLED: Final = -2017128441 +UTC_E_FORWARDER_ALREADY_DISABLED: Final = -2017128440 +UTC_E_EVENTLOG_ENTRY_MALFORMED: Final = -2017128439 +UTC_E_DIAGRULES_SCHEMAVERSION_MISMATCH: Final = -2017128438 +UTC_E_SCRIPT_TERMINATED: Final = -2017128437 +UTC_E_INVALID_CUSTOM_FILTER: Final = -2017128436 +UTC_E_TRACE_NOT_RUNNING: Final = -2017128435 +UTC_E_REESCALATED_TOO_QUICKLY: Final = -2017128434 +UTC_E_ESCALATION_ALREADY_RUNNING: Final = -2017128433 +UTC_E_PERFTRACK_ALREADY_TRACING: Final = -2017128432 +UTC_E_REACHED_MAX_ESCALATIONS: Final = -2017128431 +UTC_E_FORWARDER_PRODUCER_MISMATCH: Final = -2017128430 +UTC_E_INTENTIONAL_SCRIPT_FAILURE: Final = -2017128429 +UTC_E_SQM_INIT_FAILED: Final = -2017128428 +UTC_E_NO_WER_LOGGER_SUPPORTED: Final = -2017128427 +UTC_E_TRACERS_DONT_EXIST: Final = -2017128426 +UTC_E_WINRT_INIT_FAILED: Final = -2017128425 +UTC_E_SCENARIODEF_SCHEMAVERSION_MISMATCH: Final = -2017128424 +UTC_E_INVALID_FILTER: Final = -2017128423 +UTC_E_EXE_TERMINATED: Final = -2017128422 +UTC_E_ESCALATION_NOT_AUTHORIZED: Final = -2017128421 +UTC_E_SETUP_NOT_AUTHORIZED: Final = -2017128420 +UTC_E_CHILD_PROCESS_FAILED: Final = -2017128419 +UTC_E_COMMAND_LINE_NOT_AUTHORIZED: Final = -2017128418 +UTC_E_CANNOT_LOAD_SCENARIO_EDITOR_XML: Final = -2017128417 +UTC_E_ESCALATION_TIMED_OUT: Final = -2017128416 +UTC_E_SETUP_TIMED_OUT: Final = -2017128415 +UTC_E_TRIGGER_MISMATCH: Final = -2017128414 +UTC_E_TRIGGER_NOT_FOUND: Final = -2017128413 +UTC_E_SIF_NOT_SUPPORTED: Final = -2017128412 +UTC_E_DELAY_TERMINATED: Final = -2017128411 +UTC_E_DEVICE_TICKET_ERROR: Final = -2017128410 +UTC_E_TRACE_BUFFER_LIMIT_EXCEEDED: Final = -2017128409 +UTC_E_API_RESULT_UNAVAILABLE: Final = -2017128408 +UTC_E_RPC_TIMEOUT: Final = -2017128407 +UTC_E_RPC_WAIT_FAILED: Final = -2017128406 +UTC_E_API_BUSY: Final = -2017128405 +UTC_E_TRACE_MIN_DURATION_REQUIREMENT_NOT_MET: Final = -2017128404 +UTC_E_EXCLUSIVITY_NOT_AVAILABLE: Final = -2017128403 +UTC_E_GETFILE_FILE_PATH_NOT_APPROVED: Final = -2017128402 +UTC_E_ESCALATION_DIRECTORY_ALREADY_EXISTS: Final = -2017128401 +UTC_E_TIME_TRIGGER_ON_START_INVALID: Final = -2017128400 +UTC_E_TIME_TRIGGER_ONLY_VALID_ON_SINGLE_TRANSITION: Final = -2017128399 +UTC_E_TIME_TRIGGER_INVALID_TIME_RANGE: Final = -2017128398 +UTC_E_MULTIPLE_TIME_TRIGGER_ON_SINGLE_STATE: Final = -2017128397 +UTC_E_BINARY_MISSING: Final = -2017128396 +UTC_E_FAILED_TO_RESOLVE_CONTAINER_ID: Final = -2017128394 +UTC_E_UNABLE_TO_RESOLVE_SESSION: Final = -2017128393 +UTC_E_THROTTLED: Final = -2017128392 +UTC_E_UNAPPROVED_SCRIPT: Final = -2017128391 +UTC_E_SCRIPT_MISSING: Final = -2017128390 +UTC_E_SCENARIO_THROTTLED: Final = -2017128389 +UTC_E_API_NOT_SUPPORTED: Final = -2017128388 +UTC_E_GETFILE_EXTERNAL_PATH_NOT_APPROVED: Final = -2017128387 +UTC_E_TRY_GET_SCENARIO_TIMEOUT_EXCEEDED: Final = -2017128386 +UTC_E_CERT_REV_FAILED: Final = -2017128385 +UTC_E_FAILED_TO_START_NDISCAP: Final = -2017128384 +UTC_E_KERNELDUMP_LIMIT_REACHED: Final = -2017128383 +UTC_E_MISSING_AGGREGATE_EVENT_TAG: Final = -2017128382 +UTC_E_INVALID_AGGREGATION_STRUCT: Final = -2017128381 +UTC_E_ACTION_NOT_SUPPORTED_IN_DESTINATION: Final = -2017128380 +UTC_E_FILTER_MISSING_ATTRIBUTE: Final = -2017128379 +UTC_E_FILTER_INVALID_TYPE: Final = -2017128378 +UTC_E_FILTER_VARIABLE_NOT_FOUND: Final = -2017128377 +UTC_E_FILTER_FUNCTION_RESTRICTED: Final = -2017128376 +UTC_E_FILTER_VERSION_MISMATCH: Final = -2017128375 
+UTC_E_FILTER_INVALID_FUNCTION: Final = -2017128368 +UTC_E_FILTER_INVALID_FUNCTION_PARAMS: Final = -2017128367 +UTC_E_FILTER_INVALID_COMMAND: Final = -2017128366 +UTC_E_FILTER_ILLEGAL_EVAL: Final = -2017128365 +UTC_E_TTTRACER_RETURNED_ERROR: Final = -2017128364 +UTC_E_AGENT_DIAGNOSTICS_TOO_LARGE: Final = -2017128363 +UTC_E_FAILED_TO_RECEIVE_AGENT_DIAGNOSTICS: Final = -2017128362 +UTC_E_SCENARIO_HAS_NO_ACTIONS: Final = -2017128361 +UTC_E_TTTRACER_STORAGE_FULL: Final = -2017128360 +UTC_E_INSUFFICIENT_SPACE_TO_START_TRACE: Final = -2017128359 +UTC_E_ESCALATION_CANCELLED_AT_SHUTDOWN: Final = -2017128358 +UTC_E_GETFILEINFOACTION_FILE_NOT_APPROVED: Final = -2017128357 +UTC_E_SETREGKEYACTION_TYPE_NOT_APPROVED: Final = -2017128356 +UTC_E_TRACE_THROTTLED: Final = -2017128355 +WINML_ERR_INVALID_DEVICE: Final = -2003828735 +WINML_ERR_INVALID_BINDING: Final = -2003828734 +WINML_ERR_VALUE_NOTFOUND: Final = -2003828733 +WINML_ERR_SIZE_MISMATCH: Final = -2003828732 +ERROR_QUIC_HANDSHAKE_FAILURE: Final = -2143223808 +ERROR_QUIC_VER_NEG_FAILURE: Final = -2143223807 +ERROR_QUIC_USER_CANCELED: Final = -2143223806 +ERROR_QUIC_INTERNAL_ERROR: Final = -2143223805 +ERROR_QUIC_PROTOCOL_VIOLATION: Final = -2143223804 +ERROR_QUIC_CONNECTION_IDLE: Final = -2143223803 +ERROR_QUIC_CONNECTION_TIMEOUT: Final = -2143223802 +ERROR_QUIC_ALPN_NEG_FAILURE: Final = -2143223801 +IORING_E_REQUIRED_FLAG_NOT_SUPPORTED: Final = -2142896127 +IORING_E_SUBMISSION_QUEUE_FULL: Final = -2142896126 +IORING_E_VERSION_NOT_SUPPORTED: Final = -2142896125 +IORING_E_SUBMISSION_QUEUE_TOO_BIG: Final = -2142896124 +IORING_E_COMPLETION_QUEUE_TOO_BIG: Final = -2142896123 +IORING_E_SUBMIT_IN_PROGRESS: Final = -2142896122 +IORING_E_CORRUPT: Final = -2142896121 +IORING_E_COMPLETION_QUEUE_TOO_FULL: Final = -2142896120 + +CDERR_DIALOGFAILURE: Final = 0xFFFF +CDERR_GENERALCODES: Final = 0x0000 +CDERR_STRUCTSIZE: Final = 0x0001 +CDERR_INITIALIZATION: Final = 0x0002 +CDERR_NOTEMPLATE: Final = 0x0003 +CDERR_NOHINSTANCE: Final = 0x0004 +CDERR_LOADSTRFAILURE: Final = 0x0005 +CDERR_FINDRESFAILURE: Final = 0x0006 +CDERR_LOADRESFAILURE: Final = 0x0007 +CDERR_LOCKRESFAILURE: Final = 0x0008 +CDERR_MEMALLOCFAILURE: Final = 0x0009 +CDERR_MEMLOCKFAILURE: Final = 0x000A +CDERR_NOHOOK: Final = 0x000B +CDERR_REGISTERMSGFAIL: Final = 0x000C +PDERR_PRINTERCODES: Final = 0x1000 +PDERR_SETUPFAILURE: Final = 0x1001 +PDERR_PARSEFAILURE: Final = 0x1002 +PDERR_RETDEFFAILURE: Final = 0x1003 +PDERR_LOADDRVFAILURE: Final = 0x1004 +PDERR_GETDEVMODEFAIL: Final = 0x1005 +PDERR_INITFAILURE: Final = 0x1006 +PDERR_NODEVICES: Final = 0x1007 +PDERR_NODEFAULTPRN: Final = 0x1008 +PDERR_DNDMMISMATCH: Final = 0x1009 +PDERR_CREATEICFAILURE: Final = 0x100A +PDERR_PRINTERNOTFOUND: Final = 0x100B +PDERR_DEFAULTDIFFERENT: Final = 0x100C +CFERR_CHOOSEFONTCODES: Final = 0x2000 +CFERR_NOFONTS: Final = 0x2001 +CFERR_MAXLESSTHANMIN: Final = 0x2002 +FNERR_FILENAMECODES: Final = 0x3000 +FNERR_SUBCLASSFAILURE: Final = 0x3001 +FNERR_INVALIDFILENAME: Final = 0x3002 +FNERR_BUFFERTOOSMALL: Final = 0x3003 +FRERR_FINDREPLACECODES: Final = 0x4000 +FRERR_BUFFERLENGTHZERO: Final = 0x4001 +CCERR_CHOOSECOLORCODES: Final = 0x5000 diff --git a/stubs/pywin32/win32/servicemanager.pyi b/stubs/pywin32/win32/servicemanager.pyi index 2bfeb514c01b..6acc15a79403 100644 --- a/stubs/pywin32/win32/servicemanager.pyi +++ b/stubs/pywin32/win32/servicemanager.pyi @@ -15,7 +15,7 @@ def PrepareToHostSingle(klass: Incomplete | None = ..., /) -> None: ... def PrepareToHostMultiple(service_name: str, klass, /) -> None: ... 
def RunningAsService(): ... def SetEventSourceName(sourceName: str, registerNow: bool = ..., /) -> None: ... -def StartServiceCtrlDispatcher(*args): ... # incomplete +def StartServiceCtrlDispatcher(): ... COINIT_APARTMENTTHREADED: int COINIT_DISABLE_OLE1DDE: int diff --git a/stubs/pywin32/win32/win32trace.pyi b/stubs/pywin32/win32/win32trace.pyi index 6ca1235bd905..1e3fe7945cb6 100644 --- a/stubs/pywin32/win32/win32trace.pyi +++ b/stubs/pywin32/win32/win32trace.pyi @@ -1,7 +1,7 @@ from win32.lib.pywintypes import error as error def GetHandle(*args): ... # incomplete -def GetTracer(*args): ... # incomplete +def GetTracer(): ... def InitRead(*args): ... # incomplete def InitWrite(*args): ... # incomplete def TermRead(*args): ... # incomplete diff --git a/stubs/pywin32/win32/win32ts.pyi b/stubs/pywin32/win32/win32ts.pyi index 7776648dfd7c..f9c2b5db1488 100644 --- a/stubs/pywin32/win32/win32ts.pyi +++ b/stubs/pywin32/win32/win32ts.pyi @@ -32,6 +32,7 @@ WTSClientHardwareId: int WTSClientName: int WTSClientProductId: int WTSClientProtocolType: int +WTSIsRemoteSession: int WTSConnectQuery: int WTSConnectState: int WTSConnected: int diff --git a/stubs/pywin32/win32com/client/__init__.pyi b/stubs/pywin32/win32com/client/__init__.pyi index 4b43a733bf42..c4c3f2a4e1ad 100644 --- a/stubs/pywin32/win32com/client/__init__.pyi +++ b/stubs/pywin32/win32com/client/__init__.pyi @@ -3,6 +3,7 @@ from typing import Final from typing_extensions import TypeAlias import _win32typing +from pythoncom import com_record from win32com.client import dynamic _Stringifiable: TypeAlias = object @@ -46,7 +47,8 @@ class EventsProxy: def DispatchWithEvents(clsid, user_event_class): ... def WithEvents(disp, user_event_class): ... def getevents(clsid): ... -def Record(name, object): ... +def Record(name, object) -> com_record: ... +def register_record_class(cls) -> None: ... class DispatchBaseClass: def __init__(self, oobj: Incomplete | None = ...) -> None: ... From fd6b95dc8c68abfbd87e9c15c895a3810fe02948 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 10 Mar 2025 06:41:10 +0100 Subject: [PATCH 073/388] [stubsabot] Bump ibm-db to 3.2.6 (#13610) --- stubs/ibm-db/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/ibm-db/METADATA.toml b/stubs/ibm-db/METADATA.toml index 365a88f84835..6f7b307e221e 100644 --- a/stubs/ibm-db/METADATA.toml +++ b/stubs/ibm-db/METADATA.toml @@ -1,2 +1,2 @@ -version = "3.2.5" +version = "3.2.6" upstream_repository = "https://github.com/ibmdb/python-ibmdb" From 62fd403170c761d277c6bb4d6edbde6234b2fc4f Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 11 Mar 2025 06:18:03 -0400 Subject: [PATCH 074/388] distutils: Allow overriding Command and Distribution boolean attributes with actual bool in subclasses (#13615) --- stdlib/distutils/cmd.pyi | 2 +- stdlib/distutils/dist.pyi | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/stdlib/distutils/cmd.pyi b/stdlib/distutils/cmd.pyi index dcb423a49b09..a4e77ddf1388 100644 --- a/stdlib/distutils/cmd.pyi +++ b/stdlib/distutils/cmd.pyi @@ -30,7 +30,7 @@ _CommandT = TypeVar("_CommandT", bound=Command) _Ts = TypeVarTuple("_Ts") class Command: - dry_run: Literal[0, 1] # Exposed from __getattr_. Same as Distribution.dry_run + dry_run: bool | Literal[0, 1] # Exposed from __getattr_. 
Same as Distribution.dry_run distribution: Distribution # Any to work around variance issues sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] diff --git a/stdlib/distutils/dist.pyi b/stdlib/distutils/dist.pyi index 75fc7dbb388d..09f2b456d263 100644 --- a/stdlib/distutils/dist.pyi +++ b/stdlib/distutils/dist.pyi @@ -88,9 +88,9 @@ class Distribution: display_options: ClassVar[_OptionsList] display_option_names: ClassVar[list[str]] negative_opt: ClassVar[dict[str, str]] - verbose: Literal[0, 1] - dry_run: Literal[0, 1] - help: Literal[0, 1] + verbose: bool | Literal[0, 1] + dry_run: bool | Literal[0, 1] + help: bool | Literal[0, 1] command_packages: list[str] | None script_name: str | None script_args: list[str] | None From 16124ead3817d22af293d56ed23dd6b44b7a3a26 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Tue, 11 Mar 2025 05:45:28 -0500 Subject: [PATCH 075/388] Fix `asyncio` executor types (#13616) Update type annotations for `run_in_executor` and `set_default_executor` in asyncio event loop interfaces to use more specific executor types from `concurrent.futures` --- stdlib/asyncio/base_events.pyi | 5 +++-- stdlib/asyncio/events.pyi | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/stdlib/asyncio/base_events.pyi b/stdlib/asyncio/base_events.pyi index d410193a3379..9527e9d052aa 100644 --- a/stdlib/asyncio/base_events.pyi +++ b/stdlib/asyncio/base_events.pyi @@ -8,6 +8,7 @@ from asyncio.protocols import BaseProtocol from asyncio.tasks import Task from asyncio.transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport from collections.abc import Callable, Iterable, Sequence +from concurrent.futures import Executor, ThreadPoolExecutor from contextvars import Context from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket from typing import IO, Any, Literal, TypeVar, overload @@ -96,8 +97,8 @@ class BaseEventLoop(AbstractEventLoop): def call_soon_threadsafe( self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None ) -> Handle: ... - def run_in_executor(self, executor: Any, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... - def set_default_executor(self, executor: Any) -> None: ... + def run_in_executor(self, executor: Executor | None, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... + def set_default_executor(self, executor: ThreadPoolExecutor) -> None: ... # type: ignore[override] # Network I/O methods returning Futures. async def getaddrinfo( self, diff --git a/stdlib/asyncio/events.pyi b/stdlib/asyncio/events.pyi index af1594524c45..a9f7d24237a4 100644 --- a/stdlib/asyncio/events.pyi +++ b/stdlib/asyncio/events.pyi @@ -9,6 +9,7 @@ from _asyncio import ( from _typeshed import FileDescriptorLike, ReadableBuffer, StrPath, Unused, WriteableBuffer from abc import ABCMeta, abstractmethod from collections.abc import Callable, Sequence +from concurrent.futures import Executor from contextvars import Context from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket from typing import IO, Any, Literal, Protocol, TypeVar, overload @@ -188,9 +189,9 @@ class AbstractEventLoop: def call_soon_threadsafe(self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> Handle: ... @abstractmethod - def run_in_executor(self, executor: Any, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... 
+ def run_in_executor(self, executor: Executor | None, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... @abstractmethod - def set_default_executor(self, executor: Any) -> None: ... + def set_default_executor(self, executor: Executor) -> None: ... # Network I/O methods returning Futures. @abstractmethod async def getaddrinfo( From 6e2f9134d14bfc907a659df1254c33cbd7b2ef25 Mon Sep 17 00:00:00 2001 From: Kyllian Broers Date: Tue, 11 Mar 2025 11:52:29 +0100 Subject: [PATCH 076/388] [networkx] Fix argument type of node_connected_component (#13612) --- stubs/networkx/networkx/algorithms/components/connected.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/networkx/networkx/algorithms/components/connected.pyi b/stubs/networkx/networkx/algorithms/components/connected.pyi index 67a380977343..55dc4cd2b8fd 100644 --- a/stubs/networkx/networkx/algorithms/components/connected.pyi +++ b/stubs/networkx/networkx/algorithms/components/connected.pyi @@ -11,4 +11,4 @@ def number_connected_components(G: Graph[_Node]): ... @_dispatchable def is_connected(G: Graph[_Node]): ... @_dispatchable -def node_connected_component(G: Graph[_Node], n: str): ... +def node_connected_component(G: Graph[_Node], n: _Node): ... From b8f3db9c78971cb898d6dcad8b1bfe0b2e8313e8 Mon Sep 17 00:00:00 2001 From: Dan Pascu Date: Tue, 11 Mar 2025 18:11:19 +0200 Subject: [PATCH 077/388] Fix return type for GenericAlias.__origin__ for type aliases parameterised with a type (#13619) --- stdlib/types.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stdlib/types.pyi b/stdlib/types.pyi index 57d1ec935a76..849db3ece938 100644 --- a/stdlib/types.pyi +++ b/stdlib/types.pyi @@ -18,7 +18,7 @@ from importlib.machinery import ModuleSpec # pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping from typing import Any, ClassVar, Literal, Mapping, TypeVar, final, overload # noqa: Y022 -from typing_extensions import ParamSpec, Self, TypeVarTuple, deprecated +from typing_extensions import ParamSpec, Self, TypeAliasType, TypeVarTuple, deprecated __all__ = [ "FunctionType", @@ -650,7 +650,7 @@ def coroutine(func: _Fn) -> _Fn: ... if sys.version_info >= (3, 9): class GenericAlias: @property - def __origin__(self) -> type: ... + def __origin__(self) -> type | TypeAliasType: ... @property def __args__(self) -> tuple[Any, ...]: ... 
@property From aa3c1f8fbd2432208dee9167d0b69fa4c276a078 Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Tue, 11 Mar 2025 18:45:40 +0100 Subject: [PATCH 078/388] Annotate `pathlib.Path.{owner,group,is_mount}` on windows (#13613) --- .../@tests/stubtest_allowlists/win32-py310.txt | 14 -------------- .../@tests/stubtest_allowlists/win32-py311.txt | 14 -------------- .../@tests/stubtest_allowlists/win32-py312.txt | 4 ---- stdlib/@tests/stubtest_allowlists/win32-py39.txt | 14 -------------- stdlib/pathlib.pyi | 16 +++++++++++----- 5 files changed, 11 insertions(+), 51 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/win32-py310.txt b/stdlib/@tests/stubtest_allowlists/win32-py310.txt index 555f97cd49d9..4bc95349eedb 100644 --- a/stdlib/@tests/stubtest_allowlists/win32-py310.txt +++ b/stdlib/@tests/stubtest_allowlists/win32-py310.txt @@ -44,16 +44,6 @@ xml.parsers.expat.XMLParserType.SetReparseDeferralEnabled xml.sax.expatreader.ExpatParser.flush -# ============================================================= -# Allowlist entries that cannot or should not be fixed; <= 3.11 -# ============================================================= - -# pathlib methods that exist on Windows, but always raise NotImplementedError, -# so are omitted from the stub -pathlib.Path.is_mount -pathlib.WindowsPath.is_mount - - # ============================================================= # Allowlist entries that cannot or should not be fixed; <= 3.12 # ============================================================= @@ -63,7 +53,3 @@ crypt nis ossaudiodev spwd - -# pathlib functions that rely on modules that don't exist on Windows -pathlib.Path.owner -pathlib.Path.group diff --git a/stdlib/@tests/stubtest_allowlists/win32-py311.txt b/stdlib/@tests/stubtest_allowlists/win32-py311.txt index 0078cb02632f..771e0a090c78 100644 --- a/stdlib/@tests/stubtest_allowlists/win32-py311.txt +++ b/stdlib/@tests/stubtest_allowlists/win32-py311.txt @@ -11,16 +11,6 @@ email.utils.getaddresses email.utils.parseaddr -# ============================================================= -# Allowlist entries that cannot or should not be fixed; <= 3.11 -# ============================================================= - -# pathlib methods that exist on Windows, but always raise NotImplementedError, -# so are omitted from the stub -pathlib.Path.is_mount -pathlib.WindowsPath.is_mount - - # ============================================================= # Allowlist entries that cannot or should not be fixed; <= 3.12 # ============================================================= @@ -30,7 +20,3 @@ crypt nis ossaudiodev spwd - -# pathlib functions that rely on modules that don't exist on Windows -pathlib.Path.owner -pathlib.Path.group diff --git a/stdlib/@tests/stubtest_allowlists/win32-py312.txt b/stdlib/@tests/stubtest_allowlists/win32-py312.txt index aa2a6d30322a..8c7e5fa72a73 100644 --- a/stdlib/@tests/stubtest_allowlists/win32-py312.txt +++ b/stdlib/@tests/stubtest_allowlists/win32-py312.txt @@ -25,7 +25,3 @@ crypt nis ossaudiodev spwd - -# pathlib functions that rely on modules that don't exist on Windows -pathlib.Path.owner -pathlib.Path.group diff --git a/stdlib/@tests/stubtest_allowlists/win32-py39.txt b/stdlib/@tests/stubtest_allowlists/win32-py39.txt index d2b82ecdc98d..40a525566cc5 100644 --- a/stdlib/@tests/stubtest_allowlists/win32-py39.txt +++ b/stdlib/@tests/stubtest_allowlists/win32-py39.txt @@ -53,16 +53,6 @@ xml.parsers.expat.XMLParserType.SetReparseDeferralEnabled xml.sax.expatreader.ExpatParser.flush -# 
============================================================= -# Allowlist entries that cannot or should not be fixed; <= 3.11 -# ============================================================= - -# pathlib methods that exist on Windows, but always raise NotImplementedError, -# so are omitted from the stub -pathlib.Path.is_mount -pathlib.WindowsPath.is_mount - - # ============================================================= # Allowlist entries that cannot or should not be fixed; <= 3.12 # ============================================================= @@ -72,7 +62,3 @@ crypt nis ossaudiodev spwd - -# pathlib functions that rely on modules that don't exist on Windows -pathlib.Path.owner -pathlib.Path.group diff --git a/stdlib/pathlib.pyi b/stdlib/pathlib.pyi index e2a816ae1ca4..a18aed4ba57a 100644 --- a/stdlib/pathlib.pyi +++ b/stdlib/pathlib.pyi @@ -16,7 +16,7 @@ from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWra from os import PathLike, stat_result from types import TracebackType from typing import IO, Any, BinaryIO, ClassVar, Literal, overload -from typing_extensions import Self, deprecated +from typing_extensions import Never, Self, deprecated if sys.version_info >= (3, 9): from types import GenericAlias @@ -226,9 +226,13 @@ class Path(PurePath): def open( self, mode: str, buffering: int = -1, encoding: str | None = None, errors: str | None = None, newline: str | None = None ) -> IO[Any]: ... - if sys.platform != "win32": - # These methods do "exist" on Windows, but they always raise NotImplementedError, - # so it's safer to pretend they don't exist + + # These methods do "exist" on Windows on <3.13, but they always raise NotImplementedError. + if sys.platform == "win32": + if sys.version_info < (3, 13): + def owner(self: Never) -> str: ... # type: ignore[misc] + def group(self: Never) -> str: ... # type: ignore[misc] + else: if sys.version_info >= (3, 13): def owner(self, *, follow_symlinks: bool = True) -> str: ... def group(self, *, follow_symlinks: bool = True) -> str: ... @@ -238,7 +242,9 @@ class Path(PurePath): # This method does "exist" on Windows on <3.12, but always raises NotImplementedError # On py312+, it works properly on Windows, as with all other platforms - if sys.platform != "win32" or sys.version_info >= (3, 12): + if sys.platform == "win32" and sys.version_info < (3, 12): + def is_mount(self: Never) -> bool: ... # type: ignore[misc] + else: def is_mount(self) -> bool: ... 
if sys.version_info >= (3, 9): From 681650c77790eb7215771bc33b40e1c12aaea58d Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 12 Mar 2025 08:25:47 -0400 Subject: [PATCH 079/388] Bump setuptools to 76.0.0 (#13614) --- .../setuptools/@tests/stubtest_allowlist.txt | 10 +- stubs/setuptools/METADATA.toml | 2 +- stubs/setuptools/distutils/ccompiler.pyi | 11 + .../setuptools/distutils/compilers/C/base.pyi | 1 + .../distutils/compilers/C/errors.pyi | 1 + .../setuptools/distutils/compilers/C/msvc.pyi | 1 + .../setuptools/_distutils/_msvccompiler.pyi | 23 +- .../setuptools/_distutils/ccompiler.pyi | 191 ++--------------- .../_distutils/compilers/C/base.pyi | 196 ++++++++++++++++++ .../_distutils/compilers/C/errors.pyi | 6 + .../_distutils/compilers/C/msvc.pyi | 24 +++ .../setuptools/_distutils/errors.pyi | 18 +- 12 files changed, 271 insertions(+), 213 deletions(-) create mode 100644 stubs/setuptools/distutils/compilers/C/base.pyi create mode 100644 stubs/setuptools/distutils/compilers/C/errors.pyi create mode 100644 stubs/setuptools/distutils/compilers/C/msvc.pyi create mode 100644 stubs/setuptools/setuptools/_distutils/compilers/C/base.pyi create mode 100644 stubs/setuptools/setuptools/_distutils/compilers/C/errors.pyi create mode 100644 stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi diff --git a/stubs/setuptools/@tests/stubtest_allowlist.txt b/stubs/setuptools/@tests/stubtest_allowlist.txt index 4ab351051980..ef8af309865d 100644 --- a/stubs/setuptools/@tests/stubtest_allowlist.txt +++ b/stubs/setuptools/@tests/stubtest_allowlist.txt @@ -2,6 +2,9 @@ setuptools.modified.newer_pairwise_group setuptools._distutils._modified.newer_pairwise_group +# Runtime initializes to None, but this really should never be None when used +setuptools._distutils.compilers.C.base.Compiler.compiler_type + # Dynamically created in __init__ setuptools._distutils.dist.Distribution.get_name setuptools._distutils.dist.Distribution.get_version @@ -28,13 +31,6 @@ setuptools._distutils.dist.Distribution.get_obsoletes # Missing objects from setuptools._distutils setuptools._distutils.archive_util.ARCHIVE_FORMATS setuptools._distutils.archive_util.check_archive_formats -setuptools._distutils.ccompiler.CCompiler.EXECUTABLE -setuptools._distutils.ccompiler.CCompiler.SHARED_LIBRARY -setuptools._distutils.ccompiler.CCompiler.SHARED_OBJECT -setuptools._distutils.ccompiler.CCompiler.compiler_type -setuptools._distutils.ccompiler.CCompiler.out_extensions -setuptools._distutils.ccompiler.CCompiler.set_executable -setuptools._distutils.ccompiler.compiler_class setuptools._distutils.cmd.Command.dump_options setuptools._distutils.command.build_clib.show_compilers setuptools._distutils.command.build_ext.extension_name_re diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index b5e6c51d7221..f8c0ecb7c936 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,4 +1,4 @@ -version = "~=75.8.2" +version = "~=76.0.0" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ diff --git a/stubs/setuptools/distutils/ccompiler.pyi b/stubs/setuptools/distutils/ccompiler.pyi index e1770cfdf09d..d8f1af11ef33 100644 --- a/stubs/setuptools/distutils/ccompiler.pyi +++ b/stubs/setuptools/distutils/ccompiler.pyi @@ -1 +1,12 @@ from setuptools._distutils.ccompiler import * +from setuptools._distutils.ccompiler import CCompiler as CCompiler + +__all__ = [ + "CompileError", + "LinkError", + 
"gen_lib_options", + "gen_preprocess_options", + "get_default_compiler", + "new_compiler", + "show_compilers", +] diff --git a/stubs/setuptools/distutils/compilers/C/base.pyi b/stubs/setuptools/distutils/compilers/C/base.pyi new file mode 100644 index 000000000000..5e58ee9cbe68 --- /dev/null +++ b/stubs/setuptools/distutils/compilers/C/base.pyi @@ -0,0 +1 @@ +from setuptools._distutils.compilers.C.base import * diff --git a/stubs/setuptools/distutils/compilers/C/errors.pyi b/stubs/setuptools/distutils/compilers/C/errors.pyi new file mode 100644 index 000000000000..6cc2192bd587 --- /dev/null +++ b/stubs/setuptools/distutils/compilers/C/errors.pyi @@ -0,0 +1 @@ +from setuptools._distutils.compilers.C.errors import * diff --git a/stubs/setuptools/distutils/compilers/C/msvc.pyi b/stubs/setuptools/distutils/compilers/C/msvc.pyi new file mode 100644 index 000000000000..733c0ce32c3b --- /dev/null +++ b/stubs/setuptools/distutils/compilers/C/msvc.pyi @@ -0,0 +1 @@ +from setuptools._distutils.compilers.C.msvc import * diff --git a/stubs/setuptools/setuptools/_distutils/_msvccompiler.pyi b/stubs/setuptools/setuptools/_distutils/_msvccompiler.pyi index fb3046071af7..34d9735b0614 100644 --- a/stubs/setuptools/setuptools/_distutils/_msvccompiler.pyi +++ b/stubs/setuptools/setuptools/_distutils/_msvccompiler.pyi @@ -1,22 +1,3 @@ -from binascii import Incomplete -from typing import ClassVar, Final +from .compilers.C import msvc -from .ccompiler import CCompiler - -PLAT_SPEC_TO_RUNTIME: Final[dict[str, str]] - -class MSVCCompiler(CCompiler): - compiler_type: ClassVar[str] - executables: ClassVar[dict[Incomplete, Incomplete]] - src_extensions: ClassVar[list[str]] - res_extension: ClassVar[str] - obj_extension: ClassVar[str] - static_lib_extension: ClassVar[str] - shared_lib_extension: ClassVar[str] - shared_lib_format: ClassVar[str] - static_lib_format = shared_lib_format - exe_extension: ClassVar[str] - initialized: bool - def initialize(self, plat_name: str | None = None) -> None: ... - @property - def out_extensions(self) -> dict[str, str]: ... +MSVCCompiler = msvc.Compiler diff --git a/stubs/setuptools/setuptools/_distutils/ccompiler.pyi b/stubs/setuptools/setuptools/_distutils/ccompiler.pyi index dd0cddb919fb..cbb794e101a0 100644 --- a/stubs/setuptools/setuptools/_distutils/ccompiler.pyi +++ b/stubs/setuptools/setuptools/_distutils/ccompiler.pyi @@ -1,180 +1,15 @@ -from _typeshed import BytesPath, StrPath, Unused -from collections.abc import Callable, Iterable, MutableSequence, Sequence -from typing import ClassVar, Literal, TypeVar, overload -from typing_extensions import TypeAlias, TypeVarTuple, Unpack +from .compilers.C import base +from .compilers.C.base import gen_lib_options, gen_preprocess_options, get_default_compiler, new_compiler, show_compilers +from .compilers.C.errors import CompileError, LinkError -_Macro: TypeAlias = tuple[str] | tuple[str, str | None] -_StrPathT = TypeVar("_StrPathT", bound=StrPath) -_BytesPathT = TypeVar("_BytesPathT", bound=BytesPath) -_Ts = TypeVarTuple("_Ts") +__all__ = [ + "CompileError", + "LinkError", + "gen_lib_options", + "gen_preprocess_options", + "get_default_compiler", + "new_compiler", + "show_compilers", +] -def gen_lib_options( - compiler: CCompiler, library_dirs: Iterable[str], runtime_library_dirs: Iterable[str], libraries: Iterable[str] -) -> list[str]: ... -def gen_preprocess_options(macros: Iterable[_Macro], include_dirs: Iterable[str]) -> list[str]: ... -def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: ... 
-def new_compiler( - plat: str | None = None, compiler: str | None = None, verbose: bool = False, dry_run: bool = False, force: bool = False -) -> CCompiler: ... -def show_compilers() -> None: ... - -class CCompiler: - src_extensions: ClassVar[list[str] | None] - obj_extension: ClassVar[str | None] - static_lib_extension: ClassVar[str | None] - shared_lib_extension: ClassVar[str | None] - static_lib_format: ClassVar[str | None] - shared_lib_format: ClassVar[str | None] - exe_extension: ClassVar[str | None] - language_map: ClassVar[dict[str, str]] - language_order: ClassVar[list[str]] - dry_run: bool - force: bool - verbose: bool - output_dir: str | None - macros: list[_Macro] - include_dirs: list[str] - libraries: list[str] - library_dirs: list[str] - runtime_library_dirs: list[str] - objects: list[str] - def __init__(self, verbose: bool = False, dry_run: bool = False, force: bool = False) -> None: ... - def add_include_dir(self, dir: str) -> None: ... - def set_include_dirs(self, dirs: list[str]) -> None: ... - def add_library(self, libname: str) -> None: ... - def set_libraries(self, libnames: list[str]) -> None: ... - def add_library_dir(self, dir: str) -> None: ... - def set_library_dirs(self, dirs: list[str]) -> None: ... - def add_runtime_library_dir(self, dir: str) -> None: ... - def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... - def define_macro(self, name: str, value: str | None = None) -> None: ... - def undefine_macro(self, name: str) -> None: ... - def add_link_object(self, object: str) -> None: ... - def set_link_objects(self, objects: list[str]) -> None: ... - def detect_language(self, sources: str | list[str]) -> str | None: ... - def find_library_file(self, dirs: Iterable[str], lib: str, debug: bool = False) -> str | None: ... - def has_function( - self, - funcname: str, - includes: Iterable[str] | None = None, - include_dirs: list[str] | tuple[str, ...] | None = None, - libraries: list[str] | None = None, - library_dirs: list[str] | tuple[str, ...] | None = None, - ) -> bool: ... - def library_dir_option(self, dir: str) -> str: ... - def library_option(self, lib: str) -> str: ... - def runtime_library_dir_option(self, dir: str) -> str: ... - def set_executables(self, **kwargs: str) -> None: ... - def compile( - self, - sources: Sequence[StrPath], - output_dir: str | None = None, - macros: list[_Macro] | None = None, - include_dirs: list[str] | tuple[str, ...] | None = None, - debug: bool = False, - extra_preargs: list[str] | None = None, - extra_postargs: list[str] | None = None, - depends: list[str] | tuple[str, ...] | None = None, - ) -> list[str]: ... - def create_static_lib( - self, - objects: list[str] | tuple[str, ...], - output_libname: str, - output_dir: str | None = None, - debug: bool = False, - target_lang: str | None = None, - ) -> None: ... - def link( - self, - target_desc: str, - objects: list[str] | tuple[str, ...], - output_filename: str, - output_dir: str | None = None, - libraries: list[str] | tuple[str, ...] | None = None, - library_dirs: list[str] | tuple[str, ...] | None = None, - runtime_library_dirs: list[str] | tuple[str, ...] | None = None, - export_symbols: Iterable[str] | None = None, - debug: bool = False, - extra_preargs: list[str] | None = None, - extra_postargs: list[str] | None = None, - build_temp: StrPath | None = None, - target_lang: str | None = None, - ) -> None: ... 
- def link_executable( - self, - objects: list[str] | tuple[str, ...], - output_progname: str, - output_dir: str | None = None, - libraries: list[str] | tuple[str, ...] | None = None, - library_dirs: list[str] | tuple[str, ...] | None = None, - runtime_library_dirs: list[str] | tuple[str, ...] | None = None, - debug: bool = False, - extra_preargs: list[str] | None = None, - extra_postargs: list[str] | None = None, - target_lang: str | None = None, - ) -> None: ... - def link_shared_lib( - self, - objects: list[str] | tuple[str, ...], - output_libname: str, - output_dir: str | None = None, - libraries: list[str] | tuple[str, ...] | None = None, - library_dirs: list[str] | tuple[str, ...] | None = None, - runtime_library_dirs: list[str] | tuple[str, ...] | None = None, - export_symbols: Iterable[str] | None = None, - debug: bool = False, - extra_preargs: list[str] | None = None, - extra_postargs: list[str] | None = None, - build_temp: StrPath | None = None, - target_lang: str | None = None, - ) -> None: ... - def link_shared_object( - self, - objects: list[str] | tuple[str, ...], - output_filename: str, - output_dir: str | None = None, - libraries: list[str] | tuple[str, ...] | None = None, - library_dirs: list[str] | tuple[str, ...] | None = None, - runtime_library_dirs: list[str] | tuple[str, ...] | None = None, - export_symbols: Iterable[str] | None = None, - debug: bool = False, - extra_preargs: list[str] | None = None, - extra_postargs: list[str] | None = None, - build_temp: StrPath | None = None, - target_lang: str | None = None, - ) -> None: ... - def preprocess( - self, - source: StrPath, - output_file: StrPath | None = None, - macros: list[_Macro] | None = None, - include_dirs: list[str] | tuple[str, ...] | None = None, - extra_preargs: list[str] | None = None, - extra_postargs: Iterable[str] | None = None, - ) -> None: ... - @overload - def executable_filename(self, basename: str, strip_dir: Literal[False] = False, output_dir: StrPath = "") -> str: ... - @overload - def executable_filename(self, basename: StrPath, strip_dir: Literal[True], output_dir: StrPath = "") -> str: ... - def library_filename( - self, libname: str, lib_type: str = "static", strip_dir: bool = False, output_dir: StrPath = "" - ) -> str: ... - def object_filenames( - self, source_filenames: Iterable[StrPath], strip_dir: bool = False, output_dir: StrPath | None = "" - ) -> list[str]: ... - @overload - def shared_object_filename(self, basename: str, strip_dir: Literal[False] = False, output_dir: StrPath = "") -> str: ... - @overload - def shared_object_filename(self, basename: StrPath, strip_dir: Literal[True], output_dir: StrPath = "") -> str: ... - def execute( - self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 - ) -> None: ... - def spawn(self, cmd: MutableSequence[bytes | StrPath]) -> None: ... - def mkpath(self, name: str, mode: int = 0o777) -> None: ... - @overload - def move_file(self, src: StrPath, dst: _StrPathT) -> _StrPathT | str: ... - @overload - def move_file(self, src: BytesPath, dst: _BytesPathT) -> _BytesPathT | bytes: ... - def announce(self, msg: str, level: int = 1) -> None: ... - def warn(self, msg: str) -> None: ... - def debug_print(self, msg: str) -> None: ... 
+CCompiler = base.Compiler diff --git a/stubs/setuptools/setuptools/_distutils/compilers/C/base.pyi b/stubs/setuptools/setuptools/_distutils/compilers/C/base.pyi new file mode 100644 index 000000000000..f33e86e6ba9f --- /dev/null +++ b/stubs/setuptools/setuptools/_distutils/compilers/C/base.pyi @@ -0,0 +1,196 @@ +from _typeshed import BytesPath, Incomplete, StrPath, Unused +from collections.abc import Callable, Iterable, MutableSequence, Sequence +from typing import ClassVar, Final, Literal, TypeVar, overload +from typing_extensions import TypeAlias, TypeVarTuple, Unpack + +_Macro: TypeAlias = tuple[str] | tuple[str, str | None] +_StrPathT = TypeVar("_StrPathT", bound=StrPath) +_BytesPathT = TypeVar("_BytesPathT", bound=BytesPath) +_Ts = TypeVarTuple("_Ts") + +class Compiler: + compiler_type: ClassVar[str] + executables: ClassVar[dict[str, Incomplete]] + + # Subclasses that rely on the standard filename generation methods + # implemented below should override these + src_extensions: ClassVar[list[str] | None] + obj_extension: ClassVar[str | None] + static_lib_extension: ClassVar[str | None] + shared_lib_extension: ClassVar[str | None] + static_lib_format: ClassVar[str | None] + shared_lib_format: ClassVar[str | None] + exe_extension: ClassVar[str | None] + + language_map: ClassVar[dict[str, str]] + language_order: ClassVar[list[str]] + dry_run: bool + force: bool + verbose: bool + output_dir: str | None + macros: list[_Macro] + include_dirs: list[str] + libraries: list[str] + library_dirs: list[str] + runtime_library_dirs: list[str] + objects: list[str] + + SHARED_OBJECT: Final = "shared_object" + SHARED_LIBRARY: Final = "shared_library" + EXECUTABLE: Final = "executable" + def __init__(self, verbose: bool = False, dry_run: bool = False, force: bool = False) -> None: ... + def add_include_dir(self, dir: str) -> None: ... + def set_include_dirs(self, dirs: list[str]) -> None: ... + def add_library(self, libname: str) -> None: ... + def set_libraries(self, libnames: list[str]) -> None: ... + def add_library_dir(self, dir: str) -> None: ... + def set_library_dirs(self, dirs: list[str]) -> None: ... + def add_runtime_library_dir(self, dir: str) -> None: ... + def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... + def define_macro(self, name: str, value: str | None = None) -> None: ... + def undefine_macro(self, name: str) -> None: ... + def add_link_object(self, object: str) -> None: ... + def set_link_objects(self, objects: list[str]) -> None: ... + def detect_language(self, sources: str | list[str]) -> str | None: ... + def find_library_file(self, dirs: Iterable[str], lib: str, debug: bool = False) -> str | None: ... + def has_function( + self, + funcname: str, + includes: Iterable[str] | None = None, + include_dirs: list[str] | tuple[str, ...] | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | tuple[str, ...] | None = None, + ) -> bool: ... + def library_dir_option(self, dir: str) -> str: ... + def library_option(self, lib: str) -> str: ... + def runtime_library_dir_option(self, dir: str) -> str: ... + def set_executables(self, **kwargs: str) -> None: ... + def set_executable(self, key: str, value) -> None: ... + def compile( + self, + sources: Sequence[StrPath], + output_dir: str | None = None, + macros: list[_Macro] | None = None, + include_dirs: list[str] | tuple[str, ...] | None = None, + debug: bool = False, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + depends: list[str] | tuple[str, ...] 
| None = None, + ) -> list[str]: ... + def create_static_lib( + self, + objects: list[str] | tuple[str, ...], + output_libname: str, + output_dir: str | None = None, + debug: bool = False, + target_lang: str | None = None, + ) -> None: ... + def link( + self, + target_desc: str, + objects: list[str] | tuple[str, ...], + output_filename: str, + output_dir: str | None = None, + libraries: list[str] | tuple[str, ...] | None = None, + library_dirs: list[str] | tuple[str, ...] | None = None, + runtime_library_dirs: list[str] | tuple[str, ...] | None = None, + export_symbols: Iterable[str] | None = None, + debug: bool = False, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: StrPath | None = None, + target_lang: str | None = None, + ) -> None: ... + def link_executable( + self, + objects: list[str] | tuple[str, ...], + output_progname: str, + output_dir: str | None = None, + libraries: list[str] | tuple[str, ...] | None = None, + library_dirs: list[str] | tuple[str, ...] | None = None, + runtime_library_dirs: list[str] | tuple[str, ...] | None = None, + debug: bool = False, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + target_lang: str | None = None, + ) -> None: ... + def link_shared_lib( + self, + objects: list[str] | tuple[str, ...], + output_libname: str, + output_dir: str | None = None, + libraries: list[str] | tuple[str, ...] | None = None, + library_dirs: list[str] | tuple[str, ...] | None = None, + runtime_library_dirs: list[str] | tuple[str, ...] | None = None, + export_symbols: Iterable[str] | None = None, + debug: bool = False, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: StrPath | None = None, + target_lang: str | None = None, + ) -> None: ... + def link_shared_object( + self, + objects: list[str] | tuple[str, ...], + output_filename: str, + output_dir: str | None = None, + libraries: list[str] | tuple[str, ...] | None = None, + library_dirs: list[str] | tuple[str, ...] | None = None, + runtime_library_dirs: list[str] | tuple[str, ...] | None = None, + export_symbols: Iterable[str] | None = None, + debug: bool = False, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: StrPath | None = None, + target_lang: str | None = None, + ) -> None: ... + def preprocess( + self, + source: StrPath, + output_file: StrPath | None = None, + macros: list[_Macro] | None = None, + include_dirs: list[str] | tuple[str, ...] | None = None, + extra_preargs: list[str] | None = None, + extra_postargs: Iterable[str] | None = None, + ) -> None: ... + @overload + def executable_filename(self, basename: str, strip_dir: Literal[False] = False, output_dir: StrPath = "") -> str: ... + @overload + def executable_filename(self, basename: StrPath, strip_dir: Literal[True], output_dir: StrPath = "") -> str: ... + def library_filename( + self, libname: str, lib_type: str = "static", strip_dir: bool = False, output_dir: StrPath = "" + ) -> str: ... + @property + def out_extensions(self) -> dict[str, str]: ... + def object_filenames( + self, source_filenames: Iterable[StrPath], strip_dir: bool = False, output_dir: StrPath | None = "" + ) -> list[str]: ... + @overload + def shared_object_filename(self, basename: str, strip_dir: Literal[False] = False, output_dir: StrPath = "") -> str: ... + @overload + def shared_object_filename(self, basename: StrPath, strip_dir: Literal[True], output_dir: StrPath = "") -> str: ... 
+ def execute( + self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 + ) -> None: ... + def spawn(self, cmd: MutableSequence[bytes | StrPath]) -> None: ... + def mkpath(self, name: str, mode: int = 0o777) -> None: ... + @overload + def move_file(self, src: StrPath, dst: _StrPathT) -> _StrPathT | str: ... + @overload + def move_file(self, src: BytesPath, dst: _BytesPathT) -> _BytesPathT | bytes: ... + def announce(self, msg: str, level: int = 1) -> None: ... + def warn(self, msg: str) -> None: ... + def debug_print(self, msg: str) -> None: ... + +def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: ... + +compiler_class: dict[str, tuple[str, str, str]] + +def show_compilers() -> None: ... +def new_compiler( + plat: str | None = None, compiler: str | None = None, verbose: bool = False, dry_run: bool = False, force: bool = False +) -> Compiler: ... +def gen_preprocess_options(macros: Iterable[_Macro], include_dirs: Iterable[str]) -> list[str]: ... +def gen_lib_options( + compiler: Compiler, library_dirs: Iterable[str], runtime_library_dirs: Iterable[str], libraries: Iterable[str] +) -> list[str]: ... diff --git a/stubs/setuptools/setuptools/_distutils/compilers/C/errors.pyi b/stubs/setuptools/setuptools/_distutils/compilers/C/errors.pyi new file mode 100644 index 000000000000..5c3cf1e9f0c6 --- /dev/null +++ b/stubs/setuptools/setuptools/_distutils/compilers/C/errors.pyi @@ -0,0 +1,6 @@ +class Error(Exception): ... +class PreprocessError(Error): ... +class CompileError(Error): ... +class LibError(Error): ... +class LinkError(Error): ... +class UnknownFileType(Error): ... diff --git a/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi b/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi new file mode 100644 index 000000000000..2b419aa986d7 --- /dev/null +++ b/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from typing import ClassVar, Final + +from . import base + +PLAT_SPEC_TO_RUNTIME: Final[dict[str, str]] + +class Compiler(base.Compiler): + compiler_type: ClassVar[str] + executables: ClassVar[dict[str, Incomplete]] + src_extensions: ClassVar[list[str]] + res_extension: ClassVar[str] + obj_extension: ClassVar[str] + static_lib_extension: ClassVar[str] + shared_lib_extension: ClassVar[str] + # This was accidentally removed upstream and should be back pretty soon. + # shared_lib_format: ClassVar[str] + # static_lib_format = shared_lib_format + static_lib_format: ClassVar[str] + exe_extension: ClassVar[str] + initialized: bool + def initialize(self, plat_name: str | None = None) -> None: ... + @property + def out_extensions(self) -> dict[str, str]: ... diff --git a/stubs/setuptools/setuptools/_distutils/errors.pyi b/stubs/setuptools/setuptools/_distutils/errors.pyi index e483362bfbf1..79eb7f7baa0f 100644 --- a/stubs/setuptools/setuptools/_distutils/errors.pyi +++ b/stubs/setuptools/setuptools/_distutils/errors.pyi @@ -1,3 +1,15 @@ +from .compilers.C.errors import ( + CompileError as CompileError, + Error as _Error, + LibError as LibError, + LinkError as LinkError, + PreprocessError as PreprocessError, + UnknownFileType as _UnknownFileType, +) + +CCompilerError = _Error +UnknownFileError = _UnknownFileType + class DistutilsError(Exception): ... class DistutilsModuleError(DistutilsError): ... class DistutilsClassError(DistutilsError): ... @@ -11,9 +23,3 @@ class DistutilsExecError(DistutilsError): ... 
class DistutilsInternalError(DistutilsError): ... class DistutilsTemplateError(DistutilsError): ... class DistutilsByteCompileError(DistutilsError): ... -class CCompilerError(Exception): ... -class PreprocessError(CCompilerError): ... -class CompileError(CCompilerError): ... -class LibError(CCompilerError): ... -class LinkError(CCompilerError): ... -class UnknownFileError(CCompilerError): ... From ad720968fedec44bfdd5628c1ab63475ab8c4f9f Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 12 Mar 2025 08:33:04 -0400 Subject: [PATCH 080/388] Allow `uv run` to be used without `--no-project` (#13599) --- .gitignore | 4 +++- pyproject.toml | 7 +++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 4c5c5703eb12..edd0a9568bc0 100644 --- a/.gitignore +++ b/.gitignore @@ -68,8 +68,10 @@ analyze.py # Mypy cache .mypy_cache/ -# pyenv local python version +# pyenv and uv local python version .python-version +# we don't use uv's lock as we're not actually a project +uv.lock # deliberately local test configuration files stdlib/@tests/stubtest_allowlists/*.local diff --git a/pyproject.toml b/pyproject.toml index 2d6e8f2ea013..7adabb9eab66 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,10 @@ +[project] +# This section is needed to avoid writing --no-project everytime when using "uv run" +# https://github.com/astral-sh/uv/issues/8666 +name = "typeshed" +version = "0" +requires-python = ">=3.9" # Minimum version to run tests, used by uv run + [tool.black] line-length = 130 target-version = ["py310"] From 0512919144bee2d2c86ee94b86927947aa814d4e Mon Sep 17 00:00:00 2001 From: Abdrakhman <54412983+guitvcer@users.noreply.github.com> Date: Thu, 13 Mar 2025 14:03:50 +0500 Subject: [PATCH 081/388] Set `hatchling` as the build system for `ts_utils` (#13622) --- lib/pyproject.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lib/pyproject.toml b/lib/pyproject.toml index c72817077e3e..34720aa9acba 100644 --- a/lib/pyproject.toml +++ b/lib/pyproject.toml @@ -1 +1,9 @@ # Utilities for typeshed infrastructure scripts. + +[project] +name = "ts_utils" +version = "0.0.0" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" From b5e32701a0ce1d97ac5458ae0e944e4df82b8254 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 14 Mar 2025 06:55:01 -0400 Subject: [PATCH 082/388] Fix mypy test temporary config file creation (#13620) --- tests/mypy_test.py | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/tests/mypy_test.py b/tests/mypy_test.py index 15cd44bb8b8a..b5e91bd8af58 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -5,12 +5,14 @@ import argparse import concurrent.futures +import functools import os import subprocess import sys import tempfile import time from collections import defaultdict +from collections.abc import Generator from dataclasses import dataclass from enum import Enum from itertools import product @@ -44,6 +46,24 @@ print_error("Cannot import mypy. 
Did you install it?") sys.exit(1) +# We need to work around a limitation of tempfile.NamedTemporaryFile on Windows +# For details, see https://github.com/python/typeshed/pull/13620#discussion_r1990185997 +# Python 3.12 added a workaround with `tempfile.NamedTemporaryFile("w+", delete_on_close=False)` +if sys.platform != "win32": + _named_temporary_file = functools.partial(tempfile.NamedTemporaryFile, "w+") +else: + from contextlib import contextmanager + + @contextmanager + def _named_temporary_file() -> Generator[tempfile._TemporaryFileWrapper[str]]: # pyright: ignore[reportPrivateUsage] + temp = tempfile.NamedTemporaryFile("w+", delete=False) # noqa: SIM115 + try: + yield temp + finally: + temp.close() + os.remove(temp.name) + + SUPPORTED_VERSIONS = ["3.13", "3.12", "3.11", "3.10", "3.9"] SUPPORTED_PLATFORMS = ("linux", "win32", "darwin") DIRECTORIES_TO_TEST = [STDLIB_PATH, STUBS_PATH] @@ -214,7 +234,8 @@ def run_mypy( env_vars = dict(os.environ) if mypypath is not None: env_vars["MYPYPATH"] = mypypath - with tempfile.NamedTemporaryFile("w+") as temp: + + with _named_temporary_file() as temp: temp.write("[mypy]\n") for dist_conf in configurations: temp.write(f"[mypy-{dist_conf.module_name}]\n") @@ -290,7 +311,7 @@ def add_third_party_files( if name.startswith("."): continue add_files(files, (root / name), args) - add_configuration(configurations, distribution) + add_configuration(configurations, distribution) class TestResult(NamedTuple): From 099804b7fe4702477dd54c663051789e81761e66 Mon Sep 17 00:00:00 2001 From: Ageev Maxim <90645107+ApostolFet@users.noreply.github.com> Date: Fri, 14 Mar 2025 23:24:14 +0300 Subject: [PATCH 083/388] [protobuf] Add missing properties for class FieldDescriptor (#13625) --- stubs/protobuf/google/protobuf/descriptor.pyi | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/stubs/protobuf/google/protobuf/descriptor.pyi b/stubs/protobuf/google/protobuf/descriptor.pyi index 291ca0c72968..45891fda76c5 100644 --- a/stubs/protobuf/google/protobuf/descriptor.pyi +++ b/stubs/protobuf/google/protobuf/descriptor.pyi @@ -156,6 +156,12 @@ class FieldDescriptor(DescriptorBase): cpp_type: Any @property def label(self): ... + @property + def camelcase_name(self) -> str: ... + @property + def has_presence(self) -> bool: ... + @property + def is_packed(self) -> bool: ... has_default_value: Any default_value: Any containing_type: Any From f4a8c8a3f2450d362d49f8d291fe038552ba5bb5 Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Sat, 15 Mar 2025 18:51:36 +0100 Subject: [PATCH 084/388] Add `types.UnionType.__parameters__` (#13628) annotate the `types.UnionType.__parameters__` property --- stdlib/types.pyi | 2 ++ 1 file changed, 2 insertions(+) diff --git a/stdlib/types.pyi b/stdlib/types.pyi index 849db3ece938..542979d4afc5 100644 --- a/stdlib/types.pyi +++ b/stdlib/types.pyi @@ -687,6 +687,8 @@ if sys.version_info >= (3, 10): class UnionType: @property def __args__(self) -> tuple[Any, ...]: ... + @property + def __parameters__(self) -> tuple[Any, ...]: ... def __or__(self, value: Any, /) -> UnionType: ... def __ror__(self, value: Any, /) -> UnionType: ... def __eq__(self, value: object, /) -> bool: ... 
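(Aside, not part of any patch in this series: the `__parameters__` property annotated above exists at runtime on `types.UnionType` objects on Python 3.10+, which is why the stub adds it inside the 3.10 version guard. A minimal sketch of the behaviour; the variable names are invented for the example and the printed values are approximate reprs:)

    from typing import TypeVar

    T = TypeVar("T")

    generic_union = list[T] | None   # a types.UnionType that still carries a free type variable
    concrete_union = int | str       # a fully concrete union

    print(generic_union.__parameters__)   # (~T,)
    print(concrete_union.__parameters__)  # ()
    print(concrete_union.__args__)        # (<class 'int'>, <class 'str'>)
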
From d1b67180c65f8278d1394de4d991765dae485cac Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Sat, 15 Mar 2025 20:07:47 +0100 Subject: [PATCH 085/388] Some minor `statistics.NormalDist` adjustments (#13626) Co-authored-by: Alex Waygood --- stdlib/statistics.pyi | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/stdlib/statistics.pyi b/stdlib/statistics.pyi index c8ecbbceab1a..9418bdea9d6d 100644 --- a/stdlib/statistics.pyi +++ b/stdlib/statistics.pyi @@ -3,7 +3,7 @@ from _typeshed import SupportsRichComparisonT from collections.abc import Callable, Hashable, Iterable, Sequence from decimal import Decimal from fractions import Fraction -from typing import Any, Literal, NamedTuple, SupportsFloat, TypeVar +from typing import Literal, NamedTuple, SupportsFloat, SupportsIndex, TypeVar from typing_extensions import Self, TypeAlias __all__ = [ @@ -38,6 +38,9 @@ _NumberT = TypeVar("_NumberT", float, Decimal, Fraction) # Used in mode, multimode _HashableT = TypeVar("_HashableT", bound=Hashable) +# Used in NormalDist.samples and kde_random +_Seed: TypeAlias = int | float | str | bytes | bytearray # noqa: Y041 + class StatisticsError(ValueError): ... if sys.version_info >= (3, 11): @@ -89,7 +92,7 @@ class NormalDist: def variance(self) -> float: ... @classmethod def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: ... - def samples(self, n: int, *, seed: Any | None = None) -> list[float]: ... + def samples(self, n: SupportsIndex, *, seed: _Seed | None = None) -> list[float]: ... def pdf(self, x: float) -> float: ... def cdf(self, x: float) -> float: ... def inv_cdf(self, p: float) -> float: ... @@ -98,15 +101,15 @@ class NormalDist: if sys.version_info >= (3, 9): def zscore(self, x: float) -> float: ... - def __eq__(self, x2: object) -> bool: ... - def __add__(self, x2: float | NormalDist) -> NormalDist: ... - def __sub__(self, x2: float | NormalDist) -> NormalDist: ... - def __mul__(self, x2: float) -> NormalDist: ... - def __truediv__(self, x2: float) -> NormalDist: ... - def __pos__(self) -> NormalDist: ... - def __neg__(self) -> NormalDist: ... + def __eq__(x1, x2: object) -> bool: ... + def __add__(x1, x2: float | NormalDist) -> NormalDist: ... + def __sub__(x1, x2: float | NormalDist) -> NormalDist: ... + def __mul__(x1, x2: float) -> NormalDist: ... + def __truediv__(x1, x2: float) -> NormalDist: ... + def __pos__(x1) -> NormalDist: ... + def __neg__(x1) -> NormalDist: ... __radd__ = __add__ - def __rsub__(self, x2: float | NormalDist) -> NormalDist: ... + def __rsub__(x1, x2: float | NormalDist) -> NormalDist: ... __rmul__ = __mul__ def __hash__(self) -> int: ... @@ -153,9 +156,5 @@ if sys.version_info >= (3, 13): data: Sequence[float], h: float, kernel: _Kernel = "normal", *, cumulative: bool = False ) -> Callable[[float], float]: ... def kde_random( - data: Sequence[float], - h: float, - kernel: _Kernel = "normal", - *, - seed: int | float | str | bytes | bytearray | None = None, # noqa: Y041 + data: Sequence[float], h: float, kernel: _Kernel = "normal", *, seed: _Seed | None = None ) -> Callable[[], float]: ... 
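(Aside, not part of any patch in this series: a rough usage sketch of the `NormalDist` signatures adjusted above, namely `samples()` taking any `SupportsIndex` count plus the shared `_Seed` seed types, and the arithmetic dunders. The numbers are made up purely for illustration:)

    from statistics import NormalDist

    iq = NormalDist(mu=100.0, sigma=15.0)

    # seed accepts the same kinds of values as random.seed():
    # int, float, str, bytes or bytearray
    draws = iq.samples(5, seed="reproducible")

    shifted = iq + 10     # translates the distribution: NormalDist(mu=110.0, sigma=15.0)
    scaled = 2 * iq       # scales mean and stdev: NormalDist(mu=200.0, sigma=30.0)
    print(len(draws), shifted.mean, scaled.stdev)
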
From fde9b681be54be9c9dc1da66e48edddf9bfe2236 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Sun, 16 Mar 2025 15:33:55 +0100 Subject: [PATCH 086/388] [google-cloud-ndb] Remove unncessary stubtest requirements (#13631) --- stubs/google-cloud-ndb/METADATA.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/stubs/google-cloud-ndb/METADATA.toml b/stubs/google-cloud-ndb/METADATA.toml index a24360431107..a0eac2308f1a 100644 --- a/stubs/google-cloud-ndb/METADATA.toml +++ b/stubs/google-cloud-ndb/METADATA.toml @@ -3,5 +3,4 @@ upstream_repository = "https://github.com/googleapis/python-ndb" partial_stub = true [tool.stubtest] -stubtest_requirements = ["protobuf==3.20.2", "six"] ignore_missing_stub = true From e1a6423fc2d06150148bf3cebce65b84b9455140 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Sun, 16 Mar 2025 16:06:43 +0100 Subject: [PATCH 087/388] [CI] Quieten apt-get (#13629) Don't print progress when installing packages via apt-get. This prevent spurious output in CI logs. --- .github/workflows/daily.yml | 2 +- .github/workflows/stubtest_third_party.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index 7dac40f12878..19712022aa51 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -80,7 +80,7 @@ jobs: if [ "${{ runner.os }}" = "Linux" ]; then if [ -n "$PACKAGES" ]; then - sudo apt-get update && sudo apt-get install -y $PACKAGES + sudo apt-get update -q && sudo apt-get install -qy $PACKAGES fi PYTHON_EXECUTABLE="xvfb-run python" diff --git a/.github/workflows/stubtest_third_party.yml b/.github/workflows/stubtest_third_party.yml index 4b4b1dd0e47b..18b8ee69586c 100644 --- a/.github/workflows/stubtest_third_party.yml +++ b/.github/workflows/stubtest_third_party.yml @@ -67,7 +67,7 @@ jobs: if [ "${{ runner.os }}" = "Linux" ]; then if [ -n "$PACKAGES" ]; then echo "Installing apt packages: $PACKAGES" - sudo apt-get update && sudo apt-get install -y $PACKAGES + sudo apt-get update -q && sudo apt-get install -qy $PACKAGES fi PYTHON_EXECUTABLE="xvfb-run python" From bceaa18b0ce518a6c4e4a49656cb64de8c00aafd Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Sun, 16 Mar 2025 20:40:38 +0100 Subject: [PATCH 088/388] Move error handling to main function (#13630) * This makes is easier to call `run_stubtest()` manually, for example when testing the script - no need to construct an `ArgumentParser` instance. * This concentrates argument error handling in the `main()` function and prevents an unexpected process exit when calling `run_stubtest()`. --- tests/stubtest_third_party.py | 27 +++++++++------------------ 1 file changed, 9 insertions(+), 18 deletions(-) diff --git a/tests/stubtest_third_party.py b/tests/stubtest_third_party.py index a83d4fbc572f..379ba718d1fd 100755 --- a/tests/stubtest_third_party.py +++ b/tests/stubtest_third_party.py @@ -31,20 +31,12 @@ def run_stubtest( - dist: Path, - *, - parser: argparse.ArgumentParser, - verbose: bool = False, - specified_platforms_only: bool = False, - keep_tmp_dir: bool = False, + dist: Path, *, verbose: bool = False, specified_platforms_only: bool = False, keep_tmp_dir: bool = False ) -> bool: """Run stubtest for a single distribution.""" dist_name = dist.name - try: - metadata = read_metadata(dist_name) - except NoSuchStubError as e: - parser.error(str(e)) + metadata = read_metadata(dist_name) print(f"{dist_name}... 
", end="", flush=True) t = time() @@ -410,14 +402,13 @@ def main() -> NoReturn: for i, dist in enumerate(dists): if i % args.num_shards != args.shard_index: continue - if not run_stubtest( - dist, - parser=parser, - verbose=args.verbose, - specified_platforms_only=args.specified_platforms_only, - keep_tmp_dir=args.keep_tmp_dir, - ): - result = 1 + try: + if not run_stubtest( + dist, verbose=args.verbose, specified_platforms_only=args.specified_platforms_only, keep_tmp_dir=args.keep_tmp_dir + ): + result = 1 + except NoSuchStubError as e: + parser.error(str(e)) sys.exit(result) From dbc706aca137ffaa10b70cdedda7528f620ec48f Mon Sep 17 00:00:00 2001 From: aldonadi Date: Mon, 17 Mar 2025 01:26:26 -0400 Subject: [PATCH 089/388] RPi.GPIO: Fix typo in name of `ChangeFrequency` method (#13634) --- stubs/RPi.GPIO/RPi/GPIO/__init__.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/RPi.GPIO/RPi/GPIO/__init__.pyi b/stubs/RPi.GPIO/RPi/GPIO/__init__.pyi index 8222ec224e62..861eb9a170da 100644 --- a/stubs/RPi.GPIO/RPi/GPIO/__init__.pyi +++ b/stubs/RPi.GPIO/RPi/GPIO/__init__.pyi @@ -62,5 +62,5 @@ class PWM: def __init__(self, channel: int, frequency: float, /) -> None: ... def start(self, dutycycle: float, /) -> None: ... def ChangeDutyCycle(self, dutycycle: float, /) -> None: ... - def ChangeFrequence(self, frequency: float, /) -> None: ... + def ChangeFrequency(self, frequency: float, /) -> None: ... def stop(self) -> None: ... From f686f91434ae021f56832bdaa76ca1a8bfc3c909 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 17 Mar 2025 15:06:16 +0100 Subject: [PATCH 090/388] [CI] Shorten job names (#13635) When looking at GitHub's action runs, the individual jobs are listed in the left sidebar, but since the job names are fairly long, they get cut off, leading to a lot of guessing when finding the correct job. Shorter job names should fix this. 
Co-authored-by: Alex Waygood --- .github/workflows/daily.yml | 8 ++++---- .github/workflows/meta_tests.yml | 4 ++-- .github/workflows/stubsabot.yml | 2 +- .github/workflows/stubtest_stdlib.yml | 2 +- .github/workflows/stubtest_third_party.yml | 2 +- .github/workflows/tests.yml | 10 +++++----- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index 19712022aa51..90823ea70308 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -29,7 +29,7 @@ env: jobs: stubtest-stdlib: - name: Check stdlib with stubtest + name: "stubtest: stdlib" if: ${{ github.repository == 'python/typeshed' || github.event_name == 'workflow_dispatch' }} runs-on: ${{ matrix.os }} strategy: @@ -54,7 +54,7 @@ jobs: run: python tests/stubtest_stdlib.py stubtest-third-party: - name: Check third party stubs with stubtest + name: "stubtest: third party" if: ${{ github.repository == 'python/typeshed' || github.event_name == 'workflow_dispatch' }} runs-on: ${{ matrix.os }} strategy: @@ -99,7 +99,7 @@ jobs: $PYTHON_EXECUTABLE tests/stubtest_third_party.py --specified-platforms-only --num-shards 4 --shard-index ${{ matrix.shard-index }} stub-uploader: - name: Run the stub_uploader tests + name: stub_uploader tests if: ${{ github.repository == 'python/typeshed' || github.event_name == 'workflow_dispatch' }} runs-on: ubuntu-latest steps: @@ -124,7 +124,7 @@ jobs: # https://github.community/t/run-github-actions-job-only-if-previous-job-has-failed/174786/2 create-issue-on-failure: - name: Create an issue if daily tests failed + name: Create issue on failure runs-on: ubuntu-latest needs: [stubtest-stdlib, stubtest-third-party, stub-uploader] if: ${{ github.repository == 'python/typeshed' && always() && github.event_name == 'schedule' && (needs.stubtest-stdlib.result == 'failure' || needs.stubtest-third-party.result == 'failure' || needs.stub-uploader.result == 'failure') }} diff --git a/.github/workflows/meta_tests.yml b/.github/workflows/meta_tests.yml index d2e77ab958df..d3749c72b2d3 100644 --- a/.github/workflows/meta_tests.yml +++ b/.github/workflows/meta_tests.yml @@ -28,7 +28,7 @@ concurrency: jobs: mypy: - name: Run mypy against the scripts and tests directories + name: Check scripts and tests with mypy runs-on: ubuntu-latest strategy: matrix: @@ -43,7 +43,7 @@ jobs: - run: uv pip install -r requirements-tests.txt --system - run: python ./tests/typecheck_typeshed.py --platform=${{ matrix.platform }} pyright: - name: Run pyright against the scripts and tests directories + name: Check scripts and tests with pyright runs-on: ubuntu-latest strategy: matrix: diff --git a/.github/workflows/stubsabot.yml b/.github/workflows/stubsabot.yml index 5fb3265a6c5e..ef9a4e40cea0 100644 --- a/.github/workflows/stubsabot.yml +++ b/.github/workflows/stubsabot.yml @@ -39,7 +39,7 @@ jobs: # https://github.community/t/run-github-actions-job-only-if-previous-job-has-failed/174786/2 create-issue-on-failure: - name: Create an issue if stubsabot failed + name: Create issue on failure runs-on: ubuntu-latest needs: [stubsabot] if: ${{ github.repository == 'python/typeshed' && always() && (needs.stubsabot.result == 'failure') }} diff --git a/.github/workflows/stubtest_stdlib.yml b/.github/workflows/stubtest_stdlib.yml index a13d747c2504..b2ac305aefd2 100644 --- a/.github/workflows/stubtest_stdlib.yml +++ b/.github/workflows/stubtest_stdlib.yml @@ -26,7 +26,7 @@ concurrency: jobs: stubtest-stdlib: - name: Check stdlib with stubtest + name: "stubtest: stdlib" 
runs-on: ${{ matrix.os }} strategy: matrix: diff --git a/.github/workflows/stubtest_third_party.yml b/.github/workflows/stubtest_third_party.yml index 18b8ee69586c..8e7105b9ddf0 100644 --- a/.github/workflows/stubtest_third_party.yml +++ b/.github/workflows/stubtest_third_party.yml @@ -27,7 +27,7 @@ concurrency: jobs: stubtest-third-party: - name: Check third party stubs with stubtest + name: "stubtest: third party" runs-on: ${{ matrix.os }} strategy: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index da646296677b..eabca8c7f28f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -35,7 +35,7 @@ jobs: - run: python ./tests/check_typeshed_structure.py pytype: - name: Run pytype against the stubs + name: "pytype: Check stubs" runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -56,7 +56,7 @@ jobs: - run: ./tests/pytype_test.py --print-stderr mypy: - name: Run mypy against the stubs + name: "mypy: Check stubs" runs-on: ubuntu-latest strategy: matrix: @@ -73,7 +73,7 @@ jobs: - run: python ./tests/mypy_test.py --platform=${{ matrix.platform }} --python-version=${{ matrix.python-version }} regression-tests: - name: Run mypy on the test cases + name: "mypy: Run test cases" runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -87,7 +87,7 @@ jobs: - run: python ./tests/regr_test.py --all --verbosity QUIET pyright: - name: Test typeshed with pyright + name: "pyright: Run test cases" runs-on: ubuntu-latest strategy: matrix: @@ -141,7 +141,7 @@ jobs: project: ./pyrightconfig.testcases.json stub-uploader: - name: Run the stub_uploader tests + name: stub_uploader tests runs-on: ubuntu-latest steps: - name: Checkout typeshed From d2653e606cfa8745c5eaf056fb60b87a1aa749f8 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 17 Mar 2025 16:20:14 +0100 Subject: [PATCH 091/388] [CI] Use Python 3.13 where possible (#13637) --- .github/workflows/daily.yml | 3 ++- .github/workflows/meta_tests.yml | 6 ++++-- .github/workflows/stubsabot.yml | 2 +- .github/workflows/tests.yml | 15 ++++++++------- 4 files changed, 15 insertions(+), 11 deletions(-) diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index 90823ea70308..7949f351f5f9 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -114,7 +114,8 @@ jobs: path: stub_uploader - uses: actions/setup-python@v5 with: - python-version: "3.12" + # Keep in sync with stub_uploader's check_scripts.yml workflow. + python-version: "3.13" - uses: astral-sh/setup-uv@v5 - name: Run tests run: | diff --git a/.github/workflows/meta_tests.yml b/.github/workflows/meta_tests.yml index d3749c72b2d3..dd368ed0cdde 100644 --- a/.github/workflows/meta_tests.yml +++ b/.github/workflows/meta_tests.yml @@ -38,7 +38,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - run: curl -LsSf https://astral.sh/uv/install.sh | sh - run: uv pip install -r requirements-tests.txt --system - run: python ./tests/typecheck_typeshed.py --platform=${{ matrix.platform }} @@ -53,6 +53,8 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: + # TODO: Since pytype is not available for Python 3.13, and + # pytype_test.py imports pytype, we need to use Python 3.12 for now. 
python-version: "3.12" - run: curl -LsSf https://astral.sh/uv/install.sh | sh - run: uv pip install -r requirements-tests.txt --system @@ -70,7 +72,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - run: curl -LsSf https://astral.sh/uv/install.sh | sh - name: Git config run: | diff --git a/.github/workflows/stubsabot.yml b/.github/workflows/stubsabot.yml index ef9a4e40cea0..3648d5168aba 100644 --- a/.github/workflows/stubsabot.yml +++ b/.github/workflows/stubsabot.yml @@ -26,7 +26,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - run: curl -LsSf https://astral.sh/uv/install.sh | sh - name: git config run: | diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index eabca8c7f28f..5031b000ddb5 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -29,7 +29,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - run: curl -LsSf https://astral.sh/uv/install.sh | sh - run: uv pip install -r requirements-tests.txt --system - run: python ./tests/check_typeshed_structure.py @@ -41,7 +41,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - # Max supported Python version as of pytype 2024.9.13 + # Max supported Python version as of pytype 2024.10.11 python-version: "3.12" - uses: astral-sh/setup-uv@v5 - run: uv pip install -r requirements-tests.txt --system @@ -98,7 +98,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - uses: astral-sh/setup-uv@v5 - name: Install typeshed test-suite requirements # Install these so we can run `get_external_stub_requirements.py` @@ -122,14 +122,14 @@ jobs: version: PATH python-platform: ${{ matrix.python-platform }} python-version: ${{ matrix.python-version }} - annotate: ${{ matrix.python-version == '3.12' && matrix.python-platform == 'Linux' }} # Having each job create the same comment is too noisy. + annotate: ${{ matrix.python-version == '3.13' && matrix.python-platform == 'Linux' }} # Having each job create the same comment is too noisy. - name: Run pyright with stricter settings on some of the stubs uses: jakebailey/pyright-action@v2 with: version: PATH python-platform: ${{ matrix.python-platform }} python-version: ${{ matrix.python-version }} - annotate: ${{ matrix.python-version == '3.12' && matrix.python-platform == 'Linux' }} # Having each job create the same comment is too noisy. + annotate: ${{ matrix.python-version == '3.13' && matrix.python-platform == 'Linux' }} # Having each job create the same comment is too noisy. project: ./pyrightconfig.stricter.json - name: Run pyright on the test cases uses: jakebailey/pyright-action@v2 @@ -137,7 +137,7 @@ jobs: version: PATH python-platform: ${{ matrix.python-platform }} python-version: ${{ matrix.python-version }} - annotate: ${{ matrix.python-version == '3.12' && matrix.python-platform == 'Linux' }} # Having each job create the same comment is too noisy. + annotate: ${{ matrix.python-version == '3.13' && matrix.python-platform == 'Linux' }} # Having each job create the same comment is too noisy. project: ./pyrightconfig.testcases.json stub-uploader: @@ -155,7 +155,8 @@ jobs: path: stub_uploader - uses: actions/setup-python@v5 with: - python-version: "3.12" + # Keep in sync with stub_uploader's check_scripts.yml workflow. 
+ python-version: "3.13" - uses: astral-sh/setup-uv@v5 - name: Run tests run: | From ded14cfb52d303bbd179923deea0fd344327dd75 Mon Sep 17 00:00:00 2001 From: Brian Schubert Date: Mon, 17 Mar 2025 13:02:07 -0400 Subject: [PATCH 092/388] Fix parameter types for `pkgutil` functions accepting pathlike arguments (#13642) --- stdlib/pkgutil.pyi | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/stdlib/pkgutil.pyi b/stdlib/pkgutil.pyi index 7e7fa4fda9a1..59d70779c72f 100644 --- a/stdlib/pkgutil.pyi +++ b/stdlib/pkgutil.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import SupportsRead +from _typeshed import StrOrBytesPath, SupportsRead from _typeshed.importlib import LoaderProtocol, MetaPathFinderProtocol, PathEntryFinderProtocol from collections.abc import Callable, Iterable, Iterator from typing import IO, Any, NamedTuple, TypeVar @@ -31,21 +31,21 @@ def extend_path(path: _PathT, name: str) -> _PathT: ... if sys.version_info < (3, 12): class ImpImporter: - def __init__(self, path: str | None = None) -> None: ... + def __init__(self, path: StrOrBytesPath | None = None) -> None: ... class ImpLoader: - def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ... + def __init__(self, fullname: str, file: IO[str], filename: StrOrBytesPath, etc: tuple[str, str, int]) -> None: ... @deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.") def find_loader(fullname: str) -> LoaderProtocol | None: ... -def get_importer(path_item: str) -> PathEntryFinderProtocol | None: ... +def get_importer(path_item: StrOrBytesPath) -> PathEntryFinderProtocol | None: ... @deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.") def get_loader(module_or_name: str) -> LoaderProtocol | None: ... def iter_importers(fullname: str = "") -> Iterator[MetaPathFinderProtocol | PathEntryFinderProtocol]: ... -def iter_modules(path: Iterable[str] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ... +def iter_modules(path: Iterable[StrOrBytesPath] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ... def read_code(stream: SupportsRead[bytes]) -> Any: ... # undocumented def walk_packages( - path: Iterable[str] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None + path: Iterable[StrOrBytesPath] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None ) -> Iterator[ModuleInfo]: ... def get_data(package: str, resource: str) -> bytes | None: ... From 34b46a46086f19eb636a040f5457fadab57dd5eb Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 17 Mar 2025 19:27:17 +0100 Subject: [PATCH 093/388] [fpdf2] Make fpdf.fonts.Glyph a dataclass (#13645) --- stubs/fpdf2/@tests/stubtest_allowlist.txt | 4 ++++ stubs/fpdf2/fpdf/fonts.pyi | 10 ++-------- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/stubs/fpdf2/@tests/stubtest_allowlist.txt b/stubs/fpdf2/@tests/stubtest_allowlist.txt index 3453af1c7378..4ceb15988f18 100644 --- a/stubs/fpdf2/@tests/stubtest_allowlist.txt +++ b/stubs/fpdf2/@tests/stubtest_allowlist.txt @@ -10,3 +10,7 @@ fpdf.fonts.HarfBuzzFont # Stubtest wants us to use Literals, but that is unreasonable. 
fpdf.unicode_script.UNICODE_RANGE_TO_SCRIPT + +# Ignore stubtest weirdness "fpdf.fonts.Glyph._DT is not present at runtime" +# https://github.com/python/mypy/issues/18811 +fpdf.fonts.Glyph._DT diff --git a/stubs/fpdf2/fpdf/fonts.pyi b/stubs/fpdf2/fpdf/fonts.pyi index f498efd2395a..dc793aaed1af 100644 --- a/stubs/fpdf2/fpdf/fonts.pyi +++ b/stubs/fpdf2/fpdf/fonts.pyi @@ -118,19 +118,13 @@ class PDFFontDescriptor(PDFObject): font_name: Incomplete def __init__(self, ascent, descent, cap_height, flags, font_b_box, italic_angle, stem_v, missing_width) -> None: ... +@dataclass(order=True) class Glyph: glyph_id: int unicode: tuple[Incomplete, ...] glyph_name: str glyph_width: int - def __hash__(self): ... - def __init__(self, glyph_id, unicode, glyph_name, glyph_width) -> None: ... - def __lt__(self, other): ... - def __gt__(self, other): ... - def __le__(self, other): ... - def __ge__(self, other): ... - - __match_args__ = ("glyph_id", "unicode", "glyph_name", "glyph_width") + def __hash__(self) -> int: ... class SubsetMap: font: TTFFont From dcd3e6bb746ef121396b0d23e36742acca20102a Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 17 Mar 2025 19:30:30 +0100 Subject: [PATCH 094/388] Add TypedDict.__{readonly,mutable}_keys__ (#13646) --- stdlib/typing.pyi | 3 +++ 1 file changed, 3 insertions(+) diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi index 5875b6915762..9b0443973fcd 100644 --- a/stdlib/typing.pyi +++ b/stdlib/typing.pyi @@ -950,6 +950,9 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): # so we only add it to the stub on 3.12+ if sys.version_info >= (3, 12): __orig_bases__: ClassVar[tuple[Any, ...]] + if sys.version_info >= (3, 13): + __readonly_keys__: ClassVar[frozenset[str]] + __mutable_keys__: ClassVar[frozenset[str]] def copy(self) -> typing_extensions.Self: ... # Using Never so that only calls using mypy plugin hook that specialize the signature From a45d50c166bd915f0cea3e27963536da4a3c08d3 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 17 Mar 2025 20:10:21 +0100 Subject: [PATCH 095/388] [pynput] Update to 1.8.1 (#13647) --- stubs/pynput/METADATA.toml | 2 +- stubs/pynput/pynput/keyboard/__init__.pyi | 6 ++++-- stubs/pynput/pynput/mouse/__init__.pyi | 9 ++++++--- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/stubs/pynput/METADATA.toml b/stubs/pynput/METADATA.toml index 4955690e2e4a..4b63115669c3 100644 --- a/stubs/pynput/METADATA.toml +++ b/stubs/pynput/METADATA.toml @@ -1,4 +1,4 @@ -version = "1.8.*" +version = "~=1.8.1" upstream_repository = "https://github.com/moses-palmer/pynput" [tool.stubtest] diff --git a/stubs/pynput/pynput/keyboard/__init__.pyi b/stubs/pynput/pynput/keyboard/__init__.pyi index bda2b193fd7e..8d553e0087f9 100644 --- a/stubs/pynput/pynput/keyboard/__init__.pyi +++ b/stubs/pynput/pynput/keyboard/__init__.pyi @@ -9,11 +9,13 @@ from ._base import Controller as Controller, Key as Key, KeyCode as KeyCode, Lis class Events(_util.Events[Any, Listener]): class Press(_util.Events.Event): key: Key | KeyCode | None - def __init__(self, key: Key | KeyCode | None) -> None: ... + injected: bool + def __init__(self, key: Key | KeyCode | None, injected: bool) -> None: ... class Release(_util.Events.Event): key: Key | KeyCode | None - def __init__(self, key: Key | KeyCode | None) -> None: ... + injected: bool + def __init__(self, key: Key | KeyCode | None, injected: bool) -> None: ... def __init__(self) -> None: ... def __next__(self) -> Press | Release: ... 
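As a side note on the typing.pyi hunk above: the two new ClassVars mirror the introspection attributes CPython 3.13 exposes at runtime for PEP 705 TypedDicts. A minimal sketch of what they report (the Movie class is invented for illustration):

    from typing import ReadOnly, TypedDict

    class Movie(TypedDict):
        title: ReadOnly[str]
        year: int

    # On Python 3.13+ these attributes exist at runtime:
    print(Movie.__readonly_keys__)  # frozenset({'title'})
    print(Movie.__mutable_keys__)   # frozenset({'year'})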
diff --git a/stubs/pynput/pynput/mouse/__init__.pyi b/stubs/pynput/pynput/mouse/__init__.pyi index a06bed3945b3..42738f852a09 100644 --- a/stubs/pynput/pynput/mouse/__init__.pyi +++ b/stubs/pynput/pynput/mouse/__init__.pyi @@ -8,21 +8,24 @@ class Events(_util.Events[Any, Listener]): class Move(_util.Events.Event): x: int y: int - def __init__(self, x: int, y: int) -> None: ... + injected: bool + def __init__(self, x: int, y: int, injected: bool) -> None: ... class Click(_util.Events.Event): x: int y: int button: Button pressed: bool - def __init__(self, x: int, y: int, button: Button, pressed: bool) -> None: ... + injected: bool + def __init__(self, x: int, y: int, button: Button, pressed: bool, injected: bool) -> None: ... class Scroll(_util.Events.Event): x: int y: int dx: int dy: int - def __init__(self, x: int, y: int, dx: int, dy: int) -> None: ... + injected: bool + def __init__(self, x: int, y: int, dx: int, dy: int, injected: bool) -> None: ... def __init__(self) -> None: ... def __next__(self) -> Move | Click | Scroll: ... From f3b7b1934186badfafc4420f3fe47db73569573f Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 17 Mar 2025 23:52:02 +0100 Subject: [PATCH 096/388] [cffi] Update repository URL (#13652) --- stubs/cffi/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/cffi/METADATA.toml b/stubs/cffi/METADATA.toml index f1607a4d9f2a..c7ba41dd628c 100644 --- a/stubs/cffi/METADATA.toml +++ b/stubs/cffi/METADATA.toml @@ -1,5 +1,5 @@ version = "1.16.*" -upstream_repository = "https://foss.heptapod.net/pypy/cffi" +upstream_repository = "https://github.com/python-cffi/cffi/" requires = ["types-setuptools"] [tool.stubtest] From 908b93adc2f76f6a6332bdda2389db84a3e1ff0c Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 01:14:45 +0100 Subject: [PATCH 097/388] [greenlet] Fix for Python 3.13 (#13648) `get_tstate_trash_delete_nesting()` is not available on Python 3.13. --- stubs/greenlet/greenlet/_greenlet.pyi | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/stubs/greenlet/greenlet/_greenlet.pyi b/stubs/greenlet/greenlet/_greenlet.pyi index 3e310a147790..ed2dac474595 100644 --- a/stubs/greenlet/greenlet/_greenlet.pyi +++ b/stubs/greenlet/greenlet/_greenlet.pyi @@ -1,3 +1,4 @@ +import sys from collections.abc import Callable from contextvars import Context from types import FrameType, TracebackType @@ -71,7 +72,10 @@ def enable_optional_cleanup(enabled: bool, /) -> None: ... def get_clocks_used_doing_optional_cleanup() -> int: ... def get_pending_cleanup_count() -> int: ... def get_total_main_greenlets() -> int: ... -def get_tstate_trash_delete_nesting() -> int: ... + +if sys.version_info < (3, 13): + def get_tstate_trash_delete_nesting() -> int: ... + def getcurrent() -> greenlet: ... def gettrace() -> _TraceCallback | None: ... def set_thread_local(key: object, value: object, /) -> None: ... From 58503b86d1c8b4d29d4e4808719333194fb81915 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 01:14:52 +0100 Subject: [PATCH 098/388] [passlib] Fix for Python 3.13 (#13649) `passlib.hosts.host_context` is not present on Python 3.13. 
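The fix below uses the same guard as the greenlet change above: the symbol is only declared for interpreters that can still provide it. Downstream code can mirror the pattern; a minimal, illustrative sketch (the fallback construction is an assumption for the example, not part of this patch):

    import sys

    if sys.version_info < (3, 13):
        from passlib.hosts import host_context as ctx
    else:
        # crypt, and with it host_context, is unavailable on 3.13, so build a context by hand
        from passlib.context import CryptContext
        ctx = CryptContext(schemes=["sha512_crypt", "md5_crypt"])

    print(ctx.schemes())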
--- stubs/passlib/passlib/hosts.pyi | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/stubs/passlib/passlib/hosts.pyi b/stubs/passlib/passlib/hosts.pyi index 732e02106f27..5b365e00ecab 100644 --- a/stubs/passlib/passlib/hosts.pyi +++ b/stubs/passlib/passlib/hosts.pyi @@ -1,3 +1,4 @@ +import sys from typing import Any from passlib.context import CryptContext @@ -8,4 +9,5 @@ freebsd_context: Any openbsd_context: Any netbsd_context: Any # Only exists if crypt is present -host_context: CryptContext +if sys.version_info < (3, 13): + host_context: CryptContext From a037894d7bec819c9de3651856ab71eeb27180f2 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 01:16:36 +0100 Subject: [PATCH 099/388] Make BaseTzInfo inherited methods pos-only (#13650) These methods are already positional-only in `datetime.tzinfo`. Leaving them pos-or-kw fail stubtest when running with Python 3.13. --- stubs/pytz/pytz/tzinfo.pyi | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stubs/pytz/pytz/tzinfo.pyi b/stubs/pytz/pytz/tzinfo.pyi index 784c0a91db77..33655856149c 100644 --- a/stubs/pytz/pytz/tzinfo.pyi +++ b/stubs/pytz/pytz/tzinfo.pyi @@ -13,11 +13,11 @@ class BaseTzInfo(datetime.tzinfo): @abstractmethod def normalize(self, dt: datetime.datetime) -> datetime.datetime: ... @abstractmethod - def tzname(self, dt: datetime.datetime | None) -> str: ... + def tzname(self, dt: datetime.datetime | None, /) -> str: ... @abstractmethod - def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def utcoffset(self, dt: datetime.datetime | None, /) -> datetime.timedelta | None: ... @abstractmethod - def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def dst(self, dt: datetime.datetime | None, /) -> datetime.timedelta | None: ... class StaticTzInfo(BaseTzInfo): def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ... From f0d7ccaebe1d5524fda11c3b8e04a251ee23d969 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 02:22:34 +0100 Subject: [PATCH 100/388] Fix various argument of extension modules (#13651) * Mark various positional-only arguments: These are all positional-only arguments in C code using the `METH_O` flag. * Remove various `*args, **kwargs` arguments that are using the `METH_NOARGS` flag in C. --- stubs/hdbcli/hdbcli/dbapi.pyi | 2 +- stubs/mysqlclient/MySQLdb/_mysql.pyi | 64 ++++++++++++------------- stubs/psycopg2/psycopg2/_psycopg.pyi | 2 +- stubs/regex/regex/regex.pyi | 4 +- stubs/simplejson/simplejson/encoder.pyi | 2 +- 5 files changed, 37 insertions(+), 37 deletions(-) diff --git a/stubs/hdbcli/hdbcli/dbapi.pyi b/stubs/hdbcli/hdbcli/dbapi.pyi index 4ba75f81b4b6..3d28a2121a88 100644 --- a/stubs/hdbcli/hdbcli/dbapi.pyi +++ b/stubs/hdbcli/hdbcli/dbapi.pyi @@ -85,7 +85,7 @@ class Cursor: def prepare(self, operation: str, newcursor: Literal[True]) -> Cursor: ... @overload def prepare(self, operation: str, newcursor: Literal[False]) -> Any: ... - def print_message(self, *args, **kwargs): ... + def print_message(self): ... def parsenamedquery(self, *args, **kwargs): ... def scroll(self, value: int, mode: Literal["absolute", "relative"] = ...) -> None: ... def server_cpu_time(self) -> int: ... 
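Both changes above, and the _mysql retyping that follows, encode the same rule: parameters implemented at the C level with METH_O (or inherited from datetime.tzinfo) cannot be passed by keyword, and the trailing `/` in the stub now says so. A tiny illustration of what the marker means, using a made-up subclass:

    import datetime

    class FixedTZ(datetime.tzinfo):
        def tzname(self, dt, /):      # positional-only, matching the C base class
            return "UTC+01:00"

    tz = FixedTZ()
    tz.tzname(None)                   # fine: passed positionally
    tz.tzname(dt=None)                # TypeError: positional-only argument passed as keyword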
diff --git a/stubs/mysqlclient/MySQLdb/_mysql.pyi b/stubs/mysqlclient/MySQLdb/_mysql.pyi index b8246512d090..0ab789e41d46 100644 --- a/stubs/mysqlclient/MySQLdb/_mysql.pyi +++ b/stubs/mysqlclient/MySQLdb/_mysql.pyi @@ -24,46 +24,46 @@ class connection: port: Incomplete server_capabilities: Incomplete def __init__(self, *args, **kwargs) -> None: ... - def _get_native_connection(self, *args, **kwargs): ... - def affected_rows(self, *args, **kwargs): ... + def _get_native_connection(self): ... + def affected_rows(self): ... def autocommit(self, on): ... def change_user(self, *args, **kwargs): ... - def character_set_name(self, *args, **kwargs): ... - def close(self, *args, **kwargs): ... - def commit(self, *args, **kwargs): ... - def dump_debug_info(self, *args, **kwargs): ... - def errno(self, *args, **kwargs): ... - def error(self, *args, **kwargs): ... + def character_set_name(self): ... + def close(self): ... + def commit(self): ... + def dump_debug_info(self): ... + def errno(self): ... + def error(self): ... def escape(self, obj, dict): ... def escape_string(self, s): ... - def field_count(self, *args, **kwargs): ... - def fileno(self, *args, **kwargs): ... - def get_autocommit(self, *args, **kwargs): ... - def get_character_set_info(self, *args, **kwargs): ... - def get_host_info(self, *args, **kwargs): ... - def get_proto_info(self, *args, **kwargs): ... - def get_server_info(self, *args, **kwargs): ... - def info(self, *args, **kwargs): ... - def insert_id(self, *args, **kwargs): ... + def field_count(self): ... + def fileno(self): ... + def get_autocommit(self): ... + def get_character_set_info(self): ... + def get_host_info(self): ... + def get_proto_info(self): ... + def get_server_info(self): ... + def info(self): ... + def insert_id(self): ... def kill(self, *args, **kwargs): ... def next_result(self): ... def ping(self): ... def query(self, query): ... - def read_query_result(self, *args, **kwargs): ... - def rollback(self, *args, **kwargs): ... + def read_query_result(self): ... + def rollback(self): ... def select_db(self, *args, **kwargs): ... def send_query(self, *args, **kwargs): ... def set_character_set(self, charset: str) -> None: ... def set_server_option(self, option): ... - def shutdown(self, *args, **kwargs): ... - def sqlstate(self, *args, **kwargs): ... - def stat(self, *args, **kwargs): ... - def store_result(self, *args, **kwargs): ... - def string_literal(self, obj): ... - def thread_id(self, *args, **kwargs): ... - def use_result(self, *args, **kwargs): ... + def shutdown(self): ... + def sqlstate(self): ... + def stat(self): ... + def store_result(self): ... + def string_literal(self, obj, /) -> str: ... + def thread_id(self): ... + def use_result(self): ... def discard_result(self) -> None: ... - def warning_count(self, *args, **kwargs): ... + def warning_count(self): ... def __delattr__(self, name: str, /) -> None: ... def __setattr__(self, name: str, value, /) -> None: ... @@ -72,12 +72,12 @@ class result: has_next: Incomplete def __init__(self, *args, **kwargs) -> None: ... def data_seek(self, n): ... - def describe(self, *args, **kwargs): ... + def describe(self): ... def fetch_row(self, *args, **kwargs): ... def discard(self) -> None: ... - def field_flags(self, *args, **kwargs): ... - def num_fields(self, *args, **kwargs): ... - def num_rows(self, *args, **kwargs): ... + def field_flags(self): ... + def num_fields(self): ... + def num_rows(self): ... def __delattr__(self, name: str, /) -> None: ... 
def __setattr__(self, name: str, value, /) -> None: ... @@ -86,4 +86,4 @@ def debug(*args, **kwargs): ... def escape(obj, dict): ... def escape_string(s): ... def get_client_info(): ... -def string_literal(obj): ... +def string_literal(obj, /) -> str: ... diff --git a/stubs/psycopg2/psycopg2/_psycopg.pyi b/stubs/psycopg2/psycopg2/_psycopg.pyi index 2f5ba5032a84..1eac9616959a 100644 --- a/stubs/psycopg2/psycopg2/_psycopg.pyi +++ b/stubs/psycopg2/psycopg2/_psycopg.pyi @@ -378,7 +378,7 @@ class ReplicationCursor(cursor): wal_end: Any def __init__(self, *args, **kwargs) -> None: ... def consume_stream(self, consumer, keepalive_interval=...): ... - def read_message(self, *args, **kwargs): ... + def read_message(self) -> Incomplete | None: ... def send_feedback(self, write_lsn=..., flush_lsn=..., apply_lsn=..., reply=..., force=...): ... def start_replication_expert(self, command, decode=..., status_interval=...): ... diff --git a/stubs/regex/regex/regex.pyi b/stubs/regex/regex/regex.pyi index 07b11b145dbb..b777677381ca 100644 --- a/stubs/regex/regex/regex.pyi +++ b/stubs/regex/regex/regex.pyi @@ -632,8 +632,8 @@ class Match(Generic[AnyStr]): def ends(self, group: int | str = ..., /) -> list[int]: ... @overload def ends(self, group1: int | str, group2: int | str, /, *groups: int | str) -> tuple[list[int], ...]: ... - def expand(self, template: AnyStr) -> AnyStr: ... - def expandf(self, format: AnyStr) -> AnyStr: ... + def expand(self, template: AnyStr, /) -> AnyStr: ... + def expandf(self, format: AnyStr, /) -> AnyStr: ... @overload def captures(self, group: int | str = ..., /) -> list[AnyStr]: ... @overload diff --git a/stubs/simplejson/simplejson/encoder.pyi b/stubs/simplejson/simplejson/encoder.pyi index a4a09c654545..41017f0c1d98 100644 --- a/stubs/simplejson/simplejson/encoder.pyi +++ b/stubs/simplejson/simplejson/encoder.pyi @@ -57,4 +57,4 @@ class JSONEncoder: class JSONEncoderForHTML(JSONEncoder): ... def encode_basestring(s: str | bytes, _PY3: Literal[True] = ..., _q: str = ...) -> str: ... -def encode_basestring_ascii(s: str | bytes, _PY3: Literal[True] = ...) -> str: ... +def encode_basestring_ascii(s: str | bytes, /) -> str: ... From 44e2be00709364c65e597e6e46482b2d499bc570 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 18 Mar 2025 14:35:20 +0400 Subject: [PATCH 101/388] Deprecate CGIHTTPRequestHandler (#13658) --- stdlib/http/server.pyi | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/stdlib/http/server.pyi b/stdlib/http/server.pyi index b273e19c10cd..1a6fde6000d9 100644 --- a/stdlib/http/server.pyi +++ b/stdlib/http/server.pyi @@ -6,6 +6,7 @@ import sys from _typeshed import StrPath, SupportsRead, SupportsWrite from collections.abc import Mapping, Sequence from typing import Any, AnyStr, BinaryIO, ClassVar +from typing_extensions import deprecated __all__ = ["HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler"] @@ -72,7 +73,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): def guess_type(self, path: StrPath) -> str: ... # undocumented def executable(path: StrPath) -> bool: ... 
# undocumented - +@deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.15") class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): cgi_directories: list[str] have_fork: bool # undocumented From 8dcf23b1725e07d9e53579ec6f405e4a1b0010f9 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 12:05:23 +0100 Subject: [PATCH 102/388] [cffi] Bump to 1.17.* (#13653) --- stubs/cffi/METADATA.toml | 2 +- stubs/cffi/cffi/recompiler.pyi | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/stubs/cffi/METADATA.toml b/stubs/cffi/METADATA.toml index c7ba41dd628c..357ce0ff0599 100644 --- a/stubs/cffi/METADATA.toml +++ b/stubs/cffi/METADATA.toml @@ -1,4 +1,4 @@ -version = "1.16.*" +version = "1.17.*" upstream_repository = "https://github.com/python-cffi/cffi/" requires = ["types-setuptools"] diff --git a/stubs/cffi/cffi/recompiler.pyi b/stubs/cffi/cffi/recompiler.pyi index 053f7c46a159..d695efa00d43 100644 --- a/stubs/cffi/cffi/recompiler.pyi +++ b/stubs/cffi/cffi/recompiler.pyi @@ -90,5 +90,6 @@ def recompile( compiler_verbose: int = 1, target: Incomplete | None = None, debug: Incomplete | None = None, + uses_ffiplatform: bool = True, **kwds, ): ... From 7099898b8fbd00dc32a4d307371d3b96e91aa7ec Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 12:05:59 +0100 Subject: [PATCH 103/388] [WebOb] Add cgi_FieldStorage.make_file on Python 3.13+ (#13654) --- stubs/WebOb/webob/compat.pyi | 2 ++ 1 file changed, 2 insertions(+) diff --git a/stubs/WebOb/webob/compat.pyi b/stubs/WebOb/webob/compat.pyi index 887dbe069b92..66e384c55329 100644 --- a/stubs/WebOb/webob/compat.pyi +++ b/stubs/WebOb/webob/compat.pyi @@ -1,5 +1,6 @@ import sys from html import escape as escape +from io import FileIO, TextIOWrapper from queue import Empty as Empty, Queue as Queue from typing import IO @@ -8,6 +9,7 @@ if sys.version_info >= (3, 13): class cgi_FieldStorage: filename: str file: IO[bytes] + def make_file(self) -> TextIOWrapper | FileIO: ... def parse_header(line: str) -> tuple[str, dict[str, str]]: ... From 86d1ed34d46af61d16338f38162c03634e5ab304 Mon Sep 17 00:00:00 2001 From: Abdrakhman <54412983+guitvcer@users.noreply.github.com> Date: Tue, 18 Mar 2025 16:35:50 +0500 Subject: [PATCH 104/388] [networkx] Fix a few annotations in AtlasView and Graph (#13656) --- stubs/networkx/networkx/classes/coreviews.pyi | 4 +++- stubs/networkx/networkx/classes/graph.pyi | 7 ++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/stubs/networkx/networkx/classes/coreviews.pyi b/stubs/networkx/networkx/classes/coreviews.pyi index 15afbf5cf573..5c10338c679a 100644 --- a/stubs/networkx/networkx/classes/coreviews.pyi +++ b/stubs/networkx/networkx/classes/coreviews.pyi @@ -7,11 +7,13 @@ _U = TypeVar("_U") _V = TypeVar("_V") class AtlasView(Mapping[_T, dict[_U, _V]]): + def __getstate__(self) -> dict[str, Mapping[_T, dict[_U, _V]]]: ... + def __setstate__(self, state: dict[str, Mapping[_T, dict[_U, _V]]]) -> None: ... def __init__(self, d: Mapping[_T, dict[_U, _V]]) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __getitem__(self, key: _T) -> dict[_U, _V]: ... - def copy(self) -> Self: ... + def copy(self) -> dict[_T, dict[_U, _V]]: ... class AdjacencyView(AtlasView[_T, _U, _V]): ... class MultiAdjacencyView(AdjacencyView[_T, _U, _V]): ... 
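To make the coreviews change above concrete: copy() on an AtlasView materialises a plain dict rather than another view, which is what the return annotation now states. A small sketch (graph contents invented for the example):

    import networkx as nx

    G = nx.Graph()
    G.add_edge("a", "b", weight=3)

    view = G["a"]              # AtlasView over the neighbours of "a"
    snapshot = view.copy()     # plain dict, detached from the graph
    print(snapshot)            # {'b': {'weight': 3}}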
diff --git a/stubs/networkx/networkx/classes/graph.pyi b/stubs/networkx/networkx/classes/graph.pyi index a1b61109a21f..981d61fd0b34 100644 --- a/stubs/networkx/networkx/classes/graph.pyi +++ b/stubs/networkx/networkx/classes/graph.pyi @@ -45,11 +45,11 @@ class Graph(Collection[_Node]): def name(self) -> str: ... @name.setter def name(self, s: str) -> None: ... - def __getitem__(self, n: _Node) -> AtlasView[_Node, _Node, dict[str, Incomplete]]: ... + def __getitem__(self, n: _Node) -> AtlasView[_Node, str, Any]: ... def __iter__(self) -> Iterator[_Node]: ... def __contains__(self, n: object) -> bool: ... def __len__(self) -> int: ... - def add_node(self, node_for_adding: _Node, **attr) -> None: ... + def add_node(self, node_for_adding: _Node, **attr: Any) -> None: ... # attr: Set or change node attributes using key=value def add_nodes_from(self, nodes_for_adding: Iterable[_NodePlus[_Node]], **attr) -> None: ... def remove_node(self, n: _Node) -> None: ... def remove_nodes_from(self, nodes: Iterable[_Node]) -> None: ... @@ -58,7 +58,8 @@ class Graph(Collection[_Node]): def number_of_nodes(self) -> int: ... def order(self) -> int: ... def has_node(self, n: _Node) -> bool: ... - def add_edge(self, u_of_edge: _Node, v_of_edge: _Node, **attr) -> None: ... + # attr: Edge data (or labels or objects) can be assigned using keyword arguments + def add_edge(self, u_of_edge: _Node, v_of_edge: _Node, **attr: Any) -> None: ... def add_edges_from(self, ebunch_to_add: Iterable[_EdgePlus[_Node]], **attr) -> None: ... def add_weighted_edges_from( self, ebunch_to_add: Iterable[tuple[_Node, _Node, Incomplete]], weight: str = "weight", **attr From 3e7ad90df8fc772b1117cdd3b9fe44d0033378fe Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 12:53:31 +0100 Subject: [PATCH 105/388] [cffi] Fix a few argument types for C code (#13661) * CLibrary.close_lib() doesn't take any arguments. * FFI.typeof() only takes positional arguments. --- stubs/cffi/_cffi_backend.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stubs/cffi/_cffi_backend.pyi b/stubs/cffi/_cffi_backend.pyi index 8fb8cbe6d6f5..6f97a6023cff 100644 --- a/stubs/cffi/_cffi_backend.pyi +++ b/stubs/cffi/_cffi_backend.pyi @@ -34,7 +34,7 @@ class CField: @final class CLibrary: - def close_lib(self, *args, **kwargs): ... + def close_lib(self) -> None: ... def load_function(self, *args, **kwargs): ... def read_variable(self, *args, **kwargs): ... def write_variable(self, *args, **kwargs): ... @@ -204,7 +204,7 @@ class FFI: def release(self, cdata: CData, /) -> None: ... def sizeof(self, cdecl: str | CType | CData, /) -> int: ... def string(self, cdata: CData, maxlen: int = -1) -> bytes | str: ... - def typeof(self, cdecl: str | CData) -> CType: ... + def typeof(self, cdecl: str | CData, /) -> CType: ... def unpack(self, cdata: CData, length: int) -> bytes | str | list[Any]: ... def alignof(cdecl: CType, /) -> int: ... 
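Per the commit message above, FFI.typeof() on the C backend accepts its argument positionally only, which the added `/` now captures. An illustrative sketch against the compiled backend (the behaviour of the keyword form is as described by the commit, not re-verified here):

    from _cffi_backend import FFI

    ffi = FFI()
    int_t = ffi.typeof("int")     # fine: cdecl passed positionally
    print(int_t.cname)            # 'int'
    # ffi.typeof(cdecl="int")     # would raise TypeError per the commit: no keyword arguments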
From 7ec0be6168cf45e1a35e32c4667ebe710bcdd74a Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 18 Mar 2025 16:05:59 +0400 Subject: [PATCH 106/388] Use `Generator` for `heapq.merge` (#13663) --- stdlib/heapq.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stdlib/heapq.pyi b/stdlib/heapq.pyi index 7a3aa8b442a5..220c41f303fb 100644 --- a/stdlib/heapq.pyi +++ b/stdlib/heapq.pyi @@ -1,6 +1,6 @@ from _heapq import * from _typeshed import SupportsRichComparison -from collections.abc import Callable, Iterable +from collections.abc import Callable, Generator, Iterable from typing import Any, Final, TypeVar __all__ = ["heappush", "heappop", "heapify", "heapreplace", "merge", "nlargest", "nsmallest", "heappushpop"] @@ -11,7 +11,7 @@ __about__: Final[str] def merge( *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None, reverse: bool = False -) -> Iterable[_S]: ... +) -> Generator[_S]: ... def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... def _heapify_max(heap: list[Any], /) -> None: ... # undocumented From 5c28553fe0deb5d78d571685840cd79716322544 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 13:35:25 +0100 Subject: [PATCH 107/388] [stubsabot] Extend warning text when stubtest is disabled (#13664) --- scripts/stubsabot.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/stubsabot.py b/scripts/stubsabot.py index 3e755ca2d5d4..a8e1b2ba5c73 100755 --- a/scripts/stubsabot.py +++ b/scripts/stubsabot.py @@ -676,7 +676,8 @@ def get_update_pr_body(update: Update, metadata: Mapping[str, Any]) -> str: body += textwrap.dedent( f""" - :warning: Review this PR manually, as stubtest is skipped in CI for {update.distribution}! :warning: + :warning: Review this PR manually, as stubtest is skipped in CI for {update.distribution}! + Also check whether stubtest can be reenabled. 
:warning: """ ) return body From 0a17c9a910d89c695fa2ee4328acaff5c457ce87 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 13:36:15 +0100 Subject: [PATCH 108/388] [CI] Run stubtest with Python 3.13 (#13638) --- .github/workflows/daily.yml | 2 +- .github/workflows/stubtest_third_party.yml | 2 +- stubs/corus/METADATA.toml | 4 ++++ stubs/humanfriendly/METADATA.toml | 2 ++ stubs/passlib/@tests/stubtest_allowlist.txt | 5 +++++ stubs/pygit2/METADATA.toml | 2 ++ stubs/tensorflow/METADATA.toml | 3 +++ stubs/tqdm/@tests/stubtest_allowlist.txt | 4 ++++ stubs/tqdm/METADATA.toml | 5 ++++- stubs/tree-sitter-languages/METADATA.toml | 6 ++++++ 10 files changed, 32 insertions(+), 3 deletions(-) diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index 7949f351f5f9..be0d5b7cad71 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -66,7 +66,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" cache: pip cache-dependency-path: | requirements-tests.txt diff --git a/.github/workflows/stubtest_third_party.yml b/.github/workflows/stubtest_third_party.yml index 8e7105b9ddf0..69ec7c6ac625 100644 --- a/.github/workflows/stubtest_third_party.yml +++ b/.github/workflows/stubtest_third_party.yml @@ -41,7 +41,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" cache: pip cache-dependency-path: | requirements-tests.txt diff --git a/stubs/corus/METADATA.toml b/stubs/corus/METADATA.toml index f966ab456995..e7cf25599620 100644 --- a/stubs/corus/METADATA.toml +++ b/stubs/corus/METADATA.toml @@ -1,2 +1,6 @@ version = "0.10.*" upstream_repository = "https://github.com/natasha/corus" + +[tool.stubtest] +# As of version 0.10.0, corus doesn't support Python 3.13. +skip = true diff --git a/stubs/humanfriendly/METADATA.toml b/stubs/humanfriendly/METADATA.toml index c8518e705021..eaca32d94d25 100644 --- a/stubs/humanfriendly/METADATA.toml +++ b/stubs/humanfriendly/METADATA.toml @@ -3,3 +3,5 @@ upstream_repository = "https://github.com/xolox/python-humanfriendly" [tool.stubtest] stubtest_requirements = ["docutils", "mock"] +# Package is unsupported and doesn't support Python 3.13 as of 2025-03-17. +skip = true diff --git a/stubs/passlib/@tests/stubtest_allowlist.txt b/stubs/passlib/@tests/stubtest_allowlist.txt index 16ec9f5109a1..c953253672cd 100644 --- a/stubs/passlib/@tests/stubtest_allowlist.txt +++ b/stubs/passlib/@tests/stubtest_allowlist.txt @@ -90,3 +90,8 @@ passlib.utils.compat.* # Tests are not included: passlib.tests.* + +# This is only available when the crypt module is available. This module +# was dropped from the standard library of Python 3.13, but is still available +# in some environments. +(passlib.hosts.host_context)? diff --git a/stubs/pygit2/METADATA.toml b/stubs/pygit2/METADATA.toml index 13eec7dc9537..8f0525f88186 100644 --- a/stubs/pygit2/METADATA.toml +++ b/stubs/pygit2/METADATA.toml @@ -5,3 +5,5 @@ obsolete_since = "1.16.0" # Released on 2024-10-11 [tool.stubtest] platforms = ["darwin", "linux", "win32"] +# Does not build on any platform on Python 3.13 as of 2025-03-17. 
+skip = true diff --git a/stubs/tensorflow/METADATA.toml b/stubs/tensorflow/METADATA.toml index 66c658d2a6ce..94d4b9f8a08b 100644 --- a/stubs/tensorflow/METADATA.toml +++ b/stubs/tensorflow/METADATA.toml @@ -10,3 +10,6 @@ partial_stub = true ignore_missing_stub = true # TODO: Support/update to keras 3.7 stubtest_requirements = ["keras==3.6.*"] +# tensorflow 2.19 doesn't support Python 3.13: +# https://github.com/tensorflow/tensorflow/issues/78774 +skip = true diff --git a/stubs/tqdm/@tests/stubtest_allowlist.txt b/stubs/tqdm/@tests/stubtest_allowlist.txt index ca237a8d068d..e14a351b27a6 100644 --- a/stubs/tqdm/@tests/stubtest_allowlist.txt +++ b/stubs/tqdm/@tests/stubtest_allowlist.txt @@ -6,3 +6,7 @@ tqdm._tqdm_notebook.__all__ # Cannot import in stubtest tqdm.__main__ + +# TODO: Reenable when tensorflow supports Python 3.13 and is added to the +# stubtest dependencies in METADATA.toml. +tqdm.keras diff --git a/stubs/tqdm/METADATA.toml b/stubs/tqdm/METADATA.toml index fddf4c03eb49..f218397eb207 100644 --- a/stubs/tqdm/METADATA.toml +++ b/stubs/tqdm/METADATA.toml @@ -4,4 +4,7 @@ requires = ["types-requests"] [tool.stubtest] extras = ["slack", "telegram"] -stubtest_requirements = ["dask", "pandas", "rich", "tensorflow"] +# Add `"tensorflow"` to this list when there's a tensorflow release supporting +# Python 3.13: https://github.com/tensorflow/tensorflow/issues/78774. +# Also remove tqdm.keras from @tests/stubtest_allowlist.txt. +stubtest_requirements = ["dask", "pandas", "rich"] diff --git a/stubs/tree-sitter-languages/METADATA.toml b/stubs/tree-sitter-languages/METADATA.toml index 055b8d94dd38..b139562018f3 100644 --- a/stubs/tree-sitter-languages/METADATA.toml +++ b/stubs/tree-sitter-languages/METADATA.toml @@ -1,3 +1,9 @@ version = "1.10.*" upstream_repository = "https://github.com/grantjenks/py-tree-sitter-languages" requires = ["tree-sitter>=0.20.3"] + +[tool.stubtest] +# This package is unmaintained and doesn't support Python 3.13. +# See https://github.com/grantjenks/py-tree-sitter-languages/issues/75 and +# https://github.com/grantjenks/py-tree-sitter-languages/blob/main/README.rst#status +skip = true From 8b4324d3f4b1e10c74d68dead2c415167409c276 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 18 Mar 2025 13:51:32 +0100 Subject: [PATCH 109/388] Mark humanfriendly as no longer updated (#13665) Cf. #13662 --- stubs/humanfriendly/METADATA.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/stubs/humanfriendly/METADATA.toml b/stubs/humanfriendly/METADATA.toml index eaca32d94d25..0a4f7300b177 100644 --- a/stubs/humanfriendly/METADATA.toml +++ b/stubs/humanfriendly/METADATA.toml @@ -1,5 +1,6 @@ version = "10.0.*" upstream_repository = "https://github.com/xolox/python-humanfriendly" +no_longer_updated = true [tool.stubtest] stubtest_requirements = ["docutils", "mock"] From db857a9c3f34406800db92fca542c7718e58dbe5 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 18 Mar 2025 15:31:50 -0400 Subject: [PATCH 110/388] Bump pywin32 to 310.* (#13667) --- stubs/pywin32/@tests/stubtest_allowlist_win32.txt | 7 +------ stubs/pywin32/METADATA.toml | 2 +- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/stubs/pywin32/@tests/stubtest_allowlist_win32.txt b/stubs/pywin32/@tests/stubtest_allowlist_win32.txt index e29b00349dc2..c2329ad6412d 100644 --- a/stubs/pywin32/@tests/stubtest_allowlist_win32.txt +++ b/stubs/pywin32/@tests/stubtest_allowlist_win32.txt @@ -45,7 +45,7 @@ win32com(ext)?.directsound.test.* # It's safer to not even expose this method as deprecated. 
(win32.)?win32gui.PyMakeBuffer -# Axdebug is not built on Python 3.11 anyway: https://github.com/mhammond/pywin32/blob/main/setup.py#L398-L402 +# Axdebug is not built on Python 3.11 anyway: https://github.com/mhammond/pywin32/blob/c0f06cf49252b4848d0c74832247280291b00b03/setup.py#L386-L390 # # failed to import, ImportError: DLL load failed while importing axdebug: The specified module could not be found. win32com(ext)?.axdebug.axdebug @@ -61,8 +61,3 @@ win32com(ext)?.axdebug.stackframe # failed to import, ImportError: cannot import name 'axdebug' from 'win32com.axdebug' win32com(ext)?.axdebug.gateways win32com(ext)?.axscript.client.debug - -# These missing is likely an issue with the upstream build's WINVER target on 309. -# Keep them in stubs as there will likely be a patch soon. -# https://github.com/mhammond/pywin32/issues/2486 -win32com(ext)?\.shell\.shell\.\w+? diff --git a/stubs/pywin32/METADATA.toml b/stubs/pywin32/METADATA.toml index d6fb6753ea1d..338759b1d81b 100644 --- a/stubs/pywin32/METADATA.toml +++ b/stubs/pywin32/METADATA.toml @@ -1,4 +1,4 @@ -version = "309.*" +version = "310.*" upstream_repository = "https://github.com/mhammond/pywin32" [tool.stubtest] From a2c66b5af34117c62815079d3faecf23c9729f2c Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Wed, 19 Mar 2025 13:56:18 +0100 Subject: [PATCH 111/388] Update typing_extensions to 4.13.0rc1 (#13671) Also sort __all__ to match the implementation --- requirements-tests.txt | 2 +- stdlib/@tests/stubtest_allowlists/common.txt | 9 +- stdlib/@tests/stubtest_allowlists/py310.txt | 3 - stdlib/@tests/stubtest_allowlists/py311.txt | 3 - stdlib/@tests/stubtest_allowlists/py312.txt | 4 + stdlib/@tests/stubtest_allowlists/py313.txt | 4 + stdlib/@tests/stubtest_allowlists/py39.txt | 3 - stdlib/inspect.pyi | 8 +- stdlib/typing_extensions.pyi | 134 +++++++++++++------ 9 files changed, 115 insertions(+), 55 deletions(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 0f7faa5dbf4a..61165b749a1f 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -18,7 +18,7 @@ stubdefaulter==0.1.0 termcolor>=2.3 tomli==2.2.1 tomlkit==0.13.2 -typing_extensions>=4.12.0rc1 +typing_extensions>=4.13.0rc1 uv==0.5.14 # Utilities for typeshed infrastructure scripts. diff --git a/stdlib/@tests/stubtest_allowlists/common.txt b/stdlib/@tests/stubtest_allowlists/common.txt index f3cb0b2b52e7..a74701e50e2a 100644 --- a/stdlib/@tests/stubtest_allowlists/common.txt +++ b/stdlib/@tests/stubtest_allowlists/common.txt @@ -501,8 +501,9 @@ typing(_extensions)?\.ValuesView typing_extensions\.Final typing_extensions\.LiteralString -typing._SpecialForm.__call__ # Typing-related weirdness -typing._SpecialForm.__init__ # Typing-related weirdness +# Typing-related weirdness +typing._SpecialForm.__call__ +typing._SpecialForm.__init__ # These are abstract properties at runtime, # but marking them as such in the stub breaks half the the typed-Python ecosystem (see #8726) @@ -525,6 +526,10 @@ typing(_extensions)?\.(Async)?ContextManager typing(_extensions)?\.IO\.__iter__ typing(_extensions)?\.IO\.__next__ +# Will always raise. Not included to avoid type checkers inferring that +# TypeAliasType instances are callable. +typing_extensions.TypeAliasType.__call__ + types.MethodType.__closure__ # read-only but not actually a property; stubtest thinks it doesn't exist. types.MethodType.__code__ # read-only but not actually a property; stubtest thinks it doesn't exist. 
types.MethodType.__defaults__ # read-only but not actually a property; stubtest thinks it doesn't exist. diff --git a/stdlib/@tests/stubtest_allowlists/py310.txt b/stdlib/@tests/stubtest_allowlists/py310.txt index aa3af5ccc4b8..9f6ce2cf82e6 100644 --- a/stdlib/@tests/stubtest_allowlists/py310.txt +++ b/stdlib/@tests/stubtest_allowlists/py310.txt @@ -240,9 +240,6 @@ poplib.POP3_SSL.stls # bad declaration of inherited function. See poplib.pyi tkinter.test # Modules that exist at runtime, but shouldn't be added to typeshed tkinter\.test\..+ # Modules that exist at runtime, but shouldn't be added to typeshed -# Exist at runtime for internal reasons, no need to put them in the stub -typing_extensions\.TypeAliasType\.__call__ -typing_extensions\.TypeAliasType\.__init_subclass__ # We call them read-only properties, runtime implementation is slightly different typing_extensions\.TypeAliasType\.__(parameters|type_params|name|module|value)__ diff --git a/stdlib/@tests/stubtest_allowlists/py311.txt b/stdlib/@tests/stubtest_allowlists/py311.txt index 07c67b0584bb..2b7e1b3e527c 100644 --- a/stdlib/@tests/stubtest_allowlists/py311.txt +++ b/stdlib/@tests/stubtest_allowlists/py311.txt @@ -204,9 +204,6 @@ poplib.POP3_SSL.stls # bad declaration of inherited function. See poplib.pyi tkinter.test # Modules that exist at runtime, but shouldn't be added to typeshed tkinter\.test\..+ # Modules that exist at runtime, but shouldn't be added to typeshed -# Exist at runtime for internal reasons, no need to put them in the stub -typing_extensions\.TypeAliasType\.__call__ -typing_extensions\.TypeAliasType\.__init_subclass__ # We call them read-only properties, runtime implementation is slightly different typing_extensions\.TypeAliasType\.__(parameters|type_params|name|module|value)__ diff --git a/stdlib/@tests/stubtest_allowlists/py312.txt b/stdlib/@tests/stubtest_allowlists/py312.txt index 51f169f91088..aaea7009760b 100644 --- a/stdlib/@tests/stubtest_allowlists/py312.txt +++ b/stdlib/@tests/stubtest_allowlists/py312.txt @@ -9,6 +9,10 @@ zoneinfo.ZoneInfo.from_file # Pos-only parameters had different "names" in different Python versions +# Initialized at runtime +typing_extensions.TypeAliasType.__parameters__ +typing_extensions.TypeAliasType.__value__ + # ==================================== # Pre-existing errors from Python 3.11 diff --git a/stdlib/@tests/stubtest_allowlists/py313.txt b/stdlib/@tests/stubtest_allowlists/py313.txt index 86faeafc4f65..ba44606f9179 100644 --- a/stdlib/@tests/stubtest_allowlists/py313.txt +++ b/stdlib/@tests/stubtest_allowlists/py313.txt @@ -14,6 +14,10 @@ zoneinfo.ZoneInfo.from_file # Pos-only parameters had different "names" in different Python versions +# Initialized at runtime +typing_extensions.TypeAliasType.__parameters__ +typing_extensions.TypeAliasType.__value__ + # ======= # >= 3.11 diff --git a/stdlib/@tests/stubtest_allowlists/py39.txt b/stdlib/@tests/stubtest_allowlists/py39.txt index f35239fff80b..b52ad6594b78 100644 --- a/stdlib/@tests/stubtest_allowlists/py39.txt +++ b/stdlib/@tests/stubtest_allowlists/py39.txt @@ -213,9 +213,6 @@ poplib.POP3_SSL.stls # bad declaration of inherited function. 
See poplib.pyi tkinter.test # Modules that exist at runtime, but shouldn't be added to typeshed tkinter\.test\..+ # Modules that exist at runtime, but shouldn't be added to typeshed -# Exist at runtime for internal reasons, no need to put them in the stub -typing_extensions\.TypeAliasType\.__call__ -typing_extensions\.TypeAliasType\.__init_subclass__ # We call them read-only properties, runtime implementation is slightly different typing_extensions\.TypeAliasType\.__(parameters|type_params|name|module|value)__ diff --git a/stdlib/inspect.pyi b/stdlib/inspect.pyi index 229eb2135690..5bebe9bf4482 100644 --- a/stdlib/inspect.pyi +++ b/stdlib/inspect.pyi @@ -345,12 +345,12 @@ class Signature: if sys.version_info >= (3, 10): def get_annotations( - obj: Callable[..., object] | type[Any] | ModuleType, + obj: Callable[..., object] | type[object] | ModuleType, # any callable, class, or module *, - globals: Mapping[str, Any] | None = None, - locals: Mapping[str, Any] | None = None, + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key eval_str: bool = False, - ) -> dict[str, Any]: ... + ) -> dict[str, Any]: ... # values are type expressions # The name is the same as the enum's name in CPython class _ParameterKind(enum.IntEnum): diff --git a/stdlib/typing_extensions.pyi b/stdlib/typing_extensions.pyi index fd98722b10a8..3d369dcd63b2 100644 --- a/stdlib/typing_extensions.pyi +++ b/stdlib/typing_extensions.pyi @@ -1,9 +1,11 @@ import abc +import enum import sys import typing from _collections_abc import dict_items, dict_keys, dict_values -from _typeshed import IdentityFunction +from _typeshed import IdentityFunction, Incomplete, Unused from contextlib import AbstractAsyncContextManager as AsyncContextManager, AbstractContextManager as ContextManager +from types import ModuleType from typing import ( # noqa: Y022,Y037,Y038,Y039 IO as IO, TYPE_CHECKING as TYPE_CHECKING, @@ -68,9 +70,10 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 9): from types import GenericAlias +# Please keep order the same as at runtime. __all__ = [ + # Super-special typing primitives. "Any", - "Buffer", "ClassVar", "Concatenate", "Final", @@ -83,14 +86,16 @@ __all__ = [ "TypeVar", "TypeVarTuple", "Unpack", + # ABCs (from collections.abc). "Awaitable", "AsyncIterator", "AsyncIterable", "Coroutine", "AsyncGenerator", "AsyncContextManager", - "CapsuleType", + "Buffer", "ChainMap", + # Concrete collection types. "ContextManager", "Counter", "Deque", @@ -98,20 +103,34 @@ __all__ = [ "NamedTuple", "OrderedDict", "TypedDict", - "SupportsIndex", + # Structural checks, a.k.a. protocols. "SupportsAbs", - "SupportsRound", "SupportsBytes", "SupportsComplex", "SupportsFloat", + "SupportsIndex", "SupportsInt", + "SupportsRound", + # One-off things. 
"Annotated", "assert_never", "assert_type", + "clear_overloads", "dataclass_transform", "deprecated", + "Doc", + "evaluate_forward_ref", + "get_overloads", "final", + "Format", + "get_annotations", + "get_args", + "get_origin", + "get_original_bases", + "get_protocol_members", + "get_type_hints", "IntVar", + "is_protocol", "is_typeddict", "Literal", "NewType", @@ -124,18 +143,18 @@ __all__ = [ "Text", "TypeAlias", "TypeAliasType", + "TypeForm", "TypeGuard", + "TypeIs", "TYPE_CHECKING", "Never", "NoReturn", + "ReadOnly", "Required", "NotRequired", - "clear_overloads", - "get_args", - "get_origin", - "get_original_bases", - "get_overloads", - "get_type_hints", + "NoDefault", + "NoExtraItems", + # Pure aliases, have always been in typing "AbstractSet", "AnyStr", "BinaryIO", @@ -143,7 +162,6 @@ __all__ = [ "Collection", "Container", "Dict", - "Doc", "ForwardRef", "FrozenSet", "Generator", @@ -161,7 +179,6 @@ __all__ = [ "MutableMapping", "MutableSequence", "MutableSet", - "NoDefault", "Optional", "Pattern", "Reversible", @@ -173,12 +190,10 @@ __all__ = [ "Union", "ValuesView", "cast", - "get_protocol_members", - "is_protocol", "no_type_check", "no_type_check_decorator", - "ReadOnly", - "TypeIs", + # Added dynamically + "CapsuleType", ] _T = typing.TypeVar("_T") @@ -382,33 +397,11 @@ if sys.version_info >= (3, 12): SupportsIndex as SupportsIndex, SupportsInt as SupportsInt, SupportsRound as SupportsRound, - TypeAliasType as TypeAliasType, override as override, ) else: def override(arg: _F, /) -> _F: ... def get_original_bases(cls: type, /) -> tuple[Any, ...]: ... - @final - class TypeAliasType: - def __init__( - self, name: str, value: Any, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () - ) -> None: ... - @property - def __value__(self) -> Any: ... - @property - def __type_params__(self) -> tuple[TypeVar | ParamSpec | TypeVarTuple, ...]: ... - @property - def __parameters__(self) -> tuple[Any, ...]: ... - @property - def __name__(self) -> str: ... - # It's writable on types, but not on instances of TypeAliasType. - @property - def __module__(self) -> str | None: ... # type: ignore[override] - # Returns typing._GenericAlias, which isn't stubbed. - def __getitem__(self, parameters: Any) -> Any: ... - if sys.version_info >= (3, 10): - def __or__(self, right: Any) -> _SpecialForm: ... - def __ror__(self, left: Any) -> _SpecialForm: ... # mypy and pyright object to this being both ABC and Protocol. # At runtime it inherits from ABC and is not a Protocol, but it is on the @@ -569,8 +562,71 @@ else: ReadOnly: _SpecialForm TypeIs: _SpecialForm +# TypeAliasType was added in Python 3.12, but had significant changes in 3.14. +if sys.version_info >= (3, 14): + from typing import TypeAliasType as TypeAliasType +else: + @final + class TypeAliasType: + def __init__( + self, name: str, value: Any, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () + ) -> None: ... # value is a type expression + @property + def __value__(self) -> Any: ... # a type expression + @property + def __type_params__(self) -> tuple[TypeVar | ParamSpec | TypeVarTuple, ...]: ... + @property + # `__parameters__` can include special forms if a `TypeVarTuple` was + # passed as a `type_params` element to the constructor method. + def __parameters__(self) -> tuple[TypeVar | ParamSpec | Any, ...]: ... + @property + def __name__(self) -> str: ... + # It's writable on types, but not on instances of TypeAliasType. + @property + def __module__(self) -> str | None: ... 
# type: ignore[override] + # Returns typing._GenericAlias, which isn't stubbed. + def __getitem__(self, parameters: Incomplete | tuple[Incomplete, ...]) -> Any: ... + def __init_subclass__(cls, *args: Unused, **kwargs: Unused) -> NoReturn: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + +# PEP 727 class Doc: documentation: str def __init__(self, documentation: str, /) -> None: ... def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... + +# PEP 728 +class _NoExtraItemsType: ... + +NoExtraItems: _NoExtraItemsType + +# PEP 747 +TypeForm: _SpecialForm + +class Format(enum.IntEnum): + VALUE = 1 + FORWARDREF = 2 + STRING = 3 + +# PEP 649/749 +def get_annotations( + obj: Callable[..., object] | type[object] | ModuleType, # any callable, class, or module + *, + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + eval_str: bool = False, + format: Format = Format.VALUE, # noqa: Y011 +) -> dict[str, Any]: ... # values are type expressions +def evaluate_forward_ref( + forward_ref: ForwardRef, + *, + owner: Callable[..., object] | type[object] | ModuleType | None = None, # any callable, class, or module + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, + format: Format = Format.VALUE, # noqa: Y011 + _recursive_guard: Container[str] = ..., +) -> Any: ... # str if format is Format.STRING, otherwise a type expression From 6672218cf9877386233f3f2d6c6315a31317c06f Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Wed, 19 Mar 2025 16:23:24 +0100 Subject: [PATCH 112/388] logging.config: Add @type_check_only markers (#13674) --- stdlib/logging/config.pyi | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/stdlib/logging/config.pyi b/stdlib/logging/config.pyi index 5c444e66c4c7..000ba1ebb06e 100644 --- a/stdlib/logging/config.pyi +++ b/stdlib/logging/config.pyi @@ -4,7 +4,7 @@ from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence from configparser import RawConfigParser from re import Pattern from threading import Thread -from typing import IO, Any, Final, Literal, SupportsIndex, TypedDict, overload +from typing import IO, Any, Final, Literal, SupportsIndex, TypedDict, overload, type_check_only from typing_extensions import Required, TypeAlias from . 
import Filter, Filterer, Formatter, Handler, Logger, _FilterType, _FormatStyle, _Level @@ -14,17 +14,20 @@ RESET_ERROR: Final[int] # undocumented IDENTIFIER: Final[Pattern[str]] # undocumented if sys.version_info >= (3, 11): + @type_check_only class _RootLoggerConfiguration(TypedDict, total=False): level: _Level filters: Sequence[str | _FilterType] handlers: Sequence[str] else: + @type_check_only class _RootLoggerConfiguration(TypedDict, total=False): level: _Level filters: Sequence[str] handlers: Sequence[str] +@type_check_only class _LoggerConfiguration(_RootLoggerConfiguration, TypedDict, total=False): propagate: bool @@ -32,6 +35,7 @@ _FormatterConfigurationTypedDict = TypedDict( "_FormatterConfigurationTypedDict", {"class": str, "format": str, "datefmt": str, "style": _FormatStyle}, total=False ) +@type_check_only class _FilterConfigurationTypedDict(TypedDict): name: str @@ -43,6 +47,7 @@ _FilterConfiguration: TypeAlias = _FilterConfigurationTypedDict | dict[str, Any] # Handler config can have additional keys even when not providing a custom factory so we just use `dict`. _HandlerConfiguration: TypeAlias = dict[str, Any] +@type_check_only class _DictConfigArgs(TypedDict, total=False): version: Required[Literal[1]] formatters: dict[str, _FormatterConfiguration] From 10abc3c24d5a6c17c2b9086fbe706b8b4dee289b Mon Sep 17 00:00:00 2001 From: Tim Van Baak <40180944+tvanbaak@users.noreply.github.com> Date: Thu, 20 Mar 2025 01:19:06 -0700 Subject: [PATCH 113/388] Fix incorrect default value for callback (#13678) --- stubs/Flask-SocketIO/flask_socketio/test_client.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/Flask-SocketIO/flask_socketio/test_client.pyi b/stubs/Flask-SocketIO/flask_socketio/test_client.pyi index cf7eb153405e..62f27781c868 100644 --- a/stubs/Flask-SocketIO/flask_socketio/test_client.pyi +++ b/stubs/Flask-SocketIO/flask_socketio/test_client.pyi @@ -29,7 +29,7 @@ class SocketIOTestClient: auth: dict[str, Incomplete] | None = None, ) -> None: ... def disconnect(self, namespace: str | None = None) -> None: ... - def emit(self, event: str, *args, callback: bool = True, namespace: str | None = None) -> Incomplete | None: ... + def emit(self, event: str, *args, callback: bool = False, namespace: str | None = None) -> Incomplete | None: ... def send( self, data: str | dict[str, Incomplete] | list[Incomplete], From c2c54d7e5886d46d2ddd97d946a070be824c4605 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 20 Mar 2025 09:36:26 +0100 Subject: [PATCH 114/388] [icalendar] Update to 6.1.2 (#13672) Finish icalendar.prop annotations --- stubs/icalendar/@tests/stubtest_allowlist.txt | 9 + stubs/icalendar/METADATA.toml | 2 +- stubs/icalendar/icalendar/prop.pyi | 210 +++++++++++------- .../timezone/equivalent_timezone_ids.pyi | 6 +- 4 files changed, 136 insertions(+), 91 deletions(-) diff --git a/stubs/icalendar/@tests/stubtest_allowlist.txt b/stubs/icalendar/@tests/stubtest_allowlist.txt index 5c91cf0a219f..b03841be6f7f 100644 --- a/stubs/icalendar/@tests/stubtest_allowlist.txt +++ b/stubs/icalendar/@tests/stubtest_allowlist.txt @@ -12,3 +12,12 @@ icalendar.cal.Component.set_inline # Stubtest gets confused by multiple inheritance. icalendar.prop.vSkip.__new__ + +# Stubtest incorrectly reports that stub argument "cls" should be +# positional or keyword. 
+icalendar.vBoolean.__new__ +icalendar.vFloat.__new__ +icalendar.vInt.__new__ +icalendar.prop.vBoolean.__new__ +icalendar.prop.vFloat.__new__ +icalendar.prop.vInt.__new__ diff --git a/stubs/icalendar/METADATA.toml b/stubs/icalendar/METADATA.toml index b38656b30402..2892a10d6e38 100644 --- a/stubs/icalendar/METADATA.toml +++ b/stubs/icalendar/METADATA.toml @@ -1,4 +1,4 @@ -version = "6.1.*" +version = "~= 6.1.2" upstream_repository = "https://github.com/collective/icalendar" requires = ["types-python-dateutil", "types-pytz"] diff --git a/stubs/icalendar/icalendar/prop.pyi b/stubs/icalendar/icalendar/prop.pyi index 17d79298b676..ae5a7ec6942e 100644 --- a/stubs/icalendar/icalendar/prop.pyi +++ b/stubs/icalendar/icalendar/prop.pyi @@ -1,9 +1,9 @@ import datetime -from _typeshed import Incomplete, Unused -from collections.abc import Iterator +from _typeshed import ConvertibleToFloat, ConvertibleToInt, SupportsKeysAndGetItem, Unused +from collections.abc import Iterable, Iterator from enum import Enum from re import Pattern -from typing import Any, ClassVar, Final, TypeVar, overload +from typing import Any, ClassVar, Final, Literal, Protocol, SupportsIndex, overload from typing_extensions import Self, TypeAlias from .caselessdict import CaselessDict @@ -44,70 +44,80 @@ __all__ = [ ] _PropType: TypeAlias = type[Any] # any of the v* classes in this file -_vRecurT = TypeVar("_vRecurT", bound=vRecur) +_PeriodTuple: TypeAlias = tuple[datetime.datetime, datetime.datetime | datetime.timedelta] +_AnyTimeType: TypeAlias = datetime.datetime | datetime.date | datetime.timedelta | datetime.time | _PeriodTuple + +class _vType(Protocol): + def to_ical(self) -> bytes | str: ... DURATION_REGEX: Final[Pattern[str]] WEEKDAY_RULE: Final[Pattern[str]] class vBinary: - obj: Incomplete + obj: str params: Parameters - def __init__(self, obj) -> None: ... + def __init__(self, obj: str | bytes) -> None: ... def to_ical(self) -> bytes: ... @staticmethod - def from_ical(ical): ... - def __eq__(self, other): ... + def from_ical(ical: ICAL_TYPE) -> bytes: ... + def __eq__(self, other: object) -> bool: ... class vBoolean(int): - BOOL_MAP: Incomplete + BOOL_MAP: Final[CaselessDict[bool]] params: Parameters - def __new__(cls, *args, **kwargs): ... - def to_ical(self) -> bytes: ... + def __new__(cls, x: ConvertibleToInt = ..., /, *, params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... + def to_ical(self) -> Literal[b"TRUE", b"FALSE"]: ... @classmethod - def from_ical(cls, ical): ... + def from_ical(cls, ical: ICAL_TYPE) -> bool: ... class vText(str): encoding: str params: Parameters - def __new__(cls, value: ICAL_TYPE, encoding: str = "utf-8") -> Self: ... + def __new__(cls, value: ICAL_TYPE, encoding: str = "utf-8", params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod def from_ical(cls, ical: ICAL_TYPE) -> Self: ... class vCalAddress(str): params: Parameters - def __new__(cls, value, encoding="utf-8"): ... + def __new__(cls, value: ICAL_TYPE, encoding="utf-8", params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod - def from_ical(cls, ical): ... + def from_ical(cls, ical: ICAL_TYPE) -> Self: ... + @property + def email(self) -> str: ... + @property + def name(self) -> str: ... + @name.setter + def name(self, value: str) -> None: ... class vFloat(float): params: Parameters - def __new__(cls, *args, **kwargs): ... 
+ def __new__(cls, x: ConvertibleToFloat = ..., /, *, params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod - def from_ical(cls, ical): ... + def from_ical(cls, ical: ICAL_TYPE) -> Self: ... class vInt(int): params: Parameters - def __new__(cls, *args, **kwargs): ... + def __new__(cls, x: ConvertibleToInt = ..., /, *, params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod def from_ical(cls, ical: ICAL_TYPE) -> Self: ... class vDDDLists: params: Parameters - dts: Incomplete - def __init__(self, dt_list) -> None: ... + dts: list[vDDDTypes] + def __init__(self, dt_list: Iterable[_AnyTimeType] | _AnyTimeType) -> None: ... def to_ical(self) -> bytes: ... @staticmethod - def from_ical(ical, timezone: Incomplete | None = None): ... - def __eq__(self, other): ... + def from_ical(ical: str, timezone: str | datetime.timezone | None = None): ... + def __eq__(self, other: object) -> bool: ... class vCategory: - cats: Incomplete + cats: list[vText] params: Parameters - def __init__(self, c_list) -> None: ... + def __init__(self, c_list: Iterable[ICAL_TYPE] | ICAL_TYPE, params: SupportsKeysAndGetItem[str, str] = {}) -> None: ... def __iter__(self) -> Iterator[str]: ... def to_ical(self) -> bytes: ... @staticmethod @@ -116,80 +126,95 @@ class vCategory: class TimeBase: def __eq__(self, other: object) -> bool: ... - def __hash__(self): ... + def __hash__(self) -> int: ... class vDDDTypes(TimeBase): params: Parameters - dt: Incomplete - def __init__(self, dt) -> None: ... + dt: _AnyTimeType + def __init__(self, dt: _AnyTimeType) -> None: ... def to_ical(self) -> bytes: ... + @overload @classmethod - def from_ical(cls, ical, timezone: Incomplete | None = None): ... + def from_ical(cls, ical: Self, timezone: Unused | None = None) -> _AnyTimeType: ... + # Return type is one of vDuration, vPeriod, vDatetime, vDate, or vTime, + # depending on the ical string. + @overload + @classmethod + def from_ical(cls, ical: str, timezone: datetime.timezone | str | None = None) -> Any: ... class vDate(TimeBase): - dt: Incomplete + dt: datetime.date params: Parameters - def __init__(self, dt) -> None: ... + def __init__(self, dt: datetime.date) -> None: ... def to_ical(self) -> bytes: ... @staticmethod - def from_ical(ical): ... + def from_ical(ical: ICAL_TYPE) -> datetime.date: ... class vDatetime(TimeBase): - dt: Incomplete + dt: datetime.datetime params: Parameters - def __init__(self, dt) -> None: ... + def __init__(self, dt: datetime.datetime, params: SupportsKeysAndGetItem[str, str] = {}) -> None: ... def to_ical(self) -> bytes: ... @staticmethod - def from_ical(ical, timezone: datetime.timezone | str | None = None) -> datetime.datetime: ... + def from_ical(ical: ICAL_TYPE, timezone: datetime.timezone | str | None = None) -> datetime.datetime: ... class vDuration(TimeBase): - td: Incomplete + td: datetime.timedelta params: Parameters - def __init__(self, td) -> None: ... + def __init__(self, td: datetime.timedelta, params: SupportsKeysAndGetItem[str, str] = {}) -> None: ... def to_ical(self) -> bytes: ... @staticmethod - def from_ical(ical): ... + def from_ical(ical: str) -> datetime.timedelta: ... @property - def dt(self): ... + def dt(self) -> datetime.timedelta: ... class vPeriod(TimeBase): params: Parameters - start: Incomplete - end: Incomplete - by_duration: Incomplete - duration: Incomplete - def __init__(self, per) -> None: ... - def overlaps(self, other): ... 
+ start: datetime.datetime + end: datetime.datetime + by_duration: bool + duration: datetime.timedelta + def __init__(self, per: _PeriodTuple) -> None: ... + def overlaps(self, other: vPeriod) -> bool: ... def to_ical(self) -> bytes: ... + # Return type is a tuple of vDuration, vPeriod, vDatetime, vDate, or vTime, + # depending on the ical string. If the ical string is formed according to + # the iCalendar specification, this should always return a + # (datetime, datetime) or a (datetime, timedelta) tuple, but this is not + # enforced. @staticmethod - def from_ical(ical, timezone: Incomplete | None = None): ... + def from_ical(ical: str, timezone: datetime.timezone | str | None = None) -> tuple[Any, Any]: ... @property - def dt(self): ... + def dt(self) -> _PeriodTuple: ... class vWeekday(str): - week_days: Incomplete - relative: Incomplete + week_days: Final[CaselessDict[int]] + weekday: Literal["SU", "MO", "TU", "WE", "TH", "FR", "SA"] | None + relative: int | None params: Parameters - def __new__(cls, value, encoding="utf-8"): ... + def __new__(cls, value: ICAL_TYPE, encoding: str = "utf-8", params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod - def from_ical(cls, ical): ... + def from_ical(cls, ical: ICAL_TYPE) -> Self: ... class vFrequency(str): - frequencies: Incomplete + frequencies: Final[CaselessDict[str]] params: Parameters - def __new__(cls, value, encoding="utf-8"): ... + def __new__(cls, value: ICAL_TYPE, encoding: str = "utf-8", params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod - def from_ical(cls, ical): ... + def from_ical(cls, ical: ICAL_TYPE) -> Self: ... class vMonth(int): - leap: bool params: Parameters - def __new__(cls, month: vMonth | str | int) -> Self: ... + def __new__(cls, month: vMonth | str | int, params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod def from_ical(cls, ical: vMonth | str | int) -> Self: ... + @property + def leap(self) -> bool: ... + @leap.setter + def leap(self, value: bool) -> None: ... class vSkip(vText, Enum): OMIT = "OMIT" @@ -198,69 +223,82 @@ class vSkip(vText, Enum): def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... -class vRecur(CaselessDict[Incomplete]): - frequencies: ClassVar[list[str]] - canonical_order: ClassVar[tuple[str, ...]] - types: ClassVar[CaselessDict[_PropType]] +# The type of the values depend on the key. Each key maps to a v* class, and +# the allowed types are the types that the corresponding v* class can parse. +class vRecur(CaselessDict[Iterable[Any] | Any]): params: Parameters - def __init__(self, *args, **kwargs) -> None: ... + frequencies: Final[list[str]] + canonical_order: ClassVar[tuple[str, ...]] + types: Final[CaselessDict[_PropType]] + def __init__( + self, *args, params: SupportsKeysAndGetItem[str, str] = {}, **kwargs: list[Any] | tuple[Any, ...] | Any + ) -> None: ... def to_ical(self) -> bytes: ... @classmethod - def parse_type(cls, key, values): ... + def parse_type(cls, key: str, values: str) -> list[Any]: ... # Returns a list of v* objects @classmethod - @overload - def from_ical(cls, ical: _vRecurT) -> _vRecurT: ... - @classmethod - @overload - def from_ical(cls, ical: str) -> Self: ... + def from_ical(cls, ical: vRecur | str) -> Self: ... class vTime(TimeBase): - dt: Incomplete + dt: datetime.time | datetime.datetime params: Parameters - def __init__(self, *args) -> None: ... - def to_ical(self) -> bytes: ... 
+ @overload + def __init__(self, dt: datetime.time | datetime.datetime, /) -> None: ... + # args are passed to the datetime.time() constructor + @overload + def __init__( + self, + hour: SupportsIndex = ..., + minute: SupportsIndex = ..., + second: SupportsIndex = ..., + microsecond: SupportsIndex = ..., + tzinfo: datetime.tzinfo | None = ..., + /, + ) -> None: ... + def to_ical(self) -> str: ... @staticmethod - def from_ical(ical): ... + def from_ical(ical: ICAL_TYPE) -> datetime.time: ... class vUri(str): params: Parameters - def __new__(cls, value, encoding="utf-8"): ... + def __new__(cls, value: ICAL_TYPE, encoding: str = "utf-8", params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod - def from_ical(cls, ical): ... + def from_ical(cls, ical: ICAL_TYPE) -> Self: ... class vGeo: - latitude: Incomplete - longitude: Incomplete + latitude: float + longitude: float params: Parameters - def __init__(self, geo) -> None: ... - def to_ical(self) -> bytes: ... + def __init__(self, geo: tuple[float | str, float | str], params: SupportsKeysAndGetItem[str, str] = {}) -> None: ... + def to_ical(self) -> str: ... @staticmethod - def from_ical(ical): ... - def __eq__(self, other): ... + def from_ical(ical: str) -> tuple[float, float]: ... + def __eq__(self, other: _vType) -> bool: ... # type: ignore[override] class vUTCOffset: ignore_exceptions: bool td: datetime.timedelta params: Parameters - def __init__(self, td: datetime.timedelta) -> None: ... - def to_ical(self) -> bytes: ... + def __init__(self, td: datetime.timedelta, params: SupportsKeysAndGetItem[str, str] = {}) -> None: ... + def to_ical(self) -> str: ... @classmethod - def from_ical(cls, ical): ... + def from_ical(cls, ical: Self | ICAL_TYPE) -> datetime.timedelta: ... def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... class vInline(str): params: Parameters - def __new__(cls, value, encoding="utf-8"): ... + def __new__(cls, value: ICAL_TYPE, encoding: str = "utf-8", params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod - def from_ical(cls, ical): ... + def from_ical(cls, ical: ICAL_TYPE) -> Self: ... class TypesFactory(CaselessDict[_PropType]): all_types: tuple[_PropType, ...] - def __init__(self, *args, **kwargs) -> None: ... types_map: CaselessDict[str] def for_property(self, name: str) -> _PropType: ... - def to_ical(self, name: str, value) -> bytes: ... - def from_ical(self, name: str, value): ... + # value is str | bytes, depending on what the v* class supports + def to_ical(self, name: str, value: Any) -> bytes: ... + # value and return type depend on what the v* class supports + def from_ical(self, name: str, value: Any) -> Any: ... diff --git a/stubs/icalendar/icalendar/timezone/equivalent_timezone_ids.pyi b/stubs/icalendar/icalendar/timezone/equivalent_timezone_ids.pyi index 046385bb0c92..f07cf5d17bdc 100644 --- a/stubs/icalendar/icalendar/timezone/equivalent_timezone_ids.pyi +++ b/stubs/icalendar/icalendar/timezone/equivalent_timezone_ids.pyi @@ -4,12 +4,10 @@ from typing import Final __all__ = ["main"] -def check(dt: datetime.datetime, tz: datetime.tzinfo) -> tuple[datetime.datetime, datetime.timedelta]: ... -def checks(tz: datetime.tzinfo) -> list[tuple[datetime.datetime, datetime.timedelta]]: ... 
- START: Final[datetime.datetime] END: Final[datetime.datetime] +DISTANCE_FROM_TIMEZONE_CHANGE: Final[datetime.timedelta] DTS: Final[list[datetime.datetime]] -def main(create_timezones: list[Callable[[str], datetime.tzinfo]], name: str, pool_size: int = ...) -> None: ... +def main(create_timezones: list[Callable[[str], datetime.tzinfo]], name: str) -> None: ... From a9a854fb99cb4d1819c4bf8475269d7e471f7b37 Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Thu, 20 Mar 2025 13:26:43 +0100 Subject: [PATCH 115/388] covariant `warnings.catch_warnings` type-parameter with default (#13675) --- stdlib/warnings.pyi | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/stdlib/warnings.pyi b/stdlib/warnings.pyi index 533a36817506..49c98cb07540 100644 --- a/stdlib/warnings.pyi +++ b/stdlib/warnings.pyi @@ -3,8 +3,8 @@ import sys from _warnings import warn as warn, warn_explicit as warn_explicit from collections.abc import Sequence from types import ModuleType, TracebackType -from typing import Any, Generic, Literal, TextIO, TypeVar, overload -from typing_extensions import LiteralString, TypeAlias +from typing import Any, Generic, Literal, TextIO, overload +from typing_extensions import LiteralString, TypeAlias, TypeVar __all__ = [ "warn", @@ -21,7 +21,8 @@ if sys.version_info >= (3, 13): __all__ += ["deprecated"] _T = TypeVar("_T") -_W = TypeVar("_W", bound=list[WarningMessage] | None) +_W_co = TypeVar("_W_co", bound=list[WarningMessage] | None, default=list[WarningMessage] | None, covariant=True) + if sys.version_info >= (3, 14): _ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "module", "once"] else: @@ -66,7 +67,7 @@ class WarningMessage: source: Any | None = None, ) -> None: ... -class catch_warnings(Generic[_W]): +class catch_warnings(Generic[_W_co]): if sys.version_info >= (3, 11): @overload def __init__( @@ -92,7 +93,7 @@ class catch_warnings(Generic[_W]): ) -> None: ... @overload def __init__( - self: catch_warnings[list[WarningMessage] | None], + self, *, record: bool, module: ModuleType | None = None, @@ -109,11 +110,9 @@ class catch_warnings(Generic[_W]): self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = None ) -> None: ... @overload - def __init__( - self: catch_warnings[list[WarningMessage] | None], *, record: bool, module: ModuleType | None = None - ) -> None: ... + def __init__(self, *, record: bool, module: ModuleType | None = None) -> None: ... - def __enter__(self) -> _W: ... + def __enter__(self) -> _W_co: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... From c44339cb6ba611a909effda5814a179915a6c08f Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 20 Mar 2025 13:28:49 +0100 Subject: [PATCH 116/388] [gdb] Remove unused import (#13682) Unblocks #13680 --- stubs/gdb/gdb/__init__.pyi | 1 - 1 file changed, 1 deletion(-) diff --git a/stubs/gdb/gdb/__init__.pyi b/stubs/gdb/gdb/__init__.pyi index 7bf762889b23..12e35708e867 100644 --- a/stubs/gdb/gdb/__init__.pyi +++ b/stubs/gdb/gdb/__init__.pyi @@ -12,7 +12,6 @@ from typing_extensions import TypeAlias, deprecated import gdb.FrameDecorator import gdb.types -import gdb.xmethod # The following submodules are automatically imported from . 
import events as events, printing as printing, prompt as prompt, types as types From 151e08acac013038d650e02fd69448226256522b Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 20 Mar 2025 14:09:08 +0100 Subject: [PATCH 117/388] Move imports into version_info branch (#13684) --- stdlib/@tests/test_cases/check_importlib_resources.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stdlib/@tests/test_cases/check_importlib_resources.py b/stdlib/@tests/test_cases/check_importlib_resources.py index cf1507389bef..2638ca1c1ba9 100644 --- a/stdlib/@tests/test_cases/check_importlib_resources.py +++ b/stdlib/@tests/test_cases/check_importlib_resources.py @@ -1,7 +1,5 @@ from __future__ import annotations -import importlib.resources -import pathlib import sys @@ -11,6 +9,8 @@ def __fspath__(self) -> str: if sys.version_info >= (3, 13): + import importlib.resources + import pathlib def f(pth: pathlib.Path | str | _CustomPathLike) -> None: importlib.resources.open_binary("pkg", pth) From 57060b9536ff2cb87de15b96cddcc7ce4b565a14 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 14:21:17 +0100 Subject: [PATCH 118/388] Update dependency pyright to v1.1.397 (#13680) --- requirements-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 61165b749a1f..3c6eff3a4aad 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,7 +1,7 @@ # Type checkers that we test our stubs against. These should always # be pinned to a specific version to make failure reproducible. mypy==1.15.0 -pyright==1.1.396 +pyright==1.1.397 # pytype can be installed on Windows, but requires building wheels, let's not do that on the CI pytype==2024.10.11; platform_system != "Windows" and python_version >= "3.10" and python_version < "3.13" From 408b817a961b01e9e8c7b1adcd097082c4b7833b Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Thu, 20 Mar 2025 16:49:10 +0000 Subject: [PATCH 119/388] [asyncio] Allow any memoryview in write() and writelines() (#13519) --- stdlib/asyncio/sslproto.pyi | 2 +- stdlib/asyncio/transports.pyi | 6 ++++-- tests/pytype_exclude_list.txt | 4 ++++ 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/stdlib/asyncio/sslproto.pyi b/stdlib/asyncio/sslproto.pyi index ded1933dd659..ab102f124c2e 100644 --- a/stdlib/asyncio/sslproto.pyi +++ b/stdlib/asyncio/sslproto.pyi @@ -76,7 +76,7 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ... @property def _protocol_paused(self) -> bool: ... - def write(self, data: bytes | bytearray | memoryview) -> None: ... + def write(self, data: bytes | bytearray | memoryview[Any]) -> None: ... # any memoryview format or shape def can_write_eof(self) -> Literal[False]: ... if sys.version_info >= (3, 11): def get_write_buffer_limits(self) -> tuple[int, int]: ... diff --git a/stdlib/asyncio/transports.pyi b/stdlib/asyncio/transports.pyi index c28ae234f2cc..bce54897f18f 100644 --- a/stdlib/asyncio/transports.pyi +++ b/stdlib/asyncio/transports.pyi @@ -24,8 +24,10 @@ class WriteTransport(BaseTransport): def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... def get_write_buffer_size(self) -> int: ... def get_write_buffer_limits(self) -> tuple[int, int]: ... - def write(self, data: bytes | bytearray | memoryview) -> None: ... 
- def writelines(self, list_of_data: Iterable[bytes | bytearray | memoryview]) -> None: ... + def write(self, data: bytes | bytearray | memoryview[Any]) -> None: ... # any memoryview format or shape + def writelines( + self, list_of_data: Iterable[bytes | bytearray | memoryview[Any]] + ) -> None: ... # any memoryview format or shape def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def abort(self) -> None: ... diff --git a/tests/pytype_exclude_list.txt b/tests/pytype_exclude_list.txt index 4eac426de8f6..bad4109bf3c7 100644 --- a/tests/pytype_exclude_list.txt +++ b/tests/pytype_exclude_list.txt @@ -5,6 +5,10 @@ stdlib/builtins.pyi stdlib/typing.pyi +# can't subscript memoryview for some reason +stdlib/asyncio/sslproto.pyi +stdlib/asyncio/transports.pyi + # errors about import statements stubs/mysqlclient/MySQLdb/__init__.pyi stubs/mysqlclient/MySQLdb/connections.pyi From 3e589211abbcacd443f02a1291aab48a3892d5f0 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 21 Mar 2025 16:45:00 +0400 Subject: [PATCH 120/388] Add `__all__` to `retry` (#13691) --- stubs/retry/@tests/stubtest_allowlist.txt | 3 --- stubs/retry/retry/__init__.pyi | 2 ++ 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/stubs/retry/@tests/stubtest_allowlist.txt b/stubs/retry/@tests/stubtest_allowlist.txt index 061a568b19b9..0e4e3d2ea1ed 100644 --- a/stubs/retry/@tests/stubtest_allowlist.txt +++ b/stubs/retry/@tests/stubtest_allowlist.txt @@ -1,6 +1,3 @@ -# TODO: missing from stub -retry.__all__ - retry.compat retry.tests retry.tests.test_retry diff --git a/stubs/retry/retry/__init__.pyi b/stubs/retry/retry/__init__.pyi index 0cf7651b0569..6818afd2d64a 100644 --- a/stubs/retry/retry/__init__.pyi +++ b/stubs/retry/retry/__init__.pyi @@ -1 +1,3 @@ from .api import retry as retry + +__all__ = ["retry"] From 0662fea473b398775cfcd83821513c68187e6c6a Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 21 Mar 2025 16:46:04 +0400 Subject: [PATCH 121/388] Add `__all__` to `click-default-group` (#13694) --- stubs/click-default-group/@tests/stubtest_allowlist.txt | 2 -- stubs/click-default-group/click_default_group.pyi | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) delete mode 100644 stubs/click-default-group/@tests/stubtest_allowlist.txt diff --git a/stubs/click-default-group/@tests/stubtest_allowlist.txt b/stubs/click-default-group/@tests/stubtest_allowlist.txt deleted file mode 100644 index 7a9584f692a5..000000000000 --- a/stubs/click-default-group/@tests/stubtest_allowlist.txt +++ /dev/null @@ -1,2 +0,0 @@ -# TODO: missing from stub -click_default_group.__all__ diff --git a/stubs/click-default-group/click_default_group.pyi b/stubs/click-default-group/click_default_group.pyi index 992f3b43fa91..5c73c4dd8db4 100644 --- a/stubs/click-default-group/click_default_group.pyi +++ b/stubs/click-default-group/click_default_group.pyi @@ -3,6 +3,7 @@ from _typeshed import Incomplete import click +__all__ = ["DefaultGroup"] __version__: str class DefaultGroup(click.Group): From edec5718e37a8aeb63e5adac1d4095617af5eec4 Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Fri, 21 Mar 2025 05:46:23 -0700 Subject: [PATCH 122/388] Make `Any` a proper class instead of an alias to `object()` (#13520) --- stdlib/@tests/stubtest_allowlists/common.txt | 1 + stdlib/typing.pyi | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/common.txt b/stdlib/@tests/stubtest_allowlists/common.txt index a74701e50e2a..7746a8082a51 100644 --- 
a/stdlib/@tests/stubtest_allowlists/common.txt +++ b/stdlib/@tests/stubtest_allowlists/common.txt @@ -464,6 +464,7 @@ typing._Final typing._Final.__init_subclass__ typing\.Protocol typing(_extensions)?\._TypedDict +typing(_extensions)?\.Any.* typing(_extensions)?\.Generic typing(_extensions)?\.TypedDict typing_extensions\.ParamSpec.* diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi index 9b0443973fcd..1ab5dae09cb4 100644 --- a/stdlib/typing.pyi +++ b/stdlib/typing.pyi @@ -130,8 +130,7 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 13): __all__ += ["get_protocol_members", "is_protocol", "NoDefault", "TypeIs", "ReadOnly"] -Any = object() - +class Any: ... class _Final: ... def final(f: _T) -> _T: ... From 2bae14ed336dae89b7896b920e6b66a04733b84f Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 21 Mar 2025 17:18:20 +0400 Subject: [PATCH 123/388] Add stubs for `rfc3339-validator` (#13688) --- stubs/rfc3339-validator/METADATA.toml | 2 ++ stubs/rfc3339-validator/rfc3339_validator.pyi | 10 ++++++++++ 2 files changed, 12 insertions(+) create mode 100644 stubs/rfc3339-validator/METADATA.toml create mode 100644 stubs/rfc3339-validator/rfc3339_validator.pyi diff --git a/stubs/rfc3339-validator/METADATA.toml b/stubs/rfc3339-validator/METADATA.toml new file mode 100644 index 000000000000..f564e7971ec4 --- /dev/null +++ b/stubs/rfc3339-validator/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.1.*" +upstream_repository = "https://github.com/naimetti/rfc3339-validator" diff --git a/stubs/rfc3339-validator/rfc3339_validator.pyi b/stubs/rfc3339-validator/rfc3339_validator.pyi new file mode 100644 index 000000000000..71f545bf2415 --- /dev/null +++ b/stubs/rfc3339-validator/rfc3339_validator.pyi @@ -0,0 +1,10 @@ +import re +from typing import Final + +__version__: Final[str] +__author__: Final[str] +__email__: Final[str] +RFC3339_REGEX_FLAGS: Final[int] +RFC3339_REGEX: Final[re.Pattern[str]] + +def validate_rfc3339(date_string: str) -> bool: ... From 4ce53c52a66600af0d295e28c3d47a8ba9bc2617 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 21 Mar 2025 17:31:36 +0400 Subject: [PATCH 124/388] Clarify sequences for `markdown` stubs (#13686) --- stubs/Markdown/markdown/__init__.pyi | 3 +++ stubs/Markdown/markdown/extensions/extra.pyi | 4 +--- stubs/Markdown/markdown/extensions/meta.pyi | 2 -- stubs/Markdown/markdown/extensions/smarty.pyi | 3 +-- stubs/Markdown/markdown/treeprocessors.pyi | 4 ++-- stubs/Markdown/markdown/util.pyi | 24 +++++++++---------- 6 files changed, 19 insertions(+), 21 deletions(-) diff --git a/stubs/Markdown/markdown/__init__.pyi b/stubs/Markdown/markdown/__init__.pyi index 3f57adcc43a2..f9e1394976fb 100644 --- a/stubs/Markdown/markdown/__init__.pyi +++ b/stubs/Markdown/markdown/__init__.pyi @@ -1,2 +1,5 @@ +from .__meta__ import __version__ as __version__, __version_info__ as __version_info__ from .core import Markdown as Markdown, markdown as markdown, markdownFromFile as markdownFromFile from .extensions import Extension as Extension + +__all__ = ["Markdown", "markdown", "markdownFromFile"] diff --git a/stubs/Markdown/markdown/extensions/extra.pyi b/stubs/Markdown/markdown/extensions/extra.pyi index 7262914db712..b063970d1a05 100644 --- a/stubs/Markdown/markdown/extensions/extra.pyi +++ b/stubs/Markdown/markdown/extensions/extra.pyi @@ -1,8 +1,6 @@ -from typing import Any - from markdown.extensions import Extension -extensions: Any +extensions: list[str] class ExtraExtension(Extension): def __init__(self, **kwargs) -> None: ... 
diff --git a/stubs/Markdown/markdown/extensions/meta.pyi b/stubs/Markdown/markdown/extensions/meta.pyi index d3ef7e8d59b6..b0bead44aece 100644 --- a/stubs/Markdown/markdown/extensions/meta.pyi +++ b/stubs/Markdown/markdown/extensions/meta.pyi @@ -1,11 +1,9 @@ from re import Pattern -from typing import Any from markdown.core import Markdown from markdown.extensions import Extension from markdown.preprocessors import Preprocessor -log: Any META_RE: Pattern[str] META_MORE_RE: Pattern[str] BEGIN_RE: Pattern[str] diff --git a/stubs/Markdown/markdown/extensions/smarty.pyi b/stubs/Markdown/markdown/extensions/smarty.pyi index dcf57107c141..9a0004630c84 100644 --- a/stubs/Markdown/markdown/extensions/smarty.pyi +++ b/stubs/Markdown/markdown/extensions/smarty.pyi @@ -1,5 +1,4 @@ from collections.abc import Mapping, Sequence -from typing import Any from xml.etree.ElementTree import Element from markdown import inlinepatterns, util @@ -32,7 +31,7 @@ class SubstituteTextPattern(HtmlInlineProcessor): def __init__(self, pattern: str, replace: Sequence[int | str | Element], md: Markdown) -> None: ... class SmartyExtension(Extension): - substitutions: Any + substitutions: dict[str, str] def __init__(self, **kwargs) -> None: ... def educateDashes(self, md: Markdown) -> None: ... def educateEllipses(self, md: Markdown) -> None: ... diff --git a/stubs/Markdown/markdown/treeprocessors.pyi b/stubs/Markdown/markdown/treeprocessors.pyi index 9afc5d80111d..2648d02d213b 100644 --- a/stubs/Markdown/markdown/treeprocessors.pyi +++ b/stubs/Markdown/markdown/treeprocessors.pyi @@ -1,5 +1,5 @@ from re import Pattern -from typing import Any, ClassVar +from typing import ClassVar from typing_extensions import TypeGuard from xml.etree.ElementTree import Element @@ -17,7 +17,7 @@ class InlineProcessor(Treeprocessor): ancestors: list[str] def __init__(self, md: Markdown) -> None: ... stashed_nodes: dict[str, Element | str] - parent_map: Any + parent_map: dict[Element[str], Element[str]] def run(self, tree: Element, ancestors: list[str] | None = None) -> Element: ... class PrettifyTreeprocessor(Treeprocessor): ... diff --git a/stubs/Markdown/markdown/util.pyi b/stubs/Markdown/markdown/util.pyi index 2a0d9fdd0485..d1160266b30c 100644 --- a/stubs/Markdown/markdown/util.pyi +++ b/stubs/Markdown/markdown/util.pyi @@ -1,22 +1,22 @@ from collections.abc import Iterator from re import Pattern -from typing import Any, Generic, TypedDict, TypeVar, overload +from typing import Final, Generic, TypedDict, TypeVar, overload from markdown.core import Markdown _T = TypeVar("_T") -BLOCK_LEVEL_ELEMENTS: list[str] -STX: str -ETX: str -INLINE_PLACEHOLDER_PREFIX: str -INLINE_PLACEHOLDER: str -INLINE_PLACEHOLDER_RE: Pattern[str] -AMP_SUBSTITUTE: str -HTML_PLACEHOLDER: str -HTML_PLACEHOLDER_RE: Pattern[str] -TAG_PLACEHOLDER: str -RTL_BIDI_RANGES: Any +BLOCK_LEVEL_ELEMENTS: Final[list[str]] +STX: Final[str] +ETX: Final[str] +INLINE_PLACEHOLDER_PREFIX: Final[str] +INLINE_PLACEHOLDER: Final[str] +INLINE_PLACEHOLDER_RE: Final[Pattern[str]] +AMP_SUBSTITUTE: Final[str] +HTML_PLACEHOLDER: Final[str] +HTML_PLACEHOLDER_RE: Final[Pattern[str]] +TAG_PLACEHOLDER: Final[str] +RTL_BIDI_RANGES: Final[tuple[tuple[str, str], tuple[str, str]]] def get_installed_extensions(): ... def deprecated(message: str, stacklevel: int = 2): ... 
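A short illustration of what the sharpened `Markdown` annotations in the patch
above mean for code that consumes the stubs. This is a hypothetical snippet,
not part of the patch series: the variable names are invented for the example,
and it assumes the `markdown` package plus the updated stubs are visible to the
type checker. Because the `markdown.util` constants are now `Final`,
reassigning them is also reported by a type checker.

    from markdown import util
    from markdown.extensions.extra import extensions

    # Both names were typed as Any in the old stubs; with this patch the
    # checker infers precise types for them.
    extra_names: list[str] = extensions
    rtl_first: str = util.RTL_BIDI_RANGES[0][0]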
From ec9e31d91f29ec84c0e54e9051dca6c4c32c7528 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 21 Mar 2025 18:07:44 +0400 Subject: [PATCH 125/388] Improve `passlib.pwd` (#13690) --- stubs/passlib/passlib/pwd.pyi | 146 ++++++++++++++++++++++------- stubs/passlib/passlib/registry.pyi | 17 ++-- 2 files changed, 122 insertions(+), 41 deletions(-) diff --git a/stubs/passlib/passlib/pwd.pyi b/stubs/passlib/passlib/pwd.pyi index 49ae560fa3d5..b0b4a69c2b36 100644 --- a/stubs/passlib/passlib/pwd.pyi +++ b/stubs/passlib/passlib/pwd.pyi @@ -1,60 +1,140 @@ -from _typeshed import Incomplete +import random from abc import abstractmethod -from collections.abc import MutableMapping -from typing import Any +from collections.abc import Callable, Iterator, MutableMapping, Sequence +from typing import Any, Final, Literal, overload +from typing_extensions import Self, TypeAlias class SequenceGenerator: - length: Any + length: int | None requested_entropy: str - rng: Any + rng: random.Random @property @abstractmethod def symbol_count(self) -> int: ... - def __init__( - self, entropy: Incomplete | None = None, length: Incomplete | None = None, rng: Incomplete | None = None, **kwds - ) -> None: ... + def __init__(self, entropy: int | None = None, length: int | None = None, rng: random.Random | None = None) -> None: ... @property def entropy_per_symbol(self) -> float: ... @property def entropy(self) -> float: ... - def __next__(self) -> None: ... - def __call__(self, returns: Incomplete | None = None): ... - def __iter__(self): ... + def __next__(self) -> str: ... + @overload + def __call__(self, returns: None = None) -> str: ... + @overload + def __call__(self, returns: int) -> list[str]: ... + @overload + def __call__(self, returns: Callable[[Any], Iterator[Any]]) -> Iterator[str]: ... # "returns" must be the "iter" builtin + def __iter__(self) -> Self: ... -default_charsets: Any +_Charset: TypeAlias = Literal["ascii_72", "ascii_62", "ascii_50", "hex"] +default_charsets: Final[dict[_Charset, str]] class WordGenerator(SequenceGenerator): - charset: str - chars: Any - def __init__(self, chars: Incomplete | None = None, charset: Incomplete | None = None, **kwds) -> None: ... + charset: _Charset + chars: str | bytes | None + def __init__( + self, + chars: str | bytes | None = None, + charset: _Charset | None = None, + *, + entropy: int | None = None, + length: int | None = None, + rng: random.Random | None = None, + ) -> None: ... @property - def symbol_count(self): ... - def __next__(self): ... + def symbol_count(self) -> int: ... -def genword(entropy: Incomplete | None = None, length: Incomplete | None = None, returns: Incomplete | None = None, **kwds): ... +@overload +def genword( + entropy: int | None = None, + length: int | None = None, + returns: None = None, + *, + chars: str | None = None, + charset: _Charset | None = None, + rng: random.Random | None = None, +) -> str: ... +@overload +def genword( + returns: int, + entropy: int | None = None, + length: int | None = None, + *, + chars: str | None = None, + charset: _Charset | None = None, + rng: random.Random | None = None, +) -> list[str]: ... +@overload +def genword( + returns: Callable[[Any], Iterator[Any]], + entropy: int | None = None, + length: int | None = None, + *, + chars: str | None = None, + charset: _Charset | None = None, + rng: random.Random | None = None, +) -> Iterator[str]: ... class WordsetDict(MutableMapping[Any, Any]): - paths: Any + paths: dict[str, str] | None def __init__(self, *args, **kwds) -> None: ... 
- def __getitem__(self, key): ... - def set_path(self, key, path) -> None: ... - def __setitem__(self, key, value) -> None: ... - def __delitem__(self, key) -> None: ... - def __iter__(self): ... + def __getitem__(self, key: str) -> tuple[str | bytes, ...]: ... + def set_path(self, key: str, path: str) -> None: ... + def __setitem__(self, key: str, value: tuple[str | bytes, ...]) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... - def __contains__(self, key): ... + def __contains__(self, key: object) -> bool: ... -default_wordsets: Any +default_wordsets: WordsetDict +_Wordset: TypeAlias = Literal["eff_long", "eff_short", "eff_prefixed", "bip39"] class PhraseGenerator(SequenceGenerator): - wordset: str - words: Any - sep: str + wordset: _Wordset + words: Sequence[str | bytes] | None + sep: str | None def __init__( - self, wordset: Incomplete | None = None, words: Incomplete | None = None, sep: Incomplete | None = None, **kwds + self, + wordset: _Wordset | None = None, + words: Sequence[str | bytes] | None = None, + sep: str | bytes | None = None, + *, + entropy: int | None = None, + length: int | None = None, + rng: random.Random | None = None, ) -> None: ... @property - def symbol_count(self): ... - def __next__(self): ... + def symbol_count(self) -> int: ... -def genphrase(entropy: Incomplete | None = None, length: Incomplete | None = None, returns: Incomplete | None = None, **kwds): ... +@overload +def genphrase( + entropy: int | None = None, + length: int | None = None, + returns: None = None, + *, + wordset: _Wordset | None = None, + words: Sequence[str | bytes] | None = None, + sep: str | bytes | None = None, + rng: random.Random | None = None, +) -> str: ... +@overload +def genphrase( + returns: int, + entropy: int | None = None, + length: int | None = None, + *, + wordset: _Wordset | None = None, + words: Sequence[str | bytes] | None = None, + sep: str | bytes | None = None, + rng: random.Random | None = None, +) -> list[str]: ... +@overload +def genphrase( + returns: Callable[[Any], Iterator[Any]], + entropy: int | None = None, + length: int | None = None, + *, + wordset: _Wordset | None = None, + words: Sequence[str | bytes] | None = None, + sep: str | bytes | None = None, + rng: random.Random | None = None, +) -> Iterator[str]: ... diff --git a/stubs/passlib/passlib/registry.pyi b/stubs/passlib/passlib/registry.pyi index 47ac4970161e..50b55d1fec1d 100644 --- a/stubs/passlib/passlib/registry.pyi +++ b/stubs/passlib/passlib/registry.pyi @@ -1,14 +1,15 @@ -from _typeshed import Incomplete from typing import Any class _PasslibRegistryProxy: __name__: str - __package__: Any - def __getattr__(self, attr: str): ... + __package__: str | None + def __getattr__(self, attr: str) -> Any: ... # returns handler that is a value from object.__dict__ def __setattr__(self, attr: str, value) -> None: ... - def __dir__(self): ... + def __dir__(self) -> list[str]: ... -def register_crypt_handler_path(name, path) -> None: ... -def register_crypt_handler(handler, force: bool = False, _attr: Incomplete | None = None) -> None: ... -def get_crypt_handler(name, default=...): ... -def list_crypt_handlers(loaded_only: bool = False): ... +def register_crypt_handler_path(name: str, path: str) -> None: ... +def register_crypt_handler( + handler: Any, force: bool = False, _attr: str | None = None +) -> None: ... # expected handler is object with attr handler.name +def get_crypt_handler(name: str, default: Any = ...) -> Any: ... 
# returns handler or default +def list_crypt_handlers(loaded_only: bool = False) -> list[str]: ... From 5118a899e222ad7ea2b9c84e4f63fe2dc2fde59d Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 21 Mar 2025 16:01:35 +0100 Subject: [PATCH 126/388] Move maintainer guidelines to MAINTAINERS document MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The guidelines are unchanged, apart from linking back to the CONTRIBUTING document and introducting section headers. This serves multiple purposes: * The maintainer guidelines are easier to find for maintainers. * The CONTRIBUTING document needs fewer changes, meaning fewer "guideline changed" warnings for contributors. * The – already very long – CONTRIBUTING document is shorter, and doesn't contain information irrelevant for regular contributors. --- CONTRIBUTING.md | 43 --------------------------------------- MAINTAINERS.md | 54 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 43 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4cf190229822..5ebfae6b49df 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -462,33 +462,6 @@ steps: If feeling kindly, please update [mypy](https://github.com/python/mypy/blob/master/mypy/stubinfo.py) for any stub obsoletions or removals. -## Maintainer guidelines - -The process for preparing and submitting changes also applies to -maintainers. This ensures high quality contributions and keeps -everybody on the same page. Avoid direct pushes to the repository. - -When reviewing pull requests, follow these guidelines: - -* Typing is hard. Try to be helpful and explain issues with the PR, - especially to new contributors. -* When reviewing auto-generated stubs, just scan for red flags and obvious - errors. Leave possible manual improvements for separate PRs. -* When reviewing large, hand-crafted PRs, you only need to look for red flags - and general issues, and do a few spot checks. -* Review smaller, hand-crafted PRs thoroughly. - -When merging pull requests, follow these guidelines: - -* Always wait for tests to pass before merging PRs. -* Use "[Squash and merge](https://github.com/blog/2141-squash-your-commits)" to merge PRs. -* Make sure the commit message is meaningful. For example, remove irrelevant - intermediate commit messages. -* The commit message for third-party stubs is used to generate the changelog. - It should be valid Markdown, be comprehensive, read like a changelog entry, - and assume that the reader has no access to the diff. -* Delete branches for merged PRs (by maintainers pushing to the main repo). - ### Marking PRs as "deferred" We sometimes use the ["status: deferred" label](https://github.com/python/typeshed/labels/status%3A%20deferred) @@ -500,25 +473,9 @@ external factor. Blockers can include: - A dependency on a typing PEP that is still under consideration. - A pending change in a related project, such as stub-uploader. -PRs should only be marked as "deferred" if there is a clear path towards getting -the blocking issue resolved within a reasonable time frame. If a PR depends on -a more amorphous change, such as a type system change that has not yet reached -the PEP stage, it should instead be closed. - -Maintainers who add the "deferred" label should state clearly what exactly the -blocker is, usually with a link to an open issue in another project. - ### Closing stale PRs To keep the number of open PRs manageable, we may close PRs when they have been open for too long. 
Specifically, we close open PRs that either have failures in CI, serious merge conflicts or unaddressed feedback, and that have not seen any activity in three months. - -We want to maintain a welcoming atmosphere for contributors, so use a friendly -message when closing the PR. Example message: - - Thanks for contributing! I'm closing this PR for now, because it still - - after three months of inactivity. If you are still interested, please feel free to open - a new PR (or ping us to reopen this one). diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 9a30d7952522..c2323cece01c 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -27,3 +27,57 @@ They will also be moved to the "former maintainers" section here. Former maintainers who want their access rights restored should open an issue or mail one of the active maintainers. + +## Maintainer guidelines + +The process for preparing and submitting changes as outlined +in the [CONTRIBUTING document](./CONTRIBUTING.md) also applies to +maintainers. This ensures high quality contributions and keeps +everybody on the same page. Avoid direct pushes to the repository. + +### Reviewing and merging pull requests + +When reviewing pull requests, follow these guidelines: + +* Typing is hard. Try to be helpful and explain issues with the PR, + especially to new contributors. +* When reviewing auto-generated stubs, just scan for red flags and obvious + errors. Leave possible manual improvements for separate PRs. +* When reviewing large, hand-crafted PRs, you only need to look for red flags + and general issues, and do a few spot checks. +* Review smaller, hand-crafted PRs thoroughly. + +When merging pull requests, follow these guidelines: + +* Always wait for tests to pass before merging PRs. +* Use "[Squash and merge](https://github.com/blog/2141-squash-your-commits)" to merge PRs. +* Make sure the commit message is meaningful. For example, remove irrelevant + intermediate commit messages. +* The commit message for third-party stubs is used to generate the changelog. + It should be valid Markdown, be comprehensive, read like a changelog entry, + and assume that the reader has no access to the diff. +* Delete branches for merged PRs (by maintainers pushing to the main repo). + +### Marking PRs as "deferred" + +*See also the [guidelines in the CONTRIBUTING file](./CONTRIBUTING.md#marking-prs-as-deferred).* + +PRs should only be marked as "deferred" if there is a clear path towards getting +the blocking issue resolved within a reasonable time frame. If a PR depends on +a more amorphous change, such as a type system change that has not yet reached +the PEP stage, it should instead be closed. + +Maintainers who add the "deferred" label should state clearly what exactly the +blocker is, usually with a link to an open issue in another project. + +### Closing stale PRs + +*See also the [guidelines in the CONTRIBUTING file](./CONTRIBUTING.md#closing-stale-prs).* + +We want to maintain a welcoming atmosphere for contributors, so use a friendly +message when closing the PR. Example message: + + Thanks for contributing! I'm closing this PR for now, because it still + + after three months of inactivity. If you are still interested, please feel free to open + a new PR (or ping us to reopen this one). 
From ebed171073a8a45ae495be91b129b85a45e2e5d3 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 21 Mar 2025 16:09:10 +0100 Subject: [PATCH 127/388] Clarify that direct pushes are not allowed Co-authored-by: Jelle Zijlstra --- MAINTAINERS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index c2323cece01c..f36c28758f52 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -33,7 +33,7 @@ an issue or mail one of the active maintainers. The process for preparing and submitting changes as outlined in the [CONTRIBUTING document](./CONTRIBUTING.md) also applies to maintainers. This ensures high quality contributions and keeps -everybody on the same page. Avoid direct pushes to the repository. +everybody on the same page. Do not make direct pushes to the repository. ### Reviewing and merging pull requests From e8c059a623aebb2c397dcc8eac47ed0835f5df08 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 21 Mar 2025 20:50:05 +0400 Subject: [PATCH 128/388] Improve `passlib.apache` (#13689) --- stubs/passlib/@tests/stubtest_allowlist.txt | 4 + stubs/passlib/passlib/apache.pyi | 121 +++++++++++++------- 2 files changed, 85 insertions(+), 40 deletions(-) diff --git a/stubs/passlib/@tests/stubtest_allowlist.txt b/stubs/passlib/@tests/stubtest_allowlist.txt index c953253672cd..fc0d9fa23b64 100644 --- a/stubs/passlib/@tests/stubtest_allowlist.txt +++ b/stubs/passlib/@tests/stubtest_allowlist.txt @@ -95,3 +95,7 @@ passlib.tests.* # was dropped from the standard library of Python 3.13, but is still available # in some environments. (passlib.hosts.host_context)? + +# Fields differs at runtime: +passlib.apache._CommonFile.encoding +passlib.apache._CommonFile.return_unicode diff --git a/stubs/passlib/passlib/apache.pyi b/stubs/passlib/passlib/apache.pyi index 4d261bd1399a..126a0c3a15ad 100644 --- a/stubs/passlib/passlib/apache.pyi +++ b/stubs/passlib/passlib/apache.pyi @@ -1,59 +1,100 @@ -from _typeshed import Incomplete -from typing import Any +from typing_extensions import Self + +from .context import CryptContext +from .hash import htdigest class _CommonFile: - encoding: Any - return_unicode: Any + encoding: str + return_unicode: bool autosave: bool @classmethod - def from_string(cls, data, **kwds): ... + def from_string( + cls, + data: str | bytes, + *, + new: bool = False, + autoload: bool = True, + autosave: bool = False, + encoding: str = "utf-8", + return_unicode: bool = True, + ) -> Self: ... @classmethod - def from_path(cls, path, **kwds): ... + def from_path( + cls, + path: str, + *, + new: bool = False, + autoload: bool = True, + autosave: bool = False, + encoding: str = "utf-8", + return_unicode: bool = True, + ) -> Self: ... def __init__( self, - path: Incomplete | None = None, + path: str | None = None, new: bool = False, autoload: bool = True, autosave: bool = False, encoding: str = "utf-8", - return_unicode=True, + return_unicode: bool = True, ) -> None: ... @property - def path(self): ... + def path(self) -> str: ... @path.setter - def path(self, value) -> None: ... + def path(self, value: str) -> None: ... @property - def mtime(self): ... - def load_if_changed(self): ... - def load(self, path: Incomplete | None = None, force: bool = True): ... - def load_string(self, data) -> None: ... - def save(self, path: Incomplete | None = None) -> None: ... - def to_string(self): ... + def mtime(self) -> float: ... + def load_if_changed(self) -> bool: ... + def load(self, path: str | None = None, force: bool = True) -> bool: ... 
+ def load_string(self, data: str | bytes) -> None: ... + def save(self, path: str | None = None) -> None: ... + def to_string(self) -> bytes: ... class HtpasswdFile(_CommonFile): - context: Any - def __init__(self, path: Incomplete | None = None, default_scheme: Incomplete | None = None, context=..., **kwds) -> None: ... - def users(self): ... - def set_password(self, user, password): ... - def update(self, user, password): ... - def get_hash(self, user): ... - def set_hash(self, user, hash): ... - def find(self, user): ... - def delete(self, user): ... - def check_password(self, user, password): ... - def verify(self, user, password): ... + context: CryptContext + def __init__( + self, + path: str | None = None, + default_scheme: str | None = None, + context: CryptContext = ..., + *, + new: bool = False, + autoload: bool = True, + autosave: bool = False, + encoding: str = "utf-8", + return_unicode: bool = True, + ) -> None: ... + def users(self) -> list[str | bytes]: ... + def set_password(self, user: str, password: str | bytes) -> bool: ... + def update(self, user: str, password: str | bytes) -> bool: ... + def get_hash(self, user: str) -> bytes | None: ... + def set_hash(self, user: str, hash: str | bytes) -> bool: ... + def find(self, user: str) -> bytes | None: ... + def delete(self, user: str) -> bool: ... + def check_password(self, user: str, password: str | bytes) -> bool | None: ... + def verify(self, user: str, password: str | bytes) -> bool | None: ... class HtdigestFile(_CommonFile): - default_realm: Any - def __init__(self, path: Incomplete | None = None, default_realm: Incomplete | None = None, **kwds) -> None: ... - def realms(self): ... - def users(self, realm: Incomplete | None = None): ... - def set_password(self, user, realm: Incomplete | None = None, password=...): ... - def update(self, user, realm, password): ... - def get_hash(self, user, realm: Incomplete | None = None): ... - def set_hash(self, user, realm: Incomplete | None = None, hash=...): ... - def find(self, user, realm): ... - def delete(self, user, realm: Incomplete | None = None): ... - def delete_realm(self, realm): ... - def check_password(self, user, realm: Incomplete | None = None, password=...): ... - def verify(self, user, realm, password): ... + default_realm: str | None + def __init__( + self, + path: str | None = None, + default_realm: str | None = None, + *, + new: bool = False, + autoload: bool = True, + autosave: bool = False, + encoding: str = "utf-8", + return_unicode: bool = True, + ) -> None: ... + def realms(self) -> list[str | bytes]: ... + def users(self, realm: str | None = None) -> list[str | bytes]: ... + def set_password(self, user: str, realm: str | None = None, password: str | bytes = ...) -> bool: ... + def update(self, user: str, realm: str | None, password: str | bytes) -> bool: ... + def get_hash(self, user: str, realm: str | None = None) -> htdigest | None: ... + def set_hash(self, user: str, realm: str | None = None, hash: str | bytes = ...) -> bool: ... + def find(self, user: str, realm: str | None) -> htdigest | None: ... + def delete(self, user: str, realm: str | None = None) -> bool: ... + def delete_realm(self, realm: str | None) -> int: ... + def check_password(self, user: str, realm: str | None = None, password: str | bytes = ...) -> bool | None: ... + def verify(self, user: str, realm: str | None, password: str | bytes) -> bool | None: ... 
From 13c0318b942d6d367fcca73a06ac2694c6a7bc7f Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 21 Mar 2025 19:34:11 +0100 Subject: [PATCH 129/388] Update croniter repository URL (#13698) Cf. #13260 pallets-eco/croniter#144 --- stubs/croniter/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/croniter/METADATA.toml b/stubs/croniter/METADATA.toml index 238b0e38b807..2dc7e1c08c70 100644 --- a/stubs/croniter/METADATA.toml +++ b/stubs/croniter/METADATA.toml @@ -1,2 +1,2 @@ version = "5.0.1" -upstream_repository = "https://github.com/kiorky/croniter" +upstream_repository = "https://github.com/pallets-eco/croniter" From 4059daf1a21a17598265c339575e6b0a6a02c73a Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 24 Mar 2025 02:22:26 +0400 Subject: [PATCH 130/388] Improve `decorator` (#13703) --- stubs/decorator/@tests/stubtest_allowlist.txt | 5 ----- stubs/decorator/decorator.pyi | 16 ++++++++-------- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/stubs/decorator/@tests/stubtest_allowlist.txt b/stubs/decorator/@tests/stubtest_allowlist.txt index 52d13309671e..3dd621676f14 100644 --- a/stubs/decorator/@tests/stubtest_allowlist.txt +++ b/stubs/decorator/@tests/stubtest_allowlist.txt @@ -1,10 +1,5 @@ -decorator.ContextManager.__init__ decorator.FunctionMaker.args decorator.FunctionMaker.kwonlyargs decorator.FunctionMaker.kwonlydefaults decorator.FunctionMaker.varargs decorator.FunctionMaker.varkw -decorator.decorate -decorator.decorator -decorator.get_init -decorator.EMPTY diff --git a/stubs/decorator/decorator.pyi b/stubs/decorator/decorator.pyi index d7631417ddec..91f3df155c2e 100644 --- a/stubs/decorator/decorator.pyi +++ b/stubs/decorator/decorator.pyi @@ -1,10 +1,10 @@ import inspect from builtins import dict as _dict # alias to avoid conflicts with attribute name -from collections.abc import Callable, Iterator +from collections.abc import Callable, Generator, Iterator from contextlib import _GeneratorContextManager from inspect import Signature, getfullargspec as getfullargspec, iscoroutinefunction as iscoroutinefunction from re import Pattern -from typing import Any, Literal, TypeVar +from typing import Any, Final, Literal, TypeVar from typing_extensions import ParamSpec _C = TypeVar("_C", bound=Callable[..., Any]) @@ -12,10 +12,9 @@ _Func = TypeVar("_Func", bound=Callable[..., Any]) _T = TypeVar("_T") _P = ParamSpec("_P") -def get_init(cls: type) -> None: ... - -DEF: Pattern[str] -POS: Literal[inspect._ParameterKind.POSITIONAL_OR_KEYWORD] +DEF: Final[Pattern[str]] +POS: Final[Literal[inspect._ParameterKind.POSITIONAL_OR_KEYWORD]] +EMPTY: Final[type[inspect._empty]] class FunctionMaker: args: list[str] @@ -59,13 +58,14 @@ class FunctionMaker: ) -> Callable[..., Any]: ... def fix(args: tuple[Any, ...], kwargs: dict[str, Any], sig: Signature) -> tuple[tuple[Any, ...], dict[str, Any]]: ... -def decorate(func: _Func, caller: Callable[..., Any], extras: Any = ...) -> _Func: ... +def decorate(func: _Func, caller: Callable[..., Any], extras: tuple[Any, ...] = ..., kwsyntax: bool = False) -> _Func: ... def decoratorx(caller: Callable[..., Any]) -> Callable[..., Any]: ... def decorator( - caller: Callable[..., Any], _func: Callable[..., Any] | None = ... + caller: Callable[..., Any], _func: Callable[..., Any] | None = None, kwsyntax: bool = False ) -> Callable[[Callable[..., Any]], Callable[..., Any]]: ... 
class ContextManager(_GeneratorContextManager[_T]): + def __init__(self, g: Callable[..., Generator[_T]], *a: Any, **k: Any) -> None: ... def __call__(self, func: _C) -> _C: ... def contextmanager(func: Callable[_P, Iterator[_T]]) -> Callable[_P, ContextManager[_T]]: ... From dd984b4c3a28b69212f5b45872b5fbd8e39f31f8 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Sun, 23 Mar 2025 23:41:53 +0100 Subject: [PATCH 131/388] Remove humanfriendly stubs (#13699) --- .../@tests/stubtest_allowlist.txt | 25 ------ stubs/humanfriendly/METADATA.toml | 8 -- .../humanfriendly/humanfriendly/__init__.pyi | 57 ------------ stubs/humanfriendly/humanfriendly/case.pyi | 27 ------ stubs/humanfriendly/humanfriendly/cli.pyi | 13 --- stubs/humanfriendly/humanfriendly/compat.pyi | 13 --- .../humanfriendly/decorators.pyi | 3 - .../humanfriendly/deprecation.pyi | 14 --- stubs/humanfriendly/humanfriendly/prompts.pyi | 14 --- stubs/humanfriendly/humanfriendly/sphinx.pyi | 15 ---- stubs/humanfriendly/humanfriendly/tables.pyi | 6 -- .../humanfriendly/terminal/__init__.pyi | 38 -------- .../humanfriendly/terminal/html.pyi | 32 ------- .../humanfriendly/terminal/spinners.pyi | 42 --------- stubs/humanfriendly/humanfriendly/testing.pyi | 87 ------------------- stubs/humanfriendly/humanfriendly/text.pyi | 17 ---- stubs/humanfriendly/humanfriendly/usage.pyi | 7 -- 17 files changed, 418 deletions(-) delete mode 100644 stubs/humanfriendly/@tests/stubtest_allowlist.txt delete mode 100644 stubs/humanfriendly/METADATA.toml delete mode 100644 stubs/humanfriendly/humanfriendly/__init__.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/case.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/cli.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/compat.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/decorators.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/deprecation.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/prompts.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/sphinx.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/tables.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/terminal/__init__.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/terminal/html.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/terminal/spinners.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/testing.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/text.pyi delete mode 100644 stubs/humanfriendly/humanfriendly/usage.pyi diff --git a/stubs/humanfriendly/@tests/stubtest_allowlist.txt b/stubs/humanfriendly/@tests/stubtest_allowlist.txt deleted file mode 100644 index 2d4a8850cda8..000000000000 --- a/stubs/humanfriendly/@tests/stubtest_allowlist.txt +++ /dev/null @@ -1,25 +0,0 @@ -# TODO: missing from stub -humanfriendly.__all__ -humanfriendly.case.__all__ -humanfriendly.cli.__all__ -humanfriendly.compat.__all__ -humanfriendly.decorators.__all__ -humanfriendly.deprecation.__all__ -humanfriendly.prompts.__all__ -humanfriendly.sphinx.__all__ -humanfriendly.tables.__all__ -humanfriendly.terminal.__all__ -humanfriendly.terminal.html.__all__ -humanfriendly.terminal.spinners.__all__ -humanfriendly.testing.__all__ -humanfriendly.text.__all__ -humanfriendly.usage.__all__ - -# Re-exports: -humanfriendly.usage.import_module -humanfriendly.compat.which -humanfriendly.compat.name2codepoint -humanfriendly.compat.monotonic - -# Tests are not included into stubs: -humanfriendly.tests diff --git a/stubs/humanfriendly/METADATA.toml b/stubs/humanfriendly/METADATA.toml 
deleted file mode 100644 index 0a4f7300b177..000000000000 --- a/stubs/humanfriendly/METADATA.toml +++ /dev/null @@ -1,8 +0,0 @@ -version = "10.0.*" -upstream_repository = "https://github.com/xolox/python-humanfriendly" -no_longer_updated = true - -[tool.stubtest] -stubtest_requirements = ["docutils", "mock"] -# Package is unsupported and doesn't support Python 3.13 as of 2025-03-17. -skip = true diff --git a/stubs/humanfriendly/humanfriendly/__init__.pyi b/stubs/humanfriendly/humanfriendly/__init__.pyi deleted file mode 100644 index 36b34922b83a..000000000000 --- a/stubs/humanfriendly/humanfriendly/__init__.pyi +++ /dev/null @@ -1,57 +0,0 @@ -import datetime -from _typeshed import Incomplete -from re import Pattern -from types import TracebackType -from typing import Any, NamedTuple - -class SizeUnit(NamedTuple): - divider: int - symbol: str - name: str - -class CombinedUnit(NamedTuple): - decimal: SizeUnit - binary: SizeUnit - -disk_size_units: Any -length_size_units: Any -time_units: Any - -def coerce_boolean(value: object) -> bool: ... -def coerce_pattern(value: str | Pattern[str], flags: int = 0) -> Pattern[str]: ... -def coerce_seconds(value: float | datetime.timedelta) -> float: ... -def format_size(num_bytes: float, keep_width: bool = False, binary: bool = False) -> str: ... -def parse_size(size: str, binary: bool = False) -> int: ... -def format_length(num_metres: float, keep_width: bool = False) -> str: ... -def parse_length(length: str) -> float: ... -def format_number(number: float, num_decimals: int = 2) -> str: ... -def round_number(count: float, keep_width: bool = False) -> str: ... -def format_timespan(num_seconds: float | datetime.timedelta, detailed: bool = False, max_units: int = 3) -> str: ... -def parse_timespan(timespan: str) -> float: ... -def parse_date(datestring: str) -> tuple[int, int, int, int, int, int]: ... -def format_path(pathname: str) -> str: ... -def parse_path(pathname: str) -> str: ... - -class Timer: - monotonic: bool - resumable: bool - start_time: float - total_time: float - def __init__(self, start_time: Incomplete | None = None, resumable: bool = False) -> None: ... - def __enter__(self): ... - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: ... - def sleep(self, seconds: float) -> None: ... - @property - def elapsed_time(self): ... - @property - def rounded(self): ... - -class InvalidDate(Exception): ... -class InvalidSize(Exception): ... -class InvalidLength(Exception): ... -class InvalidTimespan(Exception): ... diff --git a/stubs/humanfriendly/humanfriendly/case.pyi b/stubs/humanfriendly/humanfriendly/case.pyi deleted file mode 100644 index d54838f7560e..000000000000 --- a/stubs/humanfriendly/humanfriendly/case.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete -from collections import OrderedDict -from typing import TypeVar - -from humanfriendly.compat import unicode - -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") - -class CaseInsensitiveDict(OrderedDict[_KT, _VT]): - def __init__(self, other: Incomplete | None = None, **kw) -> None: ... - def coerce_key(self, key): ... - @classmethod - def fromkeys(cls, iterable, value: Incomplete | None = None): ... - def get(self, key, default: Incomplete | None = None): ... - def pop(self, key, default: Incomplete | None = None): ... - def setdefault(self, key, default: Incomplete | None = None): ... - def update(self, other: Incomplete | None = None, **kw) -> None: ... 
# type: ignore[override] - def __contains__(self, key): ... - def __delitem__(self, key) -> None: ... - def __getitem__(self, key): ... - def __setitem__(self, key, value) -> None: ... - -class CaseInsensitiveKey(unicode): - def __new__(cls, value): ... - def __hash__(self) -> int: ... - def __eq__(self, other): ... diff --git a/stubs/humanfriendly/humanfriendly/cli.pyi b/stubs/humanfriendly/humanfriendly/cli.pyi deleted file mode 100644 index 9a3652dd99a1..000000000000 --- a/stubs/humanfriendly/humanfriendly/cli.pyi +++ /dev/null @@ -1,13 +0,0 @@ -from _typeshed import Incomplete - -def main() -> None: ... -def run_command(command_line) -> None: ... -def print_formatted_length(value) -> None: ... -def print_formatted_number(value) -> None: ... -def print_formatted_size(value, binary) -> None: ... -def print_formatted_table(delimiter) -> None: ... -def print_formatted_timespan(value) -> None: ... -def print_parsed_length(value) -> None: ... -def print_parsed_size(value) -> None: ... -def demonstrate_ansi_formatting() -> None: ... -def demonstrate_256_colors(i, j, group: Incomplete | None = None) -> None: ... diff --git a/stubs/humanfriendly/humanfriendly/compat.pyi b/stubs/humanfriendly/humanfriendly/compat.pyi deleted file mode 100644 index c0f19717f90d..000000000000 --- a/stubs/humanfriendly/humanfriendly/compat.pyi +++ /dev/null @@ -1,13 +0,0 @@ -from html.parser import HTMLParser as HTMLParser -from io import StringIO as StringIO - -unicode = str -unichr = chr -basestring = str -interactive_prompt = input - -def coerce_string(value): ... -def is_string(value): ... -def is_unicode(value): ... -def on_macos(): ... -def on_windows(): ... diff --git a/stubs/humanfriendly/humanfriendly/decorators.pyi b/stubs/humanfriendly/humanfriendly/decorators.pyi deleted file mode 100644 index e961fd398726..000000000000 --- a/stubs/humanfriendly/humanfriendly/decorators.pyi +++ /dev/null @@ -1,3 +0,0 @@ -RESULTS_ATTRIBUTE: str - -def cached(function): ... diff --git a/stubs/humanfriendly/humanfriendly/deprecation.pyi b/stubs/humanfriendly/humanfriendly/deprecation.pyi deleted file mode 100644 index db18163bdde8..000000000000 --- a/stubs/humanfriendly/humanfriendly/deprecation.pyi +++ /dev/null @@ -1,14 +0,0 @@ -import types -from typing import Any - -def define_aliases(module_name, **aliases) -> None: ... -def get_aliases(module_name): ... -def deprecated_args(*names): ... -def is_method(function): ... - -class DeprecationProxy(types.ModuleType): - module: Any - aliases: Any - def __init__(self, module, aliases) -> None: ... - def __getattr__(self, name: str): ... - def resolve(self, target): ... diff --git a/stubs/humanfriendly/humanfriendly/prompts.pyi b/stubs/humanfriendly/humanfriendly/prompts.pyi deleted file mode 100644 index 738ac4d33e2e..000000000000 --- a/stubs/humanfriendly/humanfriendly/prompts.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from _typeshed import Incomplete -from typing import Any - -MAX_ATTEMPTS: int -logger: Any - -def prompt_for_confirmation(question, default: Incomplete | None = None, padding: bool = True): ... -def prompt_for_choice(choices, default: Incomplete | None = None, padding: bool = True): ... -def prompt_for_input(question, default: Incomplete | None = None, padding: bool = True, strip: bool = True): ... -def prepare_prompt_text(prompt_text, **options): ... -def prepare_friendly_prompts() -> None: ... -def retry_limit(limit=10) -> None: ... - -class TooManyInvalidReplies(Exception): ... 
diff --git a/stubs/humanfriendly/humanfriendly/sphinx.pyi b/stubs/humanfriendly/humanfriendly/sphinx.pyi deleted file mode 100644 index a84ebe5c5513..000000000000 --- a/stubs/humanfriendly/humanfriendly/sphinx.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Any - -logger: Any - -def deprecation_note_callback(app, what, name, obj, options, lines) -> None: ... -def enable_deprecation_notes(app) -> None: ... -def enable_man_role(app) -> None: ... -def enable_pypi_role(app) -> None: ... -def enable_special_methods(app) -> None: ... -def enable_usage_formatting(app) -> None: ... -def man_role(role, rawtext, text, lineno, inliner, options={}, content=[]): ... -def pypi_role(role, rawtext, text, lineno, inliner, options={}, content=[]): ... -def setup(app): ... -def special_methods_callback(app, what, name, obj, skip, options): ... -def usage_message_callback(app, what, name, obj, options, lines) -> None: ... diff --git a/stubs/humanfriendly/humanfriendly/tables.pyi b/stubs/humanfriendly/humanfriendly/tables.pyi deleted file mode 100644 index b53900ef384d..000000000000 --- a/stubs/humanfriendly/humanfriendly/tables.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from _typeshed import Incomplete - -def format_smart_table(data, column_names): ... -def format_pretty_table(data, column_names: Incomplete | None = None, horizontal_bar: str = "-", vertical_bar: str = "|"): ... -def format_robust_table(data, column_names): ... -def format_rst_table(data, column_names: Incomplete | None = None): ... diff --git a/stubs/humanfriendly/humanfriendly/terminal/__init__.pyi b/stubs/humanfriendly/humanfriendly/terminal/__init__.pyi deleted file mode 100644 index 67e76a28b12c..000000000000 --- a/stubs/humanfriendly/humanfriendly/terminal/__init__.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete -from typing import Any - -ANSI_CSI: str -ANSI_SGR: str -ANSI_ERASE_LINE: Any -ANSI_RESET: Any -ANSI_HIDE_CURSOR: Any -ANSI_SHOW_CURSOR: Any -ANSI_COLOR_CODES: Any -ANSI_TEXT_STYLES: Any -CLEAN_OUTPUT_PATTERN: Any -DEFAULT_LINES: int -DEFAULT_COLUMNS: int -DEFAULT_ENCODING: str -HIGHLIGHT_COLOR: Any - -def ansi_strip(text, readline_hints: bool = True): ... -def ansi_style(**kw): ... -def ansi_width(text): ... -def ansi_wrap(text, **kw): ... -def auto_encode(stream, text, *args, **kw) -> None: ... -def clean_terminal_output(text): ... -def connected_to_terminal(stream: Incomplete | None = None): ... -def enable_ansi_support(): ... -def find_terminal_size(): ... -def find_terminal_size_using_ioctl(stream): ... -def find_terminal_size_using_stty(): ... -def get_pager_command(text: Incomplete | None = None): ... -def have_windows_native_ansi_support(): ... -def message(text, *args, **kw) -> None: ... -def output(text, *args, **kw) -> None: ... -def readline_strip(expr): ... -def readline_wrap(expr): ... -def show_pager(formatted_text, encoding="UTF-8") -> None: ... -def terminal_supports_colors(stream: Incomplete | None = None): ... -def usage(usage_text) -> None: ... -def warning(text, *args, **kw) -> None: ... diff --git a/stubs/humanfriendly/humanfriendly/terminal/html.pyi b/stubs/humanfriendly/humanfriendly/terminal/html.pyi deleted file mode 100644 index 79286ed00aa7..000000000000 --- a/stubs/humanfriendly/humanfriendly/terminal/html.pyi +++ /dev/null @@ -1,32 +0,0 @@ -from _typeshed import Incomplete -from typing import Any - -from humanfriendly.compat import HTMLParser - -def html_to_ansi(data, callback: Incomplete | None = None): ... 
- -class HTMLConverter(HTMLParser): - BLOCK_TAGS: Any - callback: Any - output: Any - def __init__(self, *args, **kw) -> None: ... - def __call__(self, data): ... - @property - def current_style(self): ... - stack: Any - def close(self) -> None: ... - def emit_style(self, style: Incomplete | None = None) -> None: ... - def handle_charref(self, value) -> None: ... - link_text: Any - def handle_data(self, data) -> None: ... - def handle_endtag(self, tag) -> None: ... - def handle_entityref(self, name) -> None: ... - link_url: Any - def handle_starttag(self, tag, attrs) -> None: ... - def normalize_url(self, url): ... - def parse_color(self, value): ... - def push_styles(self, **changes) -> None: ... - def render_url(self, url): ... - preformatted_text_level: int - def reset(self) -> None: ... - def urls_match(self, a, b): ... diff --git a/stubs/humanfriendly/humanfriendly/terminal/spinners.pyi b/stubs/humanfriendly/humanfriendly/terminal/spinners.pyi deleted file mode 100644 index 5c342416f4a2..000000000000 --- a/stubs/humanfriendly/humanfriendly/terminal/spinners.pyi +++ /dev/null @@ -1,42 +0,0 @@ -from _typeshed import Incomplete -from types import TracebackType -from typing import Any - -GLYPHS: Any -MINIMUM_INTERVAL: float - -class Spinner: - interactive: Any - interval: Any - label: Any - states: Any - stream: Any - timer: Any - total: Any - counter: int - last_update: int - def __init__(self, **options) -> None: ... - def step(self, progress: int = 0, label: Incomplete | None = None) -> None: ... - def sleep(self) -> None: ... - def clear(self) -> None: ... - def __enter__(self): ... - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: ... - -class AutomaticSpinner: - label: Any - show_time: Any - shutdown_event: Any - subprocess: Any - def __init__(self, label, show_time: bool = True) -> None: ... - def __enter__(self) -> None: ... - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: ... diff --git a/stubs/humanfriendly/humanfriendly/testing.pyi b/stubs/humanfriendly/humanfriendly/testing.pyi deleted file mode 100644 index 8254e6c90933..000000000000 --- a/stubs/humanfriendly/humanfriendly/testing.pyi +++ /dev/null @@ -1,87 +0,0 @@ -import unittest -from _typeshed import Incomplete -from types import TracebackType -from typing import Any - -from humanfriendly.compat import StringIO - -def configure_logging(log_level=10) -> None: ... -def make_dirs(pathname) -> None: ... -def retry(func, timeout: int = 60, exc_type=...): ... -def run_cli(entry_point, *arguments, **options): ... -def skip_on_raise(*exc_types): ... -def touch(filename) -> None: ... - -class CallableTimedOut(Exception): ... - -class ContextManager: - def __enter__(self): ... - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: ... - -class PatchedAttribute(ContextManager): - object_to_patch: Any - attribute_to_patch: Any - patched_value: Any - original_value: Any - def __init__(self, obj, name, value) -> None: ... - def __enter__(self): ... - -class PatchedItem(ContextManager): - object_to_patch: Any - item_to_patch: Any - patched_value: Any - original_value: Any - def __init__(self, obj, item, value) -> None: ... - def __enter__(self): ... 
- -class TemporaryDirectory(ContextManager): - mkdtemp_options: Any - temporary_directory: Any - def __init__(self, **options) -> None: ... - def __enter__(self): ... - -class MockedHomeDirectory(PatchedItem, TemporaryDirectory): - def __init__(self) -> None: ... - patched_value: Any - def __enter__(self): ... - -class CustomSearchPath(PatchedItem, TemporaryDirectory): - isolated_search_path: Any - def __init__(self, isolated: bool = False) -> None: ... - patched_value: Any - def __enter__(self): ... - @property - def current_search_path(self): ... - -class MockedProgram(CustomSearchPath): - program_name: Any - program_returncode: Any - program_script: Any - program_signal_file: Any - def __init__(self, name, returncode: int = 0, script: Incomplete | None = None) -> None: ... - def __enter__(self): ... - def __exit__(self, *args: object, **kw: object): ... - -class CaptureOutput(ContextManager): - stdin: Any - stdout: Any - stderr: Any - patched_attributes: Any - def __init__(self, merged: bool = False, input: str = "", enabled: bool = True) -> None: ... - def __enter__(self): ... - def get_lines(self): ... - def get_text(self): ... - def getvalue(self): ... - -class CaptureBuffer(StringIO): - def get_lines(self): ... - def get_text(self): ... - -class TestCase(unittest.TestCase): - def __init__(self, *args, **kw) -> None: ... - def setUp(self, log_level=10) -> None: ... diff --git a/stubs/humanfriendly/humanfriendly/text.pyi b/stubs/humanfriendly/humanfriendly/text.pyi deleted file mode 100644 index d368e5101445..000000000000 --- a/stubs/humanfriendly/humanfriendly/text.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -def compact(text, *args, **kw): ... -def compact_empty_lines(text): ... -def concatenate(items, conjunction: str = "and", serial_comma: bool = False): ... -def dedent(text, *args, **kw): ... -def format(text, *args, **kw): ... -def generate_slug(text, delimiter: str = "-"): ... -def is_empty_line(text): ... -def join_lines(text): ... -def pluralize(count, singular, plural: Incomplete | None = None): ... -def pluralize_raw(count, singular, plural: Incomplete | None = None): ... -def random_string(length=(25, 100), characters=...): ... -def split(text, delimiter: str = ","): ... -def split_paragraphs(text): ... -def tokenize(text): ... -def trim_empty_lines(text): ... diff --git a/stubs/humanfriendly/humanfriendly/usage.pyi b/stubs/humanfriendly/humanfriendly/usage.pyi deleted file mode 100644 index 9ebfb108f70a..000000000000 --- a/stubs/humanfriendly/humanfriendly/usage.pyi +++ /dev/null @@ -1,7 +0,0 @@ -USAGE_MARKER: str - -def format_usage(usage_text): ... -def find_meta_variables(usage_text): ... -def parse_usage(text): ... -def render_usage(text): ... -def inject_usage(module_name) -> None: ... 
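For downstream projects, this removal means `import humanfriendly` is now untyped as far as type checkers are concerned: the runtime package keeps working, but mypy and pyright no longer see the signatures the stubs above provided. A minimal sketch of one way to silence a strict mypy run — illustration only, not part of this patch; it assumes the project still depends on humanfriendly, the exact mypy error code can vary by version, and `format_size()` is taken from the deleted `__init__.pyi` above:

    import humanfriendly  # type: ignore[import-untyped]  # stubs retired from typeshed

    # Runtime behaviour is unchanged; only the static types are gone.
    print(humanfriendly.format_size(1024, binary=True))  # e.g. "1 KiB"
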
From eb0104cd6cdf3ae10b9b0805da27788e8722f2da Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 24 Mar 2025 23:31:48 +0400 Subject: [PATCH 132/388] Bump hdbcli to 2.24.* (#13705) --- stubs/hdbcli/@tests/stubtest_allowlist.txt | 3 --- stubs/hdbcli/METADATA.toml | 2 +- stubs/hdbcli/hdbcli/__init__.pyi | 2 ++ stubs/hdbcli/hdbcli/dbapi.pyi | 26 +++++++++++----------- 4 files changed, 16 insertions(+), 17 deletions(-) diff --git a/stubs/hdbcli/@tests/stubtest_allowlist.txt b/stubs/hdbcli/@tests/stubtest_allowlist.txt index 147dd8fa4d53..e13c2f2b127b 100644 --- a/stubs/hdbcli/@tests/stubtest_allowlist.txt +++ b/stubs/hdbcli/@tests/stubtest_allowlist.txt @@ -1,6 +1,3 @@ -# TODO: missing from stub -hdbcli.__all__ - # Are set to `None` by default, initialized later: hdbcli.dbapi.Error.errorcode hdbcli.dbapi.Error.errortext diff --git a/stubs/hdbcli/METADATA.toml b/stubs/hdbcli/METADATA.toml index 5e6a9f36de50..5898c3f9f5b1 100644 --- a/stubs/hdbcli/METADATA.toml +++ b/stubs/hdbcli/METADATA.toml @@ -1,2 +1,2 @@ -version = "2.23.*" +version = "2.24.*" # upstream_repository = closed-source diff --git a/stubs/hdbcli/hdbcli/__init__.pyi b/stubs/hdbcli/hdbcli/__init__.pyi index af0d55ba50b9..539250c104a5 100644 --- a/stubs/hdbcli/hdbcli/__init__.pyi +++ b/stubs/hdbcli/hdbcli/__init__.pyi @@ -1,3 +1,5 @@ from . import dbapi as dbapi __version__: str + +__all__ = ["dbapi"] diff --git a/stubs/hdbcli/hdbcli/dbapi.pyi b/stubs/hdbcli/hdbcli/dbapi.pyi index 3d28a2121a88..2b1282a1f029 100644 --- a/stubs/hdbcli/hdbcli/dbapi.pyi +++ b/stubs/hdbcli/hdbcli/dbapi.pyi @@ -1,26 +1,26 @@ import decimal from _typeshed import Incomplete, ReadableBuffer -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import date, datetime, time from types import TracebackType -from typing import Any, Literal, overload +from typing import Any, Final, Literal, overload from typing_extensions import Self, TypeAlias from .resultrow import ResultRow -apilevel: str -threadsafety: int -paramstyle: tuple[str, ...] # hdbcli defines it as a tuple which does not follow PEP 249 +apilevel: Final[str] +threadsafety: Final[int] +paramstyle: Final[tuple[str, ...]] # hdbcli defines it as a tuple which does not follow PEP 249 class Connection: def __init__( self, - address: str, - port: int, - user: str, - password: str, - autocommit: bool = ..., - packetsize: int | None = ..., + address: str = "", + port: int = 0, + user: str = "", + password: str = "", + autocommit: bool = True, + packetsize: int | None = None, userkey: str | None = ..., *, sessionvariables: dict[str, str] | None = ..., @@ -38,7 +38,7 @@ class Connection: def rollback(self) -> None: ... def setautocommit(self, auto: bool = ...) -> None: ... def setclientinfo(self, key: str, value: str | None = ...) -> None: ... - def ontrace(self) -> None: ... + def ontrace(self, callback: Callable[[str], Any], options: str = ...) -> None: ... connect = Connection @@ -66,7 +66,7 @@ class Cursor: def close(self) -> None: ... def description_ext(self) -> Sequence[tuple[Any, ...]]: ... def execute(self, operation: str, parameters: tuple[Any, ...] | None = ...) -> bool: ... - def executemany(self, operation: str, parameters: _Parameters = ...) -> Any: ... + def executemany(self, operation: str, parameters: _Parameters = ..., batcherrors: bool = False) -> Any: ... def executemanyprepared(self, parameters: _Parameters = ...) -> Any: ... def executeprepared(self, parameters: _Parameters = ...) -> Any: ... def fetchone(self, uselob: bool = ...) 
-> ResultRow | None: ... From 425bca5a3f3c674386bfad9a2281889603f5a478 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Quioc?= Date: Tue, 25 Mar 2025 13:02:33 +0100 Subject: [PATCH 133/388] [aiofiles] Fix the file argument type in AsyncBase (#13717) --- stubs/aiofiles/aiofiles/base.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/aiofiles/aiofiles/base.pyi b/stubs/aiofiles/aiofiles/base.pyi index ea79abf48b4b..1de0bbbfdcb1 100644 --- a/stubs/aiofiles/aiofiles/base.pyi +++ b/stubs/aiofiles/aiofiles/base.pyi @@ -8,7 +8,7 @@ _T = TypeVar("_T") _V_co = TypeVar("_V_co", covariant=True) class AsyncBase(Generic[_T]): - def __init__(self, file: str, loop: Any, executor: Any) -> None: ... + def __init__(self, file: TextIO | BinaryIO | None, loop: Any, executor: Any) -> None: ... def __aiter__(self) -> Self: ... async def __anext__(self) -> _T: ... From 5c9c2ede927972ea3abc62642814d8b87440425c Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 16:13:00 +0400 Subject: [PATCH 134/388] Remove humanfriendly from pyrightconfig.stricter.json (#13713) --- pyrightconfig.stricter.json | 1 - 1 file changed, 1 deletion(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 9f20ff2c9c93..a0e8bddda967 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -48,7 +48,6 @@ "stubs/hdbcli/hdbcli/dbapi.pyi", "stubs/html5lib", "stubs/httplib2", - "stubs/humanfriendly", "stubs/hvac", "stubs/icalendar", "stubs/influxdb-client", From 3e77422f55242f27be56bfff23e1f6b8e24d7ef2 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 16:14:03 +0400 Subject: [PATCH 135/388] [passlib] Add ___all__, improve passlib.hosts (#13712) --- stubs/passlib/@tests/stubtest_allowlist.txt | 8 +------- stubs/passlib/passlib/apache.pyi | 2 ++ stubs/passlib/passlib/context.pyi | 2 ++ stubs/passlib/passlib/hosts.pyi | 16 +++++++++------- stubs/passlib/passlib/ifc.pyi | 2 ++ stubs/passlib/passlib/pwd.pyi | 2 ++ stubs/passlib/passlib/registry.pyi | 2 ++ stubs/passlib/passlib/totp.pyi | 11 +++++++++++ 8 files changed, 31 insertions(+), 14 deletions(-) diff --git a/stubs/passlib/@tests/stubtest_allowlist.txt b/stubs/passlib/@tests/stubtest_allowlist.txt index fc0d9fa23b64..f72936065580 100644 --- a/stubs/passlib/@tests/stubtest_allowlist.txt +++ b/stubs/passlib/@tests/stubtest_allowlist.txt @@ -1,6 +1,5 @@ # TODO: missing from stub -passlib.apache.__all__ -passlib.context.__all__ +passlib.hosts.__all__ passlib.crypto._blowfish.__all__ passlib.crypto._blowfish.base.__all__ passlib.crypto._blowfish.unrolled.__all__ @@ -28,11 +27,6 @@ passlib.handlers.sha1_crypt.__all__ passlib.handlers.sha2_crypt.__all__ passlib.handlers.sun_md5_crypt.__all__ passlib.handlers.windows.__all__ -passlib.hosts.__all__ -passlib.ifc.__all__ -passlib.pwd.__all__ -passlib.registry.__all__ -passlib.totp.__all__ passlib.utils.binary.__all__ passlib.utils.decor.__all__ passlib.utils.handlers.__all__ diff --git a/stubs/passlib/passlib/apache.pyi b/stubs/passlib/passlib/apache.pyi index 126a0c3a15ad..f474dee62763 100644 --- a/stubs/passlib/passlib/apache.pyi +++ b/stubs/passlib/passlib/apache.pyi @@ -98,3 +98,5 @@ class HtdigestFile(_CommonFile): def delete_realm(self, realm: str | None) -> int: ... def check_password(self, user: str, realm: str | None = None, password: str | bytes = ...) -> bool | None: ... def verify(self, user: str, realm: str | None, password: str | bytes) -> bool | None: ... 
+ +__all__ = ["HtpasswdFile", "HtdigestFile"] diff --git a/stubs/passlib/passlib/context.pyi b/stubs/passlib/passlib/context.pyi index cf853f4f0d00..0d6521b91bd1 100644 --- a/stubs/passlib/passlib/context.pyi +++ b/stubs/passlib/passlib/context.pyi @@ -85,3 +85,5 @@ class CryptContext: class LazyCryptContext(CryptContext): def __init__(self, schemes: Incomplete | None = None, **kwds) -> None: ... def __getattribute__(self, attr: str) -> Any: ... + +__all__ = ["CryptContext", "LazyCryptContext", "CryptPolicy"] diff --git a/stubs/passlib/passlib/hosts.pyi b/stubs/passlib/passlib/hosts.pyi index 5b365e00ecab..c07bbc048ce3 100644 --- a/stubs/passlib/passlib/hosts.pyi +++ b/stubs/passlib/passlib/hosts.pyi @@ -1,13 +1,15 @@ import sys -from typing import Any from passlib.context import CryptContext -linux_context: Any -linux2_context: Any -freebsd_context: Any -openbsd_context: Any -netbsd_context: Any +linux_context: CryptContext +linux2_context: CryptContext +freebsd_context: CryptContext +openbsd_context: CryptContext +netbsd_context: CryptContext # Only exists if crypt is present -if sys.version_info < (3, 13): +if sys.version_info < (3, 13) and sys.platform != "win32": host_context: CryptContext + __all__ = ["linux_context", "linux2_context", "openbsd_context", "netbsd_context", "freebsd_context", "host_context"] +else: + __all__ = ["linux_context", "linux2_context", "openbsd_context", "netbsd_context", "freebsd_context"] diff --git a/stubs/passlib/passlib/ifc.pyi b/stubs/passlib/passlib/ifc.pyi index b91cb6993afa..03e22623f894 100644 --- a/stubs/passlib/passlib/ifc.pyi +++ b/stubs/passlib/passlib/ifc.pyi @@ -35,3 +35,5 @@ class DisabledHash(PasswordHash, metaclass=ABCMeta): def disable(cls, hash: str | None = None) -> str: ... @classmethod def enable(cls, hash: str) -> str: ... + +__all__ = ["PasswordHash"] diff --git a/stubs/passlib/passlib/pwd.pyi b/stubs/passlib/passlib/pwd.pyi index b0b4a69c2b36..a1189c161b7d 100644 --- a/stubs/passlib/passlib/pwd.pyi +++ b/stubs/passlib/passlib/pwd.pyi @@ -138,3 +138,5 @@ def genphrase( sep: str | bytes | None = None, rng: random.Random | None = None, ) -> Iterator[str]: ... + +__all__ = ["genword", "default_charsets", "genphrase", "default_wordsets"] diff --git a/stubs/passlib/passlib/registry.pyi b/stubs/passlib/passlib/registry.pyi index 50b55d1fec1d..ff1e5d7a213c 100644 --- a/stubs/passlib/passlib/registry.pyi +++ b/stubs/passlib/passlib/registry.pyi @@ -13,3 +13,5 @@ def register_crypt_handler( ) -> None: ... # expected handler is object with attr handler.name def get_crypt_handler(name: str, default: Any = ...) -> Any: ... # returns handler or default def list_crypt_handlers(loaded_only: bool = False) -> list[str]: ... + +__all__ = ["register_crypt_handler_path", "register_crypt_handler", "get_crypt_handler", "list_crypt_handlers"] diff --git a/stubs/passlib/passlib/totp.pyi b/stubs/passlib/passlib/totp.pyi index 22bdc4ed894f..74096fa25862 100644 --- a/stubs/passlib/passlib/totp.pyi +++ b/stubs/passlib/passlib/totp.pyi @@ -146,3 +146,14 @@ class TotpMatch(SequenceMixin): def cache_seconds(self) -> int: ... @property def cache_time(self) -> int: ... 
+ +__all__ = [ + "AppWallet", + "TOTP", + "TokenError", + "MalformedTokenError", + "InvalidTokenError", + "UsedTokenError", + "TotpToken", + "TotpMatch", +] From 0782dec94cf09a2391ec6d1365c1f627b8bdd1f6 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 17:08:14 +0400 Subject: [PATCH 136/388] Support `uv` for installing third party dependencies (#13706) --- scripts/install_all_third_party_dependencies.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/scripts/install_all_third_party_dependencies.py b/scripts/install_all_third_party_dependencies.py index 25e934eb2e95..a11238da8199 100644 --- a/scripts/install_all_third_party_dependencies.py +++ b/scripts/install_all_third_party_dependencies.py @@ -1,6 +1,13 @@ import subprocess +import sys from ts_utils.requirements import get_external_stub_requirements +use_uv = "--uv" in sys.argv +if use_uv: + pip_command = ["uv", "pip", "install"] +else: + pip_command = ["pip", "install"] + requirements = get_external_stub_requirements() -subprocess.check_call(("pip", "install", *[str(requirement) for requirement in requirements])) +subprocess.check_call(pip_command + [str(requirement) for requirement in requirements]) From 5d383a4ca311cffe68491f03cb20550b85fd082c Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 17:09:32 +0400 Subject: [PATCH 137/388] Add `__all__` (#13704) --- stubs/PyYAML/@tests/stubtest_allowlist.txt | 14 --- stubs/PyYAML/yaml/composer.pyi | 2 + stubs/PyYAML/yaml/constructor.pyi | 2 + stubs/PyYAML/yaml/dumper.pyi | 2 + stubs/PyYAML/yaml/emitter.pyi | 2 + stubs/PyYAML/yaml/error.pyi | 2 + stubs/PyYAML/yaml/loader.pyi | 2 + stubs/PyYAML/yaml/parser.pyi | 2 + stubs/PyYAML/yaml/reader.pyi | 2 + stubs/PyYAML/yaml/representer.pyi | 2 + stubs/PyYAML/yaml/resolver.pyi | 2 + stubs/PyYAML/yaml/scanner.pyi | 2 + stubs/PyYAML/yaml/serializer.pyi | 2 + stubs/aiofiles/@tests/stubtest_allowlist.txt | 5 -- stubs/aiofiles/aiofiles/__init__.pyi | 2 + stubs/aiofiles/aiofiles/tempfile/__init__.pyi | 2 + .../aiofiles/aiofiles/threadpool/__init__.pyi | 2 + stubs/boltons/@tests/stubtest_allowlist.txt | 17 ---- stubs/boltons/boltons/debugutils.pyi | 2 + stubs/boltons/boltons/dictutils.pyi | 2 + stubs/boltons/boltons/excutils.pyi | 2 + stubs/boltons/boltons/fileutils.pyi | 2 + stubs/boltons/boltons/formatutils.pyi | 9 ++ stubs/boltons/boltons/gcutils.pyi | 2 + stubs/boltons/boltons/jsonutils.pyi | 2 + stubs/boltons/boltons/listutils.pyi | 2 + stubs/boltons/boltons/namedutils.pyi | 2 + stubs/boltons/boltons/pathutils.pyi | 2 + stubs/boltons/boltons/queueutils.pyi | 2 + stubs/boltons/boltons/setutils.pyi | 2 + stubs/boltons/boltons/strutils.pyi | 35 ++++++++ stubs/boltons/boltons/tableutils.pyi | 2 + stubs/boltons/boltons/tbutils.pyi | 11 +++ .../@tests/stubtest_allowlist.txt | 4 - stubs/console-menu/consolemenu/__init__.pyi | 11 +++ .../consolemenu/format/__init__.pyi | 15 ++++ .../consolemenu/items/__init__.pyi | 2 + .../mysqlclient/@tests/stubtest_allowlist.txt | 4 - stubs/mysqlclient/MySQLdb/__init__.pyi | 44 ++++++++++ .../MySQLdb/constants/__init__.pyi | 2 + .../@tests/stubtest_allowlist_win32.txt | 3 - stubs/pyserial/serial/win32.pyi | 85 +++++++++++++++++++ stubs/pytz/@tests/stubtest_allowlist.txt | 5 -- stubs/pytz/pytz/__init__.pyi | 17 ++++ stubs/pytz/pytz/reference.pyi | 2 + stubs/pytz/pytz/tzinfo.pyi | 2 + stubs/shapely/@tests/stubtest_allowlist.txt | 3 - stubs/shapely/shapely/_ragged_array.pyi | 2 + .../simplejson/@tests/stubtest_allowlist.txt | 6 -- 
stubs/simplejson/simplejson/__init__.pyi | 14 +++ stubs/simplejson/simplejson/decoder.pyi | 2 + stubs/simplejson/simplejson/scanner.pyi | 2 + .../@tests/stubtest_allowlist.txt | 2 - stubs/singledispatch/singledispatch.pyi | 2 + 54 files changed, 311 insertions(+), 63 deletions(-) delete mode 100644 stubs/console-menu/@tests/stubtest_allowlist.txt delete mode 100644 stubs/singledispatch/@tests/stubtest_allowlist.txt diff --git a/stubs/PyYAML/@tests/stubtest_allowlist.txt b/stubs/PyYAML/@tests/stubtest_allowlist.txt index b9bd224901ac..a50b77fb0b53 100644 --- a/stubs/PyYAML/@tests/stubtest_allowlist.txt +++ b/stubs/PyYAML/@tests/stubtest_allowlist.txt @@ -1,17 +1,3 @@ -# TODO: missing from stub -yaml.composer.__all__ -yaml.constructor.__all__ -yaml.dumper.__all__ -yaml.emitter.__all__ -yaml.error.__all__ -yaml.loader.__all__ -yaml.parser.__all__ -yaml.reader.__all__ -yaml.representer.__all__ -yaml.resolver.__all__ -yaml.scanner.__all__ -yaml.serializer.__all__ - # yaml._yaml is for backwards compatibility so none of it matters anyway yaml._yaml.__test__ diff --git a/stubs/PyYAML/yaml/composer.pyi b/stubs/PyYAML/yaml/composer.pyi index 7bc87357c3e1..4c80c5bd3da8 100644 --- a/stubs/PyYAML/yaml/composer.pyi +++ b/stubs/PyYAML/yaml/composer.pyi @@ -16,3 +16,5 @@ class Composer: def compose_scalar_node(self, anchor: dict[Any, Node]) -> ScalarNode: ... def compose_sequence_node(self, anchor: dict[Any, Node]) -> SequenceNode: ... def compose_mapping_node(self, anchor: dict[Any, Node]) -> MappingNode: ... + +__all__ = ["Composer", "ComposerError"] diff --git a/stubs/PyYAML/yaml/constructor.pyi b/stubs/PyYAML/yaml/constructor.pyi index 14aeb646f68a..b024ba42e95c 100644 --- a/stubs/PyYAML/yaml/constructor.pyi +++ b/stubs/PyYAML/yaml/constructor.pyi @@ -101,3 +101,5 @@ class Constructor(SafeConstructor): def construct_python_object(self, suffix, node): ... def construct_python_object_apply(self, suffix, node, newobj=False): ... def construct_python_object_new(self, suffix, node): ... + +__all__ = ["BaseConstructor", "SafeConstructor", "FullConstructor", "UnsafeConstructor", "Constructor", "ConstructorError"] diff --git a/stubs/PyYAML/yaml/dumper.pyi b/stubs/PyYAML/yaml/dumper.pyi index e9b18e1dc197..f40d9db52e8f 100644 --- a/stubs/PyYAML/yaml/dumper.pyi +++ b/stubs/PyYAML/yaml/dumper.pyi @@ -69,3 +69,5 @@ class Dumper(Emitter, Serializer, Representer, Resolver): tags: Mapping[str, str] | None = None, sort_keys: bool = True, ) -> None: ... + +__all__ = ["BaseDumper", "SafeDumper", "Dumper"] diff --git a/stubs/PyYAML/yaml/emitter.pyi b/stubs/PyYAML/yaml/emitter.pyi index ee8f9403b616..68808c751095 100644 --- a/stubs/PyYAML/yaml/emitter.pyi +++ b/stubs/PyYAML/yaml/emitter.pyi @@ -126,3 +126,5 @@ class Emitter: def write_folded(self, text: str) -> None: ... def write_literal(self, text: str) -> None: ... def write_plain(self, text: str, split: bool = ...) -> None: ... + +__all__ = ["Emitter", "EmitterError"] diff --git a/stubs/PyYAML/yaml/error.pyi b/stubs/PyYAML/yaml/error.pyi index 35c410ac9eae..9fe53f15d838 100644 --- a/stubs/PyYAML/yaml/error.pyi +++ b/stubs/PyYAML/yaml/error.pyi @@ -24,3 +24,5 @@ class MarkedYAMLError(YAMLError): problem_mark: Mark | None = None, note: str | None = None, ) -> None: ... 
+ +__all__ = ["Mark", "YAMLError", "MarkedYAMLError"] diff --git a/stubs/PyYAML/yaml/loader.pyi b/stubs/PyYAML/yaml/loader.pyi index 950f18f9cd2a..c3944411ca29 100644 --- a/stubs/PyYAML/yaml/loader.pyi +++ b/stubs/PyYAML/yaml/loader.pyi @@ -25,3 +25,5 @@ class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver): def __init__(self, stream: _ReadStream) -> None: ... + +__all__ = ["BaseLoader", "FullLoader", "SafeLoader", "Loader", "UnsafeLoader"] diff --git a/stubs/PyYAML/yaml/parser.pyi b/stubs/PyYAML/yaml/parser.pyi index 1db45c4ed27e..b2c7b42db3ce 100644 --- a/stubs/PyYAML/yaml/parser.pyi +++ b/stubs/PyYAML/yaml/parser.pyi @@ -43,3 +43,5 @@ class Parser: def parse_flow_mapping_value(self): ... def parse_flow_mapping_empty_value(self): ... def process_empty_scalar(self, mark): ... + +__all__ = ["Parser", "ParserError"] diff --git a/stubs/PyYAML/yaml/reader.pyi b/stubs/PyYAML/yaml/reader.pyi index cace808a88fa..a642be1b88bc 100644 --- a/stubs/PyYAML/yaml/reader.pyi +++ b/stubs/PyYAML/yaml/reader.pyi @@ -37,3 +37,5 @@ class Reader: def check_printable(self, data): ... def update(self, length): ... def update_raw(self, size=4096): ... + +__all__ = ["Reader", "ReaderError"] diff --git a/stubs/PyYAML/yaml/representer.pyi b/stubs/PyYAML/yaml/representer.pyi index c84c56218871..517ea5155a30 100644 --- a/stubs/PyYAML/yaml/representer.pyi +++ b/stubs/PyYAML/yaml/representer.pyi @@ -59,3 +59,5 @@ class Representer(SafeRepresenter): def represent_module(self, data: ModuleType) -> ScalarNode: ... def represent_object(self, data) -> SequenceNode | MappingNode: ... def represent_ordered_dict(self, data: Mapping[Any, Any]) -> SequenceNode: ... + +__all__ = ["BaseRepresenter", "SafeRepresenter", "Representer", "RepresenterError"] diff --git a/stubs/PyYAML/yaml/resolver.pyi b/stubs/PyYAML/yaml/resolver.pyi index cffe02a41f52..614425ab4cbb 100644 --- a/stubs/PyYAML/yaml/resolver.pyi +++ b/stubs/PyYAML/yaml/resolver.pyi @@ -23,3 +23,5 @@ class BaseResolver: def resolve(self, kind, value, implicit): ... class Resolver(BaseResolver): ... + +__all__ = ["BaseResolver", "Resolver"] diff --git a/stubs/PyYAML/yaml/scanner.pyi b/stubs/PyYAML/yaml/scanner.pyi index 64890a19a5f6..0feaf8caa88b 100644 --- a/stubs/PyYAML/yaml/scanner.pyi +++ b/stubs/PyYAML/yaml/scanner.pyi @@ -95,3 +95,5 @@ class Scanner: def scan_tag_uri(self, name, start_mark): ... def scan_uri_escapes(self, name, start_mark): ... def scan_line_break(self): ... + +__all__ = ["Scanner", "ScannerError"] diff --git a/stubs/PyYAML/yaml/serializer.pyi b/stubs/PyYAML/yaml/serializer.pyi index 471429020ce1..ae1e98f796cf 100644 --- a/stubs/PyYAML/yaml/serializer.pyi +++ b/stubs/PyYAML/yaml/serializer.pyi @@ -23,3 +23,5 @@ class Serializer: def anchor_node(self, node): ... def generate_anchor(self, node): ... def serialize_node(self, node, parent, index): ... + +__all__ = ["Serializer", "SerializerError"] diff --git a/stubs/aiofiles/@tests/stubtest_allowlist.txt b/stubs/aiofiles/@tests/stubtest_allowlist.txt index a57cbbfca61e..6562daed8456 100644 --- a/stubs/aiofiles/@tests/stubtest_allowlist.txt +++ b/stubs/aiofiles/@tests/stubtest_allowlist.txt @@ -1,8 +1,3 @@ -# TODO: missing from stub -aiofiles.__all__ -aiofiles.tempfile.__all__ -aiofiles.threadpool.__all__ - # These all delegate using *args,**kwargs, but stubs use signature of # method they are being delegated to. 
aiofiles.threadpool.binary.AsyncBufferedIOBase.close diff --git a/stubs/aiofiles/aiofiles/__init__.pyi b/stubs/aiofiles/aiofiles/__init__.pyi index bc52b8e0c849..64410d4ed529 100644 --- a/stubs/aiofiles/aiofiles/__init__.pyi +++ b/stubs/aiofiles/aiofiles/__init__.pyi @@ -8,3 +8,5 @@ from .threadpool import ( stdout as stdout, stdout_bytes as stdout_bytes, ) + +__all__ = ["open", "tempfile", "stdin", "stdout", "stderr", "stdin_bytes", "stdout_bytes", "stderr_bytes"] diff --git a/stubs/aiofiles/aiofiles/tempfile/__init__.pyi b/stubs/aiofiles/aiofiles/tempfile/__init__.pyi index e8016aa8d51a..1e3bbbb33933 100644 --- a/stubs/aiofiles/aiofiles/tempfile/__init__.pyi +++ b/stubs/aiofiles/aiofiles/tempfile/__init__.pyi @@ -321,3 +321,5 @@ def TemporaryDirectory( class AiofilesContextManagerTempDir(AiofilesContextManager[AsyncTemporaryDirectory]): async def __aenter__(self) -> str: ... # type: ignore[override] + +__all__ = ["NamedTemporaryFile", "TemporaryFile", "SpooledTemporaryFile", "TemporaryDirectory"] diff --git a/stubs/aiofiles/aiofiles/threadpool/__init__.pyi b/stubs/aiofiles/aiofiles/threadpool/__init__.pyi index 00c587ccdc6a..cf4814ced833 100644 --- a/stubs/aiofiles/aiofiles/threadpool/__init__.pyi +++ b/stubs/aiofiles/aiofiles/threadpool/__init__.pyi @@ -104,3 +104,5 @@ stderr: AsyncTextIndirectIOWrapper stdin_bytes: AsyncIndirectBufferedIOBase stdout_bytes: AsyncIndirectBufferedIOBase stderr_bytes: AsyncIndirectBufferedIOBase + +__all__ = ("open", "stdin", "stdout", "stderr", "stdin_bytes", "stdout_bytes", "stderr_bytes") diff --git a/stubs/boltons/@tests/stubtest_allowlist.txt b/stubs/boltons/@tests/stubtest_allowlist.txt index 40d27d00fc3d..2ad9d612df92 100644 --- a/stubs/boltons/@tests/stubtest_allowlist.txt +++ b/stubs/boltons/@tests/stubtest_allowlist.txt @@ -1,19 +1,2 @@ -# TODO: missing from stub -boltons.debugutils.__all__ -boltons.dictutils.__all__ -boltons.excutils.__all__ -boltons.fileutils.__all__ -boltons.formatutils.__all__ -boltons.gcutils.__all__ -boltons.jsonutils.__all__ -boltons.listutils.__all__ -boltons.namedutils.__all__ -boltons.pathutils.__all__ -boltons.queueutils.__all__ -boltons.setutils.__all__ -boltons.strutils.__all__ -boltons.tableutils.__all__ -boltons.tbutils.__all__ - boltons.funcutils.CachedInstancePartial.__partialmethod__ boltons.funcutils.InstancePartial.__partialmethod__ diff --git a/stubs/boltons/boltons/debugutils.pyi b/stubs/boltons/boltons/debugutils.pyi index 12bdcfccc9c0..6b6f8f6989a4 100644 --- a/stubs/boltons/boltons/debugutils.pyi +++ b/stubs/boltons/boltons/debugutils.pyi @@ -6,3 +6,5 @@ def pdb_on_exception(limit: int = 100) -> None: ... def wrap_trace( obj, hook: Callable[..., Any] = ..., which: str | None = None, events: str | None = None, label: str | None = None ): ... + +__all__ = ["pdb_on_signal", "pdb_on_exception", "wrap_trace"] diff --git a/stubs/boltons/boltons/dictutils.pyi b/stubs/boltons/boltons/dictutils.pyi index 6a6b851a9ffb..d57469749539 100644 --- a/stubs/boltons/boltons/dictutils.pyi +++ b/stubs/boltons/boltons/dictutils.pyi @@ -99,3 +99,5 @@ class FrozenDict(dict[_KT, _VT]): def popitem(self, *a, **kw) -> NoReturn: ... def setdefault(self, *a, **kw) -> NoReturn: ... def clear(self, *a, **kw) -> NoReturn: ... 
+ +__all__ = ["MultiDict", "OMD", "OrderedMultiDict", "OneToOne", "ManyToMany", "subdict", "FrozenDict"] diff --git a/stubs/boltons/boltons/excutils.pyi b/stubs/boltons/boltons/excutils.pyi index 5994c849e301..bb6a42dbcd11 100644 --- a/stubs/boltons/boltons/excutils.pyi +++ b/stubs/boltons/boltons/excutils.pyi @@ -7,3 +7,5 @@ class ExceptionCauseMixin(Exception): def get_str(self) -> str: ... class MathError(ExceptionCauseMixin, ValueError): ... + +__all__ = ["ExceptionCauseMixin"] diff --git a/stubs/boltons/boltons/fileutils.pyi b/stubs/boltons/boltons/fileutils.pyi index df8ca79090a4..1cf22af892e7 100644 --- a/stubs/boltons/boltons/fileutils.pyi +++ b/stubs/boltons/boltons/fileutils.pyi @@ -91,3 +91,5 @@ class DummyFile: def __next__(self) -> NoReturn: ... def __enter__(self) -> None: ... def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + +__all__ = ["mkdir_p", "atomic_save", "AtomicSaver", "FilePerms", "iter_find_files", "copytree"] diff --git a/stubs/boltons/boltons/formatutils.pyi b/stubs/boltons/boltons/formatutils.pyi index 1f0656877975..b9f3eb897b54 100644 --- a/stubs/boltons/boltons/formatutils.pyi +++ b/stubs/boltons/boltons/formatutils.pyi @@ -35,3 +35,12 @@ class DeferredValue(Generic[_T]): def __float__(self) -> float: ... def __unicode__(self) -> str: ... def __format__(self, fmt: str) -> str: ... + +__all__ = [ + "DeferredValue", + "get_format_args", + "tokenize_format_str", + "construct_format_field_str", + "infer_positional_format_args", + "BaseFormatField", +] diff --git a/stubs/boltons/boltons/gcutils.pyi b/stubs/boltons/boltons/gcutils.pyi index 455af78e315e..33e8006102b9 100644 --- a/stubs/boltons/boltons/gcutils.pyi +++ b/stubs/boltons/boltons/gcutils.pyi @@ -12,3 +12,5 @@ class GCToggler: toggle_gc: GCToggler toggle_gc_postcollect: GCToggler + +__all__ = ["get_all", "GCToggler", "toggle_gc", "toggle_gc_postcollect"] diff --git a/stubs/boltons/boltons/jsonutils.pyi b/stubs/boltons/boltons/jsonutils.pyi index bd6a6d23fbea..0cc1bf7a46dd 100644 --- a/stubs/boltons/boltons/jsonutils.pyi +++ b/stubs/boltons/boltons/jsonutils.pyi @@ -23,3 +23,5 @@ class JSONLIterator: def __iter__(self) -> Self: ... def next(self) -> Any: ... __next__ = next + +__all__ = ["JSONLIterator", "reverse_iter_lines"] diff --git a/stubs/boltons/boltons/listutils.pyi b/stubs/boltons/boltons/listutils.pyi index 89f9b122e092..35b8332ff14f 100644 --- a/stubs/boltons/boltons/listutils.pyi +++ b/stubs/boltons/boltons/listutils.pyi @@ -28,3 +28,5 @@ BList: TypeAlias = BarrelList[_T] class SplayList(list[_T]): def shift(self, item_index: int, dest_index: int = 0) -> None: ... def swap(self, item_index: SupportsIndex, dest_index: SupportsIndex) -> None: ... + +__all__ = ["BList", "BarrelList"] diff --git a/stubs/boltons/boltons/namedutils.pyi b/stubs/boltons/boltons/namedutils.pyi index c92f6bb870ca..ff57445a5998 100644 --- a/stubs/boltons/boltons/namedutils.pyi +++ b/stubs/boltons/boltons/namedutils.pyi @@ -2,3 +2,5 @@ from collections.abc import Iterable def namedtuple(typename: str, field_names: str | Iterable[str], verbose: bool = False, rename: bool = False): ... def namedlist(typename: str, field_names: str | Iterable[str], verbose: bool = False, rename: bool = False): ... + +__all__ = ["namedlist", "namedtuple"] diff --git a/stubs/boltons/boltons/pathutils.pyi b/stubs/boltons/boltons/pathutils.pyi index 58d9d4c934b6..60566fa0f302 100644 --- a/stubs/boltons/boltons/pathutils.pyi +++ b/stubs/boltons/boltons/pathutils.pyi @@ -11,3 +11,5 @@ def augpath( ) -> str: ... 
def shrinkuser(path: StrPath, home: str = "~") -> str: ... def expandpath(path: StrPath) -> str: ... + +__all__ = ["augpath", "shrinkuser", "expandpath"] diff --git a/stubs/boltons/boltons/queueutils.pyi b/stubs/boltons/boltons/queueutils.pyi index 9e6af5be68d7..df8eca210e9d 100644 --- a/stubs/boltons/boltons/queueutils.pyi +++ b/stubs/boltons/boltons/queueutils.pyi @@ -12,3 +12,5 @@ class HeapPriorityQueue(BasePriorityQueue): ... class SortedPriorityQueue(BasePriorityQueue): ... PriorityQueue: TypeAlias = SortedPriorityQueue + +__all__ = ["PriorityQueue", "BasePriorityQueue", "HeapPriorityQueue", "SortedPriorityQueue"] diff --git a/stubs/boltons/boltons/setutils.pyi b/stubs/boltons/boltons/setutils.pyi index 97dc8004bc5a..576687a763a7 100644 --- a/stubs/boltons/boltons/setutils.pyi +++ b/stubs/boltons/boltons/setutils.pyi @@ -97,3 +97,5 @@ class _ComplementSet: def __len__(self) -> int: ... def __iter__(self) -> Iterator[Any]: ... def __bool__(self) -> bool: ... + +__all__ = ["IndexedSet", "complement"] diff --git a/stubs/boltons/boltons/strutils.pyi b/stubs/boltons/boltons/strutils.pyi index f39ea8d3b456..2dbd4f66a056 100644 --- a/stubs/boltons/boltons/strutils.pyi +++ b/stubs/boltons/boltons/strutils.pyi @@ -64,3 +64,38 @@ class MultiReplace: def multi_replace(text: str, sub_map: dict[str, str], **kwargs) -> str: ... def unwrap_text(text: str, ending: str | None = "\n\n") -> str: ... def removeprefix(text: str, prefix: str) -> str: ... + +__all__ = [ + "camel2under", + "under2camel", + "slugify", + "split_punct_ws", + "unit_len", + "ordinalize", + "cardinalize", + "pluralize", + "singularize", + "asciify", + "is_ascii", + "is_uuid", + "html2text", + "strip_ansi", + "bytes2human", + "find_hashtags", + "a10n", + "gzip_bytes", + "gunzip_bytes", + "iter_splitlines", + "indent", + "escape_shell_args", + "args2cmd", + "args2sh", + "parse_int_list", + "format_int_list", + "complement_int_list", + "int_ranges_from_int_list", + "MultiReplace", + "multi_replace", + "unwrap_text", + "removeprefix", +] diff --git a/stubs/boltons/boltons/tableutils.pyi b/stubs/boltons/boltons/tableutils.pyi index ce6d5b72f2a8..f59606c0391d 100644 --- a/stubs/boltons/boltons/tableutils.pyi +++ b/stubs/boltons/boltons/tableutils.pyi @@ -61,3 +61,5 @@ class Table: ): ... def get_cell_html(self, value): ... def to_text(self, with_headers: bool = True, maxlen: Incomplete | None = None): ... + +__all__ = ["Table"] diff --git a/stubs/boltons/boltons/tbutils.pyi b/stubs/boltons/boltons/tbutils.pyi index 5900a48b7a74..a1fb06843031 100644 --- a/stubs/boltons/boltons/tbutils.pyi +++ b/stubs/boltons/boltons/tbutils.pyi @@ -93,3 +93,14 @@ class ParsedException: def from_string(cls, tb_str: str) -> Self: ... 
ParsedTB = ParsedException + +__all__ = [ + "ExceptionInfo", + "TracebackInfo", + "Callpoint", + "ContextualExceptionInfo", + "ContextualTracebackInfo", + "ContextualCallpoint", + "print_exception", + "ParsedException", +] diff --git a/stubs/console-menu/@tests/stubtest_allowlist.txt b/stubs/console-menu/@tests/stubtest_allowlist.txt deleted file mode 100644 index 242ebed35bd2..000000000000 --- a/stubs/console-menu/@tests/stubtest_allowlist.txt +++ /dev/null @@ -1,4 +0,0 @@ -# TODO: missing from stub -consolemenu.__all__ -consolemenu.format.__all__ -consolemenu.items.__all__ diff --git a/stubs/console-menu/consolemenu/__init__.pyi b/stubs/console-menu/consolemenu/__init__.pyi index 8bc6c6cd7460..e597f474c010 100644 --- a/stubs/console-menu/consolemenu/__init__.pyi +++ b/stubs/console-menu/consolemenu/__init__.pyi @@ -4,3 +4,14 @@ from .menu_formatter import MenuFormatBuilder as MenuFormatBuilder from .multiselect_menu import MultiSelectMenu as MultiSelectMenu from .prompt_utils import PromptUtils as PromptUtils from .selection_menu import SelectionMenu as SelectionMenu + +__all__ = [ + "ConsoleMenu", + "SelectionMenu", + "MultiSelectMenu", + "MenuFormatBuilder", + "PromptUtils", + "Screen", + "items", + "clear_terminal", +] diff --git a/stubs/console-menu/consolemenu/format/__init__.pyi b/stubs/console-menu/consolemenu/format/__init__.pyi index f43b6c56360a..cc963da051c8 100644 --- a/stubs/console-menu/consolemenu/format/__init__.pyi +++ b/stubs/console-menu/consolemenu/format/__init__.pyi @@ -12,3 +12,18 @@ from .menu_borders import ( from .menu_margins import MenuMargins as MenuMargins from .menu_padding import MenuPadding as MenuPadding from .menu_style import MenuStyle as MenuStyle + +__all__ = [ + "MenuBorderStyle", + "MenuBorderStyleType", + "MenuBorderStyleFactory", + "MenuMargins", + "MenuPadding", + "MenuStyle", + "AsciiBorderStyle", + "LightBorderStyle", + "HeavyBorderStyle", + "DoubleLineBorderStyle", + "DoubleLineOuterLightInnerBorderStyle", + "HeavyOuterLightInnerBorderStyle", +] diff --git a/stubs/console-menu/consolemenu/items/__init__.pyi b/stubs/console-menu/consolemenu/items/__init__.pyi index 570ff3f5e422..29943004afbf 100644 --- a/stubs/console-menu/consolemenu/items/__init__.pyi +++ b/stubs/console-menu/consolemenu/items/__init__.pyi @@ -4,3 +4,5 @@ from .external_item import ExternalItem as ExternalItem from .function_item import FunctionItem as FunctionItem from .selection_item import SelectionItem as SelectionItem from .submenu_item import SubmenuItem as SubmenuItem + +__all__ = ["CommandItem", "ExitItem", "ExternalItem", "FunctionItem", "MenuItem", "SelectionItem", "SubmenuItem"] diff --git a/stubs/mysqlclient/@tests/stubtest_allowlist.txt b/stubs/mysqlclient/@tests/stubtest_allowlist.txt index ca7d357fa40e..fbc91acd69fa 100644 --- a/stubs/mysqlclient/@tests/stubtest_allowlist.txt +++ b/stubs/mysqlclient/@tests/stubtest_allowlist.txt @@ -1,5 +1 @@ -# TODO: missing from stub -MySQLdb.__all__ -MySQLdb.constants.__all__ - MySQLdb.Connection diff --git a/stubs/mysqlclient/MySQLdb/__init__.pyi b/stubs/mysqlclient/MySQLdb/__init__.pyi index ab3a60939dad..740ff8fe5f04 100644 --- a/stubs/mysqlclient/MySQLdb/__init__.pyi +++ b/stubs/mysqlclient/MySQLdb/__init__.pyi @@ -49,3 +49,47 @@ def Binary(x): ... def Connect(*args, **kwargs) -> Connection: ... 
connect = Connect + +__all__ = [ + "BINARY", + "Binary", + "Connect", + "Connection", + "DATE", + "Date", + "Time", + "Timestamp", + "DateFromTicks", + "TimeFromTicks", + "TimestampFromTicks", + "DataError", + "DatabaseError", + "Error", + "FIELD_TYPE", + "IntegrityError", + "InterfaceError", + "InternalError", + "MySQLError", + "NUMBER", + "NotSupportedError", + "DBAPISet", + "OperationalError", + "ProgrammingError", + "ROWID", + "STRING", + "TIME", + "TIMESTAMP", + "Warning", + "apilevel", + "connect", + "connections", + "constants", + "converters", + "cursors", + "debug", + "get_client_info", + "paramstyle", + "string_literal", + "threadsafety", + "version_info", +] diff --git a/stubs/mysqlclient/MySQLdb/constants/__init__.pyi b/stubs/mysqlclient/MySQLdb/constants/__init__.pyi index 3eaad111bed7..df9363bc4207 100644 --- a/stubs/mysqlclient/MySQLdb/constants/__init__.pyi +++ b/stubs/mysqlclient/MySQLdb/constants/__init__.pyi @@ -1 +1,3 @@ from . import CLIENT as CLIENT, CR as CR, ER as ER, FIELD_TYPE as FIELD_TYPE, FLAG as FLAG + +__all__ = ["CR", "FIELD_TYPE", "CLIENT", "ER", "FLAG"] diff --git a/stubs/pyserial/@tests/stubtest_allowlist_win32.txt b/stubs/pyserial/@tests/stubtest_allowlist_win32.txt index 09a6a5fae473..75273f1f2bc8 100644 --- a/stubs/pyserial/@tests/stubtest_allowlist_win32.txt +++ b/stubs/pyserial/@tests/stubtest_allowlist_win32.txt @@ -1,6 +1,3 @@ -# TODO: missing from stub -serial.win32.__all__ - # Error: failed to import # ======================= serial.serialposix # Posix only diff --git a/stubs/pyserial/serial/win32.pyi b/stubs/pyserial/serial/win32.pyi index 0002114ace9a..be1cb025d166 100644 --- a/stubs/pyserial/serial/win32.pyi +++ b/stubs/pyserial/serial/win32.pyi @@ -166,3 +166,88 @@ if sys.platform == "win32": OffsetHigh: _CField[Incomplete, Incomplete, Incomplete] PVOID: TypeAlias = c_void_p + + __all__ = [ + "GetLastError", + "MS_CTS_ON", + "FILE_ATTRIBUTE_NORMAL", + "DTR_CONTROL_ENABLE", + "_COMSTAT", + "MS_RLSD_ON", + "GetOverlappedResult", + "SETXON", + "PURGE_TXABORT", + "PurgeComm", + "N11_OVERLAPPED4DOLLAR_48E", + "EV_RING", + "ONESTOPBIT", + "SETXOFF", + "PURGE_RXABORT", + "GetCommState", + "RTS_CONTROL_ENABLE", + "_DCB", + "CreateEvent", + "_COMMTIMEOUTS", + "_SECURITY_ATTRIBUTES", + "EV_DSR", + "EV_PERR", + "EV_RXFLAG", + "OPEN_EXISTING", + "DCB", + "FILE_FLAG_OVERLAPPED", + "EV_CTS", + "SetupComm", + "LPOVERLAPPED", + "EV_TXEMPTY", + "ClearCommBreak", + "LPSECURITY_ATTRIBUTES", + "SetCommBreak", + "SetCommTimeouts", + "COMMTIMEOUTS", + "ODDPARITY", + "EV_RLSD", + "GetCommModemStatus", + "EV_EVENT2", + "PURGE_TXCLEAR", + "EV_BREAK", + "EVENPARITY", + "LPCVOID", + "COMSTAT", + "ReadFile", + "PVOID", + "_OVERLAPPED", + "WriteFile", + "GetCommTimeouts", + "ResetEvent", + "EV_RXCHAR", + "LPCOMSTAT", + "ClearCommError", + "ERROR_IO_PENDING", + "EscapeCommFunction", + "GENERIC_READ", + "RTS_CONTROL_HANDSHAKE", + "OVERLAPPED", + "DTR_CONTROL_HANDSHAKE", + "PURGE_RXCLEAR", + "GENERIC_WRITE", + "LPDCB", + "CreateEventW", + "SetCommMask", + "EV_EVENT1", + "SetCommState", + "LPVOID", + "CreateFileW", + "LPDWORD", + "EV_RX80FULL", + "TWOSTOPBITS", + "LPCOMMTIMEOUTS", + "MAXDWORD", + "MS_DSR_ON", + "MS_RING_ON", + "N11_OVERLAPPED4DOLLAR_484DOLLAR_49E", + "EV_ERR", + "ULONG_PTR", + "CreateFile", + "NOPARITY", + "CloseHandle", + ] diff --git a/stubs/pytz/@tests/stubtest_allowlist.txt b/stubs/pytz/@tests/stubtest_allowlist.txt index 6af1b5cb4bb3..c25eb8283ebb 100644 --- a/stubs/pytz/@tests/stubtest_allowlist.txt +++ b/stubs/pytz/@tests/stubtest_allowlist.txt @@ 
-1,8 +1,3 @@ -# TODO: missing from stub -pytz.__all__ -pytz.reference.__all__ -pytz.tzinfo.__all__ - # "Abstract" methods, see the .pyi file for more details. pytz.BaseTzInfo.localize pytz.BaseTzInfo.normalize diff --git a/stubs/pytz/pytz/__init__.pyi b/stubs/pytz/pytz/__init__.pyi index e934a3d15567..78b0a8c4e506 100644 --- a/stubs/pytz/pytz/__init__.pyi +++ b/stubs/pytz/pytz/__init__.pyi @@ -44,3 +44,20 @@ country_names: Mapping[str, str] ZERO: datetime.timedelta HOUR: datetime.timedelta VERSION: str + +__all__ = [ + "timezone", + "utc", + "country_timezones", + "country_names", + "AmbiguousTimeError", + "InvalidTimeError", + "NonExistentTimeError", + "UnknownTimeZoneError", + "all_timezones", + "all_timezones_set", + "common_timezones", + "common_timezones_set", + "BaseTzInfo", + "FixedOffset", +] diff --git a/stubs/pytz/pytz/reference.pyi b/stubs/pytz/pytz/reference.pyi index cf9ceeebb266..be187db03f33 100644 --- a/stubs/pytz/pytz/reference.pyi +++ b/stubs/pytz/pytz/reference.pyi @@ -36,3 +36,5 @@ Eastern: USTimeZone Central: USTimeZone Mountain: USTimeZone Pacific: USTimeZone + +__all__ = ["FixedOffset", "LocalTimezone", "USTimeZone", "Eastern", "Central", "Mountain", "Pacific", "UTC"] diff --git a/stubs/pytz/pytz/tzinfo.pyi b/stubs/pytz/pytz/tzinfo.pyi index 33655856149c..66c707950f45 100644 --- a/stubs/pytz/pytz/tzinfo.pyi +++ b/stubs/pytz/pytz/tzinfo.pyi @@ -39,3 +39,5 @@ class DstTzInfo(BaseTzInfo): @overload def utcoffset(self, dt: datetime.datetime, is_dst: bool | None = None) -> datetime.timedelta: ... def dst(self, dt: datetime.datetime | None, is_dst: bool | None = None) -> datetime.timedelta | None: ... + +__all__: list[str] = [] diff --git a/stubs/shapely/@tests/stubtest_allowlist.txt b/stubs/shapely/@tests/stubtest_allowlist.txt index 40afc59f2234..2d094265aee0 100644 --- a/stubs/shapely/@tests/stubtest_allowlist.txt +++ b/stubs/shapely/@tests/stubtest_allowlist.txt @@ -1,6 +1,3 @@ -# TODO: missing from stub -shapely._ragged_array.__all__ - shapely\.geometry\.conftest shapely\.tests.* diff --git a/stubs/shapely/shapely/_ragged_array.pyi b/stubs/shapely/shapely/_ragged_array.pyi index e32f338d0f3f..c79038f46559 100644 --- a/stubs/shapely/shapely/_ragged_array.pyi +++ b/stubs/shapely/shapely/_ragged_array.pyi @@ -10,3 +10,5 @@ def to_ragged_array( def from_ragged_array( geometry_type: GeometryType, coords: ArrayLike[float], offsets: ArrayLikeSeq[int] | None = None ) -> GeoArray: ... 
+ +__all__ = ["to_ragged_array", "from_ragged_array"] diff --git a/stubs/simplejson/@tests/stubtest_allowlist.txt b/stubs/simplejson/@tests/stubtest_allowlist.txt index 083d3469842d..d2e504a07432 100644 --- a/stubs/simplejson/@tests/stubtest_allowlist.txt +++ b/stubs/simplejson/@tests/stubtest_allowlist.txt @@ -1,8 +1,3 @@ -# TODO: missing from stub -simplejson.__all__ -simplejson.decoder.__all__ -simplejson.scanner.__all__ - # Speedups (C vs Python inconsistency): simplejson.scanner.make_scanner simplejson.scanner.JSONDecodeError.__init__ @@ -18,4 +13,3 @@ simplejson.tests.* simplejson.compat simplejson.ordered_dict simplejson.tool -simplejson.OrderedDict diff --git a/stubs/simplejson/simplejson/__init__.pyi b/stubs/simplejson/simplejson/__init__.pyi index 4b61e31e33ad..aec7c35da6b2 100644 --- a/stubs/simplejson/simplejson/__init__.pyi +++ b/stubs/simplejson/simplejson/__init__.pyi @@ -1,4 +1,5 @@ from _typeshed import SupportsRichComparison +from collections import OrderedDict from collections.abc import Callable from typing import IO, Any, TypeVar, overload from typing_extensions import TypeAlias @@ -166,3 +167,16 @@ def load( allow_nan: bool = ..., ) -> Any: ... def simple_first(kv: tuple[_T, object]) -> tuple[bool, _T]: ... + +__all__ = [ + "dump", + "dumps", + "load", + "loads", + "JSONDecoder", + "JSONDecodeError", + "JSONEncoder", + "OrderedDict", + "simple_first", + "RawJSON", +] diff --git a/stubs/simplejson/simplejson/decoder.pyi b/stubs/simplejson/simplejson/decoder.pyi index ff1ef5646b8f..8f9dc05065b4 100644 --- a/stubs/simplejson/simplejson/decoder.pyi +++ b/stubs/simplejson/simplejson/decoder.pyi @@ -28,3 +28,5 @@ class JSONDecoder: def raw_decode( self, s: str, idx: int = ..., _w: Callable[[str, int], Match[str]] = ..., _PY3: Literal[True] = ... ) -> tuple[Any, int]: ... + +__all__ = ["JSONDecoder"] diff --git a/stubs/simplejson/simplejson/scanner.pyi b/stubs/simplejson/simplejson/scanner.pyi index ea5ba8c47b56..2e7f9fe58eda 100644 --- a/stubs/simplejson/simplejson/scanner.pyi +++ b/stubs/simplejson/simplejson/scanner.pyi @@ -13,3 +13,5 @@ class JSONDecodeError(ValueError): endcolno: int | None def make_scanner(context: JSONDecoder) -> Callable[[str, int], tuple[bool, int]]: ... + +__all__ = ["make_scanner", "JSONDecodeError"] diff --git a/stubs/singledispatch/@tests/stubtest_allowlist.txt b/stubs/singledispatch/@tests/stubtest_allowlist.txt deleted file mode 100644 index 9ffaa22521a7..000000000000 --- a/stubs/singledispatch/@tests/stubtest_allowlist.txt +++ /dev/null @@ -1,2 +0,0 @@ -# TODO: missing from stub -singledispatch.__all__ diff --git a/stubs/singledispatch/singledispatch.pyi b/stubs/singledispatch/singledispatch.pyi index bd3671473036..8a04562c1ac6 100644 --- a/stubs/singledispatch/singledispatch.pyi +++ b/stubs/singledispatch/singledispatch.pyi @@ -29,3 +29,5 @@ class singledispatchmethod(Generic[_T]): @overload def register(self, cls: type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... def __get__(self, obj: _S, cls: type[_S] | None = ...) -> Callable[..., _T]: ... 
+ +__all__ = ["singledispatch", "singledispatchmethod"] From f50301af102d629687e6181771f9684d1c35a114 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 17:16:33 +0400 Subject: [PATCH 138/388] Improve `jwcrypto` (#13715) --- stubs/jwcrypto/jwcrypto/common.pyi | 12 +- stubs/jwcrypto/jwcrypto/jwe.pyi | 32 +++-- stubs/jwcrypto/jwcrypto/jwk.pyi | 217 ++++++++++++++++++++++------- stubs/jwcrypto/jwcrypto/jws.pyi | 42 ++++-- stubs/jwcrypto/jwcrypto/jwt.pyi | 11 +- 5 files changed, 220 insertions(+), 94 deletions(-) diff --git a/stubs/jwcrypto/jwcrypto/common.pyi b/stubs/jwcrypto/jwcrypto/common.pyi index 10ee692c3fa2..b1361916a804 100644 --- a/stubs/jwcrypto/jwcrypto/common.pyi +++ b/stubs/jwcrypto/jwcrypto/common.pyi @@ -1,7 +1,9 @@ -from _typeshed import Incomplete -from collections.abc import Iterator, MutableMapping +from collections.abc import Callable, Iterator, MutableMapping from typing import Any, NamedTuple +from jwcrypto.jwe import JWE +from jwcrypto.jws import JWS + def base64url_encode(payload: str | bytes) -> str: ... def base64url_decode(payload: str) -> bytes: ... def json_encode(string: str | bytes) -> str: ... @@ -36,11 +38,11 @@ class JWSEHeaderParameter(NamedTuple): description: str mustprotect: bool supported: bool - check_fn: Incomplete | None + check_fn: Callable[[JWS | JWE], bool] | None class JWSEHeaderRegistry(MutableMapping[str, JWSEHeaderParameter]): - def __init__(self, init_registry: Incomplete | None = None) -> None: ... - def check_header(self, h: str, value) -> bool: ... + def __init__(self, init_registry: dict[str, JWSEHeaderParameter] | None = None) -> None: ... + def check_header(self, h: str, value: JWS | JWE) -> bool: ... def __getitem__(self, key: str) -> JWSEHeaderParameter: ... def __iter__(self) -> Iterator[str]: ... def __delitem__(self, key: str) -> None: ... diff --git a/stubs/jwcrypto/jwcrypto/jwe.pyi b/stubs/jwcrypto/jwcrypto/jwe.pyi index 408b8850d522..69d48ab2d806 100644 --- a/stubs/jwcrypto/jwcrypto/jwe.pyi +++ b/stubs/jwcrypto/jwcrypto/jwe.pyi @@ -1,8 +1,10 @@ from _typeshed import Incomplete from collections.abc import Mapping, Sequence +from typing import Any +from typing_extensions import Self from jwcrypto import common -from jwcrypto.common import JWException, JWSEHeaderParameter +from jwcrypto.common import JWException, JWSEHeaderParameter, JWSEHeaderRegistry from jwcrypto.jwk import JWK, JWKSet default_max_compressed_size: int @@ -18,34 +20,34 @@ InvalidJWEKeyType = common.InvalidJWEKeyType InvalidJWEOperation = common.InvalidJWEOperation class JWE: - objects: Incomplete - plaintext: Incomplete - header_registry: Incomplete + objects: dict[str, Any] + plaintext: bytes | None + header_registry: JWSEHeaderRegistry cek: Incomplete - decryptlog: Incomplete + decryptlog: list[str] | None def __init__( self, - plaintext: bytes | None = None, + plaintext: str | bytes | None = None, protected: str | None = None, unprotected: str | None = None, aad: bytes | None = None, - algs: Incomplete | None = None, + algs: list[str] | None = None, recipient: str | None = None, - header: Incomplete | None = None, - header_registry: Incomplete | None = None, + header: str | None = None, + header_registry: Mapping[str, JWSEHeaderParameter] | None = None, ) -> None: ... @property - def allowed_algs(self): ... + def allowed_algs(self) -> list[str]: ... @allowed_algs.setter - def allowed_algs(self, algs) -> None: ... - def add_recipient(self, key, header: Incomplete | None = None) -> None: ... - def serialize(self, compact: bool = False): ... 
+ def allowed_algs(self, algs: list[str]) -> None: ... + def add_recipient(self, key: JWK, header: dict[str, Any] | str | None = None) -> None: ... + def serialize(self, compact: bool = False) -> str: ... def decrypt(self, key: JWK | JWKSet) -> None: ... def deserialize(self, raw_jwe: str | bytes, key: JWK | JWKSet | None = None) -> None: ... @property - def payload(self): ... + def payload(self) -> bytes: ... @property def jose_header(self) -> dict[Incomplete, Incomplete]: ... @classmethod - def from_jose_token(cls, token: str | bytes) -> JWE: ... + def from_jose_token(cls, token: str | bytes) -> Self: ... def __eq__(self, other: object) -> bool: ... diff --git a/stubs/jwcrypto/jwcrypto/jwk.pyi b/stubs/jwcrypto/jwcrypto/jwk.pyi index f5638cddf622..9b3d912c3686 100644 --- a/stubs/jwcrypto/jwcrypto/jwk.pyi +++ b/stubs/jwcrypto/jwcrypto/jwk.pyi @@ -1,8 +1,10 @@ -from _typeshed import Incomplete -from collections.abc import Sequence +from collections.abc import Callable, Sequence from enum import Enum -from typing import Any, NamedTuple +from typing import Any, Literal, NamedTuple, TypeVar, overload +from typing_extensions import Self, deprecated +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec, rsa from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey as Ed448PrivateKey, Ed448PublicKey as Ed448PublicKey from cryptography.hazmat.primitives.asymmetric.ed25519 import ( Ed25519PrivateKey as Ed25519PrivateKey, @@ -15,6 +17,8 @@ from cryptography.hazmat.primitives.asymmetric.x25519 import ( ) from jwcrypto.common import JWException +_T = TypeVar("_T") + class UnimplementedOKPCurveKey: @classmethod def generate(cls) -> None: ... @@ -24,9 +28,25 @@ class UnimplementedOKPCurveKey: def from_private_bytes(cls, *args) -> None: ... 
ImplementedOkpCurves: Sequence[str] -priv_bytes: Incomplete +priv_bytes: Callable[[bytes], X25519PrivateKey] | None + +class _Ed25519_CURVE(NamedTuple): + pubkey: UnimplementedOKPCurveKey + privkey: UnimplementedOKPCurveKey + +class _Ed448_CURVE(NamedTuple): + pubkey: UnimplementedOKPCurveKey + privkey: UnimplementedOKPCurveKey -JWKTypesRegistry: Incomplete +class _X25519_CURVE(NamedTuple): + pubkey: UnimplementedOKPCurveKey + privkey: UnimplementedOKPCurveKey + +class _X448_CURVE(NamedTuple): + pubkey: UnimplementedOKPCurveKey + privkey: UnimplementedOKPCurveKey + +JWKTypesRegistry: dict[str, str] class ParmType(Enum): name = "A string with a name" # pyright: ignore[reportAssignmentType] @@ -35,48 +55,80 @@ class ParmType(Enum): unsupported = "Unsupported Parameter" class JWKParameter(NamedTuple): - description: Incomplete - public: Incomplete - required: Incomplete - type: Incomplete - -JWKValuesRegistry: Incomplete -JWKParamsRegistry: Incomplete -JWKEllipticCurveRegistry: Incomplete -JWKUseRegistry: Incomplete -JWKOperationsRegistry: Incomplete -JWKpycaCurveMap: Incomplete -IANANamedInformationHashAlgorithmRegistry: Incomplete + description: str + public: bool + required: bool | None + type: ParmType | None + +JWKValuesRegistry: dict[str, dict[str, JWKParameter]] +JWKParamsRegistry: dict[str, JWKParameter] +JWKEllipticCurveRegistry: dict[str, str] +JWKUseRegistry: dict[str, str] +JWKOperationsRegistry: dict[str, str] +JWKpycaCurveMap: dict[str, str] +IANANamedInformationHashAlgorithmRegistry: dict[ + str, + hashes.SHA256 + | hashes.SHA384 + | hashes.SHA512 + | hashes.SHA3_224 + | hashes.SHA3_256 + | hashes.SHA3_384 + | hashes.SHA3_512 + | hashes.BLAKE2s + | hashes.BLAKE2b + | None, +] class InvalidJWKType(JWException): - value: Incomplete - def __init__(self, value: Incomplete | None = None) -> None: ... + value: str | None + def __init__(self, value: str | None = None) -> None: ... class InvalidJWKUsage(JWException): - value: Incomplete - use: Incomplete - def __init__(self, use, value) -> None: ... + value: str + use: str + def __init__(self, use: str, value: str) -> None: ... class InvalidJWKOperation(JWException): - op: Incomplete - values: Incomplete - def __init__(self, operation, values) -> None: ... + op: str + values: Sequence[str] + def __init__(self, operation: str, values: Sequence[str]) -> None: ... class InvalidJWKValue(JWException): ... class JWK(dict[str, Any]): def __init__(self, **kwargs) -> None: ... @classmethod - def generate(cls, **kwargs): ... + def generate(cls, **kwargs) -> Self: ... def generate_key(self, **params) -> None: ... def import_key(self, **kwargs) -> None: ... @classmethod - def from_json(cls, key): ... - def export(self, private_key: bool = True, as_dict: bool = False): ... - def export_public(self, as_dict: bool = False): ... - def export_private(self, as_dict: bool = False): ... - def export_symmetric(self, as_dict: bool = False): ... - def public(self): ... + def from_json(cls, key) -> Self: ... + @overload + def export(self, private_key: bool = True, as_dict: Literal[False] = False) -> str: ... + @overload + def export(self, private_key: bool, as_dict: Literal[True]) -> dict[str, Any]: ... + @overload + def export(self, *, as_dict: Literal[True]) -> dict[str, Any]: ... + @overload + def export_public(self, as_dict: Literal[False] = False) -> str: ... + @overload + def export_public(self, as_dict: Literal[True]) -> dict[str, Any]: ... + @overload + def export_public(self, as_dict: bool = False) -> str | dict[str, Any]: ... 
+ @overload + def export_private(self, as_dict: Literal[False] = False) -> str: ... + @overload + def export_private(self, as_dict: Literal[True]) -> dict[str, Any]: ... + @overload + def export_private(self, as_dict: bool = False) -> str | dict[str, Any]: ... + @overload + def export_symmetric(self, as_dict: Literal[False] = False) -> str: ... + @overload + def export_symmetric(self, as_dict: Literal[True]) -> dict[str, Any]: ... + @overload + def export_symmetric(self, as_dict: bool = False) -> str | dict[str, Any]: ... + def public(self) -> Self: ... @property def has_public(self) -> bool: ... @property @@ -84,32 +136,89 @@ class JWK(dict[str, Any]): @property def is_symmetric(self) -> bool: ... @property - def key_type(self): ... + @deprecated("") + def key_type(self) -> str | None: ... @property - def key_id(self): ... + @deprecated("") + def key_id(self) -> str | None: ... @property - def key_curve(self): ... - def get_curve(self, arg): ... - def get_op_key(self, operation: Incomplete | None = None, arg: Incomplete | None = None): ... - def import_from_pyca(self, key) -> None: ... - def import_from_pem(self, data, password: Incomplete | None = None, kid: Incomplete | None = None) -> None: ... - def export_to_pem(self, private_key: bool = False, password: bool = False): ... + @deprecated("") + def key_curve(self) -> str | None: ... + @deprecated("") + def get_curve( + self, arg: str + ) -> ( + ec.SECP256R1 + | ec.SECP384R1 + | ec.SECP521R1 + | ec.SECP256K1 + | ec.BrainpoolP256R1 + | ec.BrainpoolP384R1 + | ec.BrainpoolP512R1 + | _Ed25519_CURVE + | _Ed448_CURVE + | _X25519_CURVE + | _X448_CURVE + ): ... + def get_op_key( + self, operation: str | None = None, arg: str | None = None + ) -> str | rsa.RSAPrivateKey | rsa.RSAPublicKey | ec.EllipticCurvePrivateKey | ec.EllipticCurvePublicKey | None: ... + def import_from_pyca( + self, + key: ( + rsa.RSAPrivateKey + | rsa.RSAPublicKey + | ec.EllipticCurvePrivateKey + | ec.EllipticCurvePublicKey + | Ed25519PrivateKey + | Ed448PrivateKey + | X25519PrivateKey + | Ed25519PublicKey + | Ed448PublicKey + | X25519PublicKey + ), + ) -> None: ... + def import_from_pem(self, data: bytes, password: bytes | None = None, kid: str | None = None) -> None: ... + def export_to_pem(self, private_key: bool = False, password: bool = False) -> bytes: ... @classmethod - def from_pyca(cls, key): ... + def from_pyca( + cls, + key: ( + rsa.RSAPrivateKey + | rsa.RSAPublicKey + | ec.EllipticCurvePrivateKey + | ec.EllipticCurvePublicKey + | Ed25519PrivateKey + | Ed448PrivateKey + | X25519PrivateKey + | Ed25519PublicKey + | Ed448PublicKey + | X25519PublicKey + ), + ) -> Self: ... @classmethod - def from_pem(cls, data, password: Incomplete | None = None): ... - def thumbprint(self, hashalg=...): ... - def thumbprint_uri(self, hname: str = "sha-256"): ... + def from_pem(cls, data: bytes, password: bytes | None = None) -> Self: ... + def thumbprint(self, hashalg: hashes.HashAlgorithm = ...) -> str: ... + def thumbprint_uri(self, hname: str = "sha-256") -> str: ... @classmethod - def from_password(cls, password): ... - def setdefault(self, key: str, default: Incomplete | None = None): ... + def from_password(cls, password: str) -> Self: ... + def setdefault(self, key: str, default: _T | None = None) -> _T: ... -class JWKSet(dict[str, Any]): - def add(self, elem) -> None: ... - def export(self, private_keys: bool = True, as_dict: bool = False): ... - def import_keyset(self, keyset) -> None: ... 
+class JWKSet(dict[Literal["keys"], set[JWK]]): + @overload + def __setitem__(self, key: Literal["keys"], val: JWK) -> None: ... + @overload + def __setitem__(self, key: str, val: Any) -> None: ... + def add(self, elem: JWK) -> None: ... + @overload + def export(self, private_keys: bool = True, as_dict: Literal[False] = False) -> str: ... + @overload + def export(self, private_keys: bool, as_dict: Literal[True]) -> dict[str, Any]: ... + @overload + def export(self, *, as_dict: Literal[True]) -> dict[str, Any]: ... + def import_keyset(self, keyset: str | bytes) -> None: ... @classmethod - def from_json(cls, keyset): ... - def get_key(self, kid): ... - def get_keys(self, kid): ... - def setdefault(self, key: str, default: Incomplete | None = None): ... + def from_json(cls, keyset: str | bytes) -> Self: ... + def get_key(self, kid: str) -> JWK | None: ... + def get_keys(self, kid: str) -> set[JWK]: ... + def setdefault(self, key: str, default: _T | None = None) -> _T: ... diff --git a/stubs/jwcrypto/jwcrypto/jws.pyi b/stubs/jwcrypto/jwcrypto/jws.pyi index d1bb2f8a2c70..f5d3fab33a28 100644 --- a/stubs/jwcrypto/jwcrypto/jws.pyi +++ b/stubs/jwcrypto/jwcrypto/jws.pyi @@ -1,9 +1,14 @@ from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any, Literal +from typing_extensions import Self -from jwcrypto.common import JWException +from jwcrypto.common import JWException, JWSEHeaderParameter +from jwcrypto.jwa import JWAAlgorithm +from jwcrypto.jwk import JWK, JWKSet -JWSHeaderRegistry: Incomplete -default_allowed_algs: Incomplete +JWSHeaderRegistry: Mapping[str, JWSEHeaderParameter] +default_allowed_algs: list[str] class InvalidJWSSignature(JWException): def __init__(self, message: str | None = None, exception: BaseException | None = None) -> None: ... @@ -15,19 +20,26 @@ class InvalidJWSOperation(JWException): def __init__(self, message: str | None = None, exception: BaseException | None = None) -> None: ... class JWSCore: - alg: Incomplete - engine: Incomplete - key: Incomplete - header: Incomplete - protected: Incomplete - payload: Incomplete - def __init__(self, alg, key, header, payload, algs: Incomplete | None = None) -> None: ... - def sign(self): ... - def verify(self, signature): ... + alg: str + engine: JWAAlgorithm + key: JWK | JWKSet + header: dict[str, Any] + protected: str + payload: bytes + def __init__( + self, + alg: str, + key: JWK | JWKSet, + header: dict[str, Any] | str | None, + payload: str | bytes, + algs: list[str] | None = None, + ) -> None: ... + def sign(self) -> dict[str, str | bytes]: ... + def verify(self, signature: bytes) -> Literal[True]: ... class JWS: objects: Incomplete - verifylog: Incomplete + verifylog: list[str] | None header_registry: Incomplete def __init__(self, payload: Incomplete | None = None, header_registry: Incomplete | None = None) -> None: ... @property @@ -41,12 +53,12 @@ class JWS: def add_signature( self, key, alg: Incomplete | None = None, protected: Incomplete | None = None, header: Incomplete | None = None ) -> None: ... - def serialize(self, compact: bool = False): ... + def serialize(self, compact: bool = False) -> str: ... @property def payload(self): ... def detach_payload(self) -> None: ... @property def jose_header(self): ... @classmethod - def from_jose_token(cls, token): ... + def from_jose_token(cls, token: str | bytes) -> Self: ... def __eq__(self, other: object) -> bool: ... 
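
A quick illustration (not part of the patch) of how the Literal-based
overloads on JWK.export above are expected to resolve for a type checker.
The generate() keywords kty/size come from jwcrypto's runtime API; the stub
itself only declares **kwargs, so treat that call as an assumption:

    from jwcrypto.jwk import JWK

    key = JWK.generate(kty="oct", size=256)  # classmethod returns Self, inferred as JWK

    as_json = key.export()                 # as_dict defaults to Literal[False], result is str
    as_mapping = key.export(as_dict=True)  # keyword-only Literal[True] overload, result is dict[str, Any]

The same overload pattern covers export_public/export_private/export_symmetric
and JWKSet.export.
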
diff --git a/stubs/jwcrypto/jwcrypto/jwt.pyi b/stubs/jwcrypto/jwcrypto/jwt.pyi index 4bd2a5b79144..0ede9c5c633f 100644 --- a/stubs/jwcrypto/jwcrypto/jwt.pyi +++ b/stubs/jwcrypto/jwcrypto/jwt.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete from collections.abc import Mapping +from typing import Any from typing_extensions import deprecated from jwcrypto.common import JWException, JWKeyNotFound @@ -31,11 +32,11 @@ class JWTMissingKey(JWKeyNotFound): def __init__(self, message: str | None = None, exception: BaseException | None = None) -> None: ... class JWT: - deserializelog: Incomplete + deserializelog: list[str] | None def __init__( self, - header: dict[Incomplete, Incomplete] | str | None = None, - claims: dict[Incomplete, Incomplete] | str | None = None, + header: dict[str, Any] | str | None = None, + claims: dict[str, Any] | str | None = None, jwt: Incomplete | None = None, key: JWK | JWKSet | None = None, algs: Incomplete | None = None, @@ -44,9 +45,9 @@ class JWT: expected_type: Incomplete | None = None, ) -> None: ... @property - def header(self): ... + def header(self) -> str: ... @header.setter - def header(self, h) -> None: ... + def header(self, h: dict[str, Any] | str) -> None: ... @property def claims(self): ... @claims.setter From 6507c717ba5b14a9da0cfae6bbb16a18b8ce5e7f Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 17:40:20 +0400 Subject: [PATCH 139/388] Improve `cffi` (#13710) --- stubs/cffi/@tests/stubtest_allowlist.txt | 3 - stubs/cffi/cffi/__init__.pyi | 10 +- stubs/cffi/cffi/backend_ctypes.pyi | 3 +- stubs/cffi/cffi/cffi_opcode.pyi | 176 +++++++++++------------ stubs/cffi/cffi/ffiplatform.pyi | 9 +- stubs/cffi/cffi/model.pyi | 114 +++++++-------- stubs/cffi/cffi/pkgconfig.pyi | 8 +- stubs/cffi/cffi/recompiler.pyi | 66 ++++----- stubs/cffi/cffi/verifier.pyi | 40 +++--- 9 files changed, 221 insertions(+), 208 deletions(-) diff --git a/stubs/cffi/@tests/stubtest_allowlist.txt b/stubs/cffi/@tests/stubtest_allowlist.txt index 87052530c6df..908583146f5f 100644 --- a/stubs/cffi/@tests/stubtest_allowlist.txt +++ b/stubs/cffi/@tests/stubtest_allowlist.txt @@ -1,6 +1,3 @@ -# TODO: missing from stub -cffi.__all__ - # added dynamically and not detected by stubtest cffi.(api.)?FFI.CData cffi.(api.)?FFI.CType diff --git a/stubs/cffi/cffi/__init__.pyi b/stubs/cffi/cffi/__init__.pyi index 851066f6184c..b549eb5b1ea9 100644 --- a/stubs/cffi/cffi/__init__.pyi +++ b/stubs/cffi/cffi/__init__.pyi @@ -1,11 +1,15 @@ +from typing import Final + from .api import FFI as FFI from .error import ( CDefError as CDefError, FFIError as FFIError, + PkgConfigError as PkgConfigError, VerificationError as VerificationError, VerificationMissing as VerificationMissing, ) -__version__: str -__version_info__: tuple[int, int, int] -__version_verifier_modules__: str +__all__ = ["FFI", "VerificationError", "VerificationMissing", "CDefError", "FFIError"] +__version__: Final[str] +__version_info__: Final[tuple[int, int, int]] +__version_verifier_modules__: Final[str] diff --git a/stubs/cffi/cffi/backend_ctypes.pyi b/stubs/cffi/cffi/backend_ctypes.pyi index 9f123f1f48b0..800ddb7e7011 100644 --- a/stubs/cffi/cffi/backend_ctypes.pyi +++ b/stubs/cffi/cffi/backend_ctypes.pyi @@ -1,9 +1,10 @@ from _typeshed import Incomplete +from collections.abc import Callable unicode = str long = int xrange = range -bytechr: Incomplete +bytechr: Callable[[float], bytes] class CTypesType(type): ... 
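
A minimal sketch of what the __all__ and Final additions to cffi/__init__.pyi
above mean for a type checker; nothing here is new cffi behaviour, and the
names used are exactly those declared in the stub:

    import cffi

    ffi = cffi.FFI()                 # FFI is declared in the stub and listed in __all__
    version: str = cffi.__version__  # Final[str], so it reads as a plain str at use sites

    # Rebinding the pinned metadata is now expected to be rejected:
    # cffi.__version__ = "2.0"       # error: cannot assign to a Final name
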
diff --git a/stubs/cffi/cffi/cffi_opcode.pyi b/stubs/cffi/cffi/cffi_opcode.pyi index 364a0808ddc7..a992eb339797 100644 --- a/stubs/cffi/cffi/cffi_opcode.pyi +++ b/stubs/cffi/cffi/cffi_opcode.pyi @@ -1,92 +1,92 @@ -from _typeshed import Incomplete +from typing import Final class CffiOp: - op: Incomplete - arg: Incomplete - def __init__(self, op, arg) -> None: ... - def as_c_expr(self): ... - def as_python_bytes(self): ... + op: int | None + arg: str | None + def __init__(self, op: int | None, arg: str | None) -> None: ... + def as_c_expr(self) -> str: ... + def as_python_bytes(self) -> str: ... -def format_four_bytes(num): ... +def format_four_bytes(num: int) -> str: ... -OP_PRIMITIVE: int -OP_POINTER: int -OP_ARRAY: int -OP_OPEN_ARRAY: int -OP_STRUCT_UNION: int -OP_ENUM: int -OP_FUNCTION: int -OP_FUNCTION_END: int -OP_NOOP: int -OP_BITFIELD: int -OP_TYPENAME: int -OP_CPYTHON_BLTN_V: int -OP_CPYTHON_BLTN_N: int -OP_CPYTHON_BLTN_O: int -OP_CONSTANT: int -OP_CONSTANT_INT: int -OP_GLOBAL_VAR: int -OP_DLOPEN_FUNC: int -OP_DLOPEN_CONST: int -OP_GLOBAL_VAR_F: int -OP_EXTERN_PYTHON: int -PRIM_VOID: int -PRIM_BOOL: int -PRIM_CHAR: int -PRIM_SCHAR: int -PRIM_UCHAR: int -PRIM_SHORT: int -PRIM_USHORT: int -PRIM_INT: int -PRIM_UINT: int -PRIM_LONG: int -PRIM_ULONG: int -PRIM_LONGLONG: int -PRIM_ULONGLONG: int -PRIM_FLOAT: int -PRIM_DOUBLE: int -PRIM_LONGDOUBLE: int -PRIM_WCHAR: int -PRIM_INT8: int -PRIM_UINT8: int -PRIM_INT16: int -PRIM_UINT16: int -PRIM_INT32: int -PRIM_UINT32: int -PRIM_INT64: int -PRIM_UINT64: int -PRIM_INTPTR: int -PRIM_UINTPTR: int -PRIM_PTRDIFF: int -PRIM_SIZE: int -PRIM_SSIZE: int -PRIM_INT_LEAST8: int -PRIM_UINT_LEAST8: int -PRIM_INT_LEAST16: int -PRIM_UINT_LEAST16: int -PRIM_INT_LEAST32: int -PRIM_UINT_LEAST32: int -PRIM_INT_LEAST64: int -PRIM_UINT_LEAST64: int -PRIM_INT_FAST8: int -PRIM_UINT_FAST8: int -PRIM_INT_FAST16: int -PRIM_UINT_FAST16: int -PRIM_INT_FAST32: int -PRIM_UINT_FAST32: int -PRIM_INT_FAST64: int -PRIM_UINT_FAST64: int -PRIM_INTMAX: int -PRIM_UINTMAX: int -PRIM_FLOATCOMPLEX: int -PRIM_DOUBLECOMPLEX: int -PRIM_CHAR16: int -PRIM_CHAR32: int -PRIMITIVE_TO_INDEX: Incomplete -F_UNION: int -F_CHECK_FIELDS: int -F_PACKED: int -F_EXTERNAL: int -F_OPAQUE: int -G_FLAGS: Incomplete -CLASS_NAME: Incomplete +OP_PRIMITIVE: Final = 1 +OP_POINTER: Final = 3 +OP_ARRAY: Final = 5 +OP_OPEN_ARRAY: Final = 7 +OP_STRUCT_UNION: Final = 9 +OP_ENUM: Final = 11 +OP_FUNCTION: Final = 13 +OP_FUNCTION_END: Final = 15 +OP_NOOP: Final = 17 +OP_BITFIELD: Final = 19 +OP_TYPENAME: Final = 21 +OP_CPYTHON_BLTN_V: Final = 23 +OP_CPYTHON_BLTN_N: Final = 25 +OP_CPYTHON_BLTN_O: Final = 27 +OP_CONSTANT: Final = 29 +OP_CONSTANT_INT: Final = 31 +OP_GLOBAL_VAR: Final = 33 +OP_DLOPEN_FUNC: Final = 35 +OP_DLOPEN_CONST: Final = 37 +OP_GLOBAL_VAR_F: Final = 39 +OP_EXTERN_PYTHON: Final = 41 +PRIM_VOID: Final = 0 +PRIM_BOOL: Final = 1 +PRIM_CHAR: Final = 2 +PRIM_SCHAR: Final = 3 +PRIM_UCHAR: Final = 4 +PRIM_SHORT: Final = 5 +PRIM_USHORT: Final = 6 +PRIM_INT: Final = 7 +PRIM_UINT: Final = 8 +PRIM_LONG: Final = 9 +PRIM_ULONG: Final = 10 +PRIM_LONGLONG: Final = 11 +PRIM_ULONGLONG: Final = 12 +PRIM_FLOAT: Final = 13 +PRIM_DOUBLE: Final = 14 +PRIM_LONGDOUBLE: Final = 15 +PRIM_WCHAR: Final = 16 +PRIM_INT8: Final = 17 +PRIM_UINT8: Final = 18 +PRIM_INT16: Final = 19 +PRIM_UINT16: Final = 20 +PRIM_INT32: Final = 21 +PRIM_UINT32: Final = 22 +PRIM_INT64: Final = 23 +PRIM_UINT64: Final = 24 +PRIM_INTPTR: Final = 25 +PRIM_UINTPTR: Final = 26 +PRIM_PTRDIFF: Final = 27 +PRIM_SIZE: Final = 28 +PRIM_SSIZE: Final = 29 
+PRIM_INT_LEAST8: Final = 30 +PRIM_UINT_LEAST8: Final = 31 +PRIM_INT_LEAST16: Final = 32 +PRIM_UINT_LEAST16: Final = 33 +PRIM_INT_LEAST32: Final = 34 +PRIM_UINT_LEAST32: Final = 35 +PRIM_INT_LEAST64: Final = 36 +PRIM_UINT_LEAST64: Final = 37 +PRIM_INT_FAST8: Final = 38 +PRIM_UINT_FAST8: Final = 39 +PRIM_INT_FAST16: Final = 40 +PRIM_UINT_FAST16: Final = 41 +PRIM_INT_FAST32: Final = 42 +PRIM_UINT_FAST32: Final = 43 +PRIM_INT_FAST64: Final = 44 +PRIM_UINT_FAST64: Final = 45 +PRIM_INTMAX: Final = 46 +PRIM_UINTMAX: Final = 47 +PRIM_FLOATCOMPLEX: Final = 48 +PRIM_DOUBLECOMPLEX: Final = 49 +PRIM_CHAR16: Final = 50 +PRIM_CHAR32: Final = 51 +PRIMITIVE_TO_INDEX: Final[dict[str, int]] +F_UNION: Final = 1 +F_CHECK_FIELDS: Final = 2 +F_PACKED: Final = 4 +F_EXTERNAL: Final = 8 +F_OPAQUE: Final = 16 +G_FLAGS: Final[dict[bytes, bytes]] +CLASS_NAME: Final[dict[int, str]] diff --git a/stubs/cffi/cffi/ffiplatform.pyi b/stubs/cffi/cffi/ffiplatform.pyi index 127454f6abf8..bb0e71bcb34c 100644 --- a/stubs/cffi/cffi/ffiplatform.pyi +++ b/stubs/cffi/cffi/ffiplatform.pyi @@ -1,11 +1,12 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, StrOrBytesPath +from typing import Any, Final -LIST_OF_FILE_NAMES: Incomplete +LIST_OF_FILE_NAMES: Final[list[str]] def get_extension(srcfilename, modname, sources=(), **kwds): ... def compile(tmpdir, ext, compiler_verbose: int = 0, debug: Incomplete | None = None): ... -def maybe_relative_path(path): ... +def maybe_relative_path(path: StrOrBytesPath) -> StrOrBytesPath | str: ... int_or_long = int -def flatten(x): ... +def flatten(x: int | str | list[Any] | tuple[Any] | dict[Any, Any]) -> str: ... diff --git a/stubs/cffi/cffi/model.pyi b/stubs/cffi/cffi/model.pyi index c9ab0639bb44..3c462dfa8661 100644 --- a/stubs/cffi/cffi/model.pyi +++ b/stubs/cffi/cffi/model.pyi @@ -1,26 +1,28 @@ +from _thread import LockType from _typeshed import Incomplete from collections.abc import Generator +from typing import Final from .error import CDefError as CDefError, VerificationError as VerificationError, VerificationMissing as VerificationMissing from .lock import allocate_lock as allocate_lock -Q_CONST: int -Q_RESTRICT: int -Q_VOLATILE: int +Q_CONST: Final = 1 +Q_RESTRICT: Final = 2 +Q_VOLATILE: Final = 4 -def qualify(quals, replace_with): ... +def qualify(quals: int, replace_with: str) -> str: ... class BaseTypeByIdentity: is_array_type: bool is_raw_function: bool - def get_c_name(self, replace_with: str = "", context: str = "a C file", quals: int = 0): ... - def has_c_name(self): ... - def is_integer_type(self): ... + def get_c_name(self, replace_with: str = "", context: str = "a C file", quals: int = 0) -> str: ... + def has_c_name(self) -> bool: ... + def is_integer_type(self) -> bool: ... def get_cached_btype(self, ffi, finishlist, can_delay: bool = False): ... class BaseType(BaseTypeByIdentity): - def __eq__(self, other): ... - def __ne__(self, other): ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... def __hash__(self) -> int: ... class VoidType(BaseType): @@ -28,33 +30,33 @@ class VoidType(BaseType): def __init__(self) -> None: ... def build_backend_type(self, ffi, finishlist): ... -void_type: Incomplete +void_type: VoidType class BasePrimitiveType(BaseType): - def is_complex_type(self): ... + def is_complex_type(self) -> bool: ... class PrimitiveType(BasePrimitiveType): - ALL_PRIMITIVE_TYPES: Incomplete - name: Incomplete - c_name_with_marker: Incomplete - def __init__(self, name) -> None: ... 
- def is_char_type(self): ... - def is_integer_type(self): ... - def is_float_type(self): ... - def is_complex_type(self): ... + ALL_PRIMITIVE_TYPES: dict[str, str] + name: str + c_name_with_marker: str + def __init__(self, name: str) -> None: ... + def is_char_type(self) -> bool: ... + def is_integer_type(self) -> bool: ... + def is_float_type(self) -> bool: ... + def is_complex_type(self) -> bool: ... def build_backend_type(self, ffi, finishlist): ... class UnknownIntegerType(BasePrimitiveType): - name: Incomplete - c_name_with_marker: Incomplete - def __init__(self, name) -> None: ... - def is_integer_type(self): ... + name: str + c_name_with_marker: str + def __init__(self, name: str) -> None: ... + def is_integer_type(self) -> bool: ... def build_backend_type(self, ffi, finishlist) -> None: ... class UnknownFloatType(BasePrimitiveType): - name: Incomplete - c_name_with_marker: Incomplete - def __init__(self, name) -> None: ... + name: str + c_name_with_marker: str + def __init__(self, name: str) -> None: ... def build_backend_type(self, ffi, finishlist) -> None: ... class BaseFunctionType(BaseType): @@ -62,54 +64,54 @@ class BaseFunctionType(BaseType): result: Incomplete ellipsis: Incomplete abi: Incomplete - c_name_with_marker: Incomplete + c_name_with_marker: str def __init__(self, args, result, ellipsis, abi: Incomplete | None = None) -> None: ... class RawFunctionType(BaseFunctionType): is_raw_function: bool def build_backend_type(self, ffi, finishlist) -> None: ... - def as_function_pointer(self): ... + def as_function_pointer(self) -> FunctionPtrType: ... class FunctionPtrType(BaseFunctionType): def build_backend_type(self, ffi, finishlist): ... - def as_raw_function(self): ... + def as_raw_function(self) -> RawFunctionType: ... class PointerType(BaseType): - totype: Incomplete - quals: Incomplete - c_name_with_marker: Incomplete - def __init__(self, totype, quals: int = 0) -> None: ... + totype: BaseTypeByIdentity + quals: int + c_name_with_marker: str + def __init__(self, totype: BaseTypeByIdentity, quals: int = 0) -> None: ... def build_backend_type(self, ffi, finishlist): ... -voidp_type: Incomplete +voidp_type: PointerType -def ConstPointerType(totype): ... +def ConstPointerType(totype: BaseTypeByIdentity) -> PointerType: ... -const_voidp_type: Incomplete +const_voidp_type: PointerType class NamedPointerType(PointerType): - name: Incomplete - c_name_with_marker: Incomplete - def __init__(self, totype, name, quals: int = 0) -> None: ... + name: str + c_name_with_marker: str + def __init__(self, totype: BaseTypeByIdentity, name: str, quals: int = 0) -> None: ... class ArrayType(BaseType): is_array_type: bool item: Incomplete - length: Incomplete - c_name_with_marker: Incomplete - def __init__(self, item, length) -> None: ... - def length_is_unknown(self): ... - def resolve_length(self, newlength): ... + length: str | None + c_name_with_marker: str + def __init__(self, item, length: str | None) -> None: ... + def length_is_unknown(self) -> bool: ... + def resolve_length(self, newlength: str | None) -> ArrayType: ... def build_backend_type(self, ffi, finishlist): ... -char_array_type: Incomplete +char_array_type: ArrayType class StructOrUnionOrEnum(BaseTypeByIdentity): - forcename: Incomplete - c_name_with_marker: Incomplete + forcename: str | None + c_name_with_marker: str def build_c_name_with_marker(self) -> None: ... - def force_the_name(self, forcename) -> None: ... - def get_official_name(self): ... + def force_the_name(self, forcename: str | None) -> None: ... 
+ def get_official_name(self) -> str: ... class StructOrUnion(StructOrUnionOrEnum): fixedlayout: Incomplete @@ -122,7 +124,7 @@ class StructOrUnion(StructOrUnionOrEnum): fldbitsize: Incomplete fldquals: Incomplete def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals: Incomplete | None = None) -> None: ... - def anonymous_struct_fields(self) -> Generator[Incomplete, None, None]: ... + def anonymous_struct_fields(self) -> Generator[StructOrUnion, None, None]: ... def enumfields(self, expand_anonymous_struct_union: bool = True) -> Generator[Incomplete, None, None]: ... def force_flatten(self) -> None: ... def get_cached_btype(self, ffi, finishlist, can_delay: bool = False): ... @@ -145,18 +147,18 @@ class EnumType(StructOrUnionOrEnum): enumvalues: Incomplete baseinttype: Incomplete def __init__(self, name, enumerators, enumvalues, baseinttype: Incomplete | None = None) -> None: ... - forcename: Incomplete - def force_the_name(self, forcename) -> None: ... + forcename: str | None + def force_the_name(self, forcename: str | None) -> None: ... def check_not_partial(self) -> None: ... def build_backend_type(self, ffi, finishlist): ... def build_baseinttype(self, ffi, finishlist): ... -def unknown_type(name, structname: Incomplete | None = None): ... -def unknown_ptr_type(name, structname: Incomplete | None = None): ... +def unknown_type(name: str, structname: str | None = None) -> StructType: ... +def unknown_ptr_type(name: str, structname: str | None = None) -> NamedPointerType: ... -global_lock: Incomplete +global_lock: LockType def get_typecache(backend): ... def global_cache(srctype, ffi, funcname, *args, **kwds): ... def pointer_cache(ffi, BType): ... -def attach_exception_info(e, name) -> None: ... +def attach_exception_info(e, name: str) -> None: ... diff --git a/stubs/cffi/cffi/pkgconfig.pyi b/stubs/cffi/cffi/pkgconfig.pyi index 081d923e3d8e..4c4af8283e6e 100644 --- a/stubs/cffi/cffi/pkgconfig.pyi +++ b/stubs/cffi/cffi/pkgconfig.pyi @@ -1,3 +1,5 @@ -def merge_flags(cfg1, cfg2): ... -def call(libname, flag, encoding="utf-8"): ... -def flags_from_pkgconfig(libs): ... +from collections.abc import Sequence + +def merge_flags(cfg1: dict[str, list[str]], cfg2: dict[str, list[str]]) -> dict[str, list[str]]: ... +def call(libname: str, flag: str, encoding: str = ...) -> str: ... +def flags_from_pkgconfig(libs: Sequence[str]) -> dict[str, list[str]]: ... diff --git a/stubs/cffi/cffi/recompiler.pyi b/stubs/cffi/cffi/recompiler.pyi index d695efa00d43..d4c3d03bf48d 100644 --- a/stubs/cffi/cffi/recompiler.pyi +++ b/stubs/cffi/cffi/recompiler.pyi @@ -1,13 +1,15 @@ import io -from _typeshed import Incomplete +from _typeshed import Incomplete, StrPath +from typing import Final from typing_extensions import TypeAlias from .cffi_opcode import * +from .error import VerificationError as VerificationError -VERSION_BASE: int -VERSION_EMBEDDED: int -VERSION_CHAR16CHAR32: int -USE_LIMITED_API: Incomplete +VERSION_BASE: Final = 9729 +VERSION_EMBEDDED: Final = 9985 +VERSION_CHAR16CHAR32: Final = 10241 +USE_LIMITED_API: Final = True class GlobalExpr: name: Incomplete @@ -16,8 +18,8 @@ class GlobalExpr: size: Incomplete check_value: Incomplete def __init__(self, name, address, type_op, size: int = 0, check_value: int = 0) -> None: ... - def as_c_expr(self): ... - def as_python_expr(self): ... + def as_c_expr(self) -> str: ... + def as_python_expr(self) -> str: ... 
class FieldExpr: name: Incomplete @@ -26,9 +28,9 @@ class FieldExpr: fbitsize: Incomplete field_type_op: Incomplete def __init__(self, name, field_offset, field_size, fbitsize, field_type_op) -> None: ... - def as_c_expr(self): ... + def as_c_expr(self) -> str: ... def as_python_expr(self) -> None: ... - def as_field_python_expr(self): ... + def as_field_python_expr(self) -> str: ... class StructUnionExpr: name: Incomplete @@ -40,8 +42,8 @@ class StructUnionExpr: first_field_index: Incomplete c_fields: Incomplete def __init__(self, name, type_index, flags, size, alignment, comment, first_field_index, c_fields) -> None: ... - def as_c_expr(self): ... - def as_python_expr(self): ... + def as_c_expr(self) -> str: ... + def as_python_expr(self) -> str: ... class EnumExpr: name: Incomplete @@ -50,46 +52,46 @@ class EnumExpr: signed: Incomplete allenums: Incomplete def __init__(self, name, type_index, size, signed, allenums) -> None: ... - def as_c_expr(self): ... - def as_python_expr(self): ... + def as_c_expr(self) -> str: ... + def as_python_expr(self) -> str: ... class TypenameExpr: name: Incomplete type_index: Incomplete def __init__(self, name, type_index) -> None: ... - def as_c_expr(self): ... - def as_python_expr(self): ... + def as_c_expr(self) -> str: ... + def as_python_expr(self) -> str: ... class Recompiler: ffi: Incomplete - module_name: Incomplete - target_is_python: Incomplete - def __init__(self, ffi, module_name, target_is_python: bool = False) -> None: ... - def needs_version(self, ver) -> None: ... - cffi_types: Incomplete - def collect_type_table(self): ... - ALL_STEPS: Incomplete - def collect_step_tables(self): ... - def write_source_to_f(self, f, preamble) -> None: ... - def write_c_source_to_f(self, f, preamble) -> None: ... + module_name: str + target_is_python: bool + def __init__(self, ffi, module_name: str, target_is_python: bool = False) -> None: ... + def needs_version(self, ver: int) -> None: ... + cffi_types: list[Incomplete] + def collect_type_table(self) -> None: ... + ALL_STEPS: list[str] + def collect_step_tables(self) -> None: ... + def write_source_to_f(self, f, preamble: str) -> None: ... + def write_c_source_to_f(self, f, preamble: str) -> None: ... def write_py_source_to_f(self, f) -> None: ... NativeIO: TypeAlias = io.StringIO -def make_c_source(ffi, module_name, preamble, target_c_file, verbose: bool = False): ... -def make_py_source(ffi, module_name, target_py_file, verbose: bool = False): ... +def make_c_source(ffi, module_name: str, preamble: str, target_c_file, verbose: bool = False): ... +def make_py_source(ffi, module_name: str, target_py_file, verbose: bool = False): ... def recompile( ffi, - module_name, - preamble, + module_name: str | bytes, + preamble: str | None, tmpdir: str = ".", call_c_compiler: bool = True, c_file: Incomplete | None = None, source_extension: str = ".c", - extradir: Incomplete | None = None, + extradir: StrPath | None = None, compiler_verbose: int = 1, - target: Incomplete | None = None, - debug: Incomplete | None = None, + target: str | None = None, + debug: int | None = None, uses_ffiplatform: bool = True, **kwds, ): ... 
diff --git a/stubs/cffi/cffi/verifier.pyi b/stubs/cffi/cffi/verifier.pyi index f26e532a2422..a56e9579e413 100644 --- a/stubs/cffi/cffi/verifier.pyi +++ b/stubs/cffi/cffi/verifier.pyi @@ -1,5 +1,7 @@ import io -from _typeshed import Incomplete +import os +from _typeshed import Incomplete, StrPath +from typing import AnyStr from typing_extensions import TypeAlias NativeIO: TypeAlias = io.StringIO @@ -7,33 +9,35 @@ NativeIO: TypeAlias = io.StringIO class Verifier: ffi: Incomplete preamble: Incomplete - flags: Incomplete - kwds: Incomplete - tmpdir: Incomplete - sourcefilename: Incomplete - modulefilename: Incomplete - ext_package: Incomplete + flags: int | None + kwds: dict[str, list[str] | tuple[str]] + tmpdir: StrPath + sourcefilename: str + modulefilename: str + ext_package: str | None def __init__( self, ffi, preamble, - tmpdir: Incomplete | None = None, - modulename: Incomplete | None = None, - ext_package: Incomplete | None = None, + tmpdir: StrPath | None = None, + modulename: str | None = None, + ext_package: str | None = None, tag: str = "", force_generic_engine: bool = False, source_extension: str = ".c", - flags: Incomplete | None = None, - relative_to: Incomplete | None = None, - **kwds, + flags: int | None = None, + relative_to: os.PathLike[AnyStr] | None = None, + **kwds: list[str] | tuple[str], ) -> None: ... def write_source(self, file: Incomplete | None = None) -> None: ... def compile_module(self) -> None: ... def load_library(self): ... - def get_module_name(self): ... + def get_module_name(self) -> str: ... def get_extension(self): ... - def generates_python_module(self): ... - def make_relative_to(self, kwds, relative_to): ... + def generates_python_module(self) -> bool: ... + def make_relative_to( + self, kwds: dict[str, list[str] | tuple[str]], relative_to: os.PathLike[AnyStr] | None + ) -> dict[str, list[str] | tuple[str]]: ... -def set_tmpdir(dirname) -> None: ... -def cleanup_tmpdir(tmpdir: Incomplete | None = None, keep_so: bool = False) -> None: ... +def set_tmpdir(dirname: StrPath) -> None: ... +def cleanup_tmpdir(tmpdir: StrPath | None = None, keep_so: bool = False) -> None: ... 
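
The opcode and version constants in cffi_opcode.pyi and recompiler.pyi above
are now pinned as Final with their literal values instead of bare ints. A
minimal sketch of the effect, assuming mypy's rule that a Final name
initialised with a literal can be used where a Literal type is expected
(expects_base_version is a placeholder consumer, not cffi API):

    from typing import Literal

    from cffi.cffi_opcode import OP_PRIMITIVE  # Final = 1 in the stub
    from cffi.recompiler import VERSION_BASE   # Final = 9729 in the stub

    def expects_base_version(ver: Literal[9729]) -> None:
        ...  # placeholder, only here to show the narrowing

    expects_base_version(VERSION_BASE)  # accepted: the stub pins the exact value
    opcode: Literal[1] = OP_PRIMITIVE   # accepted for the same reason
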
From 93e967446b6679fa3cc821b53e57d30673d72d00 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 17:52:56 +0400 Subject: [PATCH 140/388] Improve `passlib.win32` (#13711) --- stubs/passlib/@tests/stubtest_allowlist.txt | 1 - stubs/passlib/passlib/handlers/windows.pyi | 11 +++++++++-- stubs/passlib/passlib/win32.pyi | 17 +++++++++++++---- 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/stubs/passlib/@tests/stubtest_allowlist.txt b/stubs/passlib/@tests/stubtest_allowlist.txt index f72936065580..de26f574555e 100644 --- a/stubs/passlib/@tests/stubtest_allowlist.txt +++ b/stubs/passlib/@tests/stubtest_allowlist.txt @@ -32,7 +32,6 @@ passlib.utils.decor.__all__ passlib.utils.handlers.__all__ passlib.utils.md4.__all__ passlib.utils.pbkdf2.__all__ -passlib.win32.__all__ # proxy module that uses some import magic incompatible with stubtest passlib.hash diff --git a/stubs/passlib/passlib/handlers/windows.pyi b/stubs/passlib/passlib/handlers/windows.pyi index 28cbdf2c60f3..c792ebec86a3 100644 --- a/stubs/passlib/passlib/handlers/windows.pyi +++ b/stubs/passlib/passlib/handlers/windows.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from typing import Any, ClassVar +from typing import Any, ClassVar, Literal, overload import passlib.utils.handlers as uh @@ -17,8 +17,15 @@ class nthash(uh.StaticHandler): checksum_size: ClassVar[int] @classmethod def raw(cls, secret): ... + @overload @classmethod - def raw_nthash(cls, secret, hex: bool = False): ... + def raw_nthash(cls, secret: str | bytes, hex: Literal[True]) -> str: ... + @overload + @classmethod + def raw_nthash(cls, secret: str | bytes, hex: Literal[False] = False) -> bytes: ... + @overload + @classmethod + def raw_nthash(cls, secret: str | bytes, hex: bool = False) -> str | bytes: ... bsd_nthash: Any diff --git a/stubs/passlib/passlib/win32.pyi b/stubs/passlib/passlib/win32.pyi index aade4ce9c954..b9118d9c4b73 100644 --- a/stubs/passlib/passlib/win32.pyi +++ b/stubs/passlib/passlib/win32.pyi @@ -1,7 +1,16 @@ -from typing import Any +from binascii import hexlify as hexlify +from typing import Final, Literal, overload -from passlib.hash import nthash as nthash +from passlib.handlers.windows import nthash as nthash -raw_nthash: Any +LM_MAGIC: Final[bytes] +raw_nthash = nthash.raw_nthash -def raw_lmhash(secret, encoding: str = "ascii", hex: bool = False): ... +@overload +def raw_lmhash(secret: str | bytes, encoding: str = "ascii", hex: Literal[False] = False) -> bytes: ... +@overload +def raw_lmhash(secret: str | bytes, encoding: str, hex: Literal[True]) -> str: ... +@overload +def raw_lmhash(secret: str | bytes, *, hex: Literal[True]) -> str: ... 
+ +__all__ = ["nthash", "raw_lmhash", "raw_nthash"] From 1b5eff22d42ddf235aa60143d6b35ae6eff32462 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 18:50:20 +0400 Subject: [PATCH 141/388] Add stubs for `xlrd` (#13676) --- stubs/xlrd/METADATA.toml | 2 + stubs/xlrd/xlrd/__init__.pyi | 41 ++++++++ stubs/xlrd/xlrd/biffh.pyi | 176 ++++++++++++++++++++++++++++++++ stubs/xlrd/xlrd/book.pyi | 151 +++++++++++++++++++++++++++ stubs/xlrd/xlrd/compdoc.pyi | 54 ++++++++++ stubs/xlrd/xlrd/formatting.pyi | 111 ++++++++++++++++++++ stubs/xlrd/xlrd/formula.pyi | 87 ++++++++++++++++ stubs/xlrd/xlrd/info.pyi | 4 + stubs/xlrd/xlrd/sheet.pyi | 163 +++++++++++++++++++++++++++++ stubs/xlrd/xlrd/timemachine.pyi | 19 ++++ stubs/xlrd/xlrd/xldate.pyi | 24 +++++ 11 files changed, 832 insertions(+) create mode 100644 stubs/xlrd/METADATA.toml create mode 100644 stubs/xlrd/xlrd/__init__.pyi create mode 100644 stubs/xlrd/xlrd/biffh.pyi create mode 100644 stubs/xlrd/xlrd/book.pyi create mode 100644 stubs/xlrd/xlrd/compdoc.pyi create mode 100644 stubs/xlrd/xlrd/formatting.pyi create mode 100644 stubs/xlrd/xlrd/formula.pyi create mode 100644 stubs/xlrd/xlrd/info.pyi create mode 100644 stubs/xlrd/xlrd/sheet.pyi create mode 100644 stubs/xlrd/xlrd/timemachine.pyi create mode 100644 stubs/xlrd/xlrd/xldate.pyi diff --git a/stubs/xlrd/METADATA.toml b/stubs/xlrd/METADATA.toml new file mode 100644 index 000000000000..d82f4f2f46c6 --- /dev/null +++ b/stubs/xlrd/METADATA.toml @@ -0,0 +1,2 @@ +version = "2.0.*" +upstream_repository = "https://github.com/python-excel/xlrd" diff --git a/stubs/xlrd/xlrd/__init__.pyi b/stubs/xlrd/xlrd/__init__.pyi new file mode 100644 index 000000000000..6c2738c15c24 --- /dev/null +++ b/stubs/xlrd/xlrd/__init__.pyi @@ -0,0 +1,41 @@ +import sys +from _typeshed import SupportsWrite +from typing import Final + +from . import timemachine as timemachine +from .biffh import ( + XL_CELL_BLANK as XL_CELL_BLANK, + XL_CELL_BOOLEAN as XL_CELL_BOOLEAN, + XL_CELL_DATE as XL_CELL_DATE, + XL_CELL_EMPTY as XL_CELL_EMPTY, + XL_CELL_ERROR as XL_CELL_ERROR, + XL_CELL_NUMBER as XL_CELL_NUMBER, + XL_CELL_TEXT as XL_CELL_TEXT, + biff_text_from_num as biff_text_from_num, + error_text_from_code as error_text_from_code, +) +from .book import Book as Book, colname as colname, open_workbook_xls as open_workbook_xls +from .formula import * +from .info import __VERSION__ as __VERSION__, __version__ as __version__ +from .sheet import empty_cell as empty_cell +from .xldate import XLDateError as XLDateError, xldate_as_datetime as xldate_as_datetime, xldate_as_tuple as xldate_as_tuple + +FILE_FORMAT_DESCRIPTIONS: Final[dict[str, str]] +ZIP_SIGNATURE: Final[bytes] +PEEK_SIZE: Final[int] + +def inspect_format(path: str | None = None, content: bytes | None = None) -> str | None: ... +def open_workbook( + filename: str | None = None, + logfile: SupportsWrite[str] = sys.stdout, + verbosity: int = 0, + use_mmap: bool = True, + file_contents: bytes | None = None, + encoding_override: str | None = None, + formatting_info: bool = False, + on_demand: bool = False, + ragged_rows: bool = False, + ignore_workbook_corruption: bool = False, +) -> Book: ... +def dump(filename: str, outfile: SupportsWrite[str] = sys.stdout, unnumbered: bool = False) -> None: ... +def count_records(filename: str, outfile: SupportsWrite[str] = sys.stdout) -> None: ... 
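
A short usage sketch of the top-level xlrd API described by the new stubs
above; "report.xls" is a placeholder path, and xlrd 2.0 itself only reads
the legacy .xls format:

    import xlrd

    path = "report.xls"                     # placeholder workbook
    if xlrd.inspect_format(path) == "xls":  # inspect_format returns str | None
        with xlrd.open_workbook(path) as book:  # Book defines __enter__/__exit__
            sheet = book.sheet_by_index(0)
            print(sheet.name, sheet.nrows, sheet.ncols)
            header = sheet.row_values(0)    # values of the first row
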
diff --git a/stubs/xlrd/xlrd/biffh.pyi b/stubs/xlrd/xlrd/biffh.pyi new file mode 100644 index 000000000000..73d01394464d --- /dev/null +++ b/stubs/xlrd/xlrd/biffh.pyi @@ -0,0 +1,176 @@ +import sys +from collections.abc import Callable +from typing import Any, Final, TextIO + +from .timemachine import * + +DEBUG: Final[int] + +class XLRDError(Exception): ... + +class BaseObject: + _repr_these: list[str] + def dump(self, f: TextIO | None = None, header: str | None = None, footer: str | None = None, indent: int = 0) -> None: ... + +FUN: Final[int] +FDT: Final[int] +FNU: Final[int] +FGE: Final[int] +FTX: Final[int] +DATEFORMAT: Final[int] +NUMBERFORMAT: Final[int] +XL_CELL_EMPTY: Final[int] +XL_CELL_TEXT: Final[int] +XL_CELL_NUMBER: Final[int] +XL_CELL_DATE: Final[int] +XL_CELL_BOOLEAN: Final[int] +XL_CELL_ERROR: Final[int] +XL_CELL_BLANK: Final[int] +biff_text_from_num: Final[dict[int, str]] +error_text_from_code: Final[dict[int, str]] +BIFF_FIRST_UNICODE: Final[int] +XL_WORKBOOK_GLOBALS: Final[int] +WBKBLOBAL: Final[int] +XL_WORKBOOK_GLOBALS_4W: Final[int] +XL_WORKSHEET: Final[int] +WRKSHEET: Final[int] +XL_BOUNDSHEET_WORKSHEET: Final[int] +XL_BOUNDSHEET_CHART: Final[int] +XL_BOUNDSHEET_VB_MODULE: Final[int] +XL_ARRAY: Final[int] +XL_ARRAY2: Final[int] +XL_BLANK: Final[int] +XL_BLANK_B2: Final[int] +XL_BOF: Final[int] +XL_BOOLERR: Final[int] +XL_BOOLERR_B2: Final[int] +XL_BOUNDSHEET: Final[int] +XL_BUILTINFMTCOUNT: Final[int] +XL_CF: Final[int] +XL_CODEPAGE: Final[int] +XL_COLINFO: Final[int] +XL_COLUMNDEFAULT: Final[int] +XL_COLWIDTH: Final[int] +XL_CONDFMT: Final[int] +XL_CONTINUE: Final[int] +XL_COUNTRY: Final[int] +XL_DATEMODE: Final[int] +XL_DEFAULTROWHEIGHT: Final[int] +XL_DEFCOLWIDTH: Final[int] +XL_DIMENSION: Final[int] +XL_DIMENSION2: Final[int] +XL_EFONT: Final[int] +XL_EOF: Final[int] +XL_EXTERNNAME: Final[int] +XL_EXTERNSHEET: Final[int] +XL_EXTSST: Final[int] +XL_FEAT11: Final[int] +XL_FILEPASS: Final[int] +XL_FONT: Final[int] +XL_FONT_B3B4: Final[int] +XL_FORMAT: Final[int] +XL_FORMAT2: Final[int] +XL_FORMULA: Final[int] +XL_FORMULA3: Final[int] +XL_FORMULA4: Final[int] +XL_GCW: Final[int] +XL_HLINK: Final[int] +XL_QUICKTIP: Final[int] +XL_HORIZONTALPAGEBREAKS: Final[int] +XL_INDEX: Final[int] +XL_INTEGER: Final[int] +XL_IXFE: Final[int] +XL_LABEL: Final[int] +XL_LABEL_B2: Final[int] +XL_LABELRANGES: Final[int] +XL_LABELSST: Final[int] +XL_LEFTMARGIN: Final[int] +XL_TOPMARGIN: Final[int] +XL_RIGHTMARGIN: Final[int] +XL_BOTTOMMARGIN: Final[int] +XL_HEADER: Final[int] +XL_FOOTER: Final[int] +XL_HCENTER: Final[int] +XL_VCENTER: Final[int] +XL_MERGEDCELLS: Final[int] +XL_MSO_DRAWING: Final[int] +XL_MSO_DRAWING_GROUP: Final[int] +XL_MSO_DRAWING_SELECTION: Final[int] +XL_MULRK: Final[int] +XL_MULBLANK: Final[int] +XL_NAME: Final[int] +XL_NOTE: Final[int] +XL_NUMBER: Final[int] +XL_NUMBER_B2: Final[int] +XL_OBJ: Final[int] +XL_PAGESETUP: Final[int] +XL_PALETTE: Final[int] +XL_PANE: Final[int] +XL_PRINTGRIDLINES: Final[int] +XL_PRINTHEADERS: Final[int] +XL_RK: Final[int] +XL_ROW: Final[int] +XL_ROW_B2: Final[int] +XL_RSTRING: Final[int] +XL_SCL: Final[int] +XL_SHEETHDR: Final[int] +XL_SHEETPR: Final[int] +XL_SHEETSOFFSET: Final[int] +XL_SHRFMLA: Final[int] +XL_SST: Final[int] +XL_STANDARDWIDTH: Final[int] +XL_STRING: Final[int] +XL_STRING_B2: Final[int] +XL_STYLE: Final[int] +XL_SUPBOOK: Final[int] +XL_TABLEOP: Final[int] +XL_TABLEOP2: Final[int] +XL_TABLEOP_B2: Final[int] +XL_TXO: Final[int] +XL_UNCALCED: Final[int] +XL_UNKNOWN: Final[int] +XL_VERTICALPAGEBREAKS: Final[int] 
+XL_WINDOW2: Final[int] +XL_WINDOW2_B2: Final[int] +XL_WRITEACCESS: Final[int] +XL_WSBOOL: Final[int] +XL_XF: Final[int] +XL_XF2: Final[int] +XL_XF3: Final[int] +XL_XF4: Final[int] +boflen: Final[dict[int, int]] +bofcodes: Final[tuple[int, int, int, int]] +XL_FORMULA_OPCODES: Final[tuple[int, int, int]] + +def is_cell_opcode(c: int) -> bool: ... +def upkbits( + tgt_obj: object, src: int, manifest: list[tuple[int, int, str]], local_setattr: Callable[[Any, str, Any], None] = ... +) -> None: ... +def upkbitsL( + tgt_obj: object, + src: int, + manifest: list[tuple[int, int, str]], + local_setattr: Callable[[Any, str, Any], None] = ..., + local_int: Callable[[Any], int] = ..., +) -> None: ... +def unpack_string(data: bytes, pos: int, encoding: str, lenlen: int = 1) -> str: ... +def unpack_string_update_pos( + data: bytes, pos: int, encoding: str, lenlen: int = 1, known_len: int | None = None +) -> tuple[str, int]: ... +def unpack_unicode(data: bytes, pos: int, lenlen: int = 2) -> str: ... +def unpack_unicode_update_pos(data: bytes, pos: int, lenlen: int = 2, known_len: int | None = None) -> tuple[str, int]: ... +def unpack_cell_range_address_list_update_pos( + output_list: list[tuple[int, int, int, int]], data: bytes, pos: int, biff_version: int, addr_size: int = 6 +) -> int: ... + +biff_rec_name_dict: Final[dict[int, str]] + +def hex_char_dump( + strg: bytes, ofs: int, dlen: int, base: int = 0, fout: TextIO = sys.stdout, unnumbered: bool = False +) -> None: ... +def biff_dump( + mem: bytes, stream_offset: int, stream_len: int, base: int = 0, fout: TextIO = sys.stdout, unnumbered: bool = False +) -> None: ... +def biff_count_records(mem: bytes, stream_offset: int, stream_len: int, fout: TextIO = sys.stdout) -> None: ... + +encoding_from_codepage: Final[dict[int, str]] diff --git a/stubs/xlrd/xlrd/book.pyi b/stubs/xlrd/xlrd/book.pyi new file mode 100644 index 000000000000..35819e83e79f --- /dev/null +++ b/stubs/xlrd/xlrd/book.pyi @@ -0,0 +1,151 @@ +import sys +from _typeshed import SupportsWrite +from collections.abc import Iterator +from types import TracebackType +from typing import Final, Literal +from typing_extensions import Self + +from .biffh import * +from .formatting import XF, Font, Format +from .formula import * +from .sheet import Cell, Sheet +from .timemachine import * + +empty_cell: Final[Cell] +MY_EOF: Final[int] +SUPBOOK_UNK: Final[int] +SUPBOOK_INTERNAL: Final[int] +SUPBOOK_EXTERNAL: Final[int] +SUPBOOK_ADDIN: Final[int] +SUPBOOK_DDEOLE: Final[int] +SUPPORTED_VERSIONS: Final[tuple[int, ...]] +builtin_name_from_code: Final[dict[str, str]] +code_from_builtin_name: Final[dict[str, str]] + +def open_workbook_xls( + filename: str | None = None, + logfile: SupportsWrite[str] = sys.stdout, + verbosity: int = 0, + use_mmap: bool = True, + file_contents: bytes | None = None, + encoding_override: str | None = None, + formatting_info: bool = False, + on_demand: bool = False, + ragged_rows: bool = False, + ignore_workbook_corruption: bool = False, +) -> Book: ... + +class Name(BaseObject): + _repr_these: list[str] + book: Book | None = None + hidden: Literal[0, 1] + func: Literal[0, 1] + vbasic: Literal[0, 1] + macro: Literal[0, 1] + complex: Literal[0, 1] + builtin: Literal[0, 1] + funcgroup: Literal[0, 1] + binary: Literal[0, 1] + name_index: int + name: str + raw_formula: bytes + scope: Literal[-1, -2, -3, 0] + result: Operand | None + def cell(self) -> Cell: ... + def area2d(self, clipped: bool = True) -> tuple[Sheet, int, int, int, int]: ... 
+ +class Book(BaseObject): + nsheets: int + datemode: Literal[0, 1] + biff_version: int + name_obj_list: list[Name] + codepage: int | None + encoding: str | None + countries: tuple[int, int] + user_name: str + font_list: list[Font] + xf_list: list[XF] + format_list: list[Format] + format_map: dict[int, Format] + style_name_map: dict[str, tuple[int, int]] + colour_map: dict[int, tuple[int, int, int] | None] + palette_record: list[tuple[int, int, int]] + load_time_stage_1: float + load_time_stage_2: float + def sheets(self) -> list[Sheet]: ... + def sheet_by_index(self, sheetx: int) -> Sheet: ... + def __iter__(self) -> Iterator[Sheet]: ... + def sheet_by_name(self, sheet_name: str) -> Sheet: ... + def __getitem__(self, item: int | str) -> Sheet: ... + def sheet_names(self) -> list[str]: ... + def sheet_loaded(self, sheet_name_or_index: int | str) -> bool: ... + def unload_sheet(self, sheet_name_or_index: int | str) -> None: ... + mem: bytes | None = None + filestr: bytes | None = None + def release_resources(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + name_and_scope_map: dict[tuple[str, int], Name] + name_map: dict[str, list[Name]] + raw_user_name: bool + builtinfmtcount: int + addin_func_names: list[str] + def __init__(self) -> None: ... + logfile: SupportsWrite[str] + verbosity: int + use_mmap: bool + encoding_override: str | None + formatting_info: bool + on_demand: bool + ragged_rows: bool + stream_len: int + base: int + def biff2_8_load( + self, + filename: str | None = None, + file_contents: bytes | None = None, + logfile: SupportsWrite[str] = sys.stdout, + verbosity: int = 0, + use_mmap: bool = True, + encoding_override: str | None = None, + formatting_info: bool = False, + on_demand: bool = False, + ragged_rows: bool = False, + ignore_workbook_corruption: bool = False, + ) -> None: ... + xfcount: int + actualfmtcount: int + def initialise_format_info(self) -> None: ... + def get2bytes(self) -> int: ... + def get_record_parts(self) -> tuple[int, int, bytes]: ... + def get_record_parts_conditional(self, reqd_record: int) -> tuple[int | None, int, bytes]: ... + def get_sheet(self, sh_number: int, update_pos: bool = True) -> Sheet: ... + def get_sheets(self) -> None: ... + def fake_globals_get_sheet(self) -> None: ... + def handle_boundsheet(self, data: bytes) -> None: ... + def handle_builtinfmtcount(self, data: bytes) -> None: ... + def derive_encoding(self) -> str: ... + def handle_codepage(self, data: bytes) -> None: ... + def handle_country(self, data: bytes) -> None: ... + def handle_datemode(self, data: bytes) -> None: ... + def handle_externname(self, data: bytes) -> None: ... + def handle_externsheet(self, data: bytes) -> None: ... + def handle_filepass(self, data: bytes) -> None: ... + def handle_name(self, data: bytes) -> None: ... + def names_epilogue(self) -> None: ... + def handle_obj(self, data: bytes) -> None: ... + def handle_supbook(self, data: bytes) -> None: ... + def handle_sheethdr(self, data: bytes) -> None: ... + def handle_sheetsoffset(self, data: bytes) -> None: ... + def handle_sst(self, data: bytes) -> None: ... + def handle_writeaccess(self, data: bytes) -> None: ... + def parse_globals(self) -> None: ... + def read(self, pos: int, length: int) -> bytes: ... + def getbof(self, rqd_stream: int) -> int | None: ... 
+ +# Helper functions +def expand_cell_address(inrow: int, incol: int) -> tuple[int, int, int, int]: ... +def display_cell_address(rowx: int, colx: int, relrow: int, relcol: int) -> str: ... +def unpack_SST_table(datatab: list[bytes], nstrings: int) -> tuple[list[str], dict[int, list[tuple[int, int]]]]: ... diff --git a/stubs/xlrd/xlrd/compdoc.pyi b/stubs/xlrd/xlrd/compdoc.pyi new file mode 100644 index 000000000000..aea4c94a3738 --- /dev/null +++ b/stubs/xlrd/xlrd/compdoc.pyi @@ -0,0 +1,54 @@ +import sys +from _typeshed import SupportsWrite +from typing import Final + +from .timemachine import * + +SIGNATURE: Final[bytes] +EOCSID: Final[int] +FREESID: Final[int] +SATSID: Final[int] +MSATSID: Final[int] +EVILSID: Final[int] + +class CompDocError(Exception): ... + +class DirNode: + DID: int + name: str + etype: int + colour: int + left_DID: int + right_DID: int + root_DID: int + first_SID: int + tot_size: int + children: list[int] + parent: int + tsinfo: tuple[int, int, int, int] + logfile: SupportsWrite[str] + def __init__(self, DID: int, dent: bytes, DEBUG: int = 0, logfile: SupportsWrite[str] = sys.stdout) -> None: ... + def dump(self, DEBUG: int = 1) -> None: ... + +class CompDoc: + logfile: SupportsWrite[str] + ignore_workbook_corruption: bool + DEBUG: int + mem: bytes + sec_size: int + short_sec_size: int + mem_data_secs: int + mem_data_len: int + seen: list[int] + SAT: list[int] + dirlist: list[DirNode] + SSCS: str + SSAT: list[int] + def __init__( + self, mem: bytes, logfile: SupportsWrite[str] = sys.stdout, DEBUG: int = 0, ignore_workbook_corruption: bool = False + ) -> None: ... + def get_named_stream(self, qname: str) -> bytes | None: ... + def locate_named_stream(self, qname: str) -> tuple[bytes | None, int, int]: ... + +def x_dump_line(alist: list[int], stride: int, f: SupportsWrite[str], dpos: int, equal: int = 0) -> None: ... +def dump_list(alist: list[int], stride: int, f: SupportsWrite[str] = sys.stdout) -> None: ... diff --git a/stubs/xlrd/xlrd/formatting.pyi b/stubs/xlrd/xlrd/formatting.pyi new file mode 100644 index 000000000000..a0898ca32dc4 --- /dev/null +++ b/stubs/xlrd/xlrd/formatting.pyi @@ -0,0 +1,111 @@ +from collections.abc import Callable +from typing import Final, Literal + +from .biffh import BaseObject +from .book import Book +from .timemachine import * + +DEBUG: Final[int] +excel_default_palette_b5: Final[tuple[tuple[int, int, int], ...]] +excel_default_palette_b2: Final[tuple[tuple[int, int, int], ...]] +excel_default_palette_b8: Final[tuple[tuple[int, int, int], ...]] +default_palette: Final[dict[int, tuple[tuple[int, int, int], ...]]] +built_in_style_names: Final[list[str]] + +def initialise_colour_map(book: Book) -> None: ... +def nearest_colour_index( + colour_map: dict[int, tuple[int, int, int] | None], rgb: tuple[int, int, int] | None, debug: int = 0 +) -> int: ... + +class EqNeAttrs: + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + +class Font(BaseObject, EqNeAttrs): + bold: Literal[0, 1] + character_set: int + colour_index: int + escapement: Literal[0, 1, 2] + family: Literal[0, 1, 2, 3, 4, 5] + font_index: int + height: int + italic: Literal[0, 1] + name: str + struck_out: Literal[0, 1] + underline_type: Literal[0, 1, 33, 34] + underlined: Literal[0, 1] + weight: int + outline: Literal[0, 1] + shadow: Literal[0, 1] + +def handle_efont(book: Book, data: bytes) -> None: ... +def handle_font(book: Book, data: bytes) -> None: ... 
+ +class Format(BaseObject, EqNeAttrs): + format_key: int + type: int + format_str: str + def __init__(self, format_key: int, ty: int, format_str: str) -> None: ... + +std_format_strings: dict[int, str] +fmt_code_ranges: list[tuple[int, int, int]] +std_format_code_types: dict[int, int] +date_char_dict: dict[str, Literal[5]] +skip_char_dict: dict[str, Literal[1]] +num_char_dict: dict[str, Literal[5]] +non_date_formats: dict[str, Literal[1]] +fmt_bracketed_sub: Callable[[str, str], str] + +def is_date_format_string(book: Book, fmt: str) -> bool: ... +def handle_format(self: Book, data: bytes, rectype: int = 1054) -> None: ... +def handle_palette(book: Book, data: bytes) -> None: ... +def palette_epilogue(book: Book) -> None: ... +def handle_style(book: Book, data: bytes) -> None: ... +def check_colour_indexes_in_obj(book: Book, obj: object, orig_index: int) -> None: ... +def fill_in_standard_formats(book: Book) -> None: ... +def handle_xf(self: Book, data: bytes) -> None: ... +def xf_epilogue(self: Book) -> None: ... +def initialise_book(book: Book) -> None: ... + +class XFBorder(BaseObject, EqNeAttrs): + top_colour_index: int + bottom_colour_index: int + left_colour_index: int + right_colour_index: int + diag_colour_index: int + top_line_style: int + bottom_line_style: int + left_line_style: int + right_line_style: int + diag_line_style: int + diag_down: Literal[0, 1] + diag_up: Literal[0, 1] + +class XFBackground(BaseObject, EqNeAttrs): + fill_pattern: int + background_colour_index: int + pattern_colour_index: int + +class XFAlignment(BaseObject, EqNeAttrs): + hor_align: int + vert_align: int + rotation: int + text_wrapped: Literal[0, 1] + indent_level: int + shrink_to_fit: Literal[0, 1] + text_direction: Literal[0, 1, 2] + +class XFProtection(BaseObject, EqNeAttrs): + cell_locked: Literal[0, 1] + formula_hidden: Literal[0, 1] + +class XF(BaseObject): + is_style: Literal[0, 1] + parent_style_index: int + xf_index: int + font_index: int + format_key: int + protection: XFProtection | None + background: XFBackground | None + alignment: XFAlignment | None + border: XFBorder | None diff --git a/stubs/xlrd/xlrd/formula.pyi b/stubs/xlrd/xlrd/formula.pyi new file mode 100644 index 000000000000..4ed7928003cf --- /dev/null +++ b/stubs/xlrd/xlrd/formula.pyi @@ -0,0 +1,87 @@ +from typing import Final +from typing_extensions import Self + +from .book import Book, Name +from .timemachine import * + +__all__ = [ + "oBOOL", + "oERR", + "oNUM", + "oREF", + "oREL", + "oSTRG", + "oUNK", + "decompile_formula", + "dump_formula", + "evaluate_name_formula", + "okind_dict", + "rangename3d", + "rangename3drel", + "cellname", + "cellnameabs", + "colname", + "FMLA_TYPE_CELL", + "FMLA_TYPE_SHARED", + "FMLA_TYPE_ARRAY", + "FMLA_TYPE_COND_FMT", + "FMLA_TYPE_DATA_VAL", + "FMLA_TYPE_NAME", + "Operand", + "Ref3D", +] + +FMLA_TYPE_CELL: Final[int] +FMLA_TYPE_SHARED: Final[int] +FMLA_TYPE_ARRAY: Final[int] +FMLA_TYPE_COND_FMT: Final[int] +FMLA_TYPE_DATA_VAL: Final[int] +FMLA_TYPE_NAME: Final[int] +oBOOL: Final[int] +oERR: Final[int] +oNUM: Final[int] +oREF: Final[int] +oREL: Final[int] +oSTRG: Final[int] +oUNK: Final[int] +okind_dict: Final[dict[int, str]] + +class FormulaError(Exception): ... + +class Operand: + value: float | str | None + kind: int + text: str + rank: int + def __init__(self, akind: int | None = None, avalue: float | str | None = None, arank: int = 0, atext: str = "?") -> None: ... 
+ +class Ref3D(tuple[int, int, int, int, int, int, int, int, int, int, int, int]): + coords: tuple[int, int, int, int, int, int] + relflags: tuple[int, int, int, int, int, int] + shtxlo: int + shtxhi: int + rowxlo: int + rowxhi: int + colxlo: int + colxhi: int + def __new__(cls, atuple: tuple[int, int, int, int, int, int, int, int, int, int, int, int]) -> Self: ... + def __init__(self, atuple: tuple[int, int, int, int, int, int, int, int, int, int, int, int]) -> None: ... + +def evaluate_name_formula(bk: Book, nobj: Name, namex: str, blah: int = 0, level: int = 0) -> None: ... +def decompile_formula( + bk: Book, + fmla: bytes, + fmlalen: int, + fmlatype: int | None = None, + browx: int | None = None, + bcolx: int | None = None, + blah: int = 0, + level: int = 0, + r1c1: int = 0, +) -> str | None: ... +def dump_formula(bk: Book, data: bytes, fmlalen: int, bv: int, reldelta: int, blah: int = 0, isname: int = 0) -> None: ... +def cellname(rowx: int, colx: int) -> str: ... +def cellnameabs(rowx: int, colx: int, r1c1: int = 0) -> str: ... +def colname(colx: int) -> str: ... +def rangename3d(book: Book, ref3d: Ref3D) -> str: ... +def rangename3drel(book: Book, ref3d: Ref3D, browx: int | None = None, bcolx: int | None = None, r1c1: int = 0) -> str: ... diff --git a/stubs/xlrd/xlrd/info.pyi b/stubs/xlrd/xlrd/info.pyi new file mode 100644 index 000000000000..e0737403b5b0 --- /dev/null +++ b/stubs/xlrd/xlrd/info.pyi @@ -0,0 +1,4 @@ +from typing import Final + +__version__: Final[str] +__VERSION__: Final[str] diff --git a/stubs/xlrd/xlrd/sheet.pyi b/stubs/xlrd/xlrd/sheet.pyi new file mode 100644 index 000000000000..9d6193654642 --- /dev/null +++ b/stubs/xlrd/xlrd/sheet.pyi @@ -0,0 +1,163 @@ +from _typeshed import SupportsWrite +from array import array +from collections.abc import Callable, Sequence +from typing import Any, Final, Literal, overload + +from .biffh import * +from .book import Book +from .formatting import XF +from .timemachine import * + +OBJ_MSO_DEBUG: Final[int] + +class MSODrawing(BaseObject): ... +class MSObj(BaseObject): ... +class MSTxo(BaseObject): ... + +class Note(BaseObject): + author: str + col_hidden: int + colx: int + rich_text_runlist: list[tuple[str, int]] | None + row_hidden: int + rowx: int + show: int + text: str + +class Hyperlink(BaseObject): + frowx: int | None + lrowx: int | None + fcolx: int | None + lcolx: int | None + type: str | None + url_or_path: bytes | str | None + desc: str | None + target: str | None + textmark: str | None + quicktip: str | None + +def unpack_RK(rk_str: bytes) -> float: ... + +cellty_from_fmtty: Final[dict[int, int]] +ctype_text: Final[dict[int, str]] + +class Cell(BaseObject): + ctype: int + value: str + xf_index: int | None + def __init__(self, ctype: int, value: str, xf_index: int | None = None) -> None: ... + +empty_cell: Final[Cell] + +class Colinfo(BaseObject): + width: int + xf_index: int + hidden: int + bit1_flag: int + outline_level: int + collapsed: int + +class Rowinfo(BaseObject): + height: int | None + has_default_height: int | None + outline_level: int | None + outline_group_starts_ends: int | None + hidden: int | None + height_mismatch: int | None + has_default_xf_index: int | None + xf_index: int | None + additional_space_above: int | None + additional_space_below: int | None + def __init__(self) -> None: ... + def __getstate__(self) -> tuple[int | None, ...]: ... + def __setstate__(self, state: tuple[int | None, ...]) -> None: ... 
+ +class Sheet(BaseObject): + name: str + book: Book | None + nrows: int + ncols: int + colinfo_map: dict[int, Colinfo] + rowinfo_map: dict[int, Rowinfo] + col_label_ranges: list[tuple[int, int, int, int]] + row_label_ranges: list[tuple[int, int, int, int]] + merged_cells: list[tuple[int, int, int, int]] + rich_text_runlist_map: dict[tuple[int, int], list[tuple[int, int]]] + defcolwidth: float | None + standardwidth: float | None + default_row_height: int | None + default_row_height_mismatch: int | None + default_row_hidden: int | None + default_additional_space_above: int | None + default_additional_space_below: int | None + visibility: Literal[0, 1, 2] + gcw: tuple[int, ...] + hyperlink_list: list[Hyperlink] + hyperlink_map: dict[tuple[int, int], Hyperlink] + cell_note_map: dict[tuple[int, int], Note] + vert_split_pos: int + horz_split_pos: int + horz_split_first_visible: int + vert_split_first_visible: int + split_active_pane: int + has_pane_record: int + horizontal_page_breaks: list[tuple[int, int, int]] + vertical_page_breaks: list[tuple[int, int, int]] + biff_version: int + logfile: SupportsWrite[str] + bt: array[int] + bf: array[int] + number: int + verbosity: int + formatting_info: bool + ragged_rows: bool + put_cell: Callable[[int, int, int | None, str, int | None], None] + first_visible_rowx: int + first_visible_colx: int + gridline_colour_index: int + gridline_colour_rgb: tuple[int, int, int] | None + cooked_page_break_preview_mag_factor: int + cooked_normal_view_mag_factor: int + cached_page_break_preview_mag_factor: int + cached_normal_view_mag_factor: int + scl_mag_factor: int | None + utter_max_rows: int + utter_max_cols: int + def __init__(self, book: Book, position: int, name: str, number: int) -> None: ... + def cell(self, rowx: int, colx: int) -> Cell: ... + def cell_value(self, rowx: int, colx: int) -> str: ... + def cell_type(self, rowx: int, colx: int) -> int: ... + def cell_xf_index(self, rowx: int, colx: int) -> int: ... + def row_len(self, rowx: int) -> int: ... + def row(self, rowx: int) -> list[Cell]: ... + @overload + def __getitem__(self, item: int) -> list[Cell]: ... + @overload + def __getitem__(self, item: tuple[int, int]) -> Cell: ... + def get_rows(self) -> tuple[list[Cell], ...]: ... + __iter__ = get_rows + def row_types(self, rowx: int, start_colx: int = 0, end_colx: int | None = None) -> Sequence[int]: ... + def row_values(self, rowx: int, start_colx: int = 0, end_colx: int | None = None) -> Sequence[str]: ... + def row_slice(self, rowx: int, start_colx: int = 0, end_colx: int | None = None) -> list[Cell]: ... + def col_slice(self, colx: int, start_rowx: int = 0, end_rowx: int | None = None) -> list[Cell]: ... + def col_values(self, colx: int, start_rowx: int = 0, end_rowx: int | None = None) -> list[str]: ... + def col_types(self, colx: int, start_rowx: int = 0, end_rowx: int | None = None) -> list[int]: ... + col = col_slice + def tidy_dimensions(self) -> None: ... + def put_cell_ragged(self, rowx: int, colx: int, ctype: int | None, value: str, xf_index: int | None) -> None: ... + def put_cell_unragged(self, rowx: int, colx: int, ctype: int | None, value: str, xf_index: int | None) -> None: ... + def read(self, bk: Book) -> Literal[1]: ... + def string_record_contents(self, data: bytes) -> str | None: ... + def update_cooked_mag_factors(self) -> None: ... + def fixed_BIFF2_xfindex(self, cell_attr: bytes, rowx: int, colx: int, true_xfx: int | None = None) -> int: ... + def insert_new_BIFF20_xf(self, cell_attr: bytes, style: int = 0) -> int: ... 
+ def fake_XF_from_BIFF20_cell_attr(self, cell_attr: bytes, style: int = 0) -> XF: ... + def req_fmt_info(self) -> None: ... + def computed_column_width(self, colx: int) -> float: ... + def handle_hlink(self, data: bytes) -> None: ... + def handle_quicktip(self, data: bytes) -> None: ... + def handle_msodrawingetc(self, recid: Any, data_len: int, data: bytes) -> None: ... + def handle_obj(self, data: bytes) -> MSObj | None: ... + def handle_note(self, data: bytes, txos: dict[int, MSTxo]) -> None: ... + def handle_txo(self, data: bytes) -> MSTxo | None: ... + def handle_feat11(self, data: bytes) -> None: ... diff --git a/stubs/xlrd/xlrd/timemachine.pyi b/stubs/xlrd/xlrd/timemachine.pyi new file mode 100644 index 000000000000..1a44b9bce94c --- /dev/null +++ b/stubs/xlrd/xlrd/timemachine.pyi @@ -0,0 +1,19 @@ +from collections.abc import Callable +from io import BytesIO +from typing import Any + +python_version: tuple[int, int] + +BYTES_LITERAL: Callable[[str], bytes] +UNICODE_LITERAL: Callable[[str], str] +BYTES_ORD: Callable[[bytes], int] +BYTES_IO: type[BytesIO] + +def fprintf(f: Any, fmt: str, *vargs: Any) -> None: ... + +EXCEL_TEXT_TYPES: tuple[type[str], type[bytes], type[bytearray]] +REPR = ascii +xrange = range +unicode: Callable[[bytes, str], str] +ensure_unicode: Callable[[str | bytes], str] +unichr = chr diff --git a/stubs/xlrd/xlrd/xldate.pyi b/stubs/xlrd/xlrd/xldate.pyi new file mode 100644 index 000000000000..7940d5c20707 --- /dev/null +++ b/stubs/xlrd/xlrd/xldate.pyi @@ -0,0 +1,24 @@ +import datetime +from typing import Final, Literal + +_JDN_delta: Final[tuple[int, int]] +epoch_1904: Final[datetime.datetime] +epoch_1900: Final[datetime.datetime] +epoch_1900_minus_1: Final[datetime.datetime] +_XLDAYS_TOO_LARGE: Final[tuple[int, int]] +_days_in_month: Final[tuple[None, int, int, int, int, int, int, int, int, int, int, int, int]] + +class XLDateError(ValueError): ... +class XLDateNegative(XLDateError): ... +class XLDateAmbiguous(XLDateError): ... +class XLDateTooLarge(XLDateError): ... +class XLDateBadDatemode(XLDateError): ... +class XLDateBadTuple(XLDateError): ... + +# 0: 1900-based, 1: 1904-based. +def xldate_as_tuple(xldate: float, datemode: Literal[0, 1]) -> tuple[int, int, int, int, int, int]: ... +def xldate_as_datetime(xldate: float, datemode: Literal[0, 1]) -> datetime.datetime: ... +def _leap(y: int) -> Literal[0, 1]: ... +def xldate_from_date_tuple(date_tuple: tuple[int, int, int], datemode: Literal[0, 1]) -> float: ... +def xldate_from_time_tuple(time_tuple: tuple[int, int, int]) -> float: ... +def xldate_from_datetime_tuple(datetime_tuple: tuple[int, int, int, int, int, int], datemode: Literal[0, 1]) -> float: ... 
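The xldate helpers annotated above convert Excel serial date numbers, and the Literal[0, 1] narrowing on datemode mirrors the two supported epochs (0 = 1900-based, 1 = 1904-based Mac workbooks). A minimal usage sketch, not part of the patch, assuming xlrd is installed; xlrd.open_workbook and Book.datemode, mentioned only in the comment, come from parts of the package outside this diff:

from xlrd.xldate import XLDateError, xldate_as_datetime, xldate_as_tuple

serial = 45000.25  # an Excel date/time serial number
try:
    # The second argument must be 0 or 1, matching the Literal[0, 1] annotation.
    # In real code it usually comes from Book.datemode after xlrd.open_workbook()
    # (assumed here, not shown in this diff).
    year, month, day, hour, minute, second = xldate_as_tuple(serial, 0)
    when = xldate_as_datetime(serial, 0)
except XLDateError:
    # Raised (via its subclasses) for negative, ambiguous or out-of-range serials.
    when = None
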
From 1eac33485154d62be526a67c40036ad3ef7a7f29 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 18:51:05 +0400 Subject: [PATCH 142/388] Improve `ujson` (#13700) --- stubs/ujson/ujson.pyi | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/stubs/ujson/ujson.pyi b/stubs/ujson/ujson.pyi index 42a454dcb309..1fb7f2fe8af7 100644 --- a/stubs/ujson/ujson.pyi +++ b/stubs/ujson/ujson.pyi @@ -1,8 +1,8 @@ -from _typeshed import Incomplete, SupportsRead, SupportsWrite +from _typeshed import SupportsRead, SupportsWrite from collections.abc import Callable -from typing import Any +from typing import Any, Final -__version__: str +__version__: Final[str] def encode( obj: Any, @@ -14,7 +14,7 @@ def encode( indent: int = ..., allow_nan: bool = ..., reject_bytes: bool = ..., - default: Callable[[Incomplete], Incomplete] | None = None, + default: Callable[[Any], Any] | None = None, # Specify how to serialize arbitrary types separators: tuple[str, str] | None = None, ) -> str: ... def dumps( @@ -27,7 +27,7 @@ def dumps( indent: int = ..., allow_nan: bool = ..., reject_bytes: bool = ..., - default: Callable[[Incomplete], Incomplete] | None = None, + default: Callable[[Any], Any] | None = None, # Specify how to serialize arbitrary types separators: tuple[str, str] | None = None, ) -> str: ... def dump( @@ -42,7 +42,7 @@ def dump( indent: int = ..., allow_nan: bool = ..., reject_bytes: bool = ..., - default: Callable[[Incomplete], Incomplete] | None = None, + default: Callable[[Any], Any] | None = None, # Specify how to serialize arbitrary types separators: tuple[str, str] | None = None, ) -> None: ... def decode(s: str | bytes | bytearray, precise_float: bool = ...) -> Any: ... From cb5d09591c5e4f5009b6b441e309b4546861c9a0 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 25 Mar 2025 18:54:09 +0400 Subject: [PATCH 143/388] Improve stubs for `commonmark` (#13681) --- stubs/commonmark/commonmark/blocks.pyi | 180 +++++++++--------- stubs/commonmark/commonmark/common.pyi | 62 +++--- stubs/commonmark/commonmark/dump.pyi | 10 +- stubs/commonmark/commonmark/inlines.pyi | 109 +++++------ stubs/commonmark/commonmark/main.pyi | 2 +- stubs/commonmark/commonmark/node.pyi | 72 +++---- .../commonmark/normalize_reference.pyi | 2 +- stubs/commonmark/commonmark/render/html.pyi | 65 ++++--- .../commonmark/commonmark/render/renderer.pyi | 8 +- stubs/commonmark/commonmark/render/rst.pyi | 41 ++-- 10 files changed, 287 insertions(+), 264 deletions(-) diff --git a/stubs/commonmark/commonmark/blocks.pyi b/stubs/commonmark/commonmark/blocks.pyi index 7a5bbdc082ea..a442b74c452e 100644 --- a/stubs/commonmark/commonmark/blocks.pyi +++ b/stubs/commonmark/commonmark/blocks.pyi @@ -1,141 +1,144 @@ -from _typeshed import Incomplete -from typing import Any - -CODE_INDENT: int -reHtmlBlockOpen: Any -reHtmlBlockClose: Any -reThematicBreak: Any -reMaybeSpecial: Any -reNonSpace: Any -reBulletListMarker: Any -reOrderedListMarker: Any -reATXHeadingMarker: Any -reCodeFence: Any -reClosingCodeFence: Any -reSetextHeadingLine: Any -reLineEnding: Any - -def is_blank(s): ... -def is_space_or_tab(s): ... -def peek(ln, pos): ... -def ends_with_blank_line(block): ... -def parse_list_marker(parser, container): ... -def lists_match(list_data, item_data): ... 
+import re +from typing import Any, Final, Literal + +from .inlines import InlineParser +from .node import Node + +CODE_INDENT: Final[int] +reHtmlBlockOpen: Final[list[re.Pattern[str]]] +reHtmlBlockClose: Final[list[re.Pattern[str]]] +reThematicBreak: Final[re.Pattern[str]] +reMaybeSpecial: Final[re.Pattern[str]] +reNonSpace: Final[re.Pattern[str]] +reBulletListMarker: Final[re.Pattern[str]] +reOrderedListMarker: Final[re.Pattern[str]] +reATXHeadingMarker: Final[re.Pattern[str]] +reCodeFence: Final[re.Pattern[str]] +reClosingCodeFence: Final[re.Pattern[str]] +reSetextHeadingLine: Final[re.Pattern[str]] +reLineEnding: Final[re.Pattern[str]] + +def is_blank(s: str) -> bool: ... +def is_space_or_tab(s: str) -> bool: ... +def peek(ln: str, pos: int) -> str | None: ... +def ends_with_blank_line(block: Node) -> bool: ... +def parse_list_marker(parser: Parser, container: Node) -> dict[str, Any] | None: ... +def lists_match(list_data: dict[str, Any], item_data: dict[str, Any]) -> bool: ... class Block: - accepts_lines: Any + accepts_lines: bool | None @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...) -> None: ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> int | None: ... @staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t) -> None: ... + def can_contain(t: str) -> bool | None: ... class Document(Block): - accepts_lines: bool + accepts_lines: Literal[False] @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0]: ... @staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t): ... + def can_contain(t: str) -> bool: ... class List(Block): - accepts_lines: bool + accepts_lines: Literal[False] @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0]: ... @staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t): ... + def can_contain(t: str) -> bool: ... class BlockQuote(Block): - accepts_lines: bool + accepts_lines: Literal[False] @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1]: ... @staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t): ... + def can_contain(t: str) -> bool: ... class Item(Block): - accepts_lines: bool + accepts_lines: Literal[False] @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1]: ... 
@staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t): ... + def can_contain(t: str) -> bool: ... class Heading(Block): - accepts_lines: bool + accepts_lines: Literal[False] @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[1]: ... @staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t): ... + def can_contain(t: str) -> Literal[False]: ... class ThematicBreak(Block): - accepts_lines: bool + accepts_lines: Literal[False] @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[1]: ... @staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t): ... + def can_contain(t: str) -> Literal[False]: ... class CodeBlock(Block): - accepts_lines: bool + accepts_lines: Literal[True] @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1, 2]: ... @staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t): ... + def can_contain(t: str) -> Literal[False]: ... class HtmlBlock(Block): - accepts_lines: bool + accepts_lines: Literal[True] @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1]: ... @staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t): ... + def can_contain(t: str) -> Literal[False]: ... class Paragraph(Block): - accepts_lines: bool + accepts_lines: Literal[True] @staticmethod - def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1]: ... @staticmethod - def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... @staticmethod - def can_contain(t): ... + def can_contain(t: str) -> Literal[False]: ... class BlockStarts: - METHODS: Any + METHODS: list[str] @staticmethod - def block_quote(parser, container: Incomplete | None = ...): ... + def block_quote(parser: Parser, container: Node | None = None) -> Literal[0, 1]: ... @staticmethod - def atx_heading(parser, container: Incomplete | None = ...): ... + def atx_heading(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... @staticmethod - def fenced_code_block(parser, container: Incomplete | None = ...): ... 
+ def fenced_code_block(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... @staticmethod - def html_block(parser, container: Incomplete | None = ...): ... + def html_block(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... @staticmethod - def setext_heading(parser, container: Incomplete | None = ...): ... + def setext_heading(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... @staticmethod - def thematic_break(parser, container: Incomplete | None = ...): ... + def thematic_break(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... @staticmethod - def list_item(parser, container: Incomplete | None = ...): ... + def list_item(parser: Parser, container: Node | None = None) -> Literal[0, 1]: ... @staticmethod - def indented_code_block(parser, container: Incomplete | None = ...): ... + def indented_code_block(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... class Parser: - doc: Any - block_starts: Any - tip: Any - oldtip: Any + doc: Node + block_starts: BlockStarts + tip: Node + oldtip: Node current_line: str line_number: int offset: int @@ -147,21 +150,22 @@ class Parser: blank: bool partially_consumed_tab: bool all_closed: bool - last_matched_container: Any - refmap: Any + last_matched_container: Node + refmap: dict[str, Any] last_line_length: int - inline_parser: Any - options: Any - def __init__(self, options=...) -> None: ... + inline_parser: InlineParser + options: dict[str, Any] + blocks: dict[str, Block] + def __init__(self, options: dict[str, Any] = {}) -> None: ... def add_line(self) -> None: ... - def add_child(self, tag, offset): ... + def add_child(self, tag: str, offset: int) -> Node: ... def close_unmatched_blocks(self) -> None: ... def find_next_nonspace(self) -> None: ... def advance_next_nonspace(self) -> None: ... - def advance_offset(self, count, columns) -> None: ... - def incorporate_line(self, ln) -> None: ... - def finalize(self, block, line_number) -> None: ... - def process_inlines(self, block) -> None: ... - def parse(self, my_input): ... + def advance_offset(self, count: int, columns: bool) -> None: ... + def incorporate_line(self, ln: str) -> None: ... + def finalize(self, block: Node, line_number: int) -> None: ... + def process_inlines(self, block: Node) -> None: ... + def parse(self, my_input: str) -> Node: ... 
-CAMEL_RE: Any +CAMEL_RE: Final[re.Pattern[str]] diff --git a/stubs/commonmark/commonmark/common.pyi b/stubs/commonmark/commonmark/common.pyi index c55a67969ad1..e6dda39aef8f 100644 --- a/stubs/commonmark/commonmark/common.pyi +++ b/stubs/commonmark/commonmark/common.pyi @@ -1,35 +1,39 @@ import html -from typing import Any +import re +from typing import AnyStr, Final, Literal, overload HTMLunescape = html.unescape -ENTITY: str -TAGNAME: str -ATTRIBUTENAME: str -UNQUOTEDVALUE: str -SINGLEQUOTEDVALUE: str -DOUBLEQUOTEDVALUE: str -ATTRIBUTEVALUE: Any -ATTRIBUTEVALUESPEC: Any -ATTRIBUTE: Any -OPENTAG: Any -CLOSETAG: Any -HTMLCOMMENT: str -PROCESSINGINSTRUCTION: str -DECLARATION: Any -CDATA: str -HTMLTAG: Any -reHtmlTag: Any -reBackslashOrAmp: Any -ESCAPABLE: str -reEntityOrEscapedChar: Any -XMLSPECIAL: str -reXmlSpecial: Any +ENTITY: Final[str] +TAGNAME: Final[str] +ATTRIBUTENAME: Final[str] +UNQUOTEDVALUE: Final[str] +SINGLEQUOTEDVALUE: Final[str] +DOUBLEQUOTEDVALUE: Final[str] +ATTRIBUTEVALUE: Final[str] +ATTRIBUTEVALUESPEC: Final[str] +ATTRIBUTE: Final[str] +OPENTAG: Final[str] +CLOSETAG: Final[str] +HTMLCOMMENT: Final[str] +PROCESSINGINSTRUCTION: Final[str] +DECLARATION: Final[str] +CDATA: Final[str] +HTMLTAG: Final[str] +reHtmlTag: Final[re.Pattern[str]] +reBackslashOrAmp: Final[re.Pattern[str]] +ESCAPABLE: Final[str] +reEntityOrEscapedChar: Final[re.Pattern[str]] +XMLSPECIAL: Final[str] +reXmlSpecial: Final[re.Pattern[str]] -def unescape_char(s): ... -def unescape_string(s): ... -def normalize_uri(uri): ... +def unescape_char(s: AnyStr) -> AnyStr: ... +def unescape_string(s: str) -> str: ... +def normalize_uri(uri: str) -> str: ... -UNSAFE_MAP: Any +UNSAFE_MAP: Final[dict[str, str]] -def replace_unsafe_char(s): ... -def escape_xml(s): ... +def replace_unsafe_char(s: str) -> str: ... +@overload +def escape_xml(s: None) -> Literal[""]: ... +@overload +def escape_xml(s: str) -> str: ... diff --git a/stubs/commonmark/commonmark/dump.pyi b/stubs/commonmark/commonmark/dump.pyi index 821fb95b7f5d..d6dd0350b16f 100644 --- a/stubs/commonmark/commonmark/dump.pyi +++ b/stubs/commonmark/commonmark/dump.pyi @@ -1,3 +1,7 @@ -def prepare(obj, topnode: bool = ...): ... -def dumpJSON(obj): ... -def dumpAST(obj, ind: int = ..., topnode: bool = ...) -> None: ... +from typing import Any + +from .node import Node + +def prepare(obj: Node, topnode: bool = ...) -> list[dict[str, Any]]: ... +def dumpJSON(obj: Node) -> str: ... +def dumpAST(obj: Node, ind: int = ..., topnode: bool = ...) -> None: ... 
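With the annotations above, parsing and dumping compose into a typed round trip: Parser.parse() yields a Node and the dump helpers consume it. A short sketch of that flow (illustration only, assuming the commonmark package is installed; the sample Markdown string is made up):

from commonmark.blocks import Parser
from commonmark.dump import dumpAST, dumpJSON

ast = Parser().parse("Hello *world*")  # typed as Node by blocks.pyi above
as_json = dumpJSON(ast)                # str, per the new dump.pyi signature
dumpAST(ast)                           # pretty-prints the AST and returns None
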
diff --git a/stubs/commonmark/commonmark/inlines.pyi b/stubs/commonmark/commonmark/inlines.pyi index e53f22a10cf8..5202a01bdc97 100644 --- a/stubs/commonmark/commonmark/inlines.pyi +++ b/stubs/commonmark/commonmark/inlines.pyi @@ -1,65 +1,68 @@ import html -from typing import Any +import re +from typing import Any, Final, Literal + +from .node import Node HTMLunescape = html.unescape -ESCAPED_CHAR: Any -rePunctuation: Any -reLinkTitle: Any -reLinkDestinationBraces: Any -reEscapable: Any -reEntityHere: Any -reTicks: Any -reTicksHere: Any -reEllipses: Any -reDash: Any -reEmailAutolink: Any -reAutolink: Any -reSpnl: Any -reWhitespaceChar: Any -reWhitespace: Any -reUnicodeWhitespaceChar: Any -reFinalSpace: Any -reInitialSpace: Any -reSpaceAtEndOfLine: Any -reLinkLabel: Any -reMain: Any +ESCAPED_CHAR: Final[str] +rePunctuation: Final[re.Pattern[str]] +reLinkTitle: Final[re.Pattern[str]] +reLinkDestinationBraces: Final[re.Pattern[str]] +reEscapable: Final[re.Pattern[str]] +reEntityHere: Final[re.Pattern[str]] +reTicks: Final[re.Pattern[str]] +reTicksHere: Final[re.Pattern[str]] +reEllipses: Final[re.Pattern[str]] +reDash: Final[re.Pattern[str]] +reEmailAutolink: Final[re.Pattern[str]] +reAutolink: Final[re.Pattern[str]] +reSpnl: Final[re.Pattern[str]] +reWhitespaceChar: Final[re.Pattern[str]] +reWhitespace: Final[re.Pattern[str]] +reUnicodeWhitespaceChar: Final[re.Pattern[str]] +reFinalSpace: Final[re.Pattern[str]] +reInitialSpace: Final[re.Pattern[str]] +reSpaceAtEndOfLine: Final[re.Pattern[str]] +reLinkLabel: Final[re.Pattern[str]] +reMain: Final[re.Pattern[str]] -def text(s): ... -def smart_dashes(chars): ... +def text(s: str) -> Node: ... +def smart_dashes(chars: str) -> str: ... class InlineParser: subject: str - brackets: Any + brackets: dict[str, Any] | None pos: int - refmap: Any - options: Any - def __init__(self, options=...) -> None: ... - def match(self, regexString): ... - def peek(self): ... - def spnl(self): ... - def parseBackticks(self, block): ... - def parseBackslash(self, block): ... - def parseAutolink(self, block): ... - def parseHtmlTag(self, block): ... - def scanDelims(self, c): ... - delimiters: Any - def handleDelim(self, cc, block): ... - def removeDelimiter(self, delim) -> None: ... + refmap: dict[str, Any] + options: dict[str, Any] + def __init__(self, options: dict[str, Any] = {}) -> None: ... + def match(self, regexString: str | re.Pattern[str]) -> str | None: ... + def peek(self) -> str | None: ... + def spnl(self) -> Literal[True]: ... + def parseBackticks(self, block: Node) -> bool: ... + def parseBackslash(self, block: Node) -> Literal[True]: ... + def parseAutolink(self, block: Node) -> bool: ... + def parseHtmlTag(self, block: Node) -> bool: ... + def scanDelims(self, c: str) -> dict[str, Any] | None: ... + delimiters: dict[str, Any] + def handleDelim(self, cc: str, block: Node) -> bool: ... + def removeDelimiter(self, delim: dict[str, Any]) -> None: ... @staticmethod - def removeDelimitersBetween(bottom, top) -> None: ... + def removeDelimitersBetween(bottom: dict[str, Any], top: dict[str, Any]) -> None: ... def processEmphasis(self, stack_bottom) -> None: ... - def parseLinkTitle(self): ... - def parseLinkDestination(self): ... - def parseLinkLabel(self): ... - def parseOpenBracket(self, block): ... - def parseBang(self, block): ... - def parseCloseBracket(self, block): ... + def parseLinkTitle(self) -> str | None: ... + def parseLinkDestination(self) -> str | None: ... + def parseLinkLabel(self) -> int: ... 
+ def parseOpenBracket(self, block: Node) -> Literal[True]: ... + def parseBang(self, block: Node) -> Literal[True]: ... + def parseCloseBracket(self, block: Node) -> Literal[True]: ... def addBracket(self, node, index, image) -> None: ... def removeBracket(self) -> None: ... - def parseEntity(self, block): ... - def parseString(self, block): ... - def parseNewline(self, block): ... - def parseReference(self, s, refmap): ... - def parseInline(self, block): ... - def parseInlines(self, block) -> None: ... - parse: Any + def parseEntity(self, block: Node) -> bool: ... + def parseString(self, block: Node) -> bool: ... + def parseNewline(self, block: Node) -> Literal[True]: ... + def parseReference(self, s: str, refmap: dict[str, Any]) -> int: ... + def parseInline(self, block: Node) -> bool: ... + def parseInlines(self, block: Node) -> None: ... + parse = parseInlines diff --git a/stubs/commonmark/commonmark/main.pyi b/stubs/commonmark/commonmark/main.pyi index c1768b9203ad..34059e6d22cd 100644 --- a/stubs/commonmark/commonmark/main.pyi +++ b/stubs/commonmark/commonmark/main.pyi @@ -1,3 +1,3 @@ from typing import Literal -def commonmark(text: str, format: Literal["html", "json", "ast", "rst"] = ...) -> str: ... +def commonmark(text: str, format: Literal["html", "json", "ast", "rst"] = "html") -> str: ... diff --git a/stubs/commonmark/commonmark/node.pyi b/stubs/commonmark/commonmark/node.pyi index 8760df613cd1..ffab8f5c98f1 100644 --- a/stubs/commonmark/commonmark/node.pyi +++ b/stubs/commonmark/commonmark/node.pyi @@ -1,51 +1,53 @@ -from typing import Any +import re +from typing import Final +from typing_extensions import Self -reContainer: Any +reContainer: Final[re.Pattern[str]] -def is_container(node): ... +def is_container(node: Node) -> bool: ... class NodeWalker: - current: Any - root: Any + current: Node | None + root: Node entering: bool - def __init__(self, root) -> None: ... - def __next__(self): ... - next: Any - def __iter__(self): ... - def nxt(self): ... - def resume_at(self, node, entering) -> None: ... + def __init__(self, root: Node) -> None: ... + def __next__(self) -> tuple[Node, bool]: ... + next = __next__ + def __iter__(self) -> Self: ... + def nxt(self) -> dict[str, Node | bool] | None: ... + def resume_at(self, node: Node, entering: bool) -> None: ... class Node: - t: Any - parent: Any - first_child: Any - last_child: Any - prv: Any - nxt: Any - sourcepos: Any + t: str + parent: Node | None + first_child: Node | None + last_child: Node | None + prv: Node | None + nxt: Node | None + sourcepos: list[list[int]] | None last_line_blank: bool last_line_checked: bool is_open: bool string_content: str - literal: Any - list_data: Any - info: Any - destination: Any - title: Any + literal: str | None + list_data: dict[str, str | int | bool | None] + info: str | None + destination: str | None + title: str | None is_fenced: bool - fence_char: Any + fence_char: str | None fence_length: int - fence_offset: Any - level: Any - on_enter: Any - on_exit: Any - def __init__(self, node_type, sourcepos) -> None: ... + fence_offset: int | None + level: int | None + on_enter: str | None + on_exit: str | None + def __init__(self, node_type: str, sourcepos: list[list[int]] | None) -> None: ... def pretty(self) -> None: ... def normalize(self) -> None: ... - def is_container(self): ... - def append_child(self, child) -> None: ... - def prepend_child(self, child) -> None: ... + def is_container(self) -> bool: ... + def append_child(self, child: Node) -> None: ... 
+ def prepend_child(self, child: Node) -> None: ... def unlink(self) -> None: ... - def insert_after(self, sibling) -> None: ... - def insert_before(self, sibling) -> None: ... - def walker(self): ... + def insert_after(self, sibling: Node) -> None: ... + def insert_before(self, sibling: Node) -> None: ... + def walker(self) -> NodeWalker: ... diff --git a/stubs/commonmark/commonmark/normalize_reference.pyi b/stubs/commonmark/commonmark/normalize_reference.pyi index 96a3414d109b..0386213d006b 100644 --- a/stubs/commonmark/commonmark/normalize_reference.pyi +++ b/stubs/commonmark/commonmark/normalize_reference.pyi @@ -1 +1 @@ -def normalize_reference(string): ... +def normalize_reference(string: str) -> str: ... diff --git a/stubs/commonmark/commonmark/render/html.pyi b/stubs/commonmark/commonmark/render/html.pyi index 02637833a4ae..618c0f5edd2e 100644 --- a/stubs/commonmark/commonmark/render/html.pyi +++ b/stubs/commonmark/commonmark/render/html.pyi @@ -1,38 +1,43 @@ -from _typeshed import Incomplete -from typing import Any +import re +from builtins import list as _list # conflicts with a method named "list" +from typing import Any, Final, Literal, overload +from commonmark.node import Node from commonmark.render.renderer import Renderer -reUnsafeProtocol: Any -reSafeDataProtocol: Any +reUnsafeProtocol: Final[re.Pattern[str]] +reSafeDataProtocol: Final[re.Pattern[str]] -def potentially_unsafe(url): ... +def potentially_unsafe(url: str) -> bool | None: ... class HtmlRenderer(Renderer): disable_tags: int last_out: str - options: Any - def __init__(self, options=...) -> None: ... - def escape(self, text): ... - def tag(self, name, attrs: Incomplete | None = ..., selfclosing: Incomplete | None = ...) -> None: ... - def text(self, node, entering: Incomplete | None = ...) -> None: ... - def softbreak(self, node: Incomplete | None = ..., entering: Incomplete | None = ...) -> None: ... - def linebreak(self, node: Incomplete | None = ..., entering: Incomplete | None = ...) -> None: ... - def link(self, node, entering) -> None: ... - def image(self, node, entering) -> None: ... - def emph(self, node, entering) -> None: ... - def strong(self, node, entering) -> None: ... - def paragraph(self, node, entering) -> None: ... - def heading(self, node, entering) -> None: ... - def code(self, node, entering) -> None: ... - def code_block(self, node, entering) -> None: ... - def thematic_break(self, node, entering) -> None: ... - def block_quote(self, node, entering) -> None: ... - def list(self, node, entering) -> None: ... - def item(self, node, entering) -> None: ... - def html_inline(self, node, entering) -> None: ... - def html_block(self, node, entering) -> None: ... - def custom_inline(self, node, entering) -> None: ... - def custom_block(self, node, entering) -> None: ... - def out(self, s) -> None: ... - def attrs(self, node): ... + options: dict[str, Any] + def __init__(self, options: dict[str, Any] = {}) -> None: ... + @overload + def escape(self, text: None) -> Literal[""]: ... + @overload + def escape(self, text: str) -> str: ... + def tag(self, name: str, attrs: _list[_list[str]] | None = None, selfclosing: bool | None = None) -> None: ... + def text(self, node: Node, entering: bool | None = None) -> None: ... + def softbreak(self, node: Node | None = None, entering: bool | None = None) -> None: ... + def linebreak(self, node: Node | None = None, entering: bool | None = None) -> None: ... + def link(self, node: Node, entering: bool | None) -> None: ... 
+ def image(self, node: Node, entering: bool | None) -> None: ... + def emph(self, node: Node, entering: bool | None) -> None: ... + def strong(self, node: Node, entering: bool | None) -> None: ... + def paragraph(self, node: Node, entering: bool | None) -> None: ... + def heading(self, node: Node, entering: bool | None) -> None: ... + def code(self, node: Node, entering: bool | None) -> None: ... + def code_block(self, node: Node, entering: bool | None) -> None: ... + def thematic_break(self, node: Node, entering: bool | None) -> None: ... + def block_quote(self, node: Node, entering: bool | None) -> None: ... + def list(self, node: Node, entering: bool | None) -> None: ... + def item(self, node: Node, entering: bool | None) -> None: ... + def html_inline(self, node: Node, entering: bool | None) -> None: ... + def html_block(self, node: Node, entering: bool | None) -> None: ... + def custom_inline(self, node: Node, entering: bool | None) -> None: ... + def custom_block(self, node: Node, entering: bool | None) -> None: ... + def out(self, s: str | None) -> None: ... + def attrs(self, node: Node) -> _list[_list[str]]: ... diff --git a/stubs/commonmark/commonmark/render/renderer.pyi b/stubs/commonmark/commonmark/render/renderer.pyi index b8e2e46f0969..db1ec57a4573 100644 --- a/stubs/commonmark/commonmark/render/renderer.pyi +++ b/stubs/commonmark/commonmark/render/renderer.pyi @@ -1,7 +1,9 @@ +from commonmark.node import Node + class Renderer: buf: str last_out: str - def render(self, ast): ... - def lit(self, s) -> None: ... + def render(self, ast: Node) -> str: ... + def lit(self, s: str) -> None: ... def cr(self) -> None: ... - def out(self, s) -> None: ... + def out(self, s: str) -> None: ... diff --git a/stubs/commonmark/commonmark/render/rst.pyi b/stubs/commonmark/commonmark/render/rst.pyi index 9357bfd9fc37..fd181c8779b5 100644 --- a/stubs/commonmark/commonmark/render/rst.pyi +++ b/stubs/commonmark/commonmark/render/rst.pyi @@ -1,26 +1,25 @@ -from typing import Any - +from commonmark.node import Node from commonmark.render.renderer import Renderer class ReStructuredTextRenderer(Renderer): - indent_char: Any + indent_char: str indent_length: int - def __init__(self, indent_char: str = ...) -> None: ... - def lit(self, s): ... + def __init__(self, indent_char: str = " ") -> None: ... + def lit(self, s: str) -> None: ... def cr(self) -> None: ... - def indent_lines(self, literal, indent_length: int = ...): ... - def document(self, node, entering) -> None: ... - def softbreak(self, node, entering) -> None: ... - def linebreak(self, node, entering) -> None: ... - def text(self, node, entering) -> None: ... - def emph(self, node, entering) -> None: ... - def strong(self, node, entering) -> None: ... - def paragraph(self, node, entering) -> None: ... - def link(self, node, entering) -> None: ... - def image(self, node, entering) -> None: ... - def code(self, node, entering) -> None: ... - def code_block(self, node, entering) -> None: ... - def list(self, node, entering) -> None: ... - def item(self, node, entering) -> None: ... - def block_quote(self, node, entering) -> None: ... - def heading(self, node, entering) -> None: ... + def indent_lines(self, literal: str, indent_length: int = 4) -> str: ... + def document(self, node: Node | None, entering: bool | None) -> None: ... + def softbreak(self, node: Node | None, entering: bool | None) -> None: ... + def linebreak(self, node: Node | None, entering: bool | None) -> None: ... + def text(self, node: Node, entering: bool | None) -> None: ... 
+ def emph(self, node: Node | None, entering: bool | None) -> None: ... + def strong(self, node: Node | None, entering: bool | None) -> None: ... + def paragraph(self, node: Node, entering: bool | None) -> None: ... + def link(self, node: Node | None, entering: bool | None) -> None: ... + def image(self, node: Node, entering: bool | None) -> None: ... + def code(self, node: Node, entering: bool | None) -> None: ... + def code_block(self, node: Node, entering: bool | None) -> None: ... + def list(self, node: Node | None, entering: bool | None) -> None: ... + def item(self, node: Node, entering: bool | None) -> None: ... + def block_quote(self, node: Node | None, entering: bool | None) -> None: ... + def heading(self, node: Node, entering: bool | None) -> None: ... From d29ccf94ce45a6dc904313ada7ec4710af8298d9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 26 Mar 2025 01:39:43 +0100 Subject: [PATCH 144/388] [stubsabot] Bump pytz to 2025.2 (#13720) --- stubs/pytz/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/pytz/METADATA.toml b/stubs/pytz/METADATA.toml index b0f041bf89af..04b5e304b1fc 100644 --- a/stubs/pytz/METADATA.toml +++ b/stubs/pytz/METADATA.toml @@ -1,3 +1,3 @@ -version = "2025.1" +version = "2025.2" # This is a mirror of https://git.launchpad.net/pytz/tree, see https://pythonhosted.org/pytz/#latest-versions upstream_repository = "https://github.com/stub42/pytz" From 2d3d1c84e0fa2fd0d56699261d517d8859459399 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 27 Mar 2025 04:55:38 -0400 Subject: [PATCH 145/388] Update pip install calls in scripts to use uv. And messages to reference current executable (#13597) --- scripts/create_baseline_stubs.py | 4 +++- .../install_all_third_party_dependencies.py | 18 ++++++++++-------- tests/mypy_test.py | 2 +- tests/runtests.py | 7 ++++++- 4 files changed, 20 insertions(+), 11 deletions(-) diff --git a/scripts/create_baseline_stubs.py b/scripts/create_baseline_stubs.py index 3766266adc91..2aab6aea5710 100755 --- a/scripts/create_baseline_stubs.py +++ b/scripts/create_baseline_stubs.py @@ -46,6 +46,8 @@ def get_installed_package_info(project: str) -> tuple[str, str] | None: Return (normalized project name, installed version) if successful. """ + # Not using "uv pip freeze" because if this is run from a global Python, + # it'll mistakenly list the .venv's packages. 
r = subprocess.run(["pip", "freeze"], capture_output=True, text=True, check=True) return search_pip_freeze_output(project, r.stdout) @@ -220,7 +222,7 @@ def main() -> None: if info is None: print(f'Error: "{project}" is not installed', file=sys.stderr) print(file=sys.stderr) - print(f'Suggestion: Run "python3 -m pip install {project}" and try again', file=sys.stderr) + print(f"Suggestion: Run `{sys.executable} -m pip install {project}` and try again", file=sys.stderr) sys.exit(1) project, version = info diff --git a/scripts/install_all_third_party_dependencies.py b/scripts/install_all_third_party_dependencies.py index a11238da8199..ca1b7075589f 100644 --- a/scripts/install_all_third_party_dependencies.py +++ b/scripts/install_all_third_party_dependencies.py @@ -3,11 +3,13 @@ from ts_utils.requirements import get_external_stub_requirements -use_uv = "--uv" in sys.argv -if use_uv: - pip_command = ["uv", "pip", "install"] -else: - pip_command = ["pip", "install"] - -requirements = get_external_stub_requirements() -subprocess.check_call(pip_command + [str(requirement) for requirement in requirements]) + +def main() -> None: + requirements = get_external_stub_requirements() + # By forwarding arguments, we naturally allow non-venv (system installs) + # by letting the script's user follow uv's own helpful hint of passing the `--system` flag. + subprocess.check_call(["uv", "pip", "install", *sys.argv[1:], *[str(requirement) for requirement in requirements]]) + + +if __name__ == "__main__": + main() diff --git a/tests/mypy_test.py b/tests/mypy_test.py index b5e91bd8af58..f292b47f4e6c 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -507,7 +507,7 @@ def setup_virtual_environments(distributions: dict[str, PackageDependencies], ar print(colored(f"took {venv_elapsed_time:.2f} seconds", "blue")) # STAGE 3: For each {virtual_environment: requirements_set} pairing, - # `pip install` the requirements set into the virtual environment + # `uv pip install` the requirements set into the virtual environment pip_start_time = time.perf_counter() # Limit workers to 10 at a time, since this makes network requests diff --git a/tests/runtests.py b/tests/runtests.py index e0aad9e95bd2..47be0830ba67 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -124,7 +124,12 @@ def main() -> None: print("\nRunning pytype...") pytype_result = subprocess.run([sys.executable, "tests/pytype_test.py", path]) else: - print(colored("\nSkipping pytype on Windows. You need to install it first: `pip install pytype`.", "yellow")) + print( + colored( + f"\nSkipping pytype on Windows. 
You need to install it first: `{sys.executable} -m pip install pytype` .", + "yellow", + ) + ) cases_path = test_cases_path(stub if folder == "stubs" else "stdlib") if not cases_path.exists(): From 20d4d4a960aee20376fc46c7545fb4c4756d9cd3 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 27 Mar 2025 04:57:46 -0400 Subject: [PATCH 146/388] Restore JACK-Client macOS stubtest (#13721) Restore JACK-Client macOS tests --- stubs/JACK-Client/METADATA.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stubs/JACK-Client/METADATA.toml b/stubs/JACK-Client/METADATA.toml index bd070e6d0493..f18e0144f591 100644 --- a/stubs/JACK-Client/METADATA.toml +++ b/stubs/JACK-Client/METADATA.toml @@ -5,8 +5,8 @@ requires = ["numpy>=1.20", "types-cffi"] [tool.stubtest] # darwin and win32 are equivalent -platforms = ["linux"] +platforms = ["darwin", "linux"] apt_dependencies = ["libjack-dev"] -# brew_dependencies = ["jack"] +brew_dependencies = ["jack"] # No need to install on the CI. Leaving here as information for Windows contributors. # choco_dependencies = ["jack"] From 2e9900df89ea97db76f481acdb74e2a9b3fa4a04 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 27 Mar 2025 17:13:13 +0100 Subject: [PATCH 147/388] [setuptools] Update to 77.0.2 (#13685) --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Avasam --- stubs/setuptools/@tests/stubtest_allowlist.txt | 4 +--- stubs/setuptools/METADATA.toml | 2 +- stubs/setuptools/distutils/compilers/C/unix.pyi | 1 + stubs/setuptools/distutils/unixccompiler.pyi | 1 + .../setuptools/_distutils/_msvccompiler.pyi | 2 ++ .../setuptools/_distutils/command/build.pyi | 2 -- .../setuptools/_distutils/compilers/C/msvc.pyi | 3 --- .../setuptools/_distutils/compilers/C/unix.pyi | 16 ++++++++++++++++ .../setuptools/_distutils/unixccompiler.pyi | 3 +++ 9 files changed, 25 insertions(+), 9 deletions(-) create mode 100644 stubs/setuptools/distutils/compilers/C/unix.pyi create mode 100644 stubs/setuptools/distutils/unixccompiler.pyi create mode 100644 stubs/setuptools/setuptools/_distutils/compilers/C/unix.pyi create mode 100644 stubs/setuptools/setuptools/_distutils/unixccompiler.pyi diff --git a/stubs/setuptools/@tests/stubtest_allowlist.txt b/stubs/setuptools/@tests/stubtest_allowlist.txt index ef8af309865d..20e9c75ba049 100644 --- a/stubs/setuptools/@tests/stubtest_allowlist.txt +++ b/stubs/setuptools/@tests/stubtest_allowlist.txt @@ -32,9 +32,7 @@ setuptools._distutils.dist.Distribution.get_obsoletes setuptools._distutils.archive_util.ARCHIVE_FORMATS setuptools._distutils.archive_util.check_archive_formats setuptools._distutils.cmd.Command.dump_options -setuptools._distutils.command.build_clib.show_compilers setuptools._distutils.command.build_ext.extension_name_re -setuptools._distutils.command.build_ext.show_compilers setuptools._distutils.command.build_scripts setuptools._distutils.command.check setuptools._distutils.command.clean @@ -75,6 +73,7 @@ setuptools._distutils.command.config setuptools._distutils.command.install_data setuptools._distutils.command.install_egg_info setuptools._distutils.command.install_headers +setuptools._distutils.compat.numpy setuptools._distutils.compat.py39 setuptools._distutils.core setuptools._distutils.cygwinccompiler @@ -84,7 +83,6 @@ setuptools._distutils.fancy_getopt setuptools._distutils.file_util setuptools._distutils.log setuptools._distutils.text_file -setuptools._distutils.unixccompiler setuptools._distutils.version 
setuptools._distutils.versionpredicate setuptools._distutils.zosccompiler diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index f8c0ecb7c936..b5b04e04ff19 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,4 +1,4 @@ -version = "~=76.0.0" +version = "~=77.0.2" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ diff --git a/stubs/setuptools/distutils/compilers/C/unix.pyi b/stubs/setuptools/distutils/compilers/C/unix.pyi new file mode 100644 index 000000000000..d4dbeff6b110 --- /dev/null +++ b/stubs/setuptools/distutils/compilers/C/unix.pyi @@ -0,0 +1 @@ +from setuptools._distutils.compilers.C.unix import * diff --git a/stubs/setuptools/distutils/unixccompiler.pyi b/stubs/setuptools/distutils/unixccompiler.pyi new file mode 100644 index 000000000000..fcbf9e199041 --- /dev/null +++ b/stubs/setuptools/distutils/unixccompiler.pyi @@ -0,0 +1 @@ +from setuptools._distutils.unixccompiler import * diff --git a/stubs/setuptools/setuptools/_distutils/_msvccompiler.pyi b/stubs/setuptools/setuptools/_distutils/_msvccompiler.pyi index 34d9735b0614..8471ccab28fa 100644 --- a/stubs/setuptools/setuptools/_distutils/_msvccompiler.pyi +++ b/stubs/setuptools/setuptools/_distutils/_msvccompiler.pyi @@ -1,3 +1,5 @@ from .compilers.C import msvc +__all__ = ["MSVCCompiler"] + MSVCCompiler = msvc.Compiler diff --git a/stubs/setuptools/setuptools/_distutils/command/build.pyi b/stubs/setuptools/setuptools/_distutils/command/build.pyi index c8a8bca6e61e..90b51a8b8a0a 100644 --- a/stubs/setuptools/setuptools/_distutils/command/build.pyi +++ b/stubs/setuptools/setuptools/_distutils/command/build.pyi @@ -4,8 +4,6 @@ from typing import ClassVar from ..cmd import Command -def show_compilers() -> None: ... - class build(Command): description: ClassVar[str] user_options: ClassVar[list[tuple[str, str | None, str]]] diff --git a/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi b/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi index 2b419aa986d7..99d107c63217 100644 --- a/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi +++ b/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import ClassVar, Final from . import base @@ -6,8 +5,6 @@ from . import base PLAT_SPEC_TO_RUNTIME: Final[dict[str, str]] class Compiler(base.Compiler): - compiler_type: ClassVar[str] - executables: ClassVar[dict[str, Incomplete]] src_extensions: ClassVar[list[str]] res_extension: ClassVar[str] obj_extension: ClassVar[str] diff --git a/stubs/setuptools/setuptools/_distutils/compilers/C/unix.pyi b/stubs/setuptools/setuptools/_distutils/compilers/C/unix.pyi new file mode 100644 index 000000000000..2e78f191dd55 --- /dev/null +++ b/stubs/setuptools/setuptools/_distutils/compilers/C/unix.pyi @@ -0,0 +1,16 @@ +from typing import ClassVar + +from . import base + +class Compiler(base.Compiler): + src_extensions: ClassVar[list[str]] + obj_extension: ClassVar[str] + static_lib_extension: ClassVar[str] + shared_lib_extension: ClassVar[str] + dylib_lib_extension: ClassVar[str] + xcode_stub_lib_extension: ClassVar[str] + static_lib_format: ClassVar[str] + shared_lib_format: ClassVar[str] + dylib_lib_format: ClassVar[str] + xcode_stub_lib_format: ClassVar[str] + def runtime_library_dir_option(self, dir: str) -> str | list[str]: ... 
# type: ignore[override] diff --git a/stubs/setuptools/setuptools/_distutils/unixccompiler.pyi b/stubs/setuptools/setuptools/_distutils/unixccompiler.pyi new file mode 100644 index 000000000000..9cd30ad9a64e --- /dev/null +++ b/stubs/setuptools/setuptools/_distutils/unixccompiler.pyi @@ -0,0 +1,3 @@ +from .compilers.C import unix + +UnixCCompiler = unix.Compiler From 01849f1ced41e8fcd0d241b6c80fa238caa7af3a Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Thu, 27 Mar 2025 20:45:31 +0400 Subject: [PATCH 148/388] Complete stubs for `commonmark` (#13724) --- pyrightconfig.stricter.json | 1 - stubs/commonmark/@tests/stubtest_allowlist.txt | 2 ++ stubs/commonmark/METADATA.toml | 4 ---- stubs/commonmark/commonmark/entitytrans.pyi | 2 ++ stubs/commonmark/commonmark/inlines.pyi | 4 ++-- stubs/commonmark/commonmark/normalize_reference.pyi | 2 ++ 6 files changed, 8 insertions(+), 7 deletions(-) create mode 100644 stubs/commonmark/@tests/stubtest_allowlist.txt diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index a0e8bddda967..acb00ae85677 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -34,7 +34,6 @@ "stubs/cffi", "stubs/click-default-group", "stubs/click-web", - "stubs/commonmark", "stubs/corus", "stubs/dateparser", "stubs/defusedxml", diff --git a/stubs/commonmark/@tests/stubtest_allowlist.txt b/stubs/commonmark/@tests/stubtest_allowlist.txt new file mode 100644 index 000000000000..cc9f553f2f7e --- /dev/null +++ b/stubs/commonmark/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# Testing modules are not included in type stubs +commonmark.tests.* diff --git a/stubs/commonmark/METADATA.toml b/stubs/commonmark/METADATA.toml index e085ecf4529b..aa3a1922c3a1 100644 --- a/stubs/commonmark/METADATA.toml +++ b/stubs/commonmark/METADATA.toml @@ -1,6 +1,2 @@ version = "0.9.*" upstream_repository = "https://github.com/rtfd/commonmark.py" -partial_stub = true - -[tool.stubtest] -ignore_missing_stub = true diff --git a/stubs/commonmark/commonmark/entitytrans.pyi b/stubs/commonmark/commonmark/entitytrans.pyi index b8b0efae5fc8..da7067b2e8cc 100644 --- a/stubs/commonmark/commonmark/entitytrans.pyi +++ b/stubs/commonmark/commonmark/entitytrans.pyi @@ -1 +1,3 @@ def _unescape(s: str) -> str: ... + +__all__ = ["_unescape"] diff --git a/stubs/commonmark/commonmark/inlines.pyi b/stubs/commonmark/commonmark/inlines.pyi index 5202a01bdc97..ace221d82903 100644 --- a/stubs/commonmark/commonmark/inlines.pyi +++ b/stubs/commonmark/commonmark/inlines.pyi @@ -50,14 +50,14 @@ class InlineParser: def removeDelimiter(self, delim: dict[str, Any]) -> None: ... @staticmethod def removeDelimitersBetween(bottom: dict[str, Any], top: dict[str, Any]) -> None: ... - def processEmphasis(self, stack_bottom) -> None: ... + def processEmphasis(self, stack_bottom: dict[str, Any]) -> None: ... def parseLinkTitle(self) -> str | None: ... def parseLinkDestination(self) -> str | None: ... def parseLinkLabel(self) -> int: ... def parseOpenBracket(self, block: Node) -> Literal[True]: ... def parseBang(self, block: Node) -> Literal[True]: ... def parseCloseBracket(self, block: Node) -> Literal[True]: ... - def addBracket(self, node, index, image) -> None: ... + def addBracket(self, node: Node, index: int, image: bool | None) -> None: ... def removeBracket(self) -> None: ... def parseEntity(self, block: Node) -> bool: ... def parseString(self, block: Node) -> bool: ... 
diff --git a/stubs/commonmark/commonmark/normalize_reference.pyi b/stubs/commonmark/commonmark/normalize_reference.pyi index 0386213d006b..ae5fc6e0564e 100644 --- a/stubs/commonmark/commonmark/normalize_reference.pyi +++ b/stubs/commonmark/commonmark/normalize_reference.pyi @@ -1 +1,3 @@ def normalize_reference(string: str) -> str: ... + +__all__ = ["normalize_reference"] From 677c3854251a97aa78ecfe7b58cfc439911e0100 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 27 Mar 2025 17:46:51 +0100 Subject: [PATCH 149/388] Sample text for closing future stdlib PRs (#13697) --- MAINTAINERS.md | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index f36c28758f52..52e86aeaf8ea 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -77,7 +77,13 @@ blocker is, usually with a link to an open issue in another project. We want to maintain a welcoming atmosphere for contributors, so use a friendly message when closing the PR. Example message: - Thanks for contributing! I'm closing this PR for now, because it still - - after three months of inactivity. If you are still interested, please feel free to open - a new PR (or ping us to reopen this one). + Thanks for contributing! I'm closing this PR for now, because it still after three months of inactivity. If you are still interested, please feel free to open a new PR (or ping us to reopen this one). + +### Closing PRs for future standard library changes + +*See also the [guidelines in the CONTRIBUTING file](./CONTRIBUTING.md#standard-library-stubs).* + +When rejecting a PR for a change for a future Python version, use a message +like: + + Thanks for contributing! Unfortunately, [as outlined in our CONTRIBUTING document](https://github.com/python/typeshed/blob/main/CONTRIBUTING.md#standard-library-stubs) we only accept pull requests to the standard library for future Python versions after the first beta version has been released. This is in part to prevent churn in the stubs, and in part because the testing infrastructure for the future version is not yet in place. Please feel free to open a new PR when the first beta version has been released. Alternatively, if this PR is still relevant, you can leave a comment here to reopen it. From a8916025543d30a825fda979042dadc046018adc Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 27 Mar 2025 21:20:01 +0100 Subject: [PATCH 150/388] [requests] Remove Session.redirect_cache (#13723) --- stubs/requests/requests/sessions.pyi | 3 --- 1 file changed, 3 deletions(-) diff --git a/stubs/requests/requests/sessions.pyi b/stubs/requests/requests/sessions.pyi index 5427c5201755..61f68b914bcf 100644 --- a/stubs/requests/requests/sessions.pyi +++ b/stubs/requests/requests/sessions.pyi @@ -3,8 +3,6 @@ from collections.abc import Callable, Iterable, Mapping, MutableMapping from typing import Any, TypedDict from typing_extensions import Self, TypeAlias -from urllib3._collections import RecentlyUsedContainer - from . import adapters, auth as _auth, compat, cookies, exceptions, hooks, models, status_codes, utils from .models import Response from .structures import CaseInsensitiveDict as CaseInsensitiveDict @@ -132,7 +130,6 @@ class Session(SessionRedirectMixin): trust_env: bool cookies: RequestsCookieJar adapters: MutableMapping[str, adapters.BaseAdapter] - redirect_cache: RecentlyUsedContainer[Any, Any] def __init__(self) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... 
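Dropping redirect_cache means a type checker will now flag code that still reaches for that attribute, while ordinary Session usage keeps type checking as before. A brief sketch of the effect (illustration only; Session.get and the example URL are not part of this diff):

import requests

with requests.Session() as session:  # __enter__/__exit__ stay typed as above
    response = session.get("https://example.org")
    # session.redirect_cache        # would now be rejected: no such attribute on Session
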
From 0cf57a4461532306918c0a6480a7b90c6a7ca0d2 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 27 Mar 2025 21:39:34 +0100 Subject: [PATCH 151/388] Update dependency pyright to v1.1.398 (#13722) --- requirements-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 3c6eff3a4aad..9857125e9bfb 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,7 +1,7 @@ # Type checkers that we test our stubs against. These should always # be pinned to a specific version to make failure reproducible. mypy==1.15.0 -pyright==1.1.397 +pyright==1.1.398 # pytype can be installed on Windows, but requires building wheels, let's not do that on the CI pytype==2024.10.11; platform_system != "Windows" and python_version >= "3.10" and python_version < "3.13" From 134e49cb1e9e24379be367fadd1429e2ebbda400 Mon Sep 17 00:00:00 2001 From: Neil Mitchell Date: Fri, 28 Mar 2025 14:16:17 +0000 Subject: [PATCH 152/388] Make importlib context manager not swallow exceptions (#13733) --- stdlib/importlib/resources/__init__.pyi | 6 +++--- stdlib/importlib/resources/_common.pyi | 4 ++-- stdlib/importlib/resources/_functional.pyi | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/stdlib/importlib/resources/__init__.pyi b/stdlib/importlib/resources/__init__.pyi index f82df8c591fa..a30e6cdce5c6 100644 --- a/stdlib/importlib/resources/__init__.pyi +++ b/stdlib/importlib/resources/__init__.pyi @@ -4,7 +4,7 @@ from collections.abc import Iterator from contextlib import AbstractContextManager from pathlib import Path from types import ModuleType -from typing import Any, BinaryIO, TextIO +from typing import Any, BinaryIO, Literal, TextIO from typing_extensions import TypeAlias if sys.version_info >= (3, 11): @@ -51,14 +51,14 @@ else: def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: ... def read_binary(package: Package, resource: Resource) -> bytes: ... def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: ... - def path(package: Package, resource: Resource) -> AbstractContextManager[Path]: ... + def path(package: Package, resource: Resource) -> AbstractContextManager[Path, Literal[False]]: ... def is_resource(package: Package, name: str) -> bool: ... def contents(package: Package) -> Iterator[str]: ... if sys.version_info >= (3, 11): from importlib.resources._common import as_file as as_file elif sys.version_info >= (3, 9): - def as_file(path: Traversable) -> AbstractContextManager[Path]: ... + def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: ... if sys.version_info >= (3, 11): from importlib.resources._common import files as files diff --git a/stdlib/importlib/resources/_common.pyi b/stdlib/importlib/resources/_common.pyi index f1056f62ed6e..d6a9436544dc 100644 --- a/stdlib/importlib/resources/_common.pyi +++ b/stdlib/importlib/resources/_common.pyi @@ -7,7 +7,7 @@ if sys.version_info >= (3, 11): from contextlib import AbstractContextManager from importlib.abc import ResourceReader, Traversable from pathlib import Path - from typing import overload + from typing import Literal, overload from typing_extensions import TypeAlias, deprecated Package: TypeAlias = str | types.ModuleType @@ -39,4 +39,4 @@ if sys.version_info >= (3, 11): def get_package(package: Package) -> types.ModuleType: ... 
def from_package(package: types.ModuleType) -> Traversable: ... - def as_file(path: Traversable) -> AbstractContextManager[Path]: ... + def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: ... diff --git a/stdlib/importlib/resources/_functional.pyi b/stdlib/importlib/resources/_functional.pyi index 97e46bdf0a53..50f3405f9a00 100644 --- a/stdlib/importlib/resources/_functional.pyi +++ b/stdlib/importlib/resources/_functional.pyi @@ -8,7 +8,7 @@ if sys.version_info >= (3, 13): from importlib.resources._common import Anchor from io import TextIOWrapper from pathlib import Path - from typing import BinaryIO, overload + from typing import BinaryIO, Literal, overload from typing_extensions import Unpack def open_binary(anchor: Anchor, *path_names: StrPath) -> BinaryIO: ... @@ -25,6 +25,6 @@ if sys.version_info >= (3, 13): ) -> str: ... @overload def read_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> str: ... - def path(anchor: Anchor, *path_names: StrPath) -> AbstractContextManager[Path]: ... + def path(anchor: Anchor, *path_names: StrPath) -> AbstractContextManager[Path, Literal[False]]: ... def is_resource(anchor: Anchor, *path_names: StrPath) -> bool: ... def contents(anchor: Anchor, *path_names: StrPath) -> Iterator[str]: ... From f68d0ca6a115922b890a18959f6532a62fa84507 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 28 Mar 2025 11:15:08 -0400 Subject: [PATCH 153/388] Mark commonmark as no_longer_updated (#13726) --- stubs/commonmark/METADATA.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/stubs/commonmark/METADATA.toml b/stubs/commonmark/METADATA.toml index aa3a1922c3a1..d5b10a699d9e 100644 --- a/stubs/commonmark/METADATA.toml +++ b/stubs/commonmark/METADATA.toml @@ -1,2 +1,6 @@ version = "0.9.*" upstream_repository = "https://github.com/rtfd/commonmark.py" +no_longer_updated = true +extra_description = """\ +`commonmark` is deprecated in favor of [markdown-it-py](https://pypi.org/project/markdown-it-py/). +See [this issue](https://github.com/readthedocs/commonmark.py/issues/308) for background and discussion.""" From 367133e7c352484cd74cdbd7266d3cbda5106a1f Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 28 Mar 2025 14:11:50 -0400 Subject: [PATCH 154/388] Enable Ruff flake8-builtins (A) (#13729) --- lib/ts_utils/metadata.py | 2 +- pyproject.toml | 3 +++ tests/mypy_test.py | 4 ++-- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/lib/ts_utils/metadata.py b/lib/ts_utils/metadata.py index 793b3b7a6d1e..33948bdb8b36 100644 --- a/lib/ts_utils/metadata.py +++ b/lib/ts_utils/metadata.py @@ -330,7 +330,7 @@ class PackageDependencies(NamedTuple): @cache def get_pypi_name_to_typeshed_name_mapping() -> Mapping[str, str]: - return {read_metadata(dir.name).stub_distribution: dir.name for dir in STUBS_PATH.iterdir()} + return {read_metadata(stub_dir.name).stub_distribution: stub_dir.name for stub_dir in STUBS_PATH.iterdir()} @cache diff --git a/pyproject.toml b/pyproject.toml index 7adabb9eab66..3cfcddda7b5f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ exclude = ["**/test_cases/**/*.py"] # tell ruff not to flag these as e.g. "unused noqa comments" external = ["F821", "Y"] select = [ + "A", # flake8-builtins "ARG", # flake8-unused-arguments "B", # flake8-bugbear "D", # pydocstyle @@ -181,6 +182,8 @@ ignore = [ ### # Rules that are out of the control of stub authors: ### + # Names in stubs should match the implementation, even if it's ambiguous. 
+ "A", # flake8-builtins "F403", # `from . import *` used; unable to detect undefined names # Stubs can sometimes re-export entire modules. # Issues with using a star-imported name will be caught by type-checkers. diff --git a/tests/mypy_test.py b/tests/mypy_test.py index f292b47f4e6c..0fea2d56b0a7 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -610,13 +610,13 @@ def main() -> None: args = parser.parse_args(namespace=CommandLineArgs()) versions = args.python_version or SUPPORTED_VERSIONS platforms = args.platform or [sys.platform] - filter = args.filter or DIRECTORIES_TO_TEST + path_filter = args.filter or DIRECTORIES_TO_TEST exclude = args.exclude or [] summary = TestSummary() with tempfile.TemporaryDirectory() as td: td_path = Path(td) for version, platform in product(versions, platforms): - config = TestConfig(args.verbose, filter, exclude, version, platform) + config = TestConfig(args.verbose, path_filter, exclude, version, platform) version_summary = test_typeshed(args=config, tempdir=td_path) summary.merge(version_summary) From f19314b60fbb7cc5441843f4f0a914e9c282e78b Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 28 Mar 2025 22:24:44 +0100 Subject: [PATCH 155/388] Update setuptools to 78.1.0 (#13734) --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- stubs/setuptools/@tests/stubtest_allowlist.txt | 6 ------ stubs/setuptools/METADATA.toml | 2 +- stubs/setuptools/setuptools/_distutils/command/__init__.pyi | 4 ++-- stubs/setuptools/setuptools/_distutils/command/install.pyi | 5 +++-- stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi | 6 ++---- stubs/setuptools/setuptools/dist.pyi | 2 -- 6 files changed, 8 insertions(+), 17 deletions(-) diff --git a/stubs/setuptools/@tests/stubtest_allowlist.txt b/stubs/setuptools/@tests/stubtest_allowlist.txt index 20e9c75ba049..500a32862d49 100644 --- a/stubs/setuptools/@tests/stubtest_allowlist.txt +++ b/stubs/setuptools/@tests/stubtest_allowlist.txt @@ -33,11 +33,6 @@ setuptools._distutils.archive_util.ARCHIVE_FORMATS setuptools._distutils.archive_util.check_archive_formats setuptools._distutils.cmd.Command.dump_options setuptools._distutils.command.build_ext.extension_name_re -setuptools._distutils.command.build_scripts -setuptools._distutils.command.check -setuptools._distutils.command.clean -setuptools._distutils.command.install_data -setuptools._distutils.command.install_headers setuptools._distutils.command.install.HAS_USER_SITE setuptools._distutils.command.install.INSTALL_SCHEMES setuptools._distutils.command.install.SCHEME_KEYS @@ -70,7 +65,6 @@ setuptools._distutils.command.build_scripts setuptools._distutils.command.check setuptools._distutils.command.clean setuptools._distutils.command.config -setuptools._distutils.command.install_data setuptools._distutils.command.install_egg_info setuptools._distutils.command.install_headers setuptools._distutils.compat.numpy diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index b5b04e04ff19..d8a876935bfe 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,4 +1,4 @@ -version = "~=77.0.2" +version = "78.1.*" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ diff --git a/stubs/setuptools/setuptools/_distutils/command/__init__.pyi b/stubs/setuptools/setuptools/_distutils/command/__init__.pyi index adeb472a515f..f6b93d32846a 100644 --- 
a/stubs/setuptools/setuptools/_distutils/command/__init__.pyi +++ b/stubs/setuptools/setuptools/_distutils/command/__init__.pyi @@ -9,7 +9,7 @@ from . import ( # check as check, # clean as clean, install as install, - # install_data as install_data, + install_data as install_data, # install_headers as install_headers, install_lib as install_lib, install_scripts as install_scripts, @@ -30,7 +30,7 @@ __all__ = [ "install_lib", # "install_headers", "install_scripts", - # "install_data", + "install_data", "sdist", "bdist", # "bdist_dumb", diff --git a/stubs/setuptools/setuptools/_distutils/command/install.pyi b/stubs/setuptools/setuptools/_distutils/command/install.pyi index 059116834927..4ddd43564efa 100644 --- a/stubs/setuptools/setuptools/_distutils/command/install.pyi +++ b/stubs/setuptools/setuptools/_distutils/command/install.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete -from typing import ClassVar +from collections import ChainMap +from typing import Any, ClassVar from ..cmd import Command @@ -34,7 +35,7 @@ class install(Command): build_lib: Incomplete record: Incomplete def initialize_options(self) -> None: ... - config_vars: Incomplete + config_vars: ChainMap[str, Any] # Any: Same as sysconfig.get_config_vars install_libbase: Incomplete def finalize_options(self) -> None: ... def dump_dirs(self, msg) -> None: ... diff --git a/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi b/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi index 99d107c63217..44f5c85b8be3 100644 --- a/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi +++ b/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi @@ -10,10 +10,8 @@ class Compiler(base.Compiler): obj_extension: ClassVar[str] static_lib_extension: ClassVar[str] shared_lib_extension: ClassVar[str] - # This was accidentally removed upstream and should be back pretty soon. - # shared_lib_format: ClassVar[str] - # static_lib_format = shared_lib_format - static_lib_format: ClassVar[str] + shared_lib_format: ClassVar[str] + static_lib_format = shared_lib_format exe_extension: ClassVar[str] initialized: bool def initialize(self, plat_name: str | None = None) -> None: ... diff --git a/stubs/setuptools/setuptools/dist.pyi b/stubs/setuptools/setuptools/dist.pyi index 928c55c7131a..4cdbdec2a494 100644 --- a/stubs/setuptools/setuptools/dist.pyi +++ b/stubs/setuptools/setuptools/dist.pyi @@ -38,8 +38,6 @@ class Distribution(_Distribution): dependency_links: list[str] setup_requires: list[str] def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: ... - def warn_dash_deprecation(self, opt: str, section: str) -> str: ... - def make_option_lowercase(self, opt: str, section: str) -> str: ... def parse_config_files(self, filenames: Iterable[StrPath] | None = None, ignore_option_errors: bool = False) -> None: ... def fetch_build_eggs(self, requires: str | Iterable[str]): ... def get_egg_cache_dir(self) -> str: ... 
From 18049a37fe62e8fb96d67e707ca805debdb85d9d Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 28 Mar 2025 19:10:16 -0700 Subject: [PATCH 156/388] icalendar: update (#13737) Fixes #13735 --- stubs/icalendar/METADATA.toml | 2 +- stubs/icalendar/icalendar/timezone/tzid.pyi | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/stubs/icalendar/METADATA.toml b/stubs/icalendar/METADATA.toml index 2892a10d6e38..2a7b8178f541 100644 --- a/stubs/icalendar/METADATA.toml +++ b/stubs/icalendar/METADATA.toml @@ -1,4 +1,4 @@ -version = "~= 6.1.2" +version = "6.1.3" upstream_repository = "https://github.com/collective/icalendar" requires = ["types-python-dateutil", "types-pytz"] diff --git a/stubs/icalendar/icalendar/timezone/tzid.pyi b/stubs/icalendar/icalendar/timezone/tzid.pyi index c70bc41cf2e2..1e5b884c9c93 100644 --- a/stubs/icalendar/icalendar/timezone/tzid.pyi +++ b/stubs/icalendar/icalendar/timezone/tzid.pyi @@ -5,4 +5,3 @@ __all__ = ["tzid_from_tzinfo", "tzid_from_dt", "tzids_from_tzinfo"] def tzids_from_tzinfo(tzinfo: datetime.tzinfo | None) -> tuple[str, ...]: ... def tzid_from_tzinfo(tzinfo: datetime.tzinfo | None) -> str | None: ... def tzid_from_dt(dt: datetime.datetime) -> str | None: ... -def tzinfo2tzids(tzinfo: datetime.tzinfo | None) -> set[str]: ... From 007b84d38aa195bff0f6017cd984a674d7ec98ea Mon Sep 17 00:00:00 2001 From: Avasam Date: Sat, 29 Mar 2025 08:29:07 -0400 Subject: [PATCH 157/388] Remove stub commonmark (#13736) --- .../commonmark/@tests/stubtest_allowlist.txt | 2 - stubs/commonmark/METADATA.toml | 6 - stubs/commonmark/commonmark/__init__.pyi | 5 - stubs/commonmark/commonmark/blocks.pyi | 171 ------------------ stubs/commonmark/commonmark/cmark.pyi | 1 - stubs/commonmark/commonmark/common.pyi | 39 ---- stubs/commonmark/commonmark/dump.pyi | 7 - stubs/commonmark/commonmark/entitytrans.pyi | 3 - stubs/commonmark/commonmark/inlines.pyi | 68 ------- stubs/commonmark/commonmark/main.pyi | 3 - stubs/commonmark/commonmark/node.pyi | 53 ------ .../commonmark/normalize_reference.pyi | 3 - .../commonmark/commonmark/render/__init__.pyi | 0 stubs/commonmark/commonmark/render/html.pyi | 43 ----- .../commonmark/commonmark/render/renderer.pyi | 9 - stubs/commonmark/commonmark/render/rst.pyi | 25 --- 16 files changed, 438 deletions(-) delete mode 100644 stubs/commonmark/@tests/stubtest_allowlist.txt delete mode 100644 stubs/commonmark/METADATA.toml delete mode 100644 stubs/commonmark/commonmark/__init__.pyi delete mode 100644 stubs/commonmark/commonmark/blocks.pyi delete mode 100644 stubs/commonmark/commonmark/cmark.pyi delete mode 100644 stubs/commonmark/commonmark/common.pyi delete mode 100644 stubs/commonmark/commonmark/dump.pyi delete mode 100644 stubs/commonmark/commonmark/entitytrans.pyi delete mode 100644 stubs/commonmark/commonmark/inlines.pyi delete mode 100644 stubs/commonmark/commonmark/main.pyi delete mode 100644 stubs/commonmark/commonmark/node.pyi delete mode 100644 stubs/commonmark/commonmark/normalize_reference.pyi delete mode 100644 stubs/commonmark/commonmark/render/__init__.pyi delete mode 100644 stubs/commonmark/commonmark/render/html.pyi delete mode 100644 stubs/commonmark/commonmark/render/renderer.pyi delete mode 100644 stubs/commonmark/commonmark/render/rst.pyi diff --git a/stubs/commonmark/@tests/stubtest_allowlist.txt b/stubs/commonmark/@tests/stubtest_allowlist.txt deleted file mode 100644 index cc9f553f2f7e..000000000000 --- a/stubs/commonmark/@tests/stubtest_allowlist.txt +++ /dev/null @@ -1,2 
+0,0 @@ -# Testing modules are not included in type stubs -commonmark.tests.* diff --git a/stubs/commonmark/METADATA.toml b/stubs/commonmark/METADATA.toml deleted file mode 100644 index d5b10a699d9e..000000000000 --- a/stubs/commonmark/METADATA.toml +++ /dev/null @@ -1,6 +0,0 @@ -version = "0.9.*" -upstream_repository = "https://github.com/rtfd/commonmark.py" -no_longer_updated = true -extra_description = """\ -`commonmark` is deprecated in favor of [markdown-it-py](https://pypi.org/project/markdown-it-py/). -See [this issue](https://github.com/readthedocs/commonmark.py/issues/308) for background and discussion.""" diff --git a/stubs/commonmark/commonmark/__init__.pyi b/stubs/commonmark/commonmark/__init__.pyi deleted file mode 100644 index 9d6feae3d8d4..000000000000 --- a/stubs/commonmark/commonmark/__init__.pyi +++ /dev/null @@ -1,5 +0,0 @@ -from commonmark.blocks import Parser as Parser -from commonmark.dump import dumpAST as dumpAST, dumpJSON as dumpJSON -from commonmark.main import commonmark as commonmark -from commonmark.render.html import HtmlRenderer as HtmlRenderer -from commonmark.render.rst import ReStructuredTextRenderer as ReStructuredTextRenderer diff --git a/stubs/commonmark/commonmark/blocks.pyi b/stubs/commonmark/commonmark/blocks.pyi deleted file mode 100644 index a442b74c452e..000000000000 --- a/stubs/commonmark/commonmark/blocks.pyi +++ /dev/null @@ -1,171 +0,0 @@ -import re -from typing import Any, Final, Literal - -from .inlines import InlineParser -from .node import Node - -CODE_INDENT: Final[int] -reHtmlBlockOpen: Final[list[re.Pattern[str]]] -reHtmlBlockClose: Final[list[re.Pattern[str]]] -reThematicBreak: Final[re.Pattern[str]] -reMaybeSpecial: Final[re.Pattern[str]] -reNonSpace: Final[re.Pattern[str]] -reBulletListMarker: Final[re.Pattern[str]] -reOrderedListMarker: Final[re.Pattern[str]] -reATXHeadingMarker: Final[re.Pattern[str]] -reCodeFence: Final[re.Pattern[str]] -reClosingCodeFence: Final[re.Pattern[str]] -reSetextHeadingLine: Final[re.Pattern[str]] -reLineEnding: Final[re.Pattern[str]] - -def is_blank(s: str) -> bool: ... -def is_space_or_tab(s: str) -> bool: ... -def peek(ln: str, pos: int) -> str | None: ... -def ends_with_blank_line(block: Node) -> bool: ... -def parse_list_marker(parser: Parser, container: Node) -> dict[str, Any] | None: ... -def lists_match(list_data: dict[str, Any], item_data: dict[str, Any]) -> bool: ... - -class Block: - accepts_lines: bool | None - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> int | None: ... - @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> bool | None: ... - -class Document(Block): - accepts_lines: Literal[False] - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0]: ... - @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> bool: ... - -class List(Block): - accepts_lines: Literal[False] - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0]: ... - @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> bool: ... - -class BlockQuote(Block): - accepts_lines: Literal[False] - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1]: ... 
- @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> bool: ... - -class Item(Block): - accepts_lines: Literal[False] - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1]: ... - @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> bool: ... - -class Heading(Block): - accepts_lines: Literal[False] - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[1]: ... - @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> Literal[False]: ... - -class ThematicBreak(Block): - accepts_lines: Literal[False] - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[1]: ... - @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> Literal[False]: ... - -class CodeBlock(Block): - accepts_lines: Literal[True] - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1, 2]: ... - @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> Literal[False]: ... - -class HtmlBlock(Block): - accepts_lines: Literal[True] - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1]: ... - @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> Literal[False]: ... - -class Paragraph(Block): - accepts_lines: Literal[True] - @staticmethod - def continue_(parser: Parser | None = None, container: Node | None = None) -> Literal[0, 1]: ... - @staticmethod - def finalize(parser: Parser | None = None, block: Node | None = None) -> None: ... - @staticmethod - def can_contain(t: str) -> Literal[False]: ... - -class BlockStarts: - METHODS: list[str] - @staticmethod - def block_quote(parser: Parser, container: Node | None = None) -> Literal[0, 1]: ... - @staticmethod - def atx_heading(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... - @staticmethod - def fenced_code_block(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... - @staticmethod - def html_block(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... - @staticmethod - def setext_heading(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... - @staticmethod - def thematic_break(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... - @staticmethod - def list_item(parser: Parser, container: Node | None = None) -> Literal[0, 1]: ... - @staticmethod - def indented_code_block(parser: Parser, container: Node | None = None) -> Literal[0, 2]: ... 
- -class Parser: - doc: Node - block_starts: BlockStarts - tip: Node - oldtip: Node - current_line: str - line_number: int - offset: int - column: int - next_nonspace: int - next_nonspace_column: int - indent: int - indented: bool - blank: bool - partially_consumed_tab: bool - all_closed: bool - last_matched_container: Node - refmap: dict[str, Any] - last_line_length: int - inline_parser: InlineParser - options: dict[str, Any] - blocks: dict[str, Block] - def __init__(self, options: dict[str, Any] = {}) -> None: ... - def add_line(self) -> None: ... - def add_child(self, tag: str, offset: int) -> Node: ... - def close_unmatched_blocks(self) -> None: ... - def find_next_nonspace(self) -> None: ... - def advance_next_nonspace(self) -> None: ... - def advance_offset(self, count: int, columns: bool) -> None: ... - def incorporate_line(self, ln: str) -> None: ... - def finalize(self, block: Node, line_number: int) -> None: ... - def process_inlines(self, block: Node) -> None: ... - def parse(self, my_input: str) -> Node: ... - -CAMEL_RE: Final[re.Pattern[str]] diff --git a/stubs/commonmark/commonmark/cmark.pyi b/stubs/commonmark/commonmark/cmark.pyi deleted file mode 100644 index 7e7363e797f3..000000000000 --- a/stubs/commonmark/commonmark/cmark.pyi +++ /dev/null @@ -1 +0,0 @@ -def main() -> None: ... diff --git a/stubs/commonmark/commonmark/common.pyi b/stubs/commonmark/commonmark/common.pyi deleted file mode 100644 index e6dda39aef8f..000000000000 --- a/stubs/commonmark/commonmark/common.pyi +++ /dev/null @@ -1,39 +0,0 @@ -import html -import re -from typing import AnyStr, Final, Literal, overload - -HTMLunescape = html.unescape -ENTITY: Final[str] -TAGNAME: Final[str] -ATTRIBUTENAME: Final[str] -UNQUOTEDVALUE: Final[str] -SINGLEQUOTEDVALUE: Final[str] -DOUBLEQUOTEDVALUE: Final[str] -ATTRIBUTEVALUE: Final[str] -ATTRIBUTEVALUESPEC: Final[str] -ATTRIBUTE: Final[str] -OPENTAG: Final[str] -CLOSETAG: Final[str] -HTMLCOMMENT: Final[str] -PROCESSINGINSTRUCTION: Final[str] -DECLARATION: Final[str] -CDATA: Final[str] -HTMLTAG: Final[str] -reHtmlTag: Final[re.Pattern[str]] -reBackslashOrAmp: Final[re.Pattern[str]] -ESCAPABLE: Final[str] -reEntityOrEscapedChar: Final[re.Pattern[str]] -XMLSPECIAL: Final[str] -reXmlSpecial: Final[re.Pattern[str]] - -def unescape_char(s: AnyStr) -> AnyStr: ... -def unescape_string(s: str) -> str: ... -def normalize_uri(uri: str) -> str: ... - -UNSAFE_MAP: Final[dict[str, str]] - -def replace_unsafe_char(s: str) -> str: ... -@overload -def escape_xml(s: None) -> Literal[""]: ... -@overload -def escape_xml(s: str) -> str: ... diff --git a/stubs/commonmark/commonmark/dump.pyi b/stubs/commonmark/commonmark/dump.pyi deleted file mode 100644 index d6dd0350b16f..000000000000 --- a/stubs/commonmark/commonmark/dump.pyi +++ /dev/null @@ -1,7 +0,0 @@ -from typing import Any - -from .node import Node - -def prepare(obj: Node, topnode: bool = ...) -> list[dict[str, Any]]: ... -def dumpJSON(obj: Node) -> str: ... -def dumpAST(obj: Node, ind: int = ..., topnode: bool = ...) -> None: ... diff --git a/stubs/commonmark/commonmark/entitytrans.pyi b/stubs/commonmark/commonmark/entitytrans.pyi deleted file mode 100644 index da7067b2e8cc..000000000000 --- a/stubs/commonmark/commonmark/entitytrans.pyi +++ /dev/null @@ -1,3 +0,0 @@ -def _unescape(s: str) -> str: ... 
- -__all__ = ["_unescape"] diff --git a/stubs/commonmark/commonmark/inlines.pyi b/stubs/commonmark/commonmark/inlines.pyi deleted file mode 100644 index ace221d82903..000000000000 --- a/stubs/commonmark/commonmark/inlines.pyi +++ /dev/null @@ -1,68 +0,0 @@ -import html -import re -from typing import Any, Final, Literal - -from .node import Node - -HTMLunescape = html.unescape -ESCAPED_CHAR: Final[str] -rePunctuation: Final[re.Pattern[str]] -reLinkTitle: Final[re.Pattern[str]] -reLinkDestinationBraces: Final[re.Pattern[str]] -reEscapable: Final[re.Pattern[str]] -reEntityHere: Final[re.Pattern[str]] -reTicks: Final[re.Pattern[str]] -reTicksHere: Final[re.Pattern[str]] -reEllipses: Final[re.Pattern[str]] -reDash: Final[re.Pattern[str]] -reEmailAutolink: Final[re.Pattern[str]] -reAutolink: Final[re.Pattern[str]] -reSpnl: Final[re.Pattern[str]] -reWhitespaceChar: Final[re.Pattern[str]] -reWhitespace: Final[re.Pattern[str]] -reUnicodeWhitespaceChar: Final[re.Pattern[str]] -reFinalSpace: Final[re.Pattern[str]] -reInitialSpace: Final[re.Pattern[str]] -reSpaceAtEndOfLine: Final[re.Pattern[str]] -reLinkLabel: Final[re.Pattern[str]] -reMain: Final[re.Pattern[str]] - -def text(s: str) -> Node: ... -def smart_dashes(chars: str) -> str: ... - -class InlineParser: - subject: str - brackets: dict[str, Any] | None - pos: int - refmap: dict[str, Any] - options: dict[str, Any] - def __init__(self, options: dict[str, Any] = {}) -> None: ... - def match(self, regexString: str | re.Pattern[str]) -> str | None: ... - def peek(self) -> str | None: ... - def spnl(self) -> Literal[True]: ... - def parseBackticks(self, block: Node) -> bool: ... - def parseBackslash(self, block: Node) -> Literal[True]: ... - def parseAutolink(self, block: Node) -> bool: ... - def parseHtmlTag(self, block: Node) -> bool: ... - def scanDelims(self, c: str) -> dict[str, Any] | None: ... - delimiters: dict[str, Any] - def handleDelim(self, cc: str, block: Node) -> bool: ... - def removeDelimiter(self, delim: dict[str, Any]) -> None: ... - @staticmethod - def removeDelimitersBetween(bottom: dict[str, Any], top: dict[str, Any]) -> None: ... - def processEmphasis(self, stack_bottom: dict[str, Any]) -> None: ... - def parseLinkTitle(self) -> str | None: ... - def parseLinkDestination(self) -> str | None: ... - def parseLinkLabel(self) -> int: ... - def parseOpenBracket(self, block: Node) -> Literal[True]: ... - def parseBang(self, block: Node) -> Literal[True]: ... - def parseCloseBracket(self, block: Node) -> Literal[True]: ... - def addBracket(self, node: Node, index: int, image: bool | None) -> None: ... - def removeBracket(self) -> None: ... - def parseEntity(self, block: Node) -> bool: ... - def parseString(self, block: Node) -> bool: ... - def parseNewline(self, block: Node) -> Literal[True]: ... - def parseReference(self, s: str, refmap: dict[str, Any]) -> int: ... - def parseInline(self, block: Node) -> bool: ... - def parseInlines(self, block: Node) -> None: ... - parse = parseInlines diff --git a/stubs/commonmark/commonmark/main.pyi b/stubs/commonmark/commonmark/main.pyi deleted file mode 100644 index 34059e6d22cd..000000000000 --- a/stubs/commonmark/commonmark/main.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from typing import Literal - -def commonmark(text: str, format: Literal["html", "json", "ast", "rst"] = "html") -> str: ... 
diff --git a/stubs/commonmark/commonmark/node.pyi b/stubs/commonmark/commonmark/node.pyi deleted file mode 100644 index ffab8f5c98f1..000000000000 --- a/stubs/commonmark/commonmark/node.pyi +++ /dev/null @@ -1,53 +0,0 @@ -import re -from typing import Final -from typing_extensions import Self - -reContainer: Final[re.Pattern[str]] - -def is_container(node: Node) -> bool: ... - -class NodeWalker: - current: Node | None - root: Node - entering: bool - def __init__(self, root: Node) -> None: ... - def __next__(self) -> tuple[Node, bool]: ... - next = __next__ - def __iter__(self) -> Self: ... - def nxt(self) -> dict[str, Node | bool] | None: ... - def resume_at(self, node: Node, entering: bool) -> None: ... - -class Node: - t: str - parent: Node | None - first_child: Node | None - last_child: Node | None - prv: Node | None - nxt: Node | None - sourcepos: list[list[int]] | None - last_line_blank: bool - last_line_checked: bool - is_open: bool - string_content: str - literal: str | None - list_data: dict[str, str | int | bool | None] - info: str | None - destination: str | None - title: str | None - is_fenced: bool - fence_char: str | None - fence_length: int - fence_offset: int | None - level: int | None - on_enter: str | None - on_exit: str | None - def __init__(self, node_type: str, sourcepos: list[list[int]] | None) -> None: ... - def pretty(self) -> None: ... - def normalize(self) -> None: ... - def is_container(self) -> bool: ... - def append_child(self, child: Node) -> None: ... - def prepend_child(self, child: Node) -> None: ... - def unlink(self) -> None: ... - def insert_after(self, sibling: Node) -> None: ... - def insert_before(self, sibling: Node) -> None: ... - def walker(self) -> NodeWalker: ... diff --git a/stubs/commonmark/commonmark/normalize_reference.pyi b/stubs/commonmark/commonmark/normalize_reference.pyi deleted file mode 100644 index ae5fc6e0564e..000000000000 --- a/stubs/commonmark/commonmark/normalize_reference.pyi +++ /dev/null @@ -1,3 +0,0 @@ -def normalize_reference(string: str) -> str: ... - -__all__ = ["normalize_reference"] diff --git a/stubs/commonmark/commonmark/render/__init__.pyi b/stubs/commonmark/commonmark/render/__init__.pyi deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/stubs/commonmark/commonmark/render/html.pyi b/stubs/commonmark/commonmark/render/html.pyi deleted file mode 100644 index 618c0f5edd2e..000000000000 --- a/stubs/commonmark/commonmark/render/html.pyi +++ /dev/null @@ -1,43 +0,0 @@ -import re -from builtins import list as _list # conflicts with a method named "list" -from typing import Any, Final, Literal, overload - -from commonmark.node import Node -from commonmark.render.renderer import Renderer - -reUnsafeProtocol: Final[re.Pattern[str]] -reSafeDataProtocol: Final[re.Pattern[str]] - -def potentially_unsafe(url: str) -> bool | None: ... - -class HtmlRenderer(Renderer): - disable_tags: int - last_out: str - options: dict[str, Any] - def __init__(self, options: dict[str, Any] = {}) -> None: ... - @overload - def escape(self, text: None) -> Literal[""]: ... - @overload - def escape(self, text: str) -> str: ... - def tag(self, name: str, attrs: _list[_list[str]] | None = None, selfclosing: bool | None = None) -> None: ... - def text(self, node: Node, entering: bool | None = None) -> None: ... - def softbreak(self, node: Node | None = None, entering: bool | None = None) -> None: ... - def linebreak(self, node: Node | None = None, entering: bool | None = None) -> None: ... 
- def link(self, node: Node, entering: bool | None) -> None: ... - def image(self, node: Node, entering: bool | None) -> None: ... - def emph(self, node: Node, entering: bool | None) -> None: ... - def strong(self, node: Node, entering: bool | None) -> None: ... - def paragraph(self, node: Node, entering: bool | None) -> None: ... - def heading(self, node: Node, entering: bool | None) -> None: ... - def code(self, node: Node, entering: bool | None) -> None: ... - def code_block(self, node: Node, entering: bool | None) -> None: ... - def thematic_break(self, node: Node, entering: bool | None) -> None: ... - def block_quote(self, node: Node, entering: bool | None) -> None: ... - def list(self, node: Node, entering: bool | None) -> None: ... - def item(self, node: Node, entering: bool | None) -> None: ... - def html_inline(self, node: Node, entering: bool | None) -> None: ... - def html_block(self, node: Node, entering: bool | None) -> None: ... - def custom_inline(self, node: Node, entering: bool | None) -> None: ... - def custom_block(self, node: Node, entering: bool | None) -> None: ... - def out(self, s: str | None) -> None: ... - def attrs(self, node: Node) -> _list[_list[str]]: ... diff --git a/stubs/commonmark/commonmark/render/renderer.pyi b/stubs/commonmark/commonmark/render/renderer.pyi deleted file mode 100644 index db1ec57a4573..000000000000 --- a/stubs/commonmark/commonmark/render/renderer.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from commonmark.node import Node - -class Renderer: - buf: str - last_out: str - def render(self, ast: Node) -> str: ... - def lit(self, s: str) -> None: ... - def cr(self) -> None: ... - def out(self, s: str) -> None: ... diff --git a/stubs/commonmark/commonmark/render/rst.pyi b/stubs/commonmark/commonmark/render/rst.pyi deleted file mode 100644 index fd181c8779b5..000000000000 --- a/stubs/commonmark/commonmark/render/rst.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from commonmark.node import Node -from commonmark.render.renderer import Renderer - -class ReStructuredTextRenderer(Renderer): - indent_char: str - indent_length: int - def __init__(self, indent_char: str = " ") -> None: ... - def lit(self, s: str) -> None: ... - def cr(self) -> None: ... - def indent_lines(self, literal: str, indent_length: int = 4) -> str: ... - def document(self, node: Node | None, entering: bool | None) -> None: ... - def softbreak(self, node: Node | None, entering: bool | None) -> None: ... - def linebreak(self, node: Node | None, entering: bool | None) -> None: ... - def text(self, node: Node, entering: bool | None) -> None: ... - def emph(self, node: Node | None, entering: bool | None) -> None: ... - def strong(self, node: Node | None, entering: bool | None) -> None: ... - def paragraph(self, node: Node, entering: bool | None) -> None: ... - def link(self, node: Node | None, entering: bool | None) -> None: ... - def image(self, node: Node, entering: bool | None) -> None: ... - def code(self, node: Node, entering: bool | None) -> None: ... - def code_block(self, node: Node, entering: bool | None) -> None: ... - def list(self, node: Node | None, entering: bool | None) -> None: ... - def item(self, node: Node, entering: bool | None) -> None: ... - def block_quote(self, node: Node | None, entering: bool | None) -> None: ... - def heading(self, node: Node, entering: bool | None) -> None: ... 
From 6b64b0de79e4851b3438a9aeee6164e4714ff7a8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 30 Mar 2025 04:02:32 +0200 Subject: [PATCH 158/388] [stubsabot] Bump flake8 to 7.2.* (#13743) --- stubs/flake8/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/flake8/METADATA.toml b/stubs/flake8/METADATA.toml index 8400594dc7ac..e327bf388ff5 100644 --- a/stubs/flake8/METADATA.toml +++ b/stubs/flake8/METADATA.toml @@ -1,3 +1,3 @@ -version = "7.1.*" +version = "7.2.*" upstream_repository = "https://github.com/pycqa/flake8" requires = ["types-pyflakes"] From 7b32b3c09225618d7f27ae6751591fe62c5aa263 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 31 Mar 2025 13:04:49 +0200 Subject: [PATCH 159/388] [pyflakes] Update to 3.3.* (#13745) --- stubs/pyflakes/@tests/stubtest_allowlist.txt | 1 + stubs/pyflakes/METADATA.toml | 2 +- stubs/pyflakes/pyflakes/api.pyi | 8 +- stubs/pyflakes/pyflakes/checker.pyi | 56 +++++---- stubs/pyflakes/pyflakes/messages.pyi | 113 ++++++++++--------- 5 files changed, 92 insertions(+), 88 deletions(-) diff --git a/stubs/pyflakes/@tests/stubtest_allowlist.txt b/stubs/pyflakes/@tests/stubtest_allowlist.txt index 3ea3547712bd..c803fc98bd1b 100644 --- a/stubs/pyflakes/@tests/stubtest_allowlist.txt +++ b/stubs/pyflakes/@tests/stubtest_allowlist.txt @@ -22,4 +22,5 @@ pyflakes.messages.UndefinedLocal.message_args pyflakes.messages.UndefinedName.message_args pyflakes.messages.UnusedAnnotation.message_args pyflakes.messages.UnusedImport.message_args +pyflakes.messages.UnusedIndirectAssignment.message_args pyflakes.messages.UnusedVariable.message_args diff --git a/stubs/pyflakes/METADATA.toml b/stubs/pyflakes/METADATA.toml index 0974abae9341..c9441665253a 100644 --- a/stubs/pyflakes/METADATA.toml +++ b/stubs/pyflakes/METADATA.toml @@ -1,4 +1,4 @@ -version = "3.2.*" +version = "3.3.*" upstream_repository = "https://github.com/PyCQA/pyflakes" partial_stub = true diff --git a/stubs/pyflakes/pyflakes/api.pyi b/stubs/pyflakes/pyflakes/api.pyi index 3cf6893906eb..59dc4ca8affe 100644 --- a/stubs/pyflakes/pyflakes/api.pyi +++ b/stubs/pyflakes/pyflakes/api.pyi @@ -1,6 +1,6 @@ +from _typeshed import Incomplete from collections.abc import Iterable, Iterator, Sequence from re import Pattern -from typing import Any from pyflakes.reporter import Reporter @@ -11,6 +11,6 @@ PYTHON_SHEBANG_REGEX: Pattern[bytes] def check(codeString: str, filename: str, reporter: Reporter | None = None) -> int: ... def checkPath(filename, reporter: Reporter | None = None) -> int: ... def isPythonFile(filename) -> bool: ... -def iterSourceCode(paths: Iterable[Any]) -> Iterator[Any]: ... -def checkRecursive(paths: Iterable[Any], reporter: Reporter) -> int: ... -def main(prog: str | None = None, args: Sequence[Any] | None = None) -> None: ... +def iterSourceCode(paths: Iterable[Incomplete]) -> Iterator[Incomplete]: ... +def checkRecursive(paths: Iterable[Incomplete], reporter: Reporter) -> int: ... +def main(prog: str | None = None, args: Sequence[Incomplete] | None = None) -> None: ... 
diff --git a/stubs/pyflakes/pyflakes/checker.pyi b/stubs/pyflakes/pyflakes/checker.pyi index 0d60f10dcd35..51385f672324 100644 --- a/stubs/pyflakes/pyflakes/checker.pyi +++ b/stubs/pyflakes/pyflakes/checker.pyi @@ -1,17 +1,17 @@ import ast import sys +from _typeshed import Incomplete from collections.abc import Callable, Generator, Iterable, Iterator from contextlib import contextmanager from re import Pattern from typing import Any, ClassVar, Literal, TypeVar, overload -from typing_extensions import ParamSpec, TypeAlias +from typing_extensions import Never, ParamSpec, TypeAlias from pyflakes.messages import Message _AnyFunction: TypeAlias = Callable[..., Any] _F = TypeVar("_F", bound=_AnyFunction) _P = ParamSpec("_P") -_T = TypeVar("_T") PYPY: bool @@ -33,19 +33,15 @@ def parse_percent_format(s: str) -> tuple[_PercentFormat, ...]: ... class _FieldsOrder(dict[type[ast.AST], tuple[str, ...]]): def __missing__(self, node_class: type[ast.AST]) -> tuple[str, ...]: ... -def counter(items: Iterable[_T]) -> dict[_T, int]: ... - _OmitType: TypeAlias = str | tuple[str, ...] | None def iter_child_nodes(node: ast.AST, omit: _OmitType = None, _fields_order: _FieldsOrder = ...) -> Iterator[ast.AST]: ... @overload -def convert_to_value(item: ast.Str) -> str: ... # type: ignore[overload-overlap] -@overload -def convert_to_value(item: ast.Bytes) -> bytes: ... # type: ignore[overload-overlap] +def convert_to_value(item: ast.Constant) -> Any: ... # type: ignore[overload-overlap] # See ast.Constant.value for possible return types @overload -def convert_to_value(item: ast.Tuple) -> tuple[Any, ...]: ... # type: ignore[overload-overlap] +def convert_to_value(item: ast.Tuple) -> tuple[Any, ...]: ... # type: ignore[overload-overlap] # Tuple items depend on their ast type @overload -def convert_to_value(item: ast.Name | ast.NameConstant) -> Any: ... +def convert_to_value(item: ast.Name) -> VariableKey: ... # type: ignore[overload-overlap] @overload def convert_to_value(item: ast.AST) -> UnhandledKeyType: ... def is_notimplemented_name_node(node: object) -> bool: ... @@ -53,7 +49,7 @@ def is_notimplemented_name_node(node: object) -> bool: ... class Binding: name: str source: ast.AST | None - used: Literal[False] | tuple[Any, ast.AST] + used: Literal[False] | tuple[Incomplete, ast.AST] def __init__(self, name: str, source: ast.AST | None) -> None: ... def redefines(self, other: Binding) -> bool: ... @@ -72,7 +68,7 @@ class VariableKey: class Importation(Definition): fullName: str - redefined: list[Any] + redefined: list[Incomplete] def __init__(self, name: str, source: ast.AST | None, full_name: str | None = None) -> None: ... @property def source_statement(self) -> str: ... @@ -89,7 +85,7 @@ class StarImportation(Importation): def __init__(self, name: str, source: ast.AST) -> None: ... class FutureImportation(ImportationFrom): - used: tuple[Any, ast.AST] + used: tuple[Incomplete, ast.AST] def __init__(self, name: str, source: ast.AST, scope) -> None: ... class Argument(Binding): ... @@ -114,7 +110,7 @@ class FunctionScope(Scope): usesLocals: bool alwaysUsed: ClassVar[set[str]] globals: set[str] - returnValue: Any + returnValue: Incomplete isGenerator: bool def __init__(self) -> None: ... def unused_assignments(self) -> Iterator[tuple[str, Binding]]: ... 
@@ -154,34 +150,36 @@ if sys.version_info >= (3, 10): _MatchAs: TypeAlias = ast.MatchAs _MatchOr: TypeAlias = ast.MatchOr else: - _Match: TypeAlias = Any - _MatchCase: TypeAlias = Any - _MatchValue: TypeAlias = Any - _MatchSingleton: TypeAlias = Any - _MatchSequence: TypeAlias = Any - _MatchStar: TypeAlias = Any - _MatchMapping: TypeAlias = Any - _MatchClass: TypeAlias = Any - _MatchAs: TypeAlias = Any - _MatchOr: TypeAlias = Any + # The methods using these should never be called on Python < 3.10. + _Match: TypeAlias = Never + _MatchCase: TypeAlias = Never + _MatchValue: TypeAlias = Never + _MatchSingleton: TypeAlias = Never + _MatchSequence: TypeAlias = Never + _MatchStar: TypeAlias = Never + _MatchMapping: TypeAlias = Never + _MatchClass: TypeAlias = Never + _MatchAs: TypeAlias = Never + _MatchOr: TypeAlias = Never if sys.version_info >= (3, 12): _TypeVar: TypeAlias = ast.TypeVar _TypeAlias: TypeAlias = ast.TypeAlias else: - _TypeVar: TypeAlias = Any - _TypeAlias: TypeAlias = Any + # The methods using these should never be called on Python < 3.12. + _TypeVar: TypeAlias = Never + _TypeAlias: TypeAlias = Never class Checker: nodeDepth: int offset: tuple[int, int] | None builtIns: set[str] - deadScopes: list[Any] - messages: list[Any] + deadScopes: list[Incomplete] + messages: list[Incomplete] filename: str withDoctest: bool scopeStack: list[Scope] - exceptHandlers: list[Any] + exceptHandlers: list[Incomplete] root: ast.AST def __init__( self, @@ -189,7 +187,7 @@ class Checker: filename: str = "(none)", builtins: Iterable[str] | None = None, withDoctest: bool = False, - file_tokens: tuple[Any, ...] = (), + file_tokens: tuple[Incomplete, ...] = (), ) -> None: ... def deferFunction(self, callable: _AnyFunction) -> None: ... @property diff --git a/stubs/pyflakes/pyflakes/messages.pyi b/stubs/pyflakes/pyflakes/messages.pyi index 07371e5828b4..1338e037b354 100644 --- a/stubs/pyflakes/pyflakes/messages.pyi +++ b/stubs/pyflakes/pyflakes/messages.pyi @@ -1,83 +1,88 @@ import ast +from _typeshed import Incomplete from typing import Any, ClassVar class Message: message: ClassVar[str] - message_args: tuple[Any, ...] - filename: Any + message_args: tuple[Any, ...] # Tuple types differ between sub-classes. + filename: str lineno: int col: int - def __init__(self, filename, loc: ast.AST) -> None: ... + def __init__(self, filename: str, loc: ast.AST) -> None: ... class UnusedImport(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, name) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, name) -> None: ... class RedefinedWhileUnused(Message): - message_args: tuple[Any, int] - def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + message_args: tuple[Incomplete, int] + def __init__(self, filename: str, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... class ImportShadowedByLoopVar(Message): - message_args: tuple[Any, int] - def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + message_args: tuple[Incomplete, int] + def __init__(self, filename: str, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... class ImportStarNotPermitted(Message): - message_args: Any - def __init__(self, filename, loc, modname) -> None: ... + message_args: Incomplete + def __init__(self, filename: str, loc, modname) -> None: ... class ImportStarUsed(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, modname) -> None: ... 
+ message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, modname) -> None: ... class ImportStarUsage(Message): - message_args: tuple[Any, Any] - def __init__(self, filename, loc: ast.AST, name, from_list) -> None: ... + message_args: tuple[Incomplete, Incomplete] + def __init__(self, filename: str, loc: ast.AST, name, from_list) -> None: ... class UndefinedName(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, name) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, name) -> None: ... class DoctestSyntaxError(Message): message_args: tuple[()] - def __init__(self, filename, loc: ast.AST, position: tuple[int, int] | None = None) -> None: ... + def __init__(self, filename: str, loc: ast.AST, position: tuple[int, int] | None = None) -> None: ... class UndefinedExport(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, name) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, name) -> None: ... class UndefinedLocal(Message): default: ClassVar[str] builtin: ClassVar[str] - message_args: tuple[Any, int] - def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + message_args: tuple[Incomplete, int] + def __init__(self, filename: str, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... class DuplicateArgument(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, name) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, name) -> None: ... class MultiValueRepeatedKeyLiteral(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, key) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, key) -> None: ... class MultiValueRepeatedKeyVariable(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, key) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, key) -> None: ... class LateFutureImport(Message): message_args: tuple[()] - def __init__(self, filename, loc: ast.AST) -> None: ... + def __init__(self, filename: str, loc: ast.AST) -> None: ... class FutureFeatureNotDefined(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, name) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, name) -> None: ... class UnusedVariable(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, names) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, names) -> None: ... class UnusedAnnotation(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, names) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, names) -> None: ... + +class UnusedIndirectAssignment(Message): + message_args: tuple[str, str] + def __init__(self, filename: str, loc: ast.AST, name: str) -> None: ... class ReturnOutsideFunction(Message): ... class YieldOutsideFunction(Message): ... @@ -90,8 +95,8 @@ class IfTuple(Message): ... class AssertTuple(Message): ... class ForwardAnnotationSyntaxError(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, annotation) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, annotation) -> None: ... 
class RaiseNotImplemented(Message): ... class InvalidPrintSyntax(Message): ... @@ -99,44 +104,44 @@ class IsLiteral(Message): ... class FStringMissingPlaceholders(Message): ... class StringDotFormatExtraPositionalArguments(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, extra_positions) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, extra_positions) -> None: ... class StringDotFormatExtraNamedArguments(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, extra_keywords) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, extra_keywords) -> None: ... class StringDotFormatMissingArgument(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, missing_arguments) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, missing_arguments) -> None: ... class StringDotFormatMixingAutomatic(Message): ... class StringDotFormatInvalidFormat(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, error) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, error) -> None: ... class PercentFormatInvalidFormat(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, error) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, error) -> None: ... class PercentFormatMixedPositionalAndNamed(Message): ... class PercentFormatUnsupportedFormatCharacter(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, c) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, c) -> None: ... class PercentFormatPositionalCountMismatch(Message): message_args: tuple[int, int] - def __init__(self, filename, loc: ast.AST, n_placeholders: int, n_substitutions: int) -> None: ... + def __init__(self, filename: str, loc: ast.AST, n_placeholders: int, n_substitutions: int) -> None: ... class PercentFormatExtraNamedArguments(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, extra_keywords) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, extra_keywords) -> None: ... class PercentFormatMissingArgument(Message): - message_args: tuple[Any] - def __init__(self, filename, loc: ast.AST, missing_arguments) -> None: ... + message_args: tuple[Incomplete] + def __init__(self, filename: str, loc: ast.AST, missing_arguments) -> None: ... class PercentFormatExpectedMapping(Message): ... class PercentFormatExpectedSequence(Message): ... 
From 68c74050f714ae467174e06e775640715661830c Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 31 Mar 2025 15:28:11 +0400 Subject: [PATCH 160/388] Complete stubs for `pep8-naming` (#13741) --- .../pep8-naming/@tests/stubtest_allowlist.txt | 2 - stubs/pep8-naming/METADATA.toml | 4 - stubs/pep8-naming/pep8ext_naming.pyi | 150 ++++++++++++++++-- 3 files changed, 134 insertions(+), 22 deletions(-) delete mode 100644 stubs/pep8-naming/@tests/stubtest_allowlist.txt diff --git a/stubs/pep8-naming/@tests/stubtest_allowlist.txt b/stubs/pep8-naming/@tests/stubtest_allowlist.txt deleted file mode 100644 index 0cceda881edc..000000000000 --- a/stubs/pep8-naming/@tests/stubtest_allowlist.txt +++ /dev/null @@ -1,2 +0,0 @@ -pep8ext_naming.NamingChecker.__getattr__ -pep8ext_naming.NamingChecker.parse_options diff --git a/stubs/pep8-naming/METADATA.toml b/stubs/pep8-naming/METADATA.toml index 4946022fb103..a922d35ede9e 100644 --- a/stubs/pep8-naming/METADATA.toml +++ b/stubs/pep8-naming/METADATA.toml @@ -1,6 +1,2 @@ version = "0.14.*" upstream_repository = "https://github.com/PyCQA/pep8-naming" -partial_stub = true - -[tool.stubtest] -ignore_missing_stub = true diff --git a/stubs/pep8-naming/pep8ext_naming.pyi b/stubs/pep8-naming/pep8ext_naming.pyi index 010dba56620b..3977e3aa815a 100644 --- a/stubs/pep8-naming/pep8ext_naming.pyi +++ b/stubs/pep8-naming/pep8ext_naming.pyi @@ -1,30 +1,148 @@ +import argparse import ast -from _typeshed import Incomplete -from argparse import Namespace -from collections.abc import Generator, Iterable -from typing import Any +import optparse +from collections import deque +from collections.abc import Callable, Generator, Iterable, Sequence +from typing import Final, Literal +from typing_extensions import Self -__version__: str +__version__: Final[str] -CLASS_METHODS: frozenset[str] -METACLASS_BASES: frozenset[str] -METHOD_CONTAINER_NODES: set[ast.AST] +CLASS_METHODS: Final[frozenset[Literal["__new__", "__init_subclass__", "__class_getitem__"]]] +METACLASS_BASES: Final[frozenset[Literal["type", "ABCMeta"]]] +METHOD_CONTAINER_NODES: Final[set[ast.AST]] +FUNC_NODES: Final[tuple[type[ast.FunctionDef], type[ast.AsyncFunctionDef]]] + +class _ASTCheckMeta(type): + codes: tuple[str, ...] + all: list[BaseASTCheck] + def __init__(cls, class_name: str, bases: tuple[object, ...], namespace: Iterable[str]) -> None: ... + +class BaseASTCheck(metaclass=_ASTCheckMeta): + codes: tuple[str, ...] + all: list[BaseASTCheck] + def err(self, node: ast.AST, code: str, **kwargs: str) -> tuple[int, int, str, Self]: ... class NamingChecker: name: str version: str - visitors: Any - decorator_to_type: Any + visitors: Sequence[BaseASTCheck] + decorator_to_type: dict[str, Literal["classmethod", "staticmethod"]] ignore_names: frozenset[str] - parents: Any def __init__(self, tree: ast.AST, filename: str) -> None: ... @classmethod - def add_options(cls, parser: Any) -> None: ... + def add_options(cls, parser: optparse.OptionParser) -> None: ... @classmethod - def parse_options(cls, option: Namespace) -> None: ... - def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + def parse_options(cls, options: argparse.Namespace) -> None: ... + def run(self) -> Generator[tuple[int, int, str, Self]] | tuple[()]: ... + def visit_tree(self, node: ast.AST, parents: deque[ast.AST]) -> Generator[tuple[int, int, str, Self]]: ... + def visit_node(self, node: ast.AST, parents: Sequence[ast.AST]) -> Generator[tuple[int, int, str, Self]]: ... 
def tag_class_functions(self, cls_node: ast.ClassDef) -> None: ... def set_function_nodes_types(self, nodes: Iterable[ast.AST], ismetaclass: bool, late_decoration: dict[str, str]) -> None: ... - def __getattr__(self, name: str) -> Incomplete: ... # incomplete (other attributes are normally not accessed) + @classmethod + def find_decorator_name(cls, d: ast.Expr) -> str: ... + @staticmethod + def find_global_defs(func_def_node: ast.AST) -> None: ... + +class ClassNameCheck(BaseASTCheck): + codes: tuple[Literal["N801"], Literal["N818"]] + N801: str + N818: str + @classmethod + def get_classdef(cls, name: str, parents: Sequence[ast.AST]) -> ast.ClassDef | None: ... + @classmethod + def superclass_names(cls, name: str, parents: Sequence[ast.AST], _names: set[str] | None = None) -> set[str]: ... + def visit_classdef( + self, node: ast.ClassDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + ) -> Generator[tuple[int, int, str, Self]]: ... + +class FunctionNameCheck(BaseASTCheck): + codes: tuple[Literal["N802"], Literal["N807"]] + N802: str + N807: str + @staticmethod + def has_override_decorator(node: ast.FunctionDef | ast.AsyncFunctionDef) -> bool: ... + def visit_functiondef( + self, node: ast.FunctionDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_asyncfunctiondef( + self, node: ast.AsyncFunctionDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + ) -> Generator[tuple[int, int, str, Self]]: ... + +class FunctionArgNamesCheck(BaseASTCheck): + codes: tuple[Literal["N803"], Literal["N804"], Literal["N805"]] + N803: str + N804: str + N805: str + def visit_functiondef( + self, node: ast.FunctionDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_asyncfunctiondef( + self, node: ast.AsyncFunctionDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + ) -> Generator[tuple[int, int, str, Self]]: ... + +class ImportAsCheck(BaseASTCheck): + codes: tuple[Literal["N811"], Literal["N812"], Literal["N813"], Literal["N814"], Literal["N817"]] + N811: str + N812: str + N813: str + N814: str + N817: str + def visit_importfrom( + self, node: ast.ImportFrom, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_import( + self, node: ast.Import, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + ) -> Generator[tuple[int, int, str, Self]]: ... + +class VariablesCheck(BaseASTCheck): + codes: tuple[Literal["N806"], Literal["N815"], Literal["N816"]] + N806: str + N815: str + N816: str + @staticmethod + def is_namedtupe(node_value: ast.AST) -> bool: ... + def visit_assign( + self, node: ast.Assign, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_namedexpr( + self, node: ast.NamedExpr, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_annassign( + self, node: ast.AnnAssign, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_with( + self, node: ast.With, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_asyncwith( + self, node: ast.AsyncWith, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... 
+ def visit_for( + self, node: ast.For, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_asyncfor( + self, node: ast.AsyncFor, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_excepthandler( + self, node: ast.ExceptHandler, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_generatorexp( + self, node: ast.GeneratorExp, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_listcomp( + self, node: ast.ListComp, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_dictcomp( + self, node: ast.DictComp, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_setcomp( + self, node: ast.SetComp, parents: Sequence[ast.AST], ignore: Iterable[str] + ) -> Generator[tuple[int, int, str, Self]]: ... + @staticmethod + def global_variable_check(name: str) -> Literal["N816"] | None: ... + @staticmethod + def class_variable_check(name: str) -> Literal["N815"] | None: ... + @staticmethod + def function_variable_check(func: Callable[..., object], var_name: str) -> Literal["N806"] | None: ... -def __getattr__(name: str) -> Incomplete: ... # incomplete (other attributes are normally not accessed) +def is_mixed_case(name: str) -> bool: ... From c37dc1689a81221d82cf05394c6ab386ce015a68 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 31 Mar 2025 15:30:55 +0400 Subject: [PATCH 161/388] Improve `pika` (#13739) --- stubs/pika/pika/__init__.pyi | 4 + .../pika/pika/adapters/asyncio_connection.pyi | 11 +- stubs/pika/pika/adapters/base_connection.pyi | 3 +- .../pika/adapters/blocking_connection.pyi | 3 +- .../pika/pika/adapters/gevent_connection.pyi | 3 +- .../pika/pika/adapters/select_connection.pyi | 3 +- .../pika/pika/adapters/tornado_connection.pyi | 3 +- .../pika/pika/adapters/twisted_connection.pyi | 3 +- .../utils/selector_ioloop_adapter.pyi | 3 +- stubs/pika/pika/callback.pyi | 35 +- stubs/pika/pika/connection.pyi | 21 +- stubs/pika/pika/diagnostic_utils.pyi | 8 +- stubs/pika/pika/exceptions.pyi | 24 +- stubs/pika/pika/heartbeat.pyi | 4 +- stubs/pika/pika/spec.pyi | 670 +++++++++--------- stubs/pika/pika/tcp_socket_opts.pyi | 11 +- stubs/pika/pika/validators.pyi | 16 +- 17 files changed, 438 insertions(+), 387 deletions(-) diff --git a/stubs/pika/pika/__init__.pyi b/stubs/pika/pika/__init__.pyi index de731d9a6600..75e955d3b14f 100644 --- a/stubs/pika/pika/__init__.pyi +++ b/stubs/pika/pika/__init__.pyi @@ -1,3 +1,5 @@ +from typing import Final + from pika import adapters as adapters from pika.adapters import ( BaseConnection as BaseConnection, @@ -9,3 +11,5 @@ from pika.connection import ConnectionParameters as ConnectionParameters, SSLOpt from pika.credentials import PlainCredentials as PlainCredentials from pika.delivery_mode import DeliveryMode as DeliveryMode from pika.spec import BasicProperties as BasicProperties + +__version__: Final[str] diff --git a/stubs/pika/pika/adapters/asyncio_connection.pyi b/stubs/pika/pika/adapters/asyncio_connection.pyi index d6359083958f..802b06d80197 100644 --- a/stubs/pika/pika/adapters/asyncio_connection.pyi +++ b/stubs/pika/pika/adapters/asyncio_connection.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from asyncio import AbstractEventLoop from collections.abc import Callable from logging import Logger @@ 
-6,7 +5,7 @@ from typing_extensions import Self from ..connection import Parameters from .base_connection import BaseConnection -from .utils import io_services_utils, nbio_interface +from .utils import connection_workflow, io_services_utils, nbio_interface LOGGER: Logger @@ -22,7 +21,11 @@ class AsyncioConnection(BaseConnection): ) -> None: ... @classmethod def create_connection( - cls, connection_configs, on_done, custom_ioloop: AbstractEventLoop | None = None, workflow: Incomplete | None = None + cls, + connection_configs, + on_done, + custom_ioloop: AbstractEventLoop | None = None, + workflow: connection_workflow.AbstractAMQPConnectionWorkflow | None = None, ): ... class _AsyncioIOServicesAdapter( @@ -31,7 +34,7 @@ class _AsyncioIOServicesAdapter( nbio_interface.AbstractIOServices, nbio_interface.AbstractFileDescriptorServices, ): - def __init__(self, loop: Incomplete | None = None) -> None: ... + def __init__(self, loop: AbstractEventLoop | None = None) -> None: ... def get_native_ioloop(self): ... def close(self) -> None: ... def run(self) -> None: ... diff --git a/stubs/pika/pika/adapters/base_connection.pyi b/stubs/pika/pika/adapters/base_connection.pyi index 198327c11ebe..a9dc74b92579 100644 --- a/stubs/pika/pika/adapters/base_connection.pyi +++ b/stubs/pika/pika/adapters/base_connection.pyi @@ -1,12 +1,13 @@ import abc from _typeshed import Incomplete from collections.abc import Callable +from logging import Logger from typing_extensions import Self from ..adapters.utils import nbio_interface from ..connection import Connection -LOGGER: Incomplete +LOGGER: Logger class BaseConnection(Connection, metaclass=abc.ABCMeta): def __init__( diff --git a/stubs/pika/pika/adapters/blocking_connection.pyi b/stubs/pika/pika/adapters/blocking_connection.pyi index 1ca81beae156..af721b7863b7 100644 --- a/stubs/pika/pika/adapters/blocking_connection.pyi +++ b/stubs/pika/pika/adapters/blocking_connection.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete, Unused from collections.abc import Generator, Sequence +from logging import Logger from types import TracebackType from typing import NamedTuple from typing_extensions import Self @@ -9,7 +10,7 @@ from ..data import _ArgumentMapping from ..exchange_type import ExchangeType from ..spec import BasicProperties -LOGGER: Incomplete +LOGGER: Logger class _CallbackResult: def __init__(self, value_class: Incomplete | None = None) -> None: ... 
diff --git a/stubs/pika/pika/adapters/gevent_connection.pyi b/stubs/pika/pika/adapters/gevent_connection.pyi index 932bc328f644..0979dbfcdd16 100644 --- a/stubs/pika/pika/adapters/gevent_connection.pyi +++ b/stubs/pika/pika/adapters/gevent_connection.pyi @@ -1,10 +1,11 @@ from _typeshed import Incomplete +from logging import Logger from pika.adapters.base_connection import BaseConnection from pika.adapters.utils.nbio_interface import AbstractIOReference from pika.adapters.utils.selector_ioloop_adapter import AbstractSelectorIOLoop, SelectorIOServicesAdapter -LOGGER: Incomplete +LOGGER: Logger class GeventConnection(BaseConnection): def __init__( diff --git a/stubs/pika/pika/adapters/select_connection.pyi b/stubs/pika/pika/adapters/select_connection.pyi index cb8c8a8dd3d3..08c343e776c1 100644 --- a/stubs/pika/pika/adapters/select_connection.pyi +++ b/stubs/pika/pika/adapters/select_connection.pyi @@ -1,11 +1,12 @@ import abc from _typeshed import Incomplete +from logging import Logger import pika.compat from pika.adapters.base_connection import BaseConnection from pika.adapters.utils.selector_ioloop_adapter import AbstractSelectorIOLoop -LOGGER: Incomplete +LOGGER: Logger SELECT_TYPE: Incomplete class SelectConnection(BaseConnection): diff --git a/stubs/pika/pika/adapters/tornado_connection.pyi b/stubs/pika/pika/adapters/tornado_connection.pyi index 0e46bdf3aecb..9207ef355d0e 100644 --- a/stubs/pika/pika/adapters/tornado_connection.pyi +++ b/stubs/pika/pika/adapters/tornado_connection.pyi @@ -1,8 +1,9 @@ from _typeshed import Incomplete +from logging import Logger from pika.adapters import base_connection -LOGGER: Incomplete +LOGGER: Logger class TornadoConnection(base_connection.BaseConnection): def __init__( diff --git a/stubs/pika/pika/adapters/twisted_connection.pyi b/stubs/pika/pika/adapters/twisted_connection.pyi index f71637a8afb3..78bb62565ef4 100644 --- a/stubs/pika/pika/adapters/twisted_connection.pyi +++ b/stubs/pika/pika/adapters/twisted_connection.pyi @@ -2,6 +2,7 @@ # We don't want to force it as a dependency but that means we also can't test it with type-checkers given the current setup. from _typeshed import Incomplete +from logging import Logger from typing import Generic, NamedTuple, TypeVar import pika.connection @@ -17,7 +18,7 @@ from twisted.python.failure import Failure # type: ignore[import-not-found] # _T = TypeVar("_T") -LOGGER: Incomplete +LOGGER: Logger class ClosableDeferredQueue(DeferredQueue[_T], Generic[_T]): # pyright: ignore[reportUntypedBaseClass] # noqa: Y060 closed: Failure | BaseException | None diff --git a/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi b/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi index dea569613a74..5ecae98b6afc 100644 --- a/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi +++ b/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi @@ -1,9 +1,10 @@ import abc from _typeshed import Incomplete +from logging import Logger from pika.adapters.utils import io_services_utils, nbio_interface -LOGGER: Incomplete +LOGGER: Logger class AbstractSelectorIOLoop(metaclass=abc.ABCMeta): @property diff --git a/stubs/pika/pika/callback.pyi b/stubs/pika/pika/callback.pyi index d96f77c9f2ee..a92b13d97830 100644 --- a/stubs/pika/pika/callback.pyi +++ b/stubs/pika/pika/callback.pyi @@ -1,8 +1,13 @@ from _typeshed import Incomplete +from collections.abc import Callable +from logging import Logger +from typing import Literal -LOGGER: Incomplete +from pika import amqp_object, frame -def name_or_value(value): ... 
+LOGGER: Logger + +def name_or_value(value: amqp_object.AMQPObject | frame.Frame | int | str) -> str: ... def sanitize_prefix(function): ... def check_for_prefix_and_key(function): ... @@ -16,16 +21,22 @@ class CallbackManager: def __init__(self) -> None: ... def add( self, - prefix, - key, - callback, + prefix: str | int, + key: str | object, + callback: Callable[[Incomplete], Incomplete], one_shot: bool = True, - only_caller: Incomplete | None = None, + only_caller: object | None = None, arguments: Incomplete | None = None, - ): ... + ) -> tuple[str | int, str | object]: ... def clear(self) -> None: ... - def cleanup(self, prefix): ... - def pending(self, prefix, key): ... - def process(self, prefix, key, caller, *args, **keywords): ... - def remove(self, prefix, key, callback_value: Incomplete | None = None, arguments: Incomplete | None = None): ... - def remove_all(self, prefix, key) -> None: ... + def cleanup(self, prefix: str | int) -> bool: ... + def pending(self, prefix: str | int, key: str | object) -> int | None: ... + def process(self, prefix: str | int, key: str | object, caller, *args, **keywords) -> bool: ... + def remove( + self, + prefix: str | int, + key: str | object, + callback_value: Callable[[Incomplete], Incomplete] | None = None, + arguments: Incomplete | None = None, + ) -> Literal[True]: ... + def remove_all(self, prefix: str | int, key: str | object) -> None: ... diff --git a/stubs/pika/pika/connection.pyi b/stubs/pika/pika/connection.pyi index 9e620e25ca19..590ae33f7666 100644 --- a/stubs/pika/pika/connection.pyi +++ b/stubs/pika/pika/connection.pyi @@ -1,4 +1,5 @@ import abc +import ssl from _typeshed import Incomplete from collections.abc import Callable from logging import Logger @@ -18,13 +19,13 @@ LOGGER: Logger class Parameters: DEFAULT_USERNAME: str DEFAULT_PASSWORD: str - DEFAULT_BLOCKED_CONNECTION_TIMEOUT: Incomplete - DEFAULT_CHANNEL_MAX: Incomplete - DEFAULT_CLIENT_PROPERTIES: Incomplete + DEFAULT_BLOCKED_CONNECTION_TIMEOUT: None + DEFAULT_CHANNEL_MAX: int + DEFAULT_CLIENT_PROPERTIES: None DEFAULT_CREDENTIALS: Incomplete DEFAULT_CONNECTION_ATTEMPTS: int - DEFAULT_FRAME_MAX: Incomplete - DEFAULT_HEARTBEAT_TIMEOUT: Incomplete + DEFAULT_FRAME_MAX: int + DEFAULT_HEARTBEAT_TIMEOUT: None DEFAULT_HOST: str DEFAULT_LOCALE: str DEFAULT_PORT: int @@ -32,10 +33,10 @@ class Parameters: DEFAULT_SOCKET_TIMEOUT: float DEFAULT_STACK_TIMEOUT: float DEFAULT_SSL: bool - DEFAULT_SSL_OPTIONS: Incomplete + DEFAULT_SSL_OPTIONS: None DEFAULT_SSL_PORT: int DEFAULT_VIRTUAL_HOST: str - DEFAULT_TCP_OPTIONS: Incomplete + DEFAULT_TCP_OPTIONS: None def __init__(self) -> None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... @@ -129,9 +130,9 @@ class URLParameters(Parameters): def __init__(self, url: str) -> None: ... class SSLOptions: - context: Incomplete - server_hostname: Incomplete - def __init__(self, context, server_hostname: Incomplete | None = None) -> None: ... + context: ssl.SSLContext + server_hostname: str | None + def __init__(self, context: ssl.SSLContext, server_hostname: str | None = None) -> None: ... class Connection(AbstractBase, metaclass=abc.ABCMeta): ON_CONNECTION_CLOSED: Final[str] diff --git a/stubs/pika/pika/diagnostic_utils.pyi b/stubs/pika/pika/diagnostic_utils.pyi index 7c8d2279e3c2..7e05c48794cd 100644 --- a/stubs/pika/pika/diagnostic_utils.pyi +++ b/stubs/pika/pika/diagnostic_utils.pyi @@ -1 +1,7 @@ -def create_log_exception_decorator(logger): ... 
+from collections.abc import Callable +from logging import Logger +from typing import Any, TypeVar + +_F = TypeVar("_F", bound=Callable[..., Any]) + +def create_log_exception_decorator(logger: Logger) -> Callable[[_F], _F]: ... diff --git a/stubs/pika/pika/exceptions.pyi b/stubs/pika/pika/exceptions.pyi index 23bf3a0f4f71..5f77d4d67572 100644 --- a/stubs/pika/pika/exceptions.pyi +++ b/stubs/pika/pika/exceptions.pyi @@ -1,4 +1,6 @@ -from _typeshed import Incomplete +from collections.abc import Sequence + +from pika.adapters.blocking_connection import ReturnedMessage class AMQPError(Exception): ... class AMQPConnectionError(AMQPError): ... @@ -12,11 +14,11 @@ class NoFreeChannels(AMQPConnectionError): ... class ConnectionWrongStateError(AMQPConnectionError): ... class ConnectionClosed(AMQPConnectionError): - def __init__(self, reply_code, reply_text) -> None: ... + def __init__(self, reply_code: int, reply_text: str) -> None: ... @property - def reply_code(self): ... + def reply_code(self) -> int: ... @property - def reply_text(self): ... + def reply_text(self) -> str: ... class ConnectionClosedByBroker(ConnectionClosed): ... class ConnectionClosedByClient(ConnectionClosed): ... @@ -26,11 +28,11 @@ class AMQPChannelError(AMQPError): ... class ChannelWrongStateError(AMQPChannelError): ... class ChannelClosed(AMQPChannelError): - def __init__(self, reply_code, reply_text) -> None: ... + def __init__(self, reply_code: int, reply_text: str) -> None: ... @property - def reply_code(self): ... + def reply_code(self) -> int: ... @property - def reply_text(self): ... + def reply_text(self) -> str: ... class ChannelClosedByBroker(ChannelClosed): ... class ChannelClosedByClient(ChannelClosed): ... @@ -38,12 +40,12 @@ class DuplicateConsumerTag(AMQPChannelError): ... class ConsumerCancelled(AMQPChannelError): ... class UnroutableError(AMQPChannelError): - messages: Incomplete - def __init__(self, messages) -> None: ... + messages: Sequence[ReturnedMessage] + def __init__(self, messages: Sequence[ReturnedMessage]) -> None: ... class NackError(AMQPChannelError): - messages: Incomplete - def __init__(self, messages) -> None: ... + messages: Sequence[ReturnedMessage] + def __init__(self, messages: Sequence[ReturnedMessage]) -> None: ... class InvalidChannelNumber(AMQPError): ... class ProtocolSyntaxError(AMQPError): ... diff --git a/stubs/pika/pika/heartbeat.pyi b/stubs/pika/pika/heartbeat.pyi index 7d1d730974bc..757fb6246551 100644 --- a/stubs/pika/pika/heartbeat.pyi +++ b/stubs/pika/pika/heartbeat.pyi @@ -1,6 +1,6 @@ -from _typeshed import Incomplete +from logging import Logger -LOGGER: Incomplete +LOGGER: Logger class HeartbeatChecker: def __init__(self, connection, timeout) -> None: ... diff --git a/stubs/pika/pika/spec.pyi b/stubs/pika/pika/spec.pyi index caf0626be24c..d5f4d9383b64 100644 --- a/stubs/pika/pika/spec.pyi +++ b/stubs/pika/pika/spec.pyi @@ -1,147 +1,153 @@ import builtins from _typeshed import Incomplete -from typing import ClassVar, Literal +from collections.abc import Mapping +from datetime import datetime +from decimal import Decimal +from typing import ClassVar, Final, Literal from typing_extensions import Self, TypeAlias -from .amqp_object import Class, Method, Properties +from pika.amqp_object import Class, Method, Properties +from pika.delivery_mode import DeliveryMode # Ouch. Since str = bytes at runtime, we need a type alias for "str". 
_str: TypeAlias = builtins.str # noqa: Y042 +_Value: TypeAlias = _str | bytes | bool | int | Decimal | datetime | _ArgumentMapping | list[_Value] | None +_ArgumentMapping: TypeAlias = Mapping[_str, _Value] str = builtins.bytes -PROTOCOL_VERSION: Incomplete -PORT: int -ACCESS_REFUSED: int -CHANNEL_ERROR: int -COMMAND_INVALID: int -CONNECTION_FORCED: int -CONTENT_TOO_LARGE: int -FRAME_BODY: int -FRAME_END: int -FRAME_END_SIZE: int -FRAME_ERROR: int -FRAME_HEADER: int -FRAME_HEADER_SIZE: int -FRAME_HEARTBEAT: int -FRAME_MAX_SIZE: int -FRAME_METHOD: int -FRAME_MIN_SIZE: int -INTERNAL_ERROR: int -INVALID_PATH: int -NOT_ALLOWED: int -NOT_FOUND: int -NOT_IMPLEMENTED: int -NO_CONSUMERS: int -NO_ROUTE: int -PERSISTENT_DELIVERY_MODE: int -PRECONDITION_FAILED: int -REPLY_SUCCESS: int -RESOURCE_ERROR: int -RESOURCE_LOCKED: int -SYNTAX_ERROR: int -TRANSIENT_DELIVERY_MODE: int -UNEXPECTED_FRAME: int +PROTOCOL_VERSION: Final[tuple[int, int, int]] +PORT: Final[int] +ACCESS_REFUSED: Final[int] +CHANNEL_ERROR: Final[int] +COMMAND_INVALID: Final[int] +CONNECTION_FORCED: Final[int] +CONTENT_TOO_LARGE: Final[int] +FRAME_BODY: Final[int] +FRAME_END: Final[int] +FRAME_END_SIZE: Final[int] +FRAME_ERROR: Final[int] +FRAME_HEADER: Final[int] +FRAME_HEADER_SIZE: Final[int] +FRAME_HEARTBEAT: Final[int] +FRAME_MAX_SIZE: Final[int] +FRAME_METHOD: Final[int] +FRAME_MIN_SIZE: Final[int] +INTERNAL_ERROR: Final[int] +INVALID_PATH: Final[int] +NOT_ALLOWED: Final[int] +NOT_FOUND: Final[int] +NOT_IMPLEMENTED: Final[int] +NO_CONSUMERS: Final[int] +NO_ROUTE: Final[int] +PERSISTENT_DELIVERY_MODE: Final[int] +PRECONDITION_FAILED: Final[int] +REPLY_SUCCESS: Final[int] +RESOURCE_ERROR: Final[int] +RESOURCE_LOCKED: Final[int] +SYNTAX_ERROR: Final[int] +TRANSIENT_DELIVERY_MODE: Final[int] +UNEXPECTED_FRAME: Final[int] class Connection(Class): INDEX: ClassVar[int] class Start(Method): INDEX: ClassVar[int] - version_major: Incomplete - version_minor: Incomplete - server_properties: Incomplete - mechanisms: Incomplete - locales: Incomplete + version_major: int + version_minor: int + server_properties: _ArgumentMapping | None + mechanisms: _str + locales: _str def __init__( self, version_major: int = 0, version_minor: int = 9, - server_properties: Incomplete | None = None, + server_properties: _ArgumentMapping | None = None, mechanisms: _str = "PLAIN", locales: _str = "en_US", ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class StartOk(Method): INDEX: ClassVar[int] - client_properties: Incomplete - mechanism: Incomplete - response: Incomplete - locale: Incomplete + client_properties: _ArgumentMapping | None + mechanism: _str + response: _str | None + locale: _str def __init__( self, - client_properties: Incomplete | None = None, + client_properties: _ArgumentMapping | None = None, mechanism: _str = "PLAIN", - response: Incomplete | None = None, + response: _str | None = None, locale: _str = "en_US", ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Secure(Method): INDEX: ClassVar[int] - challenge: Incomplete - def __init__(self, challenge: Incomplete | None = None) -> None: ... 
+ challenge: _str | None + def __init__(self, challenge: _str | None = None) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class SecureOk(Method): INDEX: ClassVar[int] - response: Incomplete - def __init__(self, response: Incomplete | None = None) -> None: ... + response: _str + def __init__(self, response: _str | None = None) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Tune(Method): INDEX: ClassVar[int] - channel_max: Incomplete - frame_max: Incomplete - heartbeat: Incomplete + channel_max: int + frame_max: int + heartbeat: int def __init__(self, channel_max: int = 0, frame_max: int = 0, heartbeat: int = 0) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class TuneOk(Method): INDEX: ClassVar[int] - channel_max: Incomplete - frame_max: Incomplete - heartbeat: Incomplete + channel_max: int + frame_max: int + heartbeat: int def __init__(self, channel_max: int = 0, frame_max: int = 0, heartbeat: int = 0) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Open(Method): INDEX: ClassVar[int] - virtual_host: Incomplete - capabilities: Incomplete - insist: Incomplete + virtual_host: _str + capabilities: _str + insist: bool def __init__(self, virtual_host: _str = "/", capabilities: _str = "", insist: bool = False) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class OpenOk(Method): INDEX: ClassVar[int] - known_hosts: Incomplete + known_hosts: _str def __init__(self, known_hosts: _str = "") -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Close(Method): INDEX: ClassVar[int] @@ -157,92 +163,92 @@ class Connection(Class): method_id: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class CloseOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... 
+ def encode(self) -> list[bytes]: ... class Blocked(Method): INDEX: ClassVar[int] reason: Incomplete def __init__(self, reason: _str = "") -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Unblocked(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class UpdateSecret(Method): INDEX: ClassVar[int] new_secret: Incomplete reason: Incomplete + mechanisms: _str def __init__(self, new_secret, reason) -> None: ... @property - def synchronous(self): ... - mechanisms: Incomplete - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class UpdateSecretOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Channel(Class): INDEX: ClassVar[int] class Open(Method): INDEX: ClassVar[int] - out_of_band: Incomplete + out_of_band: _str def __init__(self, out_of_band: _str = "") -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class OpenOk(Method): INDEX: ClassVar[int] - channel_id: Incomplete + channel_id: _str def __init__(self, channel_id: _str = "") -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Flow(Method): INDEX: ClassVar[int] - active: Incomplete - def __init__(self, active: Incomplete | None = None) -> None: ... + active: bool | None + def __init__(self, active: bool | None = None) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class FlowOk(Method): INDEX: ClassVar[int] - active: Incomplete - def __init__(self, active: Incomplete | None = None) -> None: ... + active: bool | None + def __init__(self, active: bool | None = None) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Close(Method): INDEX: ClassVar[int] @@ -258,29 +264,29 @@ class Channel(Class): method_id: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... 
- def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class CloseOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Access(Class): INDEX: ClassVar[int] class Request(Method): INDEX: ClassVar[int] - realm: Incomplete - exclusive: Incomplete - passive: Incomplete - active: Incomplete - write: Incomplete - read: Incomplete + realm: _str + exclusive: bool + passive: bool + active: bool + write: bool + read: bool def __init__( self, realm: _str = "/data", @@ -291,18 +297,18 @@ class Access(Class): read: bool = True, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class RequestOk(Method): INDEX: ClassVar[int] ticket: Incomplete def __init__(self, ticket: int = 1) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Exchange(Class): INDEX: ClassVar[int] @@ -312,11 +318,11 @@ class Exchange(Class): ticket: Incomplete exchange: Incomplete type: Incomplete - passive: Incomplete - durable: Incomplete - auto_delete: Incomplete - internal: Incomplete - nowait: Incomplete + passive: bool + durable: bool + auto_delete: bool + internal: bool + nowait: bool arguments: Incomplete def __init__( self, @@ -331,9 +337,9 @@ class Exchange(Class): arguments: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class DeclareOk(Method): INDEX: ClassVar[int] @@ -348,22 +354,22 @@ class Exchange(Class): ticket: Incomplete exchange: Incomplete if_unused: Incomplete - nowait: Incomplete + nowait: bool def __init__( self, ticket: int = 0, exchange: Incomplete | None = None, if_unused: bool = False, nowait: bool = False ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class DeleteOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Bind(Method): INDEX: ClassVar[int] @@ -383,17 +389,17 @@ class Exchange(Class): arguments: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... 
+ def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class BindOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Unbind(Method): INDEX: ClassVar[int] @@ -401,7 +407,7 @@ class Exchange(Class): destination: Incomplete source: Incomplete routing_key: Incomplete - nowait: Incomplete + nowait: bool arguments: Incomplete def __init__( self, @@ -413,17 +419,17 @@ class Exchange(Class): arguments: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class UnbindOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Queue(Class): INDEX: ClassVar[int] @@ -432,11 +438,11 @@ class Queue(Class): INDEX: ClassVar[int] ticket: Incomplete queue: Incomplete - passive: Incomplete - durable: Incomplete - exclusive: Incomplete - auto_delete: Incomplete - nowait: Incomplete + passive: bool + durable: bool + exclusive: bool + auto_delete: bool + nowait: bool arguments: Incomplete def __init__( self, @@ -471,7 +477,7 @@ class Queue(Class): queue: Incomplete exchange: Incomplete routing_key: Incomplete - nowait: Incomplete + nowait: bool arguments: Incomplete def __init__( self, @@ -483,37 +489,37 @@ class Queue(Class): arguments: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class BindOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Purge(Method): INDEX: ClassVar[int] ticket: Incomplete queue: Incomplete - nowait: Incomplete + nowait: bool def __init__(self, ticket: int = 0, queue: _str = "", nowait: bool = False) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class PurgeOk(Method): INDEX: ClassVar[int] message_count: Incomplete def __init__(self, message_count: Incomplete | None = None) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... 
class Delete(Method): INDEX: ClassVar[int] @@ -521,23 +527,23 @@ class Queue(Class): queue: Incomplete if_unused: Incomplete if_empty: Incomplete - nowait: Incomplete + nowait: bool def __init__( self, ticket: int = 0, queue: _str = "", if_unused: bool = False, if_empty: bool = False, nowait: bool = False ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class DeleteOk(Method): INDEX: ClassVar[int] message_count: Incomplete def __init__(self, message_count: Incomplete | None = None) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Unbind(Method): INDEX: ClassVar[int] @@ -555,17 +561,17 @@ class Queue(Class): arguments: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class UnbindOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Basic(Class): INDEX: ClassVar[int] @@ -577,27 +583,27 @@ class Basic(Class): global_qos: Incomplete def __init__(self, prefetch_size: int = 0, prefetch_count: int = 0, global_qos: bool = False) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class QosOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Consume(Method): INDEX: ClassVar[int] ticket: Incomplete queue: Incomplete consumer_tag: Incomplete - no_local: Incomplete - no_ack: Incomplete - exclusive: Incomplete - nowait: Incomplete + no_local: bool + no_ack: bool + exclusive: bool + nowait: bool arguments: Incomplete def __init__( self, @@ -611,37 +617,37 @@ class Basic(Class): arguments: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class ConsumeOk(Method): INDEX: ClassVar[int] consumer_tag: Incomplete def __init__(self, consumer_tag: Incomplete | None = None) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... 
+ def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Cancel(Method): INDEX: ClassVar[int] consumer_tag: Incomplete - nowait: Incomplete + nowait: bool def __init__(self, consumer_tag: Incomplete | None = None, nowait: bool = False) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class CancelOk(Method): INDEX: ClassVar[int] consumer_tag: Incomplete def __init__(self, consumer_tag: Incomplete | None = None) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Publish(Method): INDEX: ClassVar[int] @@ -654,9 +660,9 @@ class Basic(Class): self, ticket: int = 0, exchange: _str = "", routing_key: _str = "", mandatory: bool = False, immediate: bool = False ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Return(Method): INDEX: ClassVar[int] @@ -672,9 +678,9 @@ class Basic(Class): routing_key: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Deliver(Method): INDEX: ClassVar[int] @@ -692,20 +698,20 @@ class Basic(Class): routing_key: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Get(Method): INDEX: ClassVar[int] ticket: Incomplete queue: Incomplete - no_ack: Incomplete + no_ack: bool def __init__(self, ticket: int = 0, queue: _str = "", no_ack: bool = False) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class GetOk(Method): INDEX: ClassVar[int] @@ -723,18 +729,18 @@ class Basic(Class): message_count: Incomplete | None = None, ) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class GetEmpty(Method): INDEX: ClassVar[int] cluster_id: Incomplete def __init__(self, cluster_id: _str = "") -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... 
class Ack(Method): INDEX: ClassVar[int] @@ -742,56 +748,56 @@ class Basic(Class): multiple: Incomplete def __init__(self, delivery_tag: int = 0, multiple: bool = False) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Reject(Method): INDEX: ClassVar[int] delivery_tag: Incomplete - requeue: Incomplete + requeue: bool def __init__(self, delivery_tag: Incomplete | None = None, requeue: bool = True) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class RecoverAsync(Method): INDEX: ClassVar[int] - requeue: Incomplete + requeue: bool def __init__(self, requeue: bool = False) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Recover(Method): INDEX: ClassVar[int] - requeue: Incomplete + requeue: bool def __init__(self, requeue: bool = False) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class RecoverOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Nack(Method): INDEX: ClassVar[int] delivery_tag: Incomplete multiple: Incomplete - requeue: Incomplete + requeue: bool def __init__(self, delivery_tag: int = 0, multiple: bool = False, requeue: bool = True) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Tx(Class): INDEX: ClassVar[int] @@ -800,122 +806,122 @@ class Tx(Class): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class SelectOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Commit(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... 
+ def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class CommitOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Rollback(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class RollbackOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class Confirm(Class): INDEX: ClassVar[int] class Select(Method): INDEX: ClassVar[int] - nowait: Incomplete + nowait: bool def __init__(self, nowait: bool = False) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... class SelectOk(Method): INDEX: ClassVar[int] def __init__(self) -> None: ... @property - def synchronous(self): ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... 
class BasicProperties(Properties): - CLASS: Incomplete + CLASS: ClassVar[type[Basic]] INDEX: ClassVar[int] - FLAG_CONTENT_TYPE: Incomplete - FLAG_CONTENT_ENCODING: Incomplete - FLAG_HEADERS: Incomplete - FLAG_DELIVERY_MODE: Incomplete - FLAG_PRIORITY: Incomplete - FLAG_CORRELATION_ID: Incomplete - FLAG_REPLY_TO: Incomplete - FLAG_EXPIRATION: Incomplete - FLAG_MESSAGE_ID: Incomplete - FLAG_TIMESTAMP: Incomplete - FLAG_TYPE: Incomplete - FLAG_USER_ID: Incomplete - FLAG_APP_ID: Incomplete - FLAG_CLUSTER_ID: Incomplete - content_type: Incomplete - content_encoding: Incomplete - headers: Incomplete - delivery_mode: Incomplete + FLAG_CONTENT_TYPE: ClassVar[int] + FLAG_CONTENT_ENCODING: ClassVar[int] + FLAG_HEADERS: ClassVar[int] + FLAG_DELIVERY_MODE: ClassVar[int] + FLAG_PRIORITY: ClassVar[int] + FLAG_CORRELATION_ID: ClassVar[int] + FLAG_REPLY_TO: ClassVar[int] + FLAG_EXPIRATION: ClassVar[int] + FLAG_MESSAGE_ID: ClassVar[int] + FLAG_TIMESTAMP: ClassVar[int] + FLAG_TYPE: ClassVar[int] + FLAG_USER_ID: ClassVar[int] + FLAG_APP_ID: ClassVar[int] + FLAG_CLUSTER_ID: ClassVar[int] + content_type: _str | None + content_encoding: _str | None + headers: _ArgumentMapping | None + delivery_mode: Literal[1, 2] | None priority: Incomplete - correlation_id: Incomplete - reply_to: Incomplete - expiration: Incomplete - message_id: Incomplete + correlation_id: _str | None + reply_to: _str | None + expiration: _str | None + message_id: _str | None timestamp: Incomplete - type: Incomplete - user_id: Incomplete - app_id: Incomplete - cluster_id: Incomplete + type: _str | None + user_id: _str | None + app_id: _str | None + cluster_id: _str | None def __init__( self, - content_type: Incomplete | None = None, - content_encoding: Incomplete | None = None, - headers: Incomplete | None = None, - delivery_mode: Incomplete | None = None, + content_type: _str | None = None, + content_encoding: _str | None = None, + headers: _ArgumentMapping | None = None, + delivery_mode: DeliveryMode | Literal[1, 2] | None = None, priority: Incomplete | None = None, - correlation_id: Incomplete | None = None, - reply_to: Incomplete | None = None, - expiration: Incomplete | None = None, - message_id: Incomplete | None = None, + correlation_id: _str | None = None, + reply_to: _str | None = None, + expiration: _str | None = None, + message_id: _str | None = None, timestamp: Incomplete | None = None, - type: Incomplete | None = None, - user_id: Incomplete | None = None, - app_id: Incomplete | None = None, - cluster_id: Incomplete | None = None, + type: _str | None = None, + user_id: _str | None = None, + app_id: _str | None = None, + cluster_id: _str | None = None, ) -> None: ... - def decode(self, encoded, offset: int = 0): ... - def encode(self): ... + def decode(self, encoded: bytes, offset: int = 0) -> Self: ... + def encode(self) -> list[bytes]: ... -methods: Incomplete -props: Incomplete +methods: Final[dict[int, type[Method]]] +props: Final[dict[int, type[BasicProperties]]] -def has_content(methodNumber): ... +def has_content(methodNumber: int) -> bool: ... diff --git a/stubs/pika/pika/tcp_socket_opts.pyi b/stubs/pika/pika/tcp_socket_opts.pyi index 81837440aeb9..7c15e0beb5e8 100644 --- a/stubs/pika/pika/tcp_socket_opts.pyi +++ b/stubs/pika/pika/tcp_socket_opts.pyi @@ -1,6 +1,9 @@ -from _typeshed import Incomplete +from _socket import SocketType +from logging import Logger -LOGGER: Incomplete +LOGGER: Logger -def socket_requires_keepalive(tcp_options): ... -def set_sock_opts(tcp_options, sock) -> None: ... 
+_SUPPORTED_TCP_OPTIONS: dict[str, int] + +def socket_requires_keepalive(tcp_options: dict[str, int]) -> bool: ... +def set_sock_opts(tcp_options: dict[str, int] | None, sock: SocketType) -> None: ... diff --git a/stubs/pika/pika/validators.pyi b/stubs/pika/pika/validators.pyi index d376e6f1ca82..b9540bd66179 100644 --- a/stubs/pika/pika/validators.pyi +++ b/stubs/pika/pika/validators.pyi @@ -1,4 +1,12 @@ -def require_string(value, value_name) -> None: ... -def require_callback(callback, callback_name: str = "callback") -> None: ... -def rpc_completion_callback(callback): ... -def zero_or_greater(name, value) -> None: ... +from collections.abc import Callable +from typing import Literal, overload + +def require_string(value: object, value_name: str) -> None: ... # raise TypeError if value is not string +def require_callback( + callback: object, callback_name: str = "callback" +) -> None: ... # raise TypeError if callback is not callable +@overload +def rpc_completion_callback(callback: None) -> Literal[True]: ... +@overload +def rpc_completion_callback(callback: Callable[..., object]) -> Literal[False]: ... +def zero_or_greater(name: str, value: str | float) -> None: ... From be40ca69c16a0b8a400148379dd4d5efe2dd6340 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 31 Mar 2025 18:45:47 +0400 Subject: [PATCH 162/388] Improve `oauthlib.common` (#13744) --- stubs/oauthlib/oauthlib/__init__.pyi | 9 +- stubs/oauthlib/oauthlib/common.pyi | 113 +++++++++++++---------- stubs/oauthlib/oauthlib/signals.pyi | 9 +- stubs/oauthlib/oauthlib/uri_validate.pyi | 83 +++++++++-------- 4 files changed, 118 insertions(+), 96 deletions(-) diff --git a/stubs/oauthlib/oauthlib/__init__.pyi b/stubs/oauthlib/oauthlib/__init__.pyi index 3440b7c13209..96a9263bbff5 100644 --- a/stubs/oauthlib/oauthlib/__init__.pyi +++ b/stubs/oauthlib/oauthlib/__init__.pyi @@ -1,2 +1,7 @@ -def set_debug(debug_val) -> None: ... -def get_debug(): ... +from typing import Final + +__author__: Final[str] +__version__: Final[str] + +def set_debug(debug_val: bool) -> None: ... +def get_debug() -> bool: ... diff --git a/stubs/oauthlib/oauthlib/common.pyi b/stubs/oauthlib/oauthlib/common.pyi index bd1dc509832d..f691b0e496f3 100644 --- a/stubs/oauthlib/oauthlib/common.pyi +++ b/stubs/oauthlib/oauthlib/common.pyi @@ -1,64 +1,81 @@ -from _typeshed import Incomplete -from typing import Any +import re +from _typeshed import Incomplete, SupportsLenAndGetItem +from collections.abc import Iterable, Mapping +from logging import Logger +from typing import Any, Final, Literal, TypeVar, overload +from typing_extensions import TypeAlias -UNICODE_ASCII_CHARACTER_SET: str -CLIENT_ID_CHARACTER_SET: str -SANITIZE_PATTERN: Any -INVALID_HEX_PATTERN: Any -always_safe: str -log: Any +_T = TypeVar("_T") +_V = TypeVar("_V") -def quote(s, safe: bytes = b"/"): ... -def unquote(s): ... -def urlencode(params): ... -def encode_params_utf8(params): ... -def decode_params_utf8(params): ... +_HTTPMethod: TypeAlias = Literal["CONNECT", "DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT", "TRACE"] -urlencoded: Any +UNICODE_ASCII_CHARACTER_SET: Final[str] +CLIENT_ID_CHARACTER_SET: Final[str] +SANITIZE_PATTERN: Final[re.Pattern[str]] +INVALID_HEX_PATTERN: Final[re.Pattern[str]] +always_safe: Final[str] +log: Logger -def urldecode(query): ... -def extract_params(raw): ... -def generate_nonce(): ... -def generate_timestamp(): ... -def generate_token(length: int = 30, chars=...): ... -def generate_signed_token(private_pem, request): ... 
+def quote(s: str | bytes, safe: bytes = b"/") -> str: ... +def unquote(s: str | bytes) -> str: ... +def urlencode(params: Iterable[tuple[str | bytes, str | bytes]]) -> str: ... +def encode_params_utf8(params: Iterable[tuple[str | bytes, str | bytes]]) -> list[tuple[bytes, bytes]]: ... +def decode_params_utf8(params: Iterable[tuple[str | bytes, str | bytes]]) -> list[tuple[str, str]]: ... + +urlencoded: Final[set[str]] + +def urldecode(query: str | bytes) -> list[tuple[str, str]]: ... +def extract_params(raw: str | bytes | dict[str, str] | Iterable[tuple[str, str]]) -> list[tuple[str, str]] | None: ... +def generate_nonce() -> str: ... +def generate_timestamp() -> str: ... +def generate_token(length: int = 30, chars: SupportsLenAndGetItem[str] = ...) -> str: ... +def generate_signed_token(private_pem: str, request: Request) -> str: ... def verify_signed_token(public_pem, token): ... -def generate_client_id(length: int = 30, chars=...): ... -def add_params_to_qs(query, params): ... -def add_params_to_uri(uri, params, fragment: bool = False): ... -def safe_string_equals(a, b): ... -def to_unicode(data, encoding: str = "UTF-8"): ... +def generate_client_id(length: int = 30, chars: SupportsLenAndGetItem[str] = ...) -> str: ... +def add_params_to_qs(query: str, params: dict[str, str] | Iterable[tuple[str, str]]) -> str: ... +def add_params_to_uri(uri: str, params: dict[str, str] | Iterable[tuple[str, str]], fragment: bool = False) -> str: ... +def safe_string_equals(a: str, b: str) -> bool: ... +@overload +def to_unicode(data: str | bytes, encoding: str = "UTF-8") -> str: ... +@overload +def to_unicode(data: Mapping[str, _V] | Mapping[bytes, _V], encoding: str = "UTF-8") -> dict[str, _V]: ... +@overload +def to_unicode(data: _T, encoding: str = "UTF-8") -> _T: ... -class CaseInsensitiveDict(dict[Any, Any]): - proxy: Any - def __init__(self, data) -> None: ... - def __contains__(self, k): ... - def __delitem__(self, k) -> None: ... - def __getitem__(self, k): ... - def get(self, k, default: Incomplete | None = None): ... - def __setitem__(self, k, v) -> None: ... +class CaseInsensitiveDict(dict[str, Incomplete]): + proxy: dict[str, str] + def __init__(self, data: dict[str, Incomplete]) -> None: ... + @overload + def __contains__(self, k: str) -> bool: ... + @overload + def __contains__(self, k: object) -> bool: ... + def __delitem__(self, k: str) -> None: ... + def __getitem__(self, k: str): ... + def get(self, k: str, default: Incomplete | None = None) -> Incomplete | None: ... + def __setitem__(self, k: str, v) -> None: ... def update(self, *args, **kwargs) -> None: ... class Request: - uri: Any - http_method: Any - headers: Any - body: Any - decoded_body: Any - oauth_params: Any - validator_log: Any + uri: str + http_method: _HTTPMethod + headers: CaseInsensitiveDict + body: str | dict[str, str] | list[tuple[str, str]] | None + decoded_body: list[tuple[str, str]] | None + oauth_params: list[str] + validator_log: dict[str, Any] # value type depends on the key def __init__( self, - uri, - http_method: str = "GET", - body: Incomplete | None = None, - headers: Incomplete | None = None, + uri: str, + http_method: _HTTPMethod = "GET", + body: str | dict[str, str] | list[tuple[str, str]] | None = None, + headers: Mapping[str, str] | None = None, encoding: str = "utf-8", ): ... - def __getattr__(self, name: str): ... + def __getattr__(self, name: str) -> str | None: ... # or raises AttributeError if attribute is not found @property - def uri_query(self): ... + def uri_query(self) -> str: ... 
@property - def uri_query_params(self): ... + def uri_query_params(self) -> list[tuple[str, str]]: ... @property - def duplicate_params(self): ... + def duplicate_params(self) -> list[str]: ... diff --git a/stubs/oauthlib/oauthlib/signals.pyi b/stubs/oauthlib/oauthlib/signals.pyi index 33096927c349..29021251576a 100644 --- a/stubs/oauthlib/oauthlib/signals.pyi +++ b/stubs/oauthlib/oauthlib/signals.pyi @@ -1,15 +1,14 @@ -from _typeshed import Incomplete from typing import Any signals_available: bool class Namespace: - def signal(self, name, doc: Incomplete | None = None): ... + def signal(self, name: str, doc: str | None = None) -> _FakeSignal: ... class _FakeSignal: - name: Any + name: str __doc__: Any - def __init__(self, name, doc: Incomplete | None = None) -> None: ... + def __init__(self, name: str, doc: str | None = None) -> None: ... send: Any connect: Any disconnect: Any @@ -18,4 +17,4 @@ class _FakeSignal: temporarily_connected_to: Any connected_to: Any -scope_changed: Any +scope_changed: _FakeSignal diff --git a/stubs/oauthlib/oauthlib/uri_validate.pyi b/stubs/oauthlib/oauthlib/uri_validate.pyi index de635c865f24..f4f90ea4c9f1 100644 --- a/stubs/oauthlib/oauthlib/uri_validate.pyi +++ b/stubs/oauthlib/oauthlib/uri_validate.pyi @@ -1,43 +1,44 @@ -from typing import Any +import re +from typing import Final -DIGIT: str -ALPHA: str -HEXDIG: str -pct_encoded: Any -unreserved: Any -gen_delims: str -sub_delims: str -pchar: Any -reserved: Any -scheme: Any -dec_octet: Any -IPv4address: Any -IPv6address: str -IPvFuture: Any -IP_literal: Any -reg_name: Any -userinfo: Any -host: Any -port: Any -authority: Any -segment: Any -segment_nz: Any -segment_nz_nc: Any -path_abempty: Any -path_absolute: Any -path_noscheme: Any -path_rootless: Any -path_empty: str -path: Any -query: Any -fragment: Any -hier_part: Any -relative_part: Any -relative_ref: Any -URI: Any -URI_reference: Any -absolute_URI: Any +DIGIT: Final[str] +ALPHA: Final[str] +HEXDIG: Final[str] +pct_encoded: Final[str] +unreserved: Final[str] +gen_delims: Final[str] +sub_delims: Final[str] +pchar: Final[str] +reserved: Final[str] +scheme: Final[str] +dec_octet: Final[str] +IPv4address: Final[str] +IPv6address: Final[str] +IPvFuture: Final[str] +IP_literal: Final[str] +reg_name: Final[str] +userinfo: Final[str] +host: Final[str] +port: Final[str] +authority: Final[str] +segment: Final[str] +segment_nz: Final[str] +segment_nz_nc: Final[str] +path_abempty: Final[str] +path_absolute: Final[str] +path_noscheme: Final[str] +path_rootless: Final[str] +path_empty: Final[str] +path: Final[str] +query: Final[str] +fragment: Final[str] +hier_part: Final[str] +relative_part: Final[str] +relative_ref: Final[str] +URI: Final[str] +URI_reference: Final[str] +absolute_URI: Final[str] -def is_uri(uri): ... -def is_uri_reference(uri): ... -def is_absolute_uri(uri): ... +def is_uri(uri: str) -> re.Match[str] | None: ... +def is_uri_reference(uri: str) -> re.Match[str] | None: ... +def is_absolute_uri(uri: str) -> re.Match[str] | None: ... 
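
The stub changes above only tighten annotations; the runtime behaviour of `oauthlib.common` is unchanged. As a quick illustration of what the new overloads buy (a minimal sketch, not part of the patch; it assumes oauthlib is installed and a type checker is pointed at the updated stubs, and the token URL is a made-up placeholder), the following should now check without `Any` leaking through:

    from oauthlib.common import Request, add_params_to_uri, to_unicode

    # First to_unicode overload: str or bytes in, str out.
    token_url: str = to_unicode(b"https://example.com/token")

    # Second overload: a mapping with str or bytes keys comes back as a dict with str keys.
    params: dict[str, str] = to_unicode({b"grant_type": "client_credentials"})

    # Request attributes are now typed; http_method is a Literal of the HTTP verbs.
    req = Request(add_params_to_uri(token_url, params))
    assert req.http_method == "GET"
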
From 626ba972a01e7d9785b0644fefc392d37b6b7fd0 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 1 Apr 2025 02:27:19 +0400 Subject: [PATCH 163/388] Add `__all__` part 2 (#13719) --------- Co-authored-by: Avasam --- stubs/WTForms/@tests/stubtest_allowlist.txt | 3 - stubs/WTForms/wtforms/form.pyi | 2 + .../@tests/stubtest_allowlist.txt | 3 - stubs/editdistance/editdistance/__init__.pyi | 2 + stubs/gevent/@tests/stubtest_allowlist.txt | 35 ----- .../@tests/stubtest_allowlist_darwin.txt | 6 - .../@tests/stubtest_allowlist_linux.txt | 6 - .../@tests/stubtest_allowlist_win32.txt | 1 + stubs/gevent/gevent/_abstract_linkable.pyi | 2 + stubs/gevent/gevent/_ffi/loop.pyi | 8 +- stubs/gevent/gevent/_ffi/watcher.pyi | 2 + stubs/gevent/gevent/_greenlet_primitives.pyi | 2 + stubs/gevent/gevent/_ident.pyi | 2 + stubs/gevent/gevent/_imap.pyi | 2 + stubs/gevent/gevent/ares.pyi | 5 + stubs/gevent/gevent/backdoor.pyi | 2 + stubs/gevent/gevent/baseserver.pyi | 2 + stubs/gevent/gevent/event.pyi | 2 + stubs/gevent/gevent/events.pyi | 32 ++++ stubs/gevent/gevent/exceptions.pyi | 2 + stubs/gevent/gevent/fileobject.pyi | 2 + stubs/gevent/gevent/greenlet.pyi | 2 + stubs/gevent/gevent/hub.pyi | 2 + stubs/gevent/gevent/libev/__init__.pyi | 1 + stubs/gevent/gevent/libev/corecext.pyi | 10 ++ stubs/gevent/gevent/libev/corecffi.pyi | 10 ++ stubs/gevent/gevent/libuv/__init__.pyi | 1 + stubs/gevent/gevent/libuv/loop.pyi | 2 + stubs/gevent/gevent/libuv/watcher.pyi | 2 + stubs/gevent/gevent/local.pyi | 2 + stubs/gevent/gevent/monkey/__init__.pyi | 28 ++++ stubs/gevent/gevent/resolver/__init__.pyi | 2 + stubs/gevent/gevent/resolver/ares.pyi | 2 + stubs/gevent/gevent/resolver/blocking.pyi | 2 + stubs/gevent/gevent/resolver/dnspython.pyi | 2 + stubs/gevent/gevent/resolver/thread.pyi | 2 + stubs/gevent/gevent/resolver_ares.pyi | 5 + stubs/gevent/gevent/resolver_thread.pyi | 1 + stubs/gevent/gevent/server.pyi | 2 + stubs/gevent/gevent/signal.pyi | 2 + stubs/gevent/gevent/threadpool.pyi | 2 + stubs/gevent/gevent/timeout.pyi | 2 + stubs/gevent/gevent/util.pyi | 2 + stubs/gevent/gevent/win32util.pyi | 2 + stubs/greenlet/@tests/stubtest_allowlist.txt | 3 - stubs/greenlet/greenlet/__init__.pyi | 1 + stubs/httplib2/@tests/stubtest_allowlist.txt | 3 - stubs/httplib2/httplib2/__init__.pyi | 14 ++ stubs/libsass/@tests/stubtest_allowlist.txt | 6 - stubs/libsass/sass.pyi | 17 +++ stubs/libsass/sassutils/builder.pyi | 2 + stubs/libsass/sassutils/distutils.pyi | 2 + stubs/libsass/sassutils/wsgi.pyi | 2 + stubs/passlib/@tests/stubtest_allowlist.txt | 36 +---- .../passlib/crypto/_blowfish/__init__.pyi | 2 + .../passlib/passlib/crypto/_blowfish/base.pyi | 2 + .../passlib/crypto/_blowfish/unrolled.pyi | 2 + stubs/passlib/passlib/crypto/_md4.pyi | 2 + stubs/passlib/passlib/crypto/digest.pyi | 12 ++ .../passlib/crypto/scrypt/__init__.pyi | 2 + .../passlib/crypto/scrypt/_builtin.pyi | 2 + stubs/passlib/passlib/handlers/argon2.pyi | 2 + stubs/passlib/passlib/handlers/bcrypt.pyi | 2 + stubs/passlib/passlib/handlers/cisco.pyi | 2 + stubs/passlib/passlib/handlers/des_crypt.pyi | 2 + stubs/passlib/passlib/handlers/digests.pyi | 2 + stubs/passlib/passlib/handlers/django.pyi | 11 ++ stubs/passlib/passlib/handlers/fshp.pyi | 2 + stubs/passlib/passlib/handlers/md5_crypt.pyi | 2 + stubs/passlib/passlib/handlers/misc.pyi | 2 + stubs/passlib/passlib/handlers/mssql.pyi | 2 + stubs/passlib/passlib/handlers/pbkdf2.pyi | 2 + stubs/passlib/passlib/handlers/phpass.pyi | 2 + stubs/passlib/passlib/handlers/postgres.pyi | 2 + 
stubs/passlib/passlib/handlers/roundup.pyi | 2 + stubs/passlib/passlib/handlers/scram.pyi | 2 + stubs/passlib/passlib/handlers/scrypt.pyi | 2 + stubs/passlib/passlib/handlers/sha1_crypt.pyi | 2 + stubs/passlib/passlib/handlers/sha2_crypt.pyi | 2 + .../passlib/handlers/sun_md5_crypt.pyi | 2 + stubs/passlib/passlib/handlers/windows.pyi | 2 + stubs/passlib/passlib/utils/binary.pyi | 29 ++++ stubs/passlib/passlib/utils/decor.pyi | 9 ++ stubs/passlib/passlib/utils/handlers.pyi | 20 +++ stubs/passlib/passlib/utils/md4.pyi | 2 + stubs/passlib/passlib/utils/pbkdf2.pyi | 10 ++ stubs/peewee/@tests/stubtest_allowlist.txt | 3 - stubs/peewee/peewee.pyi | 78 ++++++++++ stubs/polib/@tests/stubtest_allowlist.txt | 2 - stubs/polib/polib.pyi | 13 ++ stubs/psutil/@tests/stubtest_allowlist.txt | 2 - stubs/psutil/psutil/_common.pyi | 93 ++++++++++++ stubs/psutil/psutil/_psposix.pyi | 2 + stubs/reportlab/@tests/stubtest_allowlist.txt | 22 +-- .../reportlab/graphics/barcode/__init__.pyi | 2 + .../reportlab/graphics/barcode/eanbc.pyi | 2 + .../graphics/barcode/ecc200datamatrix.pyi | 2 + .../reportlab/graphics/barcode/usps4s.pyi | 2 + .../reportlab/graphics/barcode/widgets.pyi | 15 ++ .../reportlab/reportlab/graphics/svgpath.pyi | 2 + .../reportlab/graphics/transform.pyi | 15 ++ stubs/reportlab/reportlab/graphics/utils.pyi | 2 + stubs/reportlab/reportlab/lib/arciv.pyi | 2 + stubs/reportlab/reportlab/lib/formatters.pyi | 2 + .../reportlab/reportlab/lib/pygments2xpre.pyi | 2 + stubs/reportlab/reportlab/lib/rl_accel.pyi | 14 ++ stubs/reportlab/reportlab/lib/rltempfile.pyi | 2 + stubs/reportlab/reportlab/lib/styles.pyi | 2 + stubs/reportlab/reportlab/pdfgen/canvas.pyi | 2 + stubs/reportlab/reportlab/platypus/frames.pyi | 2 + .../reportlab/reportlab/platypus/multicol.pyi | 2 + .../reportlab/platypus/paragraph.pyi | 2 + .../reportlab/platypus/xpreformatted.pyi | 2 + stubs/reportlab/reportlab/rl_settings.pyi | 69 +++++++++ stubs/toposort/@tests/stubtest_allowlist.txt | 2 - stubs/toposort/toposort.pyi | 2 + stubs/tqdm/@tests/stubtest_allowlist.txt | 6 - stubs/tqdm/tqdm/_main.pyi | 4 +- stubs/tqdm/tqdm/_tqdm.pyi | 11 +- stubs/tqdm/tqdm/_tqdm_gui.pyi | 7 +- stubs/tqdm/tqdm/_tqdm_notebook.pyi | 7 +- .../@tests/stubtest_allowlist.txt | 2 - stubs/whatthepatch/whatthepatch/__init__.pyi | 2 + .../workalendar/@tests/stubtest_allowlist.txt | 11 +- .../workalendar/africa/__init__.pyi | 14 ++ .../workalendar/america/__init__.pyi | 88 +++++++++++ .../workalendar/workalendar/asia/__init__.pyi | 16 ++ stubs/workalendar/workalendar/astronomy.pyi | 2 + .../workalendar/europe/__init__.pyi | 141 ++++++++++++++++++ .../europe/scotland/mixins/__init__.pyi | 24 +++ .../workalendar/oceania/__init__.pyi | 17 +++ .../workalendar/workalendar/usa/__init__.pyi | 72 +++++++++ 132 files changed, 1057 insertions(+), 171 deletions(-) delete mode 100644 stubs/polib/@tests/stubtest_allowlist.txt delete mode 100644 stubs/toposort/@tests/stubtest_allowlist.txt delete mode 100644 stubs/whatthepatch/@tests/stubtest_allowlist.txt diff --git a/stubs/WTForms/@tests/stubtest_allowlist.txt b/stubs/WTForms/@tests/stubtest_allowlist.txt index 2c8c7e4b50d3..18057b0c2330 100644 --- a/stubs/WTForms/@tests/stubtest_allowlist.txt +++ b/stubs/WTForms/@tests/stubtest_allowlist.txt @@ -1,6 +1,3 @@ -# TODO: missing from stub -wtforms.form.__all__ - # Error: is not present at runtime # ============================= # This is hack to get around Field.__new__ not being able to return diff --git a/stubs/WTForms/wtforms/form.pyi b/stubs/WTForms/wtforms/form.pyi index 
ab889d4de002..c108954471e0 100644 --- a/stubs/WTForms/wtforms/form.pyi +++ b/stubs/WTForms/wtforms/form.pyi @@ -83,3 +83,5 @@ class Form(BaseForm, metaclass=FormMeta): def __setitem__(self, name: str, value: None) -> None: ... # type: ignore[override] def __delitem__(self, name: str) -> None: ... def __delattr__(self, name: str) -> None: ... + +__all__ = ("BaseForm", "Form") diff --git a/stubs/editdistance/@tests/stubtest_allowlist.txt b/stubs/editdistance/@tests/stubtest_allowlist.txt index d9d3402279f0..ef46eff1a3b5 100644 --- a/stubs/editdistance/@tests/stubtest_allowlist.txt +++ b/stubs/editdistance/@tests/stubtest_allowlist.txt @@ -1,5 +1,2 @@ -# TODO: missing from stub -editdistance.__all__ - # Not public API -- the submodule is an implementation detail due to it being a cythonized package editdistance.bycython diff --git a/stubs/editdistance/editdistance/__init__.pyi b/stubs/editdistance/editdistance/__init__.pyi index 048d56125afa..8d31bd35aa1b 100644 --- a/stubs/editdistance/editdistance/__init__.pyi +++ b/stubs/editdistance/editdistance/__init__.pyi @@ -4,3 +4,5 @@ def eval(a: Iterable[Hashable], b: Iterable[Hashable]) -> int: ... def distance(a: Iterable[Hashable], b: Iterable[Hashable]) -> int: ... def eval_criterion(a: Iterable[Hashable], b: Iterable[Hashable], thr: int) -> bool: ... def distance_le_than(a: Iterable[Hashable], b: Iterable[Hashable], thr: int) -> bool: ... + +__all__ = ("eval", "distance", "eval_criterion", "distance_le_than") diff --git a/stubs/gevent/@tests/stubtest_allowlist.txt b/stubs/gevent/@tests/stubtest_allowlist.txt index 0b0da22fc238..3cd813e4b742 100644 --- a/stubs/gevent/@tests/stubtest_allowlist.txt +++ b/stubs/gevent/@tests/stubtest_allowlist.txt @@ -1,40 +1,8 @@ # TODO: missing from stub -gevent._abstract_linkable.__all__ -gevent._ffi.loop.__all__ -gevent._ffi.watcher.__all__ -gevent._greenlet_primitives.__all__ -gevent._ident.__all__ -gevent._imap.__all__ -gevent.backdoor.__all__ -gevent.baseserver.__all__ -gevent.event.__all__ -gevent.events.__all__ -gevent.exceptions.__all__ -gevent.fileobject.__all__ -gevent.greenlet.__all__ -gevent.hub.__all__ -gevent.libev.__all__ -gevent.libev.corecext.__all__ -gevent.libuv.__all__ -gevent.libuv.loop.__all__ -gevent.libuv.watcher.__all__ -gevent.local.__all__ -gevent.monkey.__all__ gevent.os.__all__ -gevent.resolver.__all__ -gevent.resolver.blocking.__all__ -gevent.resolver.dnspython.__all__ -gevent.resolver.thread.__all__ -gevent.resolver_thread.__all__ -gevent.server.__all__ -gevent.signal.__all__ gevent.socket.__all__ gevent.ssl.__all__ gevent.subprocess.__all__ -gevent.threadpool.__all__ -gevent.timeout.__all__ -gevent.util.__all__ -gevent.win32util.__all__ # Error: failed to find stubs # ============================= @@ -66,7 +34,6 @@ gevent._ffi.ERROR gevent._ffi.GEVENT_DEBUG_LEVEL gevent._ffi.TRACE gevent._ffi.loop.AbstractLoop.async -gevent._ffi.loop.assign_standard_callbacks gevent._fileobjectcommon.UniversalNewlineBytesWrapper gevent._waiter.Waiter.switch_args @@ -79,8 +46,6 @@ gevent.libuv.watcher.watcher.feed # unnecessary python 2 compatibility stuff gevent._config.Config.trace_malloc gevent._imap.IMapUnordered.next -gevent.monkey.patch_builtins -gevent.monkey.patch_sys gevent.pywsgi.Environ.iteritems # weird method that doesn't work with this being generic, so we removed it diff --git a/stubs/gevent/@tests/stubtest_allowlist_darwin.txt b/stubs/gevent/@tests/stubtest_allowlist_darwin.txt index d561f9168ab3..99849c10b71b 100644 --- a/stubs/gevent/@tests/stubtest_allowlist_darwin.txt +++ 
b/stubs/gevent/@tests/stubtest_allowlist_darwin.txt @@ -1,9 +1,3 @@ -# TODO: missing from stub -gevent.ares.__all__ -gevent.libev.corecffi.__all__ -gevent.resolver.ares.__all__ -gevent.resolver_ares.__all__ - # Error: is not present in stub # ============================= # internal API stuff we dropped because it wasn't necessary diff --git a/stubs/gevent/@tests/stubtest_allowlist_linux.txt b/stubs/gevent/@tests/stubtest_allowlist_linux.txt index a28b77fae091..d768857225d1 100644 --- a/stubs/gevent/@tests/stubtest_allowlist_linux.txt +++ b/stubs/gevent/@tests/stubtest_allowlist_linux.txt @@ -1,9 +1,3 @@ -# TODO: missing from stub -gevent.ares.__all__ -gevent.libev.corecffi.__all__ -gevent.resolver.ares.__all__ -gevent.resolver_ares.__all__ - # Error: is not present in stub # ============================= # internal API stuff we dropped because it wasn't necessary diff --git a/stubs/gevent/@tests/stubtest_allowlist_win32.txt b/stubs/gevent/@tests/stubtest_allowlist_win32.txt index 734da8785259..52eddde03a7c 100644 --- a/stubs/gevent/@tests/stubtest_allowlist_win32.txt +++ b/stubs/gevent/@tests/stubtest_allowlist_win32.txt @@ -1,6 +1,7 @@ # Error: is not present in stub # ============================= # these get exported but don't actually work on win32 so we ignore them +gevent.signal.__all__ gevent.signal.getsignal gevent.signal.signal diff --git a/stubs/gevent/gevent/_abstract_linkable.pyi b/stubs/gevent/gevent/_abstract_linkable.pyi index 0c0d9a0756cc..cdf549b90cf3 100644 --- a/stubs/gevent/gevent/_abstract_linkable.pyi +++ b/stubs/gevent/gevent/_abstract_linkable.pyi @@ -11,3 +11,5 @@ class AbstractLinkable: def rawlink(self, callback: Callable[[Self], object], /) -> None: ... def ready(self) -> bool: ... def unlink(self, callback: Callable[[Self], object], /) -> None: ... + +__all__ = ["AbstractLinkable"] diff --git a/stubs/gevent/gevent/_ffi/loop.pyi b/stubs/gevent/gevent/_ffi/loop.pyi index b06e90c2a46d..7bd152d277e0 100644 --- a/stubs/gevent/gevent/_ffi/loop.pyi +++ b/stubs/gevent/gevent/_ffi/loop.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import FileDescriptor -from collections.abc import Callable +from collections.abc import Callable, Sequence from types import TracebackType from typing import Protocol from typing_extensions import TypeAlias, TypeVarTuple, Unpack @@ -17,6 +17,10 @@ class _SupportsHandleError(Protocol): _ErrorHandler: TypeAlias = _ErrorHandlerFunc | _SupportsHandleError +def assign_standard_callbacks( + ffi: object, lib: object, callbacks_class: Callable[[object], object], extras: Sequence[tuple[object, object]] = ... +) -> object: ... + class AbstractLoop: CALLBACK_CHECK_COUNT: int error_handler: _ErrorHandler | None @@ -78,3 +82,5 @@ class AbstractLoop: def run_callback_threadsafe(self, func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> _Callback: ... def callback(self, priority: float | None = ...) -> _Callback: ... def fileno(self) -> FileDescriptor | None: ... + +__all__ = ["AbstractLoop", "assign_standard_callbacks"] diff --git a/stubs/gevent/gevent/_ffi/watcher.pyi b/stubs/gevent/gevent/_ffi/watcher.pyi index 112396aa10ba..05128ac2cf34 100644 --- a/stubs/gevent/gevent/_ffi/watcher.pyi +++ b/stubs/gevent/gevent/_ffi/watcher.pyi @@ -89,3 +89,5 @@ class StatMixin: def prev(self) -> _StatResult | None: ... @property def interval(self) -> float: ... 
+ +__all__: list[str] = [] diff --git a/stubs/gevent/gevent/_greenlet_primitives.pyi b/stubs/gevent/gevent/_greenlet_primitives.pyi index c8c371fc5a97..334f4178491b 100644 --- a/stubs/gevent/gevent/_greenlet_primitives.pyi +++ b/stubs/gevent/gevent/_greenlet_primitives.pyi @@ -14,3 +14,5 @@ class SwitchOutGreenletWithLoop(TrackedRawGreenlet): def loop(self, value: _Loop) -> None: ... def switch(self) -> Any: ... def switch_out(self) -> NoReturn: ... + +__all__ = ["TrackedRawGreenlet", "SwitchOutGreenletWithLoop"] diff --git a/stubs/gevent/gevent/_ident.pyi b/stubs/gevent/gevent/_ident.pyi index 56144a379f13..7c1bf6b8d319 100644 --- a/stubs/gevent/gevent/_ident.pyi +++ b/stubs/gevent/gevent/_ident.pyi @@ -10,3 +10,5 @@ class IdentRegistry: def __init__(self) -> None: ... def get_ident(self, obj: object) -> int: ... def __len__(self) -> int: ... + +__all__ = ["IdentRegistry"] diff --git a/stubs/gevent/gevent/_imap.pyi b/stubs/gevent/gevent/_imap.pyi index fa7a7953075d..1220d57a65c6 100644 --- a/stubs/gevent/gevent/_imap.pyi +++ b/stubs/gevent/gevent/_imap.pyi @@ -22,3 +22,5 @@ class IMapUnordered(Greenlet[_P, _T]): class IMap(IMapUnordered[_P, _T]): index: int + +__all__ = ["IMapUnordered", "IMap"] diff --git a/stubs/gevent/gevent/ares.pyi b/stubs/gevent/gevent/ares.pyi index 530dece6fe68..8fa5509e5fa6 100644 --- a/stubs/gevent/gevent/ares.pyi +++ b/stubs/gevent/gevent/ares.pyi @@ -1 +1,6 @@ +import sys + from gevent.resolver.cares import * + +if sys.platform != "win32": + __all__ = ["channel"] diff --git a/stubs/gevent/gevent/backdoor.pyi b/stubs/gevent/gevent/backdoor.pyi index a9fd2a8eca7c..67740ee35fda 100644 --- a/stubs/gevent/gevent/backdoor.pyi +++ b/stubs/gevent/gevent/backdoor.pyi @@ -43,3 +43,5 @@ class BackdoorServer(StreamServer): ciphers: str = ..., ) -> None: ... def handle(self, conn: _GeventSocket, _address: _Address) -> None: ... + +__all__ = ["BackdoorServer"] diff --git a/stubs/gevent/gevent/baseserver.pyi b/stubs/gevent/gevent/baseserver.pyi index 3a8ac540ae45..aa5308ec0b92 100644 --- a/stubs/gevent/gevent/baseserver.pyi +++ b/stubs/gevent/gevent/baseserver.pyi @@ -63,3 +63,5 @@ class BaseServer(Generic[_P]): def stop(self, timeout: float | None = None) -> None: ... def serve_forever(self, stop_timeout: float | None = None) -> None: ... def is_fatal_error(self, ex: BaseException) -> bool: ... + +__all__ = ["BaseServer"] diff --git a/stubs/gevent/gevent/event.pyi b/stubs/gevent/gevent/event.pyi index 199a63f88eca..b6b827da36b9 100644 --- a/stubs/gevent/gevent/event.pyi +++ b/stubs/gevent/gevent/event.pyi @@ -61,3 +61,5 @@ class AsyncResult(AbstractLinkable, Generic[_T]): def done(self) -> bool: ... def cancel(self) -> Literal[False]: ... def cancelled(self) -> Literal[False]: ... + +__all__ = ["Event", "AsyncResult"] diff --git a/stubs/gevent/gevent/events.pyi b/stubs/gevent/gevent/events.pyi index fd11a1c80827..de1913d501eb 100644 --- a/stubs/gevent/gevent/events.pyi +++ b/stubs/gevent/gevent/events.pyi @@ -163,3 +163,35 @@ class IGeventDidPatchAllEvent(IGeventDidPatchEvent): ... 
@implementer(IGeventDidPatchAllEvent) class GeventDidPatchAllEvent(_PatchAllMixin, GeventDidPatchEvent): ENTRY_POINT_NAME: str + +__all__ = [ + "subscribers", + # monitor thread + "IEventLoopBlocked", + "EventLoopBlocked", + "IMemoryUsageThresholdExceeded", + "MemoryUsageThresholdExceeded", + "IMemoryUsageUnderThreshold", + "MemoryUsageUnderThreshold", + # Hub + "IPeriodicMonitorThread", + "IPeriodicMonitorThreadStartedEvent", + "PeriodicMonitorThreadStartedEvent", + # monkey + "IGeventPatchEvent", + "GeventPatchEvent", + "IGeventWillPatchEvent", + "DoNotPatch", + "GeventWillPatchEvent", + "IGeventDidPatchEvent", + "IGeventWillPatchModuleEvent", + "GeventWillPatchModuleEvent", + "IGeventDidPatchModuleEvent", + "GeventDidPatchModuleEvent", + "IGeventWillPatchAllEvent", + "GeventWillPatchAllEvent", + "IGeventDidPatchBuiltinModulesEvent", + "GeventDidPatchBuiltinModulesEvent", + "IGeventDidPatchAllEvent", + "GeventDidPatchAllEvent", +] diff --git a/stubs/gevent/gevent/exceptions.pyi b/stubs/gevent/gevent/exceptions.pyi index 1143e853fa97..26cacfa4fc7d 100644 --- a/stubs/gevent/gevent/exceptions.pyi +++ b/stubs/gevent/gevent/exceptions.pyi @@ -13,3 +13,5 @@ class InvalidThreadUseError(RuntimeError): ... class HubDestroyed(GreenletExit): destroy_loop: bool def __init__(self, destroy_loop: bool) -> None: ... + +__all__ = ["LoopExit"] diff --git a/stubs/gevent/gevent/fileobject.pyi b/stubs/gevent/gevent/fileobject.pyi index 4138b6e25a97..db49e17e084f 100644 --- a/stubs/gevent/gevent/fileobject.pyi +++ b/stubs/gevent/gevent/fileobject.pyi @@ -149,7 +149,9 @@ if sys.platform != "win32": ) -> None: ... _FileObjectType: TypeAlias = type[FileObjectPosix[Any, Any] | FileObjectBlock[Any, Any] | FileObjectThread[Any, Any]] + __all__ = ["FileObjectPosix", "FileObjectThread", "FileObjectBlock", "FileObject"] else: _FileObjectType: TypeAlias = type[FileObjectBlock[Any, Any] | FileObjectThread[Any, Any]] + __all__ = ["FileObjectThread", "FileObjectBlock", "FileObject"] FileObject: _FileObjectType diff --git a/stubs/gevent/gevent/greenlet.pyi b/stubs/gevent/gevent/greenlet.pyi index bed83cfc3a22..51ca9730d2eb 100644 --- a/stubs/gevent/gevent/greenlet.pyi +++ b/stubs/gevent/gevent/greenlet.pyi @@ -96,3 +96,5 @@ def killall( block: bool = True, timeout: float | None = None, ) -> None: ... + +__all__ = ["Greenlet", "joinall", "killall"] diff --git a/stubs/gevent/gevent/hub.pyi b/stubs/gevent/gevent/hub.pyi index 44a8282ae316..4e4d409b59b4 100644 --- a/stubs/gevent/gevent/hub.pyi +++ b/stubs/gevent/gevent/hub.pyi @@ -108,3 +108,5 @@ class linkproxy: obj: object def __init__(self, callback: Callable[[_T], object], obj: _T) -> None: ... def __call__(self, *args: object) -> None: ... + +__all__ = ["getcurrent", "GreenletExit", "spawn_raw", "sleep", "kill", "signal", "reinit", "get_hub", "Hub", "Waiter"] diff --git a/stubs/gevent/gevent/libev/__init__.pyi b/stubs/gevent/gevent/libev/__init__.pyi index e69de29bb2d1..c9c2ef67bd9d 100644 --- a/stubs/gevent/gevent/libev/__init__.pyi +++ b/stubs/gevent/gevent/libev/__init__.pyi @@ -0,0 +1 @@ +__all__: list[str] = [] diff --git a/stubs/gevent/gevent/libev/corecext.pyi b/stubs/gevent/gevent/libev/corecext.pyi index 35a46bbf3655..5b328fcbe19b 100644 --- a/stubs/gevent/gevent/libev/corecext.pyi +++ b/stubs/gevent/gevent/libev/corecext.pyi @@ -90,3 +90,13 @@ if sys.platform != "win32": def origflags_int(self) -> int: ... @property def sigfd(self) -> FileDescriptor: ... 
+ + __all__ = [ + "get_version", + "get_header_version", + "supported_backends", + "recommended_backends", + "embeddable_backends", + "time", + "loop", + ] diff --git a/stubs/gevent/gevent/libev/corecffi.pyi b/stubs/gevent/gevent/libev/corecffi.pyi index e814fb9b90b3..3f7821a82524 100644 --- a/stubs/gevent/gevent/libev/corecffi.pyi +++ b/stubs/gevent/gevent/libev/corecffi.pyi @@ -34,3 +34,13 @@ class loop(AbstractLoop): def reset_sigchld(self) -> None: ... def stat(self, path: str, interval: float = 0.0, ref: bool = True, priority: bool | None = None) -> watcher.stat: ... + +__all__ = [ + "get_version", + "get_header_version", + "supported_backends", + "recommended_backends", + "embeddable_backends", + "time", + "loop", +] diff --git a/stubs/gevent/gevent/libuv/__init__.pyi b/stubs/gevent/gevent/libuv/__init__.pyi index e69de29bb2d1..c9c2ef67bd9d 100644 --- a/stubs/gevent/gevent/libuv/__init__.pyi +++ b/stubs/gevent/gevent/libuv/__init__.pyi @@ -0,0 +1 @@ +__all__: list[str] = [] diff --git a/stubs/gevent/gevent/libuv/loop.pyi b/stubs/gevent/gevent/libuv/loop.pyi index 950b82118375..cd4ccd5287ef 100644 --- a/stubs/gevent/gevent/libuv/loop.pyi +++ b/stubs/gevent/gevent/libuv/loop.pyi @@ -40,3 +40,5 @@ class loop(AbstractLoop): def fork(self, ref: bool = True, priority: int | None = None) -> watcher.fork: ... def child(self, pid: int, trace: int = 0, ref: bool = True) -> watcher.child: ... # prepare is not supported on libuv yet, but we need type_error to annotate that + +__all__: list[str] = [] diff --git a/stubs/gevent/gevent/libuv/watcher.pyi b/stubs/gevent/gevent/libuv/watcher.pyi index 375eb88a7d94..af5b5e6331a4 100644 --- a/stubs/gevent/gevent/libuv/watcher.pyi +++ b/stubs/gevent/gevent/libuv/watcher.pyi @@ -30,3 +30,5 @@ class signal(_base.SignalMixin, watcher): ... class idle(_base.IdleMixin, watcher): ... class check(_base.CheckMixin, watcher): ... class prepare(_base.PrepareMixin, watcher): ... + +__all__: list[str] = [] diff --git a/stubs/gevent/gevent/local.pyi b/stubs/gevent/gevent/local.pyi index a455c97f7017..cdcf88b483c8 100644 --- a/stubs/gevent/gevent/local.pyi +++ b/stubs/gevent/gevent/local.pyi @@ -7,3 +7,5 @@ class local: def __getattribute__(self, name: str) -> Any: ... def __delattr__(self, name: str) -> None: ... def __setattr__(self, name: str, value: Any) -> None: ... + +__all__ = ["local"] diff --git a/stubs/gevent/gevent/monkey/__init__.pyi b/stubs/gevent/gevent/monkey/__init__.pyi index d7ccf4f2822a..da6da0c3f35f 100644 --- a/stubs/gevent/gevent/monkey/__init__.pyi +++ b/stubs/gevent/gevent/monkey/__init__.pyi @@ -13,11 +13,13 @@ def patch_thread( threading: bool = True, _threading_local: bool = True, Event: bool = True, logging: bool = True, existing_locks: bool = True ) -> None: ... def patch_socket(dns: bool = True, aggressive: bool = True) -> None: ... +def patch_builtins() -> None: ... def patch_dns() -> None: ... def patch_ssl() -> None: ... def patch_select(aggressive: bool = True) -> None: ... def patch_selectors(aggressive: bool = True) -> None: ... def patch_subprocess() -> None: ... +def patch_sys(stdin: bool = True, stdout: bool = True, stderr: bool = True) -> None: ... def patch_signal() -> None: ... def patch_all( socket: bool = True, @@ -38,3 +40,29 @@ def patch_all( **kwargs: object, ) -> bool | None: ... def main() -> dict[str, Any]: ... 
+ +__all__ = [ + "patch_all", + "patch_builtins", + "patch_dns", + "patch_os", + "patch_queue", + "patch_select", + "patch_signal", + "patch_socket", + "patch_ssl", + "patch_subprocess", + "patch_sys", + "patch_thread", + "patch_time", + # query functions + "get_original", + "is_module_patched", + "is_object_patched", + # plugin API + "patch_module", + # module functions + "main", + # Errors and warnings + "MonkeyPatchWarning", +] diff --git a/stubs/gevent/gevent/resolver/__init__.pyi b/stubs/gevent/gevent/resolver/__init__.pyi index f47eaf5b4567..72609bc06442 100644 --- a/stubs/gevent/gevent/resolver/__init__.pyi +++ b/stubs/gevent/gevent/resolver/__init__.pyi @@ -19,3 +19,5 @@ class AbstractResolver: ) -> _AddrinfoResult: ... def gethostbyaddr(self, ip_address: str) -> tuple[str, list[str], list[str]]: ... def getnameinfo(self, sockaddr: _SockAddr, flags: int) -> _NameinfoResult: ... + +__all__ = () diff --git a/stubs/gevent/gevent/resolver/ares.pyi b/stubs/gevent/gevent/resolver/ares.pyi index c63cb40843ec..a20e6b99a1e3 100644 --- a/stubs/gevent/gevent/resolver/ares.pyi +++ b/stubs/gevent/gevent/resolver/ares.pyi @@ -39,3 +39,5 @@ if sys.platform != "win32": servers: Sequence[str] | str | None = None, ) -> None: ... def __del__(self) -> None: ... + + __all__ = ["Resolver"] diff --git a/stubs/gevent/gevent/resolver/blocking.pyi b/stubs/gevent/gevent/resolver/blocking.pyi index 69236512c373..54ee37b7ce31 100644 --- a/stubs/gevent/gevent/resolver/blocking.pyi +++ b/stubs/gevent/gevent/resolver/blocking.pyi @@ -11,3 +11,5 @@ class Resolver: ) -> _AddrinfoResult: ... def gethostbyaddr(self, ip_address: str) -> tuple[str, list[str], list[str]]: ... def getnameinfo(self, sockaddr: _SockAddr, flags: int) -> _NameinfoResult: ... + +__all__ = ["Resolver"] diff --git a/stubs/gevent/gevent/resolver/dnspython.pyi b/stubs/gevent/gevent/resolver/dnspython.pyi index 5e49b6e3916e..cf965d10a2c9 100644 --- a/stubs/gevent/gevent/resolver/dnspython.pyi +++ b/stubs/gevent/gevent/resolver/dnspython.pyi @@ -7,3 +7,5 @@ class Resolver(AbstractResolver): def __init__(self, hub: Hub | None = ...) -> None: ... @property def resolver(self) -> Any: ... # this is a custom dnspython Resolver + +__all__ = ["Resolver"] diff --git a/stubs/gevent/gevent/resolver/thread.pyi b/stubs/gevent/gevent/resolver/thread.pyi index ecd25d33047b..3a265ed172c3 100644 --- a/stubs/gevent/gevent/resolver/thread.pyi +++ b/stubs/gevent/gevent/resolver/thread.pyi @@ -13,3 +13,5 @@ class Resolver: ) -> _AddrinfoResult: ... def gethostbyaddr(self, ip_address: str) -> tuple[str, list[str], list[str]]: ... def getnameinfo(self, sockaddr: _SockAddr, flags: int) -> _NameinfoResult: ... 
+ +__all__ = ["Resolver"] diff --git a/stubs/gevent/gevent/resolver_ares.pyi b/stubs/gevent/gevent/resolver_ares.pyi index 6f52ee389eb8..5eb75234139c 100644 --- a/stubs/gevent/gevent/resolver_ares.pyi +++ b/stubs/gevent/gevent/resolver_ares.pyi @@ -1 +1,6 @@ +import sys + from gevent.resolver.ares import * + +if sys.platform != "win32": + __all__ = ["Resolver"] diff --git a/stubs/gevent/gevent/resolver_thread.pyi b/stubs/gevent/gevent/resolver_thread.pyi index 1a3071953bd8..f960eab26073 100644 --- a/stubs/gevent/gevent/resolver_thread.pyi +++ b/stubs/gevent/gevent/resolver_thread.pyi @@ -1 +1,2 @@ from gevent.resolver.thread import * +from gevent.resolver.thread import __all__ as __all__ diff --git a/stubs/gevent/gevent/server.pyi b/stubs/gevent/gevent/server.pyi index 36ce8d951f6e..712754d4ee2a 100644 --- a/stubs/gevent/gevent/server.pyi +++ b/stubs/gevent/gevent/server.pyi @@ -82,3 +82,5 @@ class DatagramServer(BaseServer[[_GeventSocket, _Address]]): def sendto(self, data: ReadableBuffer, address: _StrictAddress, /) -> int: ... @overload def sendto(self, data: ReadableBuffer, flags: int, address: _StrictAddress, /) -> int: ... + +__all__ = ["StreamServer", "DatagramServer"] diff --git a/stubs/gevent/gevent/signal.pyi b/stubs/gevent/gevent/signal.pyi index 17d0e7d3ca4d..4c08f33ff56a 100644 --- a/stubs/gevent/gevent/signal.pyi +++ b/stubs/gevent/gevent/signal.pyi @@ -8,3 +8,5 @@ from signal import _HANDLER, _SIGNUM if sys.platform != "win32": def getsignal(signalnum: _SIGNUM) -> _HANDLER: ... def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: ... + + __all__ = ["signal", "getsignal"] diff --git a/stubs/gevent/gevent/threadpool.pyi b/stubs/gevent/gevent/threadpool.pyi index ac1e5cd14f2b..e65c64cb2d1f 100644 --- a/stubs/gevent/gevent/threadpool.pyi +++ b/stubs/gevent/gevent/threadpool.pyi @@ -53,3 +53,5 @@ class ThreadResult(Generic[_T]): class ThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): kill = concurrent.futures.ThreadPoolExecutor.shutdown + +__all__ = ["ThreadPool", "ThreadResult", "ThreadPoolExecutor"] diff --git a/stubs/gevent/gevent/timeout.pyi b/stubs/gevent/gevent/timeout.pyi index 9e7bf5e4c36b..3fff1e511c8a 100644 --- a/stubs/gevent/gevent/timeout.pyi +++ b/stubs/gevent/gevent/timeout.pyi @@ -53,3 +53,5 @@ def with_timeout( ) -> _T1 | _T2: ... @overload def with_timeout(seconds: float | None, function: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T: ... + +__all__ = ["Timeout", "with_timeout"] diff --git a/stubs/gevent/gevent/util.pyi b/stubs/gevent/gevent/util.pyi index 8ded2b527041..5d8a828d985c 100644 --- a/stubs/gevent/gevent/util.pyi +++ b/stubs/gevent/gevent/util.pyi @@ -47,3 +47,5 @@ class assert_switches: def __init__(self, max_blocking_time: float | None = None, hub_only: bool = False) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... 
+ +__all__ = ["format_run_info", "print_run_info", "GreenletTree", "wrap_errors", "assert_switches"] diff --git a/stubs/gevent/gevent/win32util.pyi b/stubs/gevent/gevent/win32util.pyi index 70fc1c046974..e6fc566ee52c 100644 --- a/stubs/gevent/gevent/win32util.pyi +++ b/stubs/gevent/gevent/win32util.pyi @@ -1,3 +1,5 @@ from collections.abc import Callable formatError: Callable[[object], str] + +__all__ = ["formatError"] diff --git a/stubs/greenlet/@tests/stubtest_allowlist.txt b/stubs/greenlet/@tests/stubtest_allowlist.txt index c1b1af71a6e4..38d2d056903f 100644 --- a/stubs/greenlet/@tests/stubtest_allowlist.txt +++ b/stubs/greenlet/@tests/stubtest_allowlist.txt @@ -1,6 +1,3 @@ -# TODO: missing from stub -greenlet.__all__ - # Error: is not present in stub # ============================= # this module only contains C code and exports no Python code, so it's better diff --git a/stubs/greenlet/greenlet/__init__.pyi b/stubs/greenlet/greenlet/__init__.pyi index 2616abab9653..903fc6f31c70 100644 --- a/stubs/greenlet/greenlet/__init__.pyi +++ b/stubs/greenlet/greenlet/__init__.pyi @@ -11,3 +11,4 @@ from ._greenlet import ( ) __version__: Final[str] +__all__ = ["__version__", "_C_API", "GreenletExit", "error", "getcurrent", "greenlet", "gettrace", "settrace"] diff --git a/stubs/httplib2/@tests/stubtest_allowlist.txt b/stubs/httplib2/@tests/stubtest_allowlist.txt index 2524d80cb014..6b90fbdac749 100644 --- a/stubs/httplib2/@tests/stubtest_allowlist.txt +++ b/stubs/httplib2/@tests/stubtest_allowlist.txt @@ -1,5 +1,2 @@ -# TODO: missing from stub -httplib2.__all__ - # __getattr__() replaced with actual field in stub httplib2.Response.dict diff --git a/stubs/httplib2/httplib2/__init__.pyi b/stubs/httplib2/httplib2/__init__.pyi index a8cc8e90d8e7..6e4b6dc7f320 100644 --- a/stubs/httplib2/httplib2/__init__.pyi +++ b/stubs/httplib2/httplib2/__init__.pyi @@ -186,3 +186,17 @@ class Response(dict[str, Any]): def __init__(self, info) -> None: ... @property def dict(self) -> Self: ... + +__all__ = [ + "debuglevel", + "FailedToDecompressContent", + "Http", + "HttpLib2Error", + "ProxyInfo", + "RedirectLimit", + "RedirectMissingLocation", + "Response", + "RETRIES", + "UnimplementedDigestAuthOptionError", + "UnimplementedHmacDigestAuthOptionError", +] diff --git a/stubs/libsass/@tests/stubtest_allowlist.txt b/stubs/libsass/@tests/stubtest_allowlist.txt index 0f7d3fdbce0d..a93ae4db300f 100644 --- a/stubs/libsass/@tests/stubtest_allowlist.txt +++ b/stubs/libsass/@tests/stubtest_allowlist.txt @@ -1,9 +1,3 @@ -# TODO: missing from stub -sassutils.builder.__all__ -sassutils.distutils.__all__ -sassutils.wsgi.__all__ -sass.__all__ - # Error: is not present in stub # ============================= # These are only implemented for the purposes of emitting an error diff --git a/stubs/libsass/sass.pyi b/stubs/libsass/sass.pyi index fac9d4820ce1..b81a15213378 100644 --- a/stubs/libsass/sass.pyi +++ b/stubs/libsass/sass.pyi @@ -183,3 +183,20 @@ class SassMap(Mapping[_KT, _VT_co]): def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... def __hash__(self) -> int: ... 
+ +__all__ = ( + "MODES", + "OUTPUT_STYLES", + "SOURCE_COMMENTS", + "CompileError", + "SassColor", + "SassError", + "SassFunction", + "SassList", + "SassMap", + "SassNumber", + "SassWarning", + "and_join", + "compile", + "libsass_version", +) diff --git a/stubs/libsass/sassutils/builder.pyi b/stubs/libsass/sassutils/builder.pyi index 7d23a6fbbbb3..851767cd3827 100644 --- a/stubs/libsass/sassutils/builder.pyi +++ b/stubs/libsass/sassutils/builder.pyi @@ -32,3 +32,5 @@ class Manifest: def unresolve_filename(self, package_dir: str, filename: str) -> str: ... def build(self, package_dir: str, output_style: _OutputStyle = "nested") -> frozenset[str]: ... def build_one(self, package_dir: str, filename: str, source_map: bool = False) -> str: ... + +__all__ = ("SUFFIXES", "SUFFIX_PATTERN", "Manifest", "build_directory") diff --git a/stubs/libsass/sassutils/distutils.pyi b/stubs/libsass/sassutils/distutils.pyi index a4a74cb25e0e..faa810dc424f 100644 --- a/stubs/libsass/sassutils/distutils.pyi +++ b/stubs/libsass/sassutils/distutils.pyi @@ -14,3 +14,5 @@ class build_sass(Command): def finalize_options(self) -> None: ... def run(self) -> None: ... def get_package_dir(self, package: str) -> str: ... + +__all__ = ("build_sass", "validate_manifests") diff --git a/stubs/libsass/sassutils/wsgi.pyi b/stubs/libsass/sassutils/wsgi.pyi index fa2a9d792d51..8d9a62ea8aed 100644 --- a/stubs/libsass/sassutils/wsgi.pyi +++ b/stubs/libsass/sassutils/wsgi.pyi @@ -20,3 +20,5 @@ class SassMiddleware: def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... @staticmethod def quote_css_string(s: str) -> str: ... + +__all__ = ("SassMiddleware",) diff --git a/stubs/passlib/@tests/stubtest_allowlist.txt b/stubs/passlib/@tests/stubtest_allowlist.txt index de26f574555e..5877bbebe95b 100644 --- a/stubs/passlib/@tests/stubtest_allowlist.txt +++ b/stubs/passlib/@tests/stubtest_allowlist.txt @@ -1,38 +1,3 @@ -# TODO: missing from stub -passlib.hosts.__all__ -passlib.crypto._blowfish.__all__ -passlib.crypto._blowfish.base.__all__ -passlib.crypto._blowfish.unrolled.__all__ -passlib.crypto._md4.__all__ -passlib.crypto.digest.__all__ -passlib.crypto.scrypt.__all__ -passlib.crypto.scrypt._builtin.__all__ -passlib.handlers.argon2.__all__ -passlib.handlers.bcrypt.__all__ -passlib.handlers.cisco.__all__ -passlib.handlers.des_crypt.__all__ -passlib.handlers.digests.__all__ -passlib.handlers.django.__all__ -passlib.handlers.fshp.__all__ -passlib.handlers.md5_crypt.__all__ -passlib.handlers.misc.__all__ -passlib.handlers.mssql.__all__ -passlib.handlers.pbkdf2.__all__ -passlib.handlers.phpass.__all__ -passlib.handlers.postgres.__all__ -passlib.handlers.roundup.__all__ -passlib.handlers.scram.__all__ -passlib.handlers.scrypt.__all__ -passlib.handlers.sha1_crypt.__all__ -passlib.handlers.sha2_crypt.__all__ -passlib.handlers.sun_md5_crypt.__all__ -passlib.handlers.windows.__all__ -passlib.utils.binary.__all__ -passlib.utils.decor.__all__ -passlib.utils.handlers.__all__ -passlib.utils.md4.__all__ -passlib.utils.pbkdf2.__all__ - # proxy module that uses some import magic incompatible with stubtest passlib.hash @@ -87,6 +52,7 @@ passlib.tests.* # This is only available when the crypt module is available. This module # was dropped from the standard library of Python 3.13, but is still available # in some environments. +passlib.hosts.__all__ (passlib.hosts.host_context)? 
# Fields differs at runtime: diff --git a/stubs/passlib/passlib/crypto/_blowfish/__init__.pyi b/stubs/passlib/passlib/crypto/_blowfish/__init__.pyi index 093b06fe5037..f73ccb004ecc 100644 --- a/stubs/passlib/passlib/crypto/_blowfish/__init__.pyi +++ b/stubs/passlib/passlib/crypto/_blowfish/__init__.pyi @@ -1,3 +1,5 @@ from passlib.crypto._blowfish.unrolled import BlowfishEngine as BlowfishEngine def raw_bcrypt(password, ident, salt, log_rounds): ... + +__all__ = ["BlowfishEngine", "raw_bcrypt"] diff --git a/stubs/passlib/passlib/crypto/_blowfish/base.pyi b/stubs/passlib/passlib/crypto/_blowfish/base.pyi index abdeed216fc4..d882673d5be3 100644 --- a/stubs/passlib/passlib/crypto/_blowfish/base.pyi +++ b/stubs/passlib/passlib/crypto/_blowfish/base.pyi @@ -11,3 +11,5 @@ class BlowfishEngine: def eks_salted_expand(self, key_words, salt_words) -> None: ... def eks_repeated_expand(self, key_words, salt_words, rounds) -> None: ... def repeat_encipher(self, l, r, count): ... + +__all__ = ["BlowfishEngine"] diff --git a/stubs/passlib/passlib/crypto/_blowfish/unrolled.pyi b/stubs/passlib/passlib/crypto/_blowfish/unrolled.pyi index f0ad547567c0..cb584d2b1310 100644 --- a/stubs/passlib/passlib/crypto/_blowfish/unrolled.pyi +++ b/stubs/passlib/passlib/crypto/_blowfish/unrolled.pyi @@ -3,3 +3,5 @@ from passlib.crypto._blowfish.base import BlowfishEngine as _BlowfishEngine class BlowfishEngine(_BlowfishEngine): def encipher(self, l, r): ... def expand(self, key_words) -> None: ... + +__all__ = ["BlowfishEngine"] diff --git a/stubs/passlib/passlib/crypto/_md4.pyi b/stubs/passlib/passlib/crypto/_md4.pyi index 2175a206053c..cbd87fb432bd 100644 --- a/stubs/passlib/passlib/crypto/_md4.pyi +++ b/stubs/passlib/passlib/crypto/_md4.pyi @@ -10,3 +10,5 @@ class md4: def copy(self): ... def digest(self): ... def hexdigest(self): ... + +__all__ = ["md4"] diff --git a/stubs/passlib/passlib/crypto/digest.pyi b/stubs/passlib/passlib/crypto/digest.pyi index 15eec301b43a..7aa493613f31 100644 --- a/stubs/passlib/passlib/crypto/digest.pyi +++ b/stubs/passlib/passlib/crypto/digest.pyi @@ -26,3 +26,15 @@ class HashInfo(SequenceMixin): def compile_hmac(digest, key, multipart: bool = False): ... def pbkdf1(digest, secret, salt, rounds, keylen: Incomplete | None = None): ... def pbkdf2_hmac(digest, secret, salt, rounds, keylen: Incomplete | None = None): ... + +__all__ = [ + # hash utils + "lookup_hash", + "HashInfo", + "norm_hash_name", + # hmac utils + "compile_hmac", + # kdfs + "pbkdf1", + "pbkdf2_hmac", +] diff --git a/stubs/passlib/passlib/crypto/scrypt/__init__.pyi b/stubs/passlib/passlib/crypto/scrypt/__init__.pyi index 913e9428b514..68cd983a0089 100644 --- a/stubs/passlib/passlib/crypto/scrypt/__init__.pyi +++ b/stubs/passlib/passlib/crypto/scrypt/__init__.pyi @@ -1,2 +1,4 @@ def validate(n, r, p): ... def scrypt(secret, salt, n, r, p: int = 1, keylen: int = 32): ... + +__all__ = ["validate", "scrypt"] diff --git a/stubs/passlib/passlib/crypto/scrypt/_builtin.pyi b/stubs/passlib/passlib/crypto/scrypt/_builtin.pyi index d7fa6867f017..4cb28c9221f5 100644 --- a/stubs/passlib/passlib/crypto/scrypt/_builtin.pyi +++ b/stubs/passlib/passlib/crypto/scrypt/_builtin.pyi @@ -17,3 +17,5 @@ class ScryptEngine: def run(self, secret, salt, keylen): ... def smix(self, input) -> Generator[None, None, Any]: ... def bmix(self, source, target) -> None: ... 
+ +__all__ = ["ScryptEngine"] diff --git a/stubs/passlib/passlib/handlers/argon2.pyi b/stubs/passlib/passlib/handlers/argon2.pyi index e7cd6228a264..7f24f3be3c7e 100644 --- a/stubs/passlib/passlib/handlers/argon2.pyi +++ b/stubs/passlib/passlib/handlers/argon2.pyi @@ -83,3 +83,5 @@ class _PureBackend(_Argon2Common): ... class argon2(_NoBackend, _Argon2Common): # type: ignore[misc] backends: ClassVar[tuple[str, ...]] + +__all__ = ["argon2"] diff --git a/stubs/passlib/passlib/handlers/bcrypt.pyi b/stubs/passlib/passlib/handlers/bcrypt.pyi index 278b137a3799..6117d9c8c23d 100644 --- a/stubs/passlib/passlib/handlers/bcrypt.pyi +++ b/stubs/passlib/passlib/handlers/bcrypt.pyi @@ -53,3 +53,5 @@ class bcrypt_sha256(_wrapped_bcrypt): @classmethod def from_string(cls, hash): ... def __init__(self, version: Incomplete | None = None, **kwds) -> None: ... + +__all__ = ["bcrypt"] diff --git a/stubs/passlib/passlib/handlers/cisco.pyi b/stubs/passlib/passlib/handlers/cisco.pyi index 1d05ce2fb2f9..61b480739c4d 100644 --- a/stubs/passlib/passlib/handlers/cisco.pyi +++ b/stubs/passlib/passlib/handlers/cisco.pyi @@ -25,3 +25,5 @@ class cisco_type7(uh.GenericHandler): def __init__(self, salt: int | None = None, **kwds) -> None: ... @classmethod def decode(cls, hash, encoding: str = "utf-8"): ... + +__all__ = ["cisco_pix", "cisco_asa", "cisco_type7"] diff --git a/stubs/passlib/passlib/handlers/des_crypt.pyi b/stubs/passlib/passlib/handlers/des_crypt.pyi index a8a10abb9ff6..6e3b7297bb80 100644 --- a/stubs/passlib/passlib/handlers/des_crypt.pyi +++ b/stubs/passlib/passlib/handlers/des_crypt.pyi @@ -50,3 +50,5 @@ class crypt16(uh.TruncateMixin, uh.HasSalt, uh.GenericHandler): # type: ignore[ truncate_size: ClassVar[int] @classmethod def from_string(cls, hash): ... + +__all__ = ["des_crypt", "bsdi_crypt", "bigcrypt", "crypt16"] diff --git a/stubs/passlib/passlib/handlers/digests.pyi b/stubs/passlib/passlib/handlers/digests.pyi index 389d0328c0e5..8353f7d9d760 100644 --- a/stubs/passlib/passlib/handlers/digests.pyi +++ b/stubs/passlib/passlib/handlers/digests.pyi @@ -30,3 +30,5 @@ class htdigest(uh.MinimalHandler): def genconfig(cls): ... @classmethod def genhash(cls, secret, config, user, realm, encoding: Incomplete | None = None): ... # type: ignore[override] + +__all__ = ["create_hex_hash", "hex_md4", "hex_md5", "hex_sha1", "hex_sha256", "hex_sha512"] diff --git a/stubs/passlib/passlib/handlers/django.pyi b/stubs/passlib/passlib/handlers/django.pyi index 2996700e0fcf..f613ed7cb435 100644 --- a/stubs/passlib/passlib/handlers/django.pyi +++ b/stubs/passlib/passlib/handlers/django.pyi @@ -79,3 +79,14 @@ class django_disabled(DisabledHash, uh.StaticHandler): def identify(cls, hash: str | bytes) -> bool: ... @classmethod def verify(cls, secret: str | bytes, hash: str | bytes) -> bool: ... # type: ignore[override] + +__all__ = [ + "django_salted_sha1", + "django_salted_md5", + "django_bcrypt", + "django_pbkdf2_sha1", + "django_pbkdf2_sha256", + "django_argon2", + "django_des_crypt", + "django_disabled", +] diff --git a/stubs/passlib/passlib/handlers/fshp.pyi b/stubs/passlib/passlib/handlers/fshp.pyi index 8a6fe9ecf785..a8addc70a042 100644 --- a/stubs/passlib/passlib/handlers/fshp.pyi +++ b/stubs/passlib/passlib/handlers/fshp.pyi @@ -24,3 +24,5 @@ class fshp(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): def checksum_size(self): ... @classmethod def from_string(cls, hash): ... 
+ +__all__ = ["fshp"] diff --git a/stubs/passlib/passlib/handlers/md5_crypt.pyi b/stubs/passlib/passlib/handlers/md5_crypt.pyi index 53bf4afadcaf..94fef0634106 100644 --- a/stubs/passlib/passlib/handlers/md5_crypt.pyi +++ b/stubs/passlib/passlib/handlers/md5_crypt.pyi @@ -18,3 +18,5 @@ class md5_crypt(uh.HasManyBackends, _MD5_Common): class apr_md5_crypt(_MD5_Common): name: ClassVar[str] ident: ClassVar[str] + +__all__ = ["md5_crypt", "apr_md5_crypt"] diff --git a/stubs/passlib/passlib/handlers/misc.pyi b/stubs/passlib/passlib/handlers/misc.pyi index 822ae77b34ba..2120f052a1c9 100644 --- a/stubs/passlib/passlib/handlers/misc.pyi +++ b/stubs/passlib/passlib/handlers/misc.pyi @@ -48,3 +48,5 @@ class plaintext(uh.MinimalHandler): def genconfig(cls): ... @classmethod def genhash(cls, secret, config, encoding: str | None = None): ... # type: ignore[override] + +__all__ = ["unix_disabled", "unix_fallback", "plaintext"] diff --git a/stubs/passlib/passlib/handlers/mssql.pyi b/stubs/passlib/passlib/handlers/mssql.pyi index 3bf6fc437522..c1c7f1f8882d 100644 --- a/stubs/passlib/passlib/handlers/mssql.pyi +++ b/stubs/passlib/passlib/handlers/mssql.pyi @@ -19,3 +19,5 @@ class mssql2005(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): max_salt_size: ClassVar[int] @classmethod def from_string(cls, hash): ... + +__all__ = ["mssql2000", "mssql2005"] diff --git a/stubs/passlib/passlib/handlers/pbkdf2.pyi b/stubs/passlib/passlib/handlers/pbkdf2.pyi index 280849de8fb4..dfe0f112e9f4 100644 --- a/stubs/passlib/passlib/handlers/pbkdf2.pyi +++ b/stubs/passlib/passlib/handlers/pbkdf2.pyi @@ -87,3 +87,5 @@ class grub_pbkdf2_sha512(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.Gene rounds_cost: ClassVar[str] @classmethod def from_string(cls, hash): ... + +__all__ = ["pbkdf2_sha1", "pbkdf2_sha256", "pbkdf2_sha512", "cta_pbkdf2_sha1", "dlitz_pbkdf2_sha1", "grub_pbkdf2_sha512"] diff --git a/stubs/passlib/passlib/handlers/phpass.pyi b/stubs/passlib/passlib/handlers/phpass.pyi index 02419bdc8fec..e13cacf49436 100644 --- a/stubs/passlib/passlib/handlers/phpass.pyi +++ b/stubs/passlib/passlib/handlers/phpass.pyi @@ -18,3 +18,5 @@ class phpass(uh.HasManyIdents, uh.HasRounds, uh.HasSalt, uh.GenericHandler): # ident_aliases: ClassVar[dict[str, str]] @classmethod def from_string(cls, hash: str | bytes) -> Self: ... 
# type: ignore[override] + +__all__ = ["phpass"] diff --git a/stubs/passlib/passlib/handlers/postgres.pyi b/stubs/passlib/passlib/handlers/postgres.pyi index 99a515e468bd..263fda727a50 100644 --- a/stubs/passlib/passlib/handlers/postgres.pyi +++ b/stubs/passlib/passlib/handlers/postgres.pyi @@ -6,3 +6,5 @@ class postgres_md5(uh.HasUserContext, uh.StaticHandler): name: ClassVar[str] checksum_chars: ClassVar[str] checksum_size: ClassVar[int] + +__all__ = ["postgres_md5"] diff --git a/stubs/passlib/passlib/handlers/roundup.pyi b/stubs/passlib/passlib/handlers/roundup.pyi index 53ca720b3c1f..24b9f60ed9e8 100644 --- a/stubs/passlib/passlib/handlers/roundup.pyi +++ b/stubs/passlib/passlib/handlers/roundup.pyi @@ -3,3 +3,5 @@ from typing import Any roundup_plaintext: Any ldap_hex_md5: Any ldap_hex_sha1: Any + +__all__ = ["roundup_plaintext", "ldap_hex_md5", "ldap_hex_sha1"] diff --git a/stubs/passlib/passlib/handlers/scram.pyi b/stubs/passlib/passlib/handlers/scram.pyi index 36a4377bd5c1..7c6fbd23c41a 100644 --- a/stubs/passlib/passlib/handlers/scram.pyi +++ b/stubs/passlib/passlib/handlers/scram.pyi @@ -27,3 +27,5 @@ class scram(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): def __init__(self, algs: Incomplete | None = None, **kwds) -> None: ... @classmethod def verify(cls, secret, hash, full: bool = False): ... # type: ignore[override] + +__all__ = ["scram"] diff --git a/stubs/passlib/passlib/handlers/scrypt.pyi b/stubs/passlib/passlib/handlers/scrypt.pyi index 52285c931c30..871390276c29 100644 --- a/stubs/passlib/passlib/handlers/scrypt.pyi +++ b/stubs/passlib/passlib/handlers/scrypt.pyi @@ -31,3 +31,5 @@ class scrypt(uh.ParallelismMixin, uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum def has_backend(cls, name: str = "any"): ... @classmethod def set_backend(cls, name: str = "any", dryrun: bool = False) -> None: ... + +__all__ = ["scrypt"] diff --git a/stubs/passlib/passlib/handlers/sha1_crypt.pyi b/stubs/passlib/passlib/handlers/sha1_crypt.pyi index dd4bdea86191..9380a736a8bd 100644 --- a/stubs/passlib/passlib/handlers/sha1_crypt.pyi +++ b/stubs/passlib/passlib/handlers/sha1_crypt.pyi @@ -21,3 +21,5 @@ class sha1_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler def from_string(cls, hash: str | bytes) -> Self: ... # type: ignore[override] def to_string(self, config: bool = False) -> str: ... backends: ClassVar[tuple[str, ...]] + +__all__: list[str] = [] diff --git a/stubs/passlib/passlib/handlers/sha2_crypt.pyi b/stubs/passlib/passlib/handlers/sha2_crypt.pyi index 7ebaffb58b18..44e6dcdebe50 100644 --- a/stubs/passlib/passlib/handlers/sha2_crypt.pyi +++ b/stubs/passlib/passlib/handlers/sha2_crypt.pyi @@ -27,3 +27,5 @@ class sha512_crypt(_SHA2_Common): ident: ClassVar[str] checksum_size: ClassVar[int] default_rounds: ClassVar[int] + +__all__ = ["sha512_crypt", "sha256_crypt"] diff --git a/stubs/passlib/passlib/handlers/sun_md5_crypt.pyi b/stubs/passlib/passlib/handlers/sun_md5_crypt.pyi index c5b1a165ab63..6bec6a56b1f6 100644 --- a/stubs/passlib/passlib/handlers/sun_md5_crypt.pyi +++ b/stubs/passlib/passlib/handlers/sun_md5_crypt.pyi @@ -22,3 +22,5 @@ class sun_md5_crypt(uh.HasRounds, uh.HasSalt, uh.GenericHandler): # type: ignor @classmethod def from_string(cls, hash: str | bytes) -> Self: ... # type: ignore[override] def to_string(self, _withchk: bool = True) -> str: ... 
+ +__all__ = ["sun_md5_crypt"] diff --git a/stubs/passlib/passlib/handlers/windows.pyi b/stubs/passlib/passlib/handlers/windows.pyi index c792ebec86a3..165deec3d8b7 100644 --- a/stubs/passlib/passlib/handlers/windows.pyi +++ b/stubs/passlib/passlib/handlers/windows.pyi @@ -42,3 +42,5 @@ class msdcc2(uh.HasUserContext, uh.StaticHandler): checksum_size: ClassVar[int] @classmethod def raw(cls, secret, user): ... + +__all__ = ["lmhash", "nthash", "bsd_nthash", "msdcc", "msdcc2"] diff --git a/stubs/passlib/passlib/utils/binary.pyi b/stubs/passlib/passlib/utils/binary.pyi index c188e50e11d8..64b5d5d8c465 100644 --- a/stubs/passlib/passlib/utils/binary.pyi +++ b/stubs/passlib/passlib/utils/binary.pyi @@ -49,3 +49,32 @@ class LazyBase64Engine(Base64Engine): h64: Any h64big: Any bcrypt64: Any + +__all__ = [ + # constants + "BASE64_CHARS", + "PADDED_BASE64_CHARS", + "AB64_CHARS", + "HASH64_CHARS", + "BCRYPT_CHARS", + "HEX_CHARS", + "LOWER_HEX_CHARS", + "UPPER_HEX_CHARS", + "ALL_BYTE_VALUES", + # misc + "compile_byte_translation", + # base64 + "ab64_encode", + "ab64_decode", + "b64s_encode", + "b64s_decode", + # base32 + "b32encode", + "b32decode", + # custom encodings + "Base64Engine", + "LazyBase64Engine", + "h64", + "h64big", + "bcrypt64", +] diff --git a/stubs/passlib/passlib/utils/decor.pyi b/stubs/passlib/passlib/utils/decor.pyi index 0087b908080b..3104d06f7ca6 100644 --- a/stubs/passlib/passlib/utils/decor.pyi +++ b/stubs/passlib/passlib/utils/decor.pyi @@ -40,3 +40,12 @@ def deprecated_method( updoc: bool = True, replacement: Incomplete | None = None, ): ... + +__all__ = [ + "classproperty", + "hybrid_method", + "memoize_single_value", + "memoized_property", + "deprecated_function", + "deprecated_method", +] diff --git a/stubs/passlib/passlib/utils/handlers.pyi b/stubs/passlib/passlib/utils/handlers.pyi index edb743617b44..6c76c322cb78 100644 --- a/stubs/passlib/passlib/utils/handlers.pyi +++ b/stubs/passlib/passlib/utils/handlers.pyi @@ -183,3 +183,23 @@ class PrefixWrapper: def encrypt(self, secret, **kwds): ... def hash(self, secret, **kwds): ... def verify(self, secret, hash, **kwds): ... + +__all__ = [ + # helpers for implementing MCF handlers + "parse_mc2", + "parse_mc3", + "render_mc2", + "render_mc3", + # framework for implementing handlers + "GenericHandler", + "StaticHandler", + "HasUserContext", + "HasRawChecksum", + "HasManyIdents", + "HasSalt", + "HasRawSalt", + "HasRounds", + "HasManyBackends", + # other helpers + "PrefixWrapper", +] diff --git a/stubs/passlib/passlib/utils/md4.pyi b/stubs/passlib/passlib/utils/md4.pyi index a316e0c7fcaa..3ce1272632b6 100644 --- a/stubs/passlib/passlib/utils/md4.pyi +++ b/stubs/passlib/passlib/utils/md4.pyi @@ -1,3 +1,5 @@ from typing import Any md4: Any + +__all__ = ["md4"] diff --git a/stubs/passlib/passlib/utils/pbkdf2.pyi b/stubs/passlib/passlib/utils/pbkdf2.pyi index 57a5bb65b4ae..31f0748694d2 100644 --- a/stubs/passlib/passlib/utils/pbkdf2.pyi +++ b/stubs/passlib/passlib/utils/pbkdf2.pyi @@ -5,3 +5,13 @@ from passlib.crypto.digest import norm_hash_name as norm_hash_name def get_prf(name): ... def pbkdf1(secret, salt, rounds, keylen: Incomplete | None = None, hash: str = "sha1"): ... def pbkdf2(secret, salt, rounds, keylen: Incomplete | None = None, prf: str = "hmac-sha1"): ... 
+ +__all__ = [ + # hash utils + "norm_hash_name", + # prf utils + "get_prf", + # kdfs + "pbkdf1", + "pbkdf2", +] diff --git a/stubs/peewee/@tests/stubtest_allowlist.txt b/stubs/peewee/@tests/stubtest_allowlist.txt index 8e93c781b09a..d7ab2ba94e5a 100644 --- a/stubs/peewee/@tests/stubtest_allowlist.txt +++ b/stubs/peewee/@tests/stubtest_allowlist.txt @@ -1,6 +1,3 @@ -# TODO: missing from stub -peewee.__all__ - # Stubtest doesn't recognize __ as indicating positional-only arg at runtime # https://github.com/python/mypy/issues/15302 peewee.Model.insert diff --git a/stubs/peewee/peewee.pyi b/stubs/peewee/peewee.pyi index 6ea807eefc5b..9aaf79ac88a1 100644 --- a/stubs/peewee/peewee.pyi +++ b/stubs/peewee/peewee.pyi @@ -1845,3 +1845,81 @@ class PrefetchQuery: def store_instance(self, instance, id_map) -> None: ... def prefetch(sq, *subqueries): ... + +__all__ = [ + "AnyField", + "AsIs", + "AutoField", + "BareField", + "BigAutoField", + "BigBitField", + "BigIntegerField", + "BinaryUUIDField", + "BitField", + "BlobField", + "BooleanField", + "Case", + "Cast", + "CharField", + "Check", + "chunked", + "Column", + "CompositeKey", + "Context", + "Database", + "DatabaseError", + "DatabaseProxy", + "DataError", + "DateField", + "DateTimeField", + "DecimalField", + "DeferredForeignKey", + "DeferredThroughModel", + "DJANGO_MAP", + "DoesNotExist", + "DoubleField", + "DQ", + "EXCLUDED", + "Field", + "FixedCharField", + "FloatField", + "fn", + "ForeignKeyField", + "IdentityField", + "ImproperlyConfigured", + "Index", + "IntegerField", + "IntegrityError", + "InterfaceError", + "InternalError", + "IPField", + "JOIN", + "ManyToManyField", + "Model", + "ModelIndex", + "MySQLDatabase", + "NotSupportedError", + "OP", + "OperationalError", + "PostgresqlDatabase", + "PrimaryKeyField", + "prefetch", + "PREFETCH_TYPE", + "ProgrammingError", + "Proxy", + "QualifiedNames", + "SchemaManager", + "SmallIntegerField", + "Select", + "SQL", + "SqliteDatabase", + "Table", + "TextField", + "TimeField", + "TimestampField", + "Tuple", + "UUIDField", + "Value", + "ValuesList", + "Window", +] diff --git a/stubs/polib/@tests/stubtest_allowlist.txt b/stubs/polib/@tests/stubtest_allowlist.txt deleted file mode 100644 index 0c8aa4534438..000000000000 --- a/stubs/polib/@tests/stubtest_allowlist.txt +++ /dev/null @@ -1,2 +0,0 @@ -# TODO: missing from stub -polib.__all__ diff --git a/stubs/polib/polib.pyi b/stubs/polib/polib.pyi index fa7061f1f6ab..2bc1171fb9c2 100644 --- a/stubs/polib/polib.pyi +++ b/stubs/polib/polib.pyi @@ -155,3 +155,16 @@ class _MOFileParser(Generic[_TM]): def __init__(self, mofile: str, *args: Any, **kwargs: Any) -> None: ... def __del__(self) -> None: ... def parse(self) -> _TM: ... + +__all__ = [ + "pofile", + "POFile", + "POEntry", + "mofile", + "MOFile", + "MOEntry", + "default_encoding", + "escape", + "unescape", + "detect_encoding", +] diff --git a/stubs/psutil/@tests/stubtest_allowlist.txt b/stubs/psutil/@tests/stubtest_allowlist.txt index 0895c3f41246..ec9f30c85992 100644 --- a/stubs/psutil/@tests/stubtest_allowlist.txt +++ b/stubs/psutil/@tests/stubtest_allowlist.txt @@ -1,7 +1,5 @@ # TODO: missing from stub psutil.__all__ -psutil._common.__all__ -psutil._psposix.__all__ # Stubtest does not support these platforms psutil._psaix diff --git a/stubs/psutil/psutil/_common.pyi b/stubs/psutil/psutil/_common.pyi index 7ea0bbb81342..872a329548ff 100644 --- a/stubs/psutil/psutil/_common.pyi +++ b/stubs/psutil/psutil/_common.pyi @@ -283,3 +283,96 @@ def term_supports_colors(file: SupportsWrite[str] = ...) -> bool: ... 
def hilite(s: str, color: str | None = None, bold: bool = False) -> str: ... def print_color(s: str, color: str | None = None, bold: bool = False, file: SupportsWrite[str] = ...) -> None: ... def debug(msg) -> None: ... + +__all__ = [ + # OS constants + "FREEBSD", + "BSD", + "LINUX", + "NETBSD", + "OPENBSD", + "MACOS", + "OSX", + "POSIX", + "SUNOS", + "WINDOWS", + # connection constants + "CONN_CLOSE", + "CONN_CLOSE_WAIT", + "CONN_CLOSING", + "CONN_ESTABLISHED", + "CONN_FIN_WAIT1", + "CONN_FIN_WAIT2", + "CONN_LAST_ACK", + "CONN_LISTEN", + "CONN_NONE", + "CONN_SYN_RECV", + "CONN_SYN_SENT", + "CONN_TIME_WAIT", + # net constants + "NIC_DUPLEX_FULL", + "NIC_DUPLEX_HALF", + "NIC_DUPLEX_UNKNOWN", + # process status constants + "STATUS_DEAD", + "STATUS_DISK_SLEEP", + "STATUS_IDLE", + "STATUS_LOCKED", + "STATUS_RUNNING", + "STATUS_SLEEPING", + "STATUS_STOPPED", + "STATUS_SUSPENDED", + "STATUS_TRACING_STOP", + "STATUS_WAITING", + "STATUS_WAKE_KILL", + "STATUS_WAKING", + "STATUS_ZOMBIE", + "STATUS_PARKED", + # other constants + "ENCODING", + "ENCODING_ERRS", + "AF_INET6", + # named tuples + "pconn", + "pcputimes", + "pctxsw", + "pgids", + "pio", + "pionice", + "popenfile", + "pthread", + "puids", + "sconn", + "scpustats", + "sdiskio", + "sdiskpart", + "sdiskusage", + "snetio", + "snicaddr", + "snicstats", + "sswap", + "suser", + # utility functions + "conn_tmap", + "deprecated_method", + "isfile_strict", + "memoize", + "parse_environ_block", + "path_exists_strict", + "usage_percent", + "supports_ipv6", + "sockfam_to_enum", + "socktype_to_enum", + "wrap_numbers", + "open_text", + "open_binary", + "cat", + "bcat", + "bytes2human", + "conn_to_ntuple", + "debug", + # shell utils + "hilite", + "term_supports_colors", + "print_color", +] diff --git a/stubs/psutil/psutil/_psposix.pyi b/stubs/psutil/psutil/_psposix.pyi index 6d1f357a7c2d..8b157113e188 100644 --- a/stubs/psutil/psutil/_psposix.pyi +++ b/stubs/psutil/psutil/_psposix.pyi @@ -13,3 +13,5 @@ def wait_pid( ): ... def disk_usage(path): ... def get_terminal_map(): ... 
+ +__all__ = ["pid_exists", "wait_pid", "disk_usage", "get_terminal_map"] diff --git a/stubs/reportlab/@tests/stubtest_allowlist.txt b/stubs/reportlab/@tests/stubtest_allowlist.txt index 3d5537fa9f79..b11ffbf4b64b 100644 --- a/stubs/reportlab/@tests/stubtest_allowlist.txt +++ b/stubs/reportlab/@tests/stubtest_allowlist.txt @@ -1,26 +1,10 @@ # TODO: missing from stub -reportlab.graphics.barcode.__all__ reportlab.graphics.barcode.dmtx.__all__ +reportlab.rl_config.__all__ + +# Incorrect __all__ names in runtime reportlab.graphics.barcode.eanbc.__all__ reportlab.graphics.barcode.ecc200datamatrix.__all__ -reportlab.graphics.barcode.usps4s.__all__ -reportlab.graphics.barcode.widgets.__all__ -reportlab.graphics.svgpath.__all__ -reportlab.graphics.transform.__all__ -reportlab.graphics.utils.__all__ -reportlab.lib.arciv.__all__ -reportlab.lib.formatters.__all__ -reportlab.lib.pygments2xpre.__all__ -reportlab.lib.rl_accel.__all__ -reportlab.lib.rltempfile.__all__ -reportlab.lib.styles.__all__ -reportlab.pdfgen.canvas.__all__ -reportlab.platypus.frames.__all__ -reportlab.platypus.multicol.__all__ -reportlab.platypus.paragraph.__all__ -reportlab.platypus.xpreformatted.__all__ -reportlab.rl_config.__all__ -reportlab.rl_settings.__all__ # Error: is inconsistent # ====================== diff --git a/stubs/reportlab/reportlab/graphics/barcode/__init__.pyi b/stubs/reportlab/reportlab/graphics/barcode/__init__.pyi index 0098e0c2a798..62d6c1381612 100644 --- a/stubs/reportlab/reportlab/graphics/barcode/__init__.pyi +++ b/stubs/reportlab/reportlab/graphics/barcode/__init__.pyi @@ -3,3 +3,5 @@ def getCodes(): ... def getCodeNames(): ... def createBarcodeDrawing(codeName, **options): ... def createBarcodeImageInMemory(codeName, **options): ... + +__all__ = ("registerWidget", "getCodes", "getCodeNames", "createBarcodeDrawing", "createBarcodeImageInMemory") diff --git a/stubs/reportlab/reportlab/graphics/barcode/eanbc.pyi b/stubs/reportlab/reportlab/graphics/barcode/eanbc.pyi index 65ab74d690e6..4bc3d09551c3 100644 --- a/stubs/reportlab/reportlab/graphics/barcode/eanbc.pyi +++ b/stubs/reportlab/reportlab/graphics/barcode/eanbc.pyi @@ -39,3 +39,5 @@ class Ean5BarcodeWidget(Ean13BarcodeWidget): class ISBNBarcodeWidget(Ean13BarcodeWidget): codeName: str def draw(self): ... + +__all__ = ("Ean13BarcodeWidget", "Ean8BarcodeWidget", "UPCA", "Ean5BarcodeWidget", "ISBNBarcodeWidget") diff --git a/stubs/reportlab/reportlab/graphics/barcode/ecc200datamatrix.pyi b/stubs/reportlab/reportlab/graphics/barcode/ecc200datamatrix.pyi index 536bb43470e1..e017b1bba63b 100644 --- a/stubs/reportlab/reportlab/graphics/barcode/ecc200datamatrix.pyi +++ b/stubs/reportlab/reportlab/graphics/barcode/ecc200datamatrix.pyi @@ -20,3 +20,5 @@ class ECC200DataMatrix(Barcode): def encode(self): ... def computeSize(self, *args) -> None: ... def draw(self) -> None: ... + +__all__ = ("ECC200DataMatrix",) diff --git a/stubs/reportlab/reportlab/graphics/barcode/usps4s.pyi b/stubs/reportlab/reportlab/graphics/barcode/usps4s.pyi index ee9f1dc6f5e9..aeb4a5dfc3da 100644 --- a/stubs/reportlab/reportlab/graphics/barcode/usps4s.pyi +++ b/stubs/reportlab/reportlab/graphics/barcode/usps4s.pyi @@ -83,3 +83,5 @@ class USPS_4State(Barcode): def value(self, value) -> None: ... def drawHumanReadable(self) -> None: ... def annotate(self, x, y, text, fontName, fontSize, anchor: str = "middle") -> None: ... 
+ +__all__ = ("USPS_4State",) diff --git a/stubs/reportlab/reportlab/graphics/barcode/widgets.pyi b/stubs/reportlab/reportlab/graphics/barcode/widgets.pyi index f5bdea3fc8d3..2ea5c9d53937 100644 --- a/stubs/reportlab/reportlab/graphics/barcode/widgets.pyi +++ b/stubs/reportlab/reportlab/graphics/barcode/widgets.pyi @@ -63,3 +63,18 @@ class BarcodePOSTNET(_BarcodeWidget): class BarcodeUSPS_4State(_BarcodeWidget): codeName: Final = "USPS_4State" def __init__(self, **kw) -> None: ... + +__all__ = ( + "BarcodeI2of5", + "BarcodeCode128", + "BarcodeStandard93", + "BarcodeExtended93", + "BarcodeStandard39", + "BarcodeExtended39", + "BarcodeMSI", + "BarcodeCodabar", + "BarcodeCode11", + "BarcodeFIM", + "BarcodePOSTNET", + "BarcodeUSPS_4State", +) diff --git a/stubs/reportlab/reportlab/graphics/svgpath.pyi b/stubs/reportlab/reportlab/graphics/svgpath.pyi index 5273b798110b..89464473a29e 100644 --- a/stubs/reportlab/reportlab/graphics/svgpath.pyi +++ b/stubs/reportlab/reportlab/graphics/svgpath.pyi @@ -6,3 +6,5 @@ class SvgPath(Path, UserNode): fillColor: Incomplete def __init__(self, s, isClipPath: int = 0, autoclose: Incomplete | None = None, fillMode=0, **kw) -> None: ... def provideNode(self): ... + +__all__ = ("SvgPath",) diff --git a/stubs/reportlab/reportlab/graphics/transform.pyi b/stubs/reportlab/reportlab/graphics/transform.pyi index 4cb456df9772..b8b8b59c9950 100644 --- a/stubs/reportlab/reportlab/graphics/transform.pyi +++ b/stubs/reportlab/reportlab/graphics/transform.pyi @@ -10,3 +10,18 @@ def zTransformPoint(A, v): ... def transformPoint(A, v): ... def transformPoints(matrix, V): ... def zTransformPoints(matrix, V): ... + +__all__ = ( + "nullTransform", + "translate", + "scale", + "rotate", + "skewX", + "skewY", + "mmult", + "inverse", + "zTransformPoint", + "transformPoint", + "transformPoints", + "zTransformPoints", +) diff --git a/stubs/reportlab/reportlab/graphics/utils.pyi b/stubs/reportlab/reportlab/graphics/utils.pyi index 116864736255..5853d4943246 100644 --- a/stubs/reportlab/reportlab/graphics/utils.pyi +++ b/stubs/reportlab/reportlab/graphics/utils.pyi @@ -20,3 +20,5 @@ def text2Path( # NOTE: This only exists on some render backends def processGlyph(G, truncate=1, pathReverse=0): ... def text2PathDescription(text, x=0, y=0, fontName=..., fontSize=1000, anchor="start", truncate=1, pathReverse=0, gs=None): ... + +__all__ = ("setFont", "pathNumTrunc", "processGlyph", "text2PathDescription", "text2Path", "RenderPMError") diff --git a/stubs/reportlab/reportlab/lib/arciv.pyi b/stubs/reportlab/reportlab/lib/arciv.pyi index d8ea9257047b..0fa1d8aee78c 100644 --- a/stubs/reportlab/reportlab/lib/arciv.pyi +++ b/stubs/reportlab/reportlab/lib/arciv.pyi @@ -5,3 +5,5 @@ class ArcIV: def encode(text, key): ... def decode(text, key): ... + +__all__ = ["ArcIV", "encode", "decode"] diff --git a/stubs/reportlab/reportlab/lib/formatters.pyi b/stubs/reportlab/reportlab/lib/formatters.pyi index 343e450b6a5b..31f215719e2c 100644 --- a/stubs/reportlab/reportlab/lib/formatters.pyi +++ b/stubs/reportlab/reportlab/lib/formatters.pyi @@ -22,3 +22,5 @@ class DecimalFormatter(Formatter): suffix: Incomplete | None = None, ) -> None: ... def format(self, num): ... 
+ +__all__ = ("Formatter", "DecimalFormatter") diff --git a/stubs/reportlab/reportlab/lib/pygments2xpre.pyi b/stubs/reportlab/reportlab/lib/pygments2xpre.pyi index c36ec3d628a4..f31768798b46 100644 --- a/stubs/reportlab/reportlab/lib/pygments2xpre.pyi +++ b/stubs/reportlab/reportlab/lib/pygments2xpre.pyi @@ -1 +1,3 @@ def pygments2xpre(s, language: str = "python"): ... + +__all__ = ("pygments2xpre",) diff --git a/stubs/reportlab/reportlab/lib/rl_accel.pyi b/stubs/reportlab/reportlab/lib/rl_accel.pyi index 1b69717784fe..b0a9336227b7 100644 --- a/stubs/reportlab/reportlab/lib/rl_accel.pyi +++ b/stubs/reportlab/reportlab/lib/rl_accel.pyi @@ -9,3 +9,17 @@ def escapePDF(s): ... def asciiBase85Encode(input): ... def asciiBase85Decode(input): ... def sameFrag(f, g): ... + +__all__ = [ + "fp_str", + "unicode2T1", + "instanceStringWidthT1", + "instanceStringWidthTTF", + "asciiBase85Encode", + "asciiBase85Decode", + "escapePDF", + "sameFrag", + "calcChecksum", + "add32", + "hex32", +] diff --git a/stubs/reportlab/reportlab/lib/rltempfile.pyi b/stubs/reportlab/reportlab/lib/rltempfile.pyi index e0175a8eac11..24b270ff237c 100644 --- a/stubs/reportlab/reportlab/lib/rltempfile.pyi +++ b/stubs/reportlab/reportlab/lib/rltempfile.pyi @@ -1,2 +1,4 @@ def get_rl_tempdir(*subdirs: str) -> str: ... def get_rl_tempfile(fn: str | None = None) -> str: ... + +__all__ = ("get_rl_tempdir", "get_rl_tempdir") diff --git a/stubs/reportlab/reportlab/lib/styles.pyi b/stubs/reportlab/reportlab/lib/styles.pyi index 5cfa39f3f184..028d9b0cd5a8 100644 --- a/stubs/reportlab/reportlab/lib/styles.pyi +++ b/stubs/reportlab/reportlab/lib/styles.pyi @@ -179,3 +179,5 @@ class StyleSheet1: def list(self) -> None: ... def getSampleStyleSheet() -> StyleSheet1: ... + +__all__ = ("PropertySet", "ParagraphStyle", "str2alignment", "LineStyle", "ListStyle", "StyleSheet1", "getSampleStyleSheet") diff --git a/stubs/reportlab/reportlab/pdfgen/canvas.pyi b/stubs/reportlab/reportlab/pdfgen/canvas.pyi index 0f33d5a61d57..96351502e016 100644 --- a/stubs/reportlab/reportlab/pdfgen/canvas.pyi +++ b/stubs/reportlab/reportlab/pdfgen/canvas.pyi @@ -305,3 +305,5 @@ class Canvas(_PDFColorSetter): def acroForm(self): ... def drawBoundary(self, sb, x1: float, y1: float, width: float, height: float) -> None: ... def shapedText(self, text) -> tuple[Incomplete, Incomplete]: ... + +__all__ = ["Canvas", "ShowBoundaryValue"] diff --git a/stubs/reportlab/reportlab/platypus/frames.pyi b/stubs/reportlab/reportlab/platypus/frames.pyi index 9de3e87389da..061ba9be67c6 100644 --- a/stubs/reportlab/reportlab/platypus/frames.pyi +++ b/stubs/reportlab/reportlab/platypus/frames.pyi @@ -35,3 +35,5 @@ class Frame: def drawBoundary(self, canv: Canvas) -> None: ... def addFromList(self, drawlist: list[Flowable], canv: Canvas) -> None: ... def add_generated_content(self, *C: Flowable) -> None: ... + +__all__ = ("Frame",) diff --git a/stubs/reportlab/reportlab/platypus/multicol.pyi b/stubs/reportlab/reportlab/platypus/multicol.pyi index b30bb0c1e2b5..76a6329965a8 100644 --- a/stubs/reportlab/reportlab/platypus/multicol.pyi +++ b/stubs/reportlab/reportlab/platypus/multicol.pyi @@ -15,3 +15,5 @@ class MultiCol(_Container, _FindSplitterMixin, Flowable): spaceAfter: float | None = None, ) -> None: ... def nWidths(self, aW: float) -> list[float]: ... 
+ +__all__ = ["MultiCol"] diff --git a/stubs/reportlab/reportlab/platypus/paragraph.pyi b/stubs/reportlab/reportlab/platypus/paragraph.pyi index 9e79a1df682c..6f38d99f8ec6 100644 --- a/stubs/reportlab/reportlab/platypus/paragraph.pyi +++ b/stubs/reportlab/reportlab/platypus/paragraph.pyi @@ -36,3 +36,5 @@ class Paragraph(Flowable): def getActualLineWidths0(self) -> list[float]: ... @staticmethod def dumpFrags(frags, indent: int = 4, full: bool = False) -> str: ... + +__all__ = ("Paragraph", "cleanBlockQuotedText", "ParaLines", "FragLine") diff --git a/stubs/reportlab/reportlab/platypus/xpreformatted.pyi b/stubs/reportlab/reportlab/platypus/xpreformatted.pyi index ae676ca79279..c5b5e4c81b4e 100644 --- a/stubs/reportlab/reportlab/platypus/xpreformatted.pyi +++ b/stubs/reportlab/reportlab/platypus/xpreformatted.pyi @@ -28,3 +28,5 @@ class PythonPreformatted(XPreformatted): ) -> None: ... def escapeHtml(self, text: str) -> str: ... def fontify(self, code: str) -> str: ... + +__all__ = ("XPreformatted", "PythonPreformatted") diff --git a/stubs/reportlab/reportlab/rl_settings.pyi b/stubs/reportlab/reportlab/rl_settings.pyi index da3f2cdec45e..2c220169136a 100644 --- a/stubs/reportlab/reportlab/rl_settings.pyi +++ b/stubs/reportlab/reportlab/rl_settings.pyi @@ -71,3 +71,72 @@ T1SearchPath: Final[Incomplete] TTFSearchPath: Final[Incomplete] CMapSearchPath: Final[Incomplete] shapedFontGlob: Final[Incomplete | None] + +__all__ = ( + "allowTableBoundsErrors", + "shapeChecking", + "defaultEncoding", + "defaultGraphicsFontName", + "pageCompression", + "useA85", + "defaultPageSize", + "defaultImageCaching", + "warnOnMissingFontGlyphs", + "verbose", + "showBoundary", + "emptyTableAction", + "invariant", + "eps_preview_transparent", + "eps_preview", + "eps_ttf_embed", + "eps_ttf_embed_uid", + "overlapAttachedSpace", + "longTableOptimize", + "autoConvertEncoding", + "_FUZZ", + "wrapA85", + "fsEncodings", + "odbc_driver", + "platypus_link_underline", + "canvas_basefontname", + "allowShortTableRows", + "imageReaderFlags", + "paraFontSizeHeightOffset", + "canvas_baseColor", + "ignoreContainerActions", + "ttfAsciiReadable", + "pdfMultiLine", + "pdfComments", + "debug", + "rtlSupport", + "listWrapOnFakeWidth", + "T1SearchPath", + "TTFSearchPath", + "CMapSearchPath", + "decimalSymbol", + "errorOnDuplicatePageLabelPage", + "autoGenerateMissingTTFName", + "allowTTFSubsetting", + "spaceShrinkage", + "underlineWidth", + "underlineOffset", + "underlineGap", + "strikeWidth", + "strikeOffset", + "strikeGap", + "hyphenationLang", + "uriWasteReduce", + "embeddedHyphenation", + "hyphenationMinWordLength", + "reserveTTFNotdef", + "documentLang", + "encryptionStrength", + "trustedHosts", + "trustedSchemes", + "renderPMBackend", + "xmlParser", + "textPaths", + "toColorCanUse", + "defCWRF", + "shapedFontGlob", +) diff --git a/stubs/toposort/@tests/stubtest_allowlist.txt b/stubs/toposort/@tests/stubtest_allowlist.txt deleted file mode 100644 index f544b05c4078..000000000000 --- a/stubs/toposort/@tests/stubtest_allowlist.txt +++ /dev/null @@ -1,2 +0,0 @@ -# TODO: missing from stub -toposort.__all__ diff --git a/stubs/toposort/toposort.pyi b/stubs/toposort/toposort.pyi index 76af5313428b..35ad36b6f955 100644 --- a/stubs/toposort/toposort.pyi +++ b/stubs/toposort/toposort.pyi @@ -15,3 +15,5 @@ class CircularDependencyError(ValueError): def toposort(data: _SupportsItemsAndLen[_T, Iterable[_T]]) -> Iterator[set[_T]]: ... def toposort_flatten(data: _SupportsItemsAndLen[_T, Iterable[_T]], sort: bool = ...) -> list[_T]: ... 
+ +__all__ = ["toposort", "toposort_flatten", "CircularDependencyError"] diff --git a/stubs/tqdm/@tests/stubtest_allowlist.txt b/stubs/tqdm/@tests/stubtest_allowlist.txt index e14a351b27a6..abe294afc5e1 100644 --- a/stubs/tqdm/@tests/stubtest_allowlist.txt +++ b/stubs/tqdm/@tests/stubtest_allowlist.txt @@ -1,9 +1,3 @@ -# TODO: missing from stub -tqdm._main.__all__ -tqdm._tqdm.__all__ -tqdm._tqdm_gui.__all__ -tqdm._tqdm_notebook.__all__ - # Cannot import in stubtest tqdm.__main__ diff --git a/stubs/tqdm/tqdm/_main.pyi b/stubs/tqdm/tqdm/_main.pyi index 81525aa3c18b..339e5728ce41 100644 --- a/stubs/tqdm/tqdm/_main.pyi +++ b/stubs/tqdm/tqdm/_main.pyi @@ -1,4 +1,2 @@ from .cli import * - -# Names in __all__ with no definition: -# main +from .cli import __all__ as __all__ diff --git a/stubs/tqdm/tqdm/_tqdm.pyi b/stubs/tqdm/tqdm/_tqdm.pyi index e2796836c195..6a8a8311f736 100644 --- a/stubs/tqdm/tqdm/_tqdm.pyi +++ b/stubs/tqdm/tqdm/_tqdm.pyi @@ -1,11 +1,2 @@ from .std import * -from .std import TqdmDeprecationWarning as TqdmDeprecationWarning - -# Names in __all__ with no definition: -# TqdmExperimentalWarning -# TqdmKeyError -# TqdmMonitorWarning -# TqdmTypeError -# TqdmWarning -# tqdm -# trange +from .std import TqdmDeprecationWarning as TqdmDeprecationWarning, __all__ as __all__ diff --git a/stubs/tqdm/tqdm/_tqdm_gui.pyi b/stubs/tqdm/tqdm/_tqdm_gui.pyi index 4c489bd2fb0a..28b0f3787654 100644 --- a/stubs/tqdm/tqdm/_tqdm_gui.pyi +++ b/stubs/tqdm/tqdm/_tqdm_gui.pyi @@ -1,7 +1,2 @@ from .gui import * - -# Names in __all__ with no definition: -# tgrange -# tqdm -# tqdm_gui -# trange +from .gui import __all__ as __all__ diff --git a/stubs/tqdm/tqdm/_tqdm_notebook.pyi b/stubs/tqdm/tqdm/_tqdm_notebook.pyi index 5c9a36126b05..f610309a623a 100644 --- a/stubs/tqdm/tqdm/_tqdm_notebook.pyi +++ b/stubs/tqdm/tqdm/_tqdm_notebook.pyi @@ -1,7 +1,2 @@ from .notebook import * - -# Names in __all__ with no definition: -# tnrange -# tqdm -# tqdm_notebook -# trange +from .notebook import __all__ as __all__ diff --git a/stubs/whatthepatch/@tests/stubtest_allowlist.txt b/stubs/whatthepatch/@tests/stubtest_allowlist.txt deleted file mode 100644 index 143d9ce12850..000000000000 --- a/stubs/whatthepatch/@tests/stubtest_allowlist.txt +++ /dev/null @@ -1,2 +0,0 @@ -# TODO: missing from stub -whatthepatch.__all__ diff --git a/stubs/whatthepatch/whatthepatch/__init__.pyi b/stubs/whatthepatch/whatthepatch/__init__.pyi index 600e4526291c..37369cb4d146 100644 --- a/stubs/whatthepatch/whatthepatch/__init__.pyi +++ b/stubs/whatthepatch/whatthepatch/__init__.pyi @@ -1,2 +1,4 @@ from .apply import apply_diff as apply_diff from .patch import parse_patch as parse_patch + +__all__ = ["parse_patch", "apply_diff"] diff --git a/stubs/workalendar/@tests/stubtest_allowlist.txt b/stubs/workalendar/@tests/stubtest_allowlist.txt index c40df8b9c723..bd0a43b5b82a 100644 --- a/stubs/workalendar/@tests/stubtest_allowlist.txt +++ b/stubs/workalendar/@tests/stubtest_allowlist.txt @@ -1,15 +1,8 @@ -# TODO: missing from stub -workalendar.africa.__all__ -workalendar.america.__all__ -workalendar.asia.__all__ -workalendar.astronomy.__all__ -workalendar.europe.__all__ +# VictoriaDayTuesdayAfterFirstMondayMay is not defined but used in __all__ runtime: workalendar.europe.scotland.mixins.__all__ -workalendar.oceania.__all__ -workalendar.usa.__all__ +workalendar.europe.scotland.mixins.VictoriaDayTuesdayAfterFirstMondayMay workalendar.core.CoreCalendar.name -workalendar.europe.scotland.mixins.VictoriaDayTuesdayAfterFirstMondayMay 
workalendar.skyfield_astronomy workalendar.tests workalendar.core.classproperty diff --git a/stubs/workalendar/workalendar/africa/__init__.pyi b/stubs/workalendar/workalendar/africa/__init__.pyi index 1b68102bf411..3fa6aa875df6 100644 --- a/stubs/workalendar/workalendar/africa/__init__.pyi +++ b/stubs/workalendar/workalendar/africa/__init__.pyi @@ -9,3 +9,17 @@ from .nigeria import Nigeria as Nigeria from .sao_tome import SaoTomeAndPrincipe as SaoTomeAndPrincipe from .south_africa import SouthAfrica as SouthAfrica from .tunisia import Tunisia as Tunisia + +__all__ = ( + "Algeria", + "Benin", + "IvoryCoast", + "Kenya", + "Madagascar", + "SaoTomeAndPrincipe", + "SouthAfrica", + "Angola", + "Mozambique", + "Nigeria", + "Tunisia", +) diff --git a/stubs/workalendar/workalendar/america/__init__.pyi b/stubs/workalendar/workalendar/america/__init__.pyi index ba36ee024bd8..04106833dd36 100644 --- a/stubs/workalendar/workalendar/america/__init__.pyi +++ b/stubs/workalendar/workalendar/america/__init__.pyi @@ -84,3 +84,91 @@ from .el_salvador import ElSalvador as ElSalvador from .mexico import Mexico as Mexico from .panama import Panama as Panama from .paraguay import Paraguay as Paraguay + +__all__ = ( + # Brazil & its states. + "Brazil", + "BrazilAcre", + "BrazilAlagoas", + "BrazilAmapa", + "BrazilAmazonas", + "BrazilBahia", + "BrazilCeara", + "BrazilDistritoFederal", + "BrazilEspiritoSanto", + "BrazilGoias", + "BrazilMaranhao", + "BrazilMinasGerais", + "BrazilMatoGrosso", + "BrazilMatoGrossoDoSul", + "BrazilPara", + "BrazilParaiba", + "BrazilPernambuco", + "BrazilPiaui", + "BrazilParana", + "BrazilRioDeJaneiro", + "BrazilRioGrandeDoNorte", + "BrazilRioGrandeDoSul", + "BrazilRondonia", + "BrazilRoraima", + "BrazilSantaCatarina", + "BrazilSaoPauloState", + "BrazilSaoPauloCity", + "BrazilSergipe", + "BrazilTocantins", + "BrazilVitoriaCity", + "BrazilVilaVelhaCity", + "BrazilCariacicaCity", + "BrazilGuarapariCity", + "BrazilSerraCity", + "BrazilBankCalendar", + "BrazilRioBrancoCity", + "BrazilMaceioCity", + "BrazilManausCity", + "BrazilMacapaCity", + "BrazilSalvadorCity", + "BrazilFortalezaCity", + "BrazilGoianiaCity", + "BrazilBeloHorizonteCity", + "BrazilCampoGrandeCity", + "BrazilCuiabaCity", + "BrazilBelemCity", + "BrazilJoaoPessoaCity", + "BrazilRecifeCity", + "BrazilTeresinaCity", + "BrazilCuritibaCity", + "BrazilNatalCity", + "BrazilPortoVelhoCity", + "BrazilBoaVistaCity", + "BrazilPortoAlegreCity", + "BrazilChapecoCity", + "BrazilFlorianopolisCity", + "BrazilJoinvilleCity", + "BrazilAracajuCity", + "BrazilSorocabaCity", + "BrazilPalmasCity", + # Canada + "Canada", + "Ontario", + "Quebec", + "BritishColumbia", + "Alberta", + "Saskatchewan", + "Manitoba", + "NewBrunswick", + "NovaScotia", + "PrinceEdwardIsland", + "Newfoundland", + "Yukon", + "NorthwestTerritories", + "Nunavut", + # Other american countries + "Barbados", + "Chile", + "Colombia", + "Mexico", + "Panama", + "Paraguay", + "Argentina", + "ElSalvador", +) diff --git a/stubs/workalendar/workalendar/asia/__init__.pyi b/stubs/workalendar/workalendar/asia/__init__.pyi index 92c649780542..2f32eb0e8955 100644 --- a/stubs/workalendar/workalendar/asia/__init__.pyi +++ b/stubs/workalendar/workalendar/asia/__init__.pyi @@ -9,3 +9,19 @@ from .qatar import Qatar as Qatar from .singapore import Singapore as Singapore from .south_korea import SouthKorea as SouthKorea from .taiwan import Taiwan as Taiwan + +__all__ = ( + "China", + "HongKong", + "HongKongBank", + "Japan", + "JapanBank", + "Malaysia", + "Qatar", + "Singapore", + "SouthKorea", + 
"Taiwan", + "Israel", + "Philippines", + "Kazakhstan", +) diff --git a/stubs/workalendar/workalendar/astronomy.pyi b/stubs/workalendar/workalendar/astronomy.pyi index 3c2e2eb22dac..a86a890e5901 100644 --- a/stubs/workalendar/workalendar/astronomy.pyi +++ b/stubs/workalendar/workalendar/astronomy.pyi @@ -1 +1,3 @@ from .precomputed_astronomy import calculate_equinoxes as calculate_equinoxes, solar_term as solar_term + +__all__ = ["calculate_equinoxes", "solar_term"] diff --git a/stubs/workalendar/workalendar/europe/__init__.pyi b/stubs/workalendar/workalendar/europe/__init__.pyi index e6446fa4a776..b6abc88df1dc 100644 --- a/stubs/workalendar/workalendar/europe/__init__.pyi +++ b/stubs/workalendar/workalendar/europe/__init__.pyi @@ -136,3 +136,144 @@ from .switzerland import ( from .turkey import Turkey as Turkey from .ukraine import Ukraine as Ukraine from .united_kingdom import UnitedKingdom as UnitedKingdom, UnitedKingdomNorthernIreland as UnitedKingdomNorthernIreland + +__all__ = ( + "Austria", + "Belarus", + "Belgium", + "Bulgaria", + "CaymanIslands", + "Croatia", + "Cyprus", + "CzechRepublic", + "Denmark", + "Estonia", + "EuropeanCentralBank", + "Finland", + "France", + "FranceAlsaceMoselle", + "Georgia", + "Greece", + "Guernsey", + "Hungary", + "Iceland", + "Ireland", + "Italy", + "Latvia", + "Lithuania", + "Luxembourg", + "Malta", + "Monaco", + "Netherlands", + "NetherlandsWithSchoolHolidays", + "Norway", + "Poland", + "Portugal", + "Romania", + "Russia", + "Serbia", + "Slovakia", + "Slovenia", + "Sweden", + "Switzerland", + "Ukraine", + "UnitedKingdom", + "UnitedKingdomNorthernIreland", + "Turkey", + # Germany + "Germany", + "BadenWurttemberg", + "Bavaria", + "Berlin", + "Brandenburg", + "Bremen", + "Hamburg", + "Hesse", + "MecklenburgVorpommern", + "LowerSaxony", + "NorthRhineWestphalia", + "RhinelandPalatinate", + "Saarland", + "Saxony", + "SaxonyAnhalt", + "SchleswigHolstein", + "Thuringia", + # Scotland + "Scotland", + "Aberdeen", + "Angus", + "Arbroath", + "Ayr", + "CarnoustieMonifieth", + "Clydebank", + "DumfriesGalloway", + "Dundee", + "EastDunbartonshire", + "Edinburgh", + "Elgin", + "Falkirk", + "Fife", + "Galashiels", + "Glasgow", + "Hawick", + "Inverclyde", + "Inverness", + "Kilmarnock", + "Lanark", + "Linlithgow", + "Lochaber", + "NorthLanarkshire", + "Paisley", + "Perth", + "ScottishBorders", + "SouthLanarkshire", + "Stirling", + "WestDunbartonshire", + # Spain + "Spain", + "Andalusia", + "Aragon", + "Catalonia", + "CastileAndLeon", + "CastillaLaMancha", + "CanaryIslands", + "Extremadura", + "Galicia", + "BalearicIslands", + "LaRioja", + "CommunityofMadrid", + "Murcia", + "Navarre", + "Asturias", + "BasqueCountry", + "Cantabria", + "ValencianCommunity", + # Switzerland + "Switzerland", + "Aargau", + "AppenzellInnerrhoden", + "AppenzellAusserrhoden", + "Bern", + "BaselLandschaft", + "BaselStadt", + "Fribourg", + "Geneva", + "Glarus", + "Graubunden", + "Jura", + "Luzern", + "Neuchatel", + "Nidwalden", + "Obwalden", + "StGallen", + "Schaffhausen", + "Solothurn", + "Schwyz", + "Thurgau", + "Ticino", + "Uri", + "Vaud", + "Valais", + "Zug", + "Zurich", +) diff --git a/stubs/workalendar/workalendar/europe/scotland/mixins/__init__.pyi b/stubs/workalendar/workalendar/europe/scotland/mixins/__init__.pyi index 0d2ea52f330f..07f6ebf86979 100644 --- a/stubs/workalendar/workalendar/europe/scotland/mixins/__init__.pyi +++ b/stubs/workalendar/workalendar/europe/scotland/mixins/__init__.pyi @@ -37,3 +37,27 @@ class AyrGoldCup: # Names in __all__ with no definition: # 
VictoriaDayTuesdayAfterFirstMondayMay + +__all__ = [ + "AyrGoldCup", + "SpringHolidayFirstMondayApril", + "SpringHolidaySecondMondayApril", + "SpringHolidayTuesdayAfterFirstMondayMay", + "SpringHolidayLastMondayMay", + "SpringHolidayFirstMondayJune", + "VictoriaDayFourthMondayMay", + "VictoriaDayLastMondayMay", + "VictoriaDayTuesdayAfterFirstMondayMay", # noqa: F822 # pyright: ignore[reportUnsupportedDunderAll] see https://github.com/workalendar/workalendar/pull/778 + "VictoriaDayFirstMondayJune", + "FairHolidayLastMondayJune", + "FairHolidayFirstMondayJuly", + "FairHolidaySecondMondayJuly", + "FairHolidayThirdMondayJuly", + "FairHolidayLastMondayJuly", + "FairHolidayFourthFridayJuly", + "FairHolidayFirstMondayAugust", + "AutumnHolidayLastMondaySeptember", + "AutumnHolidayFirstMondayOctober", + "AutumnHolidaySecondMondayOctober", + "AutumnHolidayThirdMondayOctober", +] diff --git a/stubs/workalendar/workalendar/oceania/__init__.pyi b/stubs/workalendar/workalendar/oceania/__init__.pyi index 4e04fcfa9570..57d836755277 100644 --- a/stubs/workalendar/workalendar/oceania/__init__.pyi +++ b/stubs/workalendar/workalendar/oceania/__init__.pyi @@ -12,3 +12,20 @@ from .australia import ( ) from .marshall_islands import MarshallIslands as MarshallIslands from .new_zealand import NewZealand as NewZealand + +__all__ = ( + # Australia and al. + "Australia", + "AustralianCapitalTerritory", + "NewSouthWales", + "NorthernTerritory", + "Queensland", + "SouthAustralia", + "Tasmania", + "Hobart", + "Victoria", + "WesternAustralia", + # Other oceanian countries + "MarshallIslands", + "NewZealand", +) diff --git a/stubs/workalendar/workalendar/usa/__init__.pyi b/stubs/workalendar/workalendar/usa/__init__.pyi index 3c6eea9fa327..79008dff2ca1 100644 --- a/stubs/workalendar/workalendar/usa/__init__.pyi +++ b/stubs/workalendar/workalendar/usa/__init__.pyi @@ -68,3 +68,75 @@ from .washington import Washington as Washington from .west_virginia import WestVirginia as WestVirginia from .wisconsin import Wisconsin as Wisconsin from .wyoming import Wyoming as Wyoming + +__all__ = [ + "UnitedStates", # Generic federal calendar + "Alabama", + "AlabamaBaldwinCounty", + "AlabamaMobileCounty", + "AlabamaPerryCounty", + "Alaska", + "Arizona", + "Arkansas", + "California", + "CaliforniaEducation", + "CaliforniaBerkeley", + "CaliforniaSanFrancisco", + "CaliforniaWestHollywood", + "Colorado", + "Connecticut", + "Delaware", + "DistrictOfColumbia", + "Florida", + "FloridaLegal", + "FloridaCircuitCourts", + "FloridaMiamiDade", + "Georgia", + "Hawaii", + "Idaho", + "Illinois", + "ChicagoIllinois", + "Indiana", + "Iowa", + "Kansas", + "Kentucky", + "Louisiana", + "Maine", + "Maryland", + "Massachusetts", + "SuffolkCountyMassachusetts", + "Michigan", + "Minnesota", + "Mississippi", + "Missouri", + "Montana", + "Nebraska", + "Nevada", + "NewHampshire", + "NewJersey", + "NewMexico", + "NewYork", + "NorthCarolina", + "NorthDakota", + "Ohio", + "Oklahoma", + "Oregon", + "Pennsylvania", + "RhodeIsland", + "SouthCarolina", + "SouthDakota", + "Tennessee", + "TexasBase", + "Texas", + "Utah", + "Vermont", + "Virginia", + "Washington", + "WestVirginia", + "Wisconsin", + "Wyoming", + # Non-State territories + "AmericanSamoa", + "Guam", + "FederalReserveSystem", +] From bbdc744846356d1ca7c022a4101e3500ac5704f7 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 1 Apr 2025 02:22:59 +0200 Subject: [PATCH 164/388] [JACK-Client] All METADATA_ constants are sometimes available on Linux (#13639) --- .../JACK-Client/@tests/stubtest_allowlist.txt | 
2 ++ stubs/JACK-Client/jack/__init__.pyi | 25 +++++++++---------- 2 files changed, 14 insertions(+), 13 deletions(-) create mode 100644 stubs/JACK-Client/@tests/stubtest_allowlist.txt diff --git a/stubs/JACK-Client/@tests/stubtest_allowlist.txt b/stubs/JACK-Client/@tests/stubtest_allowlist.txt new file mode 100644 index 000000000000..5a7c05f46db9 --- /dev/null +++ b/stubs/JACK-Client/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# The available constants differ based on the local environment. +(jack\.METADATA_.*)? diff --git a/stubs/JACK-Client/jack/__init__.pyi b/stubs/JACK-Client/jack/__init__.pyi index 4f8d61bd4de0..0644f983e694 100644 --- a/stubs/JACK-Client/jack/__init__.pyi +++ b/stubs/JACK-Client/jack/__init__.pyi @@ -1,7 +1,6 @@ -import sys from _typeshed import Unused from collections.abc import Callable, Generator, Iterable, Iterator, Sequence -from typing import Any, Literal, NoReturn, overload +from typing import Any, Final, Literal, NoReturn, overload from typing_extensions import Self import numpy @@ -317,14 +316,14 @@ def set_error_function(callback: Callable[[str], object] | None = None) -> None: def set_info_function(callback: Callable[[str], object] | None = None) -> None: ... def client_pid(name: str) -> int: ... -METADATA_CONNECTED: str -METADATA_HARDWARE: str -METADATA_ICON_LARGE: str -METADATA_ICON_SMALL: str -METADATA_PORT_GROUP: str -METADATA_PRETTY_NAME: str -if sys.platform != "linux": - METADATA_EVENT_TYPES: str - METADATA_ICON_NAME: str - METADATA_ORDER: str - METADATA_SIGNAL_TYPE: str +# Some METADATA_ constants are not available on all systems. +METADATA_CONNECTED: Final[str] +METADATA_HARDWARE: Final[str] +METADATA_ICON_LARGE: Final[str] +METADATA_ICON_SMALL: Final[str] +METADATA_PORT_GROUP: Final[str] +METADATA_PRETTY_NAME: Final[str] +METADATA_EVENT_TYPES: Final[str] +METADATA_ICON_NAME: Final[str] +METADATA_ORDER: Final[str] +METADATA_SIGNAL_TYPE: Final[str] From d0bf66bd34f78d638babcb4b5d806dec57b7dc4e Mon Sep 17 00:00:00 2001 From: Brian Schubert Date: Tue, 1 Apr 2025 02:32:00 -0400 Subject: [PATCH 165/388] [pyflakes] Update to 3.3.2 (#13756) --- stubs/pyflakes/METADATA.toml | 2 +- stubs/pyflakes/pyflakes/checker.pyi | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/stubs/pyflakes/METADATA.toml b/stubs/pyflakes/METADATA.toml index c9441665253a..615ddbc4e61b 100644 --- a/stubs/pyflakes/METADATA.toml +++ b/stubs/pyflakes/METADATA.toml @@ -1,4 +1,4 @@ -version = "3.3.*" +version = "~=3.3.2" upstream_repository = "https://github.com/PyCQA/pyflakes" partial_stub = true diff --git a/stubs/pyflakes/pyflakes/checker.pyi b/stubs/pyflakes/pyflakes/checker.pyi index 51385f672324..1736ae44cff1 100644 --- a/stubs/pyflakes/pyflakes/checker.pyi +++ b/stubs/pyflakes/pyflakes/checker.pyi @@ -104,7 +104,8 @@ class ExportBinding(Binding): class Scope(dict[str, Binding]): importStarred: bool -class ClassScope(Scope): ... +class ClassScope(Scope): + def __init__(self) -> None: ... 
class FunctionScope(Scope): usesLocals: bool From 28660194d9f7761280c43b407e58f43a219ac73b Mon Sep 17 00:00:00 2001 From: Daksh2000 <44369829+Daksh2000@users.noreply.github.com> Date: Tue, 1 Apr 2025 15:00:58 +0530 Subject: [PATCH 166/388] Removed stubs: influxdb-client (April 2025) (#13718) --- pyrightconfig.stricter.json | 1 - .../@tests/stubtest_allowlist.txt | 2 - stubs/influxdb-client/METADATA.toml | 16 - .../influxdb_client/__init__.pyi | 427 ------------------ .../influxdb_client/_async/__init__.pyi | 0 .../influxdb_client/_async/api_client.pyi | 65 --- .../influxdb_client/_async/rest.pyi | 96 ---- .../influxdb_client/_sync/__init__.pyi | 0 .../influxdb_client/_sync/api_client.pyi | 65 --- .../influxdb_client/_sync/rest.pyi | 103 ----- .../influxdb_client/client/__init__.pyi | 41 -- .../influxdb_client/client/_base.pyi | 60 --- .../influxdb_client/client/_pages.pyi | 37 -- .../client/authorizations_api.pyi | 23 - .../influxdb_client/client/bucket_api.pyi | 24 - .../influxdb_client/client/delete_api.pyi | 10 - .../client/delete_api_async.pyi | 10 - .../influxdb_client/client/exceptions.pyi | 13 - .../client/flux_csv_parser.pyi | 75 --- .../influxdb_client/client/flux_table.pyi | 56 --- .../client/influxdb_client.pyi | 72 --- .../client/influxdb_client_async.pyi | 56 --- .../client/invokable_scripts_api.pyi | 26 -- .../influxdb_client/client/labels_api.pyi | 11 - .../client/logging_handler.pyi | 14 - .../client/organizations_api.pyi | 10 - .../influxdb_client/client/query_api.pyi | 51 --- .../client/query_api_async.pyi | 39 -- .../influxdb_client/client/tasks_api.pyi | 40 -- .../influxdb_client/client/users_api.pyi | 10 - .../influxdb_client/client/util/__init__.pyi | 0 .../client/util/date_utils.pyi | 16 - .../client/util/date_utils_pandas.pyi | 5 - .../influxdb_client/client/util/helpers.pyi | 4 - .../client/util/multiprocessing_helper.pyi | 25 - .../influxdb_client/client/warnings.pyi | 7 - .../influxdb_client/client/write/__init__.pyi | 41 -- .../client/write/dataframe_serializer.pyi | 15 - .../influxdb_client/client/write/point.pyi | 40 -- .../influxdb_client/client/write/retry.pyi | 40 -- .../influxdb_client/client/write_api.pyi | 112 ----- .../client/write_api_async.pyi | 38 -- .../influxdb_client/configuration.pyi | 50 -- .../influxdb_client/domain/__init__.pyi | 373 --------------- .../add_resource_member_request_body.pyi | 19 - .../domain/analyze_query_response.pyi | 15 - .../domain/analyze_query_response_errors.pyi | 33 -- .../domain/array_expression.pyi | 21 - .../influxdb_client/domain/ast_response.pyi | 15 - .../influxdb_client/domain/authorization.pyi | 67 --- .../domain/authorization_post_request.pyi | 32 -- .../domain/authorization_update_request.pyi | 19 - .../influxdb_client/domain/authorizations.pyi | 19 - .../influxdb_client/domain/axes.pyi | 19 - .../influxdb_client/domain/axis.pyi | 43 -- .../influxdb_client/domain/axis_scale.pyi | 12 - .../influxdb_client/domain/bad_statement.pyi | 21 - .../domain/band_view_properties.pyi | 157 ------- .../domain/binary_expression.pyi | 35 -- .../influxdb_client/domain/block.pyi | 21 - .../domain/boolean_literal.pyi | 21 - .../influxdb_client/domain/bucket.pyi | 73 --- .../influxdb_client/domain/bucket_links.pyi | 39 -- .../domain/bucket_metadata_manifest.pyi | 48 -- .../domain/bucket_retention_rules.pyi | 22 - .../domain/bucket_shard_mapping.pyi | 19 - .../influxdb_client/domain/buckets.pyi | 19 - .../builder_aggregate_function_type.pyi | 12 - .../influxdb_client/domain/builder_config.pyi | 33 -- 
.../builder_config_aggregate_window.pyi | 19 - .../domain/builder_functions_type.pyi | 15 - .../domain/builder_tags_type.pyi | 25 - .../domain/builtin_statement.pyi | 21 - .../domain/call_expression.pyi | 27 -- .../influxdb_client/domain/cell.pyi | 48 -- .../influxdb_client/domain/cell_links.pyi | 15 - .../influxdb_client/domain/cell_update.pyi | 29 -- .../domain/cell_with_view_properties.pyi | 32 -- .../influxdb_client/domain/check.pyi | 17 - .../influxdb_client/domain/check_base.pyi | 88 ---- .../domain/check_base_links.pyi | 34 -- .../domain/check_discriminator.pyi | 30 -- .../influxdb_client/domain/check_patch.pyi | 25 - .../domain/check_status_level.pyi | 15 - .../domain/check_view_properties.pyi | 67 --- .../influxdb_client/domain/checks.pyi | 19 - .../domain/column_data_type.pyi | 15 - .../domain/column_semantic_type.pyi | 13 - .../domain/conditional_expression.pyi | 35 -- .../influxdb_client/domain/config.pyi | 15 - .../domain/constant_variable_properties.pyi | 21 - .../influxdb_client/domain/create_cell.pyi | 43 -- .../domain/create_dashboard_request.pyi | 25 - .../influxdb_client/domain/custom_check.pyi | 35 -- .../influxdb_client/domain/dashboard.pyi | 43 -- .../domain/dashboard_color.pyi | 38 -- .../domain/dashboard_query.pyi | 33 -- .../domain/dashboard_with_view_properties.pyi | 43 -- .../influxdb_client/domain/dashboards.pyi | 19 - .../domain/date_time_literal.pyi | 21 - .../influxdb_client/domain/dbr_ps.pyi | 15 - .../influxdb_client/domain/dbrp.pyi | 50 -- .../influxdb_client/domain/dbrp_create.pyi | 37 -- .../influxdb_client/domain/dbrp_get.pyi | 15 - .../influxdb_client/domain/dbrp_update.pyi | 19 - .../influxdb_client/domain/deadman_check.pyi | 75 --- .../influxdb_client/domain/decimal_places.pyi | 19 - .../domain/delete_predicate_request.pyi | 25 - .../influxdb_client/domain/dialect.pyi | 38 -- .../domain/dict_expression.pyi | 21 - .../influxdb_client/domain/dict_item.pyi | 23 - .../influxdb_client/domain/duration.pyi | 25 - .../domain/duration_literal.pyi | 21 - .../influxdb_client/domain/error.pyi | 33 -- .../influxdb_client/domain/expression.pyi | 13 - .../domain/expression_statement.pyi | 21 - .../influxdb_client/domain/field.pyi | 33 -- .../influxdb_client/domain/file.pyi | 38 -- .../influxdb_client/domain/float_literal.pyi | 21 - .../influxdb_client/domain/flux_response.pyi | 15 - .../domain/flux_suggestion.pyi | 19 - .../domain/flux_suggestions.pyi | 15 - .../domain/function_expression.pyi | 27 -- .../domain/gauge_view_properties.pyi | 70 --- .../domain/greater_threshold.pyi | 27 -- .../influxdb_client/domain/health_check.pyi | 43 -- .../domain/heatmap_view_properties.pyi | 167 ------- .../domain/histogram_view_properties.pyi | 95 ---- .../domain/http_notification_endpoint.pyi | 66 --- .../domain/http_notification_rule.pyi | 40 -- .../domain/http_notification_rule_base.pyi | 48 -- .../influxdb_client/domain/identifier.pyi | 21 - .../domain/import_declaration.pyi | 19 - .../domain/index_expression.pyi | 27 -- .../domain/integer_literal.pyi | 21 - .../influxdb_client/domain/is_onboarding.pyi | 15 - .../influxdb_client/domain/label.pyi | 33 -- .../domain/label_create_request.pyi | 25 - .../influxdb_client/domain/label_mapping.pyi | 15 - .../influxdb_client/domain/label_response.pyi | 19 - .../influxdb_client/domain/label_update.pyi | 19 - .../domain/labels_response.pyi | 19 - .../domain/language_request.pyi | 15 - .../legacy_authorization_post_request.pyi | 37 -- .../domain/lesser_threshold.pyi | 27 -- .../line_plus_single_stat_properties.pyi | 162 ------- 
.../domain/line_protocol_error.pyi | 38 -- .../domain/line_protocol_length_error.pyi | 19 - .../influxdb_client/domain/links.pyi | 21 - .../domain/list_stacks_response.pyi | 15 - .../influxdb_client/domain/log_event.pyi | 25 - .../domain/logical_expression.pyi | 35 -- .../influxdb_client/domain/logs.pyi | 15 - .../domain/map_variable_properties.pyi | 21 - .../domain/markdown_view_properties.pyi | 27 -- .../domain/measurement_schema.pyi | 48 -- .../domain/measurement_schema_column.pyi | 25 - .../measurement_schema_create_request.pyi | 19 - .../domain/measurement_schema_list.pyi | 15 - .../measurement_schema_update_request.pyi | 15 - .../domain/member_assignment.pyi | 27 -- .../domain/member_expression.pyi | 23 - .../domain/metadata_backup.pyi | 25 - .../influxdb_client/domain/model_property.pyi | 25 - .../domain/mosaic_view_properties.pyi | 160 ------- .../influxdb_client/domain/node.pyi | 10 - .../domain/notification_endpoint.pyi | 17 - .../domain/notification_endpoint_base.pyi | 68 --- .../notification_endpoint_base_links.pyi | 29 -- .../notification_endpoint_discriminator.pyi | 26 -- .../domain/notification_endpoint_type.pyi | 14 - .../domain/notification_endpoint_update.pyi | 25 - .../domain/notification_endpoints.pyi | 19 - .../domain/notification_rule.pyi | 17 - .../domain/notification_rule_base.pyi | 128 ------ .../domain/notification_rule_base_links.pyi | 34 -- .../notification_rule_discriminator.pyi | 38 -- .../domain/notification_rule_update.pyi | 25 - .../domain/notification_rules.pyi | 19 - .../domain/object_expression.pyi | 21 - .../domain/onboarding_request.pyi | 48 -- .../domain/onboarding_response.pyi | 33 -- .../domain/option_statement.pyi | 21 - .../influxdb_client/domain/organization.pyi | 50 -- .../domain/organization_links.pyi | 49 -- .../influxdb_client/domain/organizations.pyi | 19 - .../influxdb_client/domain/package.pyi | 33 -- .../influxdb_client/domain/package_clause.pyi | 19 - .../pager_duty_notification_endpoint.pyi | 36 -- .../domain/pager_duty_notification_rule.pyi | 40 -- .../pager_duty_notification_rule_base.pyi | 48 -- .../domain/paren_expression.pyi | 21 - .../domain/password_reset_body.pyi | 15 - .../domain/patch_bucket_request.pyi | 25 - .../domain/patch_dashboard_request.pyi | 25 - .../domain/patch_organization_request.pyi | 19 - .../domain/patch_retention_rule.pyi | 22 - .../domain/patch_stack_request.pyi | 33 -- ...tch_stack_request_additional_resources.pyi | 25 - .../influxdb_client/domain/permission.pyi | 19 - .../domain/permission_resource.pyi | 38 -- .../domain/pipe_expression.pyi | 27 -- .../influxdb_client/domain/pipe_literal.pyi | 17 - .../domain/post_bucket_request.pyi | 40 -- .../influxdb_client/domain/post_check.pyi | 17 - .../domain/post_notification_endpoint.pyi | 17 - .../domain/post_notification_rule.pyi | 17 - .../domain/post_organization_request.pyi | 19 - .../domain/post_restore_kv_response.pyi | 15 - .../domain/post_stack_request.pyi | 33 -- .../influxdb_client/domain/property_key.pyi | 13 - .../influxdb_client/domain/query.pyi | 43 -- .../domain/query_edit_mode.pyi | 12 - .../domain/query_variable_properties.pyi | 21 - .../query_variable_properties_values.pyi | 19 - .../domain/range_threshold.pyi | 37 -- .../influxdb_client/domain/ready.pyi | 25 - .../influxdb_client/domain/regexp_literal.pyi | 21 - .../domain/remote_connection.pyi | 48 -- .../remote_connection_creation_request.pyi | 48 -- .../remote_connection_update_request.pyi | 43 -- .../domain/remote_connections.pyi | 15 - .../domain/renamable_field.pyi | 25 - 
.../influxdb_client/domain/replication.pyi | 77 ---- .../domain/replication_creation_request.pyi | 57 --- .../domain/replication_update_request.pyi | 47 -- .../influxdb_client/domain/replications.pyi | 15 - .../domain/resource_member.pyi | 24 - .../domain/resource_members.pyi | 19 - .../domain/resource_members_links.pyi | 11 - .../influxdb_client/domain/resource_owner.pyi | 24 - .../domain/resource_owners.pyi | 19 - .../domain/restored_bucket_mappings.pyi | 25 - .../domain/retention_policy_manifest.pyi | 43 -- .../domain/return_statement.pyi | 21 - .../influxdb_client/domain/routes.pyi | 103 ----- .../domain/routes_external.pyi | 15 - .../influxdb_client/domain/routes_query.pyi | 29 -- .../influxdb_client/domain/routes_system.pyi | 25 - .../domain/rule_status_level.pyi | 16 - .../influxdb_client/domain/run.pyi | 60 --- .../influxdb_client/domain/run_links.pyi | 21 - .../influxdb_client/domain/run_manually.pyi | 15 - .../influxdb_client/domain/runs.pyi | 19 - .../domain/scatter_view_properties.pyi | 172 ------- .../influxdb_client/domain/schema_type.pyi | 12 - .../domain/scraper_target_request.pyi | 43 -- .../domain/scraper_target_response.pyi | 41 -- .../domain/scraper_target_responses.pyi | 15 - .../influxdb_client/domain/script.pyi | 58 --- .../domain/script_create_request.pyi | 33 -- .../domain/script_invocation_params.pyi | 15 - .../domain/script_language.pyi | 14 - .../domain/script_update_request.pyi | 19 - .../influxdb_client/domain/scripts.pyi | 15 - .../influxdb_client/domain/secret_keys.pyi | 15 - .../domain/secret_keys_response.pyi | 17 - .../domain/shard_group_manifest.pyi | 43 -- .../influxdb_client/domain/shard_manifest.pyi | 19 - .../influxdb_client/domain/shard_owner.pyi | 15 - .../domain/simple_table_view_properties.pyi | 45 -- .../domain/single_stat_view_properties.pyi | 75 --- .../domain/slack_notification_endpoint.pyi | 36 -- .../domain/slack_notification_rule.pyi | 41 -- .../domain/slack_notification_rule_base.pyi | 53 --- .../domain/smtp_notification_rule.pyi | 42 -- .../domain/smtp_notification_rule_base.pyi | 58 --- .../influxdb_client/domain/source.pyi | 93 ---- .../influxdb_client/domain/source_links.pyi | 29 -- .../influxdb_client/domain/sources.pyi | 19 - .../influxdb_client/domain/stack.pyi | 33 -- .../domain/stack_associations.pyi | 19 - .../influxdb_client/domain/stack_events.pyi | 48 -- .../influxdb_client/domain/stack_links.pyi | 11 - .../domain/stack_resources.pyi | 43 -- .../influxdb_client/domain/statement.pyi | 10 - .../influxdb_client/domain/static_legend.pyi | 48 -- .../influxdb_client/domain/status_rule.pyi | 33 -- .../influxdb_client/domain/string_literal.pyi | 21 - .../domain/subscription_manifest.pyi | 25 - .../domain/table_view_properties.pyi | 65 --- .../table_view_properties_table_options.pyi | 33 -- .../influxdb_client/domain/tag_rule.pyi | 25 - .../influxdb_client/domain/task.pyi | 108 ----- .../domain/task_create_request.pyi | 38 -- .../influxdb_client/domain/task_links.pyi | 39 -- .../domain/task_status_type.pyi | 12 - .../domain/task_update_request.pyi | 48 -- .../influxdb_client/domain/tasks.pyi | 17 - .../influxdb_client/domain/telegraf.pyi | 35 -- .../domain/telegraf_plugin.pyi | 33 -- .../domain/telegraf_plugin_request.pyi | 43 -- .../telegraf_plugin_request_plugins.pyi | 38 -- .../domain/telegraf_plugins.pyi | 25 - .../domain/telegraf_request.pyi | 38 -- .../domain/telegraf_request_metadata.pyi | 15 - .../influxdb_client/domain/telegrafs.pyi | 15 - .../domain/telegram_notification_endpoint.pyi | 36 -- 
.../domain/telegram_notification_rule.pyi | 42 -- .../telegram_notification_rule_base.pyi | 58 --- .../influxdb_client/domain/template_apply.pyi | 58 --- .../domain/template_apply_remotes.pyi | 19 - .../domain/template_apply_template.pyi | 25 - .../influxdb_client/domain/template_chart.pyi | 38 -- .../domain/template_export_by_id.pyi | 25 - .../domain/template_export_by_id_org_ids.pyi | 19 - ...template_export_by_id_resource_filters.pyi | 19 - .../template_export_by_id_resources.pyi | 23 - .../domain/template_export_by_name.pyi | 25 - .../template_export_by_name_resources.pyi | 19 - .../influxdb_client/domain/template_kind.pyi | 24 - .../domain/template_summary.pyi | 38 -- .../domain/template_summary_diff.pyi | 63 --- .../domain/template_summary_diff_buckets.pyi | 43 -- .../template_summary_diff_buckets_new_old.pyi | 25 - .../domain/template_summary_diff_checks.pyi | 43 -- .../template_summary_diff_dashboards.pyi | 43 -- ...mplate_summary_diff_dashboards_new_old.pyi | 25 - .../template_summary_diff_label_mappings.pyi | 53 --- .../domain/template_summary_diff_labels.pyi | 43 -- .../template_summary_diff_labels_new_old.pyi | 25 - ...te_summary_diff_notification_endpoints.pyi | 43 -- ...mplate_summary_diff_notification_rules.pyi | 43 -- ...ummary_diff_notification_rules_new_old.pyi | 68 --- .../domain/template_summary_diff_tasks.pyi | 43 -- .../template_summary_diff_tasks_new_old.pyi | 48 -- ...template_summary_diff_telegraf_configs.pyi | 43 -- .../template_summary_diff_variables.pyi | 43 -- ...emplate_summary_diff_variables_new_old.pyi | 25 - .../domain/template_summary_errors.pyi | 33 -- .../domain/template_summary_label.pyi | 48 -- .../template_summary_label_properties.pyi | 19 - .../domain/template_summary_summary.pyi | 73 --- .../template_summary_summary_buckets.pyi | 58 --- .../template_summary_summary_dashboards.pyi | 58 --- ...emplate_summary_summary_label_mappings.pyi | 53 --- ...ate_summary_summary_notification_rules.pyi | 88 ---- .../template_summary_summary_status_rules.pyi | 19 - .../template_summary_summary_tag_rules.pyi | 25 - .../domain/template_summary_summary_tasks.pyi | 68 --- .../template_summary_summary_variables.pyi | 58 --- .../influxdb_client/domain/test_statement.pyi | 18 - .../influxdb_client/domain/threshold.pyi | 17 - .../influxdb_client/domain/threshold_base.pyi | 19 - .../domain/threshold_check.pyi | 60 --- .../domain/unary_expression.pyi | 27 -- .../domain/unsigned_integer_literal.pyi | 21 - .../influxdb_client/domain/user.pyi | 23 - .../influxdb_client/domain/user_response.pyi | 33 -- .../domain/user_response_links.pyi | 11 - .../influxdb_client/domain/users.pyi | 19 - .../influxdb_client/domain/variable.pyi | 63 --- .../domain/variable_assignment.pyi | 25 - .../influxdb_client/domain/variable_links.pyi | 21 - .../domain/variable_properties.pyi | 10 - .../influxdb_client/domain/variables.pyi | 15 - .../influxdb_client/domain/view.pyi | 33 -- .../influxdb_client/domain/view_links.pyi | 11 - .../domain/view_properties.pyi | 10 - .../influxdb_client/domain/views.pyi | 19 - .../domain/write_precision.pyi | 18 - .../influxdb_client/domain/xy_geom.pyi | 17 - .../domain/xy_view_properties.pyi | 157 ------- .../influxdb_client/extras.pyi | 4 - .../influxdb-client/influxdb_client/rest.pyi | 29 -- .../influxdb_client/service/__init__.pyi | 41 -- .../influxdb_client/service/_base_service.pyi | 8 - .../service/authorizations_service.pyi | 21 - .../service/backup_service.pyi | 15 - .../service/bucket_schemas_service.pyi | 20 - .../service/buckets_service.pyi | 51 --- 
.../influxdb_client/service/cells_service.pyi | 24 - .../service/checks_service.pyi | 36 -- .../service/config_service.pyi | 12 - .../service/dashboards_service.pyi | 66 --- .../service/dbr_ps_service.pyi | 21 - .../service/delete_service.pyi | 9 - .../service/health_service.pyi | 9 - .../service/invokable_scripts_service.pyi | 27 -- .../service/labels_service.pyi | 21 - .../service/legacy_authorizations_service.pyi | 24 - .../service/metrics_service.pyi | 9 - .../notification_endpoints_service.pyi | 33 -- .../service/notification_rules_service.pyi | 33 -- .../service/organizations_service.pyi | 39 -- .../influxdb_client/service/ping_service.pyi | 12 - .../influxdb_client/service/query_service.pyi | 21 - .../influxdb_client/service/ready_service.pyi | 9 - .../service/remote_connections_service.pyi | 21 - .../service/replications_service.pyi | 24 - .../service/resources_service.pyi | 9 - .../service/restore_service.pyi | 21 - .../service/routes_service.pyi | 9 - .../influxdb_client/service/rules_service.pyi | 9 - .../service/scraper_targets_service.pyi | 48 -- .../service/secrets_service.pyi | 18 - .../influxdb_client/service/setup_service.pyi | 12 - .../service/signin_service.pyi | 9 - .../service/signout_service.pyi | 9 - .../service/sources_service.pyi | 27 -- .../influxdb_client/service/tasks_service.pyi | 130 ------ .../service/telegraf_plugins_service.pyi | 9 - .../service/telegrafs_service.pyi | 48 -- .../service/templates_service.pyi | 30 -- .../influxdb_client/service/users_service.pyi | 33 -- .../service/variables_service.pyi | 33 -- .../influxdb_client/service/views_service.pyi | 12 - .../influxdb_client/service/write_service.pyi | 9 - .../influxdb_client/version.pyi | 1 - 409 files changed, 14113 deletions(-) delete mode 100644 stubs/influxdb-client/@tests/stubtest_allowlist.txt delete mode 100644 stubs/influxdb-client/METADATA.toml delete mode 100644 stubs/influxdb-client/influxdb_client/__init__.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/_async/__init__.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/_async/api_client.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/_async/rest.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/_sync/__init__.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/_sync/api_client.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/_sync/rest.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/__init__.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/_pages.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/authorizations_api.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/bucket_api.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/delete_api.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/delete_api_async.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/exceptions.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/flux_csv_parser.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/flux_table.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/influxdb_client.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/influxdb_client_async.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/invokable_scripts_api.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/labels_api.pyi delete mode 
100644 stubs/influxdb-client/influxdb_client/client/logging_handler.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/organizations_api.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/query_api.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/query_api_async.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/tasks_api.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/users_api.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/util/__init__.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/util/date_utils.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/util/date_utils_pandas.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/util/helpers.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/util/multiprocessing_helper.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/warnings.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/write/__init__.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/write/dataframe_serializer.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/write/point.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/write/retry.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/write_api.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/client/write_api_async.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/configuration.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/__init__.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/add_resource_member_request_body.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/analyze_query_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/analyze_query_response_errors.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/array_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/ast_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/authorization.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/authorization_post_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/authorization_update_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/authorizations.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/axes.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/axis.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/axis_scale.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/bad_statement.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/band_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/binary_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/block.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/boolean_literal.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/bucket.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/bucket_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/bucket_metadata_manifest.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/bucket_retention_rules.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/bucket_shard_mapping.pyi delete mode 100644 
stubs/influxdb-client/influxdb_client/domain/buckets.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/builder_aggregate_function_type.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/builder_config.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/builder_config_aggregate_window.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/builder_functions_type.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/builder_tags_type.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/builtin_statement.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/call_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/cell.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/cell_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/cell_update.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/cell_with_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/check.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/check_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/check_base_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/check_discriminator.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/check_patch.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/check_status_level.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/check_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/checks.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/column_data_type.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/column_semantic_type.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/conditional_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/config.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/constant_variable_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/create_cell.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/create_dashboard_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/custom_check.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dashboard.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dashboard_color.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dashboard_query.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dashboard_with_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dashboards.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/date_time_literal.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dbr_ps.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dbrp.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dbrp_create.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dbrp_get.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dbrp_update.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/deadman_check.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/decimal_places.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/delete_predicate_request.pyi delete mode 100644 
stubs/influxdb-client/influxdb_client/domain/dialect.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dict_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/dict_item.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/duration.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/duration_literal.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/error.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/expression_statement.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/field.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/file.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/float_literal.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/flux_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/flux_suggestion.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/flux_suggestions.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/function_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/gauge_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/greater_threshold.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/health_check.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/heatmap_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/histogram_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/http_notification_endpoint.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/http_notification_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/http_notification_rule_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/identifier.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/import_declaration.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/index_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/integer_literal.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/is_onboarding.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/label.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/label_create_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/label_mapping.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/label_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/label_update.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/labels_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/language_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/legacy_authorization_post_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/lesser_threshold.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/line_plus_single_stat_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/line_protocol_error.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/line_protocol_length_error.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/list_stacks_response.pyi delete 
mode 100644 stubs/influxdb-client/influxdb_client/domain/log_event.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/logical_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/logs.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/map_variable_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/markdown_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/measurement_schema.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/measurement_schema_column.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/measurement_schema_create_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/measurement_schema_list.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/measurement_schema_update_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/member_assignment.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/member_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/metadata_backup.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/model_property.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/mosaic_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/node.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_endpoint.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_endpoint_discriminator.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_endpoint_type.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_endpoint_update.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_endpoints.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_rule_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_rule_base_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_rule_discriminator.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_rule_update.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/notification_rules.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/object_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/onboarding_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/onboarding_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/option_statement.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/organization.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/organization_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/organizations.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/package.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/package_clause.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_endpoint.pyi delete mode 100644 
stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/paren_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/password_reset_body.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/patch_bucket_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/patch_dashboard_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/patch_organization_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/patch_retention_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/patch_stack_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/patch_stack_request_additional_resources.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/permission.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/permission_resource.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/pipe_expression.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/pipe_literal.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/post_bucket_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/post_check.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/post_notification_endpoint.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/post_notification_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/post_organization_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/post_restore_kv_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/post_stack_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/property_key.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/query.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/query_edit_mode.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/query_variable_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/query_variable_properties_values.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/range_threshold.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/ready.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/regexp_literal.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/remote_connection.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/remote_connection_creation_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/remote_connection_update_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/remote_connections.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/renamable_field.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/replication.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/replication_creation_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/replication_update_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/replications.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/resource_member.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/resource_members.pyi delete mode 100644 
stubs/influxdb-client/influxdb_client/domain/resource_members_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/resource_owner.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/resource_owners.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/restored_bucket_mappings.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/retention_policy_manifest.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/return_statement.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/routes.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/routes_external.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/routes_query.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/routes_system.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/rule_status_level.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/run.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/run_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/run_manually.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/runs.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/scatter_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/schema_type.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/scraper_target_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/scraper_target_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/scraper_target_responses.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/script.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/script_create_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/script_invocation_params.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/script_language.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/script_update_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/scripts.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/secret_keys.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/secret_keys_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/shard_group_manifest.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/shard_manifest.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/shard_owner.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/simple_table_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/single_stat_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/slack_notification_endpoint.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/slack_notification_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/slack_notification_rule_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/source.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/source_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/sources.pyi delete mode 100644 
stubs/influxdb-client/influxdb_client/domain/stack.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/stack_associations.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/stack_events.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/stack_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/stack_resources.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/statement.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/static_legend.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/status_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/string_literal.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/subscription_manifest.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/table_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/table_view_properties_table_options.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/tag_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/task.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/task_create_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/task_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/task_status_type.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/task_update_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/tasks.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegraf.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegraf_plugin.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request_plugins.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegraf_plugins.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegraf_request.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegraf_request_metadata.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegrafs.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegram_notification_endpoint.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_apply.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_apply_remotes.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_apply_template.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_chart.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_export_by_id.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_export_by_id_org_ids.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resource_filters.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resources.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_export_by_name.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_export_by_name_resources.pyi delete mode 100644 
stubs/influxdb-client/influxdb_client/domain/template_kind.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets_new_old.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_checks.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards_new_old.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_label_mappings.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels_new_old.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_endpoints.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules_new_old.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks_new_old.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_telegraf_configs.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables_new_old.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_errors.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_label.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_label_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_summary.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_summary_buckets.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_summary_dashboards.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_summary_label_mappings.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_summary_notification_rules.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_summary_status_rules.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tag_rules.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tasks.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/template_summary_summary_variables.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/test_statement.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/threshold.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/threshold_base.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/threshold_check.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/unary_expression.pyi delete mode 
100644 stubs/influxdb-client/influxdb_client/domain/unsigned_integer_literal.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/user.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/user_response.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/user_response_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/users.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/variable.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/variable_assignment.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/variable_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/variable_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/variables.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/view.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/view_links.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/views.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/write_precision.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/xy_geom.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/domain/xy_view_properties.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/extras.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/rest.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/__init__.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/_base_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/authorizations_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/backup_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/bucket_schemas_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/buckets_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/cells_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/checks_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/config_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/dashboards_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/dbr_ps_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/delete_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/health_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/invokable_scripts_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/labels_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/legacy_authorizations_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/metrics_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/notification_endpoints_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/notification_rules_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/organizations_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/ping_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/query_service.pyi delete mode 100644 stubs/influxdb-client/influxdb_client/service/ready_service.pyi delete mode 100644 
stubs/influxdb-client/influxdb_client/service/remote_connections_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/replications_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/resources_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/restore_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/routes_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/rules_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/scraper_targets_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/secrets_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/setup_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/signin_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/signout_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/sources_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/tasks_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/telegraf_plugins_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/telegrafs_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/templates_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/users_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/variables_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/views_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/service/write_service.pyi
 delete mode 100644 stubs/influxdb-client/influxdb_client/version.pyi

diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json
index acb00ae85677..ee21eb97e43e 100644
--- a/pyrightconfig.stricter.json
+++ b/pyrightconfig.stricter.json
@@ -49,7 +49,6 @@
         "stubs/httplib2",
         "stubs/hvac",
         "stubs/icalendar",
-        "stubs/influxdb-client",
         "stubs/jmespath",
         "stubs/jsonschema",
         "stubs/jwcrypto",
diff --git a/stubs/influxdb-client/@tests/stubtest_allowlist.txt b/stubs/influxdb-client/@tests/stubtest_allowlist.txt
deleted file mode 100644
index 8836ec67302d..000000000000
--- a/stubs/influxdb-client/@tests/stubtest_allowlist.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-# TODO: missing from stub
-influxdb_client.extras.__all__
diff --git a/stubs/influxdb-client/METADATA.toml b/stubs/influxdb-client/METADATA.toml
deleted file mode 100644
index a3459d75778d..000000000000
--- a/stubs/influxdb-client/METADATA.toml
+++ /dev/null
@@ -1,16 +0,0 @@
-version = "1.45.*"
-upstream_repository = "https://github.com/influxdata/influxdb-client-python"
-# requires a version of urllib3 with a py.typed file
-requires = ["urllib3>=2"]
-
-extra_description = """\
-    Note: `types-influxdb-client` has required `urllib3>=2` since v1.37.0.1. \
-    If you need to install `types-influxdb-client` into an environment \
-    that must also have `urllib3<2` installed into it, \
-    you will have to use `types-influxdb-client<1.37.0.1`.\
-    """
-obsolete_since = "1.46.0"  # Released on 2024-09-13
-
-[tool.stubtest]
-extras = ["extra"]
-stubtest_requirements = ["aiohttp"]
diff --git a/stubs/influxdb-client/influxdb_client/__init__.pyi b/stubs/influxdb-client/influxdb_client/__init__.pyi
deleted file mode 100644
index b3757b5810c4..000000000000
--- a/stubs/influxdb-client/influxdb_client/__init__.pyi
+++ /dev/null
@@ -1,427 +0,0 @@
-from influxdb_client.client.authorizations_api import AuthorizationsApi as AuthorizationsApi
-from influxdb_client.client.bucket_api import BucketsApi as BucketsApi
-from influxdb_client.client.delete_api import DeleteApi as DeleteApi
-from influxdb_client.client.influxdb_client import InfluxDBClient as InfluxDBClient
-from influxdb_client.client.invokable_scripts_api import InvokableScriptsApi as InvokableScriptsApi
-from influxdb_client.client.labels_api import LabelsApi as LabelsApi
-from influxdb_client.client.logging_handler import InfluxLoggingHandler as InfluxLoggingHandler
-from influxdb_client.client.organizations_api import OrganizationsApi as OrganizationsApi
-from influxdb_client.client.query_api import QueryApi as QueryApi
-from influxdb_client.client.tasks_api import TasksApi as TasksApi
-from influxdb_client.client.users_api import UsersApi as UsersApi
-from influxdb_client.client.write.point import Point as Point
-from influxdb_client.client.write_api import WriteApi as WriteApi, WriteOptions as WriteOptions
-from influxdb_client.configuration import Configuration as Configuration
-from influxdb_client.domain.add_resource_member_request_body import AddResourceMemberRequestBody as AddResourceMemberRequestBody
-from influxdb_client.domain.analyze_query_response import AnalyzeQueryResponse as AnalyzeQueryResponse
-from influxdb_client.domain.analyze_query_response_errors import AnalyzeQueryResponseErrors as AnalyzeQueryResponseErrors
-from influxdb_client.domain.array_expression import ArrayExpression as ArrayExpression
-from influxdb_client.domain.ast_response import ASTResponse as ASTResponse
-from influxdb_client.domain.authorization import Authorization as Authorization
-from influxdb_client.domain.authorization_post_request import AuthorizationPostRequest as AuthorizationPostRequest
-from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest as AuthorizationUpdateRequest
-from influxdb_client.domain.authorizations import Authorizations as Authorizations
-from influxdb_client.domain.axes import Axes as Axes
-from influxdb_client.domain.axis import Axis as Axis
-from influxdb_client.domain.axis_scale import AxisScale as AxisScale
-from influxdb_client.domain.bad_statement import BadStatement as BadStatement
-from influxdb_client.domain.band_view_properties import BandViewProperties as BandViewProperties
-from influxdb_client.domain.binary_expression import BinaryExpression as BinaryExpression
-from influxdb_client.domain.block import Block as Block
-from influxdb_client.domain.boolean_literal import BooleanLiteral as BooleanLiteral
-from influxdb_client.domain.bucket import Bucket as Bucket
-from influxdb_client.domain.bucket_links import BucketLinks as BucketLinks
-from influxdb_client.domain.bucket_metadata_manifest import BucketMetadataManifest as BucketMetadataManifest
-from influxdb_client.domain.bucket_retention_rules import BucketRetentionRules as
BucketRetentionRules -from influxdb_client.domain.bucket_shard_mapping import BucketShardMapping as BucketShardMapping -from influxdb_client.domain.buckets import Buckets as Buckets -from influxdb_client.domain.builder_aggregate_function_type import BuilderAggregateFunctionType as BuilderAggregateFunctionType -from influxdb_client.domain.builder_config import BuilderConfig as BuilderConfig -from influxdb_client.domain.builder_config_aggregate_window import BuilderConfigAggregateWindow as BuilderConfigAggregateWindow -from influxdb_client.domain.builder_functions_type import BuilderFunctionsType as BuilderFunctionsType -from influxdb_client.domain.builder_tags_type import BuilderTagsType as BuilderTagsType -from influxdb_client.domain.builtin_statement import BuiltinStatement as BuiltinStatement -from influxdb_client.domain.call_expression import CallExpression as CallExpression -from influxdb_client.domain.cell import Cell as Cell -from influxdb_client.domain.cell_links import CellLinks as CellLinks -from influxdb_client.domain.cell_update import CellUpdate as CellUpdate -from influxdb_client.domain.cell_with_view_properties import CellWithViewProperties as CellWithViewProperties -from influxdb_client.domain.check import Check as Check -from influxdb_client.domain.check_base import CheckBase as CheckBase -from influxdb_client.domain.check_base_links import CheckBaseLinks as CheckBaseLinks -from influxdb_client.domain.check_discriminator import CheckDiscriminator as CheckDiscriminator -from influxdb_client.domain.check_patch import CheckPatch as CheckPatch -from influxdb_client.domain.check_status_level import CheckStatusLevel as CheckStatusLevel -from influxdb_client.domain.check_view_properties import CheckViewProperties as CheckViewProperties -from influxdb_client.domain.checks import Checks as Checks -from influxdb_client.domain.column_data_type import ColumnDataType as ColumnDataType -from influxdb_client.domain.column_semantic_type import ColumnSemanticType as ColumnSemanticType -from influxdb_client.domain.conditional_expression import ConditionalExpression as ConditionalExpression -from influxdb_client.domain.config import Config as Config -from influxdb_client.domain.constant_variable_properties import ConstantVariableProperties as ConstantVariableProperties -from influxdb_client.domain.create_cell import CreateCell as CreateCell -from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest as CreateDashboardRequest -from influxdb_client.domain.custom_check import CustomCheck as CustomCheck -from influxdb_client.domain.dashboard import Dashboard as Dashboard -from influxdb_client.domain.dashboard_color import DashboardColor as DashboardColor -from influxdb_client.domain.dashboard_query import DashboardQuery as DashboardQuery -from influxdb_client.domain.dashboard_with_view_properties import DashboardWithViewProperties as DashboardWithViewProperties -from influxdb_client.domain.dashboards import Dashboards as Dashboards -from influxdb_client.domain.date_time_literal import DateTimeLiteral as DateTimeLiteral -from influxdb_client.domain.dbr_ps import DBRPs as DBRPs -from influxdb_client.domain.dbrp import DBRP as DBRP -from influxdb_client.domain.dbrp_create import DBRPCreate as DBRPCreate -from influxdb_client.domain.dbrp_get import DBRPGet as DBRPGet -from influxdb_client.domain.dbrp_update import DBRPUpdate as DBRPUpdate -from influxdb_client.domain.deadman_check import DeadmanCheck as DeadmanCheck -from influxdb_client.domain.decimal_places import 
DecimalPlaces as DecimalPlaces -from influxdb_client.domain.delete_predicate_request import DeletePredicateRequest as DeletePredicateRequest -from influxdb_client.domain.dialect import Dialect as Dialect -from influxdb_client.domain.dict_expression import DictExpression as DictExpression -from influxdb_client.domain.dict_item import DictItem as DictItem -from influxdb_client.domain.duration import Duration as Duration -from influxdb_client.domain.duration_literal import DurationLiteral as DurationLiteral -from influxdb_client.domain.error import Error as Error -from influxdb_client.domain.expression import Expression as Expression -from influxdb_client.domain.expression_statement import ExpressionStatement as ExpressionStatement -from influxdb_client.domain.field import Field as Field -from influxdb_client.domain.file import File as File -from influxdb_client.domain.float_literal import FloatLiteral as FloatLiteral -from influxdb_client.domain.flux_response import FluxResponse as FluxResponse -from influxdb_client.domain.flux_suggestion import FluxSuggestion as FluxSuggestion -from influxdb_client.domain.flux_suggestions import FluxSuggestions as FluxSuggestions -from influxdb_client.domain.function_expression import FunctionExpression as FunctionExpression -from influxdb_client.domain.gauge_view_properties import GaugeViewProperties as GaugeViewProperties -from influxdb_client.domain.greater_threshold import GreaterThreshold as GreaterThreshold -from influxdb_client.domain.health_check import HealthCheck as HealthCheck -from influxdb_client.domain.heatmap_view_properties import HeatmapViewProperties as HeatmapViewProperties -from influxdb_client.domain.histogram_view_properties import HistogramViewProperties as HistogramViewProperties -from influxdb_client.domain.http_notification_endpoint import HTTPNotificationEndpoint as HTTPNotificationEndpoint -from influxdb_client.domain.http_notification_rule import HTTPNotificationRule as HTTPNotificationRule -from influxdb_client.domain.http_notification_rule_base import HTTPNotificationRuleBase as HTTPNotificationRuleBase -from influxdb_client.domain.identifier import Identifier as Identifier -from influxdb_client.domain.import_declaration import ImportDeclaration as ImportDeclaration -from influxdb_client.domain.index_expression import IndexExpression as IndexExpression -from influxdb_client.domain.integer_literal import IntegerLiteral as IntegerLiteral -from influxdb_client.domain.is_onboarding import IsOnboarding as IsOnboarding -from influxdb_client.domain.label import Label as Label -from influxdb_client.domain.label_create_request import LabelCreateRequest as LabelCreateRequest -from influxdb_client.domain.label_mapping import LabelMapping as LabelMapping -from influxdb_client.domain.label_response import LabelResponse as LabelResponse -from influxdb_client.domain.label_update import LabelUpdate as LabelUpdate -from influxdb_client.domain.labels_response import LabelsResponse as LabelsResponse -from influxdb_client.domain.language_request import LanguageRequest as LanguageRequest -from influxdb_client.domain.legacy_authorization_post_request import ( - LegacyAuthorizationPostRequest as LegacyAuthorizationPostRequest, -) -from influxdb_client.domain.lesser_threshold import LesserThreshold as LesserThreshold -from influxdb_client.domain.line_plus_single_stat_properties import LinePlusSingleStatProperties as LinePlusSingleStatProperties -from influxdb_client.domain.line_protocol_error import LineProtocolError as LineProtocolError -from 
influxdb_client.domain.line_protocol_length_error import LineProtocolLengthError as LineProtocolLengthError -from influxdb_client.domain.links import Links as Links -from influxdb_client.domain.list_stacks_response import ListStacksResponse as ListStacksResponse -from influxdb_client.domain.log_event import LogEvent as LogEvent -from influxdb_client.domain.logical_expression import LogicalExpression as LogicalExpression -from influxdb_client.domain.logs import Logs as Logs -from influxdb_client.domain.map_variable_properties import MapVariableProperties as MapVariableProperties -from influxdb_client.domain.markdown_view_properties import MarkdownViewProperties as MarkdownViewProperties -from influxdb_client.domain.measurement_schema import MeasurementSchema as MeasurementSchema -from influxdb_client.domain.measurement_schema_column import MeasurementSchemaColumn as MeasurementSchemaColumn -from influxdb_client.domain.measurement_schema_create_request import ( - MeasurementSchemaCreateRequest as MeasurementSchemaCreateRequest, -) -from influxdb_client.domain.measurement_schema_list import MeasurementSchemaList as MeasurementSchemaList -from influxdb_client.domain.measurement_schema_update_request import ( - MeasurementSchemaUpdateRequest as MeasurementSchemaUpdateRequest, -) -from influxdb_client.domain.member_assignment import MemberAssignment as MemberAssignment -from influxdb_client.domain.member_expression import MemberExpression as MemberExpression -from influxdb_client.domain.metadata_backup import MetadataBackup as MetadataBackup -from influxdb_client.domain.model_property import ModelProperty as ModelProperty -from influxdb_client.domain.mosaic_view_properties import MosaicViewProperties as MosaicViewProperties -from influxdb_client.domain.node import Node as Node -from influxdb_client.domain.notification_endpoint import NotificationEndpoint as NotificationEndpoint -from influxdb_client.domain.notification_endpoint_base import NotificationEndpointBase as NotificationEndpointBase -from influxdb_client.domain.notification_endpoint_base_links import NotificationEndpointBaseLinks as NotificationEndpointBaseLinks -from influxdb_client.domain.notification_endpoint_discriminator import ( - NotificationEndpointDiscriminator as NotificationEndpointDiscriminator, -) -from influxdb_client.domain.notification_endpoint_type import NotificationEndpointType as NotificationEndpointType -from influxdb_client.domain.notification_endpoint_update import NotificationEndpointUpdate as NotificationEndpointUpdate -from influxdb_client.domain.notification_endpoints import NotificationEndpoints as NotificationEndpoints -from influxdb_client.domain.notification_rule import NotificationRule as NotificationRule -from influxdb_client.domain.notification_rule_base import NotificationRuleBase as NotificationRuleBase -from influxdb_client.domain.notification_rule_base_links import NotificationRuleBaseLinks as NotificationRuleBaseLinks -from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator as NotificationRuleDiscriminator -from influxdb_client.domain.notification_rule_update import NotificationRuleUpdate as NotificationRuleUpdate -from influxdb_client.domain.notification_rules import NotificationRules as NotificationRules -from influxdb_client.domain.object_expression import ObjectExpression as ObjectExpression -from influxdb_client.domain.onboarding_request import OnboardingRequest as OnboardingRequest -from influxdb_client.domain.onboarding_response import 
OnboardingResponse as OnboardingResponse -from influxdb_client.domain.option_statement import OptionStatement as OptionStatement -from influxdb_client.domain.organization import Organization as Organization -from influxdb_client.domain.organization_links import OrganizationLinks as OrganizationLinks -from influxdb_client.domain.organizations import Organizations as Organizations -from influxdb_client.domain.package import Package as Package -from influxdb_client.domain.package_clause import PackageClause as PackageClause -from influxdb_client.domain.pager_duty_notification_endpoint import PagerDutyNotificationEndpoint as PagerDutyNotificationEndpoint -from influxdb_client.domain.pager_duty_notification_rule import PagerDutyNotificationRule as PagerDutyNotificationRule -from influxdb_client.domain.pager_duty_notification_rule_base import ( - PagerDutyNotificationRuleBase as PagerDutyNotificationRuleBase, -) -from influxdb_client.domain.paren_expression import ParenExpression as ParenExpression -from influxdb_client.domain.password_reset_body import PasswordResetBody as PasswordResetBody -from influxdb_client.domain.patch_bucket_request import PatchBucketRequest as PatchBucketRequest -from influxdb_client.domain.patch_dashboard_request import PatchDashboardRequest as PatchDashboardRequest -from influxdb_client.domain.patch_organization_request import PatchOrganizationRequest as PatchOrganizationRequest -from influxdb_client.domain.patch_retention_rule import PatchRetentionRule as PatchRetentionRule -from influxdb_client.domain.patch_stack_request import PatchStackRequest as PatchStackRequest -from influxdb_client.domain.patch_stack_request_additional_resources import ( - PatchStackRequestAdditionalResources as PatchStackRequestAdditionalResources, -) -from influxdb_client.domain.permission import Permission as Permission -from influxdb_client.domain.permission_resource import PermissionResource as PermissionResource -from influxdb_client.domain.pipe_expression import PipeExpression as PipeExpression -from influxdb_client.domain.pipe_literal import PipeLiteral as PipeLiteral -from influxdb_client.domain.post_bucket_request import PostBucketRequest as PostBucketRequest -from influxdb_client.domain.post_check import PostCheck as PostCheck -from influxdb_client.domain.post_notification_endpoint import PostNotificationEndpoint as PostNotificationEndpoint -from influxdb_client.domain.post_notification_rule import PostNotificationRule as PostNotificationRule -from influxdb_client.domain.post_organization_request import PostOrganizationRequest as PostOrganizationRequest -from influxdb_client.domain.post_restore_kv_response import PostRestoreKVResponse as PostRestoreKVResponse -from influxdb_client.domain.post_stack_request import PostStackRequest as PostStackRequest -from influxdb_client.domain.property_key import PropertyKey as PropertyKey -from influxdb_client.domain.query import Query as Query -from influxdb_client.domain.query_edit_mode import QueryEditMode as QueryEditMode -from influxdb_client.domain.query_variable_properties import QueryVariableProperties as QueryVariableProperties -from influxdb_client.domain.query_variable_properties_values import QueryVariablePropertiesValues as QueryVariablePropertiesValues -from influxdb_client.domain.range_threshold import RangeThreshold as RangeThreshold -from influxdb_client.domain.ready import Ready as Ready -from influxdb_client.domain.regexp_literal import RegexpLiteral as RegexpLiteral -from influxdb_client.domain.remote_connection import 
RemoteConnection as RemoteConnection -from influxdb_client.domain.remote_connection_creation_request import ( - RemoteConnectionCreationRequest as RemoteConnectionCreationRequest, -) -from influxdb_client.domain.remote_connection_update_request import RemoteConnectionUpdateRequest as RemoteConnectionUpdateRequest -from influxdb_client.domain.remote_connections import RemoteConnections as RemoteConnections -from influxdb_client.domain.renamable_field import RenamableField as RenamableField -from influxdb_client.domain.replication import Replication as Replication -from influxdb_client.domain.replication_creation_request import ReplicationCreationRequest as ReplicationCreationRequest -from influxdb_client.domain.replication_update_request import ReplicationUpdateRequest as ReplicationUpdateRequest -from influxdb_client.domain.replications import Replications as Replications -from influxdb_client.domain.resource_member import ResourceMember as ResourceMember -from influxdb_client.domain.resource_members import ResourceMembers as ResourceMembers -from influxdb_client.domain.resource_members_links import ResourceMembersLinks as ResourceMembersLinks -from influxdb_client.domain.resource_owner import ResourceOwner as ResourceOwner -from influxdb_client.domain.resource_owners import ResourceOwners as ResourceOwners -from influxdb_client.domain.restored_bucket_mappings import RestoredBucketMappings as RestoredBucketMappings -from influxdb_client.domain.retention_policy_manifest import RetentionPolicyManifest as RetentionPolicyManifest -from influxdb_client.domain.return_statement import ReturnStatement as ReturnStatement -from influxdb_client.domain.routes import Routes as Routes -from influxdb_client.domain.routes_external import RoutesExternal as RoutesExternal -from influxdb_client.domain.routes_query import RoutesQuery as RoutesQuery -from influxdb_client.domain.routes_system import RoutesSystem as RoutesSystem -from influxdb_client.domain.rule_status_level import RuleStatusLevel as RuleStatusLevel -from influxdb_client.domain.run import Run as Run -from influxdb_client.domain.run_links import RunLinks as RunLinks -from influxdb_client.domain.run_manually import RunManually as RunManually -from influxdb_client.domain.runs import Runs as Runs -from influxdb_client.domain.scatter_view_properties import ScatterViewProperties as ScatterViewProperties -from influxdb_client.domain.schema_type import SchemaType as SchemaType -from influxdb_client.domain.scraper_target_request import ScraperTargetRequest as ScraperTargetRequest -from influxdb_client.domain.scraper_target_response import ScraperTargetResponse as ScraperTargetResponse -from influxdb_client.domain.scraper_target_responses import ScraperTargetResponses as ScraperTargetResponses -from influxdb_client.domain.script import Script as Script -from influxdb_client.domain.script_create_request import ScriptCreateRequest as ScriptCreateRequest -from influxdb_client.domain.script_invocation_params import ScriptInvocationParams as ScriptInvocationParams -from influxdb_client.domain.script_language import ScriptLanguage as ScriptLanguage -from influxdb_client.domain.script_update_request import ScriptUpdateRequest as ScriptUpdateRequest -from influxdb_client.domain.scripts import Scripts as Scripts -from influxdb_client.domain.secret_keys import SecretKeys as SecretKeys -from influxdb_client.domain.secret_keys_response import SecretKeysResponse as SecretKeysResponse -from influxdb_client.domain.shard_group_manifest import ShardGroupManifest as 
ShardGroupManifest -from influxdb_client.domain.shard_manifest import ShardManifest as ShardManifest -from influxdb_client.domain.shard_owner import ShardOwner as ShardOwner -from influxdb_client.domain.simple_table_view_properties import SimpleTableViewProperties as SimpleTableViewProperties -from influxdb_client.domain.single_stat_view_properties import SingleStatViewProperties as SingleStatViewProperties -from influxdb_client.domain.slack_notification_endpoint import SlackNotificationEndpoint as SlackNotificationEndpoint -from influxdb_client.domain.slack_notification_rule import SlackNotificationRule as SlackNotificationRule -from influxdb_client.domain.slack_notification_rule_base import SlackNotificationRuleBase as SlackNotificationRuleBase -from influxdb_client.domain.smtp_notification_rule import SMTPNotificationRule as SMTPNotificationRule -from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase as SMTPNotificationRuleBase -from influxdb_client.domain.source import Source as Source -from influxdb_client.domain.source_links import SourceLinks as SourceLinks -from influxdb_client.domain.sources import Sources as Sources -from influxdb_client.domain.stack import Stack as Stack -from influxdb_client.domain.stack_associations import StackAssociations as StackAssociations -from influxdb_client.domain.stack_events import StackEvents as StackEvents -from influxdb_client.domain.stack_links import StackLinks as StackLinks -from influxdb_client.domain.stack_resources import StackResources as StackResources -from influxdb_client.domain.statement import Statement as Statement -from influxdb_client.domain.static_legend import StaticLegend as StaticLegend -from influxdb_client.domain.status_rule import StatusRule as StatusRule -from influxdb_client.domain.string_literal import StringLiteral as StringLiteral -from influxdb_client.domain.subscription_manifest import SubscriptionManifest as SubscriptionManifest -from influxdb_client.domain.table_view_properties import TableViewProperties as TableViewProperties -from influxdb_client.domain.table_view_properties_table_options import ( - TableViewPropertiesTableOptions as TableViewPropertiesTableOptions, -) -from influxdb_client.domain.tag_rule import TagRule as TagRule -from influxdb_client.domain.task import Task as Task -from influxdb_client.domain.task_create_request import TaskCreateRequest as TaskCreateRequest -from influxdb_client.domain.task_links import TaskLinks as TaskLinks -from influxdb_client.domain.task_status_type import TaskStatusType as TaskStatusType -from influxdb_client.domain.task_update_request import TaskUpdateRequest as TaskUpdateRequest -from influxdb_client.domain.tasks import Tasks as Tasks -from influxdb_client.domain.telegraf import Telegraf as Telegraf -from influxdb_client.domain.telegraf_plugin import TelegrafPlugin as TelegrafPlugin -from influxdb_client.domain.telegraf_plugin_request import TelegrafPluginRequest as TelegrafPluginRequest -from influxdb_client.domain.telegraf_plugin_request_plugins import TelegrafPluginRequestPlugins as TelegrafPluginRequestPlugins -from influxdb_client.domain.telegraf_plugins import TelegrafPlugins as TelegrafPlugins -from influxdb_client.domain.telegraf_request import TelegrafRequest as TelegrafRequest -from influxdb_client.domain.telegraf_request_metadata import TelegrafRequestMetadata as TelegrafRequestMetadata -from influxdb_client.domain.telegrafs import Telegrafs as Telegrafs -from influxdb_client.domain.telegram_notification_endpoint import 
TelegramNotificationEndpoint as TelegramNotificationEndpoint -from influxdb_client.domain.telegram_notification_rule import TelegramNotificationRule as TelegramNotificationRule -from influxdb_client.domain.telegram_notification_rule_base import TelegramNotificationRuleBase as TelegramNotificationRuleBase -from influxdb_client.domain.template_apply import TemplateApply as TemplateApply -from influxdb_client.domain.template_apply_remotes import TemplateApplyRemotes as TemplateApplyRemotes -from influxdb_client.domain.template_apply_template import TemplateApplyTemplate as TemplateApplyTemplate -from influxdb_client.domain.template_chart import TemplateChart as TemplateChart -from influxdb_client.domain.template_export_by_id import TemplateExportByID as TemplateExportByID -from influxdb_client.domain.template_export_by_id_org_ids import TemplateExportByIDOrgIDs as TemplateExportByIDOrgIDs -from influxdb_client.domain.template_export_by_id_resource_filters import ( - TemplateExportByIDResourceFilters as TemplateExportByIDResourceFilters, -) -from influxdb_client.domain.template_export_by_id_resources import TemplateExportByIDResources as TemplateExportByIDResources -from influxdb_client.domain.template_kind import TemplateKind as TemplateKind -from influxdb_client.domain.template_summary import TemplateSummary as TemplateSummary -from influxdb_client.domain.template_summary_diff import TemplateSummaryDiff as TemplateSummaryDiff -from influxdb_client.domain.template_summary_diff_buckets import TemplateSummaryDiffBuckets as TemplateSummaryDiffBuckets -from influxdb_client.domain.template_summary_diff_buckets_new_old import ( - TemplateSummaryDiffBucketsNewOld as TemplateSummaryDiffBucketsNewOld, -) -from influxdb_client.domain.template_summary_diff_checks import TemplateSummaryDiffChecks as TemplateSummaryDiffChecks -from influxdb_client.domain.template_summary_diff_dashboards import TemplateSummaryDiffDashboards as TemplateSummaryDiffDashboards -from influxdb_client.domain.template_summary_diff_dashboards_new_old import ( - TemplateSummaryDiffDashboardsNewOld as TemplateSummaryDiffDashboardsNewOld, -) -from influxdb_client.domain.template_summary_diff_label_mappings import ( - TemplateSummaryDiffLabelMappings as TemplateSummaryDiffLabelMappings, -) -from influxdb_client.domain.template_summary_diff_labels import TemplateSummaryDiffLabels as TemplateSummaryDiffLabels -from influxdb_client.domain.template_summary_diff_labels_new_old import ( - TemplateSummaryDiffLabelsNewOld as TemplateSummaryDiffLabelsNewOld, -) -from influxdb_client.domain.template_summary_diff_notification_endpoints import ( - TemplateSummaryDiffNotificationEndpoints as TemplateSummaryDiffNotificationEndpoints, -) -from influxdb_client.domain.template_summary_diff_notification_rules import ( - TemplateSummaryDiffNotificationRules as TemplateSummaryDiffNotificationRules, -) -from influxdb_client.domain.template_summary_diff_notification_rules_new_old import ( - TemplateSummaryDiffNotificationRulesNewOld as TemplateSummaryDiffNotificationRulesNewOld, -) -from influxdb_client.domain.template_summary_diff_tasks import TemplateSummaryDiffTasks as TemplateSummaryDiffTasks -from influxdb_client.domain.template_summary_diff_tasks_new_old import ( - TemplateSummaryDiffTasksNewOld as TemplateSummaryDiffTasksNewOld, -) -from influxdb_client.domain.template_summary_diff_telegraf_configs import ( - TemplateSummaryDiffTelegrafConfigs as TemplateSummaryDiffTelegrafConfigs, -) -from influxdb_client.domain.template_summary_diff_variables 
import TemplateSummaryDiffVariables as TemplateSummaryDiffVariables -from influxdb_client.domain.template_summary_diff_variables_new_old import ( - TemplateSummaryDiffVariablesNewOld as TemplateSummaryDiffVariablesNewOld, -) -from influxdb_client.domain.template_summary_errors import TemplateSummaryErrors as TemplateSummaryErrors -from influxdb_client.domain.template_summary_label import TemplateSummaryLabel as TemplateSummaryLabel -from influxdb_client.domain.template_summary_label_properties import ( - TemplateSummaryLabelProperties as TemplateSummaryLabelProperties, -) -from influxdb_client.domain.template_summary_summary import TemplateSummarySummary as TemplateSummarySummary -from influxdb_client.domain.template_summary_summary_buckets import TemplateSummarySummaryBuckets as TemplateSummarySummaryBuckets -from influxdb_client.domain.template_summary_summary_dashboards import ( - TemplateSummarySummaryDashboards as TemplateSummarySummaryDashboards, -) -from influxdb_client.domain.template_summary_summary_label_mappings import ( - TemplateSummarySummaryLabelMappings as TemplateSummarySummaryLabelMappings, -) -from influxdb_client.domain.template_summary_summary_notification_rules import ( - TemplateSummarySummaryNotificationRules as TemplateSummarySummaryNotificationRules, -) -from influxdb_client.domain.template_summary_summary_status_rules import ( - TemplateSummarySummaryStatusRules as TemplateSummarySummaryStatusRules, -) -from influxdb_client.domain.template_summary_summary_tag_rules import ( - TemplateSummarySummaryTagRules as TemplateSummarySummaryTagRules, -) -from influxdb_client.domain.template_summary_summary_tasks import TemplateSummarySummaryTasks as TemplateSummarySummaryTasks -from influxdb_client.domain.template_summary_summary_variables import ( - TemplateSummarySummaryVariables as TemplateSummarySummaryVariables, -) -from influxdb_client.domain.test_statement import TestStatement as TestStatement -from influxdb_client.domain.threshold import Threshold as Threshold -from influxdb_client.domain.threshold_base import ThresholdBase as ThresholdBase -from influxdb_client.domain.threshold_check import ThresholdCheck as ThresholdCheck -from influxdb_client.domain.unary_expression import UnaryExpression as UnaryExpression -from influxdb_client.domain.unsigned_integer_literal import UnsignedIntegerLiteral as UnsignedIntegerLiteral -from influxdb_client.domain.user import User as User -from influxdb_client.domain.user_response import UserResponse as UserResponse -from influxdb_client.domain.user_response_links import UserResponseLinks as UserResponseLinks -from influxdb_client.domain.users import Users as Users -from influxdb_client.domain.variable import Variable as Variable -from influxdb_client.domain.variable_assignment import VariableAssignment as VariableAssignment -from influxdb_client.domain.variable_links import VariableLinks as VariableLinks -from influxdb_client.domain.variable_properties import VariableProperties as VariableProperties -from influxdb_client.domain.variables import Variables as Variables -from influxdb_client.domain.view import View as View -from influxdb_client.domain.view_links import ViewLinks as ViewLinks -from influxdb_client.domain.view_properties import ViewProperties as ViewProperties -from influxdb_client.domain.views import Views as Views -from influxdb_client.domain.write_precision import WritePrecision as WritePrecision -from influxdb_client.domain.xy_geom import XYGeom as XYGeom -from influxdb_client.domain.xy_view_properties import 
XYViewProperties as XYViewProperties -from influxdb_client.service.authorizations_service import AuthorizationsService as AuthorizationsService -from influxdb_client.service.backup_service import BackupService as BackupService -from influxdb_client.service.bucket_schemas_service import BucketSchemasService as BucketSchemasService -from influxdb_client.service.buckets_service import BucketsService as BucketsService -from influxdb_client.service.cells_service import CellsService as CellsService -from influxdb_client.service.checks_service import ChecksService as ChecksService -from influxdb_client.service.config_service import ConfigService as ConfigService -from influxdb_client.service.dashboards_service import DashboardsService as DashboardsService -from influxdb_client.service.dbr_ps_service import DBRPsService as DBRPsService -from influxdb_client.service.delete_service import DeleteService as DeleteService -from influxdb_client.service.health_service import HealthService as HealthService -from influxdb_client.service.invokable_scripts_service import InvokableScriptsService as InvokableScriptsService -from influxdb_client.service.labels_service import LabelsService as LabelsService -from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService as LegacyAuthorizationsService -from influxdb_client.service.metrics_service import MetricsService as MetricsService -from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService as NotificationEndpointsService -from influxdb_client.service.notification_rules_service import NotificationRulesService as NotificationRulesService -from influxdb_client.service.organizations_service import OrganizationsService as OrganizationsService -from influxdb_client.service.ping_service import PingService as PingService -from influxdb_client.service.query_service import QueryService as QueryService -from influxdb_client.service.ready_service import ReadyService as ReadyService -from influxdb_client.service.remote_connections_service import RemoteConnectionsService as RemoteConnectionsService -from influxdb_client.service.replications_service import ReplicationsService as ReplicationsService -from influxdb_client.service.resources_service import ResourcesService as ResourcesService -from influxdb_client.service.restore_service import RestoreService as RestoreService -from influxdb_client.service.routes_service import RoutesService as RoutesService -from influxdb_client.service.rules_service import RulesService as RulesService -from influxdb_client.service.scraper_targets_service import ScraperTargetsService as ScraperTargetsService -from influxdb_client.service.secrets_service import SecretsService as SecretsService -from influxdb_client.service.setup_service import SetupService as SetupService -from influxdb_client.service.signin_service import SigninService as SigninService -from influxdb_client.service.signout_service import SignoutService as SignoutService -from influxdb_client.service.sources_service import SourcesService as SourcesService -from influxdb_client.service.tasks_service import TasksService as TasksService -from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService as TelegrafPluginsService -from influxdb_client.service.telegrafs_service import TelegrafsService as TelegrafsService -from influxdb_client.service.templates_service import TemplatesService as TemplatesService -from influxdb_client.service.users_service import UsersService as UsersService -from 
influxdb_client.service.variables_service import VariablesService as VariablesService -from influxdb_client.service.views_service import ViewsService as ViewsService -from influxdb_client.service.write_service import WriteService as WriteService -from influxdb_client.version import VERSION as VERSION - -__version__ = VERSION diff --git a/stubs/influxdb-client/influxdb_client/_async/__init__.pyi b/stubs/influxdb-client/influxdb_client/_async/__init__.pyi deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/stubs/influxdb-client/influxdb_client/_async/api_client.pyi b/stubs/influxdb-client/influxdb_client/_async/api_client.pyi deleted file mode 100644 index 7ab0099c8484..000000000000 --- a/stubs/influxdb-client/influxdb_client/_async/api_client.pyi +++ /dev/null @@ -1,65 +0,0 @@ -from _typeshed import Incomplete - -class ApiClientAsync: - PRIMITIVE_TYPES: Incomplete - NATIVE_TYPES_MAPPING: Incomplete - configuration: Incomplete - pool_threads: Incomplete - rest_client: Incomplete - default_headers: Incomplete - cookie: Incomplete - def __init__( - self, - configuration: Incomplete | None = None, - header_name: Incomplete | None = None, - header_value: Incomplete | None = None, - cookie: Incomplete | None = None, - pool_threads: Incomplete | None = None, - **kwargs, - ) -> None: ... - async def close(self) -> None: ... - @property - def pool(self): ... - @property - def user_agent(self): ... - @user_agent.setter - def user_agent(self, value) -> None: ... - def set_default_header(self, header_name, header_value) -> None: ... - def sanitize_for_serialization(self, obj): ... - def deserialize(self, response, response_type): ... - def call_api( - self, - resource_path, - method, - path_params: Incomplete | None = None, - query_params: Incomplete | None = None, - header_params: Incomplete | None = None, - body: Incomplete | None = None, - post_params: Incomplete | None = None, - files: Incomplete | None = None, - response_type: Incomplete | None = None, - auth_settings: Incomplete | None = None, - async_req: Incomplete | None = None, - _return_http_data_only: Incomplete | None = None, - collection_formats: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - urlopen_kw: Incomplete | None = None, - ): ... - def request( - self, - method, - url, - query_params: Incomplete | None = None, - headers: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... - def parameters_to_tuples(self, params, collection_formats): ... - def prepare_post_parameters(self, post_params: Incomplete | None = None, files: Incomplete | None = None): ... - def select_header_accept(self, accepts): ... - def select_header_content_type(self, content_types): ... - def update_params_for_auth(self, headers, querys, auth_settings) -> None: ... diff --git a/stubs/influxdb-client/influxdb_client/_async/rest.pyi b/stubs/influxdb-client/influxdb_client/_async/rest.pyi deleted file mode 100644 index 667662487d32..000000000000 --- a/stubs/influxdb-client/influxdb_client/_async/rest.pyi +++ /dev/null @@ -1,96 +0,0 @@ -import io -from _typeshed import Incomplete - -class RESTResponseAsync(io.IOBase): - aiohttp_response: Incomplete - status: Incomplete - reason: Incomplete - data: Incomplete - def __init__(self, resp, data) -> None: ... - def getheaders(self): ... 
- def getheader(self, name, default: Incomplete | None = None): ... - -class RESTClientObjectAsync: - proxy: Incomplete - proxy_headers: Incomplete - allow_redirects: Incomplete - max_redirects: Incomplete - pool_manager: Incomplete - def __init__(self, configuration, pools_size: int = 4, maxsize: Incomplete | None = None, **kwargs) -> None: ... - async def close(self) -> None: ... - async def request( - self, - method, - url, - query_params: Incomplete | None = None, - headers: Incomplete | None = None, - body: Incomplete | None = None, - post_params: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - ): ... - async def GET( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - ): ... - async def HEAD( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - ): ... - async def OPTIONS( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - ): ... - async def DELETE( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - ): ... - async def POST( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - ): ... - async def PUT( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - ): ... - async def PATCH( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - ): ... diff --git a/stubs/influxdb-client/influxdb_client/_sync/__init__.pyi b/stubs/influxdb-client/influxdb_client/_sync/__init__.pyi deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/stubs/influxdb-client/influxdb_client/_sync/api_client.pyi b/stubs/influxdb-client/influxdb_client/_sync/api_client.pyi deleted file mode 100644 index 04033438d6a6..000000000000 --- a/stubs/influxdb-client/influxdb_client/_sync/api_client.pyi +++ /dev/null @@ -1,65 +0,0 @@ -from _typeshed import Incomplete - -class ApiClient: - PRIMITIVE_TYPES: Incomplete - NATIVE_TYPES_MAPPING: Incomplete - configuration: Incomplete - pool_threads: Incomplete - rest_client: Incomplete - default_headers: Incomplete - cookie: Incomplete - def __init__( - self, - configuration: Incomplete | None = None, - header_name: Incomplete | None = None, - header_value: Incomplete | None = None, - cookie: Incomplete | None = None, - pool_threads: Incomplete | None = None, - retries: bool = False, - ) -> None: ... - def __del__(self) -> None: ... - @property - def pool(self): ... - @property - def user_agent(self): ... 
- @user_agent.setter - def user_agent(self, value) -> None: ... - def set_default_header(self, header_name, header_value) -> None: ... - def sanitize_for_serialization(self, obj): ... - def deserialize(self, response, response_type): ... - def call_api( - self, - resource_path, - method, - path_params: Incomplete | None = None, - query_params: Incomplete | None = None, - header_params: Incomplete | None = None, - body: Incomplete | None = None, - post_params: Incomplete | None = None, - files: Incomplete | None = None, - response_type: Incomplete | None = None, - auth_settings: Incomplete | None = None, - async_req: Incomplete | None = None, - _return_http_data_only: Incomplete | None = None, - collection_formats: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - urlopen_kw: Incomplete | None = None, - ): ... - def request( - self, - method, - url, - query_params: Incomplete | None = None, - headers: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... - def parameters_to_tuples(self, params, collection_formats): ... - def prepare_post_parameters(self, post_params: Incomplete | None = None, files: Incomplete | None = None): ... - def select_header_accept(self, accepts): ... - def select_header_content_type(self, content_types): ... - def update_params_for_auth(self, headers, querys, auth_settings) -> None: ... diff --git a/stubs/influxdb-client/influxdb_client/_sync/rest.pyi b/stubs/influxdb-client/influxdb_client/_sync/rest.pyi deleted file mode 100644 index 6a196dbc0b56..000000000000 --- a/stubs/influxdb-client/influxdb_client/_sync/rest.pyi +++ /dev/null @@ -1,103 +0,0 @@ -import io -from _typeshed import Incomplete - -class RESTResponse(io.IOBase): - urllib3_response: Incomplete - status: Incomplete - reason: Incomplete - data: Incomplete - def __init__(self, resp) -> None: ... - def getheaders(self): ... - def getheader(self, name, default: Incomplete | None = None): ... - -class RESTClientObject: - configuration: Incomplete - pools_size: Incomplete - maxsize: Incomplete - retries: Incomplete - pool_manager: Incomplete - def __init__(self, configuration, pools_size: int = 4, maxsize: Incomplete | None = None, retries: bool = False) -> None: ... - def request( - self, - method, - url, - query_params: Incomplete | None = None, - headers: Incomplete | None = None, - body: Incomplete | None = None, - post_params: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... - def GET( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... - def HEAD( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... - def OPTIONS( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... 
- def DELETE( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... - def POST( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... - def PUT( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... - def PATCH( - self, - url, - headers: Incomplete | None = None, - query_params: Incomplete | None = None, - post_params: Incomplete | None = None, - body: Incomplete | None = None, - _preload_content: bool = True, - _request_timeout: Incomplete | None = None, - **urlopen_kw, - ): ... diff --git a/stubs/influxdb-client/influxdb_client/client/__init__.pyi b/stubs/influxdb-client/influxdb_client/client/__init__.pyi deleted file mode 100644 index 4285a8aa3166..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/__init__.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from influxdb_client.service.authorizations_service import AuthorizationsService as AuthorizationsService -from influxdb_client.service.backup_service import BackupService as BackupService -from influxdb_client.service.bucket_schemas_service import BucketSchemasService as BucketSchemasService -from influxdb_client.service.buckets_service import BucketsService as BucketsService -from influxdb_client.service.cells_service import CellsService as CellsService -from influxdb_client.service.checks_service import ChecksService as ChecksService -from influxdb_client.service.config_service import ConfigService as ConfigService -from influxdb_client.service.dashboards_service import DashboardsService as DashboardsService -from influxdb_client.service.dbr_ps_service import DBRPsService as DBRPsService -from influxdb_client.service.delete_service import DeleteService as DeleteService -from influxdb_client.service.health_service import HealthService as HealthService -from influxdb_client.service.invokable_scripts_service import InvokableScriptsService as InvokableScriptsService -from influxdb_client.service.labels_service import LabelsService as LabelsService -from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService as LegacyAuthorizationsService -from influxdb_client.service.metrics_service import MetricsService as MetricsService -from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService as NotificationEndpointsService -from influxdb_client.service.notification_rules_service import NotificationRulesService as NotificationRulesService -from influxdb_client.service.organizations_service import OrganizationsService as OrganizationsService -from influxdb_client.service.ping_service import PingService as PingService -from influxdb_client.service.query_service import QueryService as QueryService -from influxdb_client.service.ready_service import ReadyService as ReadyService -from influxdb_client.service.remote_connections_service import RemoteConnectionsService as RemoteConnectionsService -from influxdb_client.service.replications_service import ReplicationsService as 
ReplicationsService -from influxdb_client.service.resources_service import ResourcesService as ResourcesService -from influxdb_client.service.restore_service import RestoreService as RestoreService -from influxdb_client.service.routes_service import RoutesService as RoutesService -from influxdb_client.service.rules_service import RulesService as RulesService -from influxdb_client.service.scraper_targets_service import ScraperTargetsService as ScraperTargetsService -from influxdb_client.service.secrets_service import SecretsService as SecretsService -from influxdb_client.service.setup_service import SetupService as SetupService -from influxdb_client.service.signin_service import SigninService as SigninService -from influxdb_client.service.signout_service import SignoutService as SignoutService -from influxdb_client.service.sources_service import SourcesService as SourcesService -from influxdb_client.service.tasks_service import TasksService as TasksService -from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService as TelegrafPluginsService -from influxdb_client.service.telegrafs_service import TelegrafsService as TelegrafsService -from influxdb_client.service.templates_service import TemplatesService as TemplatesService -from influxdb_client.service.users_service import UsersService as UsersService -from influxdb_client.service.variables_service import VariablesService as VariablesService -from influxdb_client.service.views_service import ViewsService as ViewsService -from influxdb_client.service.write_service import WriteService as WriteService diff --git a/stubs/influxdb-client/influxdb_client/client/_base.pyi b/stubs/influxdb-client/influxdb_client/client/_base.pyi deleted file mode 100644 index edc44395d8ef..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/_base.pyi +++ /dev/null @@ -1,60 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client import Configuration - -LOGGERS_NAMES: Incomplete - -class _BaseClient: - url: str - token: str | None - org: str | None - default_tags: Incomplete | None - conf: _Configuration - auth_header_name: Incomplete | None - auth_header_value: Incomplete | None - retries: bool | Incomplete - profilers: Incomplete | None - def __init__( - self, - url: str, - token: str | None, - debug: bool | None = None, - timeout: int = 10000, - enable_gzip: bool = False, - org: str | None = None, - default_tags: dict[Incomplete, Incomplete] | None = None, - http_client_logger: str | None = None, - *, - verify_ssl: bool = ..., - ssl_ca_cert: Incomplete | None = ..., - cert_file: Incomplete | None = ..., - cert_key_file: Incomplete | None = ..., - cert_key_password: Incomplete | None = ..., - ssl_context: Incomplete | None = ..., - proxy: Incomplete | None = ..., - proxy_headers: Incomplete | None = ..., - connection_pool_maxsize: int = ..., - username: Incomplete | None = ..., - password: Incomplete | None = ..., - auth_basic: bool = ..., - retries: bool | Incomplete = ..., - profilers: Incomplete | None = ..., - ) -> None: ... - -class _BaseQueryApi: - default_dialect: Incomplete - def __init__(self, influxdb_client, query_options: Incomplete | None = None) -> None: ... - -class _BaseWriteApi: - def __init__(self, influxdb_client, point_settings: Incomplete | None = None) -> None: ... - -class _BaseDeleteApi: - def __init__(self, influxdb_client) -> None: ... - -class _Configuration(Configuration): - enable_gzip: bool - username: Incomplete - password: Incomplete - def __init__(self) -> None: ... 
- def update_request_header_params(self, path: str, params: dict[Incomplete, Incomplete]): ... - def update_request_body(self, path: str, body): ... diff --git a/stubs/influxdb-client/influxdb_client/client/_pages.pyi b/stubs/influxdb-client/influxdb_client/client/_pages.pyi deleted file mode 100644 index 1ecf38d5f1c2..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/_pages.pyi +++ /dev/null @@ -1,37 +0,0 @@ -from collections.abc import Callable -from typing import Any, Generic, Protocol, TypeVar -from typing_extensions import Self - -class _HasId(Protocol): - @property - def id(self) -> str | None: ... - -_R = TypeVar("_R", default=Any) -_T = TypeVar("_T", bound=_HasId) - -class _Page(Generic[_T]): - has_next: bool - values: list[_T] - next_after: str | None - - def __init__(self, values: list[_T], has_next: bool, next_after: str | None) -> None: ... - @staticmethod - def empty() -> _Page[_T]: ... - @staticmethod - def initial(after: str | None) -> _Page[_T]: ... - -class _PageIterator(Generic[_T]): - page: _Page[_T] - get_next_page: Callable[[_Page[_T]], _Page[_T]] - - def __init__(self, page: _Page[_T], get_next_page: Callable[[_Page[_T]], _Page[_T]]) -> None: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T: ... - -class _Paginated(Generic[_T, _R]): - paginated_getter: Callable[..., _R] # Gets passed additional kwargs to find_iter(). - pluck_page_resources_from_response: Callable[[_R], list[_T]] - def __init__( - self, paginated_getter: Callable[..., _R], pluck_page_resources_from_response: Callable[[_R], list[_T]] - ) -> None: ... - def find_iter(self, *, after: str | None = None, **kwargs: Any) -> _PageIterator[_T]: ... diff --git a/stubs/influxdb-client/influxdb_client/client/authorizations_api.pyi b/stubs/influxdb-client/influxdb_client/client/authorizations_api.pyi deleted file mode 100644 index fbcdefe5b61e..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/authorizations_api.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client import Authorization, Organization, User - -class AuthorizationsApi: - def __init__(self, influxdb_client) -> None: ... - def create_authorization( - self, - org_id: Incomplete | None = None, - permissions: list[Incomplete] | None = None, - authorization: Authorization | None = None, - ) -> Authorization: ... - def find_authorization_by_id(self, auth_id: str) -> Authorization: ... - def find_authorizations(self, **kwargs): ... - def find_authorizations_by_user(self, user: User): ... - def find_authorizations_by_user_id(self, user_id: str): ... - def find_authorizations_by_user_name(self, user_name: str): ... - def find_authorizations_by_org(self, org: Organization): ... - def find_authorizations_by_org_name(self, org_name: str): ... - def find_authorizations_by_org_id(self, org_id: str): ... - def update_authorization(self, auth): ... - def clone_authorization(self, auth) -> Authorization: ... - def delete_authorization(self, auth): ... diff --git a/stubs/influxdb-client/influxdb_client/client/bucket_api.pyi b/stubs/influxdb-client/influxdb_client/client/bucket_api.pyi deleted file mode 100644 index d1cd792d3ba6..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/bucket_api.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from _typeshed import Incomplete - -from ..domain.bucket import Bucket -from ._pages import _PageIterator - -class BucketsApi: - def __init__(self, influxdb_client) -> None: ... 
- def create_bucket( - self, - bucket: Incomplete | None = None, - bucket_name: Incomplete | None = None, - org_id: Incomplete | None = None, - retention_rules: Incomplete | None = None, - description: Incomplete | None = None, - org: Incomplete | None = None, - ) -> Bucket: ... - def update_bucket(self, bucket: Bucket) -> Bucket: ... - def delete_bucket(self, bucket): ... - def find_bucket_by_id(self, id): ... - def find_bucket_by_name(self, bucket_name): ... - def find_buckets(self, **kwargs): ... - def find_buckets_iter( - self, *, name: str = ..., org: str = ..., org_id: str = ..., after: str | None = None, limit: int = ... - ) -> _PageIterator[Bucket]: ... diff --git a/stubs/influxdb-client/influxdb_client/client/delete_api.pyi b/stubs/influxdb-client/influxdb_client/client/delete_api.pyi deleted file mode 100644 index 369568891e26..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/delete_api.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from datetime import datetime - -from influxdb_client import Organization -from influxdb_client.client._base import _BaseDeleteApi - -class DeleteApi(_BaseDeleteApi): - def __init__(self, influxdb_client) -> None: ... - def delete( - self, start: str | datetime, stop: str | datetime, predicate: str, bucket: str, org: str | Organization | None = None - ) -> None: ... diff --git a/stubs/influxdb-client/influxdb_client/client/delete_api_async.pyi b/stubs/influxdb-client/influxdb_client/client/delete_api_async.pyi deleted file mode 100644 index dca8faa6efd6..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/delete_api_async.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from datetime import datetime - -from influxdb_client import Organization -from influxdb_client.client._base import _BaseDeleteApi - -class DeleteApiAsync(_BaseDeleteApi): - def __init__(self, influxdb_client) -> None: ... - async def delete( - self, start: str | datetime, stop: str | datetime, predicate: str, bucket: str, org: str | Organization | None = None - ) -> bool: ... diff --git a/stubs/influxdb-client/influxdb_client/client/exceptions.pyi b/stubs/influxdb-client/influxdb_client/client/exceptions.pyi deleted file mode 100644 index 0770c7f9fefc..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/exceptions.pyi +++ /dev/null @@ -1,13 +0,0 @@ -from _typeshed import Incomplete - -from urllib3 import HTTPResponse - -from .._sync.rest import RESTResponse - -logger: Incomplete - -class InfluxDBError(Exception): - response: Incomplete - message: Incomplete - retry_after: Incomplete - def __init__(self, response: HTTPResponse | RESTResponse | None = None, message: str | None = None) -> None: ... diff --git a/stubs/influxdb-client/influxdb_client/client/flux_csv_parser.pyi b/stubs/influxdb-client/influxdb_client/client/flux_csv_parser.pyi deleted file mode 100644 index 8091fa70659b..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/flux_csv_parser.pyi +++ /dev/null @@ -1,75 +0,0 @@ -from _typeshed import Incomplete -from collections.abc import Generator -from enum import Enum -from types import TracebackType -from typing_extensions import Self - -from influxdb_client.client.flux_table import TableList - -ANNOTATION_DEFAULT: str -ANNOTATION_GROUP: str -ANNOTATION_DATATYPE: str -ANNOTATIONS: Incomplete - -class FluxQueryException(Exception): - message: Incomplete - reference: Incomplete - def __init__(self, message, reference) -> None: ... - -class FluxCsvParserException(Exception): ... 
- -class FluxSerializationMode(Enum): - tables = 1 - stream = 2 - dataFrame = 3 - -class FluxResponseMetadataMode(Enum): - full = 1 - only_names = 2 - -class _FluxCsvParserMetadata: - table_index: int - table_id: int - start_new_table: bool - table: Incomplete - groups: Incomplete - parsing_state_error: bool - def __init__(self) -> None: ... - -class FluxCsvParser: - tables: Incomplete - def __init__( - self, - response, - serialization_mode: FluxSerializationMode, - data_frame_index: list[str] | None = None, - query_options: Incomplete | None = None, - response_metadata_mode: FluxResponseMetadataMode = ..., - use_extension_dtypes: bool = False, - ) -> None: ... - def __enter__(self) -> Self: ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> None: ... - async def __aenter__(self) -> Self: ... - async def __aexit__( - self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> None: ... - def generator(self) -> Generator[Incomplete, None, None]: ... - def generator_async(self): ... - def parse_record(self, table_index, table, csv): ... - @staticmethod - def add_data_types(table, data_types) -> None: ... - @staticmethod - def add_groups(table, csv) -> None: ... - @staticmethod - def add_default_empty_values(table, default_values) -> None: ... - @staticmethod - def add_column_names_and_tags(table, csv) -> None: ... - def table_list(self) -> TableList: ... - -class _StreamReaderToWithAsyncRead: - response: Incomplete - decoder: Incomplete - def __init__(self, response) -> None: ... - async def read(self, size: int) -> str: ... diff --git a/stubs/influxdb-client/influxdb_client/client/flux_table.pyi b/stubs/influxdb-client/influxdb_client/client/flux_table.pyi deleted file mode 100644 index 66a791209cde..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/flux_table.pyi +++ /dev/null @@ -1,56 +0,0 @@ -from _typeshed import Incomplete -from collections.abc import Iterator -from http.client import HTTPResponse -from json import JSONEncoder - -class FluxStructure: ... - -class FluxStructureEncoder(JSONEncoder): - def default(self, obj): ... - -class FluxTable(FluxStructure): - columns: Incomplete - records: Incomplete - def __init__(self) -> None: ... - def get_group_key(self): ... - def __iter__(self): ... - -class FluxColumn(FluxStructure): - default_value: Incomplete - group: Incomplete - data_type: Incomplete - label: Incomplete - index: Incomplete - def __init__( - self, - index: Incomplete | None = None, - label: Incomplete | None = None, - data_type: Incomplete | None = None, - group: Incomplete | None = None, - default_value: Incomplete | None = None, - ) -> None: ... - -class FluxRecord(FluxStructure): - table: Incomplete - values: Incomplete - row: Incomplete - def __init__(self, table, values: Incomplete | None = None) -> None: ... - def get_start(self): ... - def get_stop(self): ... - def get_time(self): ... - def get_value(self): ... - def get_field(self): ... - def get_measurement(self): ... - def __getitem__(self, key): ... - def __setitem__(self, key, value): ... - -class TableList(list[FluxTable]): - def to_values(self, columns: list[str] | None = None) -> list[list[object]]: ... - def to_json(self, columns: list[str] | None = None, **kwargs) -> str: ... - -class CSVIterator(Iterator[list[str]]): - delegate: Incomplete - def __init__(self, response: HTTPResponse) -> None: ... - def __iter__(self): ... - def __next__(self): ... 
- def to_values(self) -> list[list[str]]: ... diff --git a/stubs/influxdb-client/influxdb_client/client/influxdb_client.pyi b/stubs/influxdb-client/influxdb_client/client/influxdb_client.pyi deleted file mode 100644 index b67ad4451f1f..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/influxdb_client.pyi +++ /dev/null @@ -1,72 +0,0 @@ -from _typeshed import Incomplete -from types import TracebackType -from typing_extensions import Self - -from influxdb_client import HealthCheck, InvokableScriptsApi, Ready -from influxdb_client.client._base import _BaseClient -from influxdb_client.client.authorizations_api import AuthorizationsApi -from influxdb_client.client.bucket_api import BucketsApi -from influxdb_client.client.delete_api import DeleteApi -from influxdb_client.client.labels_api import LabelsApi -from influxdb_client.client.organizations_api import OrganizationsApi -from influxdb_client.client.query_api import QueryApi, QueryOptions -from influxdb_client.client.tasks_api import TasksApi -from influxdb_client.client.users_api import UsersApi -from influxdb_client.client.write_api import PointSettings, WriteApi, WriteOptions - -logger: Incomplete - -class InfluxDBClient(_BaseClient): - api_client: Incomplete - def __init__( - self, - url: str, - token: str | None = None, - debug: bool | None = None, - timeout: int = 10000, - enable_gzip: bool = False, - org: str | None = None, - default_tags: dict[Incomplete, Incomplete] | None = None, - *, - verify_ssl: bool = ..., - ssl_ca_cert: Incomplete | None = ..., - cert_file: Incomplete | None = ..., - cert_key_file: Incomplete | None = ..., - cert_key_password: Incomplete | None = ..., - ssl_context: Incomplete | None = ..., - proxy: Incomplete | None = ..., - proxy_headers: Incomplete | None = ..., - connection_pool_maxsize: int = ..., - username: Incomplete | None = ..., - password: Incomplete | None = ..., - auth_basic: bool = ..., - retries: bool | Incomplete = ..., - profilers: Incomplete | None = ..., - ) -> None: ... - def __enter__(self) -> Self: ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None - ) -> None: ... - @classmethod - def from_config_file( - cls, config_file: str = "config.ini", debug: Incomplete | None = None, enable_gzip: bool = False, **kwargs - ): ... - @classmethod - def from_env_properties(cls, debug: Incomplete | None = None, enable_gzip: bool = False, **kwargs): ... - def write_api(self, write_options: WriteOptions = ..., point_settings: PointSettings = ..., **kwargs) -> WriteApi: ... - def query_api(self, query_options: QueryOptions = ...) -> QueryApi: ... - def invokable_scripts_api(self) -> InvokableScriptsApi: ... - def close(self) -> None: ... - def __del__(self) -> None: ... - def buckets_api(self) -> BucketsApi: ... - def authorizations_api(self) -> AuthorizationsApi: ... - def users_api(self) -> UsersApi: ... - def organizations_api(self) -> OrganizationsApi: ... - def tasks_api(self) -> TasksApi: ... - def labels_api(self) -> LabelsApi: ... - def health(self) -> HealthCheck: ... - def ping(self) -> bool: ... - def version(self) -> str: ... - def build(self) -> str: ... - def ready(self) -> Ready: ... - def delete_api(self) -> DeleteApi: ... 
diff --git a/stubs/influxdb-client/influxdb_client/client/influxdb_client_async.pyi b/stubs/influxdb-client/influxdb_client/client/influxdb_client_async.pyi deleted file mode 100644 index 3e308e031df1..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/influxdb_client_async.pyi +++ /dev/null @@ -1,56 +0,0 @@ -from _typeshed import Incomplete -from types import TracebackType -from typing_extensions import Self - -from influxdb_client.client._base import _BaseClient -from influxdb_client.client.delete_api_async import DeleteApiAsync -from influxdb_client.client.query_api import QueryOptions -from influxdb_client.client.query_api_async import QueryApiAsync -from influxdb_client.client.write_api import PointSettings -from influxdb_client.client.write_api_async import WriteApiAsync - -logger: Incomplete - -class InfluxDBClientAsync(_BaseClient): - api_client: Incomplete - def __init__( - self, - url: str, - token: str | None = None, - org: str | None = None, - debug: bool | None = None, - timeout: int = 10000, - enable_gzip: bool = False, - *, - verify_ssl: bool = ..., - ssl_ca_cert: Incomplete | None = ..., - cert_file: Incomplete | None = ..., - cert_key_file: Incomplete | None = ..., - cert_key_password: Incomplete | None = ..., - ssl_context: Incomplete | None = ..., - proxy: Incomplete | None = ..., - proxy_headers: Incomplete | None = ..., - connection_pool_maxsize: int = ..., - username: Incomplete | None = ..., - password: Incomplete | None = ..., - auth_basic: bool = ..., - retries: bool | Incomplete = ..., - profilers: Incomplete | None = ..., - ) -> None: ... - async def __aenter__(self) -> Self: ... - async def __aexit__( - self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None - ) -> None: ... - async def close(self) -> None: ... - @classmethod - def from_config_file( - cls, config_file: str = "config.ini", debug: Incomplete | None = None, enable_gzip: bool = False, **kwargs - ): ... - @classmethod - def from_env_properties(cls, debug: Incomplete | None = None, enable_gzip: bool = False, **kwargs): ... - async def ping(self) -> bool: ... - async def version(self) -> str: ... - async def build(self) -> str: ... - def query_api(self, query_options: QueryOptions = ...) -> QueryApiAsync: ... - def write_api(self, point_settings: PointSettings = ...) -> WriteApiAsync: ... - def delete_api(self) -> DeleteApiAsync: ... diff --git a/stubs/influxdb-client/influxdb_client/client/invokable_scripts_api.pyi b/stubs/influxdb-client/influxdb_client/client/invokable_scripts_api.pyi deleted file mode 100644 index 87154739037c..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/invokable_scripts_api.pyi +++ /dev/null @@ -1,26 +0,0 @@ -from _typeshed import Incomplete -from collections.abc import Generator, Iterator -from typing import Any - -from influxdb_client import Script, ScriptCreateRequest, ScriptUpdateRequest -from influxdb_client.client._base import _BaseQueryApi -from influxdb_client.client.flux_table import CSVIterator, FluxRecord, TableList - -class InvokableScriptsApi(_BaseQueryApi): - def __init__(self, influxdb_client) -> None: ... - def create_script(self, create_request: ScriptCreateRequest) -> Script: ... - def update_script(self, script_id: str, update_request: ScriptUpdateRequest) -> Script: ... - def delete_script(self, script_id: str) -> None: ... - def find_scripts(self, **kwargs): ... - def invoke_script(self, script_id: str, params: dict[Incomplete, Incomplete] | None = None) -> TableList: ... 
- def invoke_script_stream( - self, script_id: str, params: dict[Incomplete, Incomplete] | None = None - ) -> Generator[FluxRecord, Any, None]: ... - def invoke_script_data_frame( - self, script_id: str, params: dict[Incomplete, Incomplete] | None = None, data_frame_index: list[str] | None = None - ): ... - def invoke_script_data_frame_stream( - self, script_id: str, params: dict[Incomplete, Incomplete] | None = None, data_frame_index: list[str] | None = None - ): ... - def invoke_script_csv(self, script_id: str, params: dict[Incomplete, Incomplete] | None = None) -> CSVIterator: ... - def invoke_script_raw(self, script_id: str, params: dict[Incomplete, Incomplete] | None = None) -> Iterator[list[str]]: ... diff --git a/stubs/influxdb-client/influxdb_client/client/labels_api.pyi b/stubs/influxdb-client/influxdb_client/client/labels_api.pyi deleted file mode 100644 index 0268470957a7..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/labels_api.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from influxdb_client import Label - -class LabelsApi: - def __init__(self, influxdb_client) -> None: ... - def create_label(self, name: str, org_id: str, properties: dict[str, str] | None = None) -> Label: ... - def update_label(self, label: Label): ... - def delete_label(self, label: str | Label): ... - def clone_label(self, cloned_name: str, label: Label) -> Label: ... - def find_labels(self, **kwargs) -> list[Label]: ... - def find_label_by_id(self, label_id: str): ... - def find_label_by_org(self, org_id) -> list[Label]: ... diff --git a/stubs/influxdb-client/influxdb_client/client/logging_handler.pyi b/stubs/influxdb-client/influxdb_client/client/logging_handler.pyi deleted file mode 100644 index c6561c780538..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/logging_handler.pyi +++ /dev/null @@ -1,14 +0,0 @@ -import logging -from _typeshed import Incomplete - -class InfluxLoggingHandler(logging.Handler): - DEFAULT_LOG_RECORD_KEYS: Incomplete - bucket: Incomplete - client: Incomplete - write_api: Incomplete - def __init__( - self, *, url, token, org, bucket, client_args: Incomplete | None = None, write_api_args: Incomplete | None = None - ) -> None: ... - def __del__(self) -> None: ... - def close(self) -> None: ... - def emit(self, record: logging.LogRecord) -> None: ... diff --git a/stubs/influxdb-client/influxdb_client/client/organizations_api.pyi b/stubs/influxdb-client/influxdb_client/client/organizations_api.pyi deleted file mode 100644 index fb014a4912b7..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/organizations_api.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from influxdb_client import Organization - -class OrganizationsApi: - def __init__(self, influxdb_client) -> None: ... - def me(self): ... - def find_organization(self, org_id): ... - def find_organizations(self, **kwargs): ... - def create_organization(self, name: str | None = None, organization: Organization | None = None) -> Organization: ... - def update_organization(self, organization: Organization) -> Organization: ... - def delete_organization(self, org_id: str): ... 
diff --git a/stubs/influxdb-client/influxdb_client/client/query_api.pyi b/stubs/influxdb-client/influxdb_client/client/query_api.pyi deleted file mode 100644 index d155fdb8fc7c..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/query_api.pyi +++ /dev/null @@ -1,51 +0,0 @@ -from _typeshed import Incomplete, SupportsItems -from collections.abc import Callable, Generator -from typing import Any - -from influxdb_client import Dialect -from influxdb_client.client._base import _BaseQueryApi -from influxdb_client.client.flux_table import CSVIterator, FluxRecord, TableList -from influxdb_client.domain.organization import Organization - -class QueryOptions: - profilers: Incomplete - profiler_callback: Incomplete - def __init__( - self, profilers: list[str] | None = None, profiler_callback: Callable[..., Incomplete] | None = None - ) -> None: ... - -class QueryApi(_BaseQueryApi): - def __init__(self, influxdb_client, query_options=...) -> None: ... - def query_csv( - self, - query: str, - org: Incomplete | None = None, - dialect: Dialect = ..., - params: SupportsItems[str, Incomplete] | None = None, - ) -> CSVIterator: ... - def query_raw( - self, query: str, org: Incomplete | None = None, dialect=..., params: SupportsItems[str, Incomplete] | None = None - ): ... - def query( - self, query: str, org: Incomplete | None = None, params: SupportsItems[str, Incomplete] | None = None - ) -> TableList: ... - def query_stream( - self, query: str, org: Incomplete | None = None, params: SupportsItems[str, Incomplete] | None = None - ) -> Generator[FluxRecord, Any, None]: ... - def query_data_frame( - self, - query: str, - org: Organization | str | None = None, - data_frame_index: list[str] | None = None, - params: SupportsItems[str, Incomplete] | None = None, - use_extension_dtypes: bool = False, - ): ... - def query_data_frame_stream( - self, - query: str, - org: Organization | str | None = None, - data_frame_index: list[str] | None = None, - params: SupportsItems[str, Incomplete] | None = None, - use_extension_dtypes: bool = False, - ): ... - def __del__(self) -> None: ... diff --git a/stubs/influxdb-client/influxdb_client/client/query_api_async.pyi b/stubs/influxdb-client/influxdb_client/client/query_api_async.pyi deleted file mode 100644 index 4648d07d9299..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/query_api_async.pyi +++ /dev/null @@ -1,39 +0,0 @@ -from _typeshed import Incomplete, SupportsItems -from collections.abc import AsyncGenerator - -from influxdb_client.client._base import _BaseQueryApi -from influxdb_client.client.flux_table import FluxRecord, TableList -from influxdb_client.domain.dialect import Dialect -from influxdb_client.domain.organization import Organization - -class QueryApiAsync(_BaseQueryApi): - def __init__(self, influxdb_client, query_options=...) -> None: ... - async def query( - self, query: str, org: Incomplete | None = None, params: SupportsItems[str, Incomplete] | None = None - ) -> TableList: ... - async def query_stream( - self, query: str, org: Incomplete | None = None, params: SupportsItems[str, Incomplete] | None = None - ) -> AsyncGenerator[FluxRecord, None]: ... - async def query_data_frame( - self, - query: str, - org: str | Organization | None = None, - data_frame_index: list[str] | None = None, - params: SupportsItems[str, Incomplete] | None = None, - use_extension_dtypes: bool = False, - ): ... 
- async def query_data_frame_stream( - self, - query: str, - org: str | Organization | None = None, - data_frame_index: list[str] | None = None, - params: SupportsItems[str, Incomplete] | None = None, - use_extension_dtypes: bool = False, - ): ... - async def query_raw( - self, - query: str, - org: str | Organization | None = None, - dialect: Dialect = ..., - params: SupportsItems[str, Incomplete] | None = None, - ) -> str: ... diff --git a/stubs/influxdb-client/influxdb_client/client/tasks_api.pyi b/stubs/influxdb-client/influxdb_client/client/tasks_api.pyi deleted file mode 100644 index 15a5110dfdeb..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/tasks_api.pyi +++ /dev/null @@ -1,40 +0,0 @@ -from datetime import datetime - -from influxdb_client import LabelResponse, LogEvent, Run, TaskCreateRequest, TaskUpdateRequest -from influxdb_client.domain.task import Task - -from ._pages import _PageIterator - -class TasksApi: - def __init__(self, influxdb_client) -> None: ... - def find_task_by_id(self, task_id) -> Task: ... - def find_tasks( - self, *, name: str = ..., after: str = ..., user: str = ..., org: str = ..., org_id: str = ..., limit: int = ..., **kwargs - ) -> list[Task]: ... - def find_tasks_iter( - self, *, name: str = ..., after: str | None = None, user: str = ..., org: str = ..., org_id: str = ..., limit: int = ... - ) -> _PageIterator[Task]: ... - def create_task(self, task: Task | None = None, task_create_request: TaskCreateRequest | None = None) -> Task: ... - def create_task_every(self, name, flux, every, organization) -> Task: ... - def create_task_cron(self, name: str, flux: str, cron: str, org_id: str) -> Task: ... - def delete_task(self, task_id: str): ... - def update_task(self, task: Task) -> Task: ... - def update_task_request(self, task_id, task_update_request: TaskUpdateRequest) -> Task: ... - def clone_task(self, task: Task) -> Task: ... - def get_labels(self, task_id): ... - def add_label(self, label_id: str, task_id: str) -> LabelResponse: ... - def delete_label(self, label_id: str, task_id: str): ... - def get_members(self, task_id: str): ... - def add_member(self, member_id, task_id): ... - def delete_member(self, member_id, task_id): ... - def get_owners(self, task_id): ... - def add_owner(self, owner_id, task_id): ... - def delete_owner(self, owner_id, task_id): ... - def get_runs(self, task_id, **kwargs) -> list[Run]: ... - def get_run(self, task_id: str, run_id: str) -> Run: ... - def get_run_logs(self, task_id: str, run_id: str) -> list[LogEvent]: ... - def run_manually(self, task_id: str, scheduled_for: datetime | None = None): ... - def retry_run(self, task_id: str, run_id: str): ... - def cancel_run(self, task_id: str, run_id: str): ... - def get_logs(self, task_id: str) -> list[LogEvent]: ... - def find_tasks_by_user(self, task_user_id): ... diff --git a/stubs/influxdb-client/influxdb_client/client/users_api.pyi b/stubs/influxdb-client/influxdb_client/client/users_api.pyi deleted file mode 100644 index 8be06535e024..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/users_api.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from influxdb_client import User, UserResponse, Users - -class UsersApi: - def __init__(self, influxdb_client) -> None: ... - def me(self) -> User: ... - def create_user(self, name: str) -> User: ... - def update_user(self, user: User) -> UserResponse: ... - def update_password(self, user: str | User | UserResponse, password: str) -> None: ... - def delete_user(self, user: str | User | UserResponse) -> None: ... 
- def find_users(self, **kwargs) -> Users: ... diff --git a/stubs/influxdb-client/influxdb_client/client/util/__init__.pyi b/stubs/influxdb-client/influxdb_client/client/util/__init__.pyi deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/stubs/influxdb-client/influxdb_client/client/util/date_utils.pyi b/stubs/influxdb-client/influxdb_client/client/util/date_utils.pyi deleted file mode 100644 index 0f5d8ef0cc88..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/util/date_utils.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from datetime import datetime, timedelta, tzinfo -from threading import Lock - -date_helper: DateHelper | None -lock_: Lock - -class DateHelper: - timezone: tzinfo - def __init__(self, timezone: tzinfo = ...) -> None: ... - # This returns None in the implementation, but a datetime-compatible - # object is monkey-patched in at runtime. - def parse_date(self, date_string: str) -> datetime: ... - def to_nanoseconds(self, delta: timedelta) -> int: ... - def to_utc(self, value: datetime) -> datetime: ... - -def get_date_helper() -> DateHelper: ... diff --git a/stubs/influxdb-client/influxdb_client/client/util/date_utils_pandas.pyi b/stubs/influxdb-client/influxdb_client/client/util/date_utils_pandas.pyi deleted file mode 100644 index 2cd7570ceebe..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/util/date_utils_pandas.pyi +++ /dev/null @@ -1,5 +0,0 @@ -from influxdb_client.client.util.date_utils import DateHelper - -class PandasDateTimeHelper(DateHelper): - def parse_date(self, date_string: str): ... - def to_nanoseconds(self, delta): ... diff --git a/stubs/influxdb-client/influxdb_client/client/util/helpers.pyi b/stubs/influxdb-client/influxdb_client/client/util/helpers.pyi deleted file mode 100644 index 15963b5abd92..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/util/helpers.pyi +++ /dev/null @@ -1,4 +0,0 @@ -from influxdb_client.client.influxdb_client import InfluxDBClient -from influxdb_client.domain.organization import Organization - -def get_org_query_param(org: Organization | str | None, client: InfluxDBClient, required_id: bool = False) -> str: ... diff --git a/stubs/influxdb-client/influxdb_client/client/util/multiprocessing_helper.pyi b/stubs/influxdb-client/influxdb_client/client/util/multiprocessing_helper.pyi deleted file mode 100644 index 8889a3817d8c..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/util/multiprocessing_helper.pyi +++ /dev/null @@ -1,25 +0,0 @@ -import multiprocessing -from _typeshed import Incomplete -from types import TracebackType - -logger: Incomplete - -class _PoisonPill: ... - -class MultiprocessingWriter(multiprocessing.Process): - __started__: bool - __disposed__: bool - kwargs: Incomplete - client: Incomplete - write_api: Incomplete - queue_: Incomplete - def __init__(self, **kwargs) -> None: ... - def write(self, **kwargs) -> None: ... - def run(self) -> None: ... - def start(self) -> None: ... - def terminate(self) -> None: ... - def __enter__(self): ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None - ) -> None: ... - def __del__(self) -> None: ... 
diff --git a/stubs/influxdb-client/influxdb_client/client/warnings.pyi b/stubs/influxdb-client/influxdb_client/client/warnings.pyi deleted file mode 100644 index e198f507aa08..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/warnings.pyi +++ /dev/null @@ -1,7 +0,0 @@ -class MissingPivotFunction(UserWarning): - @staticmethod - def print_warning(query: str): ... - -class CloudOnlyWarning(UserWarning): - @staticmethod - def print_warning(api_name: str, doc_url: str): ... diff --git a/stubs/influxdb-client/influxdb_client/client/write/__init__.pyi b/stubs/influxdb-client/influxdb_client/client/write/__init__.pyi deleted file mode 100644 index 4285a8aa3166..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/write/__init__.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from influxdb_client.service.authorizations_service import AuthorizationsService as AuthorizationsService -from influxdb_client.service.backup_service import BackupService as BackupService -from influxdb_client.service.bucket_schemas_service import BucketSchemasService as BucketSchemasService -from influxdb_client.service.buckets_service import BucketsService as BucketsService -from influxdb_client.service.cells_service import CellsService as CellsService -from influxdb_client.service.checks_service import ChecksService as ChecksService -from influxdb_client.service.config_service import ConfigService as ConfigService -from influxdb_client.service.dashboards_service import DashboardsService as DashboardsService -from influxdb_client.service.dbr_ps_service import DBRPsService as DBRPsService -from influxdb_client.service.delete_service import DeleteService as DeleteService -from influxdb_client.service.health_service import HealthService as HealthService -from influxdb_client.service.invokable_scripts_service import InvokableScriptsService as InvokableScriptsService -from influxdb_client.service.labels_service import LabelsService as LabelsService -from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService as LegacyAuthorizationsService -from influxdb_client.service.metrics_service import MetricsService as MetricsService -from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService as NotificationEndpointsService -from influxdb_client.service.notification_rules_service import NotificationRulesService as NotificationRulesService -from influxdb_client.service.organizations_service import OrganizationsService as OrganizationsService -from influxdb_client.service.ping_service import PingService as PingService -from influxdb_client.service.query_service import QueryService as QueryService -from influxdb_client.service.ready_service import ReadyService as ReadyService -from influxdb_client.service.remote_connections_service import RemoteConnectionsService as RemoteConnectionsService -from influxdb_client.service.replications_service import ReplicationsService as ReplicationsService -from influxdb_client.service.resources_service import ResourcesService as ResourcesService -from influxdb_client.service.restore_service import RestoreService as RestoreService -from influxdb_client.service.routes_service import RoutesService as RoutesService -from influxdb_client.service.rules_service import RulesService as RulesService -from influxdb_client.service.scraper_targets_service import ScraperTargetsService as ScraperTargetsService -from influxdb_client.service.secrets_service import SecretsService as SecretsService -from influxdb_client.service.setup_service 
import SetupService as SetupService -from influxdb_client.service.signin_service import SigninService as SigninService -from influxdb_client.service.signout_service import SignoutService as SignoutService -from influxdb_client.service.sources_service import SourcesService as SourcesService -from influxdb_client.service.tasks_service import TasksService as TasksService -from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService as TelegrafPluginsService -from influxdb_client.service.telegrafs_service import TelegrafsService as TelegrafsService -from influxdb_client.service.templates_service import TemplatesService as TemplatesService -from influxdb_client.service.users_service import UsersService as UsersService -from influxdb_client.service.variables_service import VariablesService as VariablesService -from influxdb_client.service.views_service import ViewsService as ViewsService -from influxdb_client.service.write_service import WriteService as WriteService diff --git a/stubs/influxdb-client/influxdb_client/client/write/dataframe_serializer.pyi b/stubs/influxdb-client/influxdb_client/client/write/dataframe_serializer.pyi deleted file mode 100644 index 9cdc625d74f4..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/write/dataframe_serializer.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -logger: Incomplete - -class DataframeSerializer: - data_frame: Incomplete - f: Incomplete - field_indexes: Incomplete - first_field_maybe_null: Incomplete - chunk_size: Incomplete - def __init__(self, data_frame, point_settings, precision="ns", chunk_size: int | None = None, **kwargs) -> None: ... - def serialize(self, chunk_idx: int | None = None): ... - def number_of_chunks(self): ... - -def data_frame_to_list_of_points(data_frame, point_settings, precision="ns", **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/client/write/point.pyi b/stubs/influxdb-client/influxdb_client/client/write/point.pyi deleted file mode 100644 index 38d7eef5866e..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/write/point.pyi +++ /dev/null @@ -1,40 +0,0 @@ -from _typeshed import Incomplete, SupportsContainsAndGetItem, SupportsItems -from collections.abc import Iterable -from datetime import datetime, timedelta -from numbers import Integral -from typing import Any, Literal -from typing_extensions import Self, TypeAlias - -from influxdb_client.domain.write_precision import _WritePrecision - -_Value: TypeAlias = Incomplete -_Time: TypeAlias = Integral | str | datetime | timedelta - -EPOCH: datetime -DEFAULT_WRITE_PRECISION: _WritePrecision - -class Point: - @staticmethod - def measurement(measurement: str) -> Point: ... - @staticmethod - def from_dict( - dictionary: SupportsContainsAndGetItem[str, Any], - write_precision: _WritePrecision = "ns", - *, - record_measurement_name: str | None = ..., - record_measurement_key: str = ..., - record_tag_keys: Iterable[str] | None = ..., - record_field_keys: Iterable[str] | None = ..., - record_time_key: str = ..., - fields: SupportsItems[str, Literal["int", "uint", "float"]] = ..., - ) -> Point: ... - def __init__(self, measurement_name: str) -> None: ... - def time(self, time: _Time, write_precision: _WritePrecision = "ns") -> Self: ... - def tag(self, key: str, value: _Value) -> Self: ... - def field(self, field: str, value: _Value) -> Self: ... - def to_line_protocol(self, precision: _WritePrecision | None = None) -> str: ... - @property - def write_precision(self) -> _WritePrecision: ... 
- @classmethod - def set_str_rep(cls, rep_function: Any) -> None: ... - def __eq__(self, other: object) -> bool: ... diff --git a/stubs/influxdb-client/influxdb_client/client/write/retry.pyi b/stubs/influxdb-client/influxdb_client/client/write/retry.pyi deleted file mode 100644 index 49848de59d85..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/write/retry.pyi +++ /dev/null @@ -1,40 +0,0 @@ -from _typeshed import Incomplete -from collections.abc import Callable - -from urllib3 import Retry - -logger: Incomplete - -class WritesRetry(Retry): - jitter_interval: Incomplete - total: Incomplete - retry_interval: Incomplete - max_retry_delay: Incomplete - max_retry_time: Incomplete - exponential_base: Incomplete - retry_timeout: Incomplete - retry_callback: Incomplete - def __init__( - self, - jitter_interval: int = 0, - max_retry_delay: int = 125, - exponential_base: int = 2, - max_retry_time: int = 180, - total: int = 5, - retry_interval: int = 5, - retry_callback: Callable[[Exception], int] | None = None, - **kw, - ) -> None: ... - def new(self, **kw): ... - def is_retry(self, method, status_code, has_retry_after: bool = False): ... - def get_backoff_time(self): ... - def get_retry_after(self, response): ... - def increment( - self, - method: Incomplete | None = None, - url: Incomplete | None = None, - response: Incomplete | None = None, - error: Incomplete | None = None, - _pool: Incomplete | None = None, - _stacktrace: Incomplete | None = None, - ): ... diff --git a/stubs/influxdb-client/influxdb_client/client/write_api.pyi b/stubs/influxdb-client/influxdb_client/client/write_api.pyi deleted file mode 100644 index a753472c320a..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/write_api.pyi +++ /dev/null @@ -1,112 +0,0 @@ -import logging -from _typeshed import Incomplete -from collections.abc import Iterable -from enum import Enum -from types import TracebackType -from typing import Any -from typing_extensions import Self, TypeAlias - -from influxdb_client.client._base import _BaseWriteApi -from influxdb_client.client.write.point import Point -from influxdb_client.domain.write_precision import _WritePrecision - -_DataClass: TypeAlias = Any # any dataclass -_NamedTuple: TypeAlias = tuple[Any, ...] # any NamedTuple -_Observable: TypeAlias = Any # reactivex.Observable - -logger: logging.Logger - -class WriteType(Enum): - batching = 1 - asynchronous = 2 - synchronous = 3 - -class WriteOptions: - write_type: WriteType - batch_size: int - flush_interval: int - jitter_interval: int - retry_interval: int - max_retries: int - max_retry_delay: int - max_retry_time: int - exponential_base: int - write_scheduler: Incomplete - max_close_wait: int - def __init__( - self, - write_type: WriteType = ..., - batch_size: int = 1_000, - flush_interval: int = 1_000, - jitter_interval: int = 0, - retry_interval: int = 5_000, - max_retries: int = 5, - max_retry_delay: int = 125_000, - max_retry_time: int = 180_000, - exponential_base: int = 2, - max_close_wait: int = 300_000, - write_scheduler=..., - ) -> None: ... - def to_retry_strategy(self, **kwargs): ... - -SYNCHRONOUS: Incomplete -ASYNCHRONOUS: Incomplete - -class PointSettings: - defaultTags: Incomplete - def __init__(self, **default_tags) -> None: ... - def add_default_tag(self, key, value) -> None: ... - -class _BatchItemKey: - bucket: Incomplete - org: Incomplete - precision: Incomplete - def __init__(self, bucket, org, precision="ns") -> None: ... - def __hash__(self) -> int: ... - def __eq__(self, o: object) -> bool: ... 
- -class _BatchItem: - key: Incomplete - data: Incomplete - size: Incomplete - def __init__(self, key: _BatchItemKey, data, size: int = 1) -> None: ... - def to_key_tuple(self) -> tuple[str, str, str]: ... - -class _BatchResponse: - data: Incomplete - exception: Incomplete - def __init__(self, data: _BatchItem, exception: Exception | None = None) -> None: ... - -class WriteApi(_BaseWriteApi): - def __init__( - self, influxdb_client, write_options: WriteOptions = ..., point_settings: PointSettings = ..., **kwargs - ) -> None: ... - def write( - self, - bucket: str, - org: str | None = None, - record: ( - str - | Iterable[str] - | Point - | Iterable[Point] - | dict[Incomplete, Incomplete] - | Iterable[dict[Incomplete, Incomplete]] - | bytes - | Iterable[bytes] - | _Observable - | _NamedTuple - | Iterable[_NamedTuple] - | _DataClass - | Iterable[_DataClass] - ) = None, - write_precision: _WritePrecision = "ns", - **kwargs, - ) -> Any: ... - def flush(self) -> None: ... - def close(self) -> None: ... - def __enter__(self) -> Self: ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> None: ... - def __del__(self) -> None: ... diff --git a/stubs/influxdb-client/influxdb_client/client/write_api_async.pyi b/stubs/influxdb-client/influxdb_client/client/write_api_async.pyi deleted file mode 100644 index 87a8d8b9dafa..000000000000 --- a/stubs/influxdb-client/influxdb_client/client/write_api_async.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete -from collections.abc import Iterable -from typing import Any -from typing_extensions import TypeAlias - -from influxdb_client.client._base import _BaseWriteApi -from influxdb_client.client.write.point import Point -from influxdb_client.client.write_api import PointSettings -from influxdb_client.domain.write_precision import _WritePrecision - -_DataClass: TypeAlias = Any # any dataclass -_NamedTuple: TypeAlias = tuple[Any, ...] # any NamedTuple - -logger: Incomplete - -class WriteApiAsync(_BaseWriteApi): - def __init__(self, influxdb_client, point_settings: PointSettings = ...) -> None: ... - async def write( - self, - bucket: str, - org: str | None = None, - record: ( - str - | Iterable[str] - | Point - | Iterable[Point] - | dict[Incomplete, Incomplete] - | Iterable[dict[Incomplete, Incomplete]] - | bytes - | Iterable[bytes] - | _NamedTuple - | Iterable[_NamedTuple] - | _DataClass - | Iterable[_DataClass] - ) = None, - write_precision: _WritePrecision = "ns", - **kwargs, - ) -> bool: ... diff --git a/stubs/influxdb-client/influxdb_client/configuration.pyi b/stubs/influxdb-client/influxdb_client/configuration.pyi deleted file mode 100644 index eb52aa69131e..000000000000 --- a/stubs/influxdb-client/influxdb_client/configuration.pyi +++ /dev/null @@ -1,50 +0,0 @@ -from _typeshed import Incomplete - -class TypeWithDefault(type): - def __init__(cls, name, bases, dct) -> None: ... - def __call__(cls): ... - def set_default(cls, default) -> None: ... 
- -class Configuration(metaclass=TypeWithDefault): - host: str - temp_folder_path: Incomplete - api_key: Incomplete - api_key_prefix: Incomplete - username: str - password: str - loggers: Incomplete - logger_stream_handler: Incomplete - logger_file_handler: Incomplete - verify_ssl: bool - ssl_ca_cert: Incomplete - cert_file: Incomplete - cert_key_file: Incomplete - cert_key_password: Incomplete - assert_hostname: Incomplete - ssl_context: Incomplete - connection_pool_maxsize: Incomplete - timeout: Incomplete - auth_basic: bool - proxy: Incomplete - proxy_headers: Incomplete - safe_chars_for_path_param: str - logger_formatter: Incomplete - def __init__(self) -> None: ... - @property - def logger_file(self): ... - @logger_file.setter - def logger_file(self, value) -> None: ... - @property - def debug(self): ... - @debug.setter - def debug(self, value): ... - @property - def logger_format(self): ... - @logger_format.setter - def logger_format(self, value) -> None: ... - def get_api_key_with_prefix(self, identifier): ... - def get_basic_auth_token(self): ... - def auth_settings(self): ... - def to_debug_report(self): ... - def update_request_header_params(self, path: str, params: dict[Incomplete, Incomplete]): ... - def update_request_body(self, path: str, body): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/__init__.pyi b/stubs/influxdb-client/influxdb_client/domain/__init__.pyi deleted file mode 100644 index def5bd65ac21..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/__init__.pyi +++ /dev/null @@ -1,373 +0,0 @@ -from influxdb_client.domain.add_resource_member_request_body import AddResourceMemberRequestBody as AddResourceMemberRequestBody -from influxdb_client.domain.analyze_query_response import AnalyzeQueryResponse as AnalyzeQueryResponse -from influxdb_client.domain.analyze_query_response_errors import AnalyzeQueryResponseErrors as AnalyzeQueryResponseErrors -from influxdb_client.domain.array_expression import ArrayExpression as ArrayExpression -from influxdb_client.domain.ast_response import ASTResponse as ASTResponse -from influxdb_client.domain.authorization import Authorization as Authorization -from influxdb_client.domain.authorization_post_request import AuthorizationPostRequest as AuthorizationPostRequest -from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest as AuthorizationUpdateRequest -from influxdb_client.domain.authorizations import Authorizations as Authorizations -from influxdb_client.domain.axes import Axes as Axes -from influxdb_client.domain.axis import Axis as Axis -from influxdb_client.domain.axis_scale import AxisScale as AxisScale -from influxdb_client.domain.bad_statement import BadStatement as BadStatement -from influxdb_client.domain.band_view_properties import BandViewProperties as BandViewProperties -from influxdb_client.domain.binary_expression import BinaryExpression as BinaryExpression -from influxdb_client.domain.block import Block as Block -from influxdb_client.domain.boolean_literal import BooleanLiteral as BooleanLiteral -from influxdb_client.domain.bucket import Bucket as Bucket -from influxdb_client.domain.bucket_links import BucketLinks as BucketLinks -from influxdb_client.domain.bucket_metadata_manifest import BucketMetadataManifest as BucketMetadataManifest -from influxdb_client.domain.bucket_retention_rules import BucketRetentionRules as BucketRetentionRules -from influxdb_client.domain.bucket_shard_mapping import BucketShardMapping as BucketShardMapping -from 
influxdb_client.domain.buckets import Buckets as Buckets -from influxdb_client.domain.builder_aggregate_function_type import BuilderAggregateFunctionType as BuilderAggregateFunctionType -from influxdb_client.domain.builder_config import BuilderConfig as BuilderConfig -from influxdb_client.domain.builder_config_aggregate_window import BuilderConfigAggregateWindow as BuilderConfigAggregateWindow -from influxdb_client.domain.builder_functions_type import BuilderFunctionsType as BuilderFunctionsType -from influxdb_client.domain.builder_tags_type import BuilderTagsType as BuilderTagsType -from influxdb_client.domain.builtin_statement import BuiltinStatement as BuiltinStatement -from influxdb_client.domain.call_expression import CallExpression as CallExpression -from influxdb_client.domain.cell import Cell as Cell -from influxdb_client.domain.cell_links import CellLinks as CellLinks -from influxdb_client.domain.cell_update import CellUpdate as CellUpdate -from influxdb_client.domain.cell_with_view_properties import CellWithViewProperties as CellWithViewProperties -from influxdb_client.domain.check import Check as Check -from influxdb_client.domain.check_base import CheckBase as CheckBase -from influxdb_client.domain.check_base_links import CheckBaseLinks as CheckBaseLinks -from influxdb_client.domain.check_discriminator import CheckDiscriminator as CheckDiscriminator -from influxdb_client.domain.check_patch import CheckPatch as CheckPatch -from influxdb_client.domain.check_status_level import CheckStatusLevel as CheckStatusLevel -from influxdb_client.domain.check_view_properties import CheckViewProperties as CheckViewProperties -from influxdb_client.domain.checks import Checks as Checks -from influxdb_client.domain.column_data_type import ColumnDataType as ColumnDataType -from influxdb_client.domain.column_semantic_type import ColumnSemanticType as ColumnSemanticType -from influxdb_client.domain.conditional_expression import ConditionalExpression as ConditionalExpression -from influxdb_client.domain.config import Config as Config -from influxdb_client.domain.constant_variable_properties import ConstantVariableProperties as ConstantVariableProperties -from influxdb_client.domain.create_cell import CreateCell as CreateCell -from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest as CreateDashboardRequest -from influxdb_client.domain.custom_check import CustomCheck as CustomCheck -from influxdb_client.domain.dashboard import Dashboard as Dashboard -from influxdb_client.domain.dashboard_color import DashboardColor as DashboardColor -from influxdb_client.domain.dashboard_query import DashboardQuery as DashboardQuery -from influxdb_client.domain.dashboard_with_view_properties import DashboardWithViewProperties as DashboardWithViewProperties -from influxdb_client.domain.dashboards import Dashboards as Dashboards -from influxdb_client.domain.date_time_literal import DateTimeLiteral as DateTimeLiteral -from influxdb_client.domain.dbr_ps import DBRPs as DBRPs -from influxdb_client.domain.dbrp import DBRP as DBRP -from influxdb_client.domain.dbrp_create import DBRPCreate as DBRPCreate -from influxdb_client.domain.dbrp_get import DBRPGet as DBRPGet -from influxdb_client.domain.dbrp_update import DBRPUpdate as DBRPUpdate -from influxdb_client.domain.deadman_check import DeadmanCheck as DeadmanCheck -from influxdb_client.domain.decimal_places import DecimalPlaces as DecimalPlaces -from influxdb_client.domain.delete_predicate_request import DeletePredicateRequest as 
DeletePredicateRequest -from influxdb_client.domain.dialect import Dialect as Dialect -from influxdb_client.domain.dict_expression import DictExpression as DictExpression -from influxdb_client.domain.dict_item import DictItem as DictItem -from influxdb_client.domain.duration import Duration as Duration -from influxdb_client.domain.duration_literal import DurationLiteral as DurationLiteral -from influxdb_client.domain.error import Error as Error -from influxdb_client.domain.expression import Expression as Expression -from influxdb_client.domain.expression_statement import ExpressionStatement as ExpressionStatement -from influxdb_client.domain.field import Field as Field -from influxdb_client.domain.file import File as File -from influxdb_client.domain.float_literal import FloatLiteral as FloatLiteral -from influxdb_client.domain.flux_response import FluxResponse as FluxResponse -from influxdb_client.domain.flux_suggestion import FluxSuggestion as FluxSuggestion -from influxdb_client.domain.flux_suggestions import FluxSuggestions as FluxSuggestions -from influxdb_client.domain.function_expression import FunctionExpression as FunctionExpression -from influxdb_client.domain.gauge_view_properties import GaugeViewProperties as GaugeViewProperties -from influxdb_client.domain.greater_threshold import GreaterThreshold as GreaterThreshold -from influxdb_client.domain.health_check import HealthCheck as HealthCheck -from influxdb_client.domain.heatmap_view_properties import HeatmapViewProperties as HeatmapViewProperties -from influxdb_client.domain.histogram_view_properties import HistogramViewProperties as HistogramViewProperties -from influxdb_client.domain.http_notification_endpoint import HTTPNotificationEndpoint as HTTPNotificationEndpoint -from influxdb_client.domain.http_notification_rule import HTTPNotificationRule as HTTPNotificationRule -from influxdb_client.domain.http_notification_rule_base import HTTPNotificationRuleBase as HTTPNotificationRuleBase -from influxdb_client.domain.identifier import Identifier as Identifier -from influxdb_client.domain.import_declaration import ImportDeclaration as ImportDeclaration -from influxdb_client.domain.index_expression import IndexExpression as IndexExpression -from influxdb_client.domain.integer_literal import IntegerLiteral as IntegerLiteral -from influxdb_client.domain.is_onboarding import IsOnboarding as IsOnboarding -from influxdb_client.domain.label import Label as Label -from influxdb_client.domain.label_create_request import LabelCreateRequest as LabelCreateRequest -from influxdb_client.domain.label_mapping import LabelMapping as LabelMapping -from influxdb_client.domain.label_response import LabelResponse as LabelResponse -from influxdb_client.domain.label_update import LabelUpdate as LabelUpdate -from influxdb_client.domain.labels_response import LabelsResponse as LabelsResponse -from influxdb_client.domain.language_request import LanguageRequest as LanguageRequest -from influxdb_client.domain.legacy_authorization_post_request import ( - LegacyAuthorizationPostRequest as LegacyAuthorizationPostRequest, -) -from influxdb_client.domain.lesser_threshold import LesserThreshold as LesserThreshold -from influxdb_client.domain.line_plus_single_stat_properties import LinePlusSingleStatProperties as LinePlusSingleStatProperties -from influxdb_client.domain.line_protocol_error import LineProtocolError as LineProtocolError -from influxdb_client.domain.line_protocol_length_error import LineProtocolLengthError as LineProtocolLengthError -from 
influxdb_client.domain.links import Links as Links -from influxdb_client.domain.list_stacks_response import ListStacksResponse as ListStacksResponse -from influxdb_client.domain.log_event import LogEvent as LogEvent -from influxdb_client.domain.logical_expression import LogicalExpression as LogicalExpression -from influxdb_client.domain.logs import Logs as Logs -from influxdb_client.domain.map_variable_properties import MapVariableProperties as MapVariableProperties -from influxdb_client.domain.markdown_view_properties import MarkdownViewProperties as MarkdownViewProperties -from influxdb_client.domain.measurement_schema import MeasurementSchema as MeasurementSchema -from influxdb_client.domain.measurement_schema_column import MeasurementSchemaColumn as MeasurementSchemaColumn -from influxdb_client.domain.measurement_schema_create_request import ( - MeasurementSchemaCreateRequest as MeasurementSchemaCreateRequest, -) -from influxdb_client.domain.measurement_schema_list import MeasurementSchemaList as MeasurementSchemaList -from influxdb_client.domain.measurement_schema_update_request import ( - MeasurementSchemaUpdateRequest as MeasurementSchemaUpdateRequest, -) -from influxdb_client.domain.member_assignment import MemberAssignment as MemberAssignment -from influxdb_client.domain.member_expression import MemberExpression as MemberExpression -from influxdb_client.domain.metadata_backup import MetadataBackup as MetadataBackup -from influxdb_client.domain.model_property import ModelProperty as ModelProperty -from influxdb_client.domain.mosaic_view_properties import MosaicViewProperties as MosaicViewProperties -from influxdb_client.domain.node import Node as Node -from influxdb_client.domain.notification_endpoint import NotificationEndpoint as NotificationEndpoint -from influxdb_client.domain.notification_endpoint_base import NotificationEndpointBase as NotificationEndpointBase -from influxdb_client.domain.notification_endpoint_base_links import NotificationEndpointBaseLinks as NotificationEndpointBaseLinks -from influxdb_client.domain.notification_endpoint_discriminator import ( - NotificationEndpointDiscriminator as NotificationEndpointDiscriminator, -) -from influxdb_client.domain.notification_endpoint_type import NotificationEndpointType as NotificationEndpointType -from influxdb_client.domain.notification_endpoint_update import NotificationEndpointUpdate as NotificationEndpointUpdate -from influxdb_client.domain.notification_endpoints import NotificationEndpoints as NotificationEndpoints -from influxdb_client.domain.notification_rule import NotificationRule as NotificationRule -from influxdb_client.domain.notification_rule_base import NotificationRuleBase as NotificationRuleBase -from influxdb_client.domain.notification_rule_base_links import NotificationRuleBaseLinks as NotificationRuleBaseLinks -from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator as NotificationRuleDiscriminator -from influxdb_client.domain.notification_rule_update import NotificationRuleUpdate as NotificationRuleUpdate -from influxdb_client.domain.notification_rules import NotificationRules as NotificationRules -from influxdb_client.domain.object_expression import ObjectExpression as ObjectExpression -from influxdb_client.domain.onboarding_request import OnboardingRequest as OnboardingRequest -from influxdb_client.domain.onboarding_response import OnboardingResponse as OnboardingResponse -from influxdb_client.domain.option_statement import OptionStatement as OptionStatement 
-from influxdb_client.domain.organization import Organization as Organization -from influxdb_client.domain.organization_links import OrganizationLinks as OrganizationLinks -from influxdb_client.domain.organizations import Organizations as Organizations -from influxdb_client.domain.package import Package as Package -from influxdb_client.domain.package_clause import PackageClause as PackageClause -from influxdb_client.domain.pager_duty_notification_endpoint import PagerDutyNotificationEndpoint as PagerDutyNotificationEndpoint -from influxdb_client.domain.pager_duty_notification_rule import PagerDutyNotificationRule as PagerDutyNotificationRule -from influxdb_client.domain.pager_duty_notification_rule_base import ( - PagerDutyNotificationRuleBase as PagerDutyNotificationRuleBase, -) -from influxdb_client.domain.paren_expression import ParenExpression as ParenExpression -from influxdb_client.domain.password_reset_body import PasswordResetBody as PasswordResetBody -from influxdb_client.domain.patch_bucket_request import PatchBucketRequest as PatchBucketRequest -from influxdb_client.domain.patch_dashboard_request import PatchDashboardRequest as PatchDashboardRequest -from influxdb_client.domain.patch_organization_request import PatchOrganizationRequest as PatchOrganizationRequest -from influxdb_client.domain.patch_retention_rule import PatchRetentionRule as PatchRetentionRule -from influxdb_client.domain.patch_stack_request import PatchStackRequest as PatchStackRequest -from influxdb_client.domain.patch_stack_request_additional_resources import ( - PatchStackRequestAdditionalResources as PatchStackRequestAdditionalResources, -) -from influxdb_client.domain.permission import Permission as Permission -from influxdb_client.domain.permission_resource import PermissionResource as PermissionResource -from influxdb_client.domain.pipe_expression import PipeExpression as PipeExpression -from influxdb_client.domain.pipe_literal import PipeLiteral as PipeLiteral -from influxdb_client.domain.post_bucket_request import PostBucketRequest as PostBucketRequest -from influxdb_client.domain.post_check import PostCheck as PostCheck -from influxdb_client.domain.post_notification_endpoint import PostNotificationEndpoint as PostNotificationEndpoint -from influxdb_client.domain.post_notification_rule import PostNotificationRule as PostNotificationRule -from influxdb_client.domain.post_organization_request import PostOrganizationRequest as PostOrganizationRequest -from influxdb_client.domain.post_restore_kv_response import PostRestoreKVResponse as PostRestoreKVResponse -from influxdb_client.domain.post_stack_request import PostStackRequest as PostStackRequest -from influxdb_client.domain.property_key import PropertyKey as PropertyKey -from influxdb_client.domain.query import Query as Query -from influxdb_client.domain.query_edit_mode import QueryEditMode as QueryEditMode -from influxdb_client.domain.query_variable_properties import QueryVariableProperties as QueryVariableProperties -from influxdb_client.domain.query_variable_properties_values import QueryVariablePropertiesValues as QueryVariablePropertiesValues -from influxdb_client.domain.range_threshold import RangeThreshold as RangeThreshold -from influxdb_client.domain.ready import Ready as Ready -from influxdb_client.domain.regexp_literal import RegexpLiteral as RegexpLiteral -from influxdb_client.domain.remote_connection import RemoteConnection as RemoteConnection -from influxdb_client.domain.remote_connection_creation_request import ( - 
RemoteConnectionCreationRequest as RemoteConnectionCreationRequest, -) -from influxdb_client.domain.remote_connection_update_request import RemoteConnectionUpdateRequest as RemoteConnectionUpdateRequest -from influxdb_client.domain.remote_connections import RemoteConnections as RemoteConnections -from influxdb_client.domain.renamable_field import RenamableField as RenamableField -from influxdb_client.domain.replication import Replication as Replication -from influxdb_client.domain.replication_creation_request import ReplicationCreationRequest as ReplicationCreationRequest -from influxdb_client.domain.replication_update_request import ReplicationUpdateRequest as ReplicationUpdateRequest -from influxdb_client.domain.replications import Replications as Replications -from influxdb_client.domain.resource_member import ResourceMember as ResourceMember -from influxdb_client.domain.resource_members import ResourceMembers as ResourceMembers -from influxdb_client.domain.resource_members_links import ResourceMembersLinks as ResourceMembersLinks -from influxdb_client.domain.resource_owner import ResourceOwner as ResourceOwner -from influxdb_client.domain.resource_owners import ResourceOwners as ResourceOwners -from influxdb_client.domain.restored_bucket_mappings import RestoredBucketMappings as RestoredBucketMappings -from influxdb_client.domain.retention_policy_manifest import RetentionPolicyManifest as RetentionPolicyManifest -from influxdb_client.domain.return_statement import ReturnStatement as ReturnStatement -from influxdb_client.domain.routes import Routes as Routes -from influxdb_client.domain.routes_external import RoutesExternal as RoutesExternal -from influxdb_client.domain.routes_query import RoutesQuery as RoutesQuery -from influxdb_client.domain.routes_system import RoutesSystem as RoutesSystem -from influxdb_client.domain.rule_status_level import RuleStatusLevel as RuleStatusLevel -from influxdb_client.domain.run import Run as Run -from influxdb_client.domain.run_links import RunLinks as RunLinks -from influxdb_client.domain.run_manually import RunManually as RunManually -from influxdb_client.domain.runs import Runs as Runs -from influxdb_client.domain.scatter_view_properties import ScatterViewProperties as ScatterViewProperties -from influxdb_client.domain.schema_type import SchemaType as SchemaType -from influxdb_client.domain.scraper_target_request import ScraperTargetRequest as ScraperTargetRequest -from influxdb_client.domain.scraper_target_response import ScraperTargetResponse as ScraperTargetResponse -from influxdb_client.domain.scraper_target_responses import ScraperTargetResponses as ScraperTargetResponses -from influxdb_client.domain.script import Script as Script -from influxdb_client.domain.script_create_request import ScriptCreateRequest as ScriptCreateRequest -from influxdb_client.domain.script_invocation_params import ScriptInvocationParams as ScriptInvocationParams -from influxdb_client.domain.script_language import ScriptLanguage as ScriptLanguage -from influxdb_client.domain.script_update_request import ScriptUpdateRequest as ScriptUpdateRequest -from influxdb_client.domain.scripts import Scripts as Scripts -from influxdb_client.domain.secret_keys import SecretKeys as SecretKeys -from influxdb_client.domain.secret_keys_response import SecretKeysResponse as SecretKeysResponse -from influxdb_client.domain.shard_group_manifest import ShardGroupManifest as ShardGroupManifest -from influxdb_client.domain.shard_manifest import ShardManifest as ShardManifest -from 
influxdb_client.domain.shard_owner import ShardOwner as ShardOwner -from influxdb_client.domain.simple_table_view_properties import SimpleTableViewProperties as SimpleTableViewProperties -from influxdb_client.domain.single_stat_view_properties import SingleStatViewProperties as SingleStatViewProperties -from influxdb_client.domain.slack_notification_endpoint import SlackNotificationEndpoint as SlackNotificationEndpoint -from influxdb_client.domain.slack_notification_rule import SlackNotificationRule as SlackNotificationRule -from influxdb_client.domain.slack_notification_rule_base import SlackNotificationRuleBase as SlackNotificationRuleBase -from influxdb_client.domain.smtp_notification_rule import SMTPNotificationRule as SMTPNotificationRule -from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase as SMTPNotificationRuleBase -from influxdb_client.domain.source import Source as Source -from influxdb_client.domain.source_links import SourceLinks as SourceLinks -from influxdb_client.domain.sources import Sources as Sources -from influxdb_client.domain.stack import Stack as Stack -from influxdb_client.domain.stack_associations import StackAssociations as StackAssociations -from influxdb_client.domain.stack_events import StackEvents as StackEvents -from influxdb_client.domain.stack_links import StackLinks as StackLinks -from influxdb_client.domain.stack_resources import StackResources as StackResources -from influxdb_client.domain.statement import Statement as Statement -from influxdb_client.domain.static_legend import StaticLegend as StaticLegend -from influxdb_client.domain.status_rule import StatusRule as StatusRule -from influxdb_client.domain.string_literal import StringLiteral as StringLiteral -from influxdb_client.domain.subscription_manifest import SubscriptionManifest as SubscriptionManifest -from influxdb_client.domain.table_view_properties import TableViewProperties as TableViewProperties -from influxdb_client.domain.table_view_properties_table_options import ( - TableViewPropertiesTableOptions as TableViewPropertiesTableOptions, -) -from influxdb_client.domain.tag_rule import TagRule as TagRule -from influxdb_client.domain.task import Task as Task -from influxdb_client.domain.task_create_request import TaskCreateRequest as TaskCreateRequest -from influxdb_client.domain.task_links import TaskLinks as TaskLinks -from influxdb_client.domain.task_status_type import TaskStatusType as TaskStatusType -from influxdb_client.domain.task_update_request import TaskUpdateRequest as TaskUpdateRequest -from influxdb_client.domain.tasks import Tasks as Tasks -from influxdb_client.domain.telegraf import Telegraf as Telegraf -from influxdb_client.domain.telegraf_plugin import TelegrafPlugin as TelegrafPlugin -from influxdb_client.domain.telegraf_plugin_request import TelegrafPluginRequest as TelegrafPluginRequest -from influxdb_client.domain.telegraf_plugin_request_plugins import TelegrafPluginRequestPlugins as TelegrafPluginRequestPlugins -from influxdb_client.domain.telegraf_plugins import TelegrafPlugins as TelegrafPlugins -from influxdb_client.domain.telegraf_request import TelegrafRequest as TelegrafRequest -from influxdb_client.domain.telegraf_request_metadata import TelegrafRequestMetadata as TelegrafRequestMetadata -from influxdb_client.domain.telegrafs import Telegrafs as Telegrafs -from influxdb_client.domain.telegram_notification_endpoint import TelegramNotificationEndpoint as TelegramNotificationEndpoint -from 
influxdb_client.domain.telegram_notification_rule import TelegramNotificationRule as TelegramNotificationRule -from influxdb_client.domain.telegram_notification_rule_base import TelegramNotificationRuleBase as TelegramNotificationRuleBase -from influxdb_client.domain.template_apply import TemplateApply as TemplateApply -from influxdb_client.domain.template_apply_remotes import TemplateApplyRemotes as TemplateApplyRemotes -from influxdb_client.domain.template_apply_template import TemplateApplyTemplate as TemplateApplyTemplate -from influxdb_client.domain.template_chart import TemplateChart as TemplateChart -from influxdb_client.domain.template_export_by_id import TemplateExportByID as TemplateExportByID -from influxdb_client.domain.template_export_by_id_org_ids import TemplateExportByIDOrgIDs as TemplateExportByIDOrgIDs -from influxdb_client.domain.template_export_by_id_resource_filters import ( - TemplateExportByIDResourceFilters as TemplateExportByIDResourceFilters, -) -from influxdb_client.domain.template_export_by_id_resources import TemplateExportByIDResources as TemplateExportByIDResources -from influxdb_client.domain.template_export_by_name import TemplateExportByName as TemplateExportByName -from influxdb_client.domain.template_export_by_name_resources import ( - TemplateExportByNameResources as TemplateExportByNameResources, -) -from influxdb_client.domain.template_kind import TemplateKind as TemplateKind -from influxdb_client.domain.template_summary import TemplateSummary as TemplateSummary -from influxdb_client.domain.template_summary_diff import TemplateSummaryDiff as TemplateSummaryDiff -from influxdb_client.domain.template_summary_diff_buckets import TemplateSummaryDiffBuckets as TemplateSummaryDiffBuckets -from influxdb_client.domain.template_summary_diff_buckets_new_old import ( - TemplateSummaryDiffBucketsNewOld as TemplateSummaryDiffBucketsNewOld, -) -from influxdb_client.domain.template_summary_diff_checks import TemplateSummaryDiffChecks as TemplateSummaryDiffChecks -from influxdb_client.domain.template_summary_diff_dashboards import TemplateSummaryDiffDashboards as TemplateSummaryDiffDashboards -from influxdb_client.domain.template_summary_diff_dashboards_new_old import ( - TemplateSummaryDiffDashboardsNewOld as TemplateSummaryDiffDashboardsNewOld, -) -from influxdb_client.domain.template_summary_diff_label_mappings import ( - TemplateSummaryDiffLabelMappings as TemplateSummaryDiffLabelMappings, -) -from influxdb_client.domain.template_summary_diff_labels import TemplateSummaryDiffLabels as TemplateSummaryDiffLabels -from influxdb_client.domain.template_summary_diff_labels_new_old import ( - TemplateSummaryDiffLabelsNewOld as TemplateSummaryDiffLabelsNewOld, -) -from influxdb_client.domain.template_summary_diff_notification_endpoints import ( - TemplateSummaryDiffNotificationEndpoints as TemplateSummaryDiffNotificationEndpoints, -) -from influxdb_client.domain.template_summary_diff_notification_rules import ( - TemplateSummaryDiffNotificationRules as TemplateSummaryDiffNotificationRules, -) -from influxdb_client.domain.template_summary_diff_notification_rules_new_old import ( - TemplateSummaryDiffNotificationRulesNewOld as TemplateSummaryDiffNotificationRulesNewOld, -) -from influxdb_client.domain.template_summary_diff_tasks import TemplateSummaryDiffTasks as TemplateSummaryDiffTasks -from influxdb_client.domain.template_summary_diff_tasks_new_old import ( - TemplateSummaryDiffTasksNewOld as TemplateSummaryDiffTasksNewOld, -) -from 
influxdb_client.domain.template_summary_diff_telegraf_configs import ( - TemplateSummaryDiffTelegrafConfigs as TemplateSummaryDiffTelegrafConfigs, -) -from influxdb_client.domain.template_summary_diff_variables import TemplateSummaryDiffVariables as TemplateSummaryDiffVariables -from influxdb_client.domain.template_summary_diff_variables_new_old import ( - TemplateSummaryDiffVariablesNewOld as TemplateSummaryDiffVariablesNewOld, -) -from influxdb_client.domain.template_summary_errors import TemplateSummaryErrors as TemplateSummaryErrors -from influxdb_client.domain.template_summary_label import TemplateSummaryLabel as TemplateSummaryLabel -from influxdb_client.domain.template_summary_label_properties import ( - TemplateSummaryLabelProperties as TemplateSummaryLabelProperties, -) -from influxdb_client.domain.template_summary_summary import TemplateSummarySummary as TemplateSummarySummary -from influxdb_client.domain.template_summary_summary_buckets import TemplateSummarySummaryBuckets as TemplateSummarySummaryBuckets -from influxdb_client.domain.template_summary_summary_dashboards import ( - TemplateSummarySummaryDashboards as TemplateSummarySummaryDashboards, -) -from influxdb_client.domain.template_summary_summary_label_mappings import ( - TemplateSummarySummaryLabelMappings as TemplateSummarySummaryLabelMappings, -) -from influxdb_client.domain.template_summary_summary_notification_rules import ( - TemplateSummarySummaryNotificationRules as TemplateSummarySummaryNotificationRules, -) -from influxdb_client.domain.template_summary_summary_status_rules import ( - TemplateSummarySummaryStatusRules as TemplateSummarySummaryStatusRules, -) -from influxdb_client.domain.template_summary_summary_tag_rules import ( - TemplateSummarySummaryTagRules as TemplateSummarySummaryTagRules, -) -from influxdb_client.domain.template_summary_summary_tasks import TemplateSummarySummaryTasks as TemplateSummarySummaryTasks -from influxdb_client.domain.template_summary_summary_variables import ( - TemplateSummarySummaryVariables as TemplateSummarySummaryVariables, -) -from influxdb_client.domain.test_statement import TestStatement as TestStatement -from influxdb_client.domain.threshold import Threshold as Threshold -from influxdb_client.domain.threshold_base import ThresholdBase as ThresholdBase -from influxdb_client.domain.threshold_check import ThresholdCheck as ThresholdCheck -from influxdb_client.domain.unary_expression import UnaryExpression as UnaryExpression -from influxdb_client.domain.unsigned_integer_literal import UnsignedIntegerLiteral as UnsignedIntegerLiteral -from influxdb_client.domain.user import User as User -from influxdb_client.domain.user_response import UserResponse as UserResponse -from influxdb_client.domain.user_response_links import UserResponseLinks as UserResponseLinks -from influxdb_client.domain.users import Users as Users -from influxdb_client.domain.variable import Variable as Variable -from influxdb_client.domain.variable_assignment import VariableAssignment as VariableAssignment -from influxdb_client.domain.variable_links import VariableLinks as VariableLinks -from influxdb_client.domain.variable_properties import VariableProperties as VariableProperties -from influxdb_client.domain.variables import Variables as Variables -from influxdb_client.domain.view import View as View -from influxdb_client.domain.view_links import ViewLinks as ViewLinks -from influxdb_client.domain.view_properties import ViewProperties as ViewProperties -from influxdb_client.domain.views import Views as 
Views -from influxdb_client.domain.write_precision import WritePrecision as WritePrecision -from influxdb_client.domain.xy_geom import XYGeom as XYGeom -from influxdb_client.domain.xy_view_properties import XYViewProperties as XYViewProperties diff --git a/stubs/influxdb-client/influxdb_client/domain/add_resource_member_request_body.pyi b/stubs/influxdb-client/influxdb_client/domain/add_resource_member_request_body.pyi deleted file mode 100644 index e46d43db2f91..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/add_resource_member_request_body.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class AddResourceMemberRequestBody: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, id: Incomplete | None = None, name: Incomplete | None = None) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/analyze_query_response.pyi b/stubs/influxdb-client/influxdb_client/domain/analyze_query_response.pyi deleted file mode 100644 index eea43aa2ec37..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/analyze_query_response.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class AnalyzeQueryResponse: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, errors: Incomplete | None = None) -> None: ... - @property - def errors(self): ... - @errors.setter - def errors(self, errors) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/analyze_query_response_errors.pyi b/stubs/influxdb-client/influxdb_client/domain/analyze_query_response_errors.pyi deleted file mode 100644 index 67962686dcd2..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/analyze_query_response_errors.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class AnalyzeQueryResponseErrors: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - line: Incomplete | None = None, - column: Incomplete | None = None, - character: Incomplete | None = None, - message: Incomplete | None = None, - ) -> None: ... - @property - def line(self): ... - @line.setter - def line(self, line) -> None: ... - @property - def column(self): ... - @column.setter - def column(self, column) -> None: ... - @property - def character(self): ... - @character.setter - def character(self, character) -> None: ... - @property - def message(self): ... - @message.setter - def message(self, message) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/array_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/array_expression.pyi deleted file mode 100644 index b552bb067a38..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/array_expression.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class ArrayExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, elements: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def elements(self): ... - @elements.setter - def elements(self, elements) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/ast_response.pyi b/stubs/influxdb-client/influxdb_client/domain/ast_response.pyi deleted file mode 100644 index faa12e7b7505..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/ast_response.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class ASTResponse: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, ast: Incomplete | None = None) -> None: ... - @property - def ast(self): ... - @ast.setter - def ast(self, ast) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/authorization.pyi b/stubs/influxdb-client/influxdb_client/domain/authorization.pyi deleted file mode 100644 index 571333ccad88..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/authorization.pyi +++ /dev/null @@ -1,67 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest - -class Authorization(AuthorizationUpdateRequest): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - org_id: Incomplete | None = None, - permissions: Incomplete | None = None, - id: Incomplete | None = None, - token: Incomplete | None = None, - user_id: Incomplete | None = None, - user: Incomplete | None = None, - org: Incomplete | None = None, - links: Incomplete | None = None, - status: str = "active", - description: Incomplete | None = None, - ) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def permissions(self): ... - @permissions.setter - def permissions(self, permissions) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def token(self): ... - @token.setter - def token(self, token) -> None: ... - @property - def user_id(self): ... - @user_id.setter - def user_id(self, user_id) -> None: ... - @property - def user(self): ... - @user.setter - def user(self, user) -> None: ... - @property - def org(self): ... - @org.setter - def org(self, org) -> None: ... - @property - def links(self): ... 
- @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/authorization_post_request.pyi b/stubs/influxdb-client/influxdb_client/domain/authorization_post_request.pyi deleted file mode 100644 index 360b6fa644e1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/authorization_post_request.pyi +++ /dev/null @@ -1,32 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest - -class AuthorizationPostRequest(AuthorizationUpdateRequest): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - org_id: Incomplete | None = None, - user_id: Incomplete | None = None, - permissions: Incomplete | None = None, - status: str = "active", - description: Incomplete | None = None, - ) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def user_id(self): ... - @user_id.setter - def user_id(self, user_id) -> None: ... - @property - def permissions(self): ... - @permissions.setter - def permissions(self, permissions) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/authorization_update_request.pyi b/stubs/influxdb-client/influxdb_client/domain/authorization_update_request.pyi deleted file mode 100644 index d8dfe9157156..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/authorization_update_request.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class AuthorizationUpdateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, status: str = "active", description: Incomplete | None = None) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/authorizations.pyi b/stubs/influxdb-client/influxdb_client/domain/authorizations.pyi deleted file mode 100644 index efb9ae966c93..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/authorizations.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Authorizations: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, authorizations: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def authorizations(self): ... - @authorizations.setter - def authorizations(self, authorizations) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/axes.pyi b/stubs/influxdb-client/influxdb_client/domain/axes.pyi
deleted file mode 100644
index 5efb4f00a4ea..000000000000
--- a/stubs/influxdb-client/influxdb_client/domain/axes.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-from _typeshed import Incomplete
-
-class Axes:
-    openapi_types: Incomplete
-    attribute_map: Incomplete
-    discriminator: Incomplete
-    def __init__(self, x: Incomplete | None = None, y: Incomplete | None = None) -> None: ...
-    @property
-    def x(self): ...
-    @x.setter
-    def x(self, x) -> None: ...
-    @property
-    def y(self): ...
-    @y.setter
-    def y(self, y) -> None: ...
-    def to_dict(self): ...
-    def to_str(self): ...
-    def __eq__(self, other): ...
-    def __ne__(self, other): ...
diff --git a/stubs/influxdb-client/influxdb_client/domain/axis.pyi b/stubs/influxdb-client/influxdb_client/domain/axis.pyi
deleted file mode 100644
index c47f8311b0cf..000000000000
--- a/stubs/influxdb-client/influxdb_client/domain/axis.pyi
+++ /dev/null
@@ -1,43 +0,0 @@
-from _typeshed import Incomplete
-
-class Axis:
-    openapi_types: Incomplete
-    attribute_map: Incomplete
-    discriminator: Incomplete
-    def __init__(
-        self,
-        bounds: Incomplete | None = None,
-        label: Incomplete | None = None,
-        prefix: Incomplete | None = None,
-        suffix: Incomplete | None = None,
-        base: Incomplete | None = None,
-        scale: Incomplete | None = None,
-    ) -> None: ...
-    @property
-    def bounds(self): ...
-    @bounds.setter
-    def bounds(self, bounds) -> None: ...
-    @property
-    def label(self): ...
-    @label.setter
-    def label(self, label) -> None: ...
-    @property
-    def prefix(self): ...
-    @prefix.setter
-    def prefix(self, prefix) -> None: ...
-    @property
-    def suffix(self): ...
-    @suffix.setter
-    def suffix(self, suffix) -> None: ...
-    @property
-    def base(self): ...
-    @base.setter
-    def base(self, base) -> None: ...
-    @property
-    def scale(self): ...
-    @scale.setter
-    def scale(self, scale) -> None: ...
-    def to_dict(self): ...
-    def to_str(self): ...
-    def __eq__(self, other): ...
-    def __ne__(self, other): ...
diff --git a/stubs/influxdb-client/influxdb_client/domain/axis_scale.pyi b/stubs/influxdb-client/influxdb_client/domain/axis_scale.pyi
deleted file mode 100644
index 765352aec473..000000000000
--- a/stubs/influxdb-client/influxdb_client/domain/axis_scale.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-from _typeshed import Incomplete
-
-class AxisScale:
-    LOG: str
-    LINEAR: str
-    openapi_types: Incomplete
-    attribute_map: Incomplete
-    def __init__(self) -> None: ...
-    def to_dict(self): ...
-    def to_str(self): ...
-    def __eq__(self, other): ...
-    def __ne__(self, other): ...
diff --git a/stubs/influxdb-client/influxdb_client/domain/bad_statement.pyi b/stubs/influxdb-client/influxdb_client/domain/bad_statement.pyi
deleted file mode 100644
index 6136b67f6598..000000000000
--- a/stubs/influxdb-client/influxdb_client/domain/bad_statement.pyi
+++ /dev/null
@@ -1,21 +0,0 @@
-from _typeshed import Incomplete
-
-from influxdb_client.domain.statement import Statement
-
-class BadStatement(Statement):
-    openapi_types: Incomplete
-    attribute_map: Incomplete
-    discriminator: Incomplete
-    def __init__(self, type: Incomplete | None = None, text: Incomplete | None = None) -> None: ...
-    @property
-    def type(self): ...
-    @type.setter
-    def type(self, type) -> None: ...
-    @property
-    def text(self): ...
-    @text.setter
-    def text(self, text) -> None: ...
-    def to_dict(self): ...
-    def to_str(self): ...
-    def __eq__(self, other): ...
-    def __ne__(self, other): ...
diff --git a/stubs/influxdb-client/influxdb_client/domain/band_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/band_view_properties.pyi deleted file mode 100644 index b55127a7fa7f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/band_view_properties.pyi +++ /dev/null @@ -1,157 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class BandViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - adaptive_zoom_hide: bool | None = None, - time_format: Incomplete | None = None, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - axes: Incomplete | None = None, - static_legend: Incomplete | None = None, - x_column: Incomplete | None = None, - generate_x_axis_ticks: Incomplete | None = None, - x_total_ticks: Incomplete | None = None, - x_tick_start: Incomplete | None = None, - x_tick_step: Incomplete | None = None, - y_column: Incomplete | None = None, - generate_y_axis_ticks: Incomplete | None = None, - y_total_ticks: Incomplete | None = None, - y_tick_start: Incomplete | None = None, - y_tick_step: Incomplete | None = None, - upper_column: Incomplete | None = None, - main_column: Incomplete | None = None, - lower_column: Incomplete | None = None, - hover_dimension: Incomplete | None = None, - geom: Incomplete | None = None, - legend_colorize_rows: Incomplete | None = None, - legend_hide: Incomplete | None = None, - legend_opacity: Incomplete | None = None, - legend_orientation_threshold: Incomplete | None = None, - ) -> None: ... - adaptive_zoom_hide: bool | None - @property - def time_format(self): ... - @time_format.setter - def time_format(self, time_format) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def axes(self): ... - @axes.setter - def axes(self, axes) -> None: ... - @property - def static_legend(self): ... - @static_legend.setter - def static_legend(self, static_legend) -> None: ... - @property - def x_column(self): ... - @x_column.setter - def x_column(self, x_column) -> None: ... - @property - def generate_x_axis_ticks(self): ... - @generate_x_axis_ticks.setter - def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... - @property - def x_total_ticks(self): ... - @x_total_ticks.setter - def x_total_ticks(self, x_total_ticks) -> None: ... - @property - def x_tick_start(self): ... - @x_tick_start.setter - def x_tick_start(self, x_tick_start) -> None: ... - @property - def x_tick_step(self): ... - @x_tick_step.setter - def x_tick_step(self, x_tick_step) -> None: ... - @property - def y_column(self): ... - @y_column.setter - def y_column(self, y_column) -> None: ... - @property - def generate_y_axis_ticks(self): ... 
- @generate_y_axis_ticks.setter - def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... - @property - def y_total_ticks(self): ... - @y_total_ticks.setter - def y_total_ticks(self, y_total_ticks) -> None: ... - @property - def y_tick_start(self): ... - @y_tick_start.setter - def y_tick_start(self, y_tick_start) -> None: ... - @property - def y_tick_step(self): ... - @y_tick_step.setter - def y_tick_step(self, y_tick_step) -> None: ... - @property - def upper_column(self): ... - @upper_column.setter - def upper_column(self, upper_column) -> None: ... - @property - def main_column(self): ... - @main_column.setter - def main_column(self, main_column) -> None: ... - @property - def lower_column(self): ... - @lower_column.setter - def lower_column(self, lower_column) -> None: ... - @property - def hover_dimension(self): ... - @hover_dimension.setter - def hover_dimension(self, hover_dimension) -> None: ... - @property - def geom(self): ... - @geom.setter - def geom(self, geom) -> None: ... - @property - def legend_colorize_rows(self): ... - @legend_colorize_rows.setter - def legend_colorize_rows(self, legend_colorize_rows) -> None: ... - @property - def legend_hide(self): ... - @legend_hide.setter - def legend_hide(self, legend_hide) -> None: ... - @property - def legend_opacity(self): ... - @legend_opacity.setter - def legend_opacity(self, legend_opacity) -> None: ... - @property - def legend_orientation_threshold(self): ... - @legend_orientation_threshold.setter - def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/binary_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/binary_expression.pyi deleted file mode 100644 index 6ddbcaef3093..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/binary_expression.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class BinaryExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - operator: Incomplete | None = None, - left: Incomplete | None = None, - right: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def operator(self): ... - @operator.setter - def operator(self, operator) -> None: ... - @property - def left(self): ... - @left.setter - def left(self, left) -> None: ... - @property - def right(self): ... - @right.setter - def right(self, right) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/block.pyi b/stubs/influxdb-client/influxdb_client/domain/block.pyi deleted file mode 100644 index 183aaefa4a2e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/block.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.node import Node - -class Block(Node): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, body: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def body(self): ... 
- @body.setter - def body(self, body) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/boolean_literal.pyi b/stubs/influxdb-client/influxdb_client/domain/boolean_literal.pyi deleted file mode 100644 index 6c76f89c5bef..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/boolean_literal.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class BooleanLiteral(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, value: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/bucket.pyi b/stubs/influxdb-client/influxdb_client/domain/bucket.pyi deleted file mode 100644 index fe7a86461d6c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/bucket.pyi +++ /dev/null @@ -1,73 +0,0 @@ -from _typeshed import Incomplete - -class Bucket: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - links: Incomplete | None = None, - id: str | None = None, - type: str = "user", - name: Incomplete | None = None, - description: Incomplete | None = None, - org_id: str | None = None, - rp: Incomplete | None = None, - schema_type: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - retention_rules: Incomplete | None = None, - labels: Incomplete | None = None, - ) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def id(self) -> str | None: ... - @id.setter - def id(self, id: str) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def org_id(self) -> str | None: ... - @org_id.setter - def org_id(self, org_id: str) -> None: ... - @property - def rp(self): ... - @rp.setter - def rp(self, rp) -> None: ... - @property - def schema_type(self): ... - @schema_type.setter - def schema_type(self, schema_type) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - @property - def retention_rules(self): ... - @retention_rules.setter - def retention_rules(self, retention_rules) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/bucket_links.pyi b/stubs/influxdb-client/influxdb_client/domain/bucket_links.pyi deleted file mode 100644 index afdd4c014434..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/bucket_links.pyi +++ /dev/null @@ -1,39 +0,0 @@ -from _typeshed import Incomplete - -class BucketLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - labels: Incomplete | None = None, - members: Incomplete | None = None, - org: Incomplete | None = None, - owners: Incomplete | None = None, - _self: Incomplete | None = None, - write: Incomplete | None = None, - ) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def members(self): ... - @members.setter - def members(self, members) -> None: ... - @property - def org(self): ... - @org.setter - def org(self, org) -> None: ... - @property - def owners(self): ... - @owners.setter - def owners(self, owners) -> None: ... - @property - def write(self): ... - @write.setter - def write(self, write) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/bucket_metadata_manifest.pyi b/stubs/influxdb-client/influxdb_client/domain/bucket_metadata_manifest.pyi deleted file mode 100644 index 4cf3cbb67acf..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/bucket_metadata_manifest.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class BucketMetadataManifest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - organization_id: Incomplete | None = None, - organization_name: Incomplete | None = None, - bucket_id: Incomplete | None = None, - bucket_name: Incomplete | None = None, - description: Incomplete | None = None, - default_retention_policy: Incomplete | None = None, - retention_policies: Incomplete | None = None, - ) -> None: ... - @property - def organization_id(self): ... - @organization_id.setter - def organization_id(self, organization_id) -> None: ... - @property - def organization_name(self): ... - @organization_name.setter - def organization_name(self, organization_name) -> None: ... - @property - def bucket_id(self): ... - @bucket_id.setter - def bucket_id(self, bucket_id) -> None: ... - @property - def bucket_name(self): ... - @bucket_name.setter - def bucket_name(self, bucket_name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def default_retention_policy(self): ... - @default_retention_policy.setter - def default_retention_policy(self, default_retention_policy) -> None: ... - @property - def retention_policies(self): ... - @retention_policies.setter - def retention_policies(self, retention_policies) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/bucket_retention_rules.pyi b/stubs/influxdb-client/influxdb_client/domain/bucket_retention_rules.pyi deleted file mode 100644 index d858aaf36dc9..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/bucket_retention_rules.pyi +++ /dev/null @@ -1,22 +0,0 @@ -from _typeshed import Incomplete - -class BucketRetentionRules: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: str = "expire", every_seconds: int = 2592000, shard_group_duration_seconds: int | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - every_seconds: int - @property - def shard_group_duration_seconds(self): ... - @shard_group_duration_seconds.setter - def shard_group_duration_seconds(self, shard_group_duration_seconds) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/bucket_shard_mapping.pyi b/stubs/influxdb-client/influxdb_client/domain/bucket_shard_mapping.pyi deleted file mode 100644 index 114d6b1cb9de..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/bucket_shard_mapping.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class BucketShardMapping: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, old_id: Incomplete | None = None, new_id: Incomplete | None = None) -> None: ... - @property - def old_id(self): ... - @old_id.setter - def old_id(self, old_id) -> None: ... - @property - def new_id(self): ... - @new_id.setter - def new_id(self, new_id) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/buckets.pyi b/stubs/influxdb-client/influxdb_client/domain/buckets.pyi deleted file mode 100644 index 2aba7eeb1608..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/buckets.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Buckets: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, buckets: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def buckets(self): ... - @buckets.setter - def buckets(self, buckets) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/builder_aggregate_function_type.pyi b/stubs/influxdb-client/influxdb_client/domain/builder_aggregate_function_type.pyi deleted file mode 100644 index 77ebfb9efcba..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/builder_aggregate_function_type.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from _typeshed import Incomplete - -class BuilderAggregateFunctionType: - FILTER: str - GROUP: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/builder_config.pyi b/stubs/influxdb-client/influxdb_client/domain/builder_config.pyi deleted file mode 100644 index 730091fb2a1f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/builder_config.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class BuilderConfig: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - buckets: Incomplete | None = None, - tags: Incomplete | None = None, - functions: Incomplete | None = None, - aggregate_window: Incomplete | None = None, - ) -> None: ... - @property - def buckets(self): ... - @buckets.setter - def buckets(self, buckets) -> None: ... - @property - def tags(self): ... - @tags.setter - def tags(self, tags) -> None: ... - @property - def functions(self): ... - @functions.setter - def functions(self, functions) -> None: ... - @property - def aggregate_window(self): ... - @aggregate_window.setter - def aggregate_window(self, aggregate_window) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/builder_config_aggregate_window.pyi b/stubs/influxdb-client/influxdb_client/domain/builder_config_aggregate_window.pyi deleted file mode 100644 index 88ec913deba0..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/builder_config_aggregate_window.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class BuilderConfigAggregateWindow: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, period: Incomplete | None = None, fill_values: Incomplete | None = None) -> None: ... - @property - def period(self): ... - @period.setter - def period(self, period) -> None: ... - @property - def fill_values(self): ... - @fill_values.setter - def fill_values(self, fill_values) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/builder_functions_type.pyi b/stubs/influxdb-client/influxdb_client/domain/builder_functions_type.pyi deleted file mode 100644 index 0ef39b154745..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/builder_functions_type.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class BuilderFunctionsType: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, name: Incomplete | None = None) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/builder_tags_type.pyi b/stubs/influxdb-client/influxdb_client/domain/builder_tags_type.pyi deleted file mode 100644 index 234c293082f7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/builder_tags_type.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class BuilderTagsType: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, key: Incomplete | None = None, values: Incomplete | None = None, aggregate_function_type: Incomplete | None = None - ) -> None: ... - @property - def key(self): ... - @key.setter - def key(self, key) -> None: ... 
- @property - def values(self): ... - @values.setter - def values(self, values) -> None: ... - @property - def aggregate_function_type(self): ... - @aggregate_function_type.setter - def aggregate_function_type(self, aggregate_function_type) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/builtin_statement.pyi b/stubs/influxdb-client/influxdb_client/domain/builtin_statement.pyi deleted file mode 100644 index 07acc096407b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/builtin_statement.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.statement import Statement - -class BuiltinStatement(Statement): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, id: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/call_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/call_expression.pyi deleted file mode 100644 index ac679fc0d4a8..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/call_expression.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class CallExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, callee: Incomplete | None = None, arguments: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def callee(self): ... - @callee.setter - def callee(self, callee) -> None: ... - @property - def arguments(self): ... - @arguments.setter - def arguments(self, arguments) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/cell.pyi b/stubs/influxdb-client/influxdb_client/domain/cell.pyi deleted file mode 100644 index ccb50b6d6ce2..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/cell.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class Cell: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - links: Incomplete | None = None, - x: Incomplete | None = None, - y: Incomplete | None = None, - w: Incomplete | None = None, - h: Incomplete | None = None, - view_id: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def x(self): ... - @x.setter - def x(self, x) -> None: ... - @property - def y(self): ... - @y.setter - def y(self, y) -> None: ... - @property - def w(self): ... - @w.setter - def w(self, w) -> None: ... - @property - def h(self): ... - @h.setter - def h(self, h) -> None: ... - @property - def view_id(self): ... - @view_id.setter - def view_id(self, view_id) -> None: ... 
- def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/cell_links.pyi b/stubs/influxdb-client/influxdb_client/domain/cell_links.pyi deleted file mode 100644 index 2b1d3dc8fbfd..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/cell_links.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class CellLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, _self: Incomplete | None = None, view: Incomplete | None = None) -> None: ... - @property - def view(self): ... - @view.setter - def view(self, view) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/cell_update.pyi b/stubs/influxdb-client/influxdb_client/domain/cell_update.pyi deleted file mode 100644 index 654997afa40b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/cell_update.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from _typeshed import Incomplete - -class CellUpdate: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, x: Incomplete | None = None, y: Incomplete | None = None, w: Incomplete | None = None, h: Incomplete | None = None - ) -> None: ... - @property - def x(self): ... - @x.setter - def x(self, x) -> None: ... - @property - def y(self): ... - @y.setter - def y(self, y) -> None: ... - @property - def w(self): ... - @w.setter - def w(self, w) -> None: ... - @property - def h(self): ... - @h.setter - def h(self, h) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/cell_with_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/cell_with_view_properties.pyi deleted file mode 100644 index cdb8d76b0ecd..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/cell_with_view_properties.pyi +++ /dev/null @@ -1,32 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.cell import Cell - -class CellWithViewProperties(Cell): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - properties: Incomplete | None = None, - id: Incomplete | None = None, - links: Incomplete | None = None, - x: Incomplete | None = None, - y: Incomplete | None = None, - w: Incomplete | None = None, - h: Incomplete | None = None, - view_id: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def properties(self): ... - @properties.setter - def properties(self, properties) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/check.pyi b/stubs/influxdb-client/influxdb_client/domain/check.pyi deleted file mode 100644 index a6d4dd4c9fb1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/check.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -class Check: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator_value_class_map: Incomplete - discriminator: str - def __init__(self, type: Incomplete | None = None) -> None: ... 
- @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def get_real_child_model(self, data): ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/check_base.pyi b/stubs/influxdb-client/influxdb_client/domain/check_base.pyi deleted file mode 100644 index c32c0550a093..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/check_base.pyi +++ /dev/null @@ -1,88 +0,0 @@ -from _typeshed import Incomplete - -class CheckBase: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - name: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - query: Incomplete | None = None, - status: Incomplete | None = None, - description: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def task_id(self): ... - @task_id.setter - def task_id(self, task_id) -> None: ... - @property - def owner_id(self): ... - @owner_id.setter - def owner_id(self, owner_id) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def latest_completed(self): ... - @latest_completed.setter - def latest_completed(self, latest_completed) -> None: ... - @property - def last_run_status(self): ... - @last_run_status.setter - def last_run_status(self, last_run_status) -> None: ... - @property - def last_run_error(self): ... - @last_run_error.setter - def last_run_error(self, last_run_error) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/check_base_links.pyi b/stubs/influxdb-client/influxdb_client/domain/check_base_links.pyi deleted file mode 100644 index aeae82955a39..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/check_base_links.pyi +++ /dev/null @@ -1,34 +0,0 @@ -from _typeshed import Incomplete - -class CheckBaseLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - _self: Incomplete | None = None, - labels: Incomplete | None = None, - members: Incomplete | None = None, - owners: Incomplete | None = None, - query: Incomplete | None = None, - ) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def members(self): ... - @members.setter - def members(self, members) -> None: ... - @property - def owners(self): ... - @owners.setter - def owners(self, owners) -> None: ... - @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/check_discriminator.pyi b/stubs/influxdb-client/influxdb_client/domain/check_discriminator.pyi deleted file mode 100644 index 7fe7f09b540f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/check_discriminator.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.check_base import CheckBase - -class CheckDiscriminator(CheckBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - name: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - query: Incomplete | None = None, - status: Incomplete | None = None, - description: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/check_patch.pyi b/stubs/influxdb-client/influxdb_client/domain/check_patch.pyi deleted file mode 100644 index 670855c00c36..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/check_patch.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class CheckPatch: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, description: Incomplete | None = None, status: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/check_status_level.pyi b/stubs/influxdb-client/influxdb_client/domain/check_status_level.pyi deleted file mode 100644 index 9dafd3a68c3f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/check_status_level.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class CheckStatusLevel: - UNKNOWN: str - OK: str - INFO: str - CRIT: str - WARN: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/check_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/check_view_properties.pyi deleted file mode 100644 index ddf7c7d5c514..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/check_view_properties.pyi +++ /dev/null @@ -1,67 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class CheckViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - adaptive_zoom_hide: bool | None = None, - type: Incomplete | None = None, - shape: Incomplete | None = None, - check_id: Incomplete | None = None, - check: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - legend_colorize_rows: Incomplete | None = None, - legend_hide: Incomplete | None = None, - legend_opacity: Incomplete | None = None, - legend_orientation_threshold: Incomplete | None = None, - ) -> None: ... - adaptive_zoom_hide: bool | None - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def check_id(self): ... - @check_id.setter - def check_id(self, check_id) -> None: ... - @property - def check(self): ... - @check.setter - def check(self, check) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def legend_colorize_rows(self): ... - @legend_colorize_rows.setter - def legend_colorize_rows(self, legend_colorize_rows) -> None: ... - @property - def legend_hide(self): ... - @legend_hide.setter - def legend_hide(self, legend_hide) -> None: ... - @property - def legend_opacity(self): ... - @legend_opacity.setter - def legend_opacity(self, legend_opacity) -> None: ... - @property - def legend_orientation_threshold(self): ... - @legend_orientation_threshold.setter - def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/checks.pyi b/stubs/influxdb-client/influxdb_client/domain/checks.pyi deleted file mode 100644 index 563fa0372ec8..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/checks.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Checks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, checks: Incomplete | None = None, links: Incomplete | None = None) -> None: ... - @property - def checks(self): ... 
- @checks.setter - def checks(self, checks) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/column_data_type.pyi b/stubs/influxdb-client/influxdb_client/domain/column_data_type.pyi deleted file mode 100644 index fd59dd0caf67..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/column_data_type.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class ColumnDataType: - INTEGER: str - FLOAT: str - BOOLEAN: str - STRING: str - UNSIGNED: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/column_semantic_type.pyi b/stubs/influxdb-client/influxdb_client/domain/column_semantic_type.pyi deleted file mode 100644 index feaa3a9e73f7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/column_semantic_type.pyi +++ /dev/null @@ -1,13 +0,0 @@ -from _typeshed import Incomplete - -class ColumnSemanticType: - TIMESTAMP: str - TAG: str - FIELD: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/conditional_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/conditional_expression.pyi deleted file mode 100644 index 30f04107e97c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/conditional_expression.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class ConditionalExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - test: Incomplete | None = None, - alternate: Incomplete | None = None, - consequent: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def test(self): ... - @test.setter - def test(self, test) -> None: ... - @property - def alternate(self): ... - @alternate.setter - def alternate(self, alternate) -> None: ... - @property - def consequent(self): ... - @consequent.setter - def consequent(self, consequent) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/config.pyi b/stubs/influxdb-client/influxdb_client/domain/config.pyi deleted file mode 100644 index bf02e6b829a9..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/config.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class Config: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, config: Incomplete | None = None) -> None: ... - @property - def config(self): ... - @config.setter - def config(self, config) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/constant_variable_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/constant_variable_properties.pyi deleted file mode 100644 index 9e077bb5900a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/constant_variable_properties.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.variable_properties import VariableProperties - -class ConstantVariableProperties(VariableProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, values: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def values(self): ... - @values.setter - def values(self, values) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/create_cell.pyi b/stubs/influxdb-client/influxdb_client/domain/create_cell.pyi deleted file mode 100644 index 1ff16d2d241e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/create_cell.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class CreateCell: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - x: Incomplete | None = None, - y: Incomplete | None = None, - w: Incomplete | None = None, - h: Incomplete | None = None, - using_view: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def x(self): ... - @x.setter - def x(self, x) -> None: ... - @property - def y(self): ... - @y.setter - def y(self, y) -> None: ... - @property - def w(self): ... - @w.setter - def w(self, w) -> None: ... - @property - def h(self): ... - @h.setter - def h(self, h) -> None: ... - @property - def using_view(self): ... - @using_view.setter - def using_view(self, using_view) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/create_dashboard_request.pyi b/stubs/influxdb-client/influxdb_client/domain/create_dashboard_request.pyi deleted file mode 100644 index 6e4349f7bab4..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/create_dashboard_request.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class CreateDashboardRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, org_id: Incomplete | None = None, name: Incomplete | None = None, description: Incomplete | None = None - ) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/custom_check.pyi b/stubs/influxdb-client/influxdb_client/domain/custom_check.pyi deleted file mode 100644 index 9dcb7281bfc4..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/custom_check.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.check_discriminator import CheckDiscriminator - -class CustomCheck(CheckDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "custom", - id: Incomplete | None = None, - name: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - query: Incomplete | None = None, - status: Incomplete | None = None, - description: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dashboard.pyi b/stubs/influxdb-client/influxdb_client/domain/dashboard.pyi deleted file mode 100644 index 1f3a1da4b49e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dashboard.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest - -class Dashboard(CreateDashboardRequest): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - links: Incomplete | None = None, - id: Incomplete | None = None, - meta: Incomplete | None = None, - cells: Incomplete | None = None, - labels: Incomplete | None = None, - org_id: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - ) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def meta(self): ... - @meta.setter - def meta(self, meta) -> None: ... - @property - def cells(self): ... - @cells.setter - def cells(self, cells) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dashboard_color.pyi b/stubs/influxdb-client/influxdb_client/domain/dashboard_color.pyi deleted file mode 100644 index 92d00a4798b7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dashboard_color.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class DashboardColor: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - type: Incomplete | None = None, - hex: Incomplete | None = None, - name: Incomplete | None = None, - value: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def type(self): ... 
- @type.setter - def type(self, type) -> None: ... - @property - def hex(self): ... - @hex.setter - def hex(self, hex) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dashboard_query.pyi b/stubs/influxdb-client/influxdb_client/domain/dashboard_query.pyi deleted file mode 100644 index a9d3507da6cf..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dashboard_query.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class DashboardQuery: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - text: Incomplete | None = None, - edit_mode: Incomplete | None = None, - name: Incomplete | None = None, - builder_config: Incomplete | None = None, - ) -> None: ... - @property - def text(self): ... - @text.setter - def text(self, text) -> None: ... - @property - def edit_mode(self): ... - @edit_mode.setter - def edit_mode(self, edit_mode) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def builder_config(self): ... - @builder_config.setter - def builder_config(self, builder_config) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dashboard_with_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/dashboard_with_view_properties.pyi deleted file mode 100644 index 56342a632f44..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dashboard_with_view_properties.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest - -class DashboardWithViewProperties(CreateDashboardRequest): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - links: Incomplete | None = None, - id: Incomplete | None = None, - meta: Incomplete | None = None, - cells: Incomplete | None = None, - labels: Incomplete | None = None, - org_id: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - ) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def meta(self): ... - @meta.setter - def meta(self, meta) -> None: ... - @property - def cells(self): ... - @cells.setter - def cells(self, cells) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/dashboards.pyi b/stubs/influxdb-client/influxdb_client/domain/dashboards.pyi deleted file mode 100644 index 0755b771a30e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dashboards.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Dashboards: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, dashboards: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def dashboards(self): ... - @dashboards.setter - def dashboards(self, dashboards) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/date_time_literal.pyi b/stubs/influxdb-client/influxdb_client/domain/date_time_literal.pyi deleted file mode 100644 index dd33297c3646..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/date_time_literal.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class DateTimeLiteral(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, value: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dbr_ps.pyi b/stubs/influxdb-client/influxdb_client/domain/dbr_ps.pyi deleted file mode 100644 index 94e8bbab18f7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dbr_ps.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class DBRPs: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, content: Incomplete | None = None) -> None: ... - @property - def content(self): ... - @content.setter - def content(self, content) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dbrp.pyi b/stubs/influxdb-client/influxdb_client/domain/dbrp.pyi deleted file mode 100644 index 2272a3cf1fd7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dbrp.pyi +++ /dev/null @@ -1,50 +0,0 @@ -from _typeshed import Incomplete - -class DBRP: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - bucket_id: Incomplete | None = None, - database: Incomplete | None = None, - retention_policy: Incomplete | None = None, - default: Incomplete | None = None, - virtual: bool | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def bucket_id(self): ... - @bucket_id.setter - def bucket_id(self, bucket_id) -> None: ... - @property - def database(self): ... 
- @database.setter - def database(self, database) -> None: ... - @property - def retention_policy(self): ... - @retention_policy.setter - def retention_policy(self, retention_policy) -> None: ... - @property - def default(self): ... - @default.setter - def default(self, default) -> None: ... - virtual: bool | None - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dbrp_create.pyi b/stubs/influxdb-client/influxdb_client/domain/dbrp_create.pyi deleted file mode 100644 index 890659261582..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dbrp_create.pyi +++ /dev/null @@ -1,37 +0,0 @@ -from _typeshed import Incomplete - -class DBRPCreate: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - org: str | None = None, - org_id: str | None = None, - bucket_id: Incomplete | None = None, - database: Incomplete | None = None, - retention_policy: Incomplete | None = None, - default: Incomplete | None = None, - ) -> None: ... - org: str | None - org_id: str | None - @property - def bucket_id(self): ... - @bucket_id.setter - def bucket_id(self, bucket_id) -> None: ... - @property - def database(self): ... - @database.setter - def database(self, database) -> None: ... - @property - def retention_policy(self): ... - @retention_policy.setter - def retention_policy(self, retention_policy) -> None: ... - @property - def default(self): ... - @default.setter - def default(self, default) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dbrp_get.pyi b/stubs/influxdb-client/influxdb_client/domain/dbrp_get.pyi deleted file mode 100644 index 1d82e5b2d0e3..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dbrp_get.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class DBRPGet: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, content: Incomplete | None = None) -> None: ... - @property - def content(self): ... - @content.setter - def content(self, content) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dbrp_update.pyi b/stubs/influxdb-client/influxdb_client/domain/dbrp_update.pyi deleted file mode 100644 index 66cf660e93a8..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dbrp_update.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class DBRPUpdate: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, retention_policy: Incomplete | None = None, default: Incomplete | None = None) -> None: ... - @property - def retention_policy(self): ... - @retention_policy.setter - def retention_policy(self, retention_policy) -> None: ... - @property - def default(self): ... - @default.setter - def default(self, default) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/deadman_check.pyi b/stubs/influxdb-client/influxdb_client/domain/deadman_check.pyi deleted file mode 100644 index c7633cd73f2f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/deadman_check.pyi +++ /dev/null @@ -1,75 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.check_discriminator import CheckDiscriminator - -class DeadmanCheck(CheckDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "deadman", - time_since: Incomplete | None = None, - stale_time: Incomplete | None = None, - report_zero: Incomplete | None = None, - level: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - tags: Incomplete | None = None, - status_message_template: Incomplete | None = None, - id: Incomplete | None = None, - name: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - query: Incomplete | None = None, - status: Incomplete | None = None, - description: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def time_since(self): ... - @time_since.setter - def time_since(self, time_since) -> None: ... - @property - def stale_time(self): ... - @stale_time.setter - def stale_time(self, stale_time) -> None: ... - @property - def report_zero(self): ... - @report_zero.setter - def report_zero(self, report_zero) -> None: ... - @property - def level(self): ... - @level.setter - def level(self, level) -> None: ... - @property - def every(self): ... - @every.setter - def every(self, every) -> None: ... - @property - def offset(self): ... - @offset.setter - def offset(self, offset) -> None: ... - @property - def tags(self): ... - @tags.setter - def tags(self, tags) -> None: ... - @property - def status_message_template(self): ... - @status_message_template.setter - def status_message_template(self, status_message_template) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/decimal_places.pyi b/stubs/influxdb-client/influxdb_client/domain/decimal_places.pyi deleted file mode 100644 index 7d8db236a662..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/decimal_places.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class DecimalPlaces: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, is_enforced: Incomplete | None = None, digits: Incomplete | None = None) -> None: ... - @property - def is_enforced(self): ... - @is_enforced.setter - def is_enforced(self, is_enforced) -> None: ... - @property - def digits(self): ... - @digits.setter - def digits(self, digits) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/delete_predicate_request.pyi b/stubs/influxdb-client/influxdb_client/domain/delete_predicate_request.pyi deleted file mode 100644 index 0c8cd263d2b6..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/delete_predicate_request.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class DeletePredicateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, start: Incomplete | None = None, stop: Incomplete | None = None, predicate: Incomplete | None = None - ) -> None: ... - @property - def start(self): ... - @start.setter - def start(self, start) -> None: ... - @property - def stop(self): ... - @stop.setter - def stop(self, stop) -> None: ... - @property - def predicate(self): ... - @predicate.setter - def predicate(self, predicate) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dialect.pyi b/stubs/influxdb-client/influxdb_client/domain/dialect.pyi deleted file mode 100644 index 0f33b944033c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dialect.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class Dialect: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - header: bool = True, - delimiter: str = ",", - annotations: Incomplete | None = None, - comment_prefix: str = "#", - date_time_format: str = "RFC3339", - ) -> None: ... - @property - def header(self): ... - @header.setter - def header(self, header) -> None: ... - @property - def delimiter(self): ... - @delimiter.setter - def delimiter(self, delimiter) -> None: ... - @property - def annotations(self): ... - @annotations.setter - def annotations(self, annotations) -> None: ... - @property - def comment_prefix(self): ... - @comment_prefix.setter - def comment_prefix(self, comment_prefix) -> None: ... - @property - def date_time_format(self): ... - @date_time_format.setter - def date_time_format(self, date_time_format) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/dict_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/dict_expression.pyi deleted file mode 100644 index ed47f1cfec4b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dict_expression.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class DictExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, elements: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def elements(self): ... - @elements.setter - def elements(self, elements) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/dict_item.pyi b/stubs/influxdb-client/influxdb_client/domain/dict_item.pyi deleted file mode 100644 index f9dc62b3dd23..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/dict_item.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from _typeshed import Incomplete - -class DictItem: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, key: Incomplete | None = None, val: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def key(self): ... - @key.setter - def key(self, key) -> None: ... - @property - def val(self): ... - @val.setter - def val(self, val) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/duration.pyi b/stubs/influxdb-client/influxdb_client/domain/duration.pyi deleted file mode 100644 index e6465c8c63bc..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/duration.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class Duration: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, magnitude: Incomplete | None = None, unit: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def magnitude(self): ... - @magnitude.setter - def magnitude(self, magnitude) -> None: ... - @property - def unit(self): ... - @unit.setter - def unit(self, unit) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/duration_literal.pyi b/stubs/influxdb-client/influxdb_client/domain/duration_literal.pyi deleted file mode 100644 index 4e6663161f8a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/duration_literal.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class DurationLiteral(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, values: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def values(self): ... - @values.setter - def values(self, values) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/error.pyi b/stubs/influxdb-client/influxdb_client/domain/error.pyi deleted file mode 100644 index bf554a22915f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/error.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class Error: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - code: Incomplete | None = None, - message: Incomplete | None = None, - op: Incomplete | None = None, - err: Incomplete | None = None, - ) -> None: ... - @property - def code(self): ... - @code.setter - def code(self, code) -> None: ... - @property - def message(self): ... - @message.setter - def message(self, message) -> None: ... 
- @property - def op(self): ... - @op.setter - def op(self, op) -> None: ... - @property - def err(self): ... - @err.setter - def err(self, err) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/expression.pyi b/stubs/influxdb-client/influxdb_client/domain/expression.pyi deleted file mode 100644 index 0add99c90ef7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/expression.pyi +++ /dev/null @@ -1,13 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.node import Node - -class Expression(Node): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/expression_statement.pyi b/stubs/influxdb-client/influxdb_client/domain/expression_statement.pyi deleted file mode 100644 index 1a3d31cc4403..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/expression_statement.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.statement import Statement - -class ExpressionStatement(Statement): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, expression: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def expression(self): ... - @expression.setter - def expression(self, expression) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/field.pyi b/stubs/influxdb-client/influxdb_client/domain/field.pyi deleted file mode 100644 index ae3b6c016c48..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/field.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class Field: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - value: Incomplete | None = None, - type: Incomplete | None = None, - alias: Incomplete | None = None, - args: Incomplete | None = None, - ) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def alias(self): ... - @alias.setter - def alias(self, alias) -> None: ... - @property - def args(self): ... - @args.setter - def args(self, args) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/file.pyi b/stubs/influxdb-client/influxdb_client/domain/file.pyi deleted file mode 100644 index bec8e115207a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/file.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class File: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - name: Incomplete | None = None, - package: Incomplete | None = None, - imports: Incomplete | None = None, - body: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... 
- @type.setter - def type(self, type) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def package(self): ... - @package.setter - def package(self, package) -> None: ... - @property - def imports(self): ... - @imports.setter - def imports(self, imports) -> None: ... - @property - def body(self): ... - @body.setter - def body(self, body) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/float_literal.pyi b/stubs/influxdb-client/influxdb_client/domain/float_literal.pyi deleted file mode 100644 index 70ea53c9bdc5..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/float_literal.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class FloatLiteral(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, value: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/flux_response.pyi b/stubs/influxdb-client/influxdb_client/domain/flux_response.pyi deleted file mode 100644 index 7eada764528f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/flux_response.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class FluxResponse: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, flux: Incomplete | None = None) -> None: ... - @property - def flux(self): ... - @flux.setter - def flux(self, flux) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/flux_suggestion.pyi b/stubs/influxdb-client/influxdb_client/domain/flux_suggestion.pyi deleted file mode 100644 index 9620dbd293ed..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/flux_suggestion.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class FluxSuggestion: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, name: Incomplete | None = None, params: Incomplete | None = None) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def params(self): ... - @params.setter - def params(self, params) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/flux_suggestions.pyi b/stubs/influxdb-client/influxdb_client/domain/flux_suggestions.pyi deleted file mode 100644 index 8b919b10b671..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/flux_suggestions.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class FluxSuggestions: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, funcs: Incomplete | None = None) -> None: ... - @property - def funcs(self): ... 
- @funcs.setter - def funcs(self, funcs) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/function_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/function_expression.pyi deleted file mode 100644 index 9b22d9d8e7e9..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/function_expression.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class FunctionExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, params: Incomplete | None = None, body: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def params(self): ... - @params.setter - def params(self, params) -> None: ... - @property - def body(self): ... - @body.setter - def body(self, body) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/gauge_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/gauge_view_properties.pyi deleted file mode 100644 index 68543b312b95..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/gauge_view_properties.pyi +++ /dev/null @@ -1,70 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class GaugeViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - prefix: Incomplete | None = None, - tick_prefix: Incomplete | None = None, - suffix: Incomplete | None = None, - tick_suffix: Incomplete | None = None, - decimal_places: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def prefix(self): ... - @prefix.setter - def prefix(self, prefix) -> None: ... - @property - def tick_prefix(self): ... - @tick_prefix.setter - def tick_prefix(self, tick_prefix) -> None: ... - @property - def suffix(self): ... - @suffix.setter - def suffix(self, suffix) -> None: ... - @property - def tick_suffix(self): ... - @tick_suffix.setter - def tick_suffix(self, tick_suffix) -> None: ... - @property - def decimal_places(self): ... - @decimal_places.setter - def decimal_places(self, decimal_places) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/greater_threshold.pyi b/stubs/influxdb-client/influxdb_client/domain/greater_threshold.pyi deleted file mode 100644 index 2a134031aafa..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/greater_threshold.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.threshold_base import ThresholdBase - -class GreaterThreshold(ThresholdBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "greater", - value: Incomplete | None = None, - level: Incomplete | None = None, - all_values: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/health_check.pyi b/stubs/influxdb-client/influxdb_client/domain/health_check.pyi deleted file mode 100644 index 94f4b242dcac..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/health_check.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class HealthCheck: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - message: Incomplete | None = None, - checks: Incomplete | None = None, - status: Incomplete | None = None, - version: Incomplete | None = None, - commit: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def message(self): ... - @message.setter - def message(self, message) -> None: ... - @property - def checks(self): ... - @checks.setter - def checks(self, checks) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def version(self): ... - @version.setter - def version(self, version) -> None: ... - @property - def commit(self): ... - @commit.setter - def commit(self, commit) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/heatmap_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/heatmap_view_properties.pyi deleted file mode 100644 index 3a2a4315d39c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/heatmap_view_properties.pyi +++ /dev/null @@ -1,167 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class HeatmapViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - adaptive_zoom_hide: bool | None = None, - time_format: Incomplete | None = None, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - x_column: Incomplete | None = None, - generate_x_axis_ticks: Incomplete | None = None, - x_total_ticks: Incomplete | None = None, - x_tick_start: Incomplete | None = None, - x_tick_step: Incomplete | None = None, - y_column: Incomplete | None = None, - generate_y_axis_ticks: Incomplete | None = None, - y_total_ticks: Incomplete | None = None, - y_tick_start: Incomplete | None = None, - y_tick_step: Incomplete | None = None, - x_domain: Incomplete | None = None, - y_domain: Incomplete | None = None, - x_axis_label: Incomplete | None = None, - y_axis_label: Incomplete | None = None, - x_prefix: Incomplete | None = None, - x_suffix: Incomplete | None = None, - y_prefix: Incomplete | None = None, - y_suffix: Incomplete | None = None, - bin_size: Incomplete | None = None, - legend_colorize_rows: Incomplete | None = None, - legend_hide: Incomplete | None = None, - legend_opacity: Incomplete | None = None, - legend_orientation_threshold: Incomplete | None = None, - ) -> None: ... - adaptive_zoom_hide: bool | None - @property - def time_format(self): ... - @time_format.setter - def time_format(self, time_format) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def x_column(self): ... - @x_column.setter - def x_column(self, x_column) -> None: ... - @property - def generate_x_axis_ticks(self): ... - @generate_x_axis_ticks.setter - def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... - @property - def x_total_ticks(self): ... - @x_total_ticks.setter - def x_total_ticks(self, x_total_ticks) -> None: ... - @property - def x_tick_start(self): ... - @x_tick_start.setter - def x_tick_start(self, x_tick_start) -> None: ... - @property - def x_tick_step(self): ... - @x_tick_step.setter - def x_tick_step(self, x_tick_step) -> None: ... - @property - def y_column(self): ... - @y_column.setter - def y_column(self, y_column) -> None: ... - @property - def generate_y_axis_ticks(self): ... - @generate_y_axis_ticks.setter - def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... - @property - def y_total_ticks(self): ... 
- @y_total_ticks.setter - def y_total_ticks(self, y_total_ticks) -> None: ... - @property - def y_tick_start(self): ... - @y_tick_start.setter - def y_tick_start(self, y_tick_start) -> None: ... - @property - def y_tick_step(self): ... - @y_tick_step.setter - def y_tick_step(self, y_tick_step) -> None: ... - @property - def x_domain(self): ... - @x_domain.setter - def x_domain(self, x_domain) -> None: ... - @property - def y_domain(self): ... - @y_domain.setter - def y_domain(self, y_domain) -> None: ... - @property - def x_axis_label(self): ... - @x_axis_label.setter - def x_axis_label(self, x_axis_label) -> None: ... - @property - def y_axis_label(self): ... - @y_axis_label.setter - def y_axis_label(self, y_axis_label) -> None: ... - @property - def x_prefix(self): ... - @x_prefix.setter - def x_prefix(self, x_prefix) -> None: ... - @property - def x_suffix(self): ... - @x_suffix.setter - def x_suffix(self, x_suffix) -> None: ... - @property - def y_prefix(self): ... - @y_prefix.setter - def y_prefix(self, y_prefix) -> None: ... - @property - def y_suffix(self): ... - @y_suffix.setter - def y_suffix(self, y_suffix) -> None: ... - @property - def bin_size(self): ... - @bin_size.setter - def bin_size(self, bin_size) -> None: ... - @property - def legend_colorize_rows(self): ... - @legend_colorize_rows.setter - def legend_colorize_rows(self, legend_colorize_rows) -> None: ... - @property - def legend_hide(self): ... - @legend_hide.setter - def legend_hide(self, legend_hide) -> None: ... - @property - def legend_opacity(self): ... - @legend_opacity.setter - def legend_opacity(self, legend_opacity) -> None: ... - @property - def legend_orientation_threshold(self): ... - @legend_orientation_threshold.setter - def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/histogram_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/histogram_view_properties.pyi deleted file mode 100644 index 7c789027120e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/histogram_view_properties.pyi +++ /dev/null @@ -1,95 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class HistogramViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - x_column: Incomplete | None = None, - fill_columns: Incomplete | None = None, - x_domain: Incomplete | None = None, - x_axis_label: Incomplete | None = None, - position: Incomplete | None = None, - bin_count: Incomplete | None = None, - legend_colorize_rows: Incomplete | None = None, - legend_hide: Incomplete | None = None, - legend_opacity: Incomplete | None = None, - legend_orientation_threshold: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def shape(self): ... 
- @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def x_column(self): ... - @x_column.setter - def x_column(self, x_column) -> None: ... - @property - def fill_columns(self): ... - @fill_columns.setter - def fill_columns(self, fill_columns) -> None: ... - @property - def x_domain(self): ... - @x_domain.setter - def x_domain(self, x_domain) -> None: ... - @property - def x_axis_label(self): ... - @x_axis_label.setter - def x_axis_label(self, x_axis_label) -> None: ... - @property - def position(self): ... - @position.setter - def position(self, position) -> None: ... - @property - def bin_count(self): ... - @bin_count.setter - def bin_count(self, bin_count) -> None: ... - @property - def legend_colorize_rows(self): ... - @legend_colorize_rows.setter - def legend_colorize_rows(self, legend_colorize_rows) -> None: ... - @property - def legend_hide(self): ... - @legend_hide.setter - def legend_hide(self, legend_hide) -> None: ... - @property - def legend_opacity(self): ... - @legend_opacity.setter - def legend_opacity(self, legend_opacity) -> None: ... - @property - def legend_orientation_threshold(self): ... - @legend_orientation_threshold.setter - def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/http_notification_endpoint.pyi b/stubs/influxdb-client/influxdb_client/domain/http_notification_endpoint.pyi deleted file mode 100644 index 0b63747d2f79..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/http_notification_endpoint.pyi +++ /dev/null @@ -1,66 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator - -class HTTPNotificationEndpoint(NotificationEndpointDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - url: Incomplete | None = None, - username: Incomplete | None = None, - password: Incomplete | None = None, - token: Incomplete | None = None, - method: Incomplete | None = None, - auth_method: Incomplete | None = None, - content_template: Incomplete | None = None, - headers: Incomplete | None = None, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - user_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - description: Incomplete | None = None, - name: Incomplete | None = None, - status: str = "active", - labels: Incomplete | None = None, - links: Incomplete | None = None, - type: str = "http", - ) -> None: ... - @property - def url(self): ... - @url.setter - def url(self, url) -> None: ... - @property - def username(self): ... - @username.setter - def username(self, username) -> None: ... - @property - def password(self): ... - @password.setter - def password(self, password) -> None: ... - @property - def token(self): ... - @token.setter - def token(self, token) -> None: ... - @property - def method(self): ... - @method.setter - def method(self, method) -> None: ... - @property - def auth_method(self): ... - @auth_method.setter - def auth_method(self, auth_method) -> None: ... 
- @property - def content_template(self): ... - @content_template.setter - def content_template(self, content_template) -> None: ... - @property - def headers(self): ... - @headers.setter - def headers(self, headers) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/http_notification_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/http_notification_rule.pyi deleted file mode 100644 index c2af5dd93da1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/http_notification_rule.pyi +++ /dev/null @@ -1,40 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.http_notification_rule_base import HTTPNotificationRuleBase - -class HTTPNotificationRule(HTTPNotificationRuleBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "http", - url: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/http_notification_rule_base.pyi b/stubs/influxdb-client/influxdb_client/domain/http_notification_rule_base.pyi deleted file mode 100644 index cdbb00b1b10e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/http_notification_rule_base.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator - -class HTTPNotificationRuleBase(NotificationRuleDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - url: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def url(self): ... - @url.setter - def url(self, url) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/identifier.pyi b/stubs/influxdb-client/influxdb_client/domain/identifier.pyi deleted file mode 100644 index e06aa053febe..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/identifier.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.property_key import PropertyKey - -class Identifier(PropertyKey): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, name: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/import_declaration.pyi b/stubs/influxdb-client/influxdb_client/domain/import_declaration.pyi deleted file mode 100644 index 559312f7d5ec..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/import_declaration.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class ImportDeclaration: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, _as: Incomplete | None = None, path: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def path(self): ... - @path.setter - def path(self, path) -> None: ... 
- def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/index_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/index_expression.pyi deleted file mode 100644 index 0109285257c4..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/index_expression.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class IndexExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, array: Incomplete | None = None, index: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def array(self): ... - @array.setter - def array(self, array) -> None: ... - @property - def index(self): ... - @index.setter - def index(self, index) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/integer_literal.pyi b/stubs/influxdb-client/influxdb_client/domain/integer_literal.pyi deleted file mode 100644 index a20edb2cdaff..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/integer_literal.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class IntegerLiteral(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, value: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/is_onboarding.pyi b/stubs/influxdb-client/influxdb_client/domain/is_onboarding.pyi deleted file mode 100644 index 3425eaca5105..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/is_onboarding.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class IsOnboarding: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, allowed: Incomplete | None = None) -> None: ... - @property - def allowed(self): ... - @allowed.setter - def allowed(self, allowed) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/label.pyi b/stubs/influxdb-client/influxdb_client/domain/label.pyi deleted file mode 100644 index 4fb33f0f9808..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/label.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class Label: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - name: Incomplete | None = None, - properties: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... 
- @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def properties(self): ... - @properties.setter - def properties(self, properties) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/label_create_request.pyi b/stubs/influxdb-client/influxdb_client/domain/label_create_request.pyi deleted file mode 100644 index c79bab0073f5..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/label_create_request.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class LabelCreateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, org_id: Incomplete | None = None, name: Incomplete | None = None, properties: Incomplete | None = None - ) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def properties(self): ... - @properties.setter - def properties(self, properties) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/label_mapping.pyi b/stubs/influxdb-client/influxdb_client/domain/label_mapping.pyi deleted file mode 100644 index 9a0b1239dde9..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/label_mapping.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class LabelMapping: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, label_id: Incomplete | None = None) -> None: ... - @property - def label_id(self): ... - @label_id.setter - def label_id(self, label_id) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/label_response.pyi b/stubs/influxdb-client/influxdb_client/domain/label_response.pyi deleted file mode 100644 index 370bbfbdaee6..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/label_response.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class LabelResponse: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, label: Incomplete | None = None, links: Incomplete | None = None) -> None: ... - @property - def label(self): ... - @label.setter - def label(self, label) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/label_update.pyi b/stubs/influxdb-client/influxdb_client/domain/label_update.pyi deleted file mode 100644 index 05b0abcc4d02..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/label_update.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class LabelUpdate: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, name: Incomplete | None = None, properties: Incomplete | None = None) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def properties(self): ... 
- @properties.setter - def properties(self, properties) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/labels_response.pyi b/stubs/influxdb-client/influxdb_client/domain/labels_response.pyi deleted file mode 100644 index 83f3f38c1e4a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/labels_response.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class LabelsResponse: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, labels: Incomplete | None = None, links: Incomplete | None = None) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/language_request.pyi b/stubs/influxdb-client/influxdb_client/domain/language_request.pyi deleted file mode 100644 index 00aaedb104d5..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/language_request.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class LanguageRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, query: Incomplete | None = None) -> None: ... - @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/legacy_authorization_post_request.pyi b/stubs/influxdb-client/influxdb_client/domain/legacy_authorization_post_request.pyi deleted file mode 100644 index e874a1c195f1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/legacy_authorization_post_request.pyi +++ /dev/null @@ -1,37 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest - -class LegacyAuthorizationPostRequest(AuthorizationUpdateRequest): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - org_id: Incomplete | None = None, - user_id: Incomplete | None = None, - token: Incomplete | None = None, - permissions: Incomplete | None = None, - status: str = "active", - description: Incomplete | None = None, - ) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def user_id(self): ... - @user_id.setter - def user_id(self, user_id) -> None: ... - @property - def token(self): ... - @token.setter - def token(self, token) -> None: ... - @property - def permissions(self): ... - @permissions.setter - def permissions(self, permissions) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/lesser_threshold.pyi b/stubs/influxdb-client/influxdb_client/domain/lesser_threshold.pyi deleted file mode 100644 index f427d56167be..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/lesser_threshold.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.threshold_base import ThresholdBase - -class LesserThreshold(ThresholdBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "lesser", - value: Incomplete | None = None, - level: Incomplete | None = None, - all_values: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/line_plus_single_stat_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/line_plus_single_stat_properties.pyi deleted file mode 100644 index 2a0f47f7057f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/line_plus_single_stat_properties.pyi +++ /dev/null @@ -1,162 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class LinePlusSingleStatProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - adaptive_zoom_hide: bool | None = None, - time_format: Incomplete | None = None, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - axes: Incomplete | None = None, - static_legend: Incomplete | None = None, - x_column: Incomplete | None = None, - generate_x_axis_ticks: Incomplete | None = None, - x_total_ticks: Incomplete | None = None, - x_tick_start: Incomplete | None = None, - x_tick_step: Incomplete | None = None, - y_column: Incomplete | None = None, - generate_y_axis_ticks: Incomplete | None = None, - y_total_ticks: Incomplete | None = None, - y_tick_start: Incomplete | None = None, - y_tick_step: Incomplete | None = None, - shade_below: Incomplete | None = None, - hover_dimension: Incomplete | None = None, - position: Incomplete | None = None, - prefix: Incomplete | None = None, - suffix: Incomplete | None = None, - decimal_places: Incomplete | None = None, - legend_colorize_rows: Incomplete | None = None, - legend_hide: Incomplete | None = None, - legend_opacity: Incomplete | None = None, - legend_orientation_threshold: Incomplete | None = None, - ) -> None: ... - adaptive_zoom_hide: bool | None - @property - def time_format(self): ... - @time_format.setter - def time_format(self, time_format) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... 
- @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def axes(self): ... - @axes.setter - def axes(self, axes) -> None: ... - @property - def static_legend(self): ... - @static_legend.setter - def static_legend(self, static_legend) -> None: ... - @property - def x_column(self): ... - @x_column.setter - def x_column(self, x_column) -> None: ... - @property - def generate_x_axis_ticks(self): ... - @generate_x_axis_ticks.setter - def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... - @property - def x_total_ticks(self): ... - @x_total_ticks.setter - def x_total_ticks(self, x_total_ticks) -> None: ... - @property - def x_tick_start(self): ... - @x_tick_start.setter - def x_tick_start(self, x_tick_start) -> None: ... - @property - def x_tick_step(self): ... - @x_tick_step.setter - def x_tick_step(self, x_tick_step) -> None: ... - @property - def y_column(self): ... - @y_column.setter - def y_column(self, y_column) -> None: ... - @property - def generate_y_axis_ticks(self): ... - @generate_y_axis_ticks.setter - def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... - @property - def y_total_ticks(self): ... - @y_total_ticks.setter - def y_total_ticks(self, y_total_ticks) -> None: ... - @property - def y_tick_start(self): ... - @y_tick_start.setter - def y_tick_start(self, y_tick_start) -> None: ... - @property - def y_tick_step(self): ... - @y_tick_step.setter - def y_tick_step(self, y_tick_step) -> None: ... - @property - def shade_below(self): ... - @shade_below.setter - def shade_below(self, shade_below) -> None: ... - @property - def hover_dimension(self): ... - @hover_dimension.setter - def hover_dimension(self, hover_dimension) -> None: ... - @property - def position(self): ... - @position.setter - def position(self, position) -> None: ... - @property - def prefix(self): ... - @prefix.setter - def prefix(self, prefix) -> None: ... - @property - def suffix(self): ... - @suffix.setter - def suffix(self, suffix) -> None: ... - @property - def decimal_places(self): ... - @decimal_places.setter - def decimal_places(self, decimal_places) -> None: ... - @property - def legend_colorize_rows(self): ... - @legend_colorize_rows.setter - def legend_colorize_rows(self, legend_colorize_rows) -> None: ... - @property - def legend_hide(self): ... - @legend_hide.setter - def legend_hide(self, legend_hide) -> None: ... - @property - def legend_opacity(self): ... - @legend_opacity.setter - def legend_opacity(self, legend_opacity) -> None: ... - @property - def legend_orientation_threshold(self): ... - @legend_orientation_threshold.setter - def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/line_protocol_error.pyi b/stubs/influxdb-client/influxdb_client/domain/line_protocol_error.pyi deleted file mode 100644 index 47d4d3094af1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/line_protocol_error.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class LineProtocolError: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - code: Incomplete | None = None, - message: Incomplete | None = None, - op: Incomplete | None = None, - err: Incomplete | None = None, - line: Incomplete | None = None, - ) -> None: ... 
- @property - def code(self): ... - @code.setter - def code(self, code) -> None: ... - @property - def message(self): ... - @message.setter - def message(self, message) -> None: ... - @property - def op(self): ... - @op.setter - def op(self, op) -> None: ... - @property - def err(self): ... - @err.setter - def err(self, err) -> None: ... - @property - def line(self): ... - @line.setter - def line(self, line) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/line_protocol_length_error.pyi b/stubs/influxdb-client/influxdb_client/domain/line_protocol_length_error.pyi deleted file mode 100644 index 42f1c348d891..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/line_protocol_length_error.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class LineProtocolLengthError: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, code: Incomplete | None = None, message: Incomplete | None = None) -> None: ... - @property - def code(self): ... - @code.setter - def code(self, code) -> None: ... - @property - def message(self): ... - @message.setter - def message(self, message) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/links.pyi b/stubs/influxdb-client/influxdb_client/domain/links.pyi deleted file mode 100644 index 202819af84ab..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/links.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -class Links: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, next: Incomplete | None = None, _self: Incomplete | None = None, prev: Incomplete | None = None - ) -> None: ... - @property - def next(self): ... - @next.setter - def next(self, next) -> None: ... - @property - def prev(self): ... - @prev.setter - def prev(self, prev) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/list_stacks_response.pyi b/stubs/influxdb-client/influxdb_client/domain/list_stacks_response.pyi deleted file mode 100644 index d2b7ee8ec428..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/list_stacks_response.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class ListStacksResponse: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, stacks: Incomplete | None = None) -> None: ... - @property - def stacks(self): ... - @stacks.setter - def stacks(self, stacks) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/log_event.pyi b/stubs/influxdb-client/influxdb_client/domain/log_event.pyi deleted file mode 100644 index db1bda0c83dd..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/log_event.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class LogEvent: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, time: Incomplete | None = None, message: Incomplete | None = None, run_id: Incomplete | None = None - ) -> None: ... 
- @property - def time(self): ... - @time.setter - def time(self, time) -> None: ... - @property - def message(self): ... - @message.setter - def message(self, message) -> None: ... - @property - def run_id(self): ... - @run_id.setter - def run_id(self, run_id) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/logical_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/logical_expression.pyi deleted file mode 100644 index 321a202df596..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/logical_expression.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class LogicalExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - operator: Incomplete | None = None, - left: Incomplete | None = None, - right: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def operator(self): ... - @operator.setter - def operator(self, operator) -> None: ... - @property - def left(self): ... - @left.setter - def left(self, left) -> None: ... - @property - def right(self): ... - @right.setter - def right(self, right) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/logs.pyi b/stubs/influxdb-client/influxdb_client/domain/logs.pyi deleted file mode 100644 index b96b51f83dcf..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/logs.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class Logs: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, events: Incomplete | None = None) -> None: ... - @property - def events(self): ... - @events.setter - def events(self, events) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/map_variable_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/map_variable_properties.pyi deleted file mode 100644 index e3bd6f55e50f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/map_variable_properties.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.variable_properties import VariableProperties - -class MapVariableProperties(VariableProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, values: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def values(self): ... - @values.setter - def values(self, values) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/markdown_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/markdown_view_properties.pyi deleted file mode 100644 index 207bd88fe2ef..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/markdown_view_properties.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class MarkdownViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, shape: Incomplete | None = None, note: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/measurement_schema.pyi b/stubs/influxdb-client/influxdb_client/domain/measurement_schema.pyi deleted file mode 100644 index 77dc640632c0..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/measurement_schema.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class MeasurementSchema: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - bucket_id: Incomplete | None = None, - name: Incomplete | None = None, - columns: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def bucket_id(self): ... - @bucket_id.setter - def bucket_id(self, bucket_id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def columns(self): ... - @columns.setter - def columns(self, columns) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/measurement_schema_column.pyi b/stubs/influxdb-client/influxdb_client/domain/measurement_schema_column.pyi deleted file mode 100644 index ed1c0a542bbd..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/measurement_schema_column.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class MeasurementSchemaColumn: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, type: Incomplete | None = None, data_type: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def data_type(self): ... - @data_type.setter - def data_type(self, data_type) -> None: ... - def to_dict(self): ... 
- def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/measurement_schema_create_request.pyi b/stubs/influxdb-client/influxdb_client/domain/measurement_schema_create_request.pyi deleted file mode 100644 index ee878f37fa5a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/measurement_schema_create_request.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class MeasurementSchemaCreateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, name: Incomplete | None = None, columns: Incomplete | None = None) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def columns(self): ... - @columns.setter - def columns(self, columns) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/measurement_schema_list.pyi b/stubs/influxdb-client/influxdb_client/domain/measurement_schema_list.pyi deleted file mode 100644 index c040b8274502..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/measurement_schema_list.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class MeasurementSchemaList: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, measurement_schemas: Incomplete | None = None) -> None: ... - @property - def measurement_schemas(self): ... - @measurement_schemas.setter - def measurement_schemas(self, measurement_schemas) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/measurement_schema_update_request.pyi b/stubs/influxdb-client/influxdb_client/domain/measurement_schema_update_request.pyi deleted file mode 100644 index 077bfbb2dc04..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/measurement_schema_update_request.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class MeasurementSchemaUpdateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, columns: Incomplete | None = None) -> None: ... - @property - def columns(self): ... - @columns.setter - def columns(self, columns) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/member_assignment.pyi b/stubs/influxdb-client/influxdb_client/domain/member_assignment.pyi deleted file mode 100644 index 2eea4ac73e1a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/member_assignment.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.statement import Statement - -class MemberAssignment(Statement): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, member: Incomplete | None = None, init: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def member(self): ... - @member.setter - def member(self, member) -> None: ... - @property - def init(self): ... - @init.setter - def init(self, init) -> None: ... 
- def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/member_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/member_expression.pyi deleted file mode 100644 index 61b21f854dd3..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/member_expression.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class MemberExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, object: Incomplete | None = None, _property: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def object(self): ... - @object.setter - def object(self, object) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/metadata_backup.pyi b/stubs/influxdb-client/influxdb_client/domain/metadata_backup.pyi deleted file mode 100644 index c201a5556f14..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/metadata_backup.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class MetadataBackup: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, kv: Incomplete | None = None, sql: Incomplete | None = None, buckets: Incomplete | None = None - ) -> None: ... - @property - def kv(self): ... - @kv.setter - def kv(self, kv) -> None: ... - @property - def sql(self): ... - @sql.setter - def sql(self, sql) -> None: ... - @property - def buckets(self): ... - @buckets.setter - def buckets(self, buckets) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/model_property.pyi b/stubs/influxdb-client/influxdb_client/domain/model_property.pyi deleted file mode 100644 index fd4a3e5c29a4..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/model_property.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class ModelProperty: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, key: Incomplete | None = None, value: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def key(self): ... - @key.setter - def key(self, key) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/mosaic_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/mosaic_view_properties.pyi deleted file mode 100644 index aca6e7e6688e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/mosaic_view_properties.pyi +++ /dev/null @@ -1,160 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class MosaicViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - time_format: Incomplete | None = None, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - x_column: Incomplete | None = None, - generate_x_axis_ticks: Incomplete | None = None, - x_total_ticks: Incomplete | None = None, - x_tick_start: Incomplete | None = None, - x_tick_step: Incomplete | None = None, - y_label_column_separator: Incomplete | None = None, - y_label_columns: Incomplete | None = None, - y_series_columns: Incomplete | None = None, - fill_columns: Incomplete | None = None, - x_domain: Incomplete | None = None, - y_domain: Incomplete | None = None, - x_axis_label: Incomplete | None = None, - y_axis_label: Incomplete | None = None, - x_prefix: Incomplete | None = None, - x_suffix: Incomplete | None = None, - y_prefix: Incomplete | None = None, - y_suffix: Incomplete | None = None, - hover_dimension: Incomplete | None = None, - legend_colorize_rows: Incomplete | None = None, - legend_hide: Incomplete | None = None, - legend_opacity: Incomplete | None = None, - legend_orientation_threshold: Incomplete | None = None, - ) -> None: ... - @property - def time_format(self): ... - @time_format.setter - def time_format(self, time_format) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def x_column(self): ... - @x_column.setter - def x_column(self, x_column) -> None: ... - @property - def generate_x_axis_ticks(self): ... - @generate_x_axis_ticks.setter - def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... - @property - def x_total_ticks(self): ... - @x_total_ticks.setter - def x_total_ticks(self, x_total_ticks) -> None: ... - @property - def x_tick_start(self): ... - @x_tick_start.setter - def x_tick_start(self, x_tick_start) -> None: ... - @property - def x_tick_step(self): ... - @x_tick_step.setter - def x_tick_step(self, x_tick_step) -> None: ... - @property - def y_label_column_separator(self): ... - @y_label_column_separator.setter - def y_label_column_separator(self, y_label_column_separator) -> None: ... - @property - def y_label_columns(self): ... - @y_label_columns.setter - def y_label_columns(self, y_label_columns) -> None: ... - @property - def y_series_columns(self): ... 
- @y_series_columns.setter - def y_series_columns(self, y_series_columns) -> None: ... - @property - def fill_columns(self): ... - @fill_columns.setter - def fill_columns(self, fill_columns) -> None: ... - @property - def x_domain(self): ... - @x_domain.setter - def x_domain(self, x_domain) -> None: ... - @property - def y_domain(self): ... - @y_domain.setter - def y_domain(self, y_domain) -> None: ... - @property - def x_axis_label(self): ... - @x_axis_label.setter - def x_axis_label(self, x_axis_label) -> None: ... - @property - def y_axis_label(self): ... - @y_axis_label.setter - def y_axis_label(self, y_axis_label) -> None: ... - @property - def x_prefix(self): ... - @x_prefix.setter - def x_prefix(self, x_prefix) -> None: ... - @property - def x_suffix(self): ... - @x_suffix.setter - def x_suffix(self, x_suffix) -> None: ... - @property - def y_prefix(self): ... - @y_prefix.setter - def y_prefix(self, y_prefix) -> None: ... - @property - def y_suffix(self): ... - @y_suffix.setter - def y_suffix(self, y_suffix) -> None: ... - @property - def hover_dimension(self): ... - @hover_dimension.setter - def hover_dimension(self, hover_dimension) -> None: ... - @property - def legend_colorize_rows(self): ... - @legend_colorize_rows.setter - def legend_colorize_rows(self, legend_colorize_rows) -> None: ... - @property - def legend_hide(self): ... - @legend_hide.setter - def legend_hide(self, legend_hide) -> None: ... - @property - def legend_opacity(self): ... - @legend_opacity.setter - def legend_opacity(self, legend_opacity) -> None: ... - @property - def legend_orientation_threshold(self): ... - @legend_orientation_threshold.setter - def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/node.pyi b/stubs/influxdb-client/influxdb_client/domain/node.pyi deleted file mode 100644 index 6535e25d050a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/node.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from _typeshed import Incomplete - -class Node: - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_endpoint.pyi deleted file mode 100644 index 9f45abb2318e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -class NotificationEndpoint: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator_value_class_map: Incomplete - discriminator: str - def __init__(self, type: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def get_real_child_model(self, data): ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base.pyi deleted file mode 100644 index 1a213237b3ce..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base.pyi +++ /dev/null @@ -1,68 +0,0 @@ -from _typeshed import Incomplete - -class NotificationEndpointBase: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - user_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - description: Incomplete | None = None, - name: Incomplete | None = None, - status: str = "active", - labels: Incomplete | None = None, - links: Incomplete | None = None, - type: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def user_id(self): ... - @user_id.setter - def user_id(self, user_id) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base_links.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base_links.pyi deleted file mode 100644 index 48e906186e30..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base_links.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from _typeshed import Incomplete - -class NotificationEndpointBaseLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - _self: Incomplete | None = None, - labels: Incomplete | None = None, - members: Incomplete | None = None, - owners: Incomplete | None = None, - ) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def members(self): ... - @members.setter - def members(self, members) -> None: ... - @property - def owners(self): ... - @owners.setter - def owners(self, owners) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_discriminator.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_discriminator.pyi deleted file mode 100644 index 0179fcf6cee8..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_discriminator.pyi +++ /dev/null @@ -1,26 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_endpoint_base import NotificationEndpointBase - -class NotificationEndpointDiscriminator(NotificationEndpointBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - user_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - description: Incomplete | None = None, - name: Incomplete | None = None, - status: str = "active", - labels: Incomplete | None = None, - links: Incomplete | None = None, - type: Incomplete | None = None, - ) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_type.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_type.pyi deleted file mode 100644 index 130fdf443ce9..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_type.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from _typeshed import Incomplete - -class NotificationEndpointType: - SLACK: str - PAGERDUTY: str - HTTP: str - TELEGRAM: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_update.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_update.pyi deleted file mode 100644 index 43fd259c8faa..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_update.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class NotificationEndpointUpdate: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, description: Incomplete | None = None, status: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_endpoints.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_endpoints.pyi deleted file mode 100644 index 6a00e8d6268a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_endpoints.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class NotificationEndpoints: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, notification_endpoints: Incomplete | None = None, links: Incomplete | None = None) -> None: ... - @property - def notification_endpoints(self): ... 
- @notification_endpoints.setter - def notification_endpoints(self, notification_endpoints) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_rule.pyi deleted file mode 100644 index 77c338d27e23..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_rule.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -class NotificationRule: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator_value_class_map: Incomplete - discriminator: str - def __init__(self, type: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def get_real_child_model(self, data): ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_rule_base.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_rule_base.pyi deleted file mode 100644 index 02bee2c23ab3..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_rule_base.pyi +++ /dev/null @@ -1,128 +0,0 @@ -from _typeshed import Incomplete - -class NotificationRuleBase: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def latest_completed(self): ... - @latest_completed.setter - def latest_completed(self, latest_completed) -> None: ... - @property - def last_run_status(self): ... - @last_run_status.setter - def last_run_status(self, last_run_status) -> None: ... - @property - def last_run_error(self): ... - @last_run_error.setter - def last_run_error(self, last_run_error) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def endpoint_id(self): ... - @endpoint_id.setter - def endpoint_id(self, endpoint_id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def task_id(self): ... - @task_id.setter - def task_id(self, task_id) -> None: ... - @property - def owner_id(self): ... - @owner_id.setter - def owner_id(self, owner_id) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... 
- @updated_at.setter - def updated_at(self, updated_at) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def sleep_until(self): ... - @sleep_until.setter - def sleep_until(self, sleep_until) -> None: ... - @property - def every(self): ... - @every.setter - def every(self, every) -> None: ... - @property - def offset(self): ... - @offset.setter - def offset(self, offset) -> None: ... - @property - def runbook_link(self): ... - @runbook_link.setter - def runbook_link(self, runbook_link) -> None: ... - @property - def limit_every(self): ... - @limit_every.setter - def limit_every(self, limit_every) -> None: ... - @property - def limit(self): ... - @limit.setter - def limit(self, limit) -> None: ... - @property - def tag_rules(self): ... - @tag_rules.setter - def tag_rules(self, tag_rules) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def status_rules(self): ... - @status_rules.setter - def status_rules(self, status_rules) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_rule_base_links.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_rule_base_links.pyi deleted file mode 100644 index 0dabc441ca08..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_rule_base_links.pyi +++ /dev/null @@ -1,34 +0,0 @@ -from _typeshed import Incomplete - -class NotificationRuleBaseLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - _self: Incomplete | None = None, - labels: Incomplete | None = None, - members: Incomplete | None = None, - owners: Incomplete | None = None, - query: Incomplete | None = None, - ) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def members(self): ... - @members.setter - def members(self, members) -> None: ... - @property - def owners(self): ... - @owners.setter - def owners(self, owners) -> None: ... - @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_rule_discriminator.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_rule_discriminator.pyi deleted file mode 100644 index ac900e510847..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_rule_discriminator.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_rule_base import NotificationRuleBase - -class NotificationRuleDiscriminator(NotificationRuleBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_rule_update.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_rule_update.pyi deleted file mode 100644 index d64b07bb1838..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_rule_update.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class NotificationRuleUpdate: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, description: Incomplete | None = None, status: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/notification_rules.pyi b/stubs/influxdb-client/influxdb_client/domain/notification_rules.pyi deleted file mode 100644 index 6eebc7c6bd82..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/notification_rules.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class NotificationRules: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, notification_rules: Incomplete | None = None, links: Incomplete | None = None) -> None: ... - @property - def notification_rules(self): ... - @notification_rules.setter - def notification_rules(self, notification_rules) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... 
- def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/object_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/object_expression.pyi deleted file mode 100644 index 7080f527a6e7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/object_expression.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class ObjectExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, properties: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def properties(self): ... - @properties.setter - def properties(self, properties) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/onboarding_request.pyi b/stubs/influxdb-client/influxdb_client/domain/onboarding_request.pyi deleted file mode 100644 index 13b3c7404e9a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/onboarding_request.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class OnboardingRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - username: Incomplete | None = None, - password: Incomplete | None = None, - org: Incomplete | None = None, - bucket: Incomplete | None = None, - retention_period_seconds: Incomplete | None = None, - retention_period_hrs: Incomplete | None = None, - token: Incomplete | None = None, - ) -> None: ... - @property - def username(self): ... - @username.setter - def username(self, username) -> None: ... - @property - def password(self): ... - @password.setter - def password(self, password) -> None: ... - @property - def org(self): ... - @org.setter - def org(self, org) -> None: ... - @property - def bucket(self): ... - @bucket.setter - def bucket(self, bucket) -> None: ... - @property - def retention_period_seconds(self): ... - @retention_period_seconds.setter - def retention_period_seconds(self, retention_period_seconds) -> None: ... - @property - def retention_period_hrs(self): ... - @retention_period_hrs.setter - def retention_period_hrs(self, retention_period_hrs) -> None: ... - @property - def token(self): ... - @token.setter - def token(self, token) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/onboarding_response.pyi b/stubs/influxdb-client/influxdb_client/domain/onboarding_response.pyi deleted file mode 100644 index 40422e36d33d..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/onboarding_response.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class OnboardingResponse: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - user: Incomplete | None = None, - org: Incomplete | None = None, - bucket: Incomplete | None = None, - auth: Incomplete | None = None, - ) -> None: ... - @property - def user(self): ... - @user.setter - def user(self, user) -> None: ... - @property - def org(self): ... - @org.setter - def org(self, org) -> None: ... - @property - def bucket(self): ... 
- @bucket.setter - def bucket(self, bucket) -> None: ... - @property - def auth(self): ... - @auth.setter - def auth(self, auth) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/option_statement.pyi b/stubs/influxdb-client/influxdb_client/domain/option_statement.pyi deleted file mode 100644 index 91161b9b13d8..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/option_statement.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.statement import Statement - -class OptionStatement(Statement): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, assignment: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def assignment(self): ... - @assignment.setter - def assignment(self, assignment) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/organization.pyi b/stubs/influxdb-client/influxdb_client/domain/organization.pyi deleted file mode 100644 index 160835a90d3a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/organization.pyi +++ /dev/null @@ -1,50 +0,0 @@ -from _typeshed import Incomplete - -class Organization: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - links: Incomplete | None = None, - id: str | None = None, - name: Incomplete | None = None, - default_storage_type: str | None = None, - description: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: str = "active", - ) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def id(self) -> str: ... - @id.setter - def id(self, id: str) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - default_storage_type: str | None - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/organization_links.pyi b/stubs/influxdb-client/influxdb_client/domain/organization_links.pyi deleted file mode 100644 index ec53731fd591..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/organization_links.pyi +++ /dev/null @@ -1,49 +0,0 @@ -from _typeshed import Incomplete - -class OrganizationLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - _self: Incomplete | None = None, - members: Incomplete | None = None, - owners: Incomplete | None = None, - labels: Incomplete | None = None, - secrets: Incomplete | None = None, - buckets: Incomplete | None = None, - tasks: Incomplete | None = None, - dashboards: Incomplete | None = None, - ) -> None: ... - @property - def members(self): ... - @members.setter - def members(self, members) -> None: ... - @property - def owners(self): ... - @owners.setter - def owners(self, owners) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def secrets(self): ... - @secrets.setter - def secrets(self, secrets) -> None: ... - @property - def buckets(self): ... - @buckets.setter - def buckets(self, buckets) -> None: ... - @property - def tasks(self): ... - @tasks.setter - def tasks(self, tasks) -> None: ... - @property - def dashboards(self): ... - @dashboards.setter - def dashboards(self, dashboards) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/organizations.pyi b/stubs/influxdb-client/influxdb_client/domain/organizations.pyi deleted file mode 100644 index 98d86c4038da..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/organizations.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Organizations: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, orgs: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def orgs(self): ... - @orgs.setter - def orgs(self, orgs) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/package.pyi b/stubs/influxdb-client/influxdb_client/domain/package.pyi deleted file mode 100644 index c388e2062198..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/package.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class Package: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - path: Incomplete | None = None, - package: Incomplete | None = None, - files: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def path(self): ... - @path.setter - def path(self, path) -> None: ... - @property - def package(self): ... - @package.setter - def package(self, package) -> None: ... - @property - def files(self): ... - @files.setter - def files(self, files) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/package_clause.pyi b/stubs/influxdb-client/influxdb_client/domain/package_clause.pyi deleted file mode 100644 index 8ca4885f7fab..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/package_clause.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class PackageClause: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, name: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_endpoint.pyi b/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_endpoint.pyi deleted file mode 100644 index fc61ab9ced87..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_endpoint.pyi +++ /dev/null @@ -1,36 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator - -class PagerDutyNotificationEndpoint(NotificationEndpointDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - client_url: Incomplete | None = None, - routing_key: Incomplete | None = None, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - user_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - description: Incomplete | None = None, - name: Incomplete | None = None, - status: str = "active", - labels: Incomplete | None = None, - links: Incomplete | None = None, - type: str = "pagerduty", - ) -> None: ... - @property - def client_url(self): ... - @client_url.setter - def client_url(self, client_url) -> None: ... - @property - def routing_key(self): ... - @routing_key.setter - def routing_key(self, routing_key) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule.pyi deleted file mode 100644 index 6090bb616ab3..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule.pyi +++ /dev/null @@ -1,40 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.pager_duty_notification_rule_base import PagerDutyNotificationRuleBase - -class PagerDutyNotificationRule(PagerDutyNotificationRuleBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "pagerduty", - message_template: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule_base.pyi b/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule_base.pyi deleted file mode 100644 index 6b020b577720..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule_base.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator - -class PagerDutyNotificationRuleBase(NotificationRuleDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - message_template: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def message_template(self): ... 
- @message_template.setter - def message_template(self, message_template) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/paren_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/paren_expression.pyi deleted file mode 100644 index faec9ececd91..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/paren_expression.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class ParenExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, expression: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def expression(self): ... - @expression.setter - def expression(self, expression) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/password_reset_body.pyi b/stubs/influxdb-client/influxdb_client/domain/password_reset_body.pyi deleted file mode 100644 index 56ba7003ac4f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/password_reset_body.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class PasswordResetBody: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, password: Incomplete | None = None) -> None: ... - @property - def password(self): ... - @password.setter - def password(self, password) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/patch_bucket_request.pyi b/stubs/influxdb-client/influxdb_client/domain/patch_bucket_request.pyi deleted file mode 100644 index ec49a5a57aa2..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/patch_bucket_request.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class PatchBucketRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, description: Incomplete | None = None, retention_rules: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def retention_rules(self): ... - @retention_rules.setter - def retention_rules(self, retention_rules) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/patch_dashboard_request.pyi b/stubs/influxdb-client/influxdb_client/domain/patch_dashboard_request.pyi deleted file mode 100644 index 3d486879b75b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/patch_dashboard_request.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class PatchDashboardRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, description: Incomplete | None = None, cells: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def cells(self): ... - @cells.setter - def cells(self, cells) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/patch_organization_request.pyi b/stubs/influxdb-client/influxdb_client/domain/patch_organization_request.pyi deleted file mode 100644 index 89880e6c2949..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/patch_organization_request.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class PatchOrganizationRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, name: Incomplete | None = None, description: Incomplete | None = None) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/patch_retention_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/patch_retention_rule.pyi deleted file mode 100644 index 876202eb40f1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/patch_retention_rule.pyi +++ /dev/null @@ -1,22 +0,0 @@ -from _typeshed import Incomplete - -class PatchRetentionRule: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: str = "expire", every_seconds: int = 2592000, shard_group_duration_seconds: int | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - every_seconds: int - @property - def shard_group_duration_seconds(self): ... - @shard_group_duration_seconds.setter - def shard_group_duration_seconds(self, shard_group_duration_seconds) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/patch_stack_request.pyi b/stubs/influxdb-client/influxdb_client/domain/patch_stack_request.pyi deleted file mode 100644 index df7a24343a22..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/patch_stack_request.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class PatchStackRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - description: Incomplete | None = None, - template_ur_ls: Incomplete | None = None, - additional_resources: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def template_ur_ls(self): ... - @template_ur_ls.setter - def template_ur_ls(self, template_ur_ls) -> None: ... - @property - def additional_resources(self): ... - @additional_resources.setter - def additional_resources(self, additional_resources) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/patch_stack_request_additional_resources.pyi b/stubs/influxdb-client/influxdb_client/domain/patch_stack_request_additional_resources.pyi deleted file mode 100644 index 0da86d918ab1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/patch_stack_request_additional_resources.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class PatchStackRequestAdditionalResources: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, resource_id: Incomplete | None = None, kind: Incomplete | None = None, template_meta_name: Incomplete | None = None - ) -> None: ... - @property - def resource_id(self): ... - @resource_id.setter - def resource_id(self, resource_id) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/permission.pyi b/stubs/influxdb-client/influxdb_client/domain/permission.pyi deleted file mode 100644 index a17cf800123a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/permission.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Permission: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, action: Incomplete | None = None, resource: Incomplete | None = None) -> None: ... - @property - def action(self): ... - @action.setter - def action(self, action) -> None: ... - @property - def resource(self): ... - @resource.setter - def resource(self, resource) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/permission_resource.pyi b/stubs/influxdb-client/influxdb_client/domain/permission_resource.pyi deleted file mode 100644 index f50bdcbfc860..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/permission_resource.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class PermissionResource: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - id: Incomplete | None = None, - name: Incomplete | None = None, - org_id: Incomplete | None = None, - org: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def org(self): ... - @org.setter - def org(self, org) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/pipe_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/pipe_expression.pyi deleted file mode 100644 index 9c0963be5f85..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/pipe_expression.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class PipeExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, argument: Incomplete | None = None, call: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def argument(self): ... - @argument.setter - def argument(self, argument) -> None: ... - @property - def call(self): ... - @call.setter - def call(self, call) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/pipe_literal.pyi b/stubs/influxdb-client/influxdb_client/domain/pipe_literal.pyi deleted file mode 100644 index ec66cd603cdd..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/pipe_literal.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class PipeLiteral(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/post_bucket_request.pyi b/stubs/influxdb-client/influxdb_client/domain/post_bucket_request.pyi deleted file mode 100644 index da5f69cac051..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/post_bucket_request.pyi +++ /dev/null @@ -1,40 +0,0 @@ -from _typeshed import Incomplete - -class PostBucketRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - org_id: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - rp: str = "0", - retention_rules: Incomplete | None = None, - schema_type: Incomplete | None = None, - ) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - rp: str - @property - def retention_rules(self): ... - @retention_rules.setter - def retention_rules(self, retention_rules) -> None: ... - @property - def schema_type(self): ... - @schema_type.setter - def schema_type(self, schema_type) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/post_check.pyi b/stubs/influxdb-client/influxdb_client/domain/post_check.pyi deleted file mode 100644 index af00fc3f4472..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/post_check.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -class PostCheck: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator_value_class_map: Incomplete - discriminator: str - def __init__(self, type: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def get_real_child_model(self, data): ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/post_notification_endpoint.pyi b/stubs/influxdb-client/influxdb_client/domain/post_notification_endpoint.pyi deleted file mode 100644 index 68aa2013f96c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/post_notification_endpoint.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -class PostNotificationEndpoint: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator_value_class_map: Incomplete - discriminator: str - def __init__(self, type: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def get_real_child_model(self, data): ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/post_notification_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/post_notification_rule.pyi deleted file mode 100644 index 347a704b555b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/post_notification_rule.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -class PostNotificationRule: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator_value_class_map: Incomplete - discriminator: str - def __init__(self, type: Incomplete | None = None) -> None: ... 
- @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def get_real_child_model(self, data): ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/post_organization_request.pyi b/stubs/influxdb-client/influxdb_client/domain/post_organization_request.pyi deleted file mode 100644 index 02bb451b9649..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/post_organization_request.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class PostOrganizationRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, name: Incomplete | None = None, description: Incomplete | None = None) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/post_restore_kv_response.pyi b/stubs/influxdb-client/influxdb_client/domain/post_restore_kv_response.pyi deleted file mode 100644 index 7cef21628ae8..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/post_restore_kv_response.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class PostRestoreKVResponse: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, token: Incomplete | None = None) -> None: ... - @property - def token(self): ... - @token.setter - def token(self, token) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/post_stack_request.pyi b/stubs/influxdb-client/influxdb_client/domain/post_stack_request.pyi deleted file mode 100644 index d9e4b1e52633..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/post_stack_request.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class PostStackRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - org_id: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - urls: Incomplete | None = None, - ) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def urls(self): ... - @urls.setter - def urls(self, urls) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/property_key.pyi b/stubs/influxdb-client/influxdb_client/domain/property_key.pyi deleted file mode 100644 index 1767303771d3..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/property_key.pyi +++ /dev/null @@ -1,13 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class PropertyKey(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/query.pyi b/stubs/influxdb-client/influxdb_client/domain/query.pyi deleted file mode 100644 index d4a2aac230e1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/query.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class Query: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - extern: Incomplete | None = None, - query: Incomplete | None = None, - type: Incomplete | None = None, - params: Incomplete | None = None, - dialect: Incomplete | None = None, - now: Incomplete | None = None, - ) -> None: ... - @property - def extern(self): ... - @extern.setter - def extern(self, extern) -> None: ... - @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def params(self): ... - @params.setter - def params(self, params) -> None: ... - @property - def dialect(self): ... - @dialect.setter - def dialect(self, dialect) -> None: ... - @property - def now(self): ... - @now.setter - def now(self, now) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/query_edit_mode.pyi b/stubs/influxdb-client/influxdb_client/domain/query_edit_mode.pyi deleted file mode 100644 index 60ce7aa35da9..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/query_edit_mode.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from _typeshed import Incomplete - -class QueryEditMode: - BUILDER: str - ADVANCED: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/query_variable_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/query_variable_properties.pyi deleted file mode 100644 index b88949eb5957..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/query_variable_properties.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.variable_properties import VariableProperties - -class QueryVariableProperties(VariableProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, values: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def values(self): ... - @values.setter - def values(self, values) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/query_variable_properties_values.pyi b/stubs/influxdb-client/influxdb_client/domain/query_variable_properties_values.pyi deleted file mode 100644 index 7a0068681ea7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/query_variable_properties_values.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class QueryVariablePropertiesValues: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, query: Incomplete | None = None, language: Incomplete | None = None) -> None: ... - @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - @property - def language(self): ... - @language.setter - def language(self, language) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/range_threshold.pyi b/stubs/influxdb-client/influxdb_client/domain/range_threshold.pyi deleted file mode 100644 index bc6b3aa97fdc..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/range_threshold.pyi +++ /dev/null @@ -1,37 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.threshold_base import ThresholdBase - -class RangeThreshold(ThresholdBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "range", - min: Incomplete | None = None, - max: Incomplete | None = None, - within: Incomplete | None = None, - level: Incomplete | None = None, - all_values: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def min(self): ... - @min.setter - def min(self, min) -> None: ... - @property - def max(self): ... - @max.setter - def max(self, max) -> None: ... - @property - def within(self): ... - @within.setter - def within(self, within) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/ready.pyi b/stubs/influxdb-client/influxdb_client/domain/ready.pyi deleted file mode 100644 index 64aaea7baadb..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/ready.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class Ready: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, status: Incomplete | None = None, started: Incomplete | None = None, up: Incomplete | None = None - ) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def started(self): ... - @started.setter - def started(self, started) -> None: ... - @property - def up(self): ... - @up.setter - def up(self, up) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/regexp_literal.pyi b/stubs/influxdb-client/influxdb_client/domain/regexp_literal.pyi deleted file mode 100644 index 5cf69b5b513d..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/regexp_literal.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class RegexpLiteral(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, value: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/remote_connection.pyi b/stubs/influxdb-client/influxdb_client/domain/remote_connection.pyi deleted file mode 100644 index 21c64d669cd7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/remote_connection.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class RemoteConnection: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - name: Incomplete | None = None, - org_id: Incomplete | None = None, - description: Incomplete | None = None, - remote_url: Incomplete | None = None, - remote_org_id: Incomplete | None = None, - allow_insecure_tls: bool = False, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def remote_url(self): ... - @remote_url.setter - def remote_url(self, remote_url) -> None: ... - @property - def remote_org_id(self): ... - @remote_org_id.setter - def remote_org_id(self, remote_org_id) -> None: ... - @property - def allow_insecure_tls(self): ... - @allow_insecure_tls.setter - def allow_insecure_tls(self, allow_insecure_tls) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/remote_connection_creation_request.pyi b/stubs/influxdb-client/influxdb_client/domain/remote_connection_creation_request.pyi deleted file mode 100644 index f829233ffdbd..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/remote_connection_creation_request.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class RemoteConnectionCreationRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - description: Incomplete | None = None, - org_id: Incomplete | None = None, - remote_url: Incomplete | None = None, - remote_api_token: Incomplete | None = None, - remote_org_id: Incomplete | None = None, - allow_insecure_tls: bool = False, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... 
- @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def remote_url(self): ... - @remote_url.setter - def remote_url(self, remote_url) -> None: ... - @property - def remote_api_token(self): ... - @remote_api_token.setter - def remote_api_token(self, remote_api_token) -> None: ... - @property - def remote_org_id(self): ... - @remote_org_id.setter - def remote_org_id(self, remote_org_id) -> None: ... - @property - def allow_insecure_tls(self): ... - @allow_insecure_tls.setter - def allow_insecure_tls(self, allow_insecure_tls) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/remote_connection_update_request.pyi b/stubs/influxdb-client/influxdb_client/domain/remote_connection_update_request.pyi deleted file mode 100644 index 38f48aa9b5dd..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/remote_connection_update_request.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class RemoteConnectionUpdateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - description: Incomplete | None = None, - remote_url: Incomplete | None = None, - remote_api_token: Incomplete | None = None, - remote_org_id: Incomplete | None = None, - allow_insecure_tls: bool = False, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def remote_url(self): ... - @remote_url.setter - def remote_url(self, remote_url) -> None: ... - @property - def remote_api_token(self): ... - @remote_api_token.setter - def remote_api_token(self, remote_api_token) -> None: ... - @property - def remote_org_id(self): ... - @remote_org_id.setter - def remote_org_id(self, remote_org_id) -> None: ... - @property - def allow_insecure_tls(self): ... - @allow_insecure_tls.setter - def allow_insecure_tls(self, allow_insecure_tls) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/remote_connections.pyi b/stubs/influxdb-client/influxdb_client/domain/remote_connections.pyi deleted file mode 100644 index f55a99ee98fc..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/remote_connections.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class RemoteConnections: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, remotes: Incomplete | None = None) -> None: ... - @property - def remotes(self): ... - @remotes.setter - def remotes(self, remotes) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/renamable_field.pyi b/stubs/influxdb-client/influxdb_client/domain/renamable_field.pyi deleted file mode 100644 index 64e02dbc4e71..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/renamable_field.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class RenamableField: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, internal_name: Incomplete | None = None, display_name: Incomplete | None = None, visible: Incomplete | None = None - ) -> None: ... - @property - def internal_name(self): ... - @internal_name.setter - def internal_name(self, internal_name) -> None: ... - @property - def display_name(self): ... - @display_name.setter - def display_name(self, display_name) -> None: ... - @property - def visible(self): ... - @visible.setter - def visible(self, visible) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/replication.pyi b/stubs/influxdb-client/influxdb_client/domain/replication.pyi deleted file mode 100644 index b095e5385a31..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/replication.pyi +++ /dev/null @@ -1,77 +0,0 @@ -from _typeshed import Incomplete - -class Replication: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - org_id: Incomplete | None = None, - remote_id: Incomplete | None = None, - local_bucket_id: Incomplete | None = None, - remote_bucket_id: Incomplete | None = None, - remote_bucket_name: str | None = None, - max_queue_size_bytes: Incomplete | None = None, - current_queue_size_bytes: Incomplete | None = None, - remaining_bytes_to_be_synced: int | None = None, - latest_response_code: Incomplete | None = None, - latest_error_message: Incomplete | None = None, - drop_non_retryable_data: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def remote_id(self): ... - @remote_id.setter - def remote_id(self, remote_id) -> None: ... - @property - def local_bucket_id(self): ... - @local_bucket_id.setter - def local_bucket_id(self, local_bucket_id) -> None: ... - @property - def remote_bucket_id(self): ... - @remote_bucket_id.setter - def remote_bucket_id(self, remote_bucket_id) -> None: ... - remote_bucket_name: str | None - @property - def max_queue_size_bytes(self): ... - @max_queue_size_bytes.setter - def max_queue_size_bytes(self, max_queue_size_bytes) -> None: ... - @property - def current_queue_size_bytes(self): ... - @current_queue_size_bytes.setter - def current_queue_size_bytes(self, current_queue_size_bytes) -> None: ... - remaining_bytes_to_be_synced: int | None - @property - def latest_response_code(self): ... - @latest_response_code.setter - def latest_response_code(self, latest_response_code) -> None: ... - @property - def latest_error_message(self): ... 
- @latest_error_message.setter - def latest_error_message(self, latest_error_message) -> None: ... - @property - def drop_non_retryable_data(self): ... - @drop_non_retryable_data.setter - def drop_non_retryable_data(self, drop_non_retryable_data) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/replication_creation_request.pyi b/stubs/influxdb-client/influxdb_client/domain/replication_creation_request.pyi deleted file mode 100644 index 8807bf320230..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/replication_creation_request.pyi +++ /dev/null @@ -1,57 +0,0 @@ -from _typeshed import Incomplete - -class ReplicationCreationRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - description: Incomplete | None = None, - org_id: Incomplete | None = None, - remote_id: Incomplete | None = None, - local_bucket_id: Incomplete | None = None, - remote_bucket_id: Incomplete | None = None, - remote_bucket_name: str | None = None, - max_queue_size_bytes: int = 67108860, - drop_non_retryable_data: bool = False, - max_age_seconds: int = 604800, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def remote_id(self): ... - @remote_id.setter - def remote_id(self, remote_id) -> None: ... - @property - def local_bucket_id(self): ... - @local_bucket_id.setter - def local_bucket_id(self, local_bucket_id) -> None: ... - @property - def remote_bucket_id(self): ... - @remote_bucket_id.setter - def remote_bucket_id(self, remote_bucket_id) -> None: ... - remote_bucket_name: str | None - @property - def max_queue_size_bytes(self): ... - @max_queue_size_bytes.setter - def max_queue_size_bytes(self, max_queue_size_bytes) -> None: ... - @property - def drop_non_retryable_data(self): ... - @drop_non_retryable_data.setter - def drop_non_retryable_data(self, drop_non_retryable_data) -> None: ... - max_age_seconds: int - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/replication_update_request.pyi b/stubs/influxdb-client/influxdb_client/domain/replication_update_request.pyi deleted file mode 100644 index 328f9a8acf75..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/replication_update_request.pyi +++ /dev/null @@ -1,47 +0,0 @@ -from _typeshed import Incomplete - -class ReplicationUpdateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - description: Incomplete | None = None, - remote_id: Incomplete | None = None, - remote_bucket_id: Incomplete | None = None, - remote_bucket_name: str | None = None, - max_queue_size_bytes: Incomplete | None = None, - drop_non_retryable_data: Incomplete | None = None, - max_age_seconds: int | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def remote_id(self): ... 
- @remote_id.setter - def remote_id(self, remote_id) -> None: ... - @property - def remote_bucket_id(self): ... - @remote_bucket_id.setter - def remote_bucket_id(self, remote_bucket_id) -> None: ... - remote_bucket_name: str | None - @property - def max_queue_size_bytes(self): ... - @max_queue_size_bytes.setter - def max_queue_size_bytes(self, max_queue_size_bytes) -> None: ... - @property - def drop_non_retryable_data(self): ... - @drop_non_retryable_data.setter - def drop_non_retryable_data(self, drop_non_retryable_data) -> None: ... - max_age_seconds: int | None - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/replications.pyi b/stubs/influxdb-client/influxdb_client/domain/replications.pyi deleted file mode 100644 index 100ef1d0b276..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/replications.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class Replications: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, replications: Incomplete | None = None) -> None: ... - @property - def replications(self): ... - @replications.setter - def replications(self, replications) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/resource_member.pyi b/stubs/influxdb-client/influxdb_client/domain/resource_member.pyi deleted file mode 100644 index 44799808d8f5..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/resource_member.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.user_response import UserResponse - -class ResourceMember(UserResponse): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - role: str = "member", - id: Incomplete | None = None, - name: Incomplete | None = None, - status: str = "active", - links: Incomplete | None = None, - ) -> None: ... - @property - def role(self): ... - @role.setter - def role(self, role) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/resource_members.pyi b/stubs/influxdb-client/influxdb_client/domain/resource_members.pyi deleted file mode 100644 index 371ff5e505a7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/resource_members.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class ResourceMembers: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, users: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def users(self): ... - @users.setter - def users(self, users) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/resource_members_links.pyi b/stubs/influxdb-client/influxdb_client/domain/resource_members_links.pyi deleted file mode 100644 index 0fd7995dbb7a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/resource_members_links.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from _typeshed import Incomplete - -class ResourceMembersLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, _self: Incomplete | None = None) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/resource_owner.pyi b/stubs/influxdb-client/influxdb_client/domain/resource_owner.pyi deleted file mode 100644 index 7ff699bfb1ef..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/resource_owner.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.user_response import UserResponse - -class ResourceOwner(UserResponse): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - role: str = "owner", - id: Incomplete | None = None, - name: Incomplete | None = None, - status: str = "active", - links: Incomplete | None = None, - ) -> None: ... - @property - def role(self): ... - @role.setter - def role(self, role) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/resource_owners.pyi b/stubs/influxdb-client/influxdb_client/domain/resource_owners.pyi deleted file mode 100644 index befbb5de61c1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/resource_owners.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class ResourceOwners: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, users: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def users(self): ... - @users.setter - def users(self, users) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/restored_bucket_mappings.pyi b/stubs/influxdb-client/influxdb_client/domain/restored_bucket_mappings.pyi deleted file mode 100644 index 7ad10c273177..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/restored_bucket_mappings.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class RestoredBucketMappings: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, id: Incomplete | None = None, name: Incomplete | None = None, shard_mappings: Incomplete | None = None - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def shard_mappings(self): ... - @shard_mappings.setter - def shard_mappings(self, shard_mappings) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/retention_policy_manifest.pyi b/stubs/influxdb-client/influxdb_client/domain/retention_policy_manifest.pyi deleted file mode 100644 index d075925ff096..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/retention_policy_manifest.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class RetentionPolicyManifest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - replica_n: Incomplete | None = None, - duration: Incomplete | None = None, - shard_group_duration: Incomplete | None = None, - shard_groups: Incomplete | None = None, - subscriptions: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def replica_n(self): ... - @replica_n.setter - def replica_n(self, replica_n) -> None: ... - @property - def duration(self): ... - @duration.setter - def duration(self, duration) -> None: ... - @property - def shard_group_duration(self): ... - @shard_group_duration.setter - def shard_group_duration(self, shard_group_duration) -> None: ... - @property - def shard_groups(self): ... - @shard_groups.setter - def shard_groups(self, shard_groups) -> None: ... - @property - def subscriptions(self): ... - @subscriptions.setter - def subscriptions(self, subscriptions) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/return_statement.pyi b/stubs/influxdb-client/influxdb_client/domain/return_statement.pyi deleted file mode 100644 index c35fb895862d..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/return_statement.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.statement import Statement - -class ReturnStatement(Statement): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, argument: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def argument(self): ... - @argument.setter - def argument(self, argument) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/routes.pyi b/stubs/influxdb-client/influxdb_client/domain/routes.pyi deleted file mode 100644 index 909cae592d30..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/routes.pyi +++ /dev/null @@ -1,103 +0,0 @@ -from _typeshed import Incomplete - -class Routes: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - authorizations: Incomplete | None = None, - buckets: Incomplete | None = None, - dashboards: Incomplete | None = None, - external: Incomplete | None = None, - variables: Incomplete | None = None, - me: Incomplete | None = None, - flags: Incomplete | None = None, - orgs: Incomplete | None = None, - query: Incomplete | None = None, - setup: Incomplete | None = None, - signin: Incomplete | None = None, - signout: Incomplete | None = None, - sources: Incomplete | None = None, - system: Incomplete | None = None, - tasks: Incomplete | None = None, - telegrafs: Incomplete | None = None, - users: Incomplete | None = None, - write: Incomplete | None = None, - ) -> None: ... - @property - def authorizations(self): ... - @authorizations.setter - def authorizations(self, authorizations) -> None: ... - @property - def buckets(self): ... - @buckets.setter - def buckets(self, buckets) -> None: ... - @property - def dashboards(self): ... - @dashboards.setter - def dashboards(self, dashboards) -> None: ... - @property - def external(self): ... - @external.setter - def external(self, external) -> None: ... - @property - def variables(self): ... - @variables.setter - def variables(self, variables) -> None: ... - @property - def me(self): ... - @me.setter - def me(self, me) -> None: ... - @property - def flags(self): ... - @flags.setter - def flags(self, flags) -> None: ... - @property - def orgs(self): ... - @orgs.setter - def orgs(self, orgs) -> None: ... - @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - @property - def setup(self): ... - @setup.setter - def setup(self, setup) -> None: ... - @property - def signin(self): ... - @signin.setter - def signin(self, signin) -> None: ... - @property - def signout(self): ... - @signout.setter - def signout(self, signout) -> None: ... - @property - def sources(self): ... - @sources.setter - def sources(self, sources) -> None: ... - @property - def system(self): ... - @system.setter - def system(self, system) -> None: ... - @property - def tasks(self): ... - @tasks.setter - def tasks(self, tasks) -> None: ... - @property - def telegrafs(self): ... - @telegrafs.setter - def telegrafs(self, telegrafs) -> None: ... - @property - def users(self): ... - @users.setter - def users(self, users) -> None: ... - @property - def write(self): ... - @write.setter - def write(self, write) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/routes_external.pyi b/stubs/influxdb-client/influxdb_client/domain/routes_external.pyi deleted file mode 100644 index 97ad969e533c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/routes_external.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class RoutesExternal: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, status_feed: Incomplete | None = None) -> None: ... - @property - def status_feed(self): ... 
- @status_feed.setter - def status_feed(self, status_feed) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/routes_query.pyi b/stubs/influxdb-client/influxdb_client/domain/routes_query.pyi deleted file mode 100644 index 9d92d3d25cdb..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/routes_query.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from _typeshed import Incomplete - -class RoutesQuery: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - _self: Incomplete | None = None, - ast: Incomplete | None = None, - analyze: Incomplete | None = None, - suggestions: Incomplete | None = None, - ) -> None: ... - @property - def ast(self): ... - @ast.setter - def ast(self, ast) -> None: ... - @property - def analyze(self): ... - @analyze.setter - def analyze(self, analyze) -> None: ... - @property - def suggestions(self): ... - @suggestions.setter - def suggestions(self, suggestions) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/routes_system.pyi b/stubs/influxdb-client/influxdb_client/domain/routes_system.pyi deleted file mode 100644 index dbf28489b38f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/routes_system.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class RoutesSystem: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, metrics: Incomplete | None = None, debug: Incomplete | None = None, health: Incomplete | None = None - ) -> None: ... - @property - def metrics(self): ... - @metrics.setter - def metrics(self, metrics) -> None: ... - @property - def debug(self): ... - @debug.setter - def debug(self, debug) -> None: ... - @property - def health(self): ... - @health.setter - def health(self, health) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/rule_status_level.pyi b/stubs/influxdb-client/influxdb_client/domain/rule_status_level.pyi deleted file mode 100644 index 73b580cb02e9..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/rule_status_level.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from _typeshed import Incomplete - -class RuleStatusLevel: - UNKNOWN: str - OK: str - INFO: str - CRIT: str - WARN: str - ANY: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/run.pyi b/stubs/influxdb-client/influxdb_client/domain/run.pyi deleted file mode 100644 index cb0623fed76b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/run.pyi +++ /dev/null @@ -1,60 +0,0 @@ -from _typeshed import Incomplete - -class Run: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - task_id: Incomplete | None = None, - status: Incomplete | None = None, - scheduled_for: Incomplete | None = None, - log: Incomplete | None = None, - flux: str | None = None, - started_at: Incomplete | None = None, - finished_at: Incomplete | None = None, - requested_at: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def task_id(self): ... - @task_id.setter - def task_id(self, task_id) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def scheduled_for(self): ... - @scheduled_for.setter - def scheduled_for(self, scheduled_for) -> None: ... - @property - def log(self): ... - @log.setter - def log(self, log) -> None: ... - flux: str | None - @property - def started_at(self): ... - @started_at.setter - def started_at(self, started_at) -> None: ... - @property - def finished_at(self): ... - @finished_at.setter - def finished_at(self, finished_at) -> None: ... - @property - def requested_at(self): ... - @requested_at.setter - def requested_at(self, requested_at) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/run_links.pyi b/stubs/influxdb-client/influxdb_client/domain/run_links.pyi deleted file mode 100644 index 380e3d22a73b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/run_links.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -class RunLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, _self: Incomplete | None = None, task: Incomplete | None = None, retry: Incomplete | None = None - ) -> None: ... - @property - def task(self): ... - @task.setter - def task(self, task) -> None: ... - @property - def retry(self): ... - @retry.setter - def retry(self, retry) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/run_manually.pyi b/stubs/influxdb-client/influxdb_client/domain/run_manually.pyi deleted file mode 100644 index a0058aec8408..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/run_manually.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class RunManually: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, scheduled_for: Incomplete | None = None) -> None: ... - @property - def scheduled_for(self): ... - @scheduled_for.setter - def scheduled_for(self, scheduled_for) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/runs.pyi b/stubs/influxdb-client/influxdb_client/domain/runs.pyi deleted file mode 100644 index a8c1969596e9..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/runs.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Runs: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, runs: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def runs(self): ... - @runs.setter - def runs(self, runs) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/scatter_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/scatter_view_properties.pyi deleted file mode 100644 index 8968aabff26c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/scatter_view_properties.pyi +++ /dev/null @@ -1,172 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class ScatterViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - adaptive_zoom_hide: bool | None = None, - time_format: Incomplete | None = None, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - x_column: Incomplete | None = None, - generate_x_axis_ticks: Incomplete | None = None, - x_total_ticks: Incomplete | None = None, - x_tick_start: Incomplete | None = None, - x_tick_step: Incomplete | None = None, - y_column: Incomplete | None = None, - generate_y_axis_ticks: Incomplete | None = None, - y_total_ticks: Incomplete | None = None, - y_tick_start: Incomplete | None = None, - y_tick_step: Incomplete | None = None, - fill_columns: Incomplete | None = None, - symbol_columns: Incomplete | None = None, - x_domain: Incomplete | None = None, - y_domain: Incomplete | None = None, - x_axis_label: Incomplete | None = None, - y_axis_label: Incomplete | None = None, - x_prefix: Incomplete | None = None, - x_suffix: Incomplete | None = None, - y_prefix: Incomplete | None = None, - y_suffix: Incomplete | None = None, - legend_colorize_rows: Incomplete | None = None, - legend_hide: Incomplete | None = None, - legend_opacity: Incomplete | None = None, - legend_orientation_threshold: Incomplete | None = None, - ) -> None: ... - adaptive_zoom_hide: bool | None - @property - def time_format(self): ... - @time_format.setter - def time_format(self, time_format) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def x_column(self): ... 
- @x_column.setter - def x_column(self, x_column) -> None: ... - @property - def generate_x_axis_ticks(self): ... - @generate_x_axis_ticks.setter - def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... - @property - def x_total_ticks(self): ... - @x_total_ticks.setter - def x_total_ticks(self, x_total_ticks) -> None: ... - @property - def x_tick_start(self): ... - @x_tick_start.setter - def x_tick_start(self, x_tick_start) -> None: ... - @property - def x_tick_step(self): ... - @x_tick_step.setter - def x_tick_step(self, x_tick_step) -> None: ... - @property - def y_column(self): ... - @y_column.setter - def y_column(self, y_column) -> None: ... - @property - def generate_y_axis_ticks(self): ... - @generate_y_axis_ticks.setter - def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... - @property - def y_total_ticks(self): ... - @y_total_ticks.setter - def y_total_ticks(self, y_total_ticks) -> None: ... - @property - def y_tick_start(self): ... - @y_tick_start.setter - def y_tick_start(self, y_tick_start) -> None: ... - @property - def y_tick_step(self): ... - @y_tick_step.setter - def y_tick_step(self, y_tick_step) -> None: ... - @property - def fill_columns(self): ... - @fill_columns.setter - def fill_columns(self, fill_columns) -> None: ... - @property - def symbol_columns(self): ... - @symbol_columns.setter - def symbol_columns(self, symbol_columns) -> None: ... - @property - def x_domain(self): ... - @x_domain.setter - def x_domain(self, x_domain) -> None: ... - @property - def y_domain(self): ... - @y_domain.setter - def y_domain(self, y_domain) -> None: ... - @property - def x_axis_label(self): ... - @x_axis_label.setter - def x_axis_label(self, x_axis_label) -> None: ... - @property - def y_axis_label(self): ... - @y_axis_label.setter - def y_axis_label(self, y_axis_label) -> None: ... - @property - def x_prefix(self): ... - @x_prefix.setter - def x_prefix(self, x_prefix) -> None: ... - @property - def x_suffix(self): ... - @x_suffix.setter - def x_suffix(self, x_suffix) -> None: ... - @property - def y_prefix(self): ... - @y_prefix.setter - def y_prefix(self, y_prefix) -> None: ... - @property - def y_suffix(self): ... - @y_suffix.setter - def y_suffix(self, y_suffix) -> None: ... - @property - def legend_colorize_rows(self): ... - @legend_colorize_rows.setter - def legend_colorize_rows(self, legend_colorize_rows) -> None: ... - @property - def legend_hide(self): ... - @legend_hide.setter - def legend_hide(self, legend_hide) -> None: ... - @property - def legend_opacity(self): ... - @legend_opacity.setter - def legend_opacity(self, legend_opacity) -> None: ... - @property - def legend_orientation_threshold(self): ... - @legend_orientation_threshold.setter - def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/schema_type.pyi b/stubs/influxdb-client/influxdb_client/domain/schema_type.pyi deleted file mode 100644 index aef84af17839..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/schema_type.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from _typeshed import Incomplete - -class SchemaType: - IMPLICIT: str - EXPLICIT: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/scraper_target_request.pyi b/stubs/influxdb-client/influxdb_client/domain/scraper_target_request.pyi deleted file mode 100644 index 6d950407f0eb..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/scraper_target_request.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class ScraperTargetRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - type: Incomplete | None = None, - url: Incomplete | None = None, - org_id: Incomplete | None = None, - bucket_id: Incomplete | None = None, - allow_insecure: bool = False, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def url(self): ... - @url.setter - def url(self, url) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def bucket_id(self): ... - @bucket_id.setter - def bucket_id(self, bucket_id) -> None: ... - @property - def allow_insecure(self): ... - @allow_insecure.setter - def allow_insecure(self, allow_insecure) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/scraper_target_response.pyi b/stubs/influxdb-client/influxdb_client/domain/scraper_target_response.pyi deleted file mode 100644 index 4de5f6b0661d..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/scraper_target_response.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.scraper_target_request import ScraperTargetRequest - -class ScraperTargetResponse(ScraperTargetRequest): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org: Incomplete | None = None, - bucket: Incomplete | None = None, - links: Incomplete | None = None, - name: Incomplete | None = None, - type: Incomplete | None = None, - url: Incomplete | None = None, - org_id: Incomplete | None = None, - bucket_id: Incomplete | None = None, - allow_insecure: bool = False, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org(self): ... - @org.setter - def org(self, org) -> None: ... - @property - def bucket(self): ... - @bucket.setter - def bucket(self, bucket) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/scraper_target_responses.pyi b/stubs/influxdb-client/influxdb_client/domain/scraper_target_responses.pyi deleted file mode 100644 index 743bf68ba746..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/scraper_target_responses.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class ScraperTargetResponses: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, configurations: Incomplete | None = None) -> None: ... - @property - def configurations(self): ... - @configurations.setter - def configurations(self, configurations) -> None: ... - def to_dict(self): ... 
- def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/script.pyi b/stubs/influxdb-client/influxdb_client/domain/script.pyi deleted file mode 100644 index 3d697387c608..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/script.pyi +++ /dev/null @@ -1,58 +0,0 @@ -from _typeshed import Incomplete - -class Script: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - org_id: Incomplete | None = None, - script: Incomplete | None = None, - language: Incomplete | None = None, - url: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def script(self): ... - @script.setter - def script(self, script) -> None: ... - @property - def language(self): ... - @language.setter - def language(self, language) -> None: ... - @property - def url(self): ... - @url.setter - def url(self, url) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/script_create_request.pyi b/stubs/influxdb-client/influxdb_client/domain/script_create_request.pyi deleted file mode 100644 index 14cebc9245e7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/script_create_request.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class ScriptCreateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - description: Incomplete | None = None, - script: Incomplete | None = None, - language: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def script(self): ... - @script.setter - def script(self, script) -> None: ... - @property - def language(self): ... - @language.setter - def language(self, language) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/script_invocation_params.pyi b/stubs/influxdb-client/influxdb_client/domain/script_invocation_params.pyi deleted file mode 100644 index b9a5f1f4c456..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/script_invocation_params.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class ScriptInvocationParams: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, params: Incomplete | None = None) -> None: ... - @property - def params(self): ... - @params.setter - def params(self, params) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/script_language.pyi b/stubs/influxdb-client/influxdb_client/domain/script_language.pyi deleted file mode 100644 index 18147050209b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/script_language.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from _typeshed import Incomplete -from typing import ClassVar, Final - -class ScriptLanguage: - FLUX: Final = "flux" - SQL: Final = "sql" - INFLUXQL: Final = "influxql" - - openapi_types: ClassVar[dict[Incomplete, Incomplete]] - attribute_map: ClassVar[dict[Incomplete, Incomplete]] - def to_dict(self) -> dict[Incomplete, Incomplete]: ... - def to_str(self) -> str: ... - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... diff --git a/stubs/influxdb-client/influxdb_client/domain/script_update_request.pyi b/stubs/influxdb-client/influxdb_client/domain/script_update_request.pyi deleted file mode 100644 index 9c79ca906311..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/script_update_request.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class ScriptUpdateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, description: Incomplete | None = None, script: Incomplete | None = None) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def script(self): ... - @script.setter - def script(self, script) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/scripts.pyi b/stubs/influxdb-client/influxdb_client/domain/scripts.pyi deleted file mode 100644 index 0f2ce357cd97..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/scripts.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class Scripts: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, scripts: Incomplete | None = None) -> None: ... - @property - def scripts(self): ... - @scripts.setter - def scripts(self, scripts) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/secret_keys.pyi b/stubs/influxdb-client/influxdb_client/domain/secret_keys.pyi deleted file mode 100644 index 04343cf06f7c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/secret_keys.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class SecretKeys: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, secrets: Incomplete | None = None) -> None: ... - @property - def secrets(self): ... - @secrets.setter - def secrets(self, secrets) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/secret_keys_response.pyi b/stubs/influxdb-client/influxdb_client/domain/secret_keys_response.pyi deleted file mode 100644 index 344544700876..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/secret_keys_response.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.secret_keys import SecretKeys - -class SecretKeysResponse(SecretKeys): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, secrets: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/shard_group_manifest.pyi b/stubs/influxdb-client/influxdb_client/domain/shard_group_manifest.pyi deleted file mode 100644 index c60c68203f2d..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/shard_group_manifest.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class ShardGroupManifest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - start_time: Incomplete | None = None, - end_time: Incomplete | None = None, - deleted_at: Incomplete | None = None, - truncated_at: Incomplete | None = None, - shards: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def start_time(self): ... - @start_time.setter - def start_time(self, start_time) -> None: ... - @property - def end_time(self): ... - @end_time.setter - def end_time(self, end_time) -> None: ... - @property - def deleted_at(self): ... - @deleted_at.setter - def deleted_at(self, deleted_at) -> None: ... - @property - def truncated_at(self): ... - @truncated_at.setter - def truncated_at(self, truncated_at) -> None: ... - @property - def shards(self): ... - @shards.setter - def shards(self, shards) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/shard_manifest.pyi b/stubs/influxdb-client/influxdb_client/domain/shard_manifest.pyi deleted file mode 100644 index 51cc75b2bba2..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/shard_manifest.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class ShardManifest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, id: Incomplete | None = None, shard_owners: Incomplete | None = None) -> None: ... 
- @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def shard_owners(self): ... - @shard_owners.setter - def shard_owners(self, shard_owners) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/shard_owner.pyi b/stubs/influxdb-client/influxdb_client/domain/shard_owner.pyi deleted file mode 100644 index c284b237f1a4..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/shard_owner.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class ShardOwner: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, node_id: Incomplete | None = None) -> None: ... - @property - def node_id(self): ... - @node_id.setter - def node_id(self, node_id) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/simple_table_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/simple_table_view_properties.pyi deleted file mode 100644 index da173e6bb258..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/simple_table_view_properties.pyi +++ /dev/null @@ -1,45 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class SimpleTableViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - show_all: Incomplete | None = None, - queries: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def show_all(self): ... - @show_all.setter - def show_all(self, show_all) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/single_stat_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/single_stat_view_properties.pyi deleted file mode 100644 index a644310dbc61..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/single_stat_view_properties.pyi +++ /dev/null @@ -1,75 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class SingleStatViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - prefix: Incomplete | None = None, - tick_prefix: Incomplete | None = None, - suffix: Incomplete | None = None, - tick_suffix: Incomplete | None = None, - static_legend: Incomplete | None = None, - decimal_places: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def prefix(self): ... - @prefix.setter - def prefix(self, prefix) -> None: ... - @property - def tick_prefix(self): ... - @tick_prefix.setter - def tick_prefix(self, tick_prefix) -> None: ... - @property - def suffix(self): ... - @suffix.setter - def suffix(self, suffix) -> None: ... - @property - def tick_suffix(self): ... - @tick_suffix.setter - def tick_suffix(self, tick_suffix) -> None: ... - @property - def static_legend(self): ... - @static_legend.setter - def static_legend(self, static_legend) -> None: ... - @property - def decimal_places(self): ... - @decimal_places.setter - def decimal_places(self, decimal_places) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/slack_notification_endpoint.pyi b/stubs/influxdb-client/influxdb_client/domain/slack_notification_endpoint.pyi deleted file mode 100644 index b84baf4371d3..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/slack_notification_endpoint.pyi +++ /dev/null @@ -1,36 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator - -class SlackNotificationEndpoint(NotificationEndpointDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - url: Incomplete | None = None, - token: Incomplete | None = None, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - user_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - description: Incomplete | None = None, - name: Incomplete | None = None, - status: str = "active", - labels: Incomplete | None = None, - links: Incomplete | None = None, - type: str = "slack", - ) -> None: ... - @property - def url(self): ... - @url.setter - def url(self, url) -> None: ... - @property - def token(self): ... - @token.setter - def token(self, token) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule.pyi deleted file mode 100644 index c015f1018f90..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.slack_notification_rule_base import SlackNotificationRuleBase - -class SlackNotificationRule(SlackNotificationRuleBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "slack", - channel: Incomplete | None = None, - message_template: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule_base.pyi b/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule_base.pyi deleted file mode 100644 index 763216a2529f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule_base.pyi +++ /dev/null @@ -1,53 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator - -class SlackNotificationRuleBase(NotificationRuleDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - channel: Incomplete | None = None, - message_template: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def channel(self): ... - @channel.setter - def channel(self, channel) -> None: ... - @property - def message_template(self): ... - @message_template.setter - def message_template(self, message_template) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule.pyi deleted file mode 100644 index b2dccc5d7b6c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule.pyi +++ /dev/null @@ -1,42 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase - -class SMTPNotificationRule(SMTPNotificationRuleBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "smtp", - subject_template: Incomplete | None = None, - body_template: Incomplete | None = None, - to: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule_base.pyi b/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule_base.pyi deleted file mode 100644 index dbbbdbf5a516..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule_base.pyi +++ /dev/null @@ -1,58 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator - -class SMTPNotificationRuleBase(NotificationRuleDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - subject_template: Incomplete | None = None, - body_template: Incomplete | None = None, - to: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... 
- @type.setter - def type(self, type) -> None: ... - @property - def subject_template(self): ... - @subject_template.setter - def subject_template(self, subject_template) -> None: ... - @property - def body_template(self): ... - @body_template.setter - def body_template(self, body_template) -> None: ... - @property - def to(self): ... - @to.setter - def to(self, to) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/source.pyi b/stubs/influxdb-client/influxdb_client/domain/source.pyi deleted file mode 100644 index ab677a5b03a0..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/source.pyi +++ /dev/null @@ -1,93 +0,0 @@ -from _typeshed import Incomplete - -class Source: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - links: Incomplete | None = None, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - default: Incomplete | None = None, - name: Incomplete | None = None, - type: Incomplete | None = None, - url: Incomplete | None = None, - insecure_skip_verify: Incomplete | None = None, - telegraf: Incomplete | None = None, - token: Incomplete | None = None, - username: Incomplete | None = None, - password: Incomplete | None = None, - shared_secret: Incomplete | None = None, - meta_url: Incomplete | None = None, - default_rp: Incomplete | None = None, - languages: Incomplete | None = None, - ) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def default(self): ... - @default.setter - def default(self, default) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def url(self): ... - @url.setter - def url(self, url) -> None: ... - @property - def insecure_skip_verify(self): ... - @insecure_skip_verify.setter - def insecure_skip_verify(self, insecure_skip_verify) -> None: ... - @property - def telegraf(self): ... - @telegraf.setter - def telegraf(self, telegraf) -> None: ... - @property - def token(self): ... - @token.setter - def token(self, token) -> None: ... - @property - def username(self): ... - @username.setter - def username(self, username) -> None: ... - @property - def password(self): ... - @password.setter - def password(self, password) -> None: ... - @property - def shared_secret(self): ... - @shared_secret.setter - def shared_secret(self, shared_secret) -> None: ... - @property - def meta_url(self): ... - @meta_url.setter - def meta_url(self, meta_url) -> None: ... - @property - def default_rp(self): ... - @default_rp.setter - def default_rp(self, default_rp) -> None: ... - @property - def languages(self): ... - @languages.setter - def languages(self, languages) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/source_links.pyi b/stubs/influxdb-client/influxdb_client/domain/source_links.pyi deleted file mode 100644 index ae4afc68c2a4..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/source_links.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from _typeshed import Incomplete - -class SourceLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - _self: Incomplete | None = None, - query: Incomplete | None = None, - health: Incomplete | None = None, - buckets: Incomplete | None = None, - ) -> None: ... - @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - @property - def health(self): ... - @health.setter - def health(self, health) -> None: ... - @property - def buckets(self): ... - @buckets.setter - def buckets(self, buckets) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/sources.pyi b/stubs/influxdb-client/influxdb_client/domain/sources.pyi deleted file mode 100644 index 56eb1cfa8943..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/sources.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Sources: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, sources: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def sources(self): ... - @sources.setter - def sources(self, sources) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/stack.pyi b/stubs/influxdb-client/influxdb_client/domain/stack.pyi deleted file mode 100644 index 4a80edaeeb4d..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/stack.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class Stack: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - created_at: Incomplete | None = None, - events: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def events(self): ... - @events.setter - def events(self, events) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/stack_associations.pyi b/stubs/influxdb-client/influxdb_client/domain/stack_associations.pyi deleted file mode 100644 index 9baa40125c1a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/stack_associations.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class StackAssociations: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, kind: Incomplete | None = None, meta_name: Incomplete | None = None) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... 
- @property - def meta_name(self): ... - @meta_name.setter - def meta_name(self, meta_name) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/stack_events.pyi b/stubs/influxdb-client/influxdb_client/domain/stack_events.pyi deleted file mode 100644 index 3385249165c7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/stack_events.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class StackEvents: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - event_type: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - sources: Incomplete | None = None, - resources: Incomplete | None = None, - urls: Incomplete | None = None, - updated_at: Incomplete | None = None, - ) -> None: ... - @property - def event_type(self): ... - @event_type.setter - def event_type(self, event_type) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def sources(self): ... - @sources.setter - def sources(self, sources) -> None: ... - @property - def resources(self): ... - @resources.setter - def resources(self, resources) -> None: ... - @property - def urls(self): ... - @urls.setter - def urls(self, urls) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/stack_links.pyi b/stubs/influxdb-client/influxdb_client/domain/stack_links.pyi deleted file mode 100644 index edd51b386443..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/stack_links.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from _typeshed import Incomplete - -class StackLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, _self: Incomplete | None = None) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/stack_resources.pyi b/stubs/influxdb-client/influxdb_client/domain/stack_resources.pyi deleted file mode 100644 index c3ed860f67df..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/stack_resources.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class StackResources: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - api_version: Incomplete | None = None, - resource_id: Incomplete | None = None, - kind: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - associations: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def api_version(self): ... - @api_version.setter - def api_version(self, api_version) -> None: ... - @property - def resource_id(self): ... - @resource_id.setter - def resource_id(self, resource_id) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def template_meta_name(self): ... 
- @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def associations(self): ... - @associations.setter - def associations(self, associations) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/statement.pyi b/stubs/influxdb-client/influxdb_client/domain/statement.pyi deleted file mode 100644 index 789212dbcd77..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/statement.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from _typeshed import Incomplete - -class Statement: - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/static_legend.pyi b/stubs/influxdb-client/influxdb_client/domain/static_legend.pyi deleted file mode 100644 index 485dea5772ee..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/static_legend.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class StaticLegend: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - colorize_rows: Incomplete | None = None, - height_ratio: Incomplete | None = None, - show: Incomplete | None = None, - opacity: Incomplete | None = None, - orientation_threshold: Incomplete | None = None, - value_axis: Incomplete | None = None, - width_ratio: Incomplete | None = None, - ) -> None: ... - @property - def colorize_rows(self): ... - @colorize_rows.setter - def colorize_rows(self, colorize_rows) -> None: ... - @property - def height_ratio(self): ... - @height_ratio.setter - def height_ratio(self, height_ratio) -> None: ... - @property - def show(self): ... - @show.setter - def show(self, show) -> None: ... - @property - def opacity(self): ... - @opacity.setter - def opacity(self, opacity) -> None: ... - @property - def orientation_threshold(self): ... - @orientation_threshold.setter - def orientation_threshold(self, orientation_threshold) -> None: ... - @property - def value_axis(self): ... - @value_axis.setter - def value_axis(self, value_axis) -> None: ... - @property - def width_ratio(self): ... - @width_ratio.setter - def width_ratio(self, width_ratio) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/status_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/status_rule.pyi deleted file mode 100644 index f8364cb4660b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/status_rule.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class StatusRule: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - current_level: Incomplete | None = None, - previous_level: Incomplete | None = None, - count: Incomplete | None = None, - period: Incomplete | None = None, - ) -> None: ... - @property - def current_level(self): ... - @current_level.setter - def current_level(self, current_level) -> None: ... - @property - def previous_level(self): ... - @previous_level.setter - def previous_level(self, previous_level) -> None: ... - @property - def count(self): ... 
- @count.setter - def count(self, count) -> None: ... - @property - def period(self): ... - @period.setter - def period(self, period) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/string_literal.pyi b/stubs/influxdb-client/influxdb_client/domain/string_literal.pyi deleted file mode 100644 index 3aeecf0eb0ea..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/string_literal.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.property_key import PropertyKey - -class StringLiteral(PropertyKey): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, value: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/subscription_manifest.pyi b/stubs/influxdb-client/influxdb_client/domain/subscription_manifest.pyi deleted file mode 100644 index 18c35652ce36..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/subscription_manifest.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class SubscriptionManifest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, mode: Incomplete | None = None, destinations: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def mode(self): ... - @mode.setter - def mode(self, mode) -> None: ... - @property - def destinations(self): ... - @destinations.setter - def destinations(self, destinations) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/table_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/table_view_properties.pyi deleted file mode 100644 index 7a68faa84199..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/table_view_properties.pyi +++ /dev/null @@ -1,65 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class TableViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - table_options: Incomplete | None = None, - field_options: Incomplete | None = None, - time_format: Incomplete | None = None, - decimal_places: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... 
- @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def table_options(self): ... - @table_options.setter - def table_options(self, table_options) -> None: ... - @property - def field_options(self): ... - @field_options.setter - def field_options(self, field_options) -> None: ... - @property - def time_format(self): ... - @time_format.setter - def time_format(self, time_format) -> None: ... - @property - def decimal_places(self): ... - @decimal_places.setter - def decimal_places(self, decimal_places) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/table_view_properties_table_options.pyi b/stubs/influxdb-client/influxdb_client/domain/table_view_properties_table_options.pyi deleted file mode 100644 index b1be53b9c15a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/table_view_properties_table_options.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class TableViewPropertiesTableOptions: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - vertical_time_axis: Incomplete | None = None, - sort_by: Incomplete | None = None, - wrapping: Incomplete | None = None, - fix_first_column: Incomplete | None = None, - ) -> None: ... - @property - def vertical_time_axis(self): ... - @vertical_time_axis.setter - def vertical_time_axis(self, vertical_time_axis) -> None: ... - @property - def sort_by(self): ... - @sort_by.setter - def sort_by(self, sort_by) -> None: ... - @property - def wrapping(self): ... - @wrapping.setter - def wrapping(self, wrapping) -> None: ... - @property - def fix_first_column(self): ... - @fix_first_column.setter - def fix_first_column(self, fix_first_column) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/tag_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/tag_rule.pyi deleted file mode 100644 index 5b00cd713417..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/tag_rule.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TagRule: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, key: Incomplete | None = None, value: Incomplete | None = None, operator: Incomplete | None = None - ) -> None: ... - @property - def key(self): ... - @key.setter - def key(self, key) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - @property - def operator(self): ... - @operator.setter - def operator(self, operator) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/task.pyi b/stubs/influxdb-client/influxdb_client/domain/task.pyi deleted file mode 100644 index 1d0d6310286d..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/task.pyi +++ /dev/null @@ -1,108 +0,0 @@ -from _typeshed import Incomplete - -class Task: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: str | None = None, - org_id: str | None = None, - org: Incomplete | None = None, - name: Incomplete | None = None, - owner_id: Incomplete | None = None, - description: Incomplete | None = None, - status: Incomplete | None = None, - labels: Incomplete | None = None, - authorization_id: Incomplete | None = None, - flux: Incomplete | None = None, - every: Incomplete | None = None, - cron: Incomplete | None = None, - offset: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def id(self) -> str | None: ... - @id.setter - def id(self, id: str) -> None: ... - @property - def org_id(self) -> str | None: ... - @org_id.setter - def org_id(self, org_id: str) -> None: ... - @property - def org(self): ... - @org.setter - def org(self, org) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def owner_id(self): ... - @owner_id.setter - def owner_id(self, owner_id) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def authorization_id(self): ... - @authorization_id.setter - def authorization_id(self, authorization_id) -> None: ... - @property - def flux(self): ... - @flux.setter - def flux(self, flux) -> None: ... - @property - def every(self): ... - @every.setter - def every(self, every) -> None: ... - @property - def cron(self): ... - @cron.setter - def cron(self, cron) -> None: ... - @property - def offset(self): ... - @offset.setter - def offset(self, offset) -> None: ... - @property - def latest_completed(self): ... - @latest_completed.setter - def latest_completed(self, latest_completed) -> None: ... - @property - def last_run_status(self): ... - @last_run_status.setter - def last_run_status(self, last_run_status) -> None: ... - @property - def last_run_error(self): ... - @last_run_error.setter - def last_run_error(self, last_run_error) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/task_create_request.pyi b/stubs/influxdb-client/influxdb_client/domain/task_create_request.pyi deleted file mode 100644 index bc531fae3a8e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/task_create_request.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class TaskCreateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - org_id: Incomplete | None = None, - org: Incomplete | None = None, - status: Incomplete | None = None, - flux: Incomplete | None = None, - description: Incomplete | None = None, - ) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def org(self): ... - @org.setter - def org(self, org) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def flux(self): ... - @flux.setter - def flux(self, flux) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/task_links.pyi b/stubs/influxdb-client/influxdb_client/domain/task_links.pyi deleted file mode 100644 index 3fff3d7e1b94..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/task_links.pyi +++ /dev/null @@ -1,39 +0,0 @@ -from _typeshed import Incomplete - -class TaskLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - _self: Incomplete | None = None, - owners: Incomplete | None = None, - members: Incomplete | None = None, - runs: Incomplete | None = None, - logs: Incomplete | None = None, - labels: Incomplete | None = None, - ) -> None: ... - @property - def owners(self): ... - @owners.setter - def owners(self, owners) -> None: ... - @property - def members(self): ... - @members.setter - def members(self, members) -> None: ... - @property - def runs(self): ... - @runs.setter - def runs(self, runs) -> None: ... - @property - def logs(self): ... - @logs.setter - def logs(self, logs) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/task_status_type.pyi b/stubs/influxdb-client/influxdb_client/domain/task_status_type.pyi deleted file mode 100644 index 936a3e722421..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/task_status_type.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from _typeshed import Incomplete - -class TaskStatusType: - ACTIVE: str - INACTIVE: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/task_update_request.pyi b/stubs/influxdb-client/influxdb_client/domain/task_update_request.pyi deleted file mode 100644 index 3e18b11f9138..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/task_update_request.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class TaskUpdateRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - status: Incomplete | None = None, - flux: Incomplete | None = None, - name: Incomplete | None = None, - every: Incomplete | None = None, - cron: Incomplete | None = None, - offset: Incomplete | None = None, - description: Incomplete | None = None, - ) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def flux(self): ... - @flux.setter - def flux(self, flux) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def every(self): ... - @every.setter - def every(self, every) -> None: ... - @property - def cron(self): ... - @cron.setter - def cron(self, cron) -> None: ... - @property - def offset(self): ... - @offset.setter - def offset(self, offset) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/tasks.pyi b/stubs/influxdb-client/influxdb_client/domain/tasks.pyi deleted file mode 100644 index 4be54c6e4e3e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/tasks.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Any, ClassVar - -from influxdb_client.domain.links import Links -from influxdb_client.domain.task import Task - -class Tasks: - openapi_types: ClassVar[dict[str, str]] - attribute_map: ClassVar[dict[str, str]] - - tasks: list[Task] - links: Links - discriminator: None - def __init__(self, links: Links | None = None, tasks: list[Task] | None = None) -> None: ... - def to_dict(self) -> dict[str, Any]: ... - def to_str(self) -> str: ... - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... diff --git a/stubs/influxdb-client/influxdb_client/domain/telegraf.pyi b/stubs/influxdb-client/influxdb_client/domain/telegraf.pyi deleted file mode 100644 index d7c7a14c9131..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegraf.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.telegraf_request import TelegrafRequest - -class Telegraf(TelegrafRequest): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - links: Incomplete | None = None, - labels: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - metadata: Incomplete | None = None, - config: Incomplete | None = None, - org_id: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin.pyi b/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin.pyi deleted file mode 100644 index eacf2afa6a56..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class TelegrafPlugin: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - config: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def config(self): ... - @config.setter - def config(self, config) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request.pyi b/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request.pyi deleted file mode 100644 index 6457074f2a1e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TelegrafPluginRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - description: Incomplete | None = None, - plugins: Incomplete | None = None, - metadata: Incomplete | None = None, - config: Incomplete | None = None, - org_id: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def plugins(self): ... - @plugins.setter - def plugins(self, plugins) -> None: ... - @property - def metadata(self): ... - @metadata.setter - def metadata(self, metadata) -> None: ... - @property - def config(self): ... - @config.setter - def config(self, config) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request_plugins.pyi b/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request_plugins.pyi deleted file mode 100644 index 2805f5807e9f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request_plugins.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class TelegrafPluginRequestPlugins: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - name: Incomplete | None = None, - alias: Incomplete | None = None, - description: Incomplete | None = None, - config: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def alias(self): ... - @alias.setter - def alias(self, alias) -> None: ... 
- @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def config(self): ... - @config.setter - def config(self, config) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/telegraf_plugins.pyi b/stubs/influxdb-client/influxdb_client/domain/telegraf_plugins.pyi deleted file mode 100644 index 5b5b0663ffbc..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegraf_plugins.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TelegrafPlugins: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, version: Incomplete | None = None, os: Incomplete | None = None, plugins: Incomplete | None = None - ) -> None: ... - @property - def version(self): ... - @version.setter - def version(self, version) -> None: ... - @property - def os(self): ... - @os.setter - def os(self, os) -> None: ... - @property - def plugins(self): ... - @plugins.setter - def plugins(self, plugins) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/telegraf_request.pyi b/stubs/influxdb-client/influxdb_client/domain/telegraf_request.pyi deleted file mode 100644 index 7a76be4c6d22..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegraf_request.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class TelegrafRequest: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - description: Incomplete | None = None, - metadata: Incomplete | None = None, - config: Incomplete | None = None, - org_id: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def metadata(self): ... - @metadata.setter - def metadata(self, metadata) -> None: ... - @property - def config(self): ... - @config.setter - def config(self, config) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/telegraf_request_metadata.pyi b/stubs/influxdb-client/influxdb_client/domain/telegraf_request_metadata.pyi deleted file mode 100644 index 5a770c1302de..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegraf_request_metadata.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class TelegrafRequestMetadata: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, buckets: Incomplete | None = None) -> None: ... - @property - def buckets(self): ... - @buckets.setter - def buckets(self, buckets) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/telegrafs.pyi b/stubs/influxdb-client/influxdb_client/domain/telegrafs.pyi deleted file mode 100644 index 0ebdc56e69f8..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegrafs.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class Telegrafs: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, configurations: Incomplete | None = None) -> None: ... - @property - def configurations(self): ... - @configurations.setter - def configurations(self, configurations) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/telegram_notification_endpoint.pyi b/stubs/influxdb-client/influxdb_client/domain/telegram_notification_endpoint.pyi deleted file mode 100644 index c0d2714d047b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegram_notification_endpoint.pyi +++ /dev/null @@ -1,36 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator - -class TelegramNotificationEndpoint(NotificationEndpointDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - token: Incomplete | None = None, - channel: Incomplete | None = None, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - user_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - description: Incomplete | None = None, - name: Incomplete | None = None, - status: str = "active", - labels: Incomplete | None = None, - links: Incomplete | None = None, - type: str = "telegram", - ) -> None: ... - @property - def token(self): ... - @token.setter - def token(self, token) -> None: ... - @property - def channel(self): ... - @channel.setter - def channel(self, channel) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule.pyi b/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule.pyi deleted file mode 100644 index 978f0429a981..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule.pyi +++ /dev/null @@ -1,42 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.telegram_notification_rule_base import TelegramNotificationRuleBase - -class TelegramNotificationRule(TelegramNotificationRuleBase): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "telegram", - message_template: Incomplete | None = None, - parse_mode: Incomplete | None = None, - disable_web_page_preview: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule_base.pyi b/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule_base.pyi deleted file mode 100644 index d0171b9af3ca..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule_base.pyi +++ /dev/null @@ -1,58 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator - -class TelegramNotificationRuleBase(NotificationRuleDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: Incomplete | None = None, - message_template: Incomplete | None = None, - parse_mode: Incomplete | None = None, - disable_web_page_preview: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - id: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - status: Incomplete | None = None, - name: Incomplete | None = None, - sleep_until: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - runbook_link: Incomplete | None = None, - limit_every: Incomplete | None = None, - limit: Incomplete | None = None, - tag_rules: Incomplete | None = None, - description: Incomplete | None = None, - status_rules: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def message_template(self): ... - @message_template.setter - def message_template(self, message_template) -> None: ... - @property - def parse_mode(self): ... - @parse_mode.setter - def parse_mode(self, parse_mode) -> None: ... - @property - def disable_web_page_preview(self): ... - @disable_web_page_preview.setter - def disable_web_page_preview(self, disable_web_page_preview) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_apply.pyi b/stubs/influxdb-client/influxdb_client/domain/template_apply.pyi deleted file mode 100644 index 125192bb9588..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_apply.pyi +++ /dev/null @@ -1,58 +0,0 @@ -from _typeshed import Incomplete - -class TemplateApply: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - dry_run: Incomplete | None = None, - org_id: Incomplete | None = None, - stack_id: Incomplete | None = None, - template: Incomplete | None = None, - templates: Incomplete | None = None, - env_refs: Incomplete | None = None, - secrets: Incomplete | None = None, - remotes: Incomplete | None = None, - actions: Incomplete | None = None, - ) -> None: ... - @property - def dry_run(self): ... - @dry_run.setter - def dry_run(self, dry_run) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def stack_id(self): ... - @stack_id.setter - def stack_id(self, stack_id) -> None: ... - @property - def template(self): ... 
- @template.setter - def template(self, template) -> None: ... - @property - def templates(self): ... - @templates.setter - def templates(self, templates) -> None: ... - @property - def env_refs(self): ... - @env_refs.setter - def env_refs(self, env_refs) -> None: ... - @property - def secrets(self): ... - @secrets.setter - def secrets(self, secrets) -> None: ... - @property - def remotes(self): ... - @remotes.setter - def remotes(self, remotes) -> None: ... - @property - def actions(self): ... - @actions.setter - def actions(self, actions) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_apply_remotes.pyi b/stubs/influxdb-client/influxdb_client/domain/template_apply_remotes.pyi deleted file mode 100644 index d37907b002fd..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_apply_remotes.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class TemplateApplyRemotes: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, url: Incomplete | None = None, content_type: Incomplete | None = None) -> None: ... - @property - def url(self): ... - @url.setter - def url(self, url) -> None: ... - @property - def content_type(self): ... - @content_type.setter - def content_type(self, content_type) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_apply_template.pyi b/stubs/influxdb-client/influxdb_client/domain/template_apply_template.pyi deleted file mode 100644 index 8bffdcbbf108..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_apply_template.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TemplateApplyTemplate: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, content_type: Incomplete | None = None, sources: Incomplete | None = None, contents: Incomplete | None = None - ) -> None: ... - @property - def content_type(self): ... - @content_type.setter - def content_type(self, content_type) -> None: ... - @property - def sources(self): ... - @sources.setter - def sources(self, sources) -> None: ... - @property - def contents(self): ... - @contents.setter - def contents(self, contents) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_chart.pyi b/stubs/influxdb-client/influxdb_client/domain/template_chart.pyi deleted file mode 100644 index cc7fdcb26af8..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_chart.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class TemplateChart: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - x_pos: Incomplete | None = None, - y_pos: Incomplete | None = None, - height: Incomplete | None = None, - width: Incomplete | None = None, - properties: Incomplete | None = None, - ) -> None: ... - @property - def x_pos(self): ... - @x_pos.setter - def x_pos(self, x_pos) -> None: ... - @property - def y_pos(self): ... - @y_pos.setter - def y_pos(self, y_pos) -> None: ... - @property - def height(self): ... - @height.setter - def height(self, height) -> None: ... 
- @property - def width(self): ... - @width.setter - def width(self, width) -> None: ... - @property - def properties(self): ... - @properties.setter - def properties(self, properties) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_export_by_id.pyi b/stubs/influxdb-client/influxdb_client/domain/template_export_by_id.pyi deleted file mode 100644 index e41d39828549..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_export_by_id.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TemplateExportByID: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, stack_id: Incomplete | None = None, org_ids: Incomplete | None = None, resources: Incomplete | None = None - ) -> None: ... - @property - def stack_id(self): ... - @stack_id.setter - def stack_id(self, stack_id) -> None: ... - @property - def org_ids(self): ... - @org_ids.setter - def org_ids(self, org_ids) -> None: ... - @property - def resources(self): ... - @resources.setter - def resources(self, resources) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_org_ids.pyi b/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_org_ids.pyi deleted file mode 100644 index a85b358707d1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_org_ids.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class TemplateExportByIDOrgIDs: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, org_id: Incomplete | None = None, resource_filters: Incomplete | None = None) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def resource_filters(self): ... - @resource_filters.setter - def resource_filters(self, resource_filters) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resource_filters.pyi b/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resource_filters.pyi deleted file mode 100644 index 067e27aaaa11..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resource_filters.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class TemplateExportByIDResourceFilters: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, by_label: Incomplete | None = None, by_resource_kind: Incomplete | None = None) -> None: ... - @property - def by_label(self): ... - @by_label.setter - def by_label(self, by_label) -> None: ... - @property - def by_resource_kind(self): ... - @by_resource_kind.setter - def by_resource_kind(self, by_resource_kind) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resources.pyi b/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resources.pyi deleted file mode 100644 index 431f2434f6fc..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resources.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from _typeshed import Incomplete - -class TemplateExportByIDResources: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, id: Incomplete | None = None, kind: Incomplete | None = None, name: Incomplete | None = None) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_export_by_name.pyi b/stubs/influxdb-client/influxdb_client/domain/template_export_by_name.pyi deleted file mode 100644 index 930d8beb0293..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_export_by_name.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TemplateExportByName: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, stack_id: Incomplete | None = None, org_ids: Incomplete | None = None, resources: Incomplete | None = None - ) -> None: ... - @property - def stack_id(self): ... - @stack_id.setter - def stack_id(self, stack_id) -> None: ... - @property - def org_ids(self): ... - @org_ids.setter - def org_ids(self, org_ids) -> None: ... - @property - def resources(self): ... - @resources.setter - def resources(self, resources) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_export_by_name_resources.pyi b/stubs/influxdb-client/influxdb_client/domain/template_export_by_name_resources.pyi deleted file mode 100644 index d5b8892a5005..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_export_by_name_resources.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class TemplateExportByNameResources: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, kind: Incomplete | None = None, name: Incomplete | None = None) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/template_kind.pyi b/stubs/influxdb-client/influxdb_client/domain/template_kind.pyi deleted file mode 100644 index cb375c1c55f3..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_kind.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from _typeshed import Incomplete - -class TemplateKind: - BUCKET: str - CHECK: str - CHECKDEADMAN: str - CHECKTHRESHOLD: str - DASHBOARD: str - LABEL: str - NOTIFICATIONENDPOINT: str - NOTIFICATIONENDPOINTHTTP: str - NOTIFICATIONENDPOINTPAGERDUTY: str - NOTIFICATIONENDPOINTSLACK: str - NOTIFICATIONRULE: str - TASK: str - TELEGRAF: str - VARIABLE: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary.pyi deleted file mode 100644 index 4f11359ab228..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummary: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - sources: Incomplete | None = None, - stack_id: Incomplete | None = None, - summary: Incomplete | None = None, - diff: Incomplete | None = None, - errors: Incomplete | None = None, - ) -> None: ... - @property - def sources(self): ... - @sources.setter - def sources(self, sources) -> None: ... - @property - def stack_id(self): ... - @stack_id.setter - def stack_id(self, stack_id) -> None: ... - @property - def summary(self): ... - @summary.setter - def summary(self, summary) -> None: ... - @property - def diff(self): ... - @diff.setter - def diff(self, diff) -> None: ... - @property - def errors(self): ... - @errors.setter - def errors(self, errors) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff.pyi deleted file mode 100644 index 253ed22e9a7a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff.pyi +++ /dev/null @@ -1,63 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiff: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - buckets: Incomplete | None = None, - checks: Incomplete | None = None, - dashboards: Incomplete | None = None, - labels: Incomplete | None = None, - label_mappings: Incomplete | None = None, - notification_endpoints: Incomplete | None = None, - notification_rules: Incomplete | None = None, - tasks: Incomplete | None = None, - telegraf_configs: Incomplete | None = None, - variables: Incomplete | None = None, - ) -> None: ... - @property - def buckets(self): ... - @buckets.setter - def buckets(self, buckets) -> None: ... - @property - def checks(self): ... - @checks.setter - def checks(self, checks) -> None: ... - @property - def dashboards(self): ... - @dashboards.setter - def dashboards(self, dashboards) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def label_mappings(self): ... - @label_mappings.setter - def label_mappings(self, label_mappings) -> None: ... 
- @property - def notification_endpoints(self): ... - @notification_endpoints.setter - def notification_endpoints(self, notification_endpoints) -> None: ... - @property - def notification_rules(self): ... - @notification_rules.setter - def notification_rules(self, notification_rules) -> None: ... - @property - def tasks(self): ... - @tasks.setter - def tasks(self, tasks) -> None: ... - @property - def telegraf_configs(self): ... - @telegraf_configs.setter - def telegraf_configs(self, telegraf_configs) -> None: ... - @property - def variables(self): ... - @variables.setter - def variables(self, variables) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets.pyi deleted file mode 100644 index d00432fc2c99..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffBuckets: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - state_status: Incomplete | None = None, - id: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - new: Incomplete | None = None, - old: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def state_status(self): ... - @state_status.setter - def state_status(self, state_status) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def new(self): ... - @new.setter - def new(self, new) -> None: ... - @property - def old(self): ... - @old.setter - def old(self, old) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets_new_old.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets_new_old.pyi deleted file mode 100644 index 7be2bc1ba112..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets_new_old.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffBucketsNewOld: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, description: Incomplete | None = None, retention_rules: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def retention_rules(self): ... - @retention_rules.setter - def retention_rules(self, retention_rules) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_checks.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_checks.pyi deleted file mode 100644 index de592b55220d..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_checks.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffChecks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - state_status: Incomplete | None = None, - id: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - new: Incomplete | None = None, - old: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def state_status(self): ... - @state_status.setter - def state_status(self, state_status) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def new(self): ... - @new.setter - def new(self, new) -> None: ... - @property - def old(self): ... - @old.setter - def old(self, old) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards.pyi deleted file mode 100644 index 7ec3dd4d354a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffDashboards: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - state_status: Incomplete | None = None, - id: Incomplete | None = None, - kind: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - new: Incomplete | None = None, - old: Incomplete | None = None, - ) -> None: ... - @property - def state_status(self): ... - @state_status.setter - def state_status(self, state_status) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def new(self): ... - @new.setter - def new(self, new) -> None: ... - @property - def old(self): ... - @old.setter - def old(self, old) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards_new_old.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards_new_old.pyi deleted file mode 100644 index 2f3ec4a94b2a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards_new_old.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffDashboardsNewOld: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, description: Incomplete | None = None, charts: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def charts(self): ... - @charts.setter - def charts(self, charts) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_label_mappings.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_label_mappings.pyi deleted file mode 100644 index 2f6c3b1e80c2..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_label_mappings.pyi +++ /dev/null @@ -1,53 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffLabelMappings: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - status: Incomplete | None = None, - resource_type: Incomplete | None = None, - resource_id: Incomplete | None = None, - resource_template_meta_name: Incomplete | None = None, - resource_name: Incomplete | None = None, - label_id: Incomplete | None = None, - label_template_meta_name: Incomplete | None = None, - label_name: Incomplete | None = None, - ) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def resource_type(self): ... - @resource_type.setter - def resource_type(self, resource_type) -> None: ... - @property - def resource_id(self): ... - @resource_id.setter - def resource_id(self, resource_id) -> None: ... - @property - def resource_template_meta_name(self): ... - @resource_template_meta_name.setter - def resource_template_meta_name(self, resource_template_meta_name) -> None: ... - @property - def resource_name(self): ... - @resource_name.setter - def resource_name(self, resource_name) -> None: ... - @property - def label_id(self): ... - @label_id.setter - def label_id(self, label_id) -> None: ... - @property - def label_template_meta_name(self): ... - @label_template_meta_name.setter - def label_template_meta_name(self, label_template_meta_name) -> None: ... - @property - def label_name(self): ... - @label_name.setter - def label_name(self, label_name) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels.pyi deleted file mode 100644 index aece37978acc..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffLabels: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - state_status: Incomplete | None = None, - kind: Incomplete | None = None, - id: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - new: Incomplete | None = None, - old: Incomplete | None = None, - ) -> None: ... - @property - def state_status(self): ... - @state_status.setter - def state_status(self, state_status) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def new(self): ... - @new.setter - def new(self, new) -> None: ... - @property - def old(self): ... - @old.setter - def old(self, old) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels_new_old.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels_new_old.pyi deleted file mode 100644 index a51f3ea5aa4e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels_new_old.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffLabelsNewOld: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, color: Incomplete | None = None, description: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def color(self): ... - @color.setter - def color(self, color) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_endpoints.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_endpoints.pyi deleted file mode 100644 index e4a6be4cf220..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_endpoints.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffNotificationEndpoints: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - state_status: Incomplete | None = None, - id: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - new: Incomplete | None = None, - old: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def state_status(self): ... - @state_status.setter - def state_status(self, state_status) -> None: ... 
- @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def new(self): ... - @new.setter - def new(self, new) -> None: ... - @property - def old(self): ... - @old.setter - def old(self, old) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules.pyi deleted file mode 100644 index c552c0673837..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffNotificationRules: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - state_status: Incomplete | None = None, - id: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - new: Incomplete | None = None, - old: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def state_status(self): ... - @state_status.setter - def state_status(self, state_status) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def new(self): ... - @new.setter - def new(self, new) -> None: ... - @property - def old(self): ... - @old.setter - def old(self, old) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules_new_old.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules_new_old.pyi deleted file mode 100644 index 3c4755932ef4..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules_new_old.pyi +++ /dev/null @@ -1,68 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffNotificationRulesNewOld: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - description: Incomplete | None = None, - endpoint_name: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - endpoint_type: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - message_template: Incomplete | None = None, - status: Incomplete | None = None, - status_rules: Incomplete | None = None, - tag_rules: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def endpoint_name(self): ... - @endpoint_name.setter - def endpoint_name(self, endpoint_name) -> None: ... - @property - def endpoint_id(self): ... - @endpoint_id.setter - def endpoint_id(self, endpoint_id) -> None: ... - @property - def endpoint_type(self): ... 
- @endpoint_type.setter - def endpoint_type(self, endpoint_type) -> None: ... - @property - def every(self): ... - @every.setter - def every(self, every) -> None: ... - @property - def offset(self): ... - @offset.setter - def offset(self, offset) -> None: ... - @property - def message_template(self): ... - @message_template.setter - def message_template(self, message_template) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def status_rules(self): ... - @status_rules.setter - def status_rules(self, status_rules) -> None: ... - @property - def tag_rules(self): ... - @tag_rules.setter - def tag_rules(self, tag_rules) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks.pyi deleted file mode 100644 index a57fc9c4c286..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffTasks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - state_status: Incomplete | None = None, - id: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - new: Incomplete | None = None, - old: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def state_status(self): ... - @state_status.setter - def state_status(self, state_status) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def new(self): ... - @new.setter - def new(self, new) -> None: ... - @property - def old(self): ... - @old.setter - def old(self, old) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks_new_old.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks_new_old.pyi deleted file mode 100644 index cd940e0953d7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks_new_old.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffTasksNewOld: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - name: Incomplete | None = None, - cron: Incomplete | None = None, - description: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - query: Incomplete | None = None, - status: Incomplete | None = None, - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def cron(self): ... - @cron.setter - def cron(self, cron) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def every(self): ... - @every.setter - def every(self, every) -> None: ... - @property - def offset(self): ... - @offset.setter - def offset(self, offset) -> None: ... 
- @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_telegraf_configs.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_telegraf_configs.pyi deleted file mode 100644 index e414f2caebaf..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_telegraf_configs.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffTelegrafConfigs: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - state_status: Incomplete | None = None, - id: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - new: Incomplete | None = None, - old: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def state_status(self): ... - @state_status.setter - def state_status(self, state_status) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def new(self): ... - @new.setter - def new(self, new) -> None: ... - @property - def old(self): ... - @old.setter - def old(self, old) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables.pyi deleted file mode 100644 index de68d83bf1fd..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffVariables: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - state_status: Incomplete | None = None, - id: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - new: Incomplete | None = None, - old: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def state_status(self): ... - @state_status.setter - def state_status(self, state_status) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def new(self): ... - @new.setter - def new(self, new) -> None: ... - @property - def old(self): ... - @old.setter - def old(self, old) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables_new_old.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables_new_old.pyi deleted file mode 100644 index fca9b663e698..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables_new_old.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryDiffVariablesNewOld: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, name: Incomplete | None = None, description: Incomplete | None = None, args: Incomplete | None = None - ) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def args(self): ... - @args.setter - def args(self, args) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_errors.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_errors.pyi deleted file mode 100644 index 056464795b0c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_errors.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryErrors: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - reason: Incomplete | None = None, - fields: Incomplete | None = None, - indexes: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def reason(self): ... - @reason.setter - def reason(self, reason) -> None: ... - @property - def fields(self): ... - @fields.setter - def fields(self, fields) -> None: ... - @property - def indexes(self): ... - @indexes.setter - def indexes(self, indexes) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_label.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_label.pyi deleted file mode 100644 index 9ffa1b1c0693..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_label.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryLabel: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - kind: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - name: Incomplete | None = None, - properties: Incomplete | None = None, - env_references: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def properties(self): ... 
- @properties.setter - def properties(self, properties) -> None: ... - @property - def env_references(self): ... - @env_references.setter - def env_references(self, env_references) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_label_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_label_properties.pyi deleted file mode 100644 index a77f76c5a121..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_label_properties.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummaryLabelProperties: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, color: Incomplete | None = None, description: Incomplete | None = None) -> None: ... - @property - def color(self): ... - @color.setter - def color(self, color) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_summary.pyi deleted file mode 100644 index 589729bda0be..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary.pyi +++ /dev/null @@ -1,73 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummarySummary: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - buckets: Incomplete | None = None, - checks: Incomplete | None = None, - dashboards: Incomplete | None = None, - labels: Incomplete | None = None, - label_mappings: Incomplete | None = None, - missing_env_refs: Incomplete | None = None, - missing_secrets: Incomplete | None = None, - notification_endpoints: Incomplete | None = None, - notification_rules: Incomplete | None = None, - tasks: Incomplete | None = None, - telegraf_configs: Incomplete | None = None, - variables: Incomplete | None = None, - ) -> None: ... - @property - def buckets(self): ... - @buckets.setter - def buckets(self, buckets) -> None: ... - @property - def checks(self): ... - @checks.setter - def checks(self, checks) -> None: ... - @property - def dashboards(self): ... - @dashboards.setter - def dashboards(self, dashboards) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def label_mappings(self): ... - @label_mappings.setter - def label_mappings(self, label_mappings) -> None: ... - @property - def missing_env_refs(self): ... - @missing_env_refs.setter - def missing_env_refs(self, missing_env_refs) -> None: ... - @property - def missing_secrets(self): ... - @missing_secrets.setter - def missing_secrets(self, missing_secrets) -> None: ... - @property - def notification_endpoints(self): ... - @notification_endpoints.setter - def notification_endpoints(self, notification_endpoints) -> None: ... - @property - def notification_rules(self): ... - @notification_rules.setter - def notification_rules(self, notification_rules) -> None: ... - @property - def tasks(self): ... - @tasks.setter - def tasks(self, tasks) -> None: ... - @property - def telegraf_configs(self): ... 
- @telegraf_configs.setter - def telegraf_configs(self, telegraf_configs) -> None: ... - @property - def variables(self): ... - @variables.setter - def variables(self, variables) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_buckets.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_buckets.pyi deleted file mode 100644 index 60ca17aa3881..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_buckets.pyi +++ /dev/null @@ -1,58 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummarySummaryBuckets: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - kind: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - retention_period: Incomplete | None = None, - label_associations: Incomplete | None = None, - env_references: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def retention_period(self): ... - @retention_period.setter - def retention_period(self, retention_period) -> None: ... - @property - def label_associations(self): ... - @label_associations.setter - def label_associations(self, label_associations) -> None: ... - @property - def env_references(self): ... - @env_references.setter - def env_references(self, env_references) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_dashboards.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_dashboards.pyi deleted file mode 100644 index f5580b0deff6..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_dashboards.pyi +++ /dev/null @@ -1,58 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummarySummaryDashboards: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - kind: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - label_associations: Incomplete | None = None, - charts: Incomplete | None = None, - env_references: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def template_meta_name(self): ... 
- @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def label_associations(self): ... - @label_associations.setter - def label_associations(self, label_associations) -> None: ... - @property - def charts(self): ... - @charts.setter - def charts(self, charts) -> None: ... - @property - def env_references(self): ... - @env_references.setter - def env_references(self, env_references) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_label_mappings.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_label_mappings.pyi deleted file mode 100644 index ccf9764165f1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_label_mappings.pyi +++ /dev/null @@ -1,53 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummarySummaryLabelMappings: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - status: Incomplete | None = None, - resource_template_meta_name: Incomplete | None = None, - resource_name: Incomplete | None = None, - resource_id: Incomplete | None = None, - resource_type: Incomplete | None = None, - label_template_meta_name: Incomplete | None = None, - label_name: Incomplete | None = None, - label_id: Incomplete | None = None, - ) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def resource_template_meta_name(self): ... - @resource_template_meta_name.setter - def resource_template_meta_name(self, resource_template_meta_name) -> None: ... - @property - def resource_name(self): ... - @resource_name.setter - def resource_name(self, resource_name) -> None: ... - @property - def resource_id(self): ... - @resource_id.setter - def resource_id(self, resource_id) -> None: ... - @property - def resource_type(self): ... - @resource_type.setter - def resource_type(self, resource_type) -> None: ... - @property - def label_template_meta_name(self): ... - @label_template_meta_name.setter - def label_template_meta_name(self, label_template_meta_name) -> None: ... - @property - def label_name(self): ... - @label_name.setter - def label_name(self, label_name) -> None: ... - @property - def label_id(self): ... - @label_id.setter - def label_id(self, label_id) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_notification_rules.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_notification_rules.pyi deleted file mode 100644 index 4cddeafed9ce..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_notification_rules.pyi +++ /dev/null @@ -1,88 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummarySummaryNotificationRules: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - endpoint_template_meta_name: Incomplete | None = None, - endpoint_id: Incomplete | None = None, - endpoint_type: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - message_template: Incomplete | None = None, - status: Incomplete | None = None, - status_rules: Incomplete | None = None, - tag_rules: Incomplete | None = None, - label_associations: Incomplete | None = None, - env_references: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def endpoint_template_meta_name(self): ... - @endpoint_template_meta_name.setter - def endpoint_template_meta_name(self, endpoint_template_meta_name) -> None: ... - @property - def endpoint_id(self): ... - @endpoint_id.setter - def endpoint_id(self, endpoint_id) -> None: ... - @property - def endpoint_type(self): ... - @endpoint_type.setter - def endpoint_type(self, endpoint_type) -> None: ... - @property - def every(self): ... - @every.setter - def every(self, every) -> None: ... - @property - def offset(self): ... - @offset.setter - def offset(self, offset) -> None: ... - @property - def message_template(self): ... - @message_template.setter - def message_template(self, message_template) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def status_rules(self): ... - @status_rules.setter - def status_rules(self, status_rules) -> None: ... - @property - def tag_rules(self): ... - @tag_rules.setter - def tag_rules(self, tag_rules) -> None: ... - @property - def label_associations(self): ... - @label_associations.setter - def label_associations(self, label_associations) -> None: ... - @property - def env_references(self): ... - @env_references.setter - def env_references(self, env_references) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_status_rules.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_status_rules.pyi deleted file mode 100644 index 29f307273ead..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_status_rules.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummarySummaryStatusRules: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, current_level: Incomplete | None = None, previous_level: Incomplete | None = None) -> None: ... - @property - def current_level(self): ... - @current_level.setter - def current_level(self, current_level) -> None: ... - @property - def previous_level(self): ... - @previous_level.setter - def previous_level(self, previous_level) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tag_rules.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tag_rules.pyi deleted file mode 100644 index 665b26c83280..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tag_rules.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummarySummaryTagRules: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, key: Incomplete | None = None, value: Incomplete | None = None, operator: Incomplete | None = None - ) -> None: ... - @property - def key(self): ... - @key.setter - def key(self, key) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - @property - def operator(self): ... - @operator.setter - def operator(self, operator) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tasks.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tasks.pyi deleted file mode 100644 index acebc2925499..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tasks.pyi +++ /dev/null @@ -1,68 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummarySummaryTasks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - id: Incomplete | None = None, - name: Incomplete | None = None, - cron: Incomplete | None = None, - description: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - query: Incomplete | None = None, - status: Incomplete | None = None, - env_references: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def cron(self): ... - @cron.setter - def cron(self, cron) -> None: ... - @property - def description(self): ... 
- @description.setter - def description(self, description) -> None: ... - @property - def every(self): ... - @every.setter - def every(self, every) -> None: ... - @property - def offset(self): ... - @offset.setter - def offset(self, offset) -> None: ... - @property - def query(self): ... - @query.setter - def query(self, query) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def env_references(self): ... - @env_references.setter - def env_references(self, env_references) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_variables.pyi b/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_variables.pyi deleted file mode 100644 index c652d5229b1b..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_variables.pyi +++ /dev/null @@ -1,58 +0,0 @@ -from _typeshed import Incomplete - -class TemplateSummarySummaryVariables: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - kind: Incomplete | None = None, - template_meta_name: Incomplete | None = None, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - arguments: Incomplete | None = None, - label_associations: Incomplete | None = None, - env_references: Incomplete | None = None, - ) -> None: ... - @property - def kind(self): ... - @kind.setter - def kind(self, kind) -> None: ... - @property - def template_meta_name(self): ... - @template_meta_name.setter - def template_meta_name(self, template_meta_name) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... - @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def arguments(self): ... - @arguments.setter - def arguments(self, arguments) -> None: ... - @property - def label_associations(self): ... - @label_associations.setter - def label_associations(self, label_associations) -> None: ... - @property - def env_references(self): ... - @env_references.setter - def env_references(self, env_references) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/test_statement.pyi b/stubs/influxdb-client/influxdb_client/domain/test_statement.pyi deleted file mode 100644 index 25f3069daf00..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/test_statement.pyi +++ /dev/null @@ -1,18 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.statement import Statement - -class TestStatement(Statement): - def __init__(self, type: Incomplete | None = None, assignment: Incomplete | None = None): ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def assignment(self): ... - @assignment.setter - def assignment(self, assignment) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/threshold.pyi b/stubs/influxdb-client/influxdb_client/domain/threshold.pyi deleted file mode 100644 index 9de0078b113e..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/threshold.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -class Threshold: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator_value_class_map: Incomplete - discriminator: str - def __init__(self, type: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - def get_real_child_model(self, data): ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/threshold_base.pyi b/stubs/influxdb-client/influxdb_client/domain/threshold_base.pyi deleted file mode 100644 index a1e666596a27..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/threshold_base.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class ThresholdBase: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, level: Incomplete | None = None, all_values: Incomplete | None = None) -> None: ... - @property - def level(self): ... - @level.setter - def level(self, level) -> None: ... - @property - def all_values(self): ... - @all_values.setter - def all_values(self, all_values) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/threshold_check.pyi b/stubs/influxdb-client/influxdb_client/domain/threshold_check.pyi deleted file mode 100644 index a012f9b6ea6a..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/threshold_check.pyi +++ /dev/null @@ -1,60 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.check_discriminator import CheckDiscriminator - -class ThresholdCheck(CheckDiscriminator): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - type: str = "threshold", - thresholds: Incomplete | None = None, - every: Incomplete | None = None, - offset: Incomplete | None = None, - tags: Incomplete | None = None, - status_message_template: Incomplete | None = None, - id: Incomplete | None = None, - name: Incomplete | None = None, - org_id: Incomplete | None = None, - task_id: Incomplete | None = None, - owner_id: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - query: Incomplete | None = None, - status: Incomplete | None = None, - description: Incomplete | None = None, - latest_completed: Incomplete | None = None, - last_run_status: Incomplete | None = None, - last_run_error: Incomplete | None = None, - labels: Incomplete | None = None, - links: Incomplete | None = None, - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def thresholds(self): ... - @thresholds.setter - def thresholds(self, thresholds) -> None: ... - @property - def every(self): ... - @every.setter - def every(self, every) -> None: ... - @property - def offset(self): ... - @offset.setter - def offset(self, offset) -> None: ... - @property - def tags(self): ... - @tags.setter - def tags(self, tags) -> None: ... - @property - def status_message_template(self): ... 
- @status_message_template.setter - def status_message_template(self, status_message_template) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/unary_expression.pyi b/stubs/influxdb-client/influxdb_client/domain/unary_expression.pyi deleted file mode 100644 index 085f3695cc0f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/unary_expression.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class UnaryExpression(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, type: Incomplete | None = None, operator: Incomplete | None = None, argument: Incomplete | None = None - ) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def operator(self): ... - @operator.setter - def operator(self, operator) -> None: ... - @property - def argument(self): ... - @argument.setter - def argument(self, argument) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/unsigned_integer_literal.pyi b/stubs/influxdb-client/influxdb_client/domain/unsigned_integer_literal.pyi deleted file mode 100644 index 8349cc0a82bf..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/unsigned_integer_literal.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.expression import Expression - -class UnsignedIntegerLiteral(Expression): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, value: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def value(self): ... - @value.setter - def value(self, value) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/user.pyi b/stubs/influxdb-client/influxdb_client/domain/user.pyi deleted file mode 100644 index 658a357dfd8f..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/user.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from _typeshed import Incomplete - -class User: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, id: Incomplete | None = None, name: Incomplete | None = None, status: str = "active") -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/user_response.pyi b/stubs/influxdb-client/influxdb_client/domain/user_response.pyi deleted file mode 100644 index 15f510b394e7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/user_response.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class UserResponse: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - id: Incomplete | None = None, - name: Incomplete | None = None, - status: str = "active", - links: Incomplete | None = None, - ) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def status(self): ... - @status.setter - def status(self, status) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/user_response_links.pyi b/stubs/influxdb-client/influxdb_client/domain/user_response_links.pyi deleted file mode 100644 index b1ecdff904c9..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/user_response_links.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from _typeshed import Incomplete - -class UserResponseLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, _self: Incomplete | None = None) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/users.pyi b/stubs/influxdb-client/influxdb_client/domain/users.pyi deleted file mode 100644 index f526b5910609..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/users.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Users: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, users: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def users(self): ... - @users.setter - def users(self, users) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/variable.pyi b/stubs/influxdb-client/influxdb_client/domain/variable.pyi deleted file mode 100644 index d7d51c41301c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/variable.pyi +++ /dev/null @@ -1,63 +0,0 @@ -from _typeshed import Incomplete - -class Variable: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - links: Incomplete | None = None, - id: Incomplete | None = None, - org_id: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - selected: Incomplete | None = None, - labels: Incomplete | None = None, - arguments: Incomplete | None = None, - created_at: Incomplete | None = None, - updated_at: Incomplete | None = None, - ) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def org_id(self): ... 
- @org_id.setter - def org_id(self, org_id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def description(self): ... - @description.setter - def description(self, description) -> None: ... - @property - def selected(self): ... - @selected.setter - def selected(self, selected) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - @property - def arguments(self): ... - @arguments.setter - def arguments(self, arguments) -> None: ... - @property - def created_at(self): ... - @created_at.setter - def created_at(self, created_at) -> None: ... - @property - def updated_at(self): ... - @updated_at.setter - def updated_at(self, updated_at) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/variable_assignment.pyi b/stubs/influxdb-client/influxdb_client/domain/variable_assignment.pyi deleted file mode 100644 index 3229214f1dda..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/variable_assignment.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.statement import Statement - -class VariableAssignment(Statement): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, type: Incomplete | None = None, id: Incomplete | None = None, init: Incomplete | None = None) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def init(self): ... - @init.setter - def init(self, init) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/variable_links.pyi b/stubs/influxdb-client/influxdb_client/domain/variable_links.pyi deleted file mode 100644 index acec1d256fa1..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/variable_links.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -class VariableLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, _self: Incomplete | None = None, org: Incomplete | None = None, labels: Incomplete | None = None - ) -> None: ... - @property - def org(self): ... - @org.setter - def org(self, org) -> None: ... - @property - def labels(self): ... - @labels.setter - def labels(self, labels) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/variable_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/variable_properties.pyi deleted file mode 100644 index 4554cacfe181..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/variable_properties.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from _typeshed import Incomplete - -class VariableProperties: - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/domain/variables.pyi b/stubs/influxdb-client/influxdb_client/domain/variables.pyi deleted file mode 100644 index 3c84f83bf710..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/variables.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -class Variables: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, variables: Incomplete | None = None) -> None: ... - @property - def variables(self): ... - @variables.setter - def variables(self, variables) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/view.pyi b/stubs/influxdb-client/influxdb_client/domain/view.pyi deleted file mode 100644 index f4a27704d7d7..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/view.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -class View: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - links: Incomplete | None = None, - id: Incomplete | None = None, - name: Incomplete | None = None, - properties: Incomplete | None = None, - ) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... - @property - def id(self): ... - @id.setter - def id(self, id) -> None: ... - @property - def name(self): ... - @name.setter - def name(self, name) -> None: ... - @property - def properties(self): ... - @properties.setter - def properties(self, properties) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/view_links.pyi b/stubs/influxdb-client/influxdb_client/domain/view_links.pyi deleted file mode 100644 index b848d86f6a98..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/view_links.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from _typeshed import Incomplete - -class ViewLinks: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, _self: Incomplete | None = None) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/view_properties.pyi deleted file mode 100644 index eef5b89ae0af..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/view_properties.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from _typeshed import Incomplete - -class ViewProperties: - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/views.pyi b/stubs/influxdb-client/influxdb_client/domain/views.pyi deleted file mode 100644 index 3480f632b416..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/views.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from _typeshed import Incomplete - -class Views: - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__(self, links: Incomplete | None = None, views: Incomplete | None = None) -> None: ... - @property - def links(self): ... - @links.setter - def links(self, links) -> None: ... 
- @property - def views(self): ... - @views.setter - def views(self, views) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/write_precision.pyi b/stubs/influxdb-client/influxdb_client/domain/write_precision.pyi deleted file mode 100644 index f9832543de5c..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/write_precision.pyi +++ /dev/null @@ -1,18 +0,0 @@ -from _typeshed import Incomplete -from typing import Any, ClassVar, Final, Literal -from typing_extensions import TypeAlias - -_WritePrecision: TypeAlias = Literal["ms", "s", "us", "ns"] # noqa: Y047 - -class WritePrecision: - MS: Final = "ms" - S: Final = "s" - US: Final = "us" - NS: Final = "ns" - openapi_types: ClassVar[dict[str, Incomplete]] - attribute_map: ClassVar[dict[str, Incomplete]] - def __init__(self) -> None: ... - def to_dict(self) -> dict[str, Any]: ... - def to_str(self) -> str: ... - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... diff --git a/stubs/influxdb-client/influxdb_client/domain/xy_geom.pyi b/stubs/influxdb-client/influxdb_client/domain/xy_geom.pyi deleted file mode 100644 index e4ab5eba1999..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/xy_geom.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from _typeshed import Incomplete - -class XYGeom: - LINE: str - STEP: str - STACKED: str - BAR: str - MONOTONEX: str - STEPBEFORE: str - STEPAFTER: str - openapi_types: Incomplete - attribute_map: Incomplete - def __init__(self) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... diff --git a/stubs/influxdb-client/influxdb_client/domain/xy_view_properties.pyi b/stubs/influxdb-client/influxdb_client/domain/xy_view_properties.pyi deleted file mode 100644 index 391e630ba337..000000000000 --- a/stubs/influxdb-client/influxdb_client/domain/xy_view_properties.pyi +++ /dev/null @@ -1,157 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.domain.view_properties import ViewProperties - -class XYViewProperties(ViewProperties): - openapi_types: Incomplete - attribute_map: Incomplete - discriminator: Incomplete - def __init__( - self, - adaptive_zoom_hide: bool | None = None, - time_format: Incomplete | None = None, - type: Incomplete | None = None, - queries: Incomplete | None = None, - colors: Incomplete | None = None, - color_mapping: Incomplete | None = None, - shape: Incomplete | None = None, - note: Incomplete | None = None, - show_note_when_empty: Incomplete | None = None, - axes: Incomplete | None = None, - static_legend: Incomplete | None = None, - x_column: Incomplete | None = None, - generate_x_axis_ticks: Incomplete | None = None, - x_total_ticks: Incomplete | None = None, - x_tick_start: Incomplete | None = None, - x_tick_step: Incomplete | None = None, - y_column: Incomplete | None = None, - generate_y_axis_ticks: Incomplete | None = None, - y_total_ticks: Incomplete | None = None, - y_tick_start: Incomplete | None = None, - y_tick_step: Incomplete | None = None, - shade_below: Incomplete | None = None, - hover_dimension: Incomplete | None = None, - position: Incomplete | None = None, - geom: Incomplete | None = None, - legend_colorize_rows: Incomplete | None = None, - legend_hide: Incomplete | None = None, - legend_opacity: Incomplete | None = None, - legend_orientation_threshold: Incomplete | None = None, - ) -> None: ... 
- adaptive_zoom_hide: bool | None - @property - def time_format(self): ... - @time_format.setter - def time_format(self, time_format) -> None: ... - @property - def type(self): ... - @type.setter - def type(self, type) -> None: ... - @property - def queries(self): ... - @queries.setter - def queries(self, queries) -> None: ... - @property - def colors(self): ... - @colors.setter - def colors(self, colors) -> None: ... - @property - def color_mapping(self): ... - @color_mapping.setter - def color_mapping(self, color_mapping) -> None: ... - @property - def shape(self): ... - @shape.setter - def shape(self, shape) -> None: ... - @property - def note(self): ... - @note.setter - def note(self, note) -> None: ... - @property - def show_note_when_empty(self): ... - @show_note_when_empty.setter - def show_note_when_empty(self, show_note_when_empty) -> None: ... - @property - def axes(self): ... - @axes.setter - def axes(self, axes) -> None: ... - @property - def static_legend(self): ... - @static_legend.setter - def static_legend(self, static_legend) -> None: ... - @property - def x_column(self): ... - @x_column.setter - def x_column(self, x_column) -> None: ... - @property - def generate_x_axis_ticks(self): ... - @generate_x_axis_ticks.setter - def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... - @property - def x_total_ticks(self): ... - @x_total_ticks.setter - def x_total_ticks(self, x_total_ticks) -> None: ... - @property - def x_tick_start(self): ... - @x_tick_start.setter - def x_tick_start(self, x_tick_start) -> None: ... - @property - def x_tick_step(self): ... - @x_tick_step.setter - def x_tick_step(self, x_tick_step) -> None: ... - @property - def y_column(self): ... - @y_column.setter - def y_column(self, y_column) -> None: ... - @property - def generate_y_axis_ticks(self): ... - @generate_y_axis_ticks.setter - def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... - @property - def y_total_ticks(self): ... - @y_total_ticks.setter - def y_total_ticks(self, y_total_ticks) -> None: ... - @property - def y_tick_start(self): ... - @y_tick_start.setter - def y_tick_start(self, y_tick_start) -> None: ... - @property - def y_tick_step(self): ... - @y_tick_step.setter - def y_tick_step(self, y_tick_step) -> None: ... - @property - def shade_below(self): ... - @shade_below.setter - def shade_below(self, shade_below) -> None: ... - @property - def hover_dimension(self): ... - @hover_dimension.setter - def hover_dimension(self, hover_dimension) -> None: ... - @property - def position(self): ... - @position.setter - def position(self, position) -> None: ... - @property - def geom(self): ... - @geom.setter - def geom(self, geom) -> None: ... - @property - def legend_colorize_rows(self): ... - @legend_colorize_rows.setter - def legend_colorize_rows(self, legend_colorize_rows) -> None: ... - @property - def legend_hide(self): ... - @legend_hide.setter - def legend_hide(self, legend_hide) -> None: ... - @property - def legend_opacity(self): ... - @legend_opacity.setter - def legend_opacity(self, legend_opacity) -> None: ... - @property - def legend_orientation_threshold(self): ... - @legend_orientation_threshold.setter - def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... - def to_dict(self): ... - def to_str(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... 
diff --git a/stubs/influxdb-client/influxdb_client/extras.pyi b/stubs/influxdb-client/influxdb_client/extras.pyi deleted file mode 100644 index 3cc4ea47de24..000000000000 --- a/stubs/influxdb-client/influxdb_client/extras.pyi +++ /dev/null @@ -1,4 +0,0 @@ -from typing import Any - -np: Any # numpy module -pd: Any # pandas module diff --git a/stubs/influxdb-client/influxdb_client/rest.pyi b/stubs/influxdb-client/influxdb_client/rest.pyi deleted file mode 100644 index b25e6d3cb706..000000000000 --- a/stubs/influxdb-client/influxdb_client/rest.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from _typeshed import Incomplete - -from urllib3 import HTTPResponse - -from ._sync.rest import RESTResponse -from .client.exceptions import InfluxDBError - -class ApiException(InfluxDBError): - status: Incomplete - reason: Incomplete - body: Incomplete - headers: Incomplete - def __init__( - self, - status: Incomplete | None = None, - reason: Incomplete | None = None, - http_resp: HTTPResponse | RESTResponse | None = None, - ) -> None: ... - -class _BaseRESTClient: - logger: Incomplete - @staticmethod - def log_request(method: str, url: str): ... - @staticmethod - def log_response(status: str): ... - @staticmethod - def log_body(body: object, prefix: str): ... - @staticmethod - def log_headers(headers: dict[str, str], prefix: str): ... diff --git a/stubs/influxdb-client/influxdb_client/service/__init__.pyi b/stubs/influxdb-client/influxdb_client/service/__init__.pyi deleted file mode 100644 index 4285a8aa3166..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/__init__.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from influxdb_client.service.authorizations_service import AuthorizationsService as AuthorizationsService -from influxdb_client.service.backup_service import BackupService as BackupService -from influxdb_client.service.bucket_schemas_service import BucketSchemasService as BucketSchemasService -from influxdb_client.service.buckets_service import BucketsService as BucketsService -from influxdb_client.service.cells_service import CellsService as CellsService -from influxdb_client.service.checks_service import ChecksService as ChecksService -from influxdb_client.service.config_service import ConfigService as ConfigService -from influxdb_client.service.dashboards_service import DashboardsService as DashboardsService -from influxdb_client.service.dbr_ps_service import DBRPsService as DBRPsService -from influxdb_client.service.delete_service import DeleteService as DeleteService -from influxdb_client.service.health_service import HealthService as HealthService -from influxdb_client.service.invokable_scripts_service import InvokableScriptsService as InvokableScriptsService -from influxdb_client.service.labels_service import LabelsService as LabelsService -from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService as LegacyAuthorizationsService -from influxdb_client.service.metrics_service import MetricsService as MetricsService -from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService as NotificationEndpointsService -from influxdb_client.service.notification_rules_service import NotificationRulesService as NotificationRulesService -from influxdb_client.service.organizations_service import OrganizationsService as OrganizationsService -from influxdb_client.service.ping_service import PingService as PingService -from influxdb_client.service.query_service import QueryService as QueryService -from influxdb_client.service.ready_service import 
ReadyService as ReadyService -from influxdb_client.service.remote_connections_service import RemoteConnectionsService as RemoteConnectionsService -from influxdb_client.service.replications_service import ReplicationsService as ReplicationsService -from influxdb_client.service.resources_service import ResourcesService as ResourcesService -from influxdb_client.service.restore_service import RestoreService as RestoreService -from influxdb_client.service.routes_service import RoutesService as RoutesService -from influxdb_client.service.rules_service import RulesService as RulesService -from influxdb_client.service.scraper_targets_service import ScraperTargetsService as ScraperTargetsService -from influxdb_client.service.secrets_service import SecretsService as SecretsService -from influxdb_client.service.setup_service import SetupService as SetupService -from influxdb_client.service.signin_service import SigninService as SigninService -from influxdb_client.service.signout_service import SignoutService as SignoutService -from influxdb_client.service.sources_service import SourcesService as SourcesService -from influxdb_client.service.tasks_service import TasksService as TasksService -from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService as TelegrafPluginsService -from influxdb_client.service.telegrafs_service import TelegrafsService as TelegrafsService -from influxdb_client.service.templates_service import TemplatesService as TemplatesService -from influxdb_client.service.users_service import UsersService as UsersService -from influxdb_client.service.variables_service import VariablesService as VariablesService -from influxdb_client.service.views_service import ViewsService as ViewsService -from influxdb_client.service.write_service import WriteService as WriteService diff --git a/stubs/influxdb-client/influxdb_client/service/_base_service.pyi b/stubs/influxdb-client/influxdb_client/service/_base_service.pyi deleted file mode 100644 index ef493c4ed361..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/_base_service.pyi +++ /dev/null @@ -1,8 +0,0 @@ -from _typeshed import Incomplete - -class _BaseService: - api_client: Incomplete - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def build_type(self) -> str: ... - async def build_type_async(self) -> str: ... - def response_header(self, response, header_name: str = "X-Influxdb-Version") -> str: ... diff --git a/stubs/influxdb-client/influxdb_client/service/authorizations_service.pyi b/stubs/influxdb-client/influxdb_client/service/authorizations_service.pyi deleted file mode 100644 index ded67599e115..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/authorizations_service.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class AuthorizationsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_authorizations_id(self, auth_id, **kwargs): ... - def delete_authorizations_id_with_http_info(self, auth_id, **kwargs): ... - async def delete_authorizations_id_async(self, auth_id, **kwargs): ... - def get_authorizations(self, **kwargs): ... - def get_authorizations_with_http_info(self, **kwargs): ... - async def get_authorizations_async(self, **kwargs): ... - def get_authorizations_id(self, auth_id, **kwargs): ... - def get_authorizations_id_with_http_info(self, auth_id, **kwargs): ... 
- async def get_authorizations_id_async(self, auth_id, **kwargs): ... - def patch_authorizations_id(self, auth_id, authorization_update_request, **kwargs): ... - def patch_authorizations_id_with_http_info(self, auth_id, authorization_update_request, **kwargs): ... - async def patch_authorizations_id_async(self, auth_id, authorization_update_request, **kwargs): ... - def post_authorizations(self, authorization_post_request, **kwargs): ... - def post_authorizations_with_http_info(self, authorization_post_request, **kwargs): ... - async def post_authorizations_async(self, authorization_post_request, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/backup_service.pyi b/stubs/influxdb-client/influxdb_client/service/backup_service.pyi deleted file mode 100644 index f8958e4b0f96..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/backup_service.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class BackupService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_backup_kv(self, **kwargs): ... - def get_backup_kv_with_http_info(self, **kwargs): ... - async def get_backup_kv_async(self, **kwargs): ... - def get_backup_metadata(self, **kwargs): ... - def get_backup_metadata_with_http_info(self, **kwargs): ... - async def get_backup_metadata_async(self, **kwargs): ... - def get_backup_shard_id(self, shard_id, **kwargs): ... - def get_backup_shard_id_with_http_info(self, shard_id, **kwargs): ... - async def get_backup_shard_id_async(self, shard_id, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/bucket_schemas_service.pyi b/stubs/influxdb-client/influxdb_client/service/bucket_schemas_service.pyi deleted file mode 100644 index 8eaa7079319b..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/bucket_schemas_service.pyi +++ /dev/null @@ -1,20 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class BucketSchemasService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def create_measurement_schema(self, bucket_id, measurement_schema_create_request, **kwargs): ... - def create_measurement_schema_with_http_info(self, bucket_id, measurement_schema_create_request, **kwargs): ... - async def create_measurement_schema_async(self, bucket_id, measurement_schema_create_request, **kwargs): ... - def get_measurement_schema(self, bucket_id, measurement_id, **kwargs): ... - def get_measurement_schema_with_http_info(self, bucket_id, measurement_id, **kwargs): ... - async def get_measurement_schema_async(self, bucket_id, measurement_id, **kwargs): ... - def get_measurement_schemas(self, bucket_id, **kwargs): ... - def get_measurement_schemas_with_http_info(self, bucket_id, **kwargs): ... - async def get_measurement_schemas_async(self, bucket_id, **kwargs): ... - def update_measurement_schema(self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs): ... - def update_measurement_schema_with_http_info( - self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs - ): ... - async def update_measurement_schema_async(self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs): ... 
diff --git a/stubs/influxdb-client/influxdb_client/service/buckets_service.pyi b/stubs/influxdb-client/influxdb_client/service/buckets_service.pyi deleted file mode 100644 index 3f2c9ae57bf5..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/buckets_service.pyi +++ /dev/null @@ -1,51 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class BucketsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_buckets_id(self, bucket_id, **kwargs): ... - def delete_buckets_id_with_http_info(self, bucket_id, **kwargs): ... - async def delete_buckets_id_async(self, bucket_id, **kwargs): ... - def delete_buckets_id_labels_id(self, bucket_id, label_id, **kwargs): ... - def delete_buckets_id_labels_id_with_http_info(self, bucket_id, label_id, **kwargs): ... - async def delete_buckets_id_labels_id_async(self, bucket_id, label_id, **kwargs): ... - def delete_buckets_id_members_id(self, user_id, bucket_id, **kwargs): ... - def delete_buckets_id_members_id_with_http_info(self, user_id, bucket_id, **kwargs): ... - async def delete_buckets_id_members_id_async(self, user_id, bucket_id, **kwargs): ... - def delete_buckets_id_owners_id(self, user_id, bucket_id, **kwargs): ... - def delete_buckets_id_owners_id_with_http_info(self, user_id, bucket_id, **kwargs): ... - async def delete_buckets_id_owners_id_async(self, user_id, bucket_id, **kwargs): ... - def get_buckets(self, **kwargs): ... - def get_buckets_with_http_info(self, **kwargs): ... - async def get_buckets_async(self, **kwargs): ... - def get_buckets_id(self, bucket_id, **kwargs): ... - def get_buckets_id_with_http_info(self, bucket_id, **kwargs): ... - async def get_buckets_id_async(self, bucket_id, **kwargs): ... - def get_buckets_id_labels(self, bucket_id, **kwargs): ... - def get_buckets_id_labels_with_http_info(self, bucket_id, **kwargs): ... - async def get_buckets_id_labels_async(self, bucket_id, **kwargs): ... - def get_buckets_id_members(self, bucket_id, **kwargs): ... - def get_buckets_id_members_with_http_info(self, bucket_id, **kwargs): ... - async def get_buckets_id_members_async(self, bucket_id, **kwargs): ... - def get_buckets_id_owners(self, bucket_id, **kwargs): ... - def get_buckets_id_owners_with_http_info(self, bucket_id, **kwargs): ... - async def get_buckets_id_owners_async(self, bucket_id, **kwargs): ... - def get_sources_id_buckets(self, source_id, **kwargs): ... - def get_sources_id_buckets_with_http_info(self, source_id, **kwargs): ... - async def get_sources_id_buckets_async(self, source_id, **kwargs): ... - def patch_buckets_id(self, bucket_id, patch_bucket_request, **kwargs): ... - def patch_buckets_id_with_http_info(self, bucket_id, patch_bucket_request, **kwargs): ... - async def patch_buckets_id_async(self, bucket_id, patch_bucket_request, **kwargs): ... - def post_buckets(self, post_bucket_request, **kwargs): ... - def post_buckets_with_http_info(self, post_bucket_request, **kwargs): ... - async def post_buckets_async(self, post_bucket_request, **kwargs): ... - def post_buckets_id_labels(self, bucket_id, label_mapping, **kwargs): ... - def post_buckets_id_labels_with_http_info(self, bucket_id, label_mapping, **kwargs): ... - async def post_buckets_id_labels_async(self, bucket_id, label_mapping, **kwargs): ... - def post_buckets_id_members(self, bucket_id, add_resource_member_request_body, **kwargs): ... 
- def post_buckets_id_members_with_http_info(self, bucket_id, add_resource_member_request_body, **kwargs): ... - async def post_buckets_id_members_async(self, bucket_id, add_resource_member_request_body, **kwargs): ... - def post_buckets_id_owners(self, bucket_id, add_resource_member_request_body, **kwargs): ... - def post_buckets_id_owners_with_http_info(self, bucket_id, add_resource_member_request_body, **kwargs): ... - async def post_buckets_id_owners_async(self, bucket_id, add_resource_member_request_body, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/cells_service.pyi b/stubs/influxdb-client/influxdb_client/service/cells_service.pyi deleted file mode 100644 index 0bf6fa8e24e0..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/cells_service.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class CellsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_dashboards_id_cells_id(self, dashboard_id, cell_id, **kwargs): ... - def delete_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, **kwargs): ... - async def delete_dashboards_id_cells_id_async(self, dashboard_id, cell_id, **kwargs): ... - def get_dashboards_id_cells_id_view(self, dashboard_id, cell_id, **kwargs): ... - def get_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, **kwargs): ... - async def get_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, **kwargs): ... - def patch_dashboards_id_cells_id(self, dashboard_id, cell_id, cell_update, **kwargs): ... - def patch_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, cell_update, **kwargs): ... - async def patch_dashboards_id_cells_id_async(self, dashboard_id, cell_id, cell_update, **kwargs): ... - def patch_dashboards_id_cells_id_view(self, dashboard_id, cell_id, view, **kwargs): ... - def patch_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, view, **kwargs): ... - async def patch_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, view, **kwargs): ... - def post_dashboards_id_cells(self, dashboard_id, create_cell, **kwargs): ... - def post_dashboards_id_cells_with_http_info(self, dashboard_id, create_cell, **kwargs): ... - async def post_dashboards_id_cells_async(self, dashboard_id, create_cell, **kwargs): ... - def put_dashboards_id_cells(self, dashboard_id, cell, **kwargs): ... - def put_dashboards_id_cells_with_http_info(self, dashboard_id, cell, **kwargs): ... - async def put_dashboards_id_cells_async(self, dashboard_id, cell, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/checks_service.pyi b/stubs/influxdb-client/influxdb_client/service/checks_service.pyi deleted file mode 100644 index 74b78eee3abb..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/checks_service.pyi +++ /dev/null @@ -1,36 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class ChecksService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def create_check(self, post_check, **kwargs): ... - def create_check_with_http_info(self, post_check, **kwargs): ... - async def create_check_async(self, post_check, **kwargs): ... - def delete_checks_id(self, check_id, **kwargs): ... - def delete_checks_id_with_http_info(self, check_id, **kwargs): ... 
- async def delete_checks_id_async(self, check_id, **kwargs): ... - def delete_checks_id_labels_id(self, check_id, label_id, **kwargs): ... - def delete_checks_id_labels_id_with_http_info(self, check_id, label_id, **kwargs): ... - async def delete_checks_id_labels_id_async(self, check_id, label_id, **kwargs): ... - def get_checks(self, org_id, **kwargs): ... - def get_checks_with_http_info(self, org_id, **kwargs): ... - async def get_checks_async(self, org_id, **kwargs): ... - def get_checks_id(self, check_id, **kwargs): ... - def get_checks_id_with_http_info(self, check_id, **kwargs): ... - async def get_checks_id_async(self, check_id, **kwargs): ... - def get_checks_id_labels(self, check_id, **kwargs): ... - def get_checks_id_labels_with_http_info(self, check_id, **kwargs): ... - async def get_checks_id_labels_async(self, check_id, **kwargs): ... - def get_checks_id_query(self, check_id, **kwargs): ... - def get_checks_id_query_with_http_info(self, check_id, **kwargs): ... - async def get_checks_id_query_async(self, check_id, **kwargs): ... - def patch_checks_id(self, check_id, check_patch, **kwargs): ... - def patch_checks_id_with_http_info(self, check_id, check_patch, **kwargs): ... - async def patch_checks_id_async(self, check_id, check_patch, **kwargs): ... - def post_checks_id_labels(self, check_id, label_mapping, **kwargs): ... - def post_checks_id_labels_with_http_info(self, check_id, label_mapping, **kwargs): ... - async def post_checks_id_labels_async(self, check_id, label_mapping, **kwargs): ... - def put_checks_id(self, check_id, check, **kwargs): ... - def put_checks_id_with_http_info(self, check_id, check, **kwargs): ... - async def put_checks_id_async(self, check_id, check, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/config_service.pyi b/stubs/influxdb-client/influxdb_client/service/config_service.pyi deleted file mode 100644 index 775053854fdf..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/config_service.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class ConfigService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_config(self, **kwargs): ... - def get_config_with_http_info(self, **kwargs): ... - async def get_config_async(self, **kwargs): ... - def get_flags(self, **kwargs): ... - def get_flags_with_http_info(self, **kwargs): ... - async def get_flags_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/dashboards_service.pyi b/stubs/influxdb-client/influxdb_client/service/dashboards_service.pyi deleted file mode 100644 index 4c7d53e7377a..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/dashboards_service.pyi +++ /dev/null @@ -1,66 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class DashboardsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_dashboards_id(self, dashboard_id, **kwargs): ... - def delete_dashboards_id_with_http_info(self, dashboard_id, **kwargs): ... - async def delete_dashboards_id_async(self, dashboard_id, **kwargs): ... - def delete_dashboards_id_cells_id(self, dashboard_id, cell_id, **kwargs): ... - def delete_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, **kwargs): ... - async def delete_dashboards_id_cells_id_async(self, dashboard_id, cell_id, **kwargs): ... 
- def delete_dashboards_id_labels_id(self, dashboard_id, label_id, **kwargs): ... - def delete_dashboards_id_labels_id_with_http_info(self, dashboard_id, label_id, **kwargs): ... - async def delete_dashboards_id_labels_id_async(self, dashboard_id, label_id, **kwargs): ... - def delete_dashboards_id_members_id(self, user_id, dashboard_id, **kwargs): ... - def delete_dashboards_id_members_id_with_http_info(self, user_id, dashboard_id, **kwargs): ... - async def delete_dashboards_id_members_id_async(self, user_id, dashboard_id, **kwargs): ... - def delete_dashboards_id_owners_id(self, user_id, dashboard_id, **kwargs): ... - def delete_dashboards_id_owners_id_with_http_info(self, user_id, dashboard_id, **kwargs): ... - async def delete_dashboards_id_owners_id_async(self, user_id, dashboard_id, **kwargs): ... - def get_dashboards(self, **kwargs): ... - def get_dashboards_with_http_info(self, **kwargs): ... - async def get_dashboards_async(self, **kwargs): ... - def get_dashboards_id(self, dashboard_id, **kwargs): ... - def get_dashboards_id_with_http_info(self, dashboard_id, **kwargs): ... - async def get_dashboards_id_async(self, dashboard_id, **kwargs): ... - def get_dashboards_id_cells_id_view(self, dashboard_id, cell_id, **kwargs): ... - def get_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, **kwargs): ... - async def get_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, **kwargs): ... - def get_dashboards_id_labels(self, dashboard_id, **kwargs): ... - def get_dashboards_id_labels_with_http_info(self, dashboard_id, **kwargs): ... - async def get_dashboards_id_labels_async(self, dashboard_id, **kwargs): ... - def get_dashboards_id_members(self, dashboard_id, **kwargs): ... - def get_dashboards_id_members_with_http_info(self, dashboard_id, **kwargs): ... - async def get_dashboards_id_members_async(self, dashboard_id, **kwargs): ... - def get_dashboards_id_owners(self, dashboard_id, **kwargs): ... - def get_dashboards_id_owners_with_http_info(self, dashboard_id, **kwargs): ... - async def get_dashboards_id_owners_async(self, dashboard_id, **kwargs): ... - def patch_dashboards_id(self, dashboard_id, **kwargs): ... - def patch_dashboards_id_with_http_info(self, dashboard_id, **kwargs): ... - async def patch_dashboards_id_async(self, dashboard_id, **kwargs): ... - def patch_dashboards_id_cells_id(self, dashboard_id, cell_id, cell_update, **kwargs): ... - def patch_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, cell_update, **kwargs): ... - async def patch_dashboards_id_cells_id_async(self, dashboard_id, cell_id, cell_update, **kwargs): ... - def patch_dashboards_id_cells_id_view(self, dashboard_id, cell_id, view, **kwargs): ... - def patch_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, view, **kwargs): ... - async def patch_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, view, **kwargs): ... - def post_dashboards(self, create_dashboard_request, **kwargs): ... - def post_dashboards_with_http_info(self, create_dashboard_request, **kwargs): ... - async def post_dashboards_async(self, create_dashboard_request, **kwargs): ... - def post_dashboards_id_cells(self, dashboard_id, create_cell, **kwargs): ... - def post_dashboards_id_cells_with_http_info(self, dashboard_id, create_cell, **kwargs): ... - async def post_dashboards_id_cells_async(self, dashboard_id, create_cell, **kwargs): ... - def post_dashboards_id_labels(self, dashboard_id, label_mapping, **kwargs): ... 
- def post_dashboards_id_labels_with_http_info(self, dashboard_id, label_mapping, **kwargs): ... - async def post_dashboards_id_labels_async(self, dashboard_id, label_mapping, **kwargs): ... - def post_dashboards_id_members(self, dashboard_id, add_resource_member_request_body, **kwargs): ... - def post_dashboards_id_members_with_http_info(self, dashboard_id, add_resource_member_request_body, **kwargs): ... - async def post_dashboards_id_members_async(self, dashboard_id, add_resource_member_request_body, **kwargs): ... - def post_dashboards_id_owners(self, dashboard_id, add_resource_member_request_body, **kwargs): ... - def post_dashboards_id_owners_with_http_info(self, dashboard_id, add_resource_member_request_body, **kwargs): ... - async def post_dashboards_id_owners_async(self, dashboard_id, add_resource_member_request_body, **kwargs): ... - def put_dashboards_id_cells(self, dashboard_id, cell, **kwargs): ... - def put_dashboards_id_cells_with_http_info(self, dashboard_id, cell, **kwargs): ... - async def put_dashboards_id_cells_async(self, dashboard_id, cell, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/dbr_ps_service.pyi b/stubs/influxdb-client/influxdb_client/service/dbr_ps_service.pyi deleted file mode 100644 index 21afd8aec4ac..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/dbr_ps_service.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class DBRPsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_dbrpid(self, dbrp_id, **kwargs): ... - def delete_dbrpid_with_http_info(self, dbrp_id, **kwargs): ... - async def delete_dbrpid_async(self, dbrp_id, **kwargs): ... - def get_dbr_ps(self, **kwargs): ... - def get_dbr_ps_with_http_info(self, **kwargs): ... - async def get_dbr_ps_async(self, **kwargs): ... - def get_dbr_ps_id(self, dbrp_id, **kwargs): ... - def get_dbr_ps_id_with_http_info(self, dbrp_id, **kwargs): ... - async def get_dbr_ps_id_async(self, dbrp_id, **kwargs): ... - def patch_dbrpid(self, dbrp_id, dbrp_update, **kwargs): ... - def patch_dbrpid_with_http_info(self, dbrp_id, dbrp_update, **kwargs): ... - async def patch_dbrpid_async(self, dbrp_id, dbrp_update, **kwargs): ... - def post_dbrp(self, dbrp_create, **kwargs): ... - def post_dbrp_with_http_info(self, dbrp_create, **kwargs): ... - async def post_dbrp_async(self, dbrp_create, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/delete_service.pyi b/stubs/influxdb-client/influxdb_client/service/delete_service.pyi deleted file mode 100644 index 87873c899740..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/delete_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class DeleteService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def post_delete(self, delete_predicate_request, **kwargs): ... - def post_delete_with_http_info(self, delete_predicate_request, **kwargs): ... - async def post_delete_async(self, delete_predicate_request, **kwargs): ... 
diff --git a/stubs/influxdb-client/influxdb_client/service/health_service.pyi b/stubs/influxdb-client/influxdb_client/service/health_service.pyi deleted file mode 100644 index 7e07a98b5529..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/health_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class HealthService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_health(self, **kwargs): ... - def get_health_with_http_info(self, **kwargs): ... - async def get_health_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/invokable_scripts_service.pyi b/stubs/influxdb-client/influxdb_client/service/invokable_scripts_service.pyi deleted file mode 100644 index 71eea2db17a7..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/invokable_scripts_service.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class InvokableScriptsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_scripts_id(self, script_id, **kwargs): ... - def delete_scripts_id_with_http_info(self, script_id, **kwargs): ... - async def delete_scripts_id_async(self, script_id, **kwargs): ... - def get_scripts(self, **kwargs): ... - def get_scripts_with_http_info(self, **kwargs): ... - async def get_scripts_async(self, **kwargs): ... - def get_scripts_id(self, script_id, **kwargs): ... - def get_scripts_id_with_http_info(self, script_id, **kwargs): ... - async def get_scripts_id_async(self, script_id, **kwargs): ... - def get_scripts_id_params(self, script_id, **kwargs): ... - def get_scripts_id_params_with_http_info(self, script_id, **kwargs): ... - async def get_scripts_id_params_async(self, script_id, **kwargs): ... - def patch_scripts_id(self, script_id, script_update_request, **kwargs): ... - def patch_scripts_id_with_http_info(self, script_id, script_update_request, **kwargs): ... - async def patch_scripts_id_async(self, script_id, script_update_request, **kwargs): ... - def post_scripts(self, script_create_request, **kwargs): ... - def post_scripts_with_http_info(self, script_create_request, **kwargs): ... - async def post_scripts_async(self, script_create_request, **kwargs): ... - def post_scripts_id_invoke(self, script_id, **kwargs): ... - def post_scripts_id_invoke_with_http_info(self, script_id, **kwargs): ... - async def post_scripts_id_invoke_async(self, script_id, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/labels_service.pyi b/stubs/influxdb-client/influxdb_client/service/labels_service.pyi deleted file mode 100644 index ef32cb375f54..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/labels_service.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class LabelsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_labels_id(self, label_id, **kwargs): ... - def delete_labels_id_with_http_info(self, label_id, **kwargs): ... - async def delete_labels_id_async(self, label_id, **kwargs): ... - def get_labels(self, **kwargs): ... - def get_labels_with_http_info(self, **kwargs): ... - async def get_labels_async(self, **kwargs): ... - def get_labels_id(self, label_id, **kwargs): ... 
- def get_labels_id_with_http_info(self, label_id, **kwargs): ... - async def get_labels_id_async(self, label_id, **kwargs): ... - def patch_labels_id(self, label_id, label_update, **kwargs): ... - def patch_labels_id_with_http_info(self, label_id, label_update, **kwargs): ... - async def patch_labels_id_async(self, label_id, label_update, **kwargs): ... - def post_labels(self, label_create_request, **kwargs): ... - def post_labels_with_http_info(self, label_create_request, **kwargs): ... - async def post_labels_async(self, label_create_request, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/legacy_authorizations_service.pyi b/stubs/influxdb-client/influxdb_client/service/legacy_authorizations_service.pyi deleted file mode 100644 index 74ab8669db1f..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/legacy_authorizations_service.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class LegacyAuthorizationsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_legacy_authorizations_id(self, auth_id, **kwargs): ... - def delete_legacy_authorizations_id_with_http_info(self, auth_id, **kwargs): ... - async def delete_legacy_authorizations_id_async(self, auth_id, **kwargs): ... - def get_legacy_authorizations(self, **kwargs): ... - def get_legacy_authorizations_with_http_info(self, **kwargs): ... - async def get_legacy_authorizations_async(self, **kwargs): ... - def get_legacy_authorizations_id(self, auth_id, **kwargs): ... - def get_legacy_authorizations_id_with_http_info(self, auth_id, **kwargs): ... - async def get_legacy_authorizations_id_async(self, auth_id, **kwargs): ... - def patch_legacy_authorizations_id(self, auth_id, authorization_update_request, **kwargs): ... - def patch_legacy_authorizations_id_with_http_info(self, auth_id, authorization_update_request, **kwargs): ... - async def patch_legacy_authorizations_id_async(self, auth_id, authorization_update_request, **kwargs): ... - def post_legacy_authorizations(self, legacy_authorization_post_request, **kwargs): ... - def post_legacy_authorizations_with_http_info(self, legacy_authorization_post_request, **kwargs): ... - async def post_legacy_authorizations_async(self, legacy_authorization_post_request, **kwargs): ... - def post_legacy_authorizations_id_password(self, auth_id, password_reset_body, **kwargs): ... - def post_legacy_authorizations_id_password_with_http_info(self, auth_id, password_reset_body, **kwargs): ... - async def post_legacy_authorizations_id_password_async(self, auth_id, password_reset_body, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/metrics_service.pyi b/stubs/influxdb-client/influxdb_client/service/metrics_service.pyi deleted file mode 100644 index 28c9f28075d1..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/metrics_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class MetricsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_metrics(self, **kwargs): ... - def get_metrics_with_http_info(self, **kwargs): ... - async def get_metrics_async(self, **kwargs): ... 
diff --git a/stubs/influxdb-client/influxdb_client/service/notification_endpoints_service.pyi b/stubs/influxdb-client/influxdb_client/service/notification_endpoints_service.pyi deleted file mode 100644 index 4d99507c158b..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/notification_endpoints_service.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class NotificationEndpointsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def create_notification_endpoint(self, post_notification_endpoint, **kwargs): ... - def create_notification_endpoint_with_http_info(self, post_notification_endpoint, **kwargs): ... - async def create_notification_endpoint_async(self, post_notification_endpoint, **kwargs): ... - def delete_notification_endpoints_id(self, endpoint_id, **kwargs): ... - def delete_notification_endpoints_id_with_http_info(self, endpoint_id, **kwargs): ... - async def delete_notification_endpoints_id_async(self, endpoint_id, **kwargs): ... - def delete_notification_endpoints_id_labels_id(self, endpoint_id, label_id, **kwargs): ... - def delete_notification_endpoints_id_labels_id_with_http_info(self, endpoint_id, label_id, **kwargs): ... - async def delete_notification_endpoints_id_labels_id_async(self, endpoint_id, label_id, **kwargs): ... - def get_notification_endpoints(self, org_id, **kwargs): ... - def get_notification_endpoints_with_http_info(self, org_id, **kwargs): ... - async def get_notification_endpoints_async(self, org_id, **kwargs): ... - def get_notification_endpoints_id(self, endpoint_id, **kwargs): ... - def get_notification_endpoints_id_with_http_info(self, endpoint_id, **kwargs): ... - async def get_notification_endpoints_id_async(self, endpoint_id, **kwargs): ... - def get_notification_endpoints_id_labels(self, endpoint_id, **kwargs): ... - def get_notification_endpoints_id_labels_with_http_info(self, endpoint_id, **kwargs): ... - async def get_notification_endpoints_id_labels_async(self, endpoint_id, **kwargs): ... - def patch_notification_endpoints_id(self, endpoint_id, notification_endpoint_update, **kwargs): ... - def patch_notification_endpoints_id_with_http_info(self, endpoint_id, notification_endpoint_update, **kwargs): ... - async def patch_notification_endpoints_id_async(self, endpoint_id, notification_endpoint_update, **kwargs): ... - def post_notification_endpoint_id_labels(self, endpoint_id, label_mapping, **kwargs): ... - def post_notification_endpoint_id_labels_with_http_info(self, endpoint_id, label_mapping, **kwargs): ... - async def post_notification_endpoint_id_labels_async(self, endpoint_id, label_mapping, **kwargs): ... - def put_notification_endpoints_id(self, endpoint_id, notification_endpoint, **kwargs): ... - def put_notification_endpoints_id_with_http_info(self, endpoint_id, notification_endpoint, **kwargs): ... - async def put_notification_endpoints_id_async(self, endpoint_id, notification_endpoint, **kwargs): ... 
diff --git a/stubs/influxdb-client/influxdb_client/service/notification_rules_service.pyi b/stubs/influxdb-client/influxdb_client/service/notification_rules_service.pyi deleted file mode 100644 index 614b00de2a9f..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/notification_rules_service.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class NotificationRulesService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def create_notification_rule(self, post_notification_rule, **kwargs): ... - def create_notification_rule_with_http_info(self, post_notification_rule, **kwargs): ... - async def create_notification_rule_async(self, post_notification_rule, **kwargs): ... - def delete_notification_rules_id(self, rule_id, **kwargs): ... - def delete_notification_rules_id_with_http_info(self, rule_id, **kwargs): ... - async def delete_notification_rules_id_async(self, rule_id, **kwargs): ... - def delete_notification_rules_id_labels_id(self, rule_id, label_id, **kwargs): ... - def delete_notification_rules_id_labels_id_with_http_info(self, rule_id, label_id, **kwargs): ... - async def delete_notification_rules_id_labels_id_async(self, rule_id, label_id, **kwargs): ... - def get_notification_rules(self, org_id, **kwargs): ... - def get_notification_rules_with_http_info(self, org_id, **kwargs): ... - async def get_notification_rules_async(self, org_id, **kwargs): ... - def get_notification_rules_id(self, rule_id, **kwargs): ... - def get_notification_rules_id_with_http_info(self, rule_id, **kwargs): ... - async def get_notification_rules_id_async(self, rule_id, **kwargs): ... - def get_notification_rules_id_labels(self, rule_id, **kwargs): ... - def get_notification_rules_id_labels_with_http_info(self, rule_id, **kwargs): ... - async def get_notification_rules_id_labels_async(self, rule_id, **kwargs): ... - def patch_notification_rules_id(self, rule_id, notification_rule_update, **kwargs): ... - def patch_notification_rules_id_with_http_info(self, rule_id, notification_rule_update, **kwargs): ... - async def patch_notification_rules_id_async(self, rule_id, notification_rule_update, **kwargs): ... - def post_notification_rule_id_labels(self, rule_id, label_mapping, **kwargs): ... - def post_notification_rule_id_labels_with_http_info(self, rule_id, label_mapping, **kwargs): ... - async def post_notification_rule_id_labels_async(self, rule_id, label_mapping, **kwargs): ... - def put_notification_rules_id(self, rule_id, notification_rule, **kwargs): ... - def put_notification_rules_id_with_http_info(self, rule_id, notification_rule, **kwargs): ... - async def put_notification_rules_id_async(self, rule_id, notification_rule, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/organizations_service.pyi b/stubs/influxdb-client/influxdb_client/service/organizations_service.pyi deleted file mode 100644 index 75424e736064..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/organizations_service.pyi +++ /dev/null @@ -1,39 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class OrganizationsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_orgs_id(self, org_id, **kwargs): ... - def delete_orgs_id_with_http_info(self, org_id, **kwargs): ... - async def delete_orgs_id_async(self, org_id, **kwargs): ... 
- def delete_orgs_id_members_id(self, user_id, org_id, **kwargs): ... - def delete_orgs_id_members_id_with_http_info(self, user_id, org_id, **kwargs): ... - async def delete_orgs_id_members_id_async(self, user_id, org_id, **kwargs): ... - def delete_orgs_id_owners_id(self, user_id, org_id, **kwargs): ... - def delete_orgs_id_owners_id_with_http_info(self, user_id, org_id, **kwargs): ... - async def delete_orgs_id_owners_id_async(self, user_id, org_id, **kwargs): ... - def get_orgs(self, **kwargs): ... - def get_orgs_with_http_info(self, **kwargs): ... - async def get_orgs_async(self, **kwargs): ... - def get_orgs_id(self, org_id, **kwargs): ... - def get_orgs_id_with_http_info(self, org_id, **kwargs): ... - async def get_orgs_id_async(self, org_id, **kwargs): ... - def get_orgs_id_members(self, org_id, **kwargs): ... - def get_orgs_id_members_with_http_info(self, org_id, **kwargs): ... - async def get_orgs_id_members_async(self, org_id, **kwargs): ... - def get_orgs_id_owners(self, org_id, **kwargs): ... - def get_orgs_id_owners_with_http_info(self, org_id, **kwargs): ... - async def get_orgs_id_owners_async(self, org_id, **kwargs): ... - def patch_orgs_id(self, org_id, patch_organization_request, **kwargs): ... - def patch_orgs_id_with_http_info(self, org_id, patch_organization_request, **kwargs): ... - async def patch_orgs_id_async(self, org_id, patch_organization_request, **kwargs): ... - def post_orgs(self, post_organization_request, **kwargs): ... - def post_orgs_with_http_info(self, post_organization_request, **kwargs): ... - async def post_orgs_async(self, post_organization_request, **kwargs): ... - def post_orgs_id_members(self, org_id, add_resource_member_request_body, **kwargs): ... - def post_orgs_id_members_with_http_info(self, org_id, add_resource_member_request_body, **kwargs): ... - async def post_orgs_id_members_async(self, org_id, add_resource_member_request_body, **kwargs): ... - def post_orgs_id_owners(self, org_id, add_resource_member_request_body, **kwargs): ... - def post_orgs_id_owners_with_http_info(self, org_id, add_resource_member_request_body, **kwargs): ... - async def post_orgs_id_owners_async(self, org_id, add_resource_member_request_body, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/ping_service.pyi b/stubs/influxdb-client/influxdb_client/service/ping_service.pyi deleted file mode 100644 index 5398609419dd..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/ping_service.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class PingService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_ping(self, **kwargs): ... - def get_ping_with_http_info(self, **kwargs): ... - async def get_ping_async(self, **kwargs): ... - def head_ping(self, **kwargs): ... - def head_ping_with_http_info(self, **kwargs): ... - async def head_ping_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/query_service.pyi b/stubs/influxdb-client/influxdb_client/service/query_service.pyi deleted file mode 100644 index 66c47a5cb0c7..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/query_service.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class QueryService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... 
- def get_query_suggestions(self, **kwargs): ... - def get_query_suggestions_with_http_info(self, **kwargs): ... - async def get_query_suggestions_async(self, **kwargs): ... - def get_query_suggestions_name(self, name, **kwargs): ... - def get_query_suggestions_name_with_http_info(self, name, **kwargs): ... - async def get_query_suggestions_name_async(self, name, **kwargs): ... - def post_query(self, **kwargs): ... - def post_query_with_http_info(self, **kwargs): ... - async def post_query_async(self, **kwargs): ... - def post_query_analyze(self, **kwargs): ... - def post_query_analyze_with_http_info(self, **kwargs): ... - async def post_query_analyze_async(self, **kwargs): ... - def post_query_ast(self, **kwargs): ... - def post_query_ast_with_http_info(self, **kwargs): ... - async def post_query_ast_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/ready_service.pyi b/stubs/influxdb-client/influxdb_client/service/ready_service.pyi deleted file mode 100644 index 0bd5c56b904e..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/ready_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class ReadyService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_ready(self, **kwargs): ... - def get_ready_with_http_info(self, **kwargs): ... - async def get_ready_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/remote_connections_service.pyi b/stubs/influxdb-client/influxdb_client/service/remote_connections_service.pyi deleted file mode 100644 index 363682d60aa1..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/remote_connections_service.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class RemoteConnectionsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_remote_connection_by_id(self, remote_id, **kwargs): ... - def delete_remote_connection_by_id_with_http_info(self, remote_id, **kwargs): ... - async def delete_remote_connection_by_id_async(self, remote_id, **kwargs): ... - def get_remote_connection_by_id(self, remote_id, **kwargs): ... - def get_remote_connection_by_id_with_http_info(self, remote_id, **kwargs): ... - async def get_remote_connection_by_id_async(self, remote_id, **kwargs): ... - def get_remote_connections(self, org_id, **kwargs): ... - def get_remote_connections_with_http_info(self, org_id, **kwargs): ... - async def get_remote_connections_async(self, org_id, **kwargs): ... - def patch_remote_connection_by_id(self, remote_id, remote_connection_update_request, **kwargs): ... - def patch_remote_connection_by_id_with_http_info(self, remote_id, remote_connection_update_request, **kwargs): ... - async def patch_remote_connection_by_id_async(self, remote_id, remote_connection_update_request, **kwargs): ... - def post_remote_connection(self, remote_connection_creation_request, **kwargs): ... - def post_remote_connection_with_http_info(self, remote_connection_creation_request, **kwargs): ... - async def post_remote_connection_async(self, remote_connection_creation_request, **kwargs): ... 
diff --git a/stubs/influxdb-client/influxdb_client/service/replications_service.pyi b/stubs/influxdb-client/influxdb_client/service/replications_service.pyi deleted file mode 100644 index bec647cf4068..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/replications_service.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class ReplicationsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_replication_by_id(self, replication_id, **kwargs): ... - def delete_replication_by_id_with_http_info(self, replication_id, **kwargs): ... - async def delete_replication_by_id_async(self, replication_id, **kwargs): ... - def get_replication_by_id(self, replication_id, **kwargs): ... - def get_replication_by_id_with_http_info(self, replication_id, **kwargs): ... - async def get_replication_by_id_async(self, replication_id, **kwargs): ... - def get_replications(self, org_id, **kwargs): ... - def get_replications_with_http_info(self, org_id, **kwargs): ... - async def get_replications_async(self, org_id, **kwargs): ... - def patch_replication_by_id(self, replication_id, replication_update_request, **kwargs): ... - def patch_replication_by_id_with_http_info(self, replication_id, replication_update_request, **kwargs): ... - async def patch_replication_by_id_async(self, replication_id, replication_update_request, **kwargs): ... - def post_replication(self, replication_creation_request, **kwargs): ... - def post_replication_with_http_info(self, replication_creation_request, **kwargs): ... - async def post_replication_async(self, replication_creation_request, **kwargs): ... - def post_validate_replication_by_id(self, replication_id, **kwargs): ... - def post_validate_replication_by_id_with_http_info(self, replication_id, **kwargs): ... - async def post_validate_replication_by_id_async(self, replication_id, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/resources_service.pyi b/stubs/influxdb-client/influxdb_client/service/resources_service.pyi deleted file mode 100644 index a98bd7ce4b9a..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/resources_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class ResourcesService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_resources(self, **kwargs): ... - def get_resources_with_http_info(self, **kwargs): ... - async def get_resources_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/restore_service.pyi b/stubs/influxdb-client/influxdb_client/service/restore_service.pyi deleted file mode 100644 index 9f61c4abedca..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/restore_service.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class RestoreService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def post_restore_bucket_id(self, bucket_id, body, **kwargs): ... - def post_restore_bucket_id_with_http_info(self, bucket_id, body, **kwargs): ... - async def post_restore_bucket_id_async(self, bucket_id, body, **kwargs): ... - def post_restore_bucket_metadata(self, bucket_metadata_manifest, **kwargs): ... 
- def post_restore_bucket_metadata_with_http_info(self, bucket_metadata_manifest, **kwargs): ... - async def post_restore_bucket_metadata_async(self, bucket_metadata_manifest, **kwargs): ... - def post_restore_kv(self, body, **kwargs): ... - def post_restore_kv_with_http_info(self, body, **kwargs): ... - async def post_restore_kv_async(self, body, **kwargs): ... - def post_restore_shard_id(self, shard_id, body, **kwargs): ... - def post_restore_shard_id_with_http_info(self, shard_id, body, **kwargs): ... - async def post_restore_shard_id_async(self, shard_id, body, **kwargs): ... - def post_restore_sql(self, body, **kwargs): ... - def post_restore_sql_with_http_info(self, body, **kwargs): ... - async def post_restore_sql_async(self, body, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/routes_service.pyi b/stubs/influxdb-client/influxdb_client/service/routes_service.pyi deleted file mode 100644 index 334e168e75f4..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/routes_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class RoutesService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_routes(self, **kwargs): ... - def get_routes_with_http_info(self, **kwargs): ... - async def get_routes_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/rules_service.pyi b/stubs/influxdb-client/influxdb_client/service/rules_service.pyi deleted file mode 100644 index fc280827a679..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/rules_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class RulesService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_notification_rules_id_query(self, rule_id, **kwargs): ... - def get_notification_rules_id_query_with_http_info(self, rule_id, **kwargs): ... - async def get_notification_rules_id_query_async(self, rule_id, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/scraper_targets_service.pyi b/stubs/influxdb-client/influxdb_client/service/scraper_targets_service.pyi deleted file mode 100644 index 99ce5213a706..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/scraper_targets_service.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class ScraperTargetsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_scrapers_id(self, scraper_target_id, **kwargs): ... - def delete_scrapers_id_with_http_info(self, scraper_target_id, **kwargs): ... - async def delete_scrapers_id_async(self, scraper_target_id, **kwargs): ... - def delete_scrapers_id_labels_id(self, scraper_target_id, label_id, **kwargs): ... - def delete_scrapers_id_labels_id_with_http_info(self, scraper_target_id, label_id, **kwargs): ... - async def delete_scrapers_id_labels_id_async(self, scraper_target_id, label_id, **kwargs): ... - def delete_scrapers_id_members_id(self, user_id, scraper_target_id, **kwargs): ... - def delete_scrapers_id_members_id_with_http_info(self, user_id, scraper_target_id, **kwargs): ... - async def delete_scrapers_id_members_id_async(self, user_id, scraper_target_id, **kwargs): ... 
- def delete_scrapers_id_owners_id(self, user_id, scraper_target_id, **kwargs): ... - def delete_scrapers_id_owners_id_with_http_info(self, user_id, scraper_target_id, **kwargs): ... - async def delete_scrapers_id_owners_id_async(self, user_id, scraper_target_id, **kwargs): ... - def get_scrapers(self, **kwargs): ... - def get_scrapers_with_http_info(self, **kwargs): ... - async def get_scrapers_async(self, **kwargs): ... - def get_scrapers_id(self, scraper_target_id, **kwargs): ... - def get_scrapers_id_with_http_info(self, scraper_target_id, **kwargs): ... - async def get_scrapers_id_async(self, scraper_target_id, **kwargs): ... - def get_scrapers_id_labels(self, scraper_target_id, **kwargs): ... - def get_scrapers_id_labels_with_http_info(self, scraper_target_id, **kwargs): ... - async def get_scrapers_id_labels_async(self, scraper_target_id, **kwargs): ... - def get_scrapers_id_members(self, scraper_target_id, **kwargs): ... - def get_scrapers_id_members_with_http_info(self, scraper_target_id, **kwargs): ... - async def get_scrapers_id_members_async(self, scraper_target_id, **kwargs): ... - def get_scrapers_id_owners(self, scraper_target_id, **kwargs): ... - def get_scrapers_id_owners_with_http_info(self, scraper_target_id, **kwargs): ... - async def get_scrapers_id_owners_async(self, scraper_target_id, **kwargs): ... - def patch_scrapers_id(self, scraper_target_id, scraper_target_request, **kwargs): ... - def patch_scrapers_id_with_http_info(self, scraper_target_id, scraper_target_request, **kwargs): ... - async def patch_scrapers_id_async(self, scraper_target_id, scraper_target_request, **kwargs): ... - def post_scrapers(self, scraper_target_request, **kwargs): ... - def post_scrapers_with_http_info(self, scraper_target_request, **kwargs): ... - async def post_scrapers_async(self, scraper_target_request, **kwargs): ... - def post_scrapers_id_labels(self, scraper_target_id, label_mapping, **kwargs): ... - def post_scrapers_id_labels_with_http_info(self, scraper_target_id, label_mapping, **kwargs): ... - async def post_scrapers_id_labels_async(self, scraper_target_id, label_mapping, **kwargs): ... - def post_scrapers_id_members(self, scraper_target_id, add_resource_member_request_body, **kwargs): ... - def post_scrapers_id_members_with_http_info(self, scraper_target_id, add_resource_member_request_body, **kwargs): ... - async def post_scrapers_id_members_async(self, scraper_target_id, add_resource_member_request_body, **kwargs): ... - def post_scrapers_id_owners(self, scraper_target_id, add_resource_member_request_body, **kwargs): ... - def post_scrapers_id_owners_with_http_info(self, scraper_target_id, add_resource_member_request_body, **kwargs): ... - async def post_scrapers_id_owners_async(self, scraper_target_id, add_resource_member_request_body, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/secrets_service.pyi b/stubs/influxdb-client/influxdb_client/service/secrets_service.pyi deleted file mode 100644 index 73a935a1119c..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/secrets_service.pyi +++ /dev/null @@ -1,18 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class SecretsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_orgs_id_secrets_id(self, org_id, secret_id, **kwargs): ... - def delete_orgs_id_secrets_id_with_http_info(self, org_id, secret_id, **kwargs): ... 
- async def delete_orgs_id_secrets_id_async(self, org_id, secret_id, **kwargs): ... - def get_orgs_id_secrets(self, org_id, **kwargs): ... - def get_orgs_id_secrets_with_http_info(self, org_id, **kwargs): ... - async def get_orgs_id_secrets_async(self, org_id, **kwargs): ... - def patch_orgs_id_secrets(self, org_id, request_body, **kwargs): ... - def patch_orgs_id_secrets_with_http_info(self, org_id, request_body, **kwargs): ... - async def patch_orgs_id_secrets_async(self, org_id, request_body, **kwargs): ... - def post_orgs_id_secrets(self, org_id, secret_keys, **kwargs): ... - def post_orgs_id_secrets_with_http_info(self, org_id, secret_keys, **kwargs): ... - async def post_orgs_id_secrets_async(self, org_id, secret_keys, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/setup_service.pyi b/stubs/influxdb-client/influxdb_client/service/setup_service.pyi deleted file mode 100644 index 576736a42f01..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/setup_service.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class SetupService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_setup(self, **kwargs): ... - def get_setup_with_http_info(self, **kwargs): ... - async def get_setup_async(self, **kwargs): ... - def post_setup(self, onboarding_request, **kwargs): ... - def post_setup_with_http_info(self, onboarding_request, **kwargs): ... - async def post_setup_async(self, onboarding_request, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/signin_service.pyi b/stubs/influxdb-client/influxdb_client/service/signin_service.pyi deleted file mode 100644 index 0d6d357b14d3..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/signin_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class SigninService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def post_signin(self, **kwargs): ... - def post_signin_with_http_info(self, **kwargs): ... - async def post_signin_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/signout_service.pyi b/stubs/influxdb-client/influxdb_client/service/signout_service.pyi deleted file mode 100644 index 59adb059a975..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/signout_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class SignoutService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def post_signout(self, **kwargs): ... - def post_signout_with_http_info(self, **kwargs): ... - async def post_signout_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/sources_service.pyi b/stubs/influxdb-client/influxdb_client/service/sources_service.pyi deleted file mode 100644 index 762a197d209a..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/sources_service.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class SourcesService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_sources_id(self, source_id, **kwargs): ... 
- def delete_sources_id_with_http_info(self, source_id, **kwargs): ... - async def delete_sources_id_async(self, source_id, **kwargs): ... - def get_sources(self, **kwargs): ... - def get_sources_with_http_info(self, **kwargs): ... - async def get_sources_async(self, **kwargs): ... - def get_sources_id(self, source_id, **kwargs): ... - def get_sources_id_with_http_info(self, source_id, **kwargs): ... - async def get_sources_id_async(self, source_id, **kwargs): ... - def get_sources_id_buckets(self, source_id, **kwargs): ... - def get_sources_id_buckets_with_http_info(self, source_id, **kwargs): ... - async def get_sources_id_buckets_async(self, source_id, **kwargs): ... - def get_sources_id_health(self, source_id, **kwargs): ... - def get_sources_id_health_with_http_info(self, source_id, **kwargs): ... - async def get_sources_id_health_async(self, source_id, **kwargs): ... - def patch_sources_id(self, source_id, source, **kwargs): ... - def patch_sources_id_with_http_info(self, source_id, source, **kwargs): ... - async def patch_sources_id_async(self, source_id, source, **kwargs): ... - def post_sources(self, source, **kwargs): ... - def post_sources_with_http_info(self, source, **kwargs): ... - async def post_sources_async(self, source, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/tasks_service.pyi b/stubs/influxdb-client/influxdb_client/service/tasks_service.pyi deleted file mode 100644 index f539b5acdd0a..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/tasks_service.pyi +++ /dev/null @@ -1,130 +0,0 @@ -from _typeshed import Incomplete -from multiprocessing.pool import ApplyResult -from typing import Literal, overload - -from influxdb_client.domain.tasks import Tasks -from influxdb_client.service._base_service import _BaseService - -class TasksService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_tasks_id(self, task_id, **kwargs): ... - def delete_tasks_id_with_http_info(self, task_id, **kwargs): ... - async def delete_tasks_id_async(self, task_id, **kwargs): ... - def delete_tasks_id_labels_id(self, task_id, label_id, **kwargs): ... - def delete_tasks_id_labels_id_with_http_info(self, task_id, label_id, **kwargs): ... - async def delete_tasks_id_labels_id_async(self, task_id, label_id, **kwargs): ... - def delete_tasks_id_members_id(self, user_id, task_id, **kwargs): ... - def delete_tasks_id_members_id_with_http_info(self, user_id, task_id, **kwargs): ... - async def delete_tasks_id_members_id_async(self, user_id, task_id, **kwargs): ... - def delete_tasks_id_owners_id(self, user_id, task_id, **kwargs): ... - def delete_tasks_id_owners_id_with_http_info(self, user_id, task_id, **kwargs): ... - async def delete_tasks_id_owners_id_async(self, user_id, task_id, **kwargs): ... - def delete_tasks_id_runs_id(self, task_id, run_id, **kwargs): ... - def delete_tasks_id_runs_id_with_http_info(self, task_id, run_id, **kwargs): ... - async def delete_tasks_id_runs_id_async(self, task_id, run_id, **kwargs): ... - @overload - def get_tasks( - self, - *, - async_req: Literal[True], - urlopen_kw=..., - zap_trace_span: str = ..., - name: str = ..., - after: str = ..., - user: str = ..., - org: str = ..., - org_id: str = ..., - limit: int = ..., - type: str = ..., - ) -> ApplyResult[Tasks]: ... 
- @overload - def get_tasks( - self, - *, - async_req: Literal[False] = ..., - urlopen_kw=..., - zap_trace_span: str = ..., - name: str = ..., - after: str = ..., - user: str = ..., - org: str = ..., - org_id: str = ..., - limit: int = ..., - type: str = ..., - ) -> Tasks: ... - @overload - def get_tasks_with_http_info( - self, - *, - async_req: Literal[True], - urlopen_kw=..., - zap_trace_span: str = ..., - name: str = ..., - after: str = ..., - user: str = ..., - org: str = ..., - org_id: str = ..., - limit: int = ..., - type: str = ..., - ) -> ApplyResult[Tasks]: ... - @overload - def get_tasks_with_http_info( - self, - *, - async_req: Literal[False] = ..., - urlopen_kw=..., - zap_trace_span: str = ..., - name: str = ..., - after: str = ..., - user: str = ..., - org: str = ..., - org_id: str = ..., - limit: int = ..., - type: str = ..., - ) -> Tasks: ... - async def get_tasks_async(self, **kwargs): ... - def get_tasks_id(self, task_id, **kwargs): ... - def get_tasks_id_with_http_info(self, task_id, **kwargs): ... - async def get_tasks_id_async(self, task_id, **kwargs): ... - def get_tasks_id_labels(self, task_id, **kwargs): ... - def get_tasks_id_labels_with_http_info(self, task_id, **kwargs): ... - async def get_tasks_id_labels_async(self, task_id, **kwargs): ... - def get_tasks_id_logs(self, task_id, **kwargs): ... - def get_tasks_id_logs_with_http_info(self, task_id, **kwargs): ... - async def get_tasks_id_logs_async(self, task_id, **kwargs): ... - def get_tasks_id_members(self, task_id, **kwargs): ... - def get_tasks_id_members_with_http_info(self, task_id, **kwargs): ... - async def get_tasks_id_members_async(self, task_id, **kwargs): ... - def get_tasks_id_owners(self, task_id, **kwargs): ... - def get_tasks_id_owners_with_http_info(self, task_id, **kwargs): ... - async def get_tasks_id_owners_async(self, task_id, **kwargs): ... - def get_tasks_id_runs(self, task_id, **kwargs): ... - def get_tasks_id_runs_with_http_info(self, task_id, **kwargs): ... - async def get_tasks_id_runs_async(self, task_id, **kwargs): ... - def get_tasks_id_runs_id(self, task_id, run_id, **kwargs): ... - def get_tasks_id_runs_id_with_http_info(self, task_id, run_id, **kwargs): ... - async def get_tasks_id_runs_id_async(self, task_id, run_id, **kwargs): ... - def get_tasks_id_runs_id_logs(self, task_id, run_id, **kwargs): ... - def get_tasks_id_runs_id_logs_with_http_info(self, task_id, run_id, **kwargs): ... - async def get_tasks_id_runs_id_logs_async(self, task_id, run_id, **kwargs): ... - def patch_tasks_id(self, task_id, task_update_request, **kwargs): ... - def patch_tasks_id_with_http_info(self, task_id, task_update_request, **kwargs): ... - async def patch_tasks_id_async(self, task_id, task_update_request, **kwargs): ... - def post_tasks(self, task_create_request, **kwargs): ... - def post_tasks_with_http_info(self, task_create_request, **kwargs): ... - async def post_tasks_async(self, task_create_request, **kwargs): ... - def post_tasks_id_labels(self, task_id, label_mapping, **kwargs): ... - def post_tasks_id_labels_with_http_info(self, task_id, label_mapping, **kwargs): ... - async def post_tasks_id_labels_async(self, task_id, label_mapping, **kwargs): ... - def post_tasks_id_members(self, task_id, add_resource_member_request_body, **kwargs): ... - def post_tasks_id_members_with_http_info(self, task_id, add_resource_member_request_body, **kwargs): ... - async def post_tasks_id_members_async(self, task_id, add_resource_member_request_body, **kwargs): ... 
- def post_tasks_id_owners(self, task_id, add_resource_member_request_body, **kwargs): ... - def post_tasks_id_owners_with_http_info(self, task_id, add_resource_member_request_body, **kwargs): ... - async def post_tasks_id_owners_async(self, task_id, add_resource_member_request_body, **kwargs): ... - def post_tasks_id_runs(self, task_id, **kwargs): ... - def post_tasks_id_runs_with_http_info(self, task_id, **kwargs): ... - async def post_tasks_id_runs_async(self, task_id, **kwargs): ... - def post_tasks_id_runs_id_retry(self, task_id, run_id, **kwargs): ... - def post_tasks_id_runs_id_retry_with_http_info(self, task_id, run_id, **kwargs): ... - async def post_tasks_id_runs_id_retry_async(self, task_id, run_id, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/telegraf_plugins_service.pyi b/stubs/influxdb-client/influxdb_client/service/telegraf_plugins_service.pyi deleted file mode 100644 index 6a985c2c9d4e..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/telegraf_plugins_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class TelegrafPluginsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_telegraf_plugins(self, **kwargs): ... - def get_telegraf_plugins_with_http_info(self, **kwargs): ... - async def get_telegraf_plugins_async(self, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/telegrafs_service.pyi b/stubs/influxdb-client/influxdb_client/service/telegrafs_service.pyi deleted file mode 100644 index 7b6be0bfb691..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/telegrafs_service.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class TelegrafsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_telegrafs_id(self, telegraf_id, **kwargs): ... - def delete_telegrafs_id_with_http_info(self, telegraf_id, **kwargs): ... - async def delete_telegrafs_id_async(self, telegraf_id, **kwargs): ... - def delete_telegrafs_id_labels_id(self, telegraf_id, label_id, **kwargs): ... - def delete_telegrafs_id_labels_id_with_http_info(self, telegraf_id, label_id, **kwargs): ... - async def delete_telegrafs_id_labels_id_async(self, telegraf_id, label_id, **kwargs): ... - def delete_telegrafs_id_members_id(self, user_id, telegraf_id, **kwargs): ... - def delete_telegrafs_id_members_id_with_http_info(self, user_id, telegraf_id, **kwargs): ... - async def delete_telegrafs_id_members_id_async(self, user_id, telegraf_id, **kwargs): ... - def delete_telegrafs_id_owners_id(self, user_id, telegraf_id, **kwargs): ... - def delete_telegrafs_id_owners_id_with_http_info(self, user_id, telegraf_id, **kwargs): ... - async def delete_telegrafs_id_owners_id_async(self, user_id, telegraf_id, **kwargs): ... - def get_telegrafs(self, **kwargs): ... - def get_telegrafs_with_http_info(self, **kwargs): ... - async def get_telegrafs_async(self, **kwargs): ... - def get_telegrafs_id(self, telegraf_id, **kwargs): ... - def get_telegrafs_id_with_http_info(self, telegraf_id, **kwargs): ... - async def get_telegrafs_id_async(self, telegraf_id, **kwargs): ... - def get_telegrafs_id_labels(self, telegraf_id, **kwargs): ... - def get_telegrafs_id_labels_with_http_info(self, telegraf_id, **kwargs): ... 
- async def get_telegrafs_id_labels_async(self, telegraf_id, **kwargs): ... - def get_telegrafs_id_members(self, telegraf_id, **kwargs): ... - def get_telegrafs_id_members_with_http_info(self, telegraf_id, **kwargs): ... - async def get_telegrafs_id_members_async(self, telegraf_id, **kwargs): ... - def get_telegrafs_id_owners(self, telegraf_id, **kwargs): ... - def get_telegrafs_id_owners_with_http_info(self, telegraf_id, **kwargs): ... - async def get_telegrafs_id_owners_async(self, telegraf_id, **kwargs): ... - def post_telegrafs(self, telegraf_plugin_request, **kwargs): ... - def post_telegrafs_with_http_info(self, telegraf_plugin_request, **kwargs): ... - async def post_telegrafs_async(self, telegraf_plugin_request, **kwargs): ... - def post_telegrafs_id_labels(self, telegraf_id, label_mapping, **kwargs): ... - def post_telegrafs_id_labels_with_http_info(self, telegraf_id, label_mapping, **kwargs): ... - async def post_telegrafs_id_labels_async(self, telegraf_id, label_mapping, **kwargs): ... - def post_telegrafs_id_members(self, telegraf_id, add_resource_member_request_body, **kwargs): ... - def post_telegrafs_id_members_with_http_info(self, telegraf_id, add_resource_member_request_body, **kwargs): ... - async def post_telegrafs_id_members_async(self, telegraf_id, add_resource_member_request_body, **kwargs): ... - def post_telegrafs_id_owners(self, telegraf_id, add_resource_member_request_body, **kwargs): ... - def post_telegrafs_id_owners_with_http_info(self, telegraf_id, add_resource_member_request_body, **kwargs): ... - async def post_telegrafs_id_owners_async(self, telegraf_id, add_resource_member_request_body, **kwargs): ... - def put_telegrafs_id(self, telegraf_id, telegraf_plugin_request, **kwargs): ... - def put_telegrafs_id_with_http_info(self, telegraf_id, telegraf_plugin_request, **kwargs): ... - async def put_telegrafs_id_async(self, telegraf_id, telegraf_plugin_request, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/templates_service.pyi b/stubs/influxdb-client/influxdb_client/service/templates_service.pyi deleted file mode 100644 index 21bb85f65f47..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/templates_service.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class TemplatesService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def apply_template(self, template_apply, **kwargs): ... - def apply_template_with_http_info(self, template_apply, **kwargs): ... - async def apply_template_async(self, template_apply, **kwargs): ... - def create_stack(self, **kwargs): ... - def create_stack_with_http_info(self, **kwargs): ... - async def create_stack_async(self, **kwargs): ... - def delete_stack(self, stack_id, org_id, **kwargs): ... - def delete_stack_with_http_info(self, stack_id, org_id, **kwargs): ... - async def delete_stack_async(self, stack_id, org_id, **kwargs): ... - def export_template(self, **kwargs): ... - def export_template_with_http_info(self, **kwargs): ... - async def export_template_async(self, **kwargs): ... - def list_stacks(self, org_id, **kwargs): ... - def list_stacks_with_http_info(self, org_id, **kwargs): ... - async def list_stacks_async(self, org_id, **kwargs): ... - def read_stack(self, stack_id, **kwargs): ... - def read_stack_with_http_info(self, stack_id, **kwargs): ... - async def read_stack_async(self, stack_id, **kwargs): ... 
- def uninstall_stack(self, stack_id, **kwargs): ... - def uninstall_stack_with_http_info(self, stack_id, **kwargs): ... - async def uninstall_stack_async(self, stack_id, **kwargs): ... - def update_stack(self, stack_id, **kwargs): ... - def update_stack_with_http_info(self, stack_id, **kwargs): ... - async def update_stack_async(self, stack_id, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/users_service.pyi b/stubs/influxdb-client/influxdb_client/service/users_service.pyi deleted file mode 100644 index c4f52aab7c17..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/users_service.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class UsersService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_users_id(self, user_id, **kwargs): ... - def delete_users_id_with_http_info(self, user_id, **kwargs): ... - async def delete_users_id_async(self, user_id, **kwargs): ... - def get_me(self, **kwargs): ... - def get_me_with_http_info(self, **kwargs): ... - async def get_me_async(self, **kwargs): ... - def get_users(self, **kwargs): ... - def get_users_with_http_info(self, **kwargs): ... - async def get_users_async(self, **kwargs): ... - def get_users_id(self, user_id, **kwargs): ... - def get_users_id_with_http_info(self, user_id, **kwargs): ... - async def get_users_id_async(self, user_id, **kwargs): ... - def patch_users_id(self, user_id, user, **kwargs): ... - def patch_users_id_with_http_info(self, user_id, user, **kwargs): ... - async def patch_users_id_async(self, user_id, user, **kwargs): ... - def post_users(self, user, **kwargs): ... - def post_users_with_http_info(self, user, **kwargs): ... - async def post_users_async(self, user, **kwargs): ... - def post_users_id_password(self, user_id, password_reset_body, **kwargs): ... - def post_users_id_password_with_http_info(self, user_id, password_reset_body, **kwargs): ... - async def post_users_id_password_async(self, user_id, password_reset_body, **kwargs): ... - def put_me_password(self, password_reset_body, **kwargs): ... - def put_me_password_with_http_info(self, password_reset_body, **kwargs): ... - async def put_me_password_async(self, password_reset_body, **kwargs): ... - def put_users_id_password(self, user_id, password_reset_body, **kwargs): ... - def put_users_id_password_with_http_info(self, user_id, password_reset_body, **kwargs): ... - async def put_users_id_password_async(self, user_id, password_reset_body, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/variables_service.pyi b/stubs/influxdb-client/influxdb_client/service/variables_service.pyi deleted file mode 100644 index 1afa53d20dc1..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/variables_service.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class VariablesService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def delete_variables_id(self, variable_id, **kwargs): ... - def delete_variables_id_with_http_info(self, variable_id, **kwargs): ... - async def delete_variables_id_async(self, variable_id, **kwargs): ... - def delete_variables_id_labels_id(self, variable_id, label_id, **kwargs): ... - def delete_variables_id_labels_id_with_http_info(self, variable_id, label_id, **kwargs): ... 
- async def delete_variables_id_labels_id_async(self, variable_id, label_id, **kwargs): ... - def get_variables(self, **kwargs): ... - def get_variables_with_http_info(self, **kwargs): ... - async def get_variables_async(self, **kwargs): ... - def get_variables_id(self, variable_id, **kwargs): ... - def get_variables_id_with_http_info(self, variable_id, **kwargs): ... - async def get_variables_id_async(self, variable_id, **kwargs): ... - def get_variables_id_labels(self, variable_id, **kwargs): ... - def get_variables_id_labels_with_http_info(self, variable_id, **kwargs): ... - async def get_variables_id_labels_async(self, variable_id, **kwargs): ... - def patch_variables_id(self, variable_id, variable, **kwargs): ... - def patch_variables_id_with_http_info(self, variable_id, variable, **kwargs): ... - async def patch_variables_id_async(self, variable_id, variable, **kwargs): ... - def post_variables(self, variable, **kwargs): ... - def post_variables_with_http_info(self, variable, **kwargs): ... - async def post_variables_async(self, variable, **kwargs): ... - def post_variables_id_labels(self, variable_id, label_mapping, **kwargs): ... - def post_variables_id_labels_with_http_info(self, variable_id, label_mapping, **kwargs): ... - async def post_variables_id_labels_async(self, variable_id, label_mapping, **kwargs): ... - def put_variables_id(self, variable_id, variable, **kwargs): ... - def put_variables_id_with_http_info(self, variable_id, variable, **kwargs): ... - async def put_variables_id_async(self, variable_id, variable, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/views_service.pyi b/stubs/influxdb-client/influxdb_client/service/views_service.pyi deleted file mode 100644 index b55fd238b9f0..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/views_service.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class ViewsService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def get_dashboards_id_cells_id_view(self, dashboard_id, cell_id, **kwargs): ... - def get_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, **kwargs): ... - async def get_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, **kwargs): ... - def patch_dashboards_id_cells_id_view(self, dashboard_id, cell_id, view, **kwargs): ... - def patch_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, view, **kwargs): ... - async def patch_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, view, **kwargs): ... diff --git a/stubs/influxdb-client/influxdb_client/service/write_service.pyi b/stubs/influxdb-client/influxdb_client/service/write_service.pyi deleted file mode 100644 index 748b716ba392..000000000000 --- a/stubs/influxdb-client/influxdb_client/service/write_service.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import Incomplete - -from influxdb_client.service._base_service import _BaseService - -class WriteService(_BaseService): - def __init__(self, api_client: Incomplete | None = None) -> None: ... - def post_write(self, org, bucket, body, **kwargs): ... - def post_write_with_http_info(self, org, bucket, body, **kwargs): ... - async def post_write_async(self, org, bucket, body, **kwargs): ... 
diff --git a/stubs/influxdb-client/influxdb_client/version.pyi b/stubs/influxdb-client/influxdb_client/version.pyi deleted file mode 100644 index 3acee936dc41..000000000000 --- a/stubs/influxdb-client/influxdb_client/version.pyi +++ /dev/null @@ -1 +0,0 @@ -VERSION: str From 50eee672979b0fed990b4752cb05b4b1e02b47d8 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 1 Apr 2025 12:19:47 +0100 Subject: [PATCH 167/388] Disable Python version updates for `actions/setup-python` from Renovate (#13758) --- .github/{renovate.json => renovate.json5} | 10 ++++++++++ 1 file changed, 10 insertions(+) rename .github/{renovate.json => renovate.json5} (72%) diff --git a/.github/renovate.json b/.github/renovate.json5 similarity index 72% rename from .github/renovate.json rename to .github/renovate.json5 index afceb5b31b9e..c48c0d253c71 100644 --- a/.github/renovate.json +++ b/.github/renovate.json5 @@ -17,6 +17,16 @@ "description": "Quarterly update of GitHub Action dependencies", "schedule": ["every 3 months on the first day of the month"] }, + { + // This package rule disables updates for `actions/setup-python` Python versions: + // it's better to do these manually as there's often a reason why we can't use + // the latest Python version in CI for a specific job + groupName: "Python versions", + matchManagers: ["github-actions"], + matchPackageNames: ["python"], + description: "Disable PRs updating Python versions", + enabled: false, + }, { "groupName": "most test/lint dependencies", "matchManagers": ["pip_requirements", "pre-commit"], From 60a9cdf02fafa2e7df9f729886405ae0f93203cc Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 1 Apr 2025 07:27:33 -0400 Subject: [PATCH 168/388] Enable Ruff flake8-use-pathlib (PTH) (autofix only) (#13750) --- lib/ts_utils/paths.py | 2 +- pyproject.toml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/ts_utils/paths.py b/lib/ts_utils/paths.py index 63119231720d..2894aa24b2d7 100644 --- a/lib/ts_utils/paths.py +++ b/lib/ts_utils/paths.py @@ -5,7 +5,7 @@ # installed into the user's virtual env, so we can't determine the path # to typeshed. Installing ts_utils editable would solve that, see # https://github.com/python/typeshed/pull/12806. 
-TS_BASE_PATH: Final = Path("") +TS_BASE_PATH: Final = Path() STDLIB_PATH: Final = TS_BASE_PATH / "stdlib" STUBS_PATH: Final = TS_BASE_PATH / "stubs" diff --git a/pyproject.toml b/pyproject.toml index 3cfcddda7b5f..1acc43440d07 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,6 +76,9 @@ select = [ "FURB169", # Compare the identities of `{object}` and None instead of their respective types "FURB177", # Prefer `Path.cwd()` over `Path().resolve()` for current-directory lookups "FURB187", # Use of assignment of `reversed` on list `{name}` + # Autofixable flake8-use-pathlib only + "PTH201", # Do not pass the current directory explicitly to `Path` + "PTH210", # Invalid suffix passed to `.with_suffix()` # PYI: only enable rules that have autofixes and that we always want to fix (even manually), # avoids duplicate # noqa with flake8-pyi "PYI009", # Empty body should contain `...`, not pass From 0724d8ddff7b5750020f29647c2fc0a5b793fc3f Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 1 Apr 2025 09:12:15 -0400 Subject: [PATCH 169/388] Enable Ruff PLW (Pylint Warning) (#13749) --- lib/ts_utils/utils.py | 8 ++++---- pyproject.toml | 10 ++++++---- scripts/create_baseline_stubs.py | 10 +++++----- scripts/stubsabot.py | 4 ++-- scripts/sync_protobuf/_utils.py | 4 +++- scripts/sync_protobuf/google_protobuf.py | 2 +- scripts/sync_protobuf/s2clientprotocol.py | 2 +- scripts/sync_protobuf/tensorflow.py | 6 +++--- tests/mypy_test.py | 4 ++-- tests/pyright_test.py | 4 ++-- tests/regr_test.py | 2 +- tests/runtests.py | 16 +++++++++------- tests/stubtest_third_party.py | 6 +++--- tests/typecheck_typeshed.py | 2 +- 14 files changed, 43 insertions(+), 37 deletions(-) diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py index 522db807a29e..667b47c6fabd 100644 --- a/lib/ts_utils/utils.py +++ b/lib/ts_utils/utils.py @@ -123,11 +123,11 @@ def parse_stdlib_versions_file() -> SupportedVersionsDict: result: dict[str, tuple[VersionTuple, VersionTuple]] = {} with VERSIONS_PATH.open(encoding="UTF-8") as f: for line in f: - line = strip_comments(line) - if line == "": + stripped_line = strip_comments(line) + if stripped_line == "": continue - m = VERSION_LINE_RE.match(line) - assert m, f"invalid VERSIONS line: {line}" + m = VERSION_LINE_RE.match(stripped_line) + assert m, f"invalid VERSIONS line: {stripped_line}" mod: str = m.group(1) assert mod not in result, f"Duplicate module {mod} in VERSIONS" min_version = _parse_version(m.group(2)) diff --git a/pyproject.toml b/pyproject.toml index 1acc43440d07..7e3e8641f91c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,9 +53,7 @@ select = [ "I", # isort "N", # pep8-naming "PGH", # pygrep-hooks - "PLC", # Pylint Convention - "PLE", # Pylint Error - "PLR", # Pylint Refactor + "PL", # Pylint "RUF", # Ruff-specific and unused-noqa "TRY", # tryceratops "UP", # pyupgrade @@ -186,15 +184,19 @@ ignore = [ # Rules that are out of the control of stub authors: ### # Names in stubs should match the implementation, even if it's ambiguous. + # https://github.com/astral-sh/ruff/issues/15293 "A", # flake8-builtins - "F403", # `from . import *` used; unable to detect undefined names # Stubs can sometimes re-export entire modules. # Issues with using a star-imported name will be caught by type-checkers. + "F403", # `from . import *` used; unable to detect undefined names "F405", # may be undefined, or defined from star imports # Most pep8-naming rules don't apply for third-party stubs like typeshed. 
# N811 to N814 could apply, but we often use them to disambiguate a name whilst making it look like a more common one "N8", # pep8-naming + # Stubs are allowed to use private variables (pyright's reportPrivateUsage is also disabled) "PLC2701", # Private name import from external module + # Names in stubs should match implementation + "PLW0211", # First argument of a static method should not be named `{argument_name}` ] "lib/ts_utils/**" = [ # Doesn't affect stubs. The only re-exports we have should be in our local lib ts_utils diff --git a/scripts/create_baseline_stubs.py b/scripts/create_baseline_stubs.py index 2aab6aea5710..223ef89cfcb2 100755 --- a/scripts/create_baseline_stubs.py +++ b/scripts/create_baseline_stubs.py @@ -59,17 +59,17 @@ def run_stubgen(package: str, output: str) -> None: def run_stubdefaulter(stub_dir: str) -> None: print(f"Running stubdefaulter: stubdefaulter --packages {stub_dir}") - subprocess.run(["stubdefaulter", "--packages", stub_dir]) + subprocess.run(["stubdefaulter", "--packages", stub_dir], check=False) def run_black(stub_dir: str) -> None: print(f"Running Black: black {stub_dir}") - subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")]) + subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")], check=False) def run_ruff(stub_dir: str) -> None: print(f"Running Ruff: ruff check {stub_dir} --fix-only") - subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"]) + subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"], check=False) async def get_project_urls_from_pypi(project: str, session: aiohttp.ClientSession) -> dict[str, str]: @@ -102,9 +102,9 @@ async def get_upstream_repo_url(project: str) -> str | None: url for url_name, url in project_urls.items() if url_name not in url_names_probably_pointing_to_source ) - for url in urls_to_check: + for url_to_check in urls_to_check: # Remove `www.`; replace `http://` with `https://` - url = re.sub(r"^(https?://)?(www\.)?", "https://", url) + url = re.sub(r"^(https?://)?(www\.)?", "https://", url_to_check) netloc = urllib.parse.urlparse(url).netloc if netloc in {"gitlab.com", "github.com", "bitbucket.org", "foss.heptapod.net"}: # truncate to https://site.com/user/repo diff --git a/scripts/stubsabot.py b/scripts/stubsabot.py index a8e1b2ba5c73..87f0fd18d720 100755 --- a/scripts/stubsabot.py +++ b/scripts/stubsabot.py @@ -754,8 +754,8 @@ async def main() -> None: dists_to_update = [path.name for path in STUBS_PATH.iterdir()] if args.action_level > ActionLevel.nothing: - subprocess.run(["git", "update-index", "--refresh"], capture_output=True) - diff_result = subprocess.run(["git", "diff-index", "HEAD", "--name-only"], text=True, capture_output=True) + subprocess.run(["git", "update-index", "--refresh"], capture_output=True, check=False) + diff_result = subprocess.run(["git", "diff-index", "HEAD", "--name-only"], text=True, capture_output=True, check=False) if diff_result.returncode: print("Unexpected exception!") print(diff_result.stdout) diff --git a/scripts/sync_protobuf/_utils.py b/scripts/sync_protobuf/_utils.py index 0c49c5a6fa9a..cb82ba1709a0 100644 --- a/scripts/sync_protobuf/_utils.py +++ b/scripts/sync_protobuf/_utils.py @@ -35,7 +35,9 @@ def run_protoc( ) -> str: """TODO: Describe parameters and return.""" protoc_version = ( - subprocess.run([sys.executable, "-m", "grpc_tools.protoc", "--version"], capture_output=True).stdout.decode().strip() + subprocess.run([sys.executable, "-m", 
"grpc_tools.protoc", "--version"], capture_output=True, check=False) + .stdout.decode() + .strip() ) print() print(protoc_version) diff --git a/scripts/sync_protobuf/google_protobuf.py b/scripts/sync_protobuf/google_protobuf.py index ee238f82618d..e36373cff108 100755 --- a/scripts/sync_protobuf/google_protobuf.py +++ b/scripts/sync_protobuf/google_protobuf.py @@ -90,7 +90,7 @@ def main() -> None: print("Updated protobuf/METADATA.toml") # Run pre-commit to cleanup the stubs - subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi"))) + subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi")), check=False) if __name__ == "__main__": diff --git a/scripts/sync_protobuf/s2clientprotocol.py b/scripts/sync_protobuf/s2clientprotocol.py index 989f57a4cd8d..cee68e1edea9 100755 --- a/scripts/sync_protobuf/s2clientprotocol.py +++ b/scripts/sync_protobuf/s2clientprotocol.py @@ -69,7 +69,7 @@ def main() -> None: print("Updated s2clientprotocol/METADATA.toml") # Run pre-commit to cleanup the stubs - subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi"))) + subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi")), check=False) if __name__ == "__main__": diff --git a/scripts/sync_protobuf/tensorflow.py b/scripts/sync_protobuf/tensorflow.py index b26ee90ccabf..3c84980f1cd8 100755 --- a/scripts/sync_protobuf/tensorflow.py +++ b/scripts/sync_protobuf/tensorflow.py @@ -72,7 +72,7 @@ def post_creation() -> None: for path in STUBS_FOLDER.rglob("*_pb2.pyi"): print(f"Fixing imports in '{path}'") - with open(path) as file: + with open(path, encoding="utf-8") as file: filedata = file.read() # Replace the target string @@ -80,7 +80,7 @@ def post_creation() -> None: filedata = re.sub(XLA_IMPORT_PATTERN, "\\1tensorflow.compiler.xla.", filedata) # Write the file out again - with open(path, "w") as file: + with open(path, "w", encoding="utf-8") as file: file.write(filedata) print() @@ -137,7 +137,7 @@ def main() -> None: print("Updated tensorflow/METADATA.toml") # Run pre-commit to cleanup the stubs - subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi"))) + subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi")), check=False) if __name__ == "__main__": diff --git a/tests/mypy_test.py b/tests/mypy_test.py index 0fea2d56b0a7..9ccc0138229c 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -277,7 +277,7 @@ def run_mypy( mypy_command = [python_path, "-m", "mypy", *mypy_args] if args.verbose: print(colored(f"running {' '.join(mypy_command)}", "blue")) - result = subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars) + result = subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars, check=False) if result.returncode: print_error(f"failure (exit code {result.returncode})\n") if result.stdout: @@ -286,7 +286,7 @@ def run_mypy( print_error(result.stderr) if non_types_dependencies and args.verbose: print("Ran with the following environment:") - subprocess.run(["uv", "pip", "freeze"], env={**os.environ, "VIRTUAL_ENV": str(venv_dir)}) + subprocess.run(["uv", "pip", "freeze"], env={**os.environ, "VIRTUAL_ENV": str(venv_dir)}, check=False) print() else: print_success_msg() diff --git a/tests/pyright_test.py b/tests/pyright_test.py index 46986a112b4d..04b3df6fa99e 100755 --- a/tests/pyright_test.py +++ 
b/tests/pyright_test.py @@ -24,7 +24,7 @@ def main() -> None: sys.exit(1) try: - subprocess.run([npx, "--version"]) + subprocess.run([npx, "--version"], check=False) except OSError: print("error running npx; is Node.js installed?", file=sys.stderr) sys.exit(1) @@ -40,7 +40,7 @@ def main() -> None: command = [npx, f"pyright@{pyright_version}"] + sys.argv[1:] print_command(command) - ret = subprocess.run(command).returncode + ret = subprocess.run(command, check=False).returncode sys.exit(ret) diff --git a/tests/regr_test.py b/tests/regr_test.py index 248708b90d64..fc4e48c55ff6 100755 --- a/tests/regr_test.py +++ b/tests/regr_test.py @@ -224,7 +224,7 @@ def run_testcases( msg += f"{description}: MYPYPATH not set" msg += "\n" verbose_log(msg) - return subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars) + return subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars, check=False) @dataclass(frozen=True) diff --git a/tests/runtests.py b/tests/runtests.py index 47be0830ba67..e81fb848c7c4 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -76,10 +76,10 @@ def main() -> None: pytype_result: subprocess.CompletedProcess[bytes] | None = None print("\nRunning pre-commit...") - pre_commit_result = subprocess.run(["pre-commit", "run", "--files", *Path(path).rglob("*")]) + pre_commit_result = subprocess.run(["pre-commit", "run", "--files", *Path(path).rglob("*")], check=False) print("\nRunning check_typeshed_structure.py...") - check_structure_result = subprocess.run([sys.executable, "tests/check_typeshed_structure.py"]) + check_structure_result = subprocess.run([sys.executable, "tests/check_typeshed_structure.py"], check=False) strict_params = _get_strict_params(path) print(f"\nRunning Pyright ({'stricter' if strict_params else 'base' } configs) for Python {python_version}...") @@ -87,6 +87,7 @@ def main() -> None: [sys.executable, "tests/pyright_test.py", path, "--pythonversion", python_version, *strict_params], stderr=subprocess.PIPE, text=True, + check=False, ) if re.match(_NPX_ERROR_PATTERN, pyright_result.stderr): print(_NPX_ERROR_MESSAGE) @@ -98,16 +99,16 @@ def main() -> None: pyright_skipped = False print(f"\nRunning mypy for Python {python_version}...") - mypy_result = subprocess.run([sys.executable, "tests/mypy_test.py", path, "--python-version", python_version]) + mypy_result = subprocess.run([sys.executable, "tests/mypy_test.py", path, "--python-version", python_version], check=False) # If mypy failed, stubtest will fail without any helpful error if mypy_result.returncode == 0: if folder == "stdlib": print("\nRunning stubtest...") - stubtest_result = subprocess.run([sys.executable, "tests/stubtest_stdlib.py", stub]) + stubtest_result = subprocess.run([sys.executable, "tests/stubtest_stdlib.py", stub], check=False) else: if run_stubtest: print("\nRunning stubtest...") - stubtest_result = subprocess.run([sys.executable, "tests/stubtest_third_party.py", stub]) + stubtest_result = subprocess.run([sys.executable, "tests/stubtest_third_party.py", stub], check=False) else: print( colored( @@ -122,7 +123,7 @@ def main() -> None: if find_spec("pytype"): print("\nRunning pytype...") - pytype_result = subprocess.run([sys.executable, "tests/pytype_test.py", path]) + pytype_result = subprocess.run([sys.executable, "tests/pytype_test.py", path], check=False) else: print( colored( @@ -149,7 +150,7 @@ def main() -> None: "-p", _TESTCASES_CONFIG_FILE, ] - pyright_testcases_result = subprocess.run(command, stderr=subprocess.PIPE, text=True) + 
pyright_testcases_result = subprocess.run(command, stderr=subprocess.PIPE, text=True, check=False) if re.match(_NPX_ERROR_PATTERN, pyright_testcases_result.stderr): print(_NPX_ERROR_MESSAGE) pyright_testcases_returncode = 0 @@ -164,6 +165,7 @@ def main() -> None: [sys.executable, "tests/regr_test.py", "stdlib" if folder == "stdlib" else stub, "--python-version", python_version], stderr=subprocess.PIPE, text=True, + check=False, ) # No test means they all ran successfully (0 out of 0). Not all 3rd-party stubs have regression tests. if "No test cases found" in regr_test_result.stderr: diff --git a/tests/stubtest_third_party.py b/tests/stubtest_third_party.py index 379ba718d1fd..8b8cb6265dfd 100755 --- a/tests/stubtest_third_party.py +++ b/tests/stubtest_third_party.py @@ -147,11 +147,11 @@ def run_stubtest( print_divider() print("Python version: ", end="", flush=True) - ret = subprocess.run([sys.executable, "-VV"], capture_output=True) + ret = subprocess.run([sys.executable, "-VV"], capture_output=True, check=False) print_command_output(ret) print("\nRan with the following environment:") - ret = subprocess.run([pip_exe, "freeze", "--all"], capture_output=True) + ret = subprocess.run([pip_exe, "freeze", "--all"], capture_output=True, check=False) print_command_output(ret) if keep_tmp_dir: print("Path to virtual environment:", venv_dir, flush=True) @@ -163,7 +163,7 @@ def run_stubtest( print() else: print(f"Re-running stubtest with --generate-allowlist.\nAdd the following to {main_allowlist_path}:") - ret = subprocess.run([*stubtest_cmd, "--generate-allowlist"], env=stubtest_env, capture_output=True) + ret = subprocess.run([*stubtest_cmd, "--generate-allowlist"], env=stubtest_env, capture_output=True, check=False) print_command_output(ret) print_divider() diff --git a/tests/typecheck_typeshed.py b/tests/typecheck_typeshed.py index 2fa853ad7d13..a70ddf9b4a83 100755 --- a/tests/typecheck_typeshed.py +++ b/tests/typecheck_typeshed.py @@ -72,7 +72,7 @@ def run_mypy_as_subprocess(directory: str, platform: str, version: str) -> Retur "--custom-typeshed-dir", ".", ] - result = subprocess.run(command, capture_output=True, text=True) + result = subprocess.run(command, capture_output=True, text=True, check=False) if result.stderr: print_error(result.stderr) if result.stdout: From 529ea23aa54a137da1c7fef57b6d03c924c4871c Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 1 Apr 2025 12:31:38 -0400 Subject: [PATCH 170/388] Bump ruff to 0.11.2 (#13757) --- .pre-commit-config.yaml | 2 +- requirements-tests.txt | 2 +- stdlib/_typeshed/__init__.pyi | 5 +++-- stdlib/typing.pyi | 2 +- stdlib/typing_extensions.pyi | 2 +- stubs/PyYAML/yaml/representer.pyi | 6 +++--- .../protobuf/google/protobuf/internal/containers.pyi | 10 +++++----- stubs/protobuf/google/protobuf/message.pyi | 11 ++++------- 8 files changed, 19 insertions(+), 21 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ca5972135220..e73790f3fddc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: args: [--fix=lf] - id: check-case-conflict - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.3 # must match requirements-tests.txt + rev: v0.11.2 # must match requirements-tests.txt hooks: - id: ruff name: Run ruff on stubs, tests and scripts diff --git a/requirements-tests.txt b/requirements-tests.txt index 9857125e9bfb..d28636d9f254 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -13,7 +13,7 @@ packaging==24.2 pathspec>=0.11.1 pre-commit # Required by 
create_baseline_stubs.py. Must match .pre-commit-config.yaml. -ruff==0.9.3 +ruff==0.11.2 stubdefaulter==0.1.0 termcolor>=2.3 tomli==2.2.1 diff --git a/stdlib/_typeshed/__init__.pyi b/stdlib/_typeshed/__init__.pyi index 2b56a4e97519..99d21b67360a 100644 --- a/stdlib/_typeshed/__init__.pyi +++ b/stdlib/_typeshed/__init__.pyi @@ -3,6 +3,7 @@ # See the README.md file in this directory for more information. import sys +import typing_extensions from collections.abc import Awaitable, Callable, Iterable, Sequence, Set as AbstractSet, Sized from dataclasses import Field from os import PathLike @@ -328,9 +329,9 @@ class structseq(Generic[_T_co]): # The second parameter will accept a dict of any kind without raising an exception, # but only has any meaning if you supply it a dict where the keys are strings. # https://github.com/python/typeshed/pull/6560#discussion_r767149830 - def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ... + def __new__(cls, sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> typing_extensions.Self: ... if sys.version_info >= (3, 13): - def __replace__(self: Self, **kwargs: Any) -> Self: ... + def __replace__(self, **kwargs: Any) -> typing_extensions.Self: ... # Superset of typing.AnyStr that also includes LiteralString AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) # noqa: Y001 diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi index 1ab5dae09cb4..bc8f342ef46b 100644 --- a/stdlib/typing.pyi +++ b/stdlib/typing.pyi @@ -959,7 +959,7 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def setdefault(self, k: _Never, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. def pop(self, k: _Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] - def update(self: _T, m: _T, /) -> None: ... + def update(self, m: typing_extensions.Self, /) -> None: ... def __delitem__(self, k: _Never) -> None: ... def items(self) -> dict_items[str, object]: ... def keys(self) -> dict_keys[str, object]: ... diff --git a/stdlib/typing_extensions.pyi b/stdlib/typing_extensions.pyi index 3d369dcd63b2..f3b7b8ddf5b1 100644 --- a/stdlib/typing_extensions.pyi +++ b/stdlib/typing_extensions.pyi @@ -249,7 +249,7 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def setdefault(self, k: Never, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. def pop(self, k: Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] - def update(self: _T, m: _T, /) -> None: ... + def update(self, m: Self, /) -> None: ... def items(self) -> dict_items[str, object]: ... def keys(self) -> dict_keys[str, object]: ... def values(self) -> dict_values[str, object]: ... 
diff --git a/stubs/PyYAML/yaml/representer.pyi b/stubs/PyYAML/yaml/representer.pyi index 517ea5155a30..b0629b245ccb 100644 --- a/stubs/PyYAML/yaml/representer.pyi +++ b/stubs/PyYAML/yaml/representer.pyi @@ -3,12 +3,12 @@ from _typeshed import Incomplete, ReadableBuffer, SupportsItems from collections.abc import Callable, Iterable, Mapping from types import BuiltinFunctionType, FunctionType, ModuleType from typing import Any, ClassVar, NoReturn, TypeVar +from typing_extensions import Self from yaml.error import YAMLError as YAMLError from yaml.nodes import MappingNode as MappingNode, Node as Node, ScalarNode as ScalarNode, SequenceNode as SequenceNode _T = TypeVar("_T") -_R = TypeVar("_R", bound=BaseRepresenter) class RepresenterError(YAMLError): ... @@ -25,9 +25,9 @@ class BaseRepresenter: def represent(self, data) -> None: ... def represent_data(self, data) -> Node: ... @classmethod - def add_representer(cls: type[_R], data_type: type[_T], representer: Callable[[_R, _T], Node]) -> None: ... + def add_representer(cls, data_type: type[_T], representer: Callable[[Self, _T], Node]) -> None: ... @classmethod - def add_multi_representer(cls: type[_R], data_type: type[_T], representer: Callable[[_R, _T], Node]) -> None: ... + def add_multi_representer(cls, data_type: type[_T], representer: Callable[[Self, _T], Node]) -> None: ... def represent_scalar(self, tag: str, value, style: str | None = None) -> ScalarNode: ... def represent_sequence(self, tag: str, sequence: Iterable[Any], flow_style: bool | None = None) -> SequenceNode: ... def represent_mapping( diff --git a/stubs/protobuf/google/protobuf/internal/containers.pyi b/stubs/protobuf/google/protobuf/internal/containers.pyi index aaa970439216..e660140ac0d4 100644 --- a/stubs/protobuf/google/protobuf/internal/containers.pyi +++ b/stubs/protobuf/google/protobuf/internal/containers.pyi @@ -1,5 +1,6 @@ from collections.abc import Callable, Iterable, Iterator, MutableMapping, Sequence from typing import Any, Protocol, SupportsIndex, TypeVar, overload +from typing_extensions import Self from google.protobuf.descriptor import Descriptor from google.protobuf.internal.message_listener import MessageListener @@ -10,7 +11,6 @@ _T = TypeVar("_T") _K = TypeVar("_K", bound=bool | int | str) _ScalarV = TypeVar("_ScalarV", bound=bool | int | float | str | bytes) _MessageV = TypeVar("_MessageV", bound=Message) -_M = TypeVar("_M") class _ValueChecker(Protocol[_T]): def CheckValue(self, proposed_value: _T) -> _T: ... @@ -33,7 +33,7 @@ class RepeatedScalarFieldContainer(BaseContainer[_ScalarV]): def append(self, value: _ScalarV) -> None: ... def insert(self, key: int, value: _ScalarV) -> None: ... def extend(self, elem_seq: Iterable[_ScalarV] | None) -> None: ... - def MergeFrom(self: _M, other: _M | Iterable[_ScalarV]) -> None: ... + def MergeFrom(self, other: Self | Iterable[_ScalarV]) -> None: ... def remove(self, elem: _ScalarV) -> None: ... def pop(self, key: int = -1) -> _ScalarV: ... @overload @@ -49,7 +49,7 @@ class RepeatedCompositeFieldContainer(BaseContainer[_MessageV]): def append(self, value: _MessageV) -> None: ... def insert(self, key: int, value: _MessageV) -> None: ... def extend(self, elem_seq: Iterable[_MessageV]) -> None: ... - def MergeFrom(self: _M, other: _M | Iterable[_MessageV]) -> None: ... + def MergeFrom(self, other: Self | Iterable[_MessageV]) -> None: ... def remove(self, elem: _MessageV) -> None: ... def pop(self, key: int = -1) -> _MessageV: ... def __delitem__(self, key: int | slice) -> None: ... 
@@ -73,7 +73,7 @@ class ScalarMap(MutableMapping[_K, _ScalarV]): def get(self, key: _K, default: None = None) -> _ScalarV | None: ... @overload def get(self, key: _K, default: _ScalarV | _T) -> _ScalarV | _T: ... - def MergeFrom(self: _M, other: _M): ... + def MergeFrom(self, other: Self): ... def InvalidateIterators(self) -> None: ... def GetEntryClass(self) -> GeneratedProtocolMessageType: ... @@ -96,6 +96,6 @@ class MessageMap(MutableMapping[_K, _MessageV]): @overload def get(self, key: _K, default: _MessageV | _T) -> _MessageV | _T: ... def get_or_create(self, key: _K) -> _MessageV: ... - def MergeFrom(self: _M, other: _M): ... + def MergeFrom(self, other: Self): ... def InvalidateIterators(self) -> None: ... def GetEntryClass(self) -> GeneratedProtocolMessageType: ... diff --git a/stubs/protobuf/google/protobuf/message.pyi b/stubs/protobuf/google/protobuf/message.pyi index 819ad7aad5d2..a63e610ffca5 100644 --- a/stubs/protobuf/google/protobuf/message.pyi +++ b/stubs/protobuf/google/protobuf/message.pyi @@ -1,5 +1,5 @@ from collections.abc import Sequence -from typing import Any, TypeVar +from typing import Any from typing_extensions import Self from .descriptor import Descriptor, FieldDescriptor @@ -9,8 +9,6 @@ class Error(Exception): ... class DecodeError(Error): ... class EncodeError(Error): ... -_M = TypeVar("_M", bound=Message) # message type (of self) - class Message: DESCRIPTOR: Descriptor def __deepcopy__(self, memo: Any = None) -> Self: ... @@ -26,12 +24,11 @@ class Message: def SerializeToString(self, *, deterministic: bool = ...) -> bytes: ... def SerializePartialToString(self, *, deterministic: bool = ...) -> bytes: ... def ListFields(self) -> Sequence[tuple[FieldDescriptor, Any]]: ... - # The TypeVar must be bound to `Message` or we get mypy errors, so we cannot use `Self` for `HasExtension` & `ClearExtension` - def HasExtension(self: _M, field_descriptor: _ExtensionFieldDescriptor[_M, Any]) -> bool: ... - def ClearExtension(self: _M, field_descriptor: _ExtensionFieldDescriptor[_M, Any]) -> None: ... + def HasExtension(self, field_descriptor: _ExtensionFieldDescriptor[Self, Any]) -> bool: ... + def ClearExtension(self, field_descriptor: _ExtensionFieldDescriptor[Self, Any]) -> None: ... # The TypeVar must be bound to `Message` or we get mypy errors, so we cannot use `Self` for `Extensions` @property - def Extensions(self: _M) -> _ExtensionDict[_M]: ... + def Extensions(self) -> _ExtensionDict[Self]: ... def ByteSize(self) -> int: ... @classmethod def FromString(cls, s: bytes) -> Self: ... 
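
The recurring change in the Ruff 0.11.2 bump above is dropping TypeVars that existed only to type `self` (`_R`, `_M`) in favour of `Self`; the diffs import it from `typing_extensions`, which also covers interpreters older than 3.11. A minimal sketch of the idiom with a hypothetical class (stdlib `typing.Self` needs Python 3.11+):

    from typing import Self  # typing_extensions.Self on Python < 3.11


    class Box:
        def __init__(self) -> None:
            self._items: list[int] = []

        # Old idiom:
        #     _B = TypeVar("_B", bound="Box")
        #     def merge(self: _B, other: _B) -> _B: ...
        # Self tracks the runtime subclass without a dedicated TypeVar.
        def merge(self, other: Self) -> Self:
            self._items.extend(other._items)
            return self

        @classmethod
        def empty(cls) -> Self:
            return cls()
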
From bb7999cc13929ece33770b38eca66eca55403fab Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 18:44:21 +0100 Subject: [PATCH 171/388] Update most test/lint dependencies (#13754) --- .pre-commit-config.yaml | 2 +- requirements-tests.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e73790f3fddc..5cd487b841f9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,7 @@ repos: hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 7.1.2 + rev: 7.2.0 hooks: - id: flake8 language: python diff --git a/requirements-tests.txt b/requirements-tests.txt index d28636d9f254..724ed1d1b983 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -6,7 +6,7 @@ pyright==1.1.398 pytype==2024.10.11; platform_system != "Windows" and python_version >= "3.10" and python_version < "3.13" # Libraries used by our various scripts. -aiohttp==3.10.11 +aiohttp==3.11.12 grpcio-tools>=1.66.2 # For grpc_tools.protoc mypy-protobuf==3.6.0 packaging==24.2 @@ -19,7 +19,7 @@ termcolor>=2.3 tomli==2.2.1 tomlkit==0.13.2 typing_extensions>=4.13.0rc1 -uv==0.5.14 +uv==0.6.11 # Utilities for typeshed infrastructure scripts. ts_utils @ file:lib From 7e1ba8312750cb0573f7beb4537bb3769933aa0b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 19:59:28 +0200 Subject: [PATCH 172/388] Update dependency aiohttp to v3.11.15 (#13759) --- requirements-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 724ed1d1b983..06b306e663b7 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -6,7 +6,7 @@ pyright==1.1.398 pytype==2024.10.11; platform_system != "Windows" and python_version >= "3.10" and python_version < "3.13" # Libraries used by our various scripts. 
-aiohttp==3.11.12 +aiohttp==3.11.15 grpcio-tools>=1.66.2 # For grpc_tools.protoc mypy-protobuf==3.6.0 packaging==24.2 From a51707914fc76d43e3e449689ee2035f62d385b1 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 2 Apr 2025 02:43:25 -0400 Subject: [PATCH 173/388] Enable Ruff flake8-pie (PIE) (#13747) --- pyproject.toml | 1 + stdlib/tkinter/__init__.pyi | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7e3e8641f91c..cb630a093f55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ select = [ "I", # isort "N", # pep8-naming "PGH", # pygrep-hooks + "PIE", # flake8-pie "PL", # Pylint "RUF", # Ruff-specific and unused-noqa "TRY", # tryceratops diff --git a/stdlib/tkinter/__init__.pyi b/stdlib/tkinter/__init__.pyi index 751de523bf7a..73c1e0400fe8 100644 --- a/stdlib/tkinter/__init__.pyi +++ b/stdlib/tkinter/__init__.pyi @@ -265,7 +265,7 @@ else: GraphicsExpose = "13" Gravity = "24" KeyPress = "2" - Key = "2" + Key = KeyPress KeyRelease = "3" Keymap = "11" Leave = "8" From 6968a8d872ce5cf58373c5d5c2cf3de5f1eb7a95 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 2 Apr 2025 02:43:52 -0400 Subject: [PATCH 174/388] Enable Ruff groups we already pass (#13751) --- pyproject.toml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index cb630a093f55..36af3e424963 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,15 +47,22 @@ select = [ "A", # flake8-builtins "ARG", # flake8-unused-arguments "B", # flake8-bugbear + "C4", # flake8-comprehensions "D", # pydocstyle + "DTZ", # flake8-datetimez "EXE", # flake8-executable "FA", # flake8-future-annotations + "FBT", # flake8-boolean-trap + "FLY", # flynt "I", # isort "N", # pep8-naming "PGH", # pygrep-hooks "PIE", # flake8-pie "PL", # Pylint + "RSE", # flake8-raise "RUF", # Ruff-specific and unused-noqa + "SLOT", # flake8-slots + "T10", # flake8-debugger "TRY", # tryceratops "UP", # pyupgrade "YTT", # flake8-2020 From e40cab55fa489749a627c353d3d1097f27cb9819 Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Wed, 2 Apr 2025 08:47:13 +0200 Subject: [PATCH 175/388] Drop Python 3.8 support in `ast` (#13766) --- stdlib/_ast.pyi | 11 ---------- stdlib/ast.pyi | 57 ++++++++++++++----------------------------------- 2 files changed, 16 insertions(+), 52 deletions(-) diff --git a/stdlib/_ast.pyi b/stdlib/_ast.pyi index 8dc1bcbea32c..bc0ebd9d8a0f 100644 --- a/stdlib/_ast.pyi +++ b/stdlib/_ast.pyi @@ -130,17 +130,6 @@ if sys.version_info >= (3, 10): pattern as pattern, ) -if sys.version_info < (3, 9): - from ast import ( - AugLoad as AugLoad, - AugStore as AugStore, - ExtSlice as ExtSlice, - Index as Index, - Param as Param, - Suite as Suite, - slice as slice, - ) - PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192] PyCF_ONLY_AST: Literal[1024] PyCF_TYPE_COMMENTS: Literal[4096] diff --git a/stdlib/ast.pyi b/stdlib/ast.pyi index 7a4438a33fbc..90c6d2ff0e68 100644 --- a/stdlib/ast.pyi +++ b/stdlib/ast.pyi @@ -1144,8 +1144,7 @@ class Tuple(expr): __match_args__ = ("elts", "ctx") elts: list[expr] ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` - if sys.version_info >= (3, 9): - dims: list[expr] + dims: list[expr] if sys.version_info >= (3, 13): def __init__(self, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... else: @@ -1155,16 +1154,10 @@ class Tuple(expr): def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... 
@deprecated("Deprecated since Python 3.9.") -class slice(AST): ... # deprecated and moved to ast.py for >= (3, 9) +class slice(AST): ... -if sys.version_info >= (3, 9): - _Slice: typing_extensions.TypeAlias = expr - _SliceAttributes: typing_extensions.TypeAlias = _Attributes -else: - # alias for use with variables named slice - _Slice: typing_extensions.TypeAlias = slice - - class _SliceAttributes(TypedDict): ... +_Slice: typing_extensions.TypeAlias = expr +_SliceAttributes: typing_extensions.TypeAlias = _Attributes class Slice(_Slice): if sys.version_info >= (3, 10): @@ -1187,37 +1180,26 @@ class Slice(_Slice): ) -> Self: ... @deprecated("Deprecated since Python 3.9. Use ast.Tuple instead.") -class ExtSlice(slice): # deprecated and moved to ast.py if sys.version_info >= (3, 9) - if sys.version_info >= (3, 9): - def __new__(cls, dims: Iterable[slice] = (), **kwargs: Unpack[_SliceAttributes]) -> Tuple: ... # type: ignore[misc] - else: - dims: list[slice] - def __init__(self, dims: list[slice], **kwargs: Unpack[_SliceAttributes]) -> None: ... +class ExtSlice(slice): + def __new__(cls, dims: Iterable[slice] = (), **kwargs: Unpack[_SliceAttributes]) -> Tuple: ... # type: ignore[misc] @deprecated("Deprecated since Python 3.9. Use the index value directly instead.") -class Index(slice): # deprecated and moved to ast.py if sys.version_info >= (3, 9) - if sys.version_info >= (3, 9): - def __new__(cls, value: expr, **kwargs: Unpack[_SliceAttributes]) -> expr: ... # type: ignore[misc] - else: - value: expr - def __init__(self, value: expr, **kwargs: Unpack[_SliceAttributes]) -> None: ... +class Index(slice): + def __new__(cls, value: expr, **kwargs: Unpack[_SliceAttributes]) -> expr: ... # type: ignore[misc] class expr_context(AST): ... @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class AugLoad(expr_context): ... # deprecated and moved to ast.py if sys.version_info >= (3, 9) +class AugLoad(expr_context): ... @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class AugStore(expr_context): ... # deprecated and moved to ast.py if sys.version_info >= (3, 9) +class AugStore(expr_context): ... @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class Param(expr_context): ... # deprecated and moved to ast.py if sys.version_info >= (3, 9) +class Param(expr_context): ... @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class Suite(mod): # deprecated and moved to ast.py if sys.version_info >= (3, 9) - if sys.version_info < (3, 9): - body: list[stmt] - def __init__(self, body: list[stmt]) -> None: ... +class Suite(mod): ... class Load(expr_context): ... class Store(expr_context): ... @@ -1702,8 +1684,7 @@ if sys.version_info >= (3, 12): ) -> Self: ... class _ABC(type): - if sys.version_info >= (3, 9): - def __init__(cls, *args: Unused) -> None: ... + def __init__(cls, *args: Unused) -> None: ... if sys.version_info < (3, 14): @deprecated("Replaced by ast.Constant; removed in Python 3.14") @@ -1894,14 +1875,11 @@ if sys.version_info >= (3, 13): show_empty: bool = False, ) -> str: ... -elif sys.version_info >= (3, 9): +else: def dump( node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None ) -> str: ... -else: - def dump(node: AST, annotate_fields: bool = True, include_attributes: bool = False) -> str: ... - def copy_location(new_node: _T, old_node: AST) -> _T: ... def fix_missing_locations(node: _T) -> _T: ... def increment_lineno(node: _T, n: int = 1) -> _T: ... 
@@ -2059,8 +2037,5 @@ class NodeTransformer(NodeVisitor): # The usual return type is AST | None, but Iterable[AST] # is also allowed in some cases -- this needs to be mapped. -if sys.version_info >= (3, 9): - def unparse(ast_obj: AST) -> str: ... - -if sys.version_info >= (3, 9): - def main() -> None: ... +def unparse(ast_obj: AST) -> str: ... +def main() -> None: ... From 7a0516c790127981ea5162f8bdae7d1ad6661447 Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Wed, 2 Apr 2025 08:50:06 +0200 Subject: [PATCH 176/388] Drop Python 3.8 support in `os` and `sys` (#13764) --- stdlib/os/__init__.pyi | 91 +++++++++++++++-------------------------- stdlib/sys/__init__.pyi | 11 +---- 2 files changed, 33 insertions(+), 69 deletions(-) diff --git a/stdlib/os/__init__.pyi b/stdlib/os/__init__.pyi index 4a7c03632a67..d0ef614abbce 100644 --- a/stdlib/os/__init__.pyi +++ b/stdlib/os/__init__.pyi @@ -24,7 +24,7 @@ from builtins import OSError from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, Sequence from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from subprocess import Popen -from types import TracebackType +from types import GenericAlias, TracebackType from typing import ( IO, Any, @@ -44,9 +44,6 @@ from typing_extensions import Self, TypeAlias, Unpack, deprecated from . import path as _path -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "F_OK", "O_APPEND", @@ -155,14 +152,14 @@ __all__ = [ "umask", "uname_result", "unlink", + "unsetenv", "urandom", "utime", "waitpid", + "waitstatus_to_exitcode", "walk", "write", ] -if sys.version_info >= (3, 9): - __all__ += ["waitstatus_to_exitcode"] if sys.platform == "darwin" and sys.version_info >= (3, 12): __all__ += ["PRIO_DARWIN_BG", "PRIO_DARWIN_NONUI", "PRIO_DARWIN_PROCESS", "PRIO_DARWIN_THREAD"] if sys.platform == "darwin" and sys.version_info >= (3, 10): @@ -194,6 +191,7 @@ if sys.platform == "linux": "O_PATH", "O_RSYNC", "O_TMPFILE", + "P_PIDFD", "RTLD_DEEPBIND", "SCHED_BATCH", "SCHED_IDLE", @@ -206,6 +204,7 @@ if sys.platform == "linux": "getxattr", "listxattr", "memfd_create", + "pidfd_open", "removexattr", "setxattr", ] @@ -256,8 +255,6 @@ if sys.platform == "linux" and sys.version_info >= (3, 10): "eventfd_write", "splice", ] -if sys.platform == "linux" and sys.version_info >= (3, 9): - __all__ += ["P_PIDFD", "pidfd_open"] if sys.platform == "win32": __all__ += [ "O_BINARY", @@ -280,6 +277,8 @@ if sys.platform != "win32": "CLD_CONTINUED", "CLD_DUMPED", "CLD_EXITED", + "CLD_KILLED", + "CLD_STOPPED", "CLD_TRAPPED", "EX_CANTCREAT", "EX_CONFIG", @@ -431,8 +430,6 @@ if sys.platform != "win32" and sys.version_info >= (3, 11): __all__ += ["login_tty"] if sys.platform != "win32" and sys.version_info >= (3, 10): __all__ += ["O_FSYNC"] -if sys.platform != "win32" and sys.version_info >= (3, 9): - __all__ += ["CLD_KILLED", "CLD_STOPPED"] if sys.platform != "darwin" and sys.platform != "win32": __all__ += [ "POSIX_FADV_DONTNEED", @@ -486,8 +483,6 @@ if sys.platform != "win32" or sys.version_info >= (3, 12): __all__ += ["get_blocking", "set_blocking"] if sys.platform != "win32" or sys.version_info >= (3, 11): __all__ += ["EX_OK"] -if sys.platform != "win32" or sys.version_info >= (3, 9): - __all__ += ["unsetenv"] # This unnecessary alias is to work around various errors path = _path @@ -550,7 +545,7 @@ if sys.platform != "win32": P_PGID: int P_ALL: int - if sys.platform == "linux" and sys.version_info >= (3, 9): + if sys.platform == "linux": P_PIDFD: int WEXITED: 
int @@ -561,10 +556,8 @@ if sys.platform != "win32": CLD_DUMPED: int CLD_TRAPPED: int CLD_CONTINUED: int - - if sys.version_info >= (3, 9): - CLD_KILLED: int - CLD_STOPPED: int + CLD_KILLED: int + CLD_STOPPED: int SCHED_OTHER: int SCHED_FIFO: int @@ -698,29 +691,14 @@ class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): decodekey: _EnvironCodeFunc[AnyStr] encodevalue: _EnvironCodeFunc[AnyStr] decodevalue: _EnvironCodeFunc[AnyStr] - if sys.version_info >= (3, 9): - def __init__( - self, - data: MutableMapping[AnyStr, AnyStr], - encodekey: _EnvironCodeFunc[AnyStr], - decodekey: _EnvironCodeFunc[AnyStr], - encodevalue: _EnvironCodeFunc[AnyStr], - decodevalue: _EnvironCodeFunc[AnyStr], - ) -> None: ... - else: - putenv: Callable[[AnyStr, AnyStr], object] - unsetenv: Callable[[AnyStr, AnyStr], object] - def __init__( - self, - data: MutableMapping[AnyStr, AnyStr], - encodekey: _EnvironCodeFunc[AnyStr], - decodekey: _EnvironCodeFunc[AnyStr], - encodevalue: _EnvironCodeFunc[AnyStr], - decodevalue: _EnvironCodeFunc[AnyStr], - putenv: Callable[[AnyStr, AnyStr], object], - unsetenv: Callable[[AnyStr, AnyStr], object], - ) -> None: ... - + def __init__( + self, + data: MutableMapping[AnyStr, AnyStr], + encodekey: _EnvironCodeFunc[AnyStr], + decodekey: _EnvironCodeFunc[AnyStr], + encodevalue: _EnvironCodeFunc[AnyStr], + decodevalue: _EnvironCodeFunc[AnyStr], + ) -> None: ... def setdefault(self, key: AnyStr, value: AnyStr) -> AnyStr: ... def copy(self) -> dict[AnyStr, AnyStr]: ... def __delitem__(self, key: AnyStr) -> None: ... @@ -728,16 +706,15 @@ class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): def __setitem__(self, key: AnyStr, value: AnyStr) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... def __len__(self) -> int: ... - if sys.version_info >= (3, 9): - def __or__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... - def __ror__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... - # We use @overload instead of a Union for reasons similar to those given for - # overloading MutableMapping.update in stdlib/typing.pyi - # The type: ignore is needed due to incompatible __or__/__ior__ signatures - @overload # type: ignore[misc] - def __ior__(self, other: Mapping[AnyStr, AnyStr]) -> Self: ... - @overload - def __ior__(self, other: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... + def __or__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + # We use @overload instead of a Union for reasons similar to those given for + # overloading MutableMapping.update in stdlib/typing.pyi + # The type: ignore is needed due to incompatible __or__/__ior__ signatures + @overload # type: ignore[misc] + def __ior__(self, other: Mapping[AnyStr, AnyStr]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... environ: _Environ[str] if sys.platform != "win32": @@ -900,8 +877,7 @@ class DirEntry(Generic[AnyStr]): def is_symlink(self) -> bool: ... def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... def __fspath__(self) -> AnyStr: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 12): def is_junction(self) -> bool: ... @@ -1024,9 +1000,7 @@ if sys.platform != "win32": else: def putenv(name: str, value: str, /) -> None: ... 
- - if sys.version_info >= (3, 9): - def unsetenv(name: str, /) -> None: ... + def unsetenv(name: str, /) -> None: ... _Opener: TypeAlias = Callable[[str, int], int] @@ -1598,11 +1572,10 @@ if sys.platform == "linux": def memfd_create(name: str, flags: int = ...) -> int: ... def copy_file_range(src: int, dst: int, count: int, offset_src: int | None = ..., offset_dst: int | None = ...) -> int: ... -if sys.version_info >= (3, 9): - def waitstatus_to_exitcode(status: int) -> int: ... +def waitstatus_to_exitcode(status: int) -> int: ... - if sys.platform == "linux": - def pidfd_open(pid: int, flags: int = ...) -> int: ... +if sys.platform == "linux": + def pidfd_open(pid: int, flags: int = ...) -> int: ... if sys.version_info >= (3, 12) and sys.platform == "linux": PIDFD_NONBLOCK: Final = 2048 diff --git a/stdlib/sys/__init__.pyi b/stdlib/sys/__init__.pyi index 4aa1699e8b42..f06afc8a6fbd 100644 --- a/stdlib/sys/__init__.pyi +++ b/stdlib/sys/__init__.pyi @@ -46,8 +46,7 @@ path: list[str] path_hooks: list[Callable[[str], PathEntryFinderProtocol]] path_importer_cache: dict[str, PathEntryFinderProtocol | None] platform: LiteralString -if sys.version_info >= (3, 9): - platlibdir: str +platlibdir: str prefix: str pycache_prefix: str | None ps1: object @@ -410,14 +409,6 @@ def setrecursionlimit(limit: int, /) -> None: ... def setswitchinterval(interval: float, /) -> None: ... def gettotalrefcount() -> int: ... # Debug builds only -if sys.version_info < (3, 9): - def getcheckinterval() -> int: ... # deprecated - def setcheckinterval(n: int, /) -> None: ... # deprecated - -if sys.version_info < (3, 9): - # An 11-tuple or None - def callstats() -> tuple[int, int, int, int, int, int, int, int, int, int, int] | None: ... - # Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily. @type_check_only class UnraisableHookArgs(Protocol): From bd661bfcd4a51f6c577b607fe61e49ecfe46e637 Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Wed, 2 Apr 2025 08:56:56 +0200 Subject: [PATCH 177/388] Drop Python 3.8 support in `typing[_extensions]` and `types` (#13763) --- stdlib/@tests/test_cases/typing/check_all.py | 4 +- stdlib/types.pyi | 59 ++++++------- stdlib/typing.pyi | 93 ++++++++------------ stdlib/typing_extensions.pyi | 35 +++----- 4 files changed, 78 insertions(+), 113 deletions(-) diff --git a/stdlib/@tests/test_cases/typing/check_all.py b/stdlib/@tests/test_cases/typing/check_all.py index 44eb548e04a9..de34ae32991c 100644 --- a/stdlib/@tests/test_cases/typing/check_all.py +++ b/stdlib/@tests/test_cases/typing/check_all.py @@ -4,11 +4,9 @@ """ from __future__ import annotations -import sys from typing import * from zipfile import * -if sys.version_info >= (3, 9): - x: Annotated[int, 42] +x: Annotated[int, 42] p: Path diff --git a/stdlib/types.pyi b/stdlib/types.pyi index 542979d4afc5..f89a992b72a2 100644 --- a/stdlib/types.pyi +++ b/stdlib/types.pyi @@ -47,11 +47,9 @@ __all__ = [ "WrapperDescriptorType", "resolve_bases", "CellType", + "GenericAlias", ] -if sys.version_info >= (3, 9): - __all__ += ["GenericAlias"] - if sys.version_info >= (3, 10): __all__ += ["EllipsisType", "NoneType", "NotImplementedType", "UnionType"] @@ -320,11 +318,10 @@ class MappingProxyType(Mapping[_KT, _VT_co]): def get(self, key: _KT, /) -> _VT_co | None: ... @overload def get(self, key: _KT, default: _VT_co | _T2, /) -> _VT_co | _T2: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... - def __reversed__(self) -> Iterator[_KT]: ... 
- def __or__(self, value: Mapping[_T1, _T2], /) -> dict[_KT | _T1, _VT_co | _T2]: ... - def __ror__(self, value: Mapping[_T1, _T2], /) -> dict[_KT | _T1, _VT_co | _T2]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __reversed__(self) -> Iterator[_KT]: ... + def __or__(self, value: Mapping[_T1, _T2], /) -> dict[_KT | _T1, _VT_co | _T2]: ... + def __ror__(self, value: Mapping[_T1, _T2], /) -> dict[_KT | _T1, _VT_co | _T2]: ... class SimpleNamespace: __hash__: ClassVar[None] # type: ignore[assignment] @@ -425,8 +422,7 @@ class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): @overload async def athrow(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... def aclose(self) -> Coroutine[Any, Any, None]: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @final class CoroutineType(Coroutine[_YieldT_co, _SendT_contra, _ReturnT_co]): @@ -647,30 +643,29 @@ def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Await @overload def coroutine(func: _Fn) -> _Fn: ... -if sys.version_info >= (3, 9): - class GenericAlias: - @property - def __origin__(self) -> type | TypeAliasType: ... +class GenericAlias: + @property + def __origin__(self) -> type | TypeAliasType: ... + @property + def __args__(self) -> tuple[Any, ...]: ... + @property + def __parameters__(self) -> tuple[Any, ...]: ... + def __new__(cls, origin: type, args: Any, /) -> Self: ... + def __getitem__(self, typeargs: Any, /) -> GenericAlias: ... + def __eq__(self, value: object, /) -> bool: ... + def __hash__(self) -> int: ... + def __mro_entries__(self, bases: Iterable[object], /) -> tuple[type, ...]: ... + if sys.version_info >= (3, 11): @property - def __args__(self) -> tuple[Any, ...]: ... + def __unpacked__(self) -> bool: ... @property - def __parameters__(self) -> tuple[Any, ...]: ... - def __new__(cls, origin: type, args: Any, /) -> Self: ... - def __getitem__(self, typeargs: Any, /) -> GenericAlias: ... - def __eq__(self, value: object, /) -> bool: ... - def __hash__(self) -> int: ... - def __mro_entries__(self, bases: Iterable[object], /) -> tuple[type, ...]: ... - if sys.version_info >= (3, 11): - @property - def __unpacked__(self) -> bool: ... - @property - def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... - if sys.version_info >= (3, 10): - def __or__(self, value: Any, /) -> UnionType: ... - def __ror__(self, value: Any, /) -> UnionType: ... - - # GenericAlias delegates attr access to `__origin__` - def __getattr__(self, name: str) -> Any: ... + def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... + if sys.version_info >= (3, 10): + def __or__(self, value: Any, /) -> UnionType: ... + def __ror__(self, value: Any, /) -> UnionType: ... + + # GenericAlias delegates attr access to `__origin__` + def __getattr__(self, name: str) -> Any: ... 
if sys.version_info >= (3, 10): @final diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi index bc8f342ef46b..df753cfd9bca 100644 --- a/stdlib/typing.pyi +++ b/stdlib/typing.pyi @@ -13,6 +13,7 @@ from types import ( BuiltinFunctionType, CodeType, FunctionType, + GenericAlias, MethodDescriptorType, MethodType, MethodWrapperType, @@ -22,13 +23,12 @@ from types import ( ) from typing_extensions import Never as _Never, ParamSpec as _ParamSpec, deprecated -if sys.version_info >= (3, 9): - from types import GenericAlias if sys.version_info >= (3, 10): from types import UnionType __all__ = [ "AbstractSet", + "Annotated", "Any", "AnyStr", "AsyncContextManager", @@ -36,6 +36,7 @@ __all__ = [ "AsyncIterable", "AsyncIterator", "Awaitable", + "BinaryIO", "ByteString", "Callable", "ChainMap", @@ -49,10 +50,12 @@ __all__ = [ "Deque", "Dict", "Final", + "ForwardRef", "FrozenSet", "Generator", "Generic", "Hashable", + "IO", "ItemsView", "Iterable", "Iterator", @@ -61,12 +64,16 @@ __all__ = [ "Literal", "Mapping", "MappingView", + "Match", "MutableMapping", "MutableSequence", "MutableSet", "NamedTuple", "NewType", + "NoReturn", "Optional", + "OrderedDict", + "Pattern", "Protocol", "Reversible", "Sequence", @@ -80,6 +87,7 @@ __all__ = [ "SupportsInt", "SupportsRound", "Text", + "TextIO", "Tuple", "Type", "TypeVar", @@ -96,14 +104,8 @@ __all__ = [ "no_type_check_decorator", "overload", "runtime_checkable", - "ForwardRef", - "NoReturn", - "OrderedDict", ] -if sys.version_info >= (3, 9): - __all__ += ["Annotated", "BinaryIO", "IO", "Match", "Pattern", "TextIO"] - if sys.version_info >= (3, 10): __all__ += ["Concatenate", "ParamSpec", "ParamSpecArgs", "ParamSpecKwargs", "TypeAlias", "TypeGuard", "is_typeddict"] @@ -203,7 +205,6 @@ class _SpecialForm(_Final): Union: _SpecialForm Generic: _SpecialForm -# Protocol is only present in 3.8 and later, but mypy needs it unconditionally Protocol: _SpecialForm Callable: _SpecialForm Type: _SpecialForm @@ -386,8 +387,7 @@ ChainMap = _Alias() OrderedDict = _Alias() -if sys.version_info >= (3, 9): - Annotated: _SpecialForm +Annotated: _SpecialForm # Predefined type variables. AnyStr = TypeVar("AnyStr", str, bytes) # noqa: Y001 @@ -858,19 +858,12 @@ _get_type_hints_obj_allowed_types: typing_extensions.TypeAlias = ( # noqa: Y042 | MethodDescriptorType ) -if sys.version_info >= (3, 9): - def get_type_hints( - obj: _get_type_hints_obj_allowed_types, - globalns: dict[str, Any] | None = None, - localns: Mapping[str, Any] | None = None, - include_extras: bool = False, - ) -> dict[str, Any]: ... - -else: - def get_type_hints( - obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = None, localns: Mapping[str, Any] | None = None - ) -> dict[str, Any]: ... - +def get_type_hints( + obj: _get_type_hints_obj_allowed_types, + globalns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, + include_extras: bool = False, +) -> dict[str, Any]: ... def get_args(tp: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): @@ -879,15 +872,10 @@ if sys.version_info >= (3, 10): @overload def get_origin(tp: UnionType) -> type[UnionType]: ... -if sys.version_info >= (3, 9): - @overload - def get_origin(tp: GenericAlias) -> type: ... - @overload - def get_origin(tp: Any) -> Any | None: ... - -else: - def get_origin(tp: Any) -> Any | None: ... - +@overload +def get_origin(tp: GenericAlias) -> type: ... +@overload +def get_origin(tp: Any) -> Any | None: ... @overload def cast(typ: type[_T], val: Any) -> _T: ... 
@overload @@ -914,8 +902,6 @@ if sys.version_info >= (3, 11): # Type constructors class NamedTuple(tuple[Any, ...]): - if sys.version_info < (3, 9): - _field_types: ClassVar[dict[str, type]] _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] # __orig_bases__ sometimes exists on <3.12, but not consistently @@ -942,9 +928,8 @@ class NamedTuple(tuple[Any, ...]): @type_check_only class _TypedDict(Mapping[str, object], metaclass=ABCMeta): __total__: ClassVar[bool] - if sys.version_info >= (3, 9): - __required_keys__: ClassVar[frozenset[str]] - __optional_keys__: ClassVar[frozenset[str]] + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] # __orig_bases__ sometimes exists on <3.12, but not consistently, # so we only add it to the stub on 3.12+ if sys.version_info >= (3, 12): @@ -964,17 +949,16 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def items(self) -> dict_items[str, object]: ... def keys(self) -> dict_keys[str, object]: ... def values(self) -> dict_values[str, object]: ... - if sys.version_info >= (3, 9): - @overload - def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... - @overload - def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... - @overload - def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... - @overload - def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... - # supposedly incompatible definitions of __or__ and __ior__ - def __ior__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... # type: ignore[misc] + @overload + def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + @overload + def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... + @overload + def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + @overload + def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... + # supposedly incompatible definitions of __or__ and __ior__ + def __ior__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... # type: ignore[misc] @final class ForwardRef(_Final): @@ -985,11 +969,8 @@ class ForwardRef(_Final): __forward_is_argument__: bool __forward_is_class__: bool __forward_module__: Any | None - if sys.version_info >= (3, 9): - # The module and is_class arguments were added in later Python 3.9 versions. - def __init__(self, arg: str, is_argument: bool = True, module: Any | None = None, *, is_class: bool = False) -> None: ... - else: - def __init__(self, arg: str, is_argument: bool = True) -> None: ... + + def __init__(self, arg: str, is_argument: bool = True, module: Any | None = None, *, is_class: bool = False) -> None: ... if sys.version_info >= (3, 13): @overload @@ -1019,12 +1000,10 @@ class ForwardRef(_Final): *, recursive_guard: frozenset[str], ) -> Any | None: ... - elif sys.version_info >= (3, 9): + else: def _evaluate( self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, recursive_guard: frozenset[str] ) -> Any | None: ... - else: - def _evaluate(self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None) -> Any | None: ... def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... 
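
A quick runtime sketch, hedged, of the behavior the now-unconditional `get_origin`/`get_type_hints` overloads above describe; the function and annotation below are invented purely for illustration:

    from typing import Annotated, get_args, get_origin, get_type_hints

    # Builtin generic aliases such as list[int] are types.GenericAlias instances,
    # which is why get_origin() can promise a plain `type` for that overload:
    assert get_origin(list[int]) is list
    assert get_args(dict[str, int]) == (str, int)

    def greet(name: Annotated[str, "display name"]) -> str:
        return "hello " + name

    # include_extras=True keeps the Annotated metadata instead of stripping it:
    hints = get_type_hints(greet, include_extras=True)
    assert hints["name"] == Annotated[str, "display name"]
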
diff --git a/stdlib/typing_extensions.pyi b/stdlib/typing_extensions.pyi index f3b7b8ddf5b1..234e32e30ab6 100644 --- a/stdlib/typing_extensions.pyi +++ b/stdlib/typing_extensions.pyi @@ -5,7 +5,7 @@ import typing from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Incomplete, Unused from contextlib import AbstractAsyncContextManager as AsyncContextManager, AbstractContextManager as ContextManager -from types import ModuleType +from types import GenericAlias, ModuleType from typing import ( # noqa: Y022,Y037,Y038,Y039 IO as IO, TYPE_CHECKING as TYPE_CHECKING, @@ -67,8 +67,6 @@ from typing import ( # noqa: Y022,Y037,Y038,Y039 if sys.version_info >= (3, 10): from types import UnionType -if sys.version_info >= (3, 9): - from types import GenericAlias # Please keep order the same as at runtime. __all__ = [ @@ -254,18 +252,17 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def keys(self) -> dict_keys[str, object]: ... def values(self) -> dict_values[str, object]: ... def __delitem__(self, k: Never) -> None: ... - if sys.version_info >= (3, 9): - @overload - def __or__(self, value: Self, /) -> Self: ... - @overload - def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... - @overload - def __ror__(self, value: Self, /) -> Self: ... - @overload - def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... - # supposedly incompatible definitions of `__ior__` and `__or__`: - # Since this module defines "Self" it is not recognized by Ruff as typing_extensions.Self - def __ior__(self, value: Self, /) -> Self: ... # type: ignore[misc] + @overload + def __or__(self, value: Self, /) -> Self: ... + @overload + def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... + @overload + def __ror__(self, value: Self, /) -> Self: ... + @overload + def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... + # supposedly incompatible definitions of `__ior__` and `__or__`: + # Since this module defines "Self" it is not recognized by Ruff as typing_extensions.Self + def __ior__(self, value: Self, /) -> Self: ... # type: ignore[misc] OrderedDict = _Alias() @@ -281,10 +278,8 @@ if sys.version_info >= (3, 10): @overload def get_origin(tp: UnionType) -> type[UnionType]: ... -if sys.version_info >= (3, 9): - @overload - def get_origin(tp: GenericAlias) -> type: ... - +@overload +def get_origin(tp: GenericAlias) -> type: ... @overload def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... @overload @@ -364,8 +359,6 @@ else: ) -> IdentityFunction: ... 
class NamedTuple(tuple[Any, ...]): - if sys.version_info < (3, 9): - _field_types: ClassVar[dict[str, type]] _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] __orig_bases__: ClassVar[tuple[Any, ...]] From 57c3966def2a4feecb75820d6b7bd4742f3ac1b2 Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Wed, 2 Apr 2025 09:01:36 +0200 Subject: [PATCH 178/388] Drop Python 3.8 support in `builtins` (#13762) * remove py38 branches in `builtins` * combined `builtins.dict` tests with those exclusive to `>=3.9` --- .../test_cases/builtins/check_dict-py39.py | 70 ---------------- .../@tests/test_cases/builtins/check_dict.py | 63 +++++++++++++- stdlib/builtins.pyi | 84 ++++++++----------- 3 files changed, 95 insertions(+), 122 deletions(-) delete mode 100644 stdlib/@tests/test_cases/builtins/check_dict-py39.py diff --git a/stdlib/@tests/test_cases/builtins/check_dict-py39.py b/stdlib/@tests/test_cases/builtins/check_dict-py39.py deleted file mode 100644 index 20ac4e622071..000000000000 --- a/stdlib/@tests/test_cases/builtins/check_dict-py39.py +++ /dev/null @@ -1,70 +0,0 @@ -""" -Tests for `dict.__(r)or__`. - -`dict.__or__` and `dict.__ror__` were only added in py39, -hence why these are in a separate file to the other test cases for `dict`. -""" - -from __future__ import annotations - -import os -import sys -from typing import Mapping, TypeVar, Union -from typing_extensions import Self, assert_type - -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") - -if sys.version_info >= (3, 9): - - class CustomDictSubclass(dict[_KT, _VT]): - pass - - class CustomMappingWithDunderOr(Mapping[_KT, _VT]): - def __or__(self, other: Mapping[_KT, _VT]) -> dict[_KT, _VT]: - return {} - - def __ror__(self, other: Mapping[_KT, _VT]) -> dict[_KT, _VT]: - return {} - - def __ior__(self, other: Mapping[_KT, _VT]) -> Self: - return self - - def test_dict_dot_or( - a: dict[int, int], - b: CustomDictSubclass[int, int], - c: dict[str, str], - d: Mapping[int, int], - e: CustomMappingWithDunderOr[str, str], - ) -> None: - # dict.__(r)or__ always returns a dict, even if called on a subclass of dict: - assert_type(a | b, dict[int, int]) - assert_type(b | a, dict[int, int]) - - assert_type(a | c, dict[Union[int, str], Union[int, str]]) - - # arbitrary mappings are not accepted by `dict.__or__`; - # it has to be a subclass of `dict` - a | d # type: ignore - - # but Mappings such as `os._Environ` or `CustomMappingWithDunderOr`, - # which define `__ror__` methods that accept `dict`, are fine: - assert_type(a | os.environ, dict[Union[str, int], Union[str, int]]) - assert_type(os.environ | a, dict[Union[str, int], Union[str, int]]) - - assert_type(c | os.environ, dict[str, str]) - assert_type(c | e, dict[str, str]) - - assert_type(os.environ | c, dict[str, str]) - assert_type(e | c, dict[str, str]) - - # store "untainted" `CustomMappingWithDunderOr[str, str]` to test `__ior__` against ` dict[str, str]` later - # Invalid `e |= a` causes pyright to join `Unknown` to `e`'s type - f = e - - e |= c - e |= a # type: ignore - - c |= f - - c |= a # type: ignore diff --git a/stdlib/@tests/test_cases/builtins/check_dict.py b/stdlib/@tests/test_cases/builtins/check_dict.py index d89c3a27d489..2944730c7c67 100644 --- a/stdlib/@tests/test_cases/builtins/check_dict.py +++ b/stdlib/@tests/test_cases/builtins/check_dict.py @@ -1,7 +1,8 @@ from __future__ import annotations -from typing import Any, Dict, Generic, Iterable, TypeVar, Union -from typing_extensions import assert_type +import os +from typing import Any, Dict, Generic, 
Iterable, Mapping, TypeVar, Union +from typing_extensions import Self, assert_type # These do follow `__init__` overloads order: # mypy and pyright have different opinions about this one: @@ -148,3 +149,61 @@ def test11() -> str: def test12() -> str: return d_str.get("key", int_value) # type: ignore[arg-type] + + +# Tests for `dict.__(r)or__`. + + +class CustomDictSubclass(dict[_KT, _VT]): + pass + + +class CustomMappingWithDunderOr(Mapping[_KT, _VT]): + def __or__(self, other: Mapping[_KT, _VT]) -> dict[_KT, _VT]: + return {} + + def __ror__(self, other: Mapping[_KT, _VT]) -> dict[_KT, _VT]: + return {} + + def __ior__(self, other: Mapping[_KT, _VT]) -> Self: + return self + + +def test_dict_dot_or( + a: dict[int, int], + b: CustomDictSubclass[int, int], + c: dict[str, str], + d: Mapping[int, int], + e: CustomMappingWithDunderOr[str, str], +) -> None: + # dict.__(r)or__ always returns a dict, even if called on a subclass of dict: + assert_type(a | b, dict[int, int]) + assert_type(b | a, dict[int, int]) + + assert_type(a | c, dict[Union[int, str], Union[int, str]]) + + # arbitrary mappings are not accepted by `dict.__or__`; + # it has to be a subclass of `dict` + a | d # type: ignore + + # but Mappings such as `os._Environ` or `CustomMappingWithDunderOr`, + # which define `__ror__` methods that accept `dict`, are fine: + assert_type(a | os.environ, dict[Union[str, int], Union[str, int]]) + assert_type(os.environ | a, dict[Union[str, int], Union[str, int]]) + + assert_type(c | os.environ, dict[str, str]) + assert_type(c | e, dict[str, str]) + + assert_type(os.environ | c, dict[str, str]) + assert_type(e | c, dict[str, str]) + + # store "untainted" `CustomMappingWithDunderOr[str, str]` to test `__ior__` against ` dict[str, str]` later + # Invalid `e |= a` causes pyright to join `Unknown` to `e`'s type + f = e + + e |= c + e |= a # type: ignore + + c |= f + + c |= a # type: ignore diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index 4e2484509c1d..c9f6efafd6e9 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -32,7 +32,7 @@ from _typeshed import ( ) from collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet, Reversible, Set as AbstractSet, Sized from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper -from types import CellType, CodeType, TracebackType +from types import CellType, CodeType, GenericAlias, TracebackType # mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} # are imported from collections.abc in builtins.pyi @@ -72,9 +72,6 @@ from typing_extensions import ( # noqa: Y023 deprecated, ) -if sys.version_info >= (3, 9): - from types import GenericAlias - _T = TypeVar("_T") _I = TypeVar("_I", default=int) _T_co = TypeVar("_T_co", covariant=True) @@ -377,10 +374,8 @@ class float: def __rpow__(self, value: float, mod: None = None, /) -> Any: ... def __getnewargs__(self) -> tuple[float]: ... def __trunc__(self) -> int: ... - if sys.version_info >= (3, 9): - def __ceil__(self) -> int: ... - def __floor__(self) -> int: ... - + def __ceil__(self) -> int: ... + def __floor__(self) -> int: ... @overload def __round__(self, ndigits: None = None, /) -> int: ... @overload @@ -519,16 +514,15 @@ class str(Sequence[str]): ) -> LiteralString: ... @overload def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... # type: ignore[misc] - if sys.version_info >= (3, 9): - @overload - def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString: ... 
- @overload - def removeprefix(self, prefix: str, /) -> str: ... # type: ignore[misc] - @overload - def removesuffix(self: LiteralString, suffix: LiteralString, /) -> LiteralString: ... - @overload - def removesuffix(self, suffix: str, /) -> str: ... # type: ignore[misc] + @overload + def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString: ... + @overload + def removeprefix(self, prefix: str, /) -> str: ... # type: ignore[misc] + @overload + def removesuffix(self: LiteralString, suffix: LiteralString, /) -> LiteralString: ... + @overload + def removesuffix(self, suffix: str, /) -> str: ... # type: ignore[misc] def rfind(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: ... def rindex(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: ... @overload @@ -666,10 +660,8 @@ class bytes(Sequence[int]): def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... def partition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: ... def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytes: ... - if sys.version_info >= (3, 9): - def removeprefix(self, prefix: ReadableBuffer, /) -> bytes: ... - def removesuffix(self, suffix: ReadableBuffer, /) -> bytes: ... - + def removeprefix(self, prefix: ReadableBuffer, /) -> bytes: ... + def removesuffix(self, suffix: ReadableBuffer, /) -> bytes: ... def rfind( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: ... @@ -771,10 +763,8 @@ class bytearray(MutableSequence[int]): def partition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: ... def pop(self, index: int = -1, /) -> int: ... def remove(self, value: int, /) -> None: ... - if sys.version_info >= (3, 9): - def removeprefix(self, prefix: ReadableBuffer, /) -> bytearray: ... - def removesuffix(self, suffix: ReadableBuffer, /) -> bytearray: ... - + def removeprefix(self, prefix: ReadableBuffer, /) -> bytearray: ... + def removesuffix(self, suffix: ReadableBuffer, /) -> bytearray: ... def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytearray: ... def rfind( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / @@ -1009,8 +999,7 @@ class tuple(Sequence[_T_co]): def __rmul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: ... def count(self, value: Any, /) -> int: ... def index(self, value: Any, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # Doesn't exist at runtime, but deleting this breaks mypy and pyright. See: # https://github.com/python/typeshed/issues/7580 @@ -1092,8 +1081,7 @@ class list(MutableSequence[_T]): def __lt__(self, value: list[_T], /) -> bool: ... def __le__(self, value: list[_T], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
class dict(MutableMapping[_KT, _VT]): # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics @@ -1162,21 +1150,20 @@ class dict(MutableMapping[_KT, _VT]): def __eq__(self, value: object, /) -> bool: ... def __reversed__(self) -> Iterator[_KT]: ... __hash__: ClassVar[None] # type: ignore[assignment] - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... - @overload - def __or__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... - @overload - def __or__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... - @overload - def __ror__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... - @overload - def __ror__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... - # dict.__ior__ should be kept roughly in line with MutableMapping.update() - @overload # type: ignore[misc] - def __ior__(self, value: SupportsKeysAndGetItem[_KT, _VT], /) -> Self: ... - @overload - def __ior__(self, value: Iterable[tuple[_KT, _VT]], /) -> Self: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + @overload + def __or__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... + @overload + def __or__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... + @overload + def __ror__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... + # dict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, value: SupportsKeysAndGetItem[_KT, _VT], /) -> Self: ... + @overload + def __ior__(self, value: Iterable[tuple[_KT, _VT]], /) -> Self: ... class set(MutableSet[_T]): @overload @@ -1215,8 +1202,7 @@ class set(MutableSet[_T]): def __gt__(self, value: AbstractSet[object], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class frozenset(AbstractSet[_T_co]): @overload @@ -1244,15 +1230,13 @@ class frozenset(AbstractSet[_T_co]): def __gt__(self, value: AbstractSet[object], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class enumerate(Generic[_T]): def __new__(cls, iterable: Iterable[_T], start: int = 0) -> Self: ... def __iter__(self) -> Self: ... def __next__(self) -> tuple[int, _T]: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
@final class range(Sequence[int]): From f0cd73f19ac14ae52199047c95f17fbfa89a6852 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Wed, 2 Apr 2025 12:27:24 +0200 Subject: [PATCH 179/388] Remove dummy threading modules (#13771) Removed in Python 3.8, part of #12112 --- stdlib/VERSIONS | 3 -- stdlib/_dummy_thread.pyi | 33 ---------------------- stdlib/_dummy_threading.pyi | 56 ------------------------------------- stdlib/dummy_threading.pyi | 2 -- 4 files changed, 94 deletions(-) delete mode 100644 stdlib/_dummy_thread.pyi delete mode 100644 stdlib/_dummy_threading.pyi delete mode 100644 stdlib/dummy_threading.pyi diff --git a/stdlib/VERSIONS b/stdlib/VERSIONS index 3c6898dc1a77..7a8b950b0df7 100644 --- a/stdlib/VERSIONS +++ b/stdlib/VERSIONS @@ -36,8 +36,6 @@ _curses: 3.0- _curses_panel: 3.0- _dbm: 3.0- _decimal: 3.3- -_dummy_thread: 3.0-3.8 -_dummy_threading: 3.0-3.8 _frozen_importlib: 3.0- _frozen_importlib_external: 3.5- _gdbm: 3.0- @@ -140,7 +138,6 @@ distutils: 3.0-3.11 distutils.command.bdist_msi: 3.0-3.10 distutils.command.bdist_wininst: 3.0-3.9 doctest: 3.0- -dummy_threading: 3.0-3.8 email: 3.0- encodings: 3.0- encodings.cp1125: 3.4- diff --git a/stdlib/_dummy_thread.pyi b/stdlib/_dummy_thread.pyi deleted file mode 100644 index 1182e53c66c3..000000000000 --- a/stdlib/_dummy_thread.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from collections.abc import Callable -from types import TracebackType -from typing import Any, NoReturn, overload -from typing_extensions import TypeVarTuple, Unpack - -__all__ = ["error", "start_new_thread", "exit", "get_ident", "allocate_lock", "interrupt_main", "LockType", "RLock"] - -_Ts = TypeVarTuple("_Ts") - -TIMEOUT_MAX: int -error = RuntimeError - -@overload -def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]]) -> None: ... -@overload -def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any]) -> None: ... -def exit() -> NoReturn: ... -def get_ident() -> int: ... -def allocate_lock() -> LockType: ... -def stack_size(size: int | None = None) -> int: ... - -class LockType: - locked_status: bool - def acquire(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ... - def __enter__(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ... - def __exit__(self, typ: type[BaseException] | None, val: BaseException | None, tb: TracebackType | None) -> None: ... - def release(self) -> bool: ... - def locked(self) -> bool: ... - -class RLock(LockType): - def release(self) -> None: ... # type: ignore[override] - -def interrupt_main() -> None: ... 
diff --git a/stdlib/_dummy_threading.pyi b/stdlib/_dummy_threading.pyi deleted file mode 100644 index 1b66fb414d7a..000000000000 --- a/stdlib/_dummy_threading.pyi +++ /dev/null @@ -1,56 +0,0 @@ -from _threading_local import local as local -from _typeshed import ProfileFunction, TraceFunction -from threading import ( - TIMEOUT_MAX as TIMEOUT_MAX, - Barrier as Barrier, - BoundedSemaphore as BoundedSemaphore, - BrokenBarrierError as BrokenBarrierError, - Condition as Condition, - Event as Event, - ExceptHookArgs as ExceptHookArgs, - Lock as Lock, - RLock as RLock, - Semaphore as Semaphore, - Thread as Thread, - ThreadError as ThreadError, - Timer as Timer, - _DummyThread as _DummyThread, - _RLock as _RLock, - excepthook as excepthook, -) - -__all__ = [ - "get_ident", - "active_count", - "Condition", - "current_thread", - "enumerate", - "main_thread", - "TIMEOUT_MAX", - "Event", - "Lock", - "RLock", - "Semaphore", - "BoundedSemaphore", - "Thread", - "Barrier", - "BrokenBarrierError", - "Timer", - "ThreadError", - "setprofile", - "settrace", - "local", - "stack_size", - "ExceptHookArgs", - "excepthook", -] - -def active_count() -> int: ... -def current_thread() -> Thread: ... -def currentThread() -> Thread: ... -def get_ident() -> int: ... -def enumerate() -> list[Thread]: ... -def main_thread() -> Thread: ... -def settrace(func: TraceFunction) -> None: ... -def setprofile(func: ProfileFunction | None) -> None: ... -def stack_size(size: int | None = None) -> int: ... diff --git a/stdlib/dummy_threading.pyi b/stdlib/dummy_threading.pyi deleted file mode 100644 index 757cb8d4bd4c..000000000000 --- a/stdlib/dummy_threading.pyi +++ /dev/null @@ -1,2 +0,0 @@ -from _dummy_threading import * -from _dummy_threading import __all__ as __all__ From 595b11352e6215d5a7259c7300ae65a7004d7f4e Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Wed, 2 Apr 2025 12:32:50 +0200 Subject: [PATCH 180/388] Remove Python 3.8 exclusive branches (#13772) --- .../icalendar/icalendar/timezone/zoneinfo.pyi | 7 +----- stubs/mypy-extensions/mypy_extensions.pyi | 22 +++++++++---------- stubs/regex/regex/regex.pyi | 11 +++------- stubs/six/six/moves/_dummy_thread.pyi | 7 +----- 4 files changed, 15 insertions(+), 32 deletions(-) diff --git a/stubs/icalendar/icalendar/timezone/zoneinfo.pyi b/stubs/icalendar/icalendar/timezone/zoneinfo.pyi index 78f8e9b4f542..2e6b6e938fe5 100644 --- a/stubs/icalendar/icalendar/timezone/zoneinfo.pyi +++ b/stubs/icalendar/icalendar/timezone/zoneinfo.pyi @@ -1,14 +1,9 @@ import datetime -import sys from typing import Final, Literal +from zoneinfo import ZoneInfo from dateutil.rrule import rrule, rruleset -if sys.version_info >= (3, 9): - from zoneinfo import ZoneInfo -else: - from backports.zoneinfo import ZoneInfo - from ..cal import Timezone from ..prop import vRecur from .provider import TZProvider diff --git a/stubs/mypy-extensions/mypy_extensions.pyi b/stubs/mypy-extensions/mypy_extensions.pyi index becb313889e9..0bba94c3407e 100644 --- a/stubs/mypy-extensions/mypy_extensions.pyi +++ b/stubs/mypy-extensions/mypy_extensions.pyi @@ -1,5 +1,4 @@ import abc -import sys from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Unused from collections.abc import Mapping @@ -27,17 +26,16 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def keys(self) -> dict_keys[str, object]: ... def values(self) -> dict_values[str, object]: ... def __delitem__(self, k: Never) -> None: ... 
- if sys.version_info >= (3, 9): - @overload - def __or__(self, value: Self, /) -> Self: ... - @overload - def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... - @overload - def __ror__(self, value: Self, /) -> Self: ... - @overload - def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... - # supposedly incompatible definitions of `__or__` and `__ior__`: - def __ior__(self, value: Self, /) -> Self: ... # type: ignore[misc] + @overload + def __or__(self, value: Self, /) -> Self: ... + @overload + def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... + @overload + def __ror__(self, value: Self, /) -> Self: ... + @overload + def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... + # supposedly incompatible definitions of `__or__` and `__ior__`: + def __ior__(self, value: Self, /) -> Self: ... # type: ignore[misc] def TypedDict(typename: str, fields: dict[str, type[Any]], total: bool = ...) -> type[dict[str, Any]]: ... @overload diff --git a/stubs/regex/regex/regex.pyi b/stubs/regex/regex/regex.pyi index b777677381ca..990feb4c648e 100644 --- a/stubs/regex/regex/regex.pyi +++ b/stubs/regex/regex/regex.pyi @@ -1,15 +1,12 @@ -import sys from _typeshed import ReadableBuffer, Unused from collections.abc import Callable, Mapping +from types import GenericAlias from typing import Any, AnyStr, Generic, Literal, TypeVar, final, overload from typing_extensions import Self from . import _regex from ._regex_core import * -if sys.version_info >= (3, 9): - from types import GenericAlias - _T = TypeVar("_T") __version__: str @@ -569,8 +566,7 @@ class Pattern(Generic[AnyStr]): ) -> _regex.Scanner[bytes]: ... def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Unused, /) -> Self: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @final class Match(Generic[AnyStr]): @@ -648,5 +644,4 @@ class Match(Generic[AnyStr]): def __getitem__(self, key: int | str, /) -> AnyStr | Any: ... def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Unused, /) -> Self: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/stubs/six/six/moves/_dummy_thread.pyi b/stubs/six/six/moves/_dummy_thread.pyi index 410232d07417..25952a61494f 100644 --- a/stubs/six/six/moves/_dummy_thread.pyi +++ b/stubs/six/six/moves/_dummy_thread.pyi @@ -1,6 +1 @@ -import sys - -if sys.version_info >= (3, 9): - from _thread import * -else: - from _dummy_thread import * +from _thread import * From 731dd8a923268e542ea8b2dd8caf65536dc8f9be Mon Sep 17 00:00:00 2001 From: David Peter Date: Wed, 2 Apr 2025 15:09:56 +0200 Subject: [PATCH 181/388] `property.__get__`: overload to model class-access behavior (#13769) --- stdlib/builtins.pyi | 3 +++ 1 file changed, 3 insertions(+) diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index c9f6efafd6e9..9129c0cba20f 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -1281,6 +1281,9 @@ class property: def getter(self, fget: Callable[[Any], Any], /) -> property: ... def setter(self, fset: Callable[[Any, Any], None], /) -> property: ... def deleter(self, fdel: Callable[[Any], None], /) -> property: ... + @overload + def __get__(self, instance: None, owner: type, /) -> Self: ... + @overload def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... 
def __set__(self, instance: Any, value: Any, /) -> None: ... def __delete__(self, instance: Any, /) -> None: ... From 7026564c8541876948b3760f68a6508d0dd6948c Mon Sep 17 00:00:00 2001 From: Brian Schubert Date: Wed, 2 Apr 2025 09:13:26 -0400 Subject: [PATCH 182/388] Remove Python 3.8 exclusive branches from _asyncio, _blake2, _codecs, _contextvars (#13773) Remove 3.8 exclusive branches from asyncio, _blake2, codecs, contextvars --- stdlib/_asyncio.pyi | 27 +++------- stdlib/_blake2.pyi | 113 +++++++++++++--------------------------- stdlib/_codecs.pyi | 22 ++------ stdlib/_contextvars.pyi | 11 ++-- 4 files changed, 49 insertions(+), 124 deletions(-) diff --git a/stdlib/_asyncio.pyi b/stdlib/_asyncio.pyi index 89cdff6cc283..be486fddb12d 100644 --- a/stdlib/_asyncio.pyi +++ b/stdlib/_asyncio.pyi @@ -2,13 +2,10 @@ import sys from asyncio.events import AbstractEventLoop from collections.abc import Awaitable, Callable, Coroutine, Generator from contextvars import Context -from types import FrameType +from types import FrameType, GenericAlias from typing import Any, Literal, TextIO, TypeVar from typing_extensions import Self, TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) _TaskYieldType: TypeAlias = Future[object] | None @@ -29,11 +26,7 @@ class Future(Awaitable[_T]): @property def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ... def add_done_callback(self, fn: Callable[[Self], object], /, *, context: Context | None = None) -> None: ... - if sys.version_info >= (3, 9): - def cancel(self, msg: Any | None = None) -> bool: ... - else: - def cancel(self) -> bool: ... - + def cancel(self, msg: Any | None = None) -> bool: ... def cancelled(self) -> bool: ... def done(self) -> bool: ... def result(self) -> _T: ... @@ -45,15 +38,12 @@ class Future(Awaitable[_T]): def __await__(self) -> Generator[Any, None, _T]: ... @property def _loop(self) -> AbstractEventLoop: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 12): _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co] -elif sys.version_info >= (3, 9): - _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co] else: - _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co] + _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co] # mypy and pyright complain that a subclass of an invariant class shouldn't be covariant. # While this is true in general, here it's sort-of okay to have a covariant subclass, @@ -99,13 +89,8 @@ class Task(Future[_T_co]): # type: ignore[type-var] # pyright: ignore[reportIn if sys.version_info >= (3, 11): def cancelling(self) -> int: ... def uncancel(self) -> int: ... - if sys.version_info < (3, 9): - @classmethod - def current_task(cls, loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... - @classmethod - def all_tasks(cls, loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... def get_event_loop() -> AbstractEventLoop: ... def get_running_loop() -> AbstractEventLoop: ... 
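
The `property.__get__` overload added in the patch above models class-access behavior that is easy to forget: looked up on the class, a property hands back the descriptor itself; looked up on an instance, it runs the getter. A minimal sketch with an invented class:

    class Circle:
        def __init__(self, radius: float) -> None:
            self.radius = radius

        @property
        def area(self) -> float:
            return 3.14159 * self.radius**2

    # Instance access invokes the getter and yields the computed value:
    assert isinstance(Circle(2.0).area, float)

    # Class access returns the property object itself, which the new
    # `__get__(self, instance: None, owner: type, /) -> Self` overload captures,
    # so checkers can infer `property` here instead of falling back to Any:
    assert isinstance(Circle.area, property)
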
diff --git a/stdlib/_blake2.pyi b/stdlib/_blake2.pyi index 3d17cb59c79b..d578df55c2fa 100644 --- a/stdlib/_blake2.pyi +++ b/stdlib/_blake2.pyi @@ -1,4 +1,3 @@ -import sys from _typeshed import ReadableBuffer from typing import ClassVar, final from typing_extensions import Self @@ -21,44 +20,24 @@ class blake2b: block_size: int digest_size: int name: str - if sys.version_info >= (3, 9): - def __new__( - cls, - data: ReadableBuffer = b"", - /, - *, - digest_size: int = 64, - key: ReadableBuffer = b"", - salt: ReadableBuffer = b"", - person: ReadableBuffer = b"", - fanout: int = 1, - depth: int = 1, - leaf_size: int = 0, - node_offset: int = 0, - node_depth: int = 0, - inner_size: int = 0, - last_node: bool = False, - usedforsecurity: bool = True, - ) -> Self: ... - else: - def __new__( - cls, - data: ReadableBuffer = b"", - /, - *, - digest_size: int = 64, - key: ReadableBuffer = b"", - salt: ReadableBuffer = b"", - person: ReadableBuffer = b"", - fanout: int = 1, - depth: int = 1, - leaf_size: int = 0, - node_offset: int = 0, - node_depth: int = 0, - inner_size: int = 0, - last_node: bool = False, - ) -> Self: ... - + def __new__( + cls, + data: ReadableBuffer = b"", + /, + *, + digest_size: int = 64, + key: ReadableBuffer = b"", + salt: ReadableBuffer = b"", + person: ReadableBuffer = b"", + fanout: int = 1, + depth: int = 1, + leaf_size: int = 0, + node_offset: int = 0, + node_depth: int = 0, + inner_size: int = 0, + last_node: bool = False, + usedforsecurity: bool = True, + ) -> Self: ... def copy(self) -> Self: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... @@ -73,44 +52,24 @@ class blake2s: block_size: int digest_size: int name: str - if sys.version_info >= (3, 9): - def __new__( - cls, - data: ReadableBuffer = b"", - /, - *, - digest_size: int = 32, - key: ReadableBuffer = b"", - salt: ReadableBuffer = b"", - person: ReadableBuffer = b"", - fanout: int = 1, - depth: int = 1, - leaf_size: int = 0, - node_offset: int = 0, - node_depth: int = 0, - inner_size: int = 0, - last_node: bool = False, - usedforsecurity: bool = True, - ) -> Self: ... - else: - def __new__( - cls, - data: ReadableBuffer = b"", - /, - *, - digest_size: int = 32, - key: ReadableBuffer = b"", - salt: ReadableBuffer = b"", - person: ReadableBuffer = b"", - fanout: int = 1, - depth: int = 1, - leaf_size: int = 0, - node_offset: int = 0, - node_depth: int = 0, - inner_size: int = 0, - last_node: bool = False, - ) -> Self: ... - + def __new__( + cls, + data: ReadableBuffer = b"", + /, + *, + digest_size: int = 32, + key: ReadableBuffer = b"", + salt: ReadableBuffer = b"", + person: ReadableBuffer = b"", + fanout: int = 1, + depth: int = 1, + leaf_size: int = 0, + node_offset: int = 0, + node_depth: int = 0, + inner_size: int = 0, + last_node: bool = False, + usedforsecurity: bool = True, + ) -> Self: ... def copy(self) -> Self: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... diff --git a/stdlib/_codecs.pyi b/stdlib/_codecs.pyi index 11c5d58a855b..89f97edb9ba8 100644 --- a/stdlib/_codecs.pyi +++ b/stdlib/_codecs.pyi @@ -81,26 +81,12 @@ def escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> t def escape_encode(data: bytes, errors: str | None = None, /) -> tuple[bytes, int]: ... def latin_1_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... def latin_1_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... 
- -if sys.version_info >= (3, 9): - def raw_unicode_escape_decode( - data: str | ReadableBuffer, errors: str | None = None, final: bool = True, / - ) -> tuple[str, int]: ... - -else: - def raw_unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... - +def raw_unicode_escape_decode( + data: str | ReadableBuffer, errors: str | None = None, final: bool = True, / +) -> tuple[str, int]: ... def raw_unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... def readbuffer_encode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[bytes, int]: ... - -if sys.version_info >= (3, 9): - def unicode_escape_decode( - data: str | ReadableBuffer, errors: str | None = None, final: bool = True, / - ) -> tuple[str, int]: ... - -else: - def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... - +def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ... def unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... def utf_16_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ... def utf_16_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... diff --git a/stdlib/_contextvars.pyi b/stdlib/_contextvars.pyi index c7d0814b3cb4..33df799a768c 100644 --- a/stdlib/_contextvars.pyi +++ b/stdlib/_contextvars.pyi @@ -1,11 +1,8 @@ -import sys from collections.abc import Callable, Iterator, Mapping +from types import GenericAlias from typing import Any, ClassVar, Generic, TypeVar, final, overload from typing_extensions import ParamSpec, Self -if sys.version_info >= (3, 9): - from types import GenericAlias - _T = TypeVar("_T") _D = TypeVar("_D") _P = ParamSpec("_P") @@ -27,8 +24,7 @@ class ContextVar(Generic[_T]): def get(self, default: _D, /) -> _D | _T: ... def set(self, value: _T, /) -> Token[_T]: ... def reset(self, token: Token[_T], /) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @final class Token(Generic[_T]): @@ -38,8 +34,7 @@ class Token(Generic[_T]): def old_value(self) -> Any: ... # returns either _T or MISSING, but that's hard to express MISSING: ClassVar[object] __hash__: ClassVar[None] # type: ignore[assignment] - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... def copy_context() -> Context: ... 
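
The trimmed `_contextvars` stub above still spells out the full public surface; a small usage sketch of `ContextVar` (now unconditionally subscriptable via `__class_getitem__`), with an invented variable name:

    from contextvars import ContextVar, copy_context

    request_id: ContextVar[str] = ContextVar("request_id", default="-")

    token = request_id.set("abc123")
    assert request_id.get() == "abc123"
    request_id.reset(token)              # restore the previous value
    assert request_id.get() == "-"

    # copy_context() snapshots the current context; Context.run() executes
    # a callable against that snapshot:
    ctx = copy_context()
    assert ctx.run(request_id.get) == "-"
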
From a7f425e37a999f7cdfecdf566e280f6b93827334 Mon Sep 17 00:00:00 2001 From: Brian Schubert Date: Wed, 2 Apr 2025 10:09:17 -0400 Subject: [PATCH 183/388] Drop Python 3.8 support in typeshed utilities (#13774) --- lib/ts_utils/metadata.py | 4 ++-- lib/ts_utils/utils.py | 6 +++--- scripts/sync_protobuf/_utils.py | 3 ++- tests/mypy_test.py | 4 ++-- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/lib/ts_utils/metadata.py b/lib/ts_utils/metadata.py index 33948bdb8b36..40bc42354089 100644 --- a/lib/ts_utils/metadata.py +++ b/lib/ts_utils/metadata.py @@ -10,8 +10,8 @@ from collections.abc import Mapping from dataclasses import dataclass from pathlib import Path -from typing import Final, NamedTuple, final -from typing_extensions import Annotated, TypeGuard +from typing import Annotated, Final, NamedTuple, final +from typing_extensions import TypeGuard import tomli import tomlkit diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py index 667b47c6fabd..50b18df69766 100644 --- a/lib/ts_utils/utils.py +++ b/lib/ts_utils/utils.py @@ -7,7 +7,7 @@ from collections.abc import Iterable, Mapping from functools import lru_cache from pathlib import Path -from typing import Any, Dict, Final, NamedTuple, Tuple +from typing import Any, Final, NamedTuple from typing_extensions import TypeAlias import pathspec @@ -111,8 +111,8 @@ def get_mypy_req() -> str: # Parsing the stdlib/VERSIONS file # ==================================================================== -VersionTuple: TypeAlias = Tuple[int, int] -SupportedVersionsDict: TypeAlias = Dict[str, Tuple[VersionTuple, VersionTuple]] +VersionTuple: TypeAlias = tuple[int, int] +SupportedVersionsDict: TypeAlias = dict[str, tuple[VersionTuple, VersionTuple]] VERSIONS_PATH = STDLIB_PATH / "VERSIONS" VERSION_LINE_RE = re.compile(r"^([a-zA-Z_][a-zA-Z0-9_.]*): ([23]\.\d{1,2})-([23]\.\d{1,2})?$") diff --git a/scripts/sync_protobuf/_utils.py b/scripts/sync_protobuf/_utils.py index cb82ba1709a0..2cab826f571a 100644 --- a/scripts/sync_protobuf/_utils.py +++ b/scripts/sync_protobuf/_utils.py @@ -2,8 +2,9 @@ import subprocess import sys +from collections.abc import Iterable from http.client import HTTPResponse -from typing import TYPE_CHECKING, Iterable +from typing import TYPE_CHECKING from urllib.request import urlopen from zipfile import ZipFile diff --git a/tests/mypy_test.py b/tests/mypy_test.py index 9ccc0138229c..bf3a234ebdd8 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -18,8 +18,8 @@ from itertools import product from pathlib import Path from threading import Lock -from typing import Any, NamedTuple -from typing_extensions import Annotated, TypeAlias +from typing import Annotated, Any, NamedTuple +from typing_extensions import TypeAlias import tomli from packaging.requirements import Requirement From 6b61b6101a36edaaa6d3f4999db10dd500cc0121 Mon Sep 17 00:00:00 2001 From: A5rocks Date: Wed, 2 Apr 2025 11:31:41 -0400 Subject: [PATCH 184/388] Make `code.InteractiveInterpreter#locals` a dict not a mapping (#13775) --- stdlib/code.pyi | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/stdlib/code.pyi b/stdlib/code.pyi index 54971f3ae93c..16721927c236 100644 --- a/stdlib/code.pyi +++ b/stdlib/code.pyi @@ -1,15 +1,15 @@ import sys from codeop import CommandCompiler -from collections.abc import Callable, Mapping +from collections.abc import Callable from types import CodeType from typing import Any __all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_command"] class 
InteractiveInterpreter: - locals: Mapping[str, Any] # undocumented + locals: dict[str, Any] # undocumented compile: CommandCompiler # undocumented - def __init__(self, locals: Mapping[str, Any] | None = None) -> None: ... + def __init__(self, locals: dict[str, Any] | None = None) -> None: ... def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: ... def runcode(self, code: CodeType) -> None: ... if sys.version_info >= (3, 13): @@ -25,11 +25,11 @@ class InteractiveConsole(InteractiveInterpreter): filename: str # undocumented if sys.version_info >= (3, 13): def __init__( - self, locals: Mapping[str, Any] | None = None, filename: str = "", *, local_exit: bool = False + self, locals: dict[str, Any] | None = None, filename: str = "", *, local_exit: bool = False ) -> None: ... def push(self, line: str, filename: str | None = None) -> bool: ... else: - def __init__(self, locals: Mapping[str, Any] | None = None, filename: str = "") -> None: ... + def __init__(self, locals: dict[str, Any] | None = None, filename: str = "") -> None: ... def push(self, line: str) -> bool: ... def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: ... @@ -40,7 +40,7 @@ if sys.version_info >= (3, 13): def interact( banner: str | None = None, readfunc: Callable[[str], str] | None = None, - local: Mapping[str, Any] | None = None, + local: dict[str, Any] | None = None, exitmsg: str | None = None, local_exit: bool = False, ) -> None: ... @@ -49,7 +49,7 @@ else: def interact( banner: str | None = None, readfunc: Callable[[str], str] | None = None, - local: Mapping[str, Any] | None = None, + local: dict[str, Any] | None = None, exitmsg: str | None = None, ) -> None: ... From b5fe0f1148ede131e06a646c1b0b664bfaba9987 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Wed, 2 Apr 2025 19:45:05 +0400 Subject: [PATCH 185/388] Improve stubs for `oauthlib.oauth2.rfc6749` (#13752) --- stubs/oauthlib/oauthlib/oauth2/__init__.pyi | 1 + .../oauthlib/oauth2/rfc6749/__init__.pyi | 4 +- .../oauthlib/oauth2/rfc6749/clients/base.pyi | 145 ++++++++++-------- .../oauthlib/oauth2/rfc6749/errors.pyi | 39 +++-- .../oauthlib/oauth2/rfc6749/parameters.pyi | 51 ++++-- .../oauth2/rfc6749/request_validator.pyi | 5 +- .../oauthlib/oauth2/rfc6749/tokens.pyi | 85 +++++----- .../oauthlib/oauth2/rfc6749/utils.pyi | 23 ++- .../oauthlib/oauth2/rfc8628/__init__.pyi | 3 + .../oauth2/rfc8628/clients/__init__.pyi | 1 + .../oauth2/rfc8628/clients/device.pyi | 40 +++++ 11 files changed, 257 insertions(+), 140 deletions(-) create mode 100644 stubs/oauthlib/oauthlib/oauth2/rfc8628/__init__.pyi create mode 100644 stubs/oauthlib/oauthlib/oauth2/rfc8628/clients/__init__.pyi create mode 100644 stubs/oauthlib/oauthlib/oauth2/rfc8628/clients/device.pyi diff --git a/stubs/oauthlib/oauthlib/oauth2/__init__.pyi b/stubs/oauthlib/oauthlib/oauth2/__init__.pyi index c56170abc657..8d5cb1a614b6 100644 --- a/stubs/oauthlib/oauthlib/oauth2/__init__.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/__init__.pyi @@ -57,3 +57,4 @@ from .rfc6749.grant_types import ( from .rfc6749.request_validator import RequestValidator as RequestValidator from .rfc6749.tokens import BearerToken as BearerToken, OAuth2Token as OAuth2Token from .rfc6749.utils import is_secure_transport as is_secure_transport +from .rfc8628.clients import DeviceClient as DeviceClient diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/__init__.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/__init__.pyi index efc812cce8bb..f168fb8a2b5d 100644 --- 
a/stubs/oauthlib/oauthlib/oauth2/rfc6749/__init__.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/__init__.pyi @@ -1,4 +1,4 @@ -from typing import Any +from logging import Logger from .endpoints.base import BaseEndpoint as BaseEndpoint, catch_errors_and_unavailability as catch_errors_and_unavailability from .errors import ( @@ -8,4 +8,4 @@ from .errors import ( TemporarilyUnavailableError as TemporarilyUnavailableError, ) -log: Any +log: Logger diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi index 12633222616f..dc5ae3608a70 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi @@ -1,97 +1,122 @@ -from _typeshed import Incomplete -from typing import Any +from _typeshed import ConvertibleToInt, Incomplete +from collections.abc import Callable +from typing import Final, Literal +from typing_extensions import TypeAlias -AUTH_HEADER: str -URI_QUERY: str -BODY: str -FORM_ENC_HEADERS: Any +from oauthlib.common import _HTTPMethod +from oauthlib.oauth2.rfc6749.tokens import OAuth2Token + +_TokenPlacement: TypeAlias = Literal["auth_header", "query", "body"] + +AUTH_HEADER: Final[_TokenPlacement] +URI_QUERY: Final[_TokenPlacement] +BODY: Final[_TokenPlacement] +FORM_ENC_HEADERS: Final[dict[str, str]] class Client: refresh_token_key: str - client_id: Any - default_token_placement: Any - token_type: Any - access_token: Any - refresh_token: Any - mac_key: Any - mac_algorithm: Any - token: Any - scope: Any - state_generator: Any - state: Any - redirect_url: Any - code: Any - expires_in: Any - code_verifier: str - code_challenge: str - code_challenge_method: str + client_id: str + default_token_placement: _TokenPlacement + token_type: str + access_token: str | None + refresh_token: str | None + mac_key: str | bytes | bytearray | None + mac_algorithm: str | None + token: dict[str, Incomplete] + scope: str | set[object] | tuple[object] | list[object] + state_generator: Callable[[], str] + state: str | None + redirect_url: str | None + code: Incomplete + expires_in: ConvertibleToInt | None + code_verifier: str | None + code_challenge: str | None + code_challenge_method: str | None def __init__( self, - client_id, - default_token_placement="auth_header", + client_id: str, + default_token_placement: _TokenPlacement = "auth_header", token_type: str = "Bearer", - access_token: Incomplete | None = None, - refresh_token: Incomplete | None = None, - mac_key: Incomplete | None = None, - mac_algorithm: Incomplete | None = None, - token: Incomplete | None = None, - scope: Incomplete | None = None, - state: Incomplete | None = None, - redirect_url: Incomplete | None = None, - state_generator=..., + access_token: str | None = None, + refresh_token: str | None = None, + mac_key: str | bytes | bytearray | None = None, + mac_algorithm: str | None = None, + token: dict[str, Incomplete] | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, + state: str | None = None, + redirect_url: str | None = None, + state_generator: Callable[[], str] = ..., code_verifier: str | None = None, code_challenge: str | None = None, code_challenge_method: str | None = None, **kwargs, ) -> None: ... @property - def token_types(self): ... + def token_types( + self, + ) -> dict[ + Literal["Bearer", "MAC"], + Callable[ + [str, str, str | None, dict[str, str] | None, str | None, Incomplete], tuple[str, dict[str, str] | None, str | None] + ], + ]: ... 
def prepare_request_uri(self, *args, **kwargs) -> str: ... def prepare_request_body(self, *args, **kwargs) -> str: ... def parse_request_uri_response(self, *args, **kwargs) -> dict[str, str]: ... def add_token( self, - uri, - http_method: str = "GET", - body: Incomplete | None = None, - headers: Incomplete | None = None, - token_placement: Incomplete | None = None, + uri: str, + http_method: _HTTPMethod = "GET", + body: str | None = None, + headers: dict[str, str] | None = None, + token_placement: _TokenPlacement | None = None, **kwargs, - ): ... + ) -> tuple[str, dict[str, str] | None, str | None]: ... def prepare_authorization_request( self, - authorization_url, - state: Incomplete | None = None, - redirect_url: Incomplete | None = None, - scope: Incomplete | None = None, + authorization_url: str, + state: str | None = None, + redirect_url: str | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, **kwargs, - ): ... + ) -> tuple[str, dict[str, str], str]: ... def prepare_token_request( self, - token_url, - authorization_response: Incomplete | None = None, - redirect_url: Incomplete | None = None, - state: Incomplete | None = None, + token_url: str, + authorization_response: str | None = None, + redirect_url: str | None = None, + state: str | None = None, body: str = "", **kwargs, - ): ... + ) -> tuple[str, dict[str, str], str]: ... def prepare_refresh_token_request( - self, token_url, refresh_token: Incomplete | None = None, body: str = "", scope: Incomplete | None = None, **kwargs - ): ... + self, + token_url: str, + refresh_token: str | None = None, + body: str = "", + scope: str | set[object] | tuple[object] | list[object] | None = None, + **kwargs, + ) -> tuple[str, dict[str, str], str]: ... def prepare_token_revocation_request( self, revocation_url, token, - token_type_hint: str = "access_token", + token_type_hint: Literal["access_token", "refresh_token"] | None = "access_token", body: str = "", - callback: Incomplete | None = None, + callback: Callable[[Incomplete], Incomplete] | None = None, **kwargs, ): ... - def parse_request_body_response(self, body, scope: Incomplete | None = None, **kwargs): ... + def parse_request_body_response( + self, body: str, scope: str | set[object] | tuple[object] | list[object] | None = None, **kwargs + ) -> OAuth2Token: ... def prepare_refresh_body( - self, body: str = "", refresh_token: Incomplete | None = None, scope: Incomplete | None = None, **kwargs - ): ... + self, + body: str = "", + refresh_token: str | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, + **kwargs, + ) -> str: ... def create_code_verifier(self, length: int) -> str: ... def create_code_challenge(self, code_verifier: str, code_challenge_method: str | None = None) -> str: ... - def populate_code_attributes(self, response) -> None: ... - def populate_token_attributes(self, response) -> None: ... + def populate_code_attributes(self, response: dict[str, Incomplete]) -> None: ... + def populate_token_attributes(self, response: dict[str, Incomplete]) -> None: ... 
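
The tightened annotations on `Client` above mostly pin down return types that were previously `Incomplete`; a hedged sketch of how the three-tuple return of `prepare_authorization_request` reads in calling code, using `WebApplicationClient` (oauthlib's concrete web-flow subclass, which implements `prepare_request_uri`) and placeholder URLs:

    from oauthlib.oauth2 import WebApplicationClient

    client = WebApplicationClient("my-client-id")

    # Annotated as tuple[str, dict[str, str], str] in the stub above:
    url, headers, body = client.prepare_authorization_request(
        "https://provider.example/authorize",
        redirect_url="https://app.example/callback",
        scope=["profile"],
        state="opaque-state",
    )

    # prepare_token_request() and prepare_refresh_token_request() advertise the
    # same three-tuple shape, so calling code can unpack them identically.
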
diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi index 3eb2db2c00fe..a10c600c239e 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi @@ -1,11 +1,13 @@ from _typeshed import Incomplete -from typing import Any +from typing import Any, NoReturn + +from oauthlib.common import Request class OAuth2Error(Exception): - error: Any + error: str | None status_code: int description: str - uri: Any + uri: str | None state: Any redirect_uri: Any client_id: Any @@ -15,21 +17,21 @@ class OAuth2Error(Exception): grant_type: Any def __init__( self, - description: Incomplete | None = None, - uri: Incomplete | None = None, + description: str | None = None, + uri: str | None = None, state: Incomplete | None = None, - status_code: Incomplete | None = None, - request: Incomplete | None = None, + status_code: int | None = None, + request: Request | None = None, ) -> None: ... - def in_uri(self, uri): ... + def in_uri(self, uri: str) -> str: ... @property - def twotuples(self): ... + def twotuples(self) -> list[tuple[str, Incomplete | str | None]]: ... @property - def urlencoded(self): ... + def urlencoded(self) -> str: ... @property - def json(self): ... + def json(self) -> str: ... @property - def headers(self): ... + def headers(self) -> dict[str, str]: ... class TokenExpiredError(OAuth2Error): error: str @@ -135,7 +137,14 @@ class LoginRequired(OAuth2Error): error: str class CustomOAuth2Error(OAuth2Error): - error: Any - def __init__(self, error, *args, **kwargs) -> None: ... + def __init__( + self, + error: str, + description: str | None = None, + uri: str | None = None, + state: Incomplete | None = None, + status_code: int | None = None, + request: Request | None = None, + ) -> None: ... -def raise_from_error(error, params: Incomplete | None = None) -> None: ... +def raise_from_error(error: str, params: dict[str, Incomplete] | None = None) -> NoReturn: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi index c0dd38f9a5a7..78420ebe0496 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi @@ -1,23 +1,44 @@ from _typeshed import Incomplete +from collections.abc import Callable +from typing import Literal + +from .tokens import OAuth2Token def prepare_grant_uri( - uri, - client_id, - response_type, - redirect_uri: Incomplete | None = None, - scope: Incomplete | None = None, - state: Incomplete | None = None, + uri: str, + client_id: str, + response_type: Literal["code", "token"], + redirect_uri: str | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, + state: str | None = None, code_challenge: str | None = None, code_challenge_method: str | None = "plain", **kwargs, -): ... +) -> str: ... def prepare_token_request( - grant_type, body: str = "", include_client_id: bool = True, code_verifier: str | None = None, **kwargs -): ... + grant_type: str, + body: str = "", + include_client_id: bool = True, + code_verifier: str | None = None, + *, + scope: str | set[object] | tuple[object] | list[object] | None = None, + client_id: str | None = None, + client_secret: str | None = None, + **kwargs, +) -> str: ... def prepare_token_revocation_request( - url, token, token_type_hint: str = "access_token", callback: Incomplete | None = None, body: str = "", **kwargs -): ... 
-def parse_authorization_code_response(uri, state: Incomplete | None = None): ... -def parse_implicit_response(uri, state: Incomplete | None = None, scope: Incomplete | None = None): ... -def parse_token_response(body, scope: Incomplete | None = None): ... -def validate_token_parameters(params) -> None: ... + url: str, + token: str, + token_type_hint: Literal["access_token", "refresh_token"] | None = "access_token", + callback: Callable[[Incomplete], Incomplete] | None = None, + body: str = "", + **kwargs, +) -> tuple[str, dict[str, str], str]: ... +def parse_authorization_code_response(uri: str, state: str | None = None) -> dict[str, str]: ... +def parse_implicit_response( + uri: str, state: str | None = None, scope: str | set[object] | tuple[object] | list[object] | None = None +) -> OAuth2Token: ... +def parse_token_response( + body: str | bytes | bytearray, scope: str | set[object] | tuple[object] | list[object] | None = None +) -> OAuth2Token: ... +def validate_token_parameters(params: dict[str, Incomplete]) -> None: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/request_validator.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/request_validator.pyi index 6c823b7fa0d6..a057ee5aab69 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/request_validator.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/request_validator.pyi @@ -1,5 +1,6 @@ from collections.abc import Mapping -from typing import Any, Literal, TypedDict +from logging import Logger +from typing import Literal, TypedDict from typing_extensions import NotRequired from oauthlib.common import Request @@ -18,7 +19,7 @@ class _AuthorizationCode(TypedDict): state: NotRequired[str] nonce: NotRequired[str] -log: Any +log: Logger class RequestValidator: def client_authentication_required(self, request: Request, *args, **kwargs) -> bool: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi index 9ef160a13a6d..e11777274fcb 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi @@ -1,60 +1,67 @@ +import datetime from _typeshed import Incomplete -from typing import Any +from collections.abc import Callable +from typing import Literal -class OAuth2Token(dict[Any, Any]): - def __init__(self, params, old_scope: Incomplete | None = None) -> None: ... +from oauthlib.common import Request, _HTTPMethod +from oauthlib.oauth2.rfc6749.request_validator import RequestValidator + +class OAuth2Token(dict[str, Incomplete]): + def __init__( + self, params: dict[str, Incomplete], old_scope: str | set[object] | tuple[object] | list[object] | None = None + ) -> None: ... @property - def scope_changed(self): ... + def scope_changed(self) -> bool: ... @property - def old_scope(self): ... + def old_scope(self) -> str | None: ... @property - def old_scopes(self): ... + def old_scopes(self) -> list[str]: ... @property - def scope(self): ... + def scope(self) -> str | None: ... @property - def scopes(self): ... + def scopes(self) -> list[str]: ... @property - def missing_scopes(self): ... + def missing_scopes(self) -> list[str]: ... @property - def additional_scopes(self): ... + def additional_scopes(self) -> list[str]: ... 
def prepare_mac_header( - token, - uri, - key, - http_method, - nonce: Incomplete | None = None, - headers: Incomplete | None = None, - body: Incomplete | None = None, + token: str, + uri: str, + key: str | bytes | bytearray, + http_method: _HTTPMethod, + nonce: str | None = None, + headers: dict[str, str] | None = None, + body: str | None = None, ext: str = "", hash_algorithm: str = "hmac-sha-1", - issue_time: Incomplete | None = None, + issue_time: datetime.datetime | None = None, draft: int = 0, -): ... -def prepare_bearer_uri(token, uri): ... -def prepare_bearer_headers(token, headers: Incomplete | None = None): ... -def prepare_bearer_body(token, body: str = ""): ... -def random_token_generator(request, refresh_token: bool = False): ... -def signed_token_generator(private_pem, **kwargs): ... -def get_token_from_header(request): ... +) -> dict[str, str]: ... +def prepare_bearer_uri(token: str, uri: str) -> str: ... +def prepare_bearer_headers(token: str, headers: dict[str, str] | None = None) -> dict[str, str]: ... +def prepare_bearer_body(token: str, body: str = "") -> str: ... +def random_token_generator(request: Request, refresh_token: bool = False) -> str: ... +def signed_token_generator(private_pem: str, **kwargs) -> Callable[[Request], str]: ... +def get_token_from_header(request: Request) -> str | None: ... class TokenBase: - def __call__(self, request, refresh_token: bool = False) -> None: ... - def validate_request(self, request) -> None: ... - def estimate_type(self, request) -> None: ... + def __call__(self, request: Request, refresh_token: bool = False) -> None: ... + def validate_request(self, request: Request) -> bool: ... + def estimate_type(self, request: Request) -> int: ... class BearerToken(TokenBase): - request_validator: Any - token_generator: Any - refresh_token_generator: Any - expires_in: Any + request_validator: RequestValidator | None + token_generator: Callable[[Request], str] + refresh_token_generator: Callable[[Request], str] + expires_in: int def __init__( self, - request_validator: Incomplete | None = None, - token_generator: Incomplete | None = None, - expires_in: Incomplete | None = None, - refresh_token_generator: Incomplete | None = None, + request_validator: RequestValidator | None = None, + token_generator: Callable[[Request], str] | None = None, + expires_in: int | None = None, + refresh_token_generator: Callable[[Request], str] | None = None, ) -> None: ... - def create_token(self, request, refresh_token: bool = False, **kwargs): ... - def validate_request(self, request): ... - def estimate_type(self, request): ... + def create_token(self, request: Request, refresh_token: bool = False, **kwargs) -> OAuth2Token: ... + def validate_request(self, request: Request) -> bool: ... + def estimate_type(self, request: Request) -> Literal[9, 5, 0]: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/utils.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/utils.pyi index dc660b791580..be3fff47be18 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/utils.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/utils.pyi @@ -1,7 +1,16 @@ -def list_to_scope(scope): ... -def scope_to_list(scope): ... -def params_from_uri(uri): ... -def host_from_uri(uri): ... -def escape(u): ... -def generate_age(issue_time): ... -def is_secure_transport(uri): ... +import datetime +from typing import overload + +@overload +def list_to_scope(scope: None) -> None: ... +@overload +def list_to_scope(scope: str | set[object] | tuple[object] | list[object]) -> str: ... 
+@overload +def scope_to_list(scope: None) -> None: ... +@overload +def scope_to_list(scope: str | set[object] | tuple[object] | list[object]) -> list[str]: ... +def params_from_uri(uri: str) -> dict[str, str | list[str]]: ... +def host_from_uri(uri: str) -> tuple[str, str | None]: ... +def escape(u: str) -> str: ... +def generate_age(issue_time: datetime.datetime | datetime.timedelta) -> str: ... +def is_secure_transport(uri: str) -> bool: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc8628/__init__.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc8628/__init__.pyi new file mode 100644 index 000000000000..cb35af91145e --- /dev/null +++ b/stubs/oauthlib/oauthlib/oauth2/rfc8628/__init__.pyi @@ -0,0 +1,3 @@ +from logging import Logger + +log: Logger diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc8628/clients/__init__.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc8628/clients/__init__.pyi new file mode 100644 index 000000000000..e03f44a720ca --- /dev/null +++ b/stubs/oauthlib/oauthlib/oauth2/rfc8628/clients/__init__.pyi @@ -0,0 +1 @@ +from .device import DeviceClient as DeviceClient diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc8628/clients/device.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc8628/clients/device.pyi new file mode 100644 index 000000000000..e7919258117d --- /dev/null +++ b/stubs/oauthlib/oauthlib/oauth2/rfc8628/clients/device.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from collections.abc import Callable + +from oauthlib.oauth2.rfc6749.clients.base import Client, _TokenPlacement + +class DeviceClient(Client): + grant_type: str + client_secret: str | None + def __init__( + self, + client_id: str, + *, + client_secret: str | None = None, + default_token_placement: _TokenPlacement = "auth_header", + token_type: str = "Bearer", + access_token: str | None = None, + refresh_token: str | None = None, + mac_key: str | bytes | bytearray | None = None, + mac_algorithm: str | None = None, + token: dict[str, Incomplete] | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, + state: str | None = None, + redirect_url: str | None = None, + state_generator: Callable[[], str] = ..., + code_verifier: str | None = None, + code_challenge: str | None = None, + code_challenge_method: str | None = None, + **kwargs, + ) -> None: ... + def prepare_request_uri( + self, uri: str, scope: str | set[object] | tuple[object] | list[object] | None = None, **kwargs + ) -> str: ... + def prepare_request_body( + self, + device_code: str, + body: str = "", + scope: str | set[object] | tuple[object] | list[object] | None = None, + include_client_id: bool = False, + **kwargs, + ) -> str: ... From 7d9e900479c8a820d8a47e8a5109b95c84b6fbcb Mon Sep 17 00:00:00 2001 From: Peter Bierma Date: Wed, 2 Apr 2025 17:15:39 -0400 Subject: [PATCH 186/388] Add stub for `sys._is_interned` (#13778) --- stdlib/sys/__init__.pyi | 1 + 1 file changed, 1 insertion(+) diff --git a/stdlib/sys/__init__.pyi b/stdlib/sys/__init__.pyi index f06afc8a6fbd..a2cca3509a9c 100644 --- a/stdlib/sys/__init__.pyi +++ b/stdlib/sys/__init__.pyi @@ -396,6 +396,7 @@ def intern(string: str, /) -> str: ... if sys.version_info >= (3, 13): def _is_gil_enabled() -> bool: ... def _clear_internal_caches() -> None: ... + def _is_interned(string: str, /) -> bool: ... def is_finalizing() -> bool: ... def breakpointhook(*args: Any, **kwargs: Any) -> Any: ... 
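A minimal sketch of how the tightened annotations above surface to callers; the scope strings and the 3.13 check are illustrative and not taken from the patches themselves.

import sys
from oauthlib.oauth2.rfc6749.utils import list_to_scope, scope_to_list

scopes = scope_to_list("openid profile email")  # overload now infers list[str]
joined = list_to_scope(["openid", "profile"])   # overload now infers str
missing = scope_to_list(None)                   # overload narrows this to None

if sys.version_info >= (3, 13):
    # private CPython helper typed above; expected to report True for an interned string
    print(sys._is_interned(sys.intern("typeshed")))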
From 87d7e2c1aeb2e405a666847156945bbb3de18fc5 Mon Sep 17 00:00:00 2001 From: David Gilman Date: Wed, 2 Apr 2025 22:41:39 -0700 Subject: [PATCH 187/388] [auth0-python] Add auth0-python stubs (#13716) --- pyrightconfig.stricter.json | 1 + .../@tests/stubtest_allowlist.txt | 17 +++ stubs/auth0-python/METADATA.toml | 3 + stubs/auth0-python/auth0/__init__.pyi | 7 + stubs/auth0-python/auth0/asyncify.pyi | 6 + .../auth0/authentication/__init__.pyi | 10 ++ .../authentication/async_token_verifier.pyi | 27 ++++ .../authentication/back_channel_login.pyi | 4 + .../auth0/authentication/base.pyi | 31 ++++ .../authentication/client_authentication.pyi | 13 ++ .../auth0/authentication/database.pyi | 21 +++ .../auth0/authentication/delegated.pyi | 12 ++ .../auth0/authentication/enterprise.pyi | 5 + .../auth0/authentication/get_token.pyi | 21 +++ .../auth0/authentication/passwordless.pyi | 5 + .../pushed_authorization_requests.pyi | 4 + .../auth0/authentication/revoke_token.pyi | 4 + .../auth0/authentication/social.pyi | 4 + .../auth0/authentication/token_verifier.pyi | 29 ++++ .../auth0/authentication/users.pyi | 11 ++ stubs/auth0-python/auth0/exceptions.pyi | 14 ++ .../auth0/management/__init__.pyi | 63 ++++++++ .../auth0-python/auth0/management/actions.pyi | 64 +++++++++ .../auth0/management/async_auth0.pyi | 15 ++ .../auth0/management/attack_protection.pyi | 30 ++++ stubs/auth0-python/auth0/management/auth0.pyi | 67 +++++++++ .../auth0/management/blacklists.pyi | 21 +++ .../auth0/management/branding.pyi | 38 +++++ .../auth0/management/client_credentials.pyi | 26 ++++ .../auth0/management/client_grants.pyi | 60 ++++++++ .../auth0-python/auth0/management/clients.pyi | 44 ++++++ .../auth0/management/connections.pyi | 48 +++++++ .../auth0/management/custom_domains.pyi | 28 ++++ .../auth0/management/device_credentials.pyi | 44 ++++++ .../auth0/management/email_templates.pyi | 24 ++++ .../auth0-python/auth0/management/emails.pyi | 26 ++++ .../auth0-python/auth0/management/grants.pyi | 34 +++++ .../auth0/management/guardian.pyi | 38 +++++ stubs/auth0-python/auth0/management/hooks.pyi | 52 +++++++ stubs/auth0-python/auth0/management/jobs.pyi | 42 ++++++ .../auth0/management/log_streams.pyi | 29 ++++ stubs/auth0-python/auth0/management/logs.pyi | 44 ++++++ .../auth0/management/organizations.pyi | 134 ++++++++++++++++++ .../auth0-python/auth0/management/prompts.pyi | 28 ++++ .../auth0/management/resource_servers.pyi | 28 ++++ stubs/auth0-python/auth0/management/roles.pyi | 63 ++++++++ stubs/auth0-python/auth0/management/rules.pyi | 46 ++++++ .../auth0/management/rules_configs.pyi | 24 ++++ stubs/auth0-python/auth0/management/stats.pyi | 24 ++++ .../auth0-python/auth0/management/tenants.pyi | 22 +++ .../auth0-python/auth0/management/tickets.pyi | 22 +++ .../auth0/management/user_blocks.pyi | 26 ++++ stubs/auth0-python/auth0/management/users.pyi | 117 +++++++++++++++ .../auth0/management/users_by_email.pyi | 24 ++++ stubs/auth0-python/auth0/rest.pyi | 48 +++++++ stubs/auth0-python/auth0/rest_async.pyi | 29 ++++ stubs/auth0-python/auth0/types.pyi | 5 + stubs/auth0-python/auth0/utils.pyi | 1 + 58 files changed, 1727 insertions(+) create mode 100644 stubs/auth0-python/@tests/stubtest_allowlist.txt create mode 100644 stubs/auth0-python/METADATA.toml create mode 100644 stubs/auth0-python/auth0/__init__.pyi create mode 100644 stubs/auth0-python/auth0/asyncify.pyi create mode 100644 stubs/auth0-python/auth0/authentication/__init__.pyi create mode 100644 stubs/auth0-python/auth0/authentication/async_token_verifier.pyi 
create mode 100644 stubs/auth0-python/auth0/authentication/back_channel_login.pyi create mode 100644 stubs/auth0-python/auth0/authentication/base.pyi create mode 100644 stubs/auth0-python/auth0/authentication/client_authentication.pyi create mode 100644 stubs/auth0-python/auth0/authentication/database.pyi create mode 100644 stubs/auth0-python/auth0/authentication/delegated.pyi create mode 100644 stubs/auth0-python/auth0/authentication/enterprise.pyi create mode 100644 stubs/auth0-python/auth0/authentication/get_token.pyi create mode 100644 stubs/auth0-python/auth0/authentication/passwordless.pyi create mode 100644 stubs/auth0-python/auth0/authentication/pushed_authorization_requests.pyi create mode 100644 stubs/auth0-python/auth0/authentication/revoke_token.pyi create mode 100644 stubs/auth0-python/auth0/authentication/social.pyi create mode 100644 stubs/auth0-python/auth0/authentication/token_verifier.pyi create mode 100644 stubs/auth0-python/auth0/authentication/users.pyi create mode 100644 stubs/auth0-python/auth0/exceptions.pyi create mode 100644 stubs/auth0-python/auth0/management/__init__.pyi create mode 100644 stubs/auth0-python/auth0/management/actions.pyi create mode 100644 stubs/auth0-python/auth0/management/async_auth0.pyi create mode 100644 stubs/auth0-python/auth0/management/attack_protection.pyi create mode 100644 stubs/auth0-python/auth0/management/auth0.pyi create mode 100644 stubs/auth0-python/auth0/management/blacklists.pyi create mode 100644 stubs/auth0-python/auth0/management/branding.pyi create mode 100644 stubs/auth0-python/auth0/management/client_credentials.pyi create mode 100644 stubs/auth0-python/auth0/management/client_grants.pyi create mode 100644 stubs/auth0-python/auth0/management/clients.pyi create mode 100644 stubs/auth0-python/auth0/management/connections.pyi create mode 100644 stubs/auth0-python/auth0/management/custom_domains.pyi create mode 100644 stubs/auth0-python/auth0/management/device_credentials.pyi create mode 100644 stubs/auth0-python/auth0/management/email_templates.pyi create mode 100644 stubs/auth0-python/auth0/management/emails.pyi create mode 100644 stubs/auth0-python/auth0/management/grants.pyi create mode 100644 stubs/auth0-python/auth0/management/guardian.pyi create mode 100644 stubs/auth0-python/auth0/management/hooks.pyi create mode 100644 stubs/auth0-python/auth0/management/jobs.pyi create mode 100644 stubs/auth0-python/auth0/management/log_streams.pyi create mode 100644 stubs/auth0-python/auth0/management/logs.pyi create mode 100644 stubs/auth0-python/auth0/management/organizations.pyi create mode 100644 stubs/auth0-python/auth0/management/prompts.pyi create mode 100644 stubs/auth0-python/auth0/management/resource_servers.pyi create mode 100644 stubs/auth0-python/auth0/management/roles.pyi create mode 100644 stubs/auth0-python/auth0/management/rules.pyi create mode 100644 stubs/auth0-python/auth0/management/rules_configs.pyi create mode 100644 stubs/auth0-python/auth0/management/stats.pyi create mode 100644 stubs/auth0-python/auth0/management/tenants.pyi create mode 100644 stubs/auth0-python/auth0/management/tickets.pyi create mode 100644 stubs/auth0-python/auth0/management/user_blocks.pyi create mode 100644 stubs/auth0-python/auth0/management/users.pyi create mode 100644 stubs/auth0-python/auth0/management/users_by_email.pyi create mode 100644 stubs/auth0-python/auth0/rest.pyi create mode 100644 stubs/auth0-python/auth0/rest_async.pyi create mode 100644 stubs/auth0-python/auth0/types.pyi create mode 100644 
stubs/auth0-python/auth0/utils.pyi diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index ee21eb97e43e..54d64e5f1529 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -24,6 +24,7 @@ "stdlib/tkinter/ttk.pyi", "stubs/aiofiles/aiofiles/tempfile/temptypes.pyi", "stubs/antlr4-python3-runtime", + "stubs/auth0-python", "stubs/Authlib", "stubs/aws-xray-sdk", "stubs/beautifulsoup4", diff --git a/stubs/auth0-python/@tests/stubtest_allowlist.txt b/stubs/auth0-python/@tests/stubtest_allowlist.txt new file mode 100644 index 000000000000..8264245ada21 --- /dev/null +++ b/stubs/auth0-python/@tests/stubtest_allowlist.txt @@ -0,0 +1,17 @@ +# Omit tests +auth0\.test.* + +# Omit _async functions because they aren't present in the code +auth0\..*_async + +# Inconsistently implemented, ommitted +auth0.asyncify.AsyncRestClient.file_post +auth0.authentication.async_token_verifier.AsyncRestClient.file_post +auth0.management.Auth0\..* +auth0.rest_async.AsyncRestClient.file_post +auth0.authentication.async_token_verifier.AsyncTokenVerifier.verify + +# TYPE_CHECKING override makes these show up wrong +auth0.management.async_auth0.RestClientOptions +auth0.management.auth0.RestClientOptions +auth0.rest.RequestsResponse diff --git a/stubs/auth0-python/METADATA.toml b/stubs/auth0-python/METADATA.toml new file mode 100644 index 000000000000..86faedcae1e1 --- /dev/null +++ b/stubs/auth0-python/METADATA.toml @@ -0,0 +1,3 @@ +version = "4.8.*" +upstream_repository = "https://github.com/auth0/auth0-python" +requires = ["cryptography", "types-requests"] diff --git a/stubs/auth0-python/auth0/__init__.pyi b/stubs/auth0-python/auth0/__init__.pyi new file mode 100644 index 000000000000..3dfa028c921c --- /dev/null +++ b/stubs/auth0-python/auth0/__init__.pyi @@ -0,0 +1,7 @@ +from auth0.exceptions import ( + Auth0Error as Auth0Error, + RateLimitError as RateLimitError, + TokenValidationError as TokenValidationError, +) + +__all__ = ("Auth0Error", "RateLimitError", "TokenValidationError") diff --git a/stubs/auth0-python/auth0/asyncify.pyi b/stubs/auth0-python/auth0/asyncify.pyi new file mode 100644 index 000000000000..37c0503bc273 --- /dev/null +++ b/stubs/auth0-python/auth0/asyncify.pyi @@ -0,0 +1,6 @@ +from auth0.authentication import Users as Users +from auth0.authentication.base import AuthenticationBase as AuthenticationBase +from auth0.rest import RestClientOptions as RestClientOptions +from auth0.rest_async import AsyncRestClient as AsyncRestClient + +def asyncify(cls): ... 
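A short usage sketch for the asyncify helper stubbed above; the domain, token, and event-loop wiring are placeholders, and the *_async names follow the wrappers the runtime generates.

import asyncio
from auth0.asyncify import asyncify
from auth0.management import Users

# placeholder tenant and Management API token
users = asyncify(Users)(domain="example.auth0.com", token="mgmt_api_token")

async def main() -> None:
    # asyncified classes expose an *_async twin for each sync method
    print(await users.all_async())

asyncio.run(main())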
diff --git a/stubs/auth0-python/auth0/authentication/__init__.pyi b/stubs/auth0-python/auth0/authentication/__init__.pyi new file mode 100644 index 000000000000..d6263712af40 --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/__init__.pyi @@ -0,0 +1,10 @@ +from .database import Database as Database +from .delegated import Delegated as Delegated +from .enterprise import Enterprise as Enterprise +from .get_token import GetToken as GetToken +from .passwordless import Passwordless as Passwordless +from .revoke_token import RevokeToken as RevokeToken +from .social import Social as Social +from .users import Users as Users + +__all__ = ("Database", "Delegated", "Enterprise", "GetToken", "Passwordless", "RevokeToken", "Social", "Users") diff --git a/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi b/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi new file mode 100644 index 000000000000..a65856b039d0 --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +from .. import TokenValidationError as TokenValidationError +from ..rest_async import AsyncRestClient as AsyncRestClient +from .token_verifier import ( + AsymmetricSignatureVerifier as AsymmetricSignatureVerifier, + JwksFetcher as JwksFetcher, + TokenVerifier as TokenVerifier, +) + +class AsyncAsymmetricSignatureVerifier(AsymmetricSignatureVerifier): + def __init__(self, jwks_url: str, algorithm: str = "RS256") -> None: ... + def set_session(self, session) -> None: ... + +class AsyncJwksFetcher(JwksFetcher): + def __init__(self, *args, **kwargs) -> None: ... + def set_session(self, session) -> None: ... + async def get_key(self, key_id: str): ... + +class AsyncTokenVerifier(TokenVerifier): + iss: Incomplete + aud: Incomplete + leeway: Incomplete + def __init__( + self, signature_verifier: AsyncAsymmetricSignatureVerifier, issuer: str, audience: str, leeway: int = 0 + ) -> None: ... + def set_session(self, session) -> None: ... diff --git a/stubs/auth0-python/auth0/authentication/back_channel_login.pyi b/stubs/auth0-python/auth0/authentication/back_channel_login.pyi new file mode 100644 index 000000000000..7e45855fa264 --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/back_channel_login.pyi @@ -0,0 +1,4 @@ +from .base import AuthenticationBase as AuthenticationBase + +class BackChannelLogin(AuthenticationBase): + def back_channel_login(self, binding_message: str, login_hint: str, scope: str, **kwargs): ... 
diff --git a/stubs/auth0-python/auth0/authentication/base.pyi b/stubs/auth0-python/auth0/authentication/base.pyi new file mode 100644 index 000000000000..e67c3c414e00 --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/base.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete + +from auth0.rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from auth0.types import RequestData as RequestData + +from .client_authentication import add_client_authentication as add_client_authentication + +UNKNOWN_ERROR: str + +class AuthenticationBase: + domain: Incomplete + client_id: Incomplete + client_secret: Incomplete + client_assertion_signing_key: Incomplete + client_assertion_signing_alg: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + client_id: str, + client_secret: str | None = None, + client_assertion_signing_key: str | None = None, + client_assertion_signing_alg: str | None = None, + telemetry: bool = True, + timeout: float | tuple[float, float] = 5.0, + protocol: str = "https", + ) -> None: ... + def post(self, url: str, data: RequestData | None = None, headers: dict[str, str] | None = None): ... + def authenticated_post(self, url: str, data: dict[str, Incomplete], headers: dict[str, str] | None = None): ... + def get(self, url: str, params: dict[str, Incomplete] | None = None, headers: dict[str, str] | None = None): ... diff --git a/stubs/auth0-python/auth0/authentication/client_authentication.pyi b/stubs/auth0-python/auth0/authentication/client_authentication.pyi new file mode 100644 index 000000000000..e75647fe00f5 --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/client_authentication.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +def create_client_assertion_jwt( + domain: str, client_id: str, client_assertion_signing_key: str, client_assertion_signing_alg: str | None +) -> str: ... +def add_client_authentication( + payload: dict[str, Incomplete], + domain: str, + client_id: str, + client_secret: str | None, + client_assertion_signing_key: str | None, + client_assertion_signing_alg: str | None, +) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/authentication/database.pyi b/stubs/auth0-python/auth0/authentication/database.pyi new file mode 100644 index 000000000000..e08752c27897 --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/database.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from .base import AuthenticationBase as AuthenticationBase + +class Database(AuthenticationBase): + def signup( + self, + email: str, + password: str, + connection: str, + username: str | None = None, + user_metadata: dict[str, Incomplete] | None = None, + given_name: str | None = None, + family_name: str | None = None, + name: str | None = None, + nickname: str | None = None, + picture: str | None = None, + ) -> dict[str, Incomplete]: ... + def change_password( + self, email: str, connection: str, password: str | None = None, organization: str | None = None + ) -> str: ... 
diff --git a/stubs/auth0-python/auth0/authentication/delegated.pyi b/stubs/auth0-python/auth0/authentication/delegated.pyi new file mode 100644 index 000000000000..cedc50b05e6d --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/delegated.pyi @@ -0,0 +1,12 @@ +from .base import AuthenticationBase as AuthenticationBase + +class Delegated(AuthenticationBase): + def get_token( + self, + target: str, + api_type: str, + grant_type: str, + id_token: str | None = None, + refresh_token: str | None = None, + scope: str = "openid", + ): ... diff --git a/stubs/auth0-python/auth0/authentication/enterprise.pyi b/stubs/auth0-python/auth0/authentication/enterprise.pyi new file mode 100644 index 000000000000..a438ec9a0fad --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/enterprise.pyi @@ -0,0 +1,5 @@ +from .base import AuthenticationBase as AuthenticationBase + +class Enterprise(AuthenticationBase): + def saml_metadata(self): ... + def wsfed_metadata(self): ... diff --git a/stubs/auth0-python/auth0/authentication/get_token.pyi b/stubs/auth0-python/auth0/authentication/get_token.pyi new file mode 100644 index 000000000000..64457306c40f --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/get_token.pyi @@ -0,0 +1,21 @@ +from .base import AuthenticationBase as AuthenticationBase + +class GetToken(AuthenticationBase): + def authorization_code(self, code: str, redirect_uri: str | None, grant_type: str = "authorization_code"): ... + def authorization_code_pkce( + self, code_verifier: str, code: str, redirect_uri: str | None, grant_type: str = "authorization_code" + ): ... + def client_credentials(self, audience: str, grant_type: str = "client_credentials", organization: str | None = None): ... + def login( + self, + username: str, + password: str, + scope: str | None = None, + realm: str | None = None, + audience: str | None = None, + grant_type: str = "http://auth0.com/oauth/grant-type/password-realm", + forwarded_for: str | None = None, + ): ... + def refresh_token(self, refresh_token: str, scope: str = "", grant_type: str = "refresh_token"): ... + def passwordless_login(self, username: str, otp: str, realm: str, scope: str, audience: str): ... + def backchannel_login(self, auth_req_id: str, grant_type: str = "urn:openid:params:grant-type:ciba"): ... diff --git a/stubs/auth0-python/auth0/authentication/passwordless.pyi b/stubs/auth0-python/auth0/authentication/passwordless.pyi new file mode 100644 index 000000000000..841c6f4fcc9c --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/passwordless.pyi @@ -0,0 +1,5 @@ +from .base import AuthenticationBase as AuthenticationBase + +class Passwordless(AuthenticationBase): + def email(self, email: str, send: str = "link", auth_params: dict[str, str] | None = None): ... + def sms(self, phone_number: str): ... diff --git a/stubs/auth0-python/auth0/authentication/pushed_authorization_requests.pyi b/stubs/auth0-python/auth0/authentication/pushed_authorization_requests.pyi new file mode 100644 index 000000000000..67bdc0074e35 --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/pushed_authorization_requests.pyi @@ -0,0 +1,4 @@ +from .base import AuthenticationBase as AuthenticationBase + +class PushedAuthorizationRequests(AuthenticationBase): + def pushed_authorization_request(self, response_type: str, redirect_uri: str, **kwargs): ... 
diff --git a/stubs/auth0-python/auth0/authentication/revoke_token.pyi b/stubs/auth0-python/auth0/authentication/revoke_token.pyi new file mode 100644 index 000000000000..7190be6c6535 --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/revoke_token.pyi @@ -0,0 +1,4 @@ +from .base import AuthenticationBase as AuthenticationBase + +class RevokeToken(AuthenticationBase): + def revoke_refresh_token(self, token: str): ... diff --git a/stubs/auth0-python/auth0/authentication/social.pyi b/stubs/auth0-python/auth0/authentication/social.pyi new file mode 100644 index 000000000000..b63c702fd934 --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/social.pyi @@ -0,0 +1,4 @@ +from .base import AuthenticationBase as AuthenticationBase + +class Social(AuthenticationBase): + def login(self, access_token: str, connection: str, scope: str = "openid"): ... diff --git a/stubs/auth0-python/auth0/authentication/token_verifier.pyi b/stubs/auth0-python/auth0/authentication/token_verifier.pyi new file mode 100644 index 000000000000..c8c38ca3b0db --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/token_verifier.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from auth0.exceptions import TokenValidationError as TokenValidationError + +class SignatureVerifier: + DISABLE_JWT_CHECKS: ClassVar[dict[str, bool]] + def __init__(self, algorithm: str) -> None: ... + async def verify_signature(self, token: str) -> dict[str, Incomplete]: ... + +class SymmetricSignatureVerifier(SignatureVerifier): + def __init__(self, shared_secret: str, algorithm: str = "HS256") -> None: ... + +class JwksFetcher: + CACHE_TTL: ClassVar[int] + def __init__(self, jwks_url: str, cache_ttl: int = ...) -> None: ... + def get_key(self, key_id: str): ... + +class AsymmetricSignatureVerifier(SignatureVerifier): + def __init__(self, jwks_url: str, algorithm: str = "RS256", cache_ttl: int = ...) -> None: ... + +class TokenVerifier: + iss: Incomplete + aud: Incomplete + leeway: Incomplete + def __init__(self, signature_verifier: SignatureVerifier, issuer: str, audience: str, leeway: int = 0) -> None: ... + def verify( + self, token: str, nonce: str | None = None, max_age: int | None = None, organization: str | None = None + ) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/authentication/users.pyi b/stubs/auth0-python/auth0/authentication/users.pyi new file mode 100644 index 000000000000..c5a15db5221b --- /dev/null +++ b/stubs/auth0-python/auth0/authentication/users.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +from auth0.rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from auth0.types import TimeoutType as TimeoutType + +class Users: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__(self, domain: str, telemetry: bool = True, timeout: TimeoutType = 5.0, protocol: str = "https") -> None: ... + def userinfo(self, access_token: str) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/exceptions.pyi b/stubs/auth0-python/auth0/exceptions.pyi new file mode 100644 index 000000000000..f500aad795bc --- /dev/null +++ b/stubs/auth0-python/auth0/exceptions.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +class Auth0Error(Exception): + status_code: Incomplete + error_code: Incomplete + message: Incomplete + content: Incomplete + def __init__(self, status_code: int, error_code: str, message: str, content: Incomplete | None = None) -> None: ... 
+ +class RateLimitError(Auth0Error): + reset_at: Incomplete + def __init__(self, error_code: str, message: str, reset_at: int) -> None: ... + +class TokenValidationError(Exception): ... diff --git a/stubs/auth0-python/auth0/management/__init__.pyi b/stubs/auth0-python/auth0/management/__init__.pyi new file mode 100644 index 000000000000..37f4aa7c2067 --- /dev/null +++ b/stubs/auth0-python/auth0/management/__init__.pyi @@ -0,0 +1,63 @@ +from .actions import Actions as Actions +from .attack_protection import AttackProtection as AttackProtection +from .auth0 import Auth0 as Auth0 +from .blacklists import Blacklists as Blacklists +from .branding import Branding as Branding +from .client_credentials import ClientCredentials as ClientCredentials +from .client_grants import ClientGrants as ClientGrants +from .clients import Clients as Clients +from .connections import Connections as Connections +from .custom_domains import CustomDomains as CustomDomains +from .device_credentials import DeviceCredentials as DeviceCredentials +from .email_templates import EmailTemplates as EmailTemplates +from .emails import Emails as Emails +from .grants import Grants as Grants +from .guardian import Guardian as Guardian +from .hooks import Hooks as Hooks +from .jobs import Jobs as Jobs +from .log_streams import LogStreams as LogStreams +from .logs import Logs as Logs +from .organizations import Organizations as Organizations +from .resource_servers import ResourceServers as ResourceServers +from .roles import Roles as Roles +from .rules import Rules as Rules +from .rules_configs import RulesConfigs as RulesConfigs +from .stats import Stats as Stats +from .tenants import Tenants as Tenants +from .tickets import Tickets as Tickets +from .user_blocks import UserBlocks as UserBlocks +from .users import Users as Users +from .users_by_email import UsersByEmail as UsersByEmail + +__all__ = ( + "Auth0", + "Actions", + "AttackProtection", + "Blacklists", + "Branding", + "ClientCredentials", + "ClientGrants", + "Clients", + "Connections", + "CustomDomains", + "DeviceCredentials", + "EmailTemplates", + "Emails", + "Grants", + "Guardian", + "Hooks", + "Jobs", + "LogStreams", + "Logs", + "Organizations", + "ResourceServers", + "Roles", + "RulesConfigs", + "Rules", + "Stats", + "Tenants", + "Tickets", + "UserBlocks", + "UsersByEmail", + "Users", +) diff --git a/stubs/auth0-python/auth0/management/actions.pyi b/stubs/auth0-python/auth0/management/actions.pyi new file mode 100644 index 000000000000..bce092dff25e --- /dev/null +++ b/stubs/auth0-python/auth0/management/actions.pyi @@ -0,0 +1,64 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Actions: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get_actions( + self, + trigger_id: str | None = None, + action_name: str | None = None, + deployed: bool | None = None, + installed: bool = False, + page: int | None = None, + per_page: int | None = None, + ): ... + async def get_actions_async( + self, + trigger_id: str | None = None, + action_name: str | None = None, + deployed: bool | None = None, + installed: bool = False, + page: int | None = None, + per_page: int | None = None, + ): ... 
+ def create_action(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_action_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def update_action(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_action_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_action(self, id: str) -> dict[str, Incomplete]: ... + async def get_action_async(self, id: str) -> dict[str, Incomplete]: ... + def delete_action(self, id: str, force: bool = False): ... + async def delete_action_async(self, id: str, force: bool = False): ... + def get_triggers(self) -> dict[str, Incomplete]: ... + async def get_triggers_async(self) -> dict[str, Incomplete]: ... + def get_execution(self, id: str) -> dict[str, Incomplete]: ... + async def get_execution_async(self, id: str) -> dict[str, Incomplete]: ... + def get_action_versions(self, id: str, page: int | None = None, per_page: int | None = None) -> dict[str, Incomplete]: ... + async def get_action_versions_async( + self, id: str, page: int | None = None, per_page: int | None = None + ) -> dict[str, Incomplete]: ... + def get_trigger_bindings(self, id: str, page: int | None = None, per_page: int | None = None) -> dict[str, Incomplete]: ... + async def get_trigger_bindings_async( + self, id: str, page: int | None = None, per_page: int | None = None + ) -> dict[str, Incomplete]: ... + def get_action_version(self, action_id: str, version_id: str) -> dict[str, Incomplete]: ... + async def get_action_version_async(self, action_id: str, version_id: str) -> dict[str, Incomplete]: ... + def deploy_action(self, id: str) -> dict[str, Incomplete]: ... + async def deploy_action_async(self, id: str) -> dict[str, Incomplete]: ... + def rollback_action_version(self, action_id: str, version_id: str) -> dict[str, Incomplete]: ... + async def rollback_action_version_async(self, action_id: str, version_id: str) -> dict[str, Incomplete]: ... + def update_trigger_bindings(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_trigger_bindings_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/async_auth0.pyi b/stubs/auth0-python/auth0/management/async_auth0.pyi new file mode 100644 index 000000000000..6b3f78886ca1 --- /dev/null +++ b/stubs/auth0-python/auth0/management/async_auth0.pyi @@ -0,0 +1,15 @@ +from types import TracebackType +from typing_extensions import Self + +from auth0.rest import RestClientOptions as RestClientOptions + +from ..asyncify import asyncify as asyncify +from .auth0 import Auth0 as Auth0 + +class AsyncAuth0: + def __init__(self, domain: str, token: str, rest_options: RestClientOptions | None = None) -> None: ... + def set_session(self, session) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... 
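A sketch of the async context-manager protocol typed above; it assumes, per the runtime implementation, that AsyncAuth0 exposes the same sub-clients as Auth0 with *_async methods (those attributes are not declared in the stub), and the domain and token are placeholders.

import asyncio
from auth0.management.async_auth0 import AsyncAuth0

async def main() -> None:
    async with AsyncAuth0("example.auth0.com", "mgmt_api_token") as auth0:
        # assumed asyncified sub-client; not part of the stub above
        print(await auth0.connections.all_async())

asyncio.run(main())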
diff --git a/stubs/auth0-python/auth0/management/attack_protection.pyi b/stubs/auth0-python/auth0/management/attack_protection.pyi new file mode 100644 index 000000000000..d84ecc180939 --- /dev/null +++ b/stubs/auth0-python/auth0/management/attack_protection.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class AttackProtection: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get_breached_password_detection(self) -> dict[str, Incomplete]: ... + async def get_breached_password_detection_async(self) -> dict[str, Incomplete]: ... + def update_breached_password_detection(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_breached_password_detection_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_brute_force_protection(self) -> dict[str, Incomplete]: ... + async def get_brute_force_protection_async(self) -> dict[str, Incomplete]: ... + def update_brute_force_protection(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_brute_force_protection_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_suspicious_ip_throttling(self) -> dict[str, Incomplete]: ... + async def get_suspicious_ip_throttling_async(self) -> dict[str, Incomplete]: ... + def update_suspicious_ip_throttling(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_suspicious_ip_throttling_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... 
diff --git a/stubs/auth0-python/auth0/management/auth0.pyi b/stubs/auth0-python/auth0/management/auth0.pyi new file mode 100644 index 000000000000..14f7e64caa38 --- /dev/null +++ b/stubs/auth0-python/auth0/management/auth0.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete + +from auth0.rest import RestClientOptions as RestClientOptions + +from .actions import Actions as Actions +from .attack_protection import AttackProtection as AttackProtection +from .blacklists import Blacklists as Blacklists +from .branding import Branding as Branding +from .client_credentials import ClientCredentials as ClientCredentials +from .client_grants import ClientGrants as ClientGrants +from .clients import Clients as Clients +from .connections import Connections as Connections +from .custom_domains import CustomDomains as CustomDomains +from .device_credentials import DeviceCredentials as DeviceCredentials +from .email_templates import EmailTemplates as EmailTemplates +from .emails import Emails as Emails +from .grants import Grants as Grants +from .guardian import Guardian as Guardian +from .hooks import Hooks as Hooks +from .jobs import Jobs as Jobs +from .log_streams import LogStreams as LogStreams +from .logs import Logs as Logs +from .organizations import Organizations as Organizations +from .prompts import Prompts as Prompts +from .resource_servers import ResourceServers as ResourceServers +from .roles import Roles as Roles +from .rules import Rules as Rules +from .rules_configs import RulesConfigs as RulesConfigs +from .stats import Stats as Stats +from .tenants import Tenants as Tenants +from .tickets import Tickets as Tickets +from .user_blocks import UserBlocks as UserBlocks +from .users import Users as Users +from .users_by_email import UsersByEmail as UsersByEmail + +class Auth0: + actions: Incomplete + attack_protection: Incomplete + blacklists: Incomplete + branding: Incomplete + client_credentials: Incomplete + client_grants: Incomplete + clients: Incomplete + connections: Incomplete + custom_domains: Incomplete + device_credentials: Incomplete + email_templates: Incomplete + emails: Incomplete + grants: Incomplete + guardian: Incomplete + hooks: Incomplete + jobs: Incomplete + log_streams: Incomplete + logs: Incomplete + organizations: Incomplete + prompts: Incomplete + resource_servers: Incomplete + roles: Incomplete + rules_configs: Incomplete + rules: Incomplete + stats: Incomplete + tenants: Incomplete + tickets: Incomplete + user_blocks: Incomplete + users_by_email: Incomplete + users: Incomplete + def __init__(self, domain: str, token: str, rest_options: RestClientOptions | None = None) -> None: ... diff --git a/stubs/auth0-python/auth0/management/blacklists.pyi b/stubs/auth0-python/auth0/management/blacklists.pyi new file mode 100644 index 000000000000..9baf22376176 --- /dev/null +++ b/stubs/auth0-python/auth0/management/blacklists.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Blacklists: + url: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get(self, aud: str | None = None) -> list[dict[str, str]]: ... + async def get_async(self, aud: str | None = None) -> list[dict[str, str]]: ... 
+ def create(self, jti: str, aud: str | None = None) -> dict[str, str]: ... + async def create_async(self, jti: str, aud: str | None = None) -> dict[str, str]: ... diff --git a/stubs/auth0-python/auth0/management/branding.pyi b/stubs/auth0-python/auth0/management/branding.pyi new file mode 100644 index 000000000000..71be39cf4b6b --- /dev/null +++ b/stubs/auth0-python/auth0/management/branding.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Branding: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get(self) -> dict[str, Incomplete]: ... + async def get_async(self) -> dict[str, Incomplete]: ... + def update(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_template_universal_login(self) -> dict[str, Incomplete]: ... + async def get_template_universal_login_async(self) -> dict[str, Incomplete]: ... + def delete_template_universal_login(self): ... + async def delete_template_universal_login_async(self): ... + def update_template_universal_login(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_template_universal_login_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_default_branding_theme(self) -> dict[str, Incomplete]: ... + async def get_default_branding_theme_async(self) -> dict[str, Incomplete]: ... + def get_branding_theme(self, theme_id: str) -> dict[str, Incomplete]: ... + async def get_branding_theme_async(self, theme_id: str) -> dict[str, Incomplete]: ... + def delete_branding_theme(self, theme_id: str): ... + async def delete_branding_theme_async(self, theme_id: str): ... + def update_branding_theme(self, theme_id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_branding_theme_async(self, theme_id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def create_branding_theme(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_branding_theme_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/client_credentials.pyi b/stubs/auth0-python/auth0/management/client_credentials.pyi new file mode 100644 index 000000000000..895f79bcc5ee --- /dev/null +++ b/stubs/auth0-python/auth0/management/client_credentials.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class ClientCredentials: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all(self, client_id: str) -> list[dict[str, Incomplete]]: ... + async def all_async(self, client_id: str) -> list[dict[str, Incomplete]]: ... + def get(self, client_id: str, id: str) -> dict[str, Incomplete]: ... + async def get_async(self, client_id: str, id: str) -> dict[str, Incomplete]: ... 
+ def create(self, client_id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, client_id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete(self, client_id: str, id: str) -> dict[str, Incomplete]: ... + async def delete_async(self, client_id: str, id: str) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/client_grants.pyi b/stubs/auth0-python/auth0/management/client_grants.pyi new file mode 100644 index 000000000000..bae2e974df3c --- /dev/null +++ b/stubs/auth0-python/auth0/management/client_grants.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class ClientGrants: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all( + self, + audience: str | None = None, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + client_id: str | None = None, + allow_any_organization: bool | None = None, + ) -> dict[str, Incomplete]: ... + async def all_async( + self, + audience: str | None = None, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + client_id: str | None = None, + allow_any_organization: bool | None = None, + ) -> dict[str, Incomplete]: ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... + def update(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_organizations( + self, + id: str, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + from_param: str | None = None, + take: int | None = None, + ) -> dict[str, Incomplete]: ... + async def get_organizations_async( + self, + id: str, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + from_param: str | None = None, + take: int | None = None, + ) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/clients.pyi b/stubs/auth0-python/auth0/management/clients.pyi new file mode 100644 index 000000000000..1144a33b3dff --- /dev/null +++ b/stubs/auth0-python/auth0/management/clients.pyi @@ -0,0 +1,44 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Clients: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all( + self, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + extra_params: dict[str, Incomplete] | None = None, + ) -> list[dict[str, Incomplete]]: ... 
+ async def all_async( + self, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + extra_params: dict[str, Incomplete] | None = None, + ) -> list[dict[str, Incomplete]]: ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get(self, id: str, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + async def get_async(self, id: str, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... + def update(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def rotate_secret(self, id: str) -> dict[str, Incomplete]: ... + async def rotate_secret_async(self, id: str) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/connections.pyi b/stubs/auth0-python/auth0/management/connections.pyi new file mode 100644 index 000000000000..a7b53124c8e6 --- /dev/null +++ b/stubs/auth0-python/auth0/management/connections.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Connections: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all( + self, + strategy: str | None = None, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + extra_params: dict[str, Incomplete] | None = None, + name: str | None = None, + ) -> list[dict[str, Incomplete]]: ... + async def all_async( + self, + strategy: str | None = None, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + extra_params: dict[str, Incomplete] | None = None, + name: str | None = None, + ) -> list[dict[str, Incomplete]]: ... + def get(self, id: str, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + async def get_async(self, id: str, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... + def update(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete_user_by_email(self, id: str, email: str): ... + async def delete_user_by_email_async(self, id: str, email: str): ... 
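A usage sketch matching the Connections.all() signature above; the domain and token are placeholders, and "auth0" is the strategy name for database connections.

from auth0.management import Connections

connections = Connections("example.auth0.com", "mgmt_api_token")
# filter by strategy and trim the payload to two fields
database_connections = connections.all(strategy="auth0", fields=["id", "name"], include_fields=True)
for connection in database_connections:
    print(connection["id"], connection["name"])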
diff --git a/stubs/auth0-python/auth0/management/custom_domains.pyi b/stubs/auth0-python/auth0/management/custom_domains.pyi new file mode 100644 index 000000000000..84b0a1859cb0 --- /dev/null +++ b/stubs/auth0-python/auth0/management/custom_domains.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class CustomDomains: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all(self) -> list[dict[str, Incomplete]]: ... + async def all_async(self) -> list[dict[str, Incomplete]]: ... + def get(self, id: str) -> dict[str, Incomplete]: ... + async def get_async(self, id: str) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... + def create_new(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_new_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def verify(self, id: str) -> dict[str, Incomplete]: ... + async def verify_async(self, id: str) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/device_credentials.pyi b/stubs/auth0-python/auth0/management/device_credentials.pyi new file mode 100644 index 000000000000..3d1f8e4f3a1b --- /dev/null +++ b/stubs/auth0-python/auth0/management/device_credentials.pyi @@ -0,0 +1,44 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class DeviceCredentials: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get( + self, + user_id: str, + client_id: str, + type: str, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + ): ... + async def get_async( + self, + user_id: str, + client_id: str, + type: str, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + ): ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... diff --git a/stubs/auth0-python/auth0/management/email_templates.pyi b/stubs/auth0-python/auth0/management/email_templates.pyi new file mode 100644 index 000000000000..9a4e8919da65 --- /dev/null +++ b/stubs/auth0-python/auth0/management/email_templates.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class EmailTemplates: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... 
+ def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get(self, template_name: str) -> dict[str, Incomplete]: ... + async def get_async(self, template_name: str) -> dict[str, Incomplete]: ... + def update(self, template_name: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, template_name: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/emails.pyi b/stubs/auth0-python/auth0/management/emails.pyi new file mode 100644 index 000000000000..f681b1ea9150 --- /dev/null +++ b/stubs/auth0-python/auth0/management/emails.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Emails: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get(self, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + async def get_async(self, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + def config(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def config_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete(self): ... + async def delete_async(self): ... + def update(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/grants.pyi b/stubs/auth0-python/auth0/management/grants.pyi new file mode 100644 index 000000000000..e7946afc5a65 --- /dev/null +++ b/stubs/auth0-python/auth0/management/grants.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Grants: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all( + self, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + extra_params: dict[str, Incomplete] | None = None, + ): ... + async def all_async( + self, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + extra_params: dict[str, Incomplete] | None = None, + ): ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... 
diff --git a/stubs/auth0-python/auth0/management/guardian.pyi b/stubs/auth0-python/auth0/management/guardian.pyi new file mode 100644 index 000000000000..19e46a0e238e --- /dev/null +++ b/stubs/auth0-python/auth0/management/guardian.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Guardian: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all_factors(self) -> list[dict[str, Incomplete]]: ... + async def all_factors_async(self) -> list[dict[str, Incomplete]]: ... + def update_factor(self, name: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_factor_async(self, name: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def update_templates(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_templates_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_templates(self) -> dict[str, Incomplete]: ... + async def get_templates_async(self) -> dict[str, Incomplete]: ... + def get_enrollment(self, id: str) -> dict[str, Incomplete]: ... + async def get_enrollment_async(self, id: str) -> dict[str, Incomplete]: ... + def delete_enrollment(self, id: str): ... + async def delete_enrollment_async(self, id: str): ... + def create_enrollment_ticket(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_enrollment_ticket_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_factor_providers(self, factor_name: str, name: str) -> dict[str, Incomplete]: ... + async def get_factor_providers_async(self, factor_name: str, name: str) -> dict[str, Incomplete]: ... + def update_factor_providers(self, factor_name: str, name: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_factor_providers_async( + self, factor_name: str, name: str, body: dict[str, Incomplete] + ) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/hooks.pyi b/stubs/auth0-python/auth0/management/hooks.pyi new file mode 100644 index 000000000000..37ceddff41b6 --- /dev/null +++ b/stubs/auth0-python/auth0/management/hooks.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Hooks: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all( + self, + enabled: bool = True, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + ): ... + async def all_async( + self, + enabled: bool = True, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + ): ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... 
+ async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get(self, id: str, fields: list[str] | None = None) -> dict[str, Incomplete]: ... + async def get_async(self, id: str, fields: list[str] | None = None) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... + def update(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_secrets(self, id: str) -> dict[str, Incomplete]: ... + async def get_secrets_async(self, id: str) -> dict[str, Incomplete]: ... + def add_secrets(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def add_secrets_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete_secrets(self, id: str, body: list[str]): ... + async def delete_secrets_async(self, id: str, body: list[str]): ... + def update_secrets(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_secrets_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/jobs.pyi b/stubs/auth0-python/auth0/management/jobs.pyi new file mode 100644 index 000000000000..7ac7699525ba --- /dev/null +++ b/stubs/auth0-python/auth0/management/jobs.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Jobs: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get(self, id: str) -> dict[str, Incomplete]: ... + async def get_async(self, id: str) -> dict[str, Incomplete]: ... + def get_failed_job(self, id: str) -> dict[str, Incomplete]: ... + async def get_failed_job_async(self, id: str) -> dict[str, Incomplete]: ... + def export_users(self, body: dict[str, Incomplete]): ... + async def export_users_async(self, body: dict[str, Incomplete]): ... + def import_users( + self, + connection_id: str, + file_obj, + upsert: bool = False, + send_completion_email: bool = True, + external_id: str | None = None, + ) -> dict[str, Incomplete]: ... + async def import_users_async( + self, + connection_id: str, + file_obj, + upsert: bool = False, + send_completion_email: bool = True, + external_id: str | None = None, + ) -> dict[str, Incomplete]: ... + def send_verification_email(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def send_verification_email_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... 
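
A short sketch of driving the Jobs signatures above; the connection id and file name are hypothetical, and the "id" key on the returned dict is assumed from the API rather than guaranteed by the stub:

    from auth0.management.jobs import Jobs

    jobs = Jobs(domain="tenant.example.auth0.com", token="MGMT_API_TOKEN")

    # file_obj is deliberately left untyped in the stub; a binary file object works.
    with open("users.json", "rb") as file_obj:
        job = jobs.import_users(connection_id="con_123", file_obj=file_obj, upsert=True)

    # Poll the job later; the dict layout comes from the API, not from the stub.
    status = jobs.get(job["id"])
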
diff --git a/stubs/auth0-python/auth0/management/log_streams.pyi b/stubs/auth0-python/auth0/management/log_streams.pyi new file mode 100644 index 000000000000..85dcd922690a --- /dev/null +++ b/stubs/auth0-python/auth0/management/log_streams.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from builtins import list as _list + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class LogStreams: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def list(self) -> _list[dict[str, Incomplete]]: ... + async def list_async(self) -> _list[dict[str, Incomplete]]: ... + def get(self, id: str) -> dict[str, Incomplete]: ... + async def get_async(self, id: str) -> dict[str, Incomplete]: ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete(self, id: str) -> dict[str, Incomplete]: ... + async def delete_async(self, id: str) -> dict[str, Incomplete]: ... + def update(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/logs.pyi b/stubs/auth0-python/auth0/management/logs.pyi new file mode 100644 index 000000000000..a5ac9e4c74b7 --- /dev/null +++ b/stubs/auth0-python/auth0/management/logs.pyi @@ -0,0 +1,44 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Logs: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def search( + self, + page: int = 0, + per_page: int = 50, + sort: str | None = None, + q: str | None = None, + include_totals: bool = True, + fields: list[str] | None = None, + from_param: str | None = None, + take: int | None = None, + include_fields: bool = True, + ) -> dict[str, Incomplete]: ... + async def search_async( + self, + page: int = 0, + per_page: int = 50, + sort: str | None = None, + q: str | None = None, + include_totals: bool = True, + fields: list[str] | None = None, + from_param: str | None = None, + take: int | None = None, + include_fields: bool = True, + ) -> dict[str, Incomplete]: ... + def get(self, id: str) -> dict[str, Incomplete]: ... + async def get_async(self, id: str) -> dict[str, Incomplete]: ... 
diff --git a/stubs/auth0-python/auth0/management/organizations.pyi b/stubs/auth0-python/auth0/management/organizations.pyi new file mode 100644 index 000000000000..9dc1205b3e43 --- /dev/null +++ b/stubs/auth0-python/auth0/management/organizations.pyi @@ -0,0 +1,134 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Organizations: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all_organizations( + self, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = True, + from_param: str | None = None, + take: int | None = None, + ) -> dict[str, Incomplete]: ... + async def all_organizations_async( + self, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = True, + from_param: str | None = None, + take: int | None = None, + ) -> dict[str, Incomplete]: ... + def get_organization_by_name(self, name: str | None = None) -> dict[str, Incomplete]: ... + async def get_organization_by_name_async(self, name: str | None = None) -> dict[str, Incomplete]: ... + def get_organization(self, id: str) -> dict[str, Incomplete]: ... + async def get_organization_async(self, id: str) -> dict[str, Incomplete]: ... + def create_organization(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_organization_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def update_organization(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_organization_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete_organization(self, id: str): ... + async def delete_organization_async(self, id: str): ... + def all_organization_connections( + self, id: str, page: int | None = None, per_page: int | None = None + ) -> list[dict[str, Incomplete]]: ... + async def all_organization_connections_async( + self, id: str, page: int | None = None, per_page: int | None = None + ) -> list[dict[str, Incomplete]]: ... + def get_organization_connection(self, id: str, connection_id: str) -> dict[str, Incomplete]: ... + async def get_organization_connection_async(self, id: str, connection_id: str) -> dict[str, Incomplete]: ... + def create_organization_connection(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_organization_connection_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def update_organization_connection( + self, id: str, connection_id: str, body: dict[str, Incomplete] + ) -> dict[str, Incomplete]: ... + async def update_organization_connection_async( + self, id: str, connection_id: str, body: dict[str, Incomplete] + ) -> dict[str, Incomplete]: ... + def delete_organization_connection(self, id: str, connection_id: str): ... + async def delete_organization_connection_async(self, id: str, connection_id: str): ... + def all_organization_members( + self, + id: str, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = True, + from_param: str | None = None, + take: int | None = None, + fields: list[str] | None = None, + include_fields: bool = True, + ) -> dict[str, Incomplete]: ... 
+ async def all_organization_members_async( + self, + id: str, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = True, + from_param: str | None = None, + take: int | None = None, + fields: list[str] | None = None, + include_fields: bool = True, + ) -> dict[str, Incomplete]: ... + def create_organization_members(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_organization_members_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete_organization_members(self, id: str, body: dict[str, Incomplete]): ... + async def delete_organization_members_async(self, id: str, body: dict[str, Incomplete]): ... + def all_organization_member_roles( + self, id: str, user_id: str, page: int | None = None, per_page: int | None = None, include_totals: bool = False + ) -> list[dict[str, Incomplete]]: ... + async def all_organization_member_roles_async( + self, id: str, user_id: str, page: int | None = None, per_page: int | None = None, include_totals: bool = False + ) -> list[dict[str, Incomplete]]: ... + def create_organization_member_roles(self, id: str, user_id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_organization_member_roles_async( + self, id: str, user_id: str, body: dict[str, Incomplete] + ) -> dict[str, Incomplete]: ... + def delete_organization_member_roles(self, id: str, user_id: str, body: dict[str, Incomplete]): ... + async def delete_organization_member_roles_async(self, id: str, user_id: str, body: dict[str, Incomplete]): ... + def all_organization_invitations( + self, id: str, page: int | None = None, per_page: int | None = None, include_totals: bool = False + ) -> dict[str, Incomplete]: ... + async def all_organization_invitations_async( + self, id: str, page: int | None = None, per_page: int | None = None, include_totals: bool = False + ) -> dict[str, Incomplete]: ... + def get_organization_invitation(self, id: str, invitaton_id: str) -> dict[str, Incomplete]: ... + async def get_organization_invitation_async(self, id: str, invitaton_id: str) -> dict[str, Incomplete]: ... + def create_organization_invitation(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_organization_invitation_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def delete_organization_invitation(self, id: str, invitation_id: str): ... + async def delete_organization_invitation_async(self, id: str, invitation_id: str): ... + def get_client_grants( + self, + id: str, + audience: str | None = None, + client_id: str | None = None, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + ) -> dict[str, Incomplete]: ... + async def get_client_grants_async( + self, + id: str, + audience: str | None = None, + client_id: str | None = None, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + ) -> dict[str, Incomplete]: ... + def add_client_grant(self, id: str, grant_id: str) -> dict[str, Incomplete]: ... + async def add_client_grant_async(self, id: str, grant_id: str) -> dict[str, Incomplete]: ... + def delete_client_grant(self, id: str, grant_id: str) -> dict[str, Incomplete]: ... + async def delete_client_grant_async(self, id: str, grant_id: str) -> dict[str, Incomplete]: ... 
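
Organizations supports both offset pagination (page/per_page) and checkpoint pagination (from_param/take). A hedged sketch; the "next" cursor key is an assumption about the API response, since the stub only promises dict[str, Incomplete]:

    from auth0.management.organizations import Organizations

    orgs = Organizations(domain="tenant.example.auth0.com", token="MGMT_API_TOKEN")

    # Offset pagination.
    first_page = orgs.all_organizations(page=0, per_page=50, include_totals=True)

    # Checkpoint pagination; "next" is assumed from the Management API docs.
    batch = orgs.all_organizations(take=50)
    more = orgs.all_organizations(from_param=batch.get("next"), take=50)
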
diff --git a/stubs/auth0-python/auth0/management/prompts.pyi b/stubs/auth0-python/auth0/management/prompts.pyi new file mode 100644 index 000000000000..37376d758e3c --- /dev/null +++ b/stubs/auth0-python/auth0/management/prompts.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Prompts: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get(self) -> dict[str, Incomplete]: ... + async def get_async(self) -> dict[str, Incomplete]: ... + def update(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_custom_text(self, prompt: str, language: str): ... + async def get_custom_text_async(self, prompt: str, language: str): ... + def update_custom_text(self, prompt: str, language: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_custom_text_async( + self, prompt: str, language: str, body: dict[str, Incomplete] + ) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/resource_servers.pyi b/stubs/auth0-python/auth0/management/resource_servers.pyi new file mode 100644 index 000000000000..8e6ca07b0b9d --- /dev/null +++ b/stubs/auth0-python/auth0/management/resource_servers.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class ResourceServers: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get_all(self, page: int | None = None, per_page: int | None = None, include_totals: bool = False): ... + async def get_all_async(self, page: int | None = None, per_page: int | None = None, include_totals: bool = False): ... + def get(self, id: str) -> dict[str, Incomplete]: ... + async def get_async(self, id: str) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... + def update(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... 
diff --git a/stubs/auth0-python/auth0/management/roles.pyi b/stubs/auth0-python/auth0/management/roles.pyi new file mode 100644 index 000000000000..1cb8b1ff9b34 --- /dev/null +++ b/stubs/auth0-python/auth0/management/roles.pyi @@ -0,0 +1,63 @@ +from _typeshed import Incomplete +from builtins import list as _list + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Roles: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def list( + self, page: int = 0, per_page: int = 25, include_totals: bool = True, name_filter: str | None = None + ) -> dict[str, Incomplete]: ... + async def list_async( + self, page: int = 0, per_page: int = 25, include_totals: bool = True, name_filter: str | None = None + ) -> dict[str, Incomplete]: ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get(self, id: str) -> dict[str, Incomplete]: ... + async def get_async(self, id: str) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... + def update(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def list_users( + self, + id: str, + page: int = 0, + per_page: int = 25, + include_totals: bool = True, + from_param: str | None = None, + take: int | None = None, + ) -> dict[str, Incomplete]: ... + async def list_users_async( + self, + id: str, + page: int = 0, + per_page: int = 25, + include_totals: bool = True, + from_param: str | None = None, + take: int | None = None, + ) -> dict[str, Incomplete]: ... + def add_users(self, id: str, users: _list[str]) -> dict[str, Incomplete]: ... + async def add_users_async(self, id: str, users: _list[str]) -> dict[str, Incomplete]: ... + def list_permissions( + self, id: str, page: int = 0, per_page: int = 25, include_totals: bool = True + ) -> dict[str, Incomplete]: ... + async def list_permissions_async( + self, id: str, page: int = 0, per_page: int = 25, include_totals: bool = True + ) -> dict[str, Incomplete]: ... + def remove_permissions(self, id: str, permissions: _list[dict[str, str]]): ... + async def remove_permissions_async(self, id: str, permissions: _list[dict[str, str]]): ... + def add_permissions(self, id: str, permissions: _list[dict[str, str]]) -> dict[str, Incomplete]: ... + async def add_permissions_async(self, id: str, permissions: _list[dict[str, str]]) -> dict[str, Incomplete]: ... 
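
Roles (like LogStreams above and Users below) imports builtins.list as _list because the class defines its own list() method, and inside the class body that method would otherwise shadow the builtin when later annotations are evaluated. A minimal, self-contained illustration of the problem the alias avoids (not code from the stub itself):

    from builtins import list as _list

    class Roles:
        def list(self) -> dict[str, object]:
            # From this point on, the name "list" inside the class body is this method.
            return {}

        # Annotating with plain list[str] here would resolve to the method above
        # (and fail at runtime with "'function' object is not subscriptable"),
        # so the builtins alias is used instead.
        def add_users(self, id: str, users: _list[str]) -> dict[str, object]:
            return {"id": id, "users": users}
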
diff --git a/stubs/auth0-python/auth0/management/rules.pyi b/stubs/auth0-python/auth0/management/rules.pyi new file mode 100644 index 000000000000..1eac99eb60f7 --- /dev/null +++ b/stubs/auth0-python/auth0/management/rules.pyi @@ -0,0 +1,46 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Rules: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all( + self, + stage: str = "login_success", + enabled: bool = True, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + ) -> dict[str, Incomplete]: ... + async def all_async( + self, + stage: str = "login_success", + enabled: bool = True, + fields: list[str] | None = None, + include_fields: bool = True, + page: int | None = None, + per_page: int | None = None, + include_totals: bool = False, + ) -> dict[str, Incomplete]: ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get(self, id: str, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + async def get_async(self, id: str, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... + def update(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/rules_configs.pyi b/stubs/auth0-python/auth0/management/rules_configs.pyi new file mode 100644 index 000000000000..f276a18d9a36 --- /dev/null +++ b/stubs/auth0-python/auth0/management/rules_configs.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class RulesConfigs: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def all(self) -> list[dict[str, Incomplete]]: ... + async def all_async(self) -> list[dict[str, Incomplete]]: ... + def unset(self, key: str): ... + async def unset_async(self, key: str): ... + def set(self, key: str, value: str) -> dict[str, Incomplete]: ... + async def set_async(self, key: str, value: str) -> dict[str, Incomplete]: ... 
diff --git a/stubs/auth0-python/auth0/management/stats.pyi b/stubs/auth0-python/auth0/management/stats.pyi new file mode 100644 index 000000000000..f3128be12eaf --- /dev/null +++ b/stubs/auth0-python/auth0/management/stats.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Stats: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def active_users(self) -> int: ... + async def active_users_async(self) -> int: ... + def daily_stats(self, from_date: str | None = None, to_date: str | None = None) -> list[dict[str, Incomplete]]: ... + async def daily_stats_async( + self, from_date: str | None = None, to_date: str | None = None + ) -> list[dict[str, Incomplete]]: ... diff --git a/stubs/auth0-python/auth0/management/tenants.pyi b/stubs/auth0-python/auth0/management/tenants.pyi new file mode 100644 index 000000000000..12a4df7ad40a --- /dev/null +++ b/stubs/auth0-python/auth0/management/tenants.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Tenants: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get(self, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + async def get_async(self, fields: list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + def update(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/management/tickets.pyi b/stubs/auth0-python/auth0/management/tickets.pyi new file mode 100644 index 000000000000..236596687f29 --- /dev/null +++ b/stubs/auth0-python/auth0/management/tickets.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Tickets: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def create_email_verification(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_email_verification_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def create_pswd_change(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_pswd_change_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... 
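
Several of these clients (Emails and Tenants above, Users below) share the fields/include_fields filtering convention: fields narrows the response to the named keys, and include_fields=False inverts the filter. A sketch with placeholder field names:

    from auth0.management.tenants import Tenants

    tenants = Tenants(domain="tenant.example.auth0.com", token="MGMT_API_TOKEN")

    # Fetch only the named settings; include_fields=False would exclude them instead.
    settings = tenants.get(fields=["friendly_name", "support_email"], include_fields=True)
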
diff --git a/stubs/auth0-python/auth0/management/user_blocks.pyi b/stubs/auth0-python/auth0/management/user_blocks.pyi new file mode 100644 index 000000000000..7417dd514e59 --- /dev/null +++ b/stubs/auth0-python/auth0/management/user_blocks.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class UserBlocks: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def get_by_identifier(self, identifier: str) -> dict[str, Incomplete]: ... + async def get_by_identifier_async(self, identifier: str) -> dict[str, Incomplete]: ... + def unblock_by_identifier(self, identifier: dict[str, Incomplete]): ... + async def unblock_by_identifier_async(self, identifier: dict[str, Incomplete]): ... + def get(self, id: str) -> dict[str, Incomplete]: ... + async def get_async(self, id: str) -> dict[str, Incomplete]: ... + def unblock(self, id: str): ... + async def unblock_async(self, id: str): ... diff --git a/stubs/auth0-python/auth0/management/users.pyi b/stubs/auth0-python/auth0/management/users.pyi new file mode 100644 index 000000000000..6ba1283d69cf --- /dev/null +++ b/stubs/auth0-python/auth0/management/users.pyi @@ -0,0 +1,117 @@ +from _typeshed import Incomplete +from builtins import list as _list + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class Users: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def list( + self, + page: int = 0, + per_page: int = 25, + sort: str | None = None, + connection: str | None = None, + q: str | None = None, + search_engine: str | None = None, + include_totals: bool = True, + fields: _list[str] | None = None, + include_fields: bool = True, + ) -> dict[str, Incomplete]: ... + async def list_async( + self, + page: int = 0, + per_page: int = 25, + sort: str | None = None, + connection: str | None = None, + q: str | None = None, + search_engine: str | None = None, + include_totals: bool = True, + fields: _list[str] | None = None, + include_fields: bool = True, + ) -> dict[str, Incomplete]: ... + def create(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_async(self, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def get(self, id: str, fields: _list[str] | None = None, include_fields: bool = True) -> dict[str, Incomplete]: ... + async def get_async( + self, id: str, fields: _list[str] | None = None, include_fields: bool = True + ) -> dict[str, Incomplete]: ... + def delete(self, id: str): ... + async def delete_async(self, id: str): ... + def update(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_async(self, id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def list_organizations( + self, id: str, page: int = 0, per_page: int = 25, include_totals: bool = True + ) -> dict[str, Incomplete]: ... 
+ async def list_organizations_async( + self, id: str, page: int = 0, per_page: int = 25, include_totals: bool = True + ) -> dict[str, Incomplete]: ... + def list_roles(self, id: str, page: int = 0, per_page: int = 25, include_totals: bool = True) -> dict[str, Incomplete]: ... + async def list_roles_async( + self, id: str, page: int = 0, per_page: int = 25, include_totals: bool = True + ) -> dict[str, Incomplete]: ... + def remove_roles(self, id: str, roles: _list[str]): ... + async def remove_roles_async(self, id: str, roles: _list[str]): ... + def add_roles(self, id: str, roles: _list[str]) -> dict[str, Incomplete]: ... + async def add_roles_async(self, id: str, roles: _list[str]) -> dict[str, Incomplete]: ... + def list_permissions( + self, id: str, page: int = 0, per_page: int = 25, include_totals: bool = True + ) -> dict[str, Incomplete]: ... + async def list_permissions_async( + self, id: str, page: int = 0, per_page: int = 25, include_totals: bool = True + ) -> dict[str, Incomplete]: ... + def remove_permissions(self, id: str, permissions: _list[str]): ... + async def remove_permissions_async(self, id: str, permissions: _list[str]): ... + def add_permissions(self, id: str, permissions: _list[str]) -> dict[str, Incomplete]: ... + async def add_permissions_async(self, id: str, permissions: _list[str]) -> dict[str, Incomplete]: ... + def delete_multifactor(self, id: str, provider: str): ... + async def delete_multifactor_async(self, id: str, provider: str): ... + def delete_authenticators(self, id: str): ... + async def delete_authenticators_async(self, id: str): ... + def unlink_user_account(self, id: str, provider: str, user_id: str): ... + async def unlink_user_account_async(self, id: str, provider: str, user_id: str): ... + def link_user_account(self, user_id: str, body: dict[str, Incomplete]) -> _list[dict[str, Incomplete]]: ... + async def link_user_account_async(self, user_id: str, body: dict[str, Incomplete]) -> _list[dict[str, Incomplete]]: ... + def regenerate_recovery_code(self, user_id: str) -> dict[str, Incomplete]: ... + async def regenerate_recovery_code_async(self, user_id: str) -> dict[str, Incomplete]: ... + def get_guardian_enrollments(self, user_id: str) -> dict[str, Incomplete]: ... + async def get_guardian_enrollments_async(self, user_id: str) -> dict[str, Incomplete]: ... + def get_log_events( + self, user_id: str, page: int = 0, per_page: int = 50, sort: str | None = None, include_totals: bool = False + ) -> dict[str, Incomplete]: ... + async def get_log_events_async( + self, user_id: str, page: int = 0, per_page: int = 50, sort: str | None = None, include_totals: bool = False + ) -> dict[str, Incomplete]: ... + def invalidate_remembered_browsers(self, user_id: str) -> dict[str, Incomplete]: ... + async def invalidate_remembered_browsers_async(self, user_id: str) -> dict[str, Incomplete]: ... + def get_authentication_methods(self, user_id: str) -> dict[str, Incomplete]: ... + async def get_authentication_methods_async(self, user_id: str) -> dict[str, Incomplete]: ... + def get_authentication_method_by_id(self, user_id: str, authentication_method_id: str) -> dict[str, Incomplete]: ... + async def get_authentication_method_by_id_async( + self, user_id: str, authentication_method_id: str + ) -> dict[str, Incomplete]: ... + def create_authentication_method(self, user_id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def create_authentication_method_async(self, user_id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... 
+ def update_authentication_methods(self, user_id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + async def update_authentication_methods_async(self, user_id: str, body: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def update_authentication_method_by_id( + self, user_id: str, authentication_method_id: str, body: dict[str, Incomplete] + ) -> dict[str, Incomplete]: ... + async def update_authentication_method_by_id_async( + self, user_id: str, authentication_method_id: str, body: dict[str, Incomplete] + ) -> dict[str, Incomplete]: ... + def delete_authentication_methods(self, user_id: str): ... + async def delete_authentication_methods_async(self, user_id: str): ... + def delete_authentication_method_by_id(self, user_id: str, authentication_method_id: str): ... + async def delete_authentication_method_by_id_async(self, user_id: str, authentication_method_id: str): ... diff --git a/stubs/auth0-python/auth0/management/users_by_email.pyi b/stubs/auth0-python/auth0/management/users_by_email.pyi new file mode 100644 index 000000000000..177fe7b092bf --- /dev/null +++ b/stubs/auth0-python/auth0/management/users_by_email.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions +from ..types import TimeoutType as TimeoutType + +class UsersByEmail: + domain: Incomplete + protocol: Incomplete + client: Incomplete + def __init__( + self, + domain: str, + token: str, + telemetry: bool = True, + timeout: TimeoutType = 5.0, + protocol: str = "https", + rest_options: RestClientOptions | None = None, + ) -> None: ... + def search_users_by_email( + self, email: str, fields: list[str] | None = None, include_fields: bool = True + ) -> list[dict[str, Incomplete]]: ... + async def search_users_by_email_async( + self, email: str, fields: list[str] | None = None, include_fields: bool = True + ) -> list[dict[str, Incomplete]]: ... diff --git a/stubs/auth0-python/auth0/rest.pyi b/stubs/auth0-python/auth0/rest.pyi new file mode 100644 index 000000000000..131dd1d75fa1 --- /dev/null +++ b/stubs/auth0-python/auth0/rest.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete +from collections.abc import Mapping + +import requests +from auth0.exceptions import Auth0Error as Auth0Error, RateLimitError as RateLimitError +from auth0.rest_async import RequestsResponse as RequestsResponse +from auth0.types import RequestData as RequestData, TimeoutType as TimeoutType + +UNKNOWN_ERROR: str + +class RestClientOptions: + telemetry: Incomplete + timeout: Incomplete + retries: Incomplete + def __init__(self, telemetry: bool = True, timeout: TimeoutType = 5.0, retries: int = 3) -> None: ... + +class RestClient: + options: Incomplete + jwt: Incomplete + base_headers: Incomplete + telemetry: Incomplete + timeout: Incomplete + def __init__( + self, jwt: str | None, telemetry: bool = True, timeout: TimeoutType = 5.0, options: RestClientOptions | None = None + ) -> None: ... + def MAX_REQUEST_RETRIES(self) -> int: ... + def MAX_REQUEST_RETRY_JITTER(self) -> int: ... + def MAX_REQUEST_RETRY_DELAY(self) -> int: ... + def MIN_REQUEST_RETRY_DELAY(self) -> int: ... + def get(self, url: str, params: dict[str, Incomplete] | None = None, headers: dict[str, str] | None = None): ... + def post(self, url: str, data: RequestData | None = None, headers: dict[str, str] | None = None): ... + def file_post(self, url: str, data: RequestData | None = None, files: dict[str, Incomplete] | None = None): ... 
+ def patch(self, url: str, data: RequestData | None = None): ... + def put(self, url: str, data: RequestData | None = None): ... + def delete(self, url: str, params: dict[str, Incomplete] | None = None, data: RequestData | None = None): ... + +class Response: + def __init__(self, status_code: int, content, headers: Mapping[str, str]) -> None: ... + def content(self): ... + +class JsonResponse(Response): + def __init__(self, response: requests.Response | RequestsResponse) -> None: ... + +class PlainResponse(Response): + def __init__(self, response: requests.Response | RequestsResponse) -> None: ... + +class EmptyResponse(Response): + def __init__(self, status_code: int) -> None: ... diff --git a/stubs/auth0-python/auth0/rest_async.pyi b/stubs/auth0-python/auth0/rest_async.pyi new file mode 100644 index 000000000000..7bc59c471611 --- /dev/null +++ b/stubs/auth0-python/auth0/rest_async.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete + +from auth0.exceptions import RateLimitError as RateLimitError +from auth0.types import RequestData as RequestData + +from .rest import ( + EmptyResponse as EmptyResponse, + JsonResponse as JsonResponse, + PlainResponse as PlainResponse, + Response as Response, + RestClient as RestClient, +) + +class AsyncRestClient(RestClient): + timeout: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def set_session(self, session) -> None: ... + async def get(self, url: str, params: dict[str, Incomplete] | None = None, headers: dict[str, str] | None = None): ... + async def post(self, url: str, data: RequestData | None = None, headers: dict[str, str] | None = None): ... + async def file_post(self, *args, **kwargs): ... + async def patch(self, url: str, data: RequestData | None = None): ... + async def put(self, url: str, data: RequestData | None = None): ... + async def delete(self, url: str, params: dict[str, Incomplete] | None = None, data: RequestData | None = None): ... + +class RequestsResponse: + status_code: Incomplete + headers: Incomplete + text: Incomplete + def __init__(self, response, text: str) -> None: ... diff --git a/stubs/auth0-python/auth0/types.pyi b/stubs/auth0-python/auth0/types.pyi new file mode 100644 index 000000000000..385643dc72d0 --- /dev/null +++ b/stubs/auth0-python/auth0/types.pyi @@ -0,0 +1,5 @@ +from _typeshed import Incomplete +from typing_extensions import TypeAlias + +TimeoutType: TypeAlias = float | tuple[float, float] +RequestData: TypeAlias = dict[str, Incomplete] | list[Incomplete] diff --git a/stubs/auth0-python/auth0/utils.pyi b/stubs/auth0-python/auth0/utils.pyi new file mode 100644 index 000000000000..611cd037c258 --- /dev/null +++ b/stubs/auth0-python/auth0/utils.pyi @@ -0,0 +1 @@ +def is_async_available() -> bool: ... 
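
The TimeoutType alias above (float | tuple[float, float]) mirrors what the underlying requests layer accepts, where the tuple form is a (connect, read) timeout pair. A hedged sketch of threading those options through one of the management clients:

    from auth0.management.users import Users
    from auth0.rest import RestClientOptions

    # A single float or a (connect, read) tuple both satisfy TimeoutType.
    options = RestClientOptions(telemetry=False, timeout=(2.0, 10.0), retries=5)
    users = Users(domain="tenant.example.auth0.com", token="MGMT_API_TOKEN", rest_options=options)
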
From 89379b14b09a085bba8b6b6fd55512588e5d7c02 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 3 Apr 2025 01:43:54 -0400 Subject: [PATCH 188/388] Enable Ruff flake8-todos (TD) (#13748) --- pyproject.toml | 6 ++++++ stdlib/_ctypes.pyi | 2 +- stdlib/colorsys.pyi | 2 +- stdlib/ctypes/__init__.pyi | 4 ++-- stdlib/distutils/fancy_getopt.pyi | 2 +- stdlib/logging/handlers.pyi | 2 +- stdlib/optparse.pyi | 2 +- stdlib/subprocess.pyi | 2 +- stdlib/urllib/request.pyi | 2 +- stdlib/xml/etree/ElementTree.pyi | 2 +- stubs/geopandas/geopandas/tools/geocoding.pyi | 2 +- stubs/shapely/shapely/_typing.pyi | 2 +- stubs/tensorflow/tensorflow/__init__.pyi | 2 +- stubs/tqdm/tqdm/contrib/logging.pyi | 2 +- 14 files changed, 20 insertions(+), 14 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 36af3e424963..72d0a4df4c9a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,6 +63,7 @@ select = [ "RUF", # Ruff-specific and unused-noqa "SLOT", # flake8-slots "T10", # flake8-debugger + "TD", # flake8-todos "TRY", # tryceratops "UP", # pyupgrade "YTT", # flake8-2020 @@ -167,6 +168,11 @@ ignore = [ "PLR2004", # Magic value used in comparison, consider replacing `{value}` with a constant variable # Keep codeflow path separation explicit "PLR5501", # Use `elif` instead of `else` then `if`, to reduce indentation + # Allow FIXME + "TD001", # Invalid TODO tag: `{tag}` + # Git blame is sufficient + "TD002", # Missing author in TODO; + "TD003", # Missing issue link for this TODO # Mostly from scripts and tests, it's ok to have messages passed directly to exceptions "TRY003", # Avoid specifying long messages outside the exception class # Slower and more verbose https://github.com/astral-sh/ruff/issues/7871 diff --git a/stdlib/_ctypes.pyi b/stdlib/_ctypes.pyi index 0ce1cb39eff6..690f900eb75a 100644 --- a/stdlib/_ctypes.pyi +++ b/stdlib/_ctypes.pyi @@ -292,7 +292,7 @@ class Array(_CData, Generic[_CT], metaclass=_PyCArrayType): @raw.setter def raw(self, value: ReadableBuffer) -> None: ... value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise - # TODO These methods cannot be annotated correctly at the moment. + # TODO: These methods cannot be annotated correctly at the moment. # All of these "Any"s stand for the array's element type, but it's not possible to use _CT # here, because of a special feature of ctypes. # By default, when accessing an element of an Array[_CT], the returned object has type _CT. diff --git a/stdlib/colorsys.pyi b/stdlib/colorsys.pyi index 443ee828ebfe..7842f80284ef 100644 --- a/stdlib/colorsys.pyi +++ b/stdlib/colorsys.pyi @@ -7,7 +7,7 @@ def hls_to_rgb(h: float, l: float, s: float) -> tuple[float, float, float]: ... def rgb_to_hsv(r: float, g: float, b: float) -> tuple[float, float, float]: ... def hsv_to_rgb(h: float, s: float, v: float) -> tuple[float, float, float]: ... -# TODO undocumented +# TODO: undocumented ONE_SIXTH: float ONE_THIRD: float TWO_THIRD: float diff --git a/stdlib/ctypes/__init__.pyi b/stdlib/ctypes/__init__.pyi index 4f44975d657f..459c01a60fd1 100644 --- a/stdlib/ctypes/__init__.pyi +++ b/stdlib/ctypes/__init__.pyi @@ -158,7 +158,7 @@ def ARRAY(typ: _CT, len: int) -> Array[_CT]: ... # Soft Deprecated, no plans to if sys.platform == "win32": def DllCanUnloadNow() -> int: ... - def DllGetClassObject(rclsid: Any, riid: Any, ppv: Any) -> int: ... # TODO not documented + def DllGetClassObject(rclsid: Any, riid: Any, ppv: Any) -> int: ... 
# TODO: not documented # Actually just an instance of _NamedFuncPointer (aka _CDLLFuncPointer), # but we want to set a more specific __call__ @@ -247,7 +247,7 @@ class c_bool(_SimpleCData[bool]): def __init__(self, value: bool = ...) -> None: ... if sys.platform == "win32": - class HRESULT(_SimpleCData[int]): ... # TODO undocumented + class HRESULT(_SimpleCData[int]): ... # TODO: undocumented if sys.version_info >= (3, 12): # At runtime, this is an alias for either c_int32 or c_int64, diff --git a/stdlib/distutils/fancy_getopt.pyi b/stdlib/distutils/fancy_getopt.pyi index e66d8cc9f2c5..f3fa2a1255a6 100644 --- a/stdlib/distutils/fancy_getopt.pyi +++ b/stdlib/distutils/fancy_getopt.pyi @@ -13,7 +13,7 @@ longopt_xlate: Final[dict[int, int]] class FancyGetopt: def __init__(self, option_table: list[_Option] | None = None) -> None: ... - # TODO kinda wrong, `getopt(object=object())` is invalid + # TODO: kinda wrong, `getopt(object=object())` is invalid @overload def getopt( self, args: _SliceableT[_StrSequenceT_co] | None = None, object: None = None diff --git a/stdlib/logging/handlers.pyi b/stdlib/logging/handlers.pyi index d594d6569a7e..7f913bd97fd7 100644 --- a/stdlib/logging/handlers.pyi +++ b/stdlib/logging/handlers.pyi @@ -191,7 +191,7 @@ class SysLogHandler(Handler): class NTEventLogHandler(Handler): def __init__(self, appname: str, dllname: str | None = None, logtype: str = "Application") -> None: ... def getEventCategory(self, record: LogRecord) -> int: ... - # TODO correct return value? + # TODO: correct return value? def getEventType(self, record: LogRecord) -> int: ... def getMessageID(self, record: LogRecord) -> int: ... diff --git a/stdlib/optparse.pyi b/stdlib/optparse.pyi index 56a4574bdba8..8b7fcd82e5a5 100644 --- a/stdlib/optparse.pyi +++ b/stdlib/optparse.pyi @@ -239,7 +239,7 @@ class Values: # __getattr__ doesn't exist, but anything passed as a default to __init__ # is set on the instance. def __getattr__(self, name: str) -> Any: ... - # TODO mypy infers -> object for __getattr__ if __setattr__ has `value: object` + # TODO: mypy infers -> object for __getattr__ if __setattr__ has `value: object` def __setattr__(self, name: str, value: Any, /) -> None: ... def __eq__(self, other: object) -> bool: ... diff --git a/stdlib/subprocess.pyi b/stdlib/subprocess.pyi index fef35b56945a..21ac3a6a2798 100644 --- a/stdlib/subprocess.pyi +++ b/stdlib/subprocess.pyi @@ -2556,7 +2556,7 @@ class Popen(Generic[AnyStr]): def poll(self) -> int | None: ... def wait(self, timeout: float | None = None) -> int: ... # morally the members of the returned tuple should be optional - # TODO this should allow ReadableBuffer for Popen[bytes], but adding + # TODO: this should allow ReadableBuffer for Popen[bytes], but adding # overloads for that runs into a mypy bug (python/mypy#14070). def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: ... def send_signal(self, sig: int) -> None: ... diff --git a/stdlib/urllib/request.pyi b/stdlib/urllib/request.pyi index ad4f91fc31ae..1f453fd1e1d6 100644 --- a/stdlib/urllib/request.pyi +++ b/stdlib/urllib/request.pyi @@ -175,7 +175,7 @@ class HTTPCookieProcessor(BaseHandler): class ProxyHandler(BaseHandler): def __init__(self, proxies: dict[str, str] | None = None) -> None: ... def proxy_open(self, req: Request, proxy: str, type: str) -> _UrlopenRet | None: ... 
# undocumented - # TODO add a method for every (common) proxy protocol + # TODO: add a method for every (common) proxy protocol class HTTPPasswordMgr: def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ... diff --git a/stdlib/xml/etree/ElementTree.pyi b/stdlib/xml/etree/ElementTree.pyi index 4a9113868d7e..198e1c3d6435 100644 --- a/stdlib/xml/etree/ElementTree.pyi +++ b/stdlib/xml/etree/ElementTree.pyi @@ -366,7 +366,7 @@ _E = TypeVar("_E", default=Element) class XMLParser(Generic[_E]): parser: XMLParserType target: _Target - # TODO-what is entity used for??? + # TODO: what is entity used for??? entity: dict[str, str] version: str def __init__(self, *, target: _Target | None = None, encoding: str | None = None) -> None: ... diff --git a/stubs/geopandas/geopandas/tools/geocoding.pyi b/stubs/geopandas/geopandas/tools/geocoding.pyi index cb6474150141..3748d7945de1 100644 --- a/stubs/geopandas/geopandas/tools/geocoding.pyi +++ b/stubs/geopandas/geopandas/tools/geocoding.pyi @@ -10,7 +10,7 @@ class _GeoCoder(Protocol): def geocode(self, query: str, /): ... def reverse(self, coords, /, exactly_one: bool = ...): ... -# TODO Use something like `provider: Callable[P, _GeoCoder], **kwargs: P.kwargs` in the functions +# TODO: Use something like `provider: Callable[P, _GeoCoder], **kwargs: P.kwargs` in the functions # below if this ever becomes a thing def geocode(strings: Iterable[str], provider: str | Callable[..., _GeoCoder] | None = None, **kwargs) -> GeoDataFrame: ... def reverse_geocode( diff --git a/stubs/shapely/shapely/_typing.pyi b/stubs/shapely/shapely/_typing.pyi index 226fa7a1d1d0..224af9553f03 100644 --- a/stubs/shapely/shapely/_typing.pyi +++ b/stubs/shapely/shapely/_typing.pyi @@ -24,7 +24,7 @@ OptGeoT = TypeVar("OptGeoT", bound=Geometry | None) # noqa: Y001 class SupportsArray(Protocol[_DType_co]): def __array__(self) -> np.ndarray[Any, _DType_co]: ... -# TODO revisit when mypy is happy with generic recursive type alias +# TODO: revisit when mypy is happy with generic recursive type alias # NestedSequence: TypeAlias = Sequence[_T] | Sequence[NestedSequence[_T]] NestedSequence: TypeAlias = Sequence[_T] | Sequence[Sequence[_T]] | Sequence[Sequence[Sequence[_T]]] DualArrayLike: TypeAlias = SupportsArray[_DType] | NestedSequence[SupportsArray[_DType]] | NestedSequence[_T] diff --git a/stubs/tensorflow/tensorflow/__init__.pyi b/stubs/tensorflow/tensorflow/__init__.pyi index 6ef9e1cb54f3..873a2b860f0e 100644 --- a/stubs/tensorflow/tensorflow/__init__.pyi +++ b/stubs/tensorflow/tensorflow/__init__.pyi @@ -147,7 +147,7 @@ class VariableAggregation(Enum): class _VariableMetaclass(type): ... # Variable class in intent/documentation is a Tensor. In implementation there's -# TODO comment to make it Tensor. It is not actually Tensor type wise, but even +# TODO: comment to make it Tensor. It is not actually Tensor type wise, but even # dynamically patches on most methods of tf.Tensor # https://github.com/tensorflow/tensorflow/blob/9524a636cae9ae3f0554203c1ba7ee29c85fcf12/tensorflow/python/ops/variables.py#L1086. class Variable(Tensor, metaclass=_VariableMetaclass): diff --git a/stubs/tqdm/tqdm/contrib/logging.pyi b/stubs/tqdm/tqdm/contrib/logging.pyi index 3b5bfde1c402..73e0006846a6 100644 --- a/stubs/tqdm/tqdm/contrib/logging.pyi +++ b/stubs/tqdm/tqdm/contrib/logging.pyi @@ -12,7 +12,7 @@ def logging_redirect_tqdm( loggers: Sequence[logging.Logger] | None = None, tqdm_class: type[std_tqdm[Any]] = ... ) -> _GeneratorContextManager[None]: ... 
-# TODO type *args, **kwargs here more precisely +# TODO: type *args, **kwargs here more precisely @overload def tqdm_logging_redirect(*args, tqdm_class: Callable[..., _TqdmT], **kwargs) -> _GeneratorContextManager[_TqdmT]: ... @overload From f68b950b2dbd6b3acac56993b90d93004ff36d63 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 3 Apr 2025 07:52:12 +0200 Subject: [PATCH 189/388] [stubsabot] Bump qrcode to 8.1.* (#13779) --- stubs/qrcode/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/qrcode/METADATA.toml b/stubs/qrcode/METADATA.toml index cd791c012433..152711e7a255 100644 --- a/stubs/qrcode/METADATA.toml +++ b/stubs/qrcode/METADATA.toml @@ -1,4 +1,4 @@ -version = "8.0.*" +version = "8.1.*" upstream_repository = "https://github.com/lincolnloop/python-qrcode" # must be a version of Pillow that is py.typed requires = ["Pillow>=10.3.0"] From 8f6ad3cd009cc4e26c9a17bf26e9b011e0ed351a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 3 Apr 2025 07:56:09 +0200 Subject: [PATCH 190/388] [stubsabot] Bump zstd to 1.5.6.7 (#13780) Release: https://pypi.org/pypi/zstd/1.5.6.7 Homepage: https://github.com/sergey-dryabzhinsky/python-zstd Repository: https://github.com/sergey-dryabzhinsky/python-zstd Typeshed stubs: https://github.com/python/typeshed/tree/main/stubs/zstd Diff: https://github.com/sergey-dryabzhinsky/python-zstd/compare/v1.5.6.6...v1.5.6.7 Stubsabot analysis of the diff between the two releases: - Total lines of Python code added: 4. - Total lines of Python code deleted: 3. If stubtest fails for this PR: - Leave this PR open (as a reminder, and to prevent stubsabot from opening another PR) - Fix stubtest failures in another PR, then close this PR Note that you will need to close and re-open the PR in order to trigger CI Co-authored-by: stubsabot <> --- stubs/zstd/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/zstd/METADATA.toml b/stubs/zstd/METADATA.toml index 1797040d3e9d..76bf2c1ba8b6 100644 --- a/stubs/zstd/METADATA.toml +++ b/stubs/zstd/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.5.6.6" +version = "1.5.6.7" upstream_repository = "https://github.com/sergey-dryabzhinsky/python-zstd" From 2018175b9905275b378f2562a4b9692e2a4e7276 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 3 Apr 2025 04:22:53 -0400 Subject: [PATCH 191/388] Exact return types instead of shutil._PathReturn (#13767) --- stdlib/shutil.pyi | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/stdlib/shutil.pyi b/stdlib/shutil.pyi index 0fe560fd9b6a..ea2c29d4625f 100644 --- a/stdlib/shutil.pyi +++ b/stdlib/shutil.pyi @@ -1,6 +1,6 @@ import os import sys -from _typeshed import BytesPath, ExcInfo, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite +from _typeshed import BytesPath, ExcInfo, FileDescriptorOrPath, MaybeNone, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite from collections.abc import Callable, Iterable, Sequence from tarfile import _TarfileFilter from typing import Any, AnyStr, NamedTuple, NoReturn, Protocol, TypeVar, overload @@ -36,9 +36,8 @@ __all__ = [ ] _StrOrBytesPathT = TypeVar("_StrOrBytesPathT", bound=StrOrBytesPath) -# Return value of some functions that may either return a path-like object that was passed in or -# a string -_PathReturn: TypeAlias = Any +_StrPathT = TypeVar("_StrPathT", bound=StrPath) +_BytesPathT = 
TypeVar("_BytesPathT", bound=BytesPath) class Error(OSError): ... class SameFileError(Error): ... @@ -52,23 +51,23 @@ def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: boo def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... @overload -def copy(src: StrPath, dst: StrPath, *, follow_symlinks: bool = True) -> _PathReturn: ... +def copy(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: ... @overload -def copy(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = True) -> _PathReturn: ... +def copy(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... @overload -def copy2(src: StrPath, dst: StrPath, *, follow_symlinks: bool = True) -> _PathReturn: ... +def copy2(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: ... @overload -def copy2(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = True) -> _PathReturn: ... +def copy2(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: ... def copytree( src: StrPath, - dst: StrPath, + dst: _StrPathT, symlinks: bool = False, ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = None, copy_function: Callable[[str, str], object] = ..., ignore_dangling_symlinks: bool = False, dirs_exist_ok: bool = False, -) -> _PathReturn: ... +) -> _StrPathT: ... _OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], str, ExcInfo], object] _OnExcCallback: TypeAlias = Callable[[Callable[..., Any], str, BaseException], object] @@ -129,12 +128,7 @@ _CopyFn: TypeAlias = Callable[[str, str], object] | Callable[[StrPath, StrPath], # N.B. shutil.move appears to take bytes arguments, however, # this does not work when dst is (or is within) an existing directory. # (#6832) -if sys.version_info >= (3, 9): - def move(src: StrPath, dst: StrPath, copy_function: _CopyFn = ...) -> _PathReturn: ... - -else: - # See https://bugs.python.org/issue32689 - def move(src: str, dst: StrPath, copy_function: _CopyFn = ...) -> _PathReturn: ... +def move(src: StrPath, dst: _StrPathT, copy_function: _CopyFn = ...) -> _StrPathT | str | MaybeNone: ... 
class _ntuple_diskusage(NamedTuple): total: int From 1b0dd6d6fd043c8527b8aa4f11754b658df9d033 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 3 Apr 2025 10:35:36 +0200 Subject: [PATCH 192/388] Drop Python 3.8 branches (#13776) --- stdlib/@tests/test_cases/check_importlib.py | 10 +- stdlib/@tests/test_cases/check_platform.py | 6 +- stdlib/@tests/test_cases/check_xml.py | 18 +- .../collections/check_defaultdict-py39.py | 72 --- .../collections/check_defaultdict.py | 70 +++ stdlib/_ctypes.pyi | 7 +- stdlib/_curses.pyi | 14 +- stdlib/_hashlib.pyi | 79 ++- stdlib/_queue.pyi | 8 +- stdlib/_socket.pyi | 65 +-- stdlib/_tracemalloc.pyi | 6 +- stdlib/_weakrefset.pyi | 8 +- stdlib/aifc.pyi | 14 +- stdlib/argparse.pyi | 83 +-- stdlib/array.pyi | 3 - stdlib/asyncio/__init__.pyi | 181 +----- stdlib/asyncio/base_events.pyi | 7 +- stdlib/asyncio/events.pyi | 53 +- stdlib/asyncio/locks.pyi | 30 +- stdlib/asyncio/queues.pyi | 7 +- stdlib/asyncio/tasks.pyi | 4 +- stdlib/asyncio/unix_events.pyi | 37 +- stdlib/base64.pyi | 4 - stdlib/bz2.pyi | 38 +- stdlib/collections/__init__.pyi | 101 ++-- stdlib/compileall.pyi | 26 +- stdlib/concurrent/futures/_base.pyi | 34 +- stdlib/concurrent/futures/process.pyi | 50 +- stdlib/concurrent/futures/thread.pyi | 8 +- stdlib/contextlib.pyi | 11 +- stdlib/ctypes/__init__.pyi | 11 +- stdlib/dataclasses.pyi | 28 +- stdlib/datetime.pyi | 30 +- stdlib/difflib.pyi | 14 +- stdlib/distutils/command/bdist_msi.pyi | 3 +- stdlib/encodings/raw_unicode_escape.pyi | 21 +- stdlib/encodings/unicode_escape.pyi | 21 +- stdlib/enum.pyi | 13 +- stdlib/fcntl.pyi | 10 +- stdlib/filecmp.pyi | 7 +- stdlib/fileinput.pyi | 8 +- stdlib/fractions.pyi | 15 +- stdlib/ftplib.pyi | 49 +- stdlib/functools.pyi | 32 +- stdlib/gc.pyi | 6 +- stdlib/hashlib.pyi | 34 +- stdlib/hmac.pyi | 13 +- stdlib/http/__init__.pyi | 9 +- stdlib/http/client.pyi | 9 +- stdlib/http/cookies.pyi | 8 +- stdlib/imaplib.pyi | 35 +- stdlib/importlib/abc.pyi | 81 ++- stdlib/importlib/metadata/__init__.pyi | 9 +- stdlib/importlib/resources/__init__.pyi | 25 +- stdlib/inspect.pyi | 3 +- stdlib/ipaddress.pyi | 10 +- stdlib/itertools.pyi | 7 +- stdlib/keyword.pyi | 15 +- stdlib/linecache.pyi | 6 +- stdlib/logging/__init__.pyi | 62 +- stdlib/logging/handlers.pyi | 104 ++-- stdlib/mailbox.pyi | 11 +- stdlib/math.pyi | 19 +- stdlib/multiprocessing/managers.pyi | 8 +- stdlib/multiprocessing/pool.pyi | 9 +- stdlib/multiprocessing/queues.pyi | 11 +- stdlib/multiprocessing/shared_memory.pyi | 7 +- stdlib/nntplib.pyi | 5 - stdlib/nt.pyi | 4 +- stdlib/opcode.pyi | 18 +- stdlib/pathlib.pyi | 16 +- stdlib/pkgutil.pyi | 4 +- stdlib/platform.pyi | 53 +- stdlib/plistlib.pyi | 34 +- stdlib/posix.pyi | 11 +- stdlib/pstats.pyi | 39 +- stdlib/queue.pyi | 7 +- stdlib/random.pyi | 25 +- stdlib/re.pyi | 10 +- stdlib/signal.pyi | 5 +- stdlib/smtplib.pyi | 25 +- stdlib/socket.pyi | 121 ++-- stdlib/ssl.pyi | 2 - stdlib/statistics.pyi | 4 +- stdlib/string.pyi | 11 +- stdlib/subprocess.pyi | 551 +----------------- stdlib/sunau.pyi | 4 - stdlib/symtable.pyi | 8 +- stdlib/tarfile.pyi | 6 +- stdlib/tempfile.pyi | 11 +- stdlib/threading.pyi | 7 +- stdlib/time.pyi | 2 +- stdlib/tkinter/__init__.pyi | 267 +++++---- stdlib/tkinter/colorchooser.pyi | 16 +- stdlib/tkinter/commondialog.pyi | 4 +- stdlib/tkinter/dialog.pyi | 4 +- stdlib/tkinter/dnd.pyi | 4 +- stdlib/tkinter/filedialog.pyi | 32 +- stdlib/tkinter/font.pyi | 3 +- stdlib/tkinter/messagebox.pyi | 13 +- stdlib/trace.pyi | 6 +- stdlib/tracemalloc.pyi | 15 +- stdlib/unittest/async_case.pyi | 4 +- 
stdlib/unittest/case.pyi | 47 +- stdlib/unittest/mock.pyi | 3 - stdlib/urllib/parse.pyi | 15 +- stdlib/urllib/response.pyi | 7 +- stdlib/uuid.pyi | 8 +- stdlib/venv/__init__.pyi | 30 +- stdlib/wave.pyi | 9 +- stdlib/weakref.pyi | 38 +- stdlib/xml/dom/minidom.pyi | 128 ++-- stdlib/xml/etree/ElementInclude.pyi | 13 +- stdlib/xml/etree/ElementTree.pyi | 15 +- stdlib/xml/sax/expatreader.pyi | 6 +- stdlib/zipfile/__init__.pyi | 34 +- stdlib/zoneinfo/__init__.pyi | 51 +- 117 files changed, 978 insertions(+), 2594 deletions(-) delete mode 100644 stdlib/@tests/test_cases/collections/check_defaultdict-py39.py create mode 100644 stdlib/@tests/test_cases/collections/check_defaultdict.py diff --git a/stdlib/@tests/test_cases/check_importlib.py b/stdlib/@tests/test_cases/check_importlib.py index 17eefdafc971..72aec6bd30ed 100644 --- a/stdlib/@tests/test_cases/check_importlib.py +++ b/stdlib/@tests/test_cases/check_importlib.py @@ -10,14 +10,14 @@ from types import ModuleType from typing_extensions import Self + # Assert that some Path classes are Traversable. -if sys.version_info >= (3, 9): +def traverse(t: importlib.abc.Traversable) -> None: + pass - def traverse(t: importlib.abc.Traversable) -> None: - pass - traverse(pathlib.Path()) - traverse(zipfile.Path("")) +traverse(pathlib.Path()) +traverse(zipfile.Path("")) class MetaFinder: diff --git a/stdlib/@tests/test_cases/check_platform.py b/stdlib/@tests/test_cases/check_platform.py index efcbd992c90b..9d2c83ce2815 100644 --- a/stdlib/@tests/test_cases/check_platform.py +++ b/stdlib/@tests/test_cases/check_platform.py @@ -1,16 +1,12 @@ from __future__ import annotations import platform -import sys from typing_extensions import assert_type # platform.uname_result emulates a 6 field named tuple, but on 3.9+ the processor # field is lazily evaluated, which results in it being a little funky. 
uname = platform.uname() -if sys.version_info >= (3, 9): - myuname = platform.uname_result("Darwin", "local", "22.5.0", "Darwin Kernel Version 22.5.0", "arm64") -else: - myuname = platform.uname_result("Darwin", "local", "22.5.0", "Darwin Kernel Version 22.5.0", "arm64", "arm") +myuname = platform.uname_result("Darwin", "local", "22.5.0", "Darwin Kernel Version 22.5.0", "arm64") assert_type(uname, platform.uname_result) assert_type(myuname, platform.uname_result) diff --git a/stdlib/@tests/test_cases/check_xml.py b/stdlib/@tests/test_cases/check_xml.py index b485dac8dc29..31a4fa243eaa 100644 --- a/stdlib/@tests/test_cases/check_xml.py +++ b/stdlib/@tests/test_cases/check_xml.py @@ -1,6 +1,5 @@ from __future__ import annotations -import sys from typing_extensions import assert_type from xml.dom.minidom import Document @@ -10,10 +9,9 @@ assert_type(document.toxml(encoding=None), str) assert_type(document.toxml(encoding="UTF8"), bytes) assert_type(document.toxml("UTF8"), bytes) -if sys.version_info >= (3, 9): - assert_type(document.toxml(standalone=True), str) - assert_type(document.toxml("UTF8", True), bytes) - assert_type(document.toxml(encoding="UTF8", standalone=True), bytes) +assert_type(document.toxml(standalone=True), str) +assert_type(document.toxml("UTF8", True), bytes) +assert_type(document.toxml(encoding="UTF8", standalone=True), bytes) # Because toprettyxml can mix positional and keyword variants of the "encoding" argument, which @@ -23,13 +21,11 @@ assert_type(document.toprettyxml(), str) assert_type(document.toprettyxml(encoding=None), str) assert_type(document.toprettyxml(encoding="UTF8"), bytes) -if sys.version_info >= (3, 9): - assert_type(document.toprettyxml(standalone=True), str) - assert_type(document.toprettyxml(encoding="UTF8", standalone=True), bytes) +assert_type(document.toprettyxml(standalone=True), str) +assert_type(document.toprettyxml(encoding="UTF8", standalone=True), bytes) # Test cases unique to toprettyxml assert_type(document.toprettyxml(" "), str) assert_type(document.toprettyxml(" ", "\r\n"), str) assert_type(document.toprettyxml(" ", "\r\n", "UTF8"), bytes) -if sys.version_info >= (3, 9): - assert_type(document.toprettyxml(" ", "\r\n", "UTF8", True), bytes) - assert_type(document.toprettyxml(" ", "\r\n", standalone=True), str) +assert_type(document.toprettyxml(" ", "\r\n", "UTF8", True), bytes) +assert_type(document.toprettyxml(" ", "\r\n", standalone=True), str) diff --git a/stdlib/@tests/test_cases/collections/check_defaultdict-py39.py b/stdlib/@tests/test_cases/collections/check_defaultdict-py39.py deleted file mode 100644 index ff2d1d1f58d9..000000000000 --- a/stdlib/@tests/test_cases/collections/check_defaultdict-py39.py +++ /dev/null @@ -1,72 +0,0 @@ -""" -Tests for `defaultdict.__or__` and `defaultdict.__ror__`. -These methods were only added in py39. 
-""" - -from __future__ import annotations - -import os -import sys -from collections import defaultdict -from typing import Mapping, TypeVar, Union -from typing_extensions import Self, assert_type - -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") - - -if sys.version_info >= (3, 9): - - class CustomDefaultDictSubclass(defaultdict[_KT, _VT]): - pass - - class CustomMappingWithDunderOr(Mapping[_KT, _VT]): - def __or__(self, other: Mapping[_KT, _VT]) -> dict[_KT, _VT]: - return {} - - def __ror__(self, other: Mapping[_KT, _VT]) -> dict[_KT, _VT]: - return {} - - def __ior__(self, other: Mapping[_KT, _VT]) -> Self: - return self - - def test_defaultdict_dot_or( - a: defaultdict[int, int], - b: CustomDefaultDictSubclass[int, int], - c: defaultdict[str, str], - d: Mapping[int, int], - e: CustomMappingWithDunderOr[str, str], - ) -> None: - assert_type(a | b, defaultdict[int, int]) - - # In contrast to `dict.__or__`, `defaultdict.__or__` returns `Self` if called on a subclass of `defaultdict`: - assert_type(b | a, CustomDefaultDictSubclass[int, int]) - - assert_type(a | c, defaultdict[Union[int, str], Union[int, str]]) - - # arbitrary mappings are not accepted by `defaultdict.__or__`; - # it has to be a subclass of `dict` - a | d # type: ignore - - # but Mappings such as `os._Environ` or `CustomMappingWithDunderOr`, - # which define `__ror__` methods that accept `dict`, are fine - # (`os._Environ.__(r)or__` always returns `dict`, even if a `defaultdict` is passed): - assert_type(a | os.environ, dict[Union[str, int], Union[str, int]]) - assert_type(os.environ | a, dict[Union[str, int], Union[str, int]]) - - assert_type(c | os.environ, dict[str, str]) - assert_type(c | e, dict[str, str]) - - assert_type(os.environ | c, dict[str, str]) - assert_type(e | c, dict[str, str]) - - # store "untainted" `CustomMappingWithDunderOr[str, str]` to test `__ior__` against ` defaultdict[str, str]` later - # Invalid `e |= a` causes pyright to join `Unknown` to `e`'s type - f = e - - e |= c - e |= a # type: ignore - - c |= f - - c |= a # type: ignore diff --git a/stdlib/@tests/test_cases/collections/check_defaultdict.py b/stdlib/@tests/test_cases/collections/check_defaultdict.py new file mode 100644 index 000000000000..f608f3f3062e --- /dev/null +++ b/stdlib/@tests/test_cases/collections/check_defaultdict.py @@ -0,0 +1,70 @@ +""" +Tests for `defaultdict.__or__` and `defaultdict.__ror__`. 
+""" + +from __future__ import annotations + +import os +from collections import defaultdict +from typing import Mapping, TypeVar, Union +from typing_extensions import Self, assert_type + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + + +class CustomDefaultDictSubclass(defaultdict[_KT, _VT]): + pass + + +class CustomMappingWithDunderOr(Mapping[_KT, _VT]): + def __or__(self, other: Mapping[_KT, _VT]) -> dict[_KT, _VT]: + return {} + + def __ror__(self, other: Mapping[_KT, _VT]) -> dict[_KT, _VT]: + return {} + + def __ior__(self, other: Mapping[_KT, _VT]) -> Self: + return self + + +def test_defaultdict_dot_or( + a: defaultdict[int, int], + b: CustomDefaultDictSubclass[int, int], + c: defaultdict[str, str], + d: Mapping[int, int], + e: CustomMappingWithDunderOr[str, str], +) -> None: + assert_type(a | b, defaultdict[int, int]) + + # In contrast to `dict.__or__`, `defaultdict.__or__` returns `Self` if called on a subclass of `defaultdict`: + assert_type(b | a, CustomDefaultDictSubclass[int, int]) + + assert_type(a | c, defaultdict[Union[int, str], Union[int, str]]) + + # arbitrary mappings are not accepted by `defaultdict.__or__`; + # it has to be a subclass of `dict` + a | d # type: ignore + + # but Mappings such as `os._Environ` or `CustomMappingWithDunderOr`, + # which define `__ror__` methods that accept `dict`, are fine + # (`os._Environ.__(r)or__` always returns `dict`, even if a `defaultdict` is passed): + assert_type(a | os.environ, dict[Union[str, int], Union[str, int]]) + assert_type(os.environ | a, dict[Union[str, int], Union[str, int]]) + + assert_type(c | os.environ, dict[str, str]) + assert_type(c | e, dict[str, str]) + + assert_type(os.environ | c, dict[str, str]) + assert_type(e | c, dict[str, str]) + + # store "untainted" `CustomMappingWithDunderOr[str, str]` to test `__ior__` against ` defaultdict[str, str]` later + # Invalid `e |= a` causes pyright to join `Unknown` to `e`'s type + f = e + + e |= c + e |= a # type: ignore + + c |= f + + c |= a # type: ignore diff --git a/stdlib/_ctypes.pyi b/stdlib/_ctypes.pyi index 690f900eb75a..4cbb030bb136 100644 --- a/stdlib/_ctypes.pyi +++ b/stdlib/_ctypes.pyi @@ -4,12 +4,10 @@ from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from abc import abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence from ctypes import CDLL, ArgumentError as ArgumentError, c_void_p +from types import GenericAlias from typing import Any, ClassVar, Generic, TypeVar, final, overload, type_check_only from typing_extensions import Self, TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - _T = TypeVar("_T") _CT = TypeVar("_CT", bound=_CData) @@ -317,8 +315,7 @@ class Array(_CData, Generic[_CT], metaclass=_PyCArrayType): # Can't inherit from Sized because the metaclass conflict between # Sized and _CData prevents using _CDataMeta. def __len__(self) -> int: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... def addressof(obj: _CData | _CDataType, /) -> int: ... def alignment(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: ... diff --git a/stdlib/_curses.pyi b/stdlib/_curses.pyi index 52c5185727e7..23dead01e6ca 100644 --- a/stdlib/_curses.pyi +++ b/stdlib/_curses.pyi @@ -292,11 +292,8 @@ def erasechar() -> bytes: ... def filter() -> None: ... def flash() -> None: ... def flushinp() -> None: ... 
- -if sys.version_info >= (3, 9): - def get_escdelay() -> int: ... - def get_tabsize() -> int: ... - +def get_escdelay() -> int: ... +def get_tabsize() -> int: ... def getmouse() -> tuple[int, int, int, int, int]: ... def getsyx() -> tuple[int, int]: ... def getwin(file: SupportsRead[bytes], /) -> window: ... @@ -341,11 +338,8 @@ def resetty() -> None: ... def resize_term(nlines: int, ncols: int, /) -> None: ... def resizeterm(nlines: int, ncols: int, /) -> None: ... def savetty() -> None: ... - -if sys.version_info >= (3, 9): - def set_escdelay(ms: int, /) -> None: ... - def set_tabsize(size: int, /) -> None: ... - +def set_escdelay(ms: int, /) -> None: ... +def set_tabsize(size: int, /) -> None: ... def setsyx(y: int, x: int, /) -> None: ... def setupterm(term: str | None = None, fd: int = -1) -> None: ... def start_color() -> None: ... diff --git a/stdlib/_hashlib.pyi b/stdlib/_hashlib.pyi index e91f2cdb331c..746b1657e2db 100644 --- a/stdlib/_hashlib.pyi +++ b/stdlib/_hashlib.pyi @@ -37,53 +37,42 @@ class HASH: if sys.version_info >= (3, 10): class UnsupportedDigestmodError(ValueError): ... -if sys.version_info >= (3, 9): - class HASHXOF(HASH): - def digest(self, length: int) -> bytes: ... # type: ignore[override] - def hexdigest(self, length: int) -> str: ... # type: ignore[override] +class HASHXOF(HASH): + def digest(self, length: int) -> bytes: ... # type: ignore[override] + def hexdigest(self, length: int) -> str: ... # type: ignore[override] - @final - class HMAC: - @property - def digest_size(self) -> int: ... - @property - def block_size(self) -> int: ... - @property - def name(self) -> str: ... - def copy(self) -> Self: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def update(self, msg: ReadableBuffer) -> None: ... - - @overload - def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: ... - @overload - def compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... - def get_fips_mode() -> int: ... - def hmac_new(key: bytes | bytearray, msg: ReadableBuffer = b"", digestmod: _DigestMod = None) -> HMAC: ... - def new(name: str, string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha3_224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha3_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha3_384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha3_512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_shake_128(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... - def openssl_shake_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... - -else: - def new(name: str, string: ReadableBuffer = b"") -> HASH: ... - def openssl_md5(string: ReadableBuffer = b"") -> HASH: ... 
- def openssl_sha1(string: ReadableBuffer = b"") -> HASH: ... - def openssl_sha224(string: ReadableBuffer = b"") -> HASH: ... - def openssl_sha256(string: ReadableBuffer = b"") -> HASH: ... - def openssl_sha384(string: ReadableBuffer = b"") -> HASH: ... - def openssl_sha512(string: ReadableBuffer = b"") -> HASH: ... +@final +class HMAC: + @property + def digest_size(self) -> int: ... + @property + def block_size(self) -> int: ... + @property + def name(self) -> str: ... + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, msg: ReadableBuffer) -> None: ... +@overload +def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: ... +@overload +def compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... +def get_fips_mode() -> int: ... +def hmac_new(key: bytes | bytearray, msg: ReadableBuffer = b"", digestmod: _DigestMod = None) -> HMAC: ... +def new(name: str, string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_sha3_224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_sha3_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_sha3_384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_sha3_512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... +def openssl_shake_128(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... +def openssl_shake_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... def hmac_digest(key: bytes | bytearray, msg: ReadableBuffer, digest: str) -> bytes: ... def pbkdf2_hmac( hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = None diff --git a/stdlib/_queue.pyi b/stdlib/_queue.pyi index 0d4caea7442e..f98397b132ab 100644 --- a/stdlib/_queue.pyi +++ b/stdlib/_queue.pyi @@ -1,9 +1,6 @@ -import sys +from types import GenericAlias from typing import Any, Generic, TypeVar -if sys.version_info >= (3, 9): - from types import GenericAlias - _T = TypeVar("_T") class Empty(Exception): ... @@ -16,5 +13,4 @@ class SimpleQueue(Generic[_T]): def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... def put_nowait(self, item: _T) -> None: ... def qsize(self) -> int: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
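With 3.8 out of the picture, guards like the one just removed from `SimpleQueue` become dead
branches: every interpreter the stubs still target already provides `__class_getitem__` at
runtime, so the stub can declare it unconditionally. A minimal sketch of what that means for
a caller on 3.9+:

from queue import SimpleQueue

q: SimpleQueue[int] = SimpleQueue()   # usable directly in annotations...
q.put(1)
print(q.get())                        # 1
SimpleQueue[int]                      # ...and subscriptable at runtime, so no version guard is needed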
diff --git a/stdlib/_socket.pyi b/stdlib/_socket.pyi index 649728257c1a..1a25fc6b13a8 100644 --- a/stdlib/_socket.pyi +++ b/stdlib/_socket.pyi @@ -192,7 +192,7 @@ if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "lin IPPROTO_BIP: int # Not FreeBSD either IPPROTO_MOBILE: int # Not FreeBSD either IPPROTO_VRRP: int # Not FreeBSD either -if sys.version_info >= (3, 9) and sys.platform == "linux": +if sys.platform == "linux": # Availability: Linux >= 2.6.20, FreeBSD >= 10.1 IPPROTO_UDPLITE: int if sys.version_info >= (3, 10) and sys.platform == "linux": @@ -250,29 +250,26 @@ IPV6_RECVTCLASS: int IPV6_TCLASS: int IPV6_UNICAST_HOPS: int IPV6_V6ONLY: int -if sys.version_info >= (3, 9) or sys.platform != "darwin": - IPV6_DONTFRAG: int - IPV6_HOPLIMIT: int - IPV6_HOPOPTS: int - IPV6_PKTINFO: int - IPV6_RECVRTHDR: int - IPV6_RTHDR: int +IPV6_DONTFRAG: int +IPV6_HOPLIMIT: int +IPV6_HOPOPTS: int +IPV6_PKTINFO: int +IPV6_RECVRTHDR: int +IPV6_RTHDR: int if sys.platform != "win32": IPV6_RTHDR_TYPE_0: int - if sys.version_info >= (3, 9) or sys.platform != "darwin": - IPV6_DSTOPTS: int - IPV6_NEXTHOP: int - IPV6_PATHMTU: int - IPV6_RECVDSTOPTS: int - IPV6_RECVHOPLIMIT: int - IPV6_RECVHOPOPTS: int - IPV6_RECVPATHMTU: int - IPV6_RECVPKTINFO: int - IPV6_RTHDRDSTOPTS: int + IPV6_DSTOPTS: int + IPV6_NEXTHOP: int + IPV6_PATHMTU: int + IPV6_RECVDSTOPTS: int + IPV6_RECVHOPLIMIT: int + IPV6_RECVHOPOPTS: int + IPV6_RECVPATHMTU: int + IPV6_RECVPKTINFO: int + IPV6_RTHDRDSTOPTS: int if sys.platform != "win32" and sys.platform != "linux": - if sys.version_info >= (3, 9) or sys.platform != "darwin": - IPV6_USE_MIN_MTU: int + IPV6_USE_MIN_MTU: int EAI_AGAIN: int EAI_BADFLAGS: int @@ -414,16 +411,10 @@ if sys.platform == "linux": if sys.platform == "linux": # Availability: Linux >= 3.6 CAN_RAW_FD_FRAMES: int - -if sys.platform == "linux" and sys.version_info >= (3, 9): # Availability: Linux >= 4.1 CAN_RAW_JOIN_FILTERS: int - -if sys.platform == "linux": # Availability: Linux >= 2.6.25 CAN_ISOTP: int - -if sys.platform == "linux" and sys.version_info >= (3, 9): # Availability: Linux >= 5.4 CAN_J1939: int @@ -566,18 +557,16 @@ if sys.platform == "linux": SO_VM_SOCKETS_BUFFER_MIN_SIZE: int VM_SOCKETS_INVALID_VERSION: int # undocumented -if sys.platform != "win32" or sys.version_info >= (3, 9): - # Documented as only available on BSD, macOS, but empirically sometimes - # available on Windows - if sys.platform != "linux": - AF_LINK: int +# Documented as only available on BSD, macOS, but empirically sometimes +# available on Windows +if sys.platform != "linux": + AF_LINK: int has_ipv6: bool if sys.platform != "darwin" and sys.platform != "linux": - if sys.platform != "win32" or sys.version_info >= (3, 9): - BDADDR_ANY: str - BDADDR_LOCAL: str + BDADDR_ANY: str + BDADDR_LOCAL: str if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": HCI_FILTER: int # not in NetBSD or DragonFlyBSD @@ -649,8 +638,7 @@ if sys.platform == "darwin": SYSPROTO_CONTROL: int if sys.platform != "darwin" and sys.platform != "linux": - if sys.version_info >= (3, 9) or sys.platform != "win32": - AF_BLUETOOTH: int + AF_BLUETOOTH: int if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": # Linux and some BSD support is explicit in the docs @@ -659,10 +647,9 @@ if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "lin BTPROTO_L2CAP: int BTPROTO_SCO: int # not in FreeBSD if sys.platform != "darwin" and sys.platform != "linux": - if sys.version_info >= 
(3, 9) or sys.platform != "win32": - BTPROTO_RFCOMM: int + BTPROTO_RFCOMM: int -if sys.version_info >= (3, 9) and sys.platform == "linux": +if sys.platform == "linux": UDPLITE_RECV_CSCOV: int UDPLITE_SEND_CSCOV: int diff --git a/stdlib/_tracemalloc.pyi b/stdlib/_tracemalloc.pyi index b1aeb710233e..e9720f46692c 100644 --- a/stdlib/_tracemalloc.pyi +++ b/stdlib/_tracemalloc.pyi @@ -1,4 +1,3 @@ -import sys from collections.abc import Sequence from tracemalloc import _FrameTuple, _TraceTuple @@ -9,9 +8,6 @@ def get_traceback_limit() -> int: ... def get_traced_memory() -> tuple[int, int]: ... def get_tracemalloc_memory() -> int: ... def is_tracing() -> bool: ... - -if sys.version_info >= (3, 9): - def reset_peak() -> None: ... - +def reset_peak() -> None: ... def start(nframe: int = 1, /) -> None: ... def stop() -> None: ... diff --git a/stdlib/_weakrefset.pyi b/stdlib/_weakrefset.pyi index b55318528208..dad1ed7a4fb5 100644 --- a/stdlib/_weakrefset.pyi +++ b/stdlib/_weakrefset.pyi @@ -1,11 +1,8 @@ -import sys from collections.abc import Iterable, Iterator, MutableSet +from types import GenericAlias from typing import Any, ClassVar, TypeVar, overload from typing_extensions import Self -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = ["WeakSet"] _S = TypeVar("_S") @@ -48,5 +45,4 @@ class WeakSet(MutableSet[_T]): def union(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def __or__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def isdisjoint(self, other: Iterable[_T]) -> bool: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/stdlib/aifc.pyi b/stdlib/aifc.pyi index 05bf53986b29..bfe12c6af2b0 100644 --- a/stdlib/aifc.pyi +++ b/stdlib/aifc.pyi @@ -1,12 +1,8 @@ -import sys from types import TracebackType from typing import IO, Any, Literal, NamedTuple, overload from typing_extensions import Self, TypeAlias -if sys.version_info >= (3, 9): - __all__ = ["Error", "open"] -else: - __all__ = ["Error", "open", "openfp"] +__all__ = ["Error", "open"] class Error(Exception): ... @@ -81,11 +77,3 @@ def open(f: _File, mode: Literal["r", "rb"]) -> Aifc_read: ... def open(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... @overload def open(f: _File, mode: str | None = None) -> Any: ... - -if sys.version_info < (3, 9): - @overload - def openfp(f: _File, mode: Literal["r", "rb"]) -> Aifc_read: ... - @overload - def openfp(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... - @overload - def openfp(f: _File, mode: str | None = None) -> Any: ... diff --git a/stdlib/argparse.pyi b/stdlib/argparse.pyi index 029bfeefe4b3..32beaff14696 100644 --- a/stdlib/argparse.pyi +++ b/stdlib/argparse.pyi @@ -17,6 +17,7 @@ __all__ = [ "MetavarTypeHelpFormatter", "Namespace", "Action", + "BooleanOptionalAction", "ONE_OR_MORE", "OPTIONAL", "PARSER", @@ -25,9 +26,6 @@ __all__ = [ "ZERO_OR_MORE", ] -if sys.version_info >= (3, 9): - __all__ += ["BooleanOptionalAction"] - _T = TypeVar("_T") _ActionT = TypeVar("_ActionT", bound=Action) _ArgumentParserT = TypeVar("_ArgumentParserT", bound=ArgumentParser) @@ -132,40 +130,22 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): _subparsers: _ArgumentGroup | None # Note: the constructor arguments are also used in _SubParsersAction.add_parser. 
- if sys.version_info >= (3, 9): - def __init__( - self, - prog: str | None = None, - usage: str | None = None, - description: str | None = None, - epilog: str | None = None, - parents: Sequence[ArgumentParser] = [], - formatter_class: _FormatterClass = ..., - prefix_chars: str = "-", - fromfile_prefix_chars: str | None = None, - argument_default: Any = None, - conflict_handler: str = "error", - add_help: bool = True, - allow_abbrev: bool = True, - exit_on_error: bool = True, - ) -> None: ... - else: - def __init__( - self, - prog: str | None = None, - usage: str | None = None, - description: str | None = None, - epilog: str | None = None, - parents: Sequence[ArgumentParser] = [], - formatter_class: _FormatterClass = ..., - prefix_chars: str = "-", - fromfile_prefix_chars: str | None = None, - argument_default: Any = None, - conflict_handler: str = "error", - add_help: bool = True, - allow_abbrev: bool = True, - ) -> None: ... - + def __init__( + self, + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, + parents: Sequence[ArgumentParser] = [], + formatter_class: _FormatterClass = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, + ) -> None: ... @overload def parse_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... @overload @@ -352,8 +332,7 @@ class Action(_AttributeHolder): def __call__( self, parser: ArgumentParser, namespace: Namespace, values: str | Sequence[Any] | None, option_string: str | None = None ) -> None: ... - if sys.version_info >= (3, 9): - def format_usage(self) -> str: ... + def format_usage(self) -> str: ... if sys.version_info >= (3, 12): class BooleanOptionalAction(Action): @@ -418,7 +397,7 @@ if sys.version_info >= (3, 12): metavar: str | tuple[str, ...] | None = sentinel, ) -> None: ... -elif sys.version_info >= (3, 9): +else: class BooleanOptionalAction(Action): @overload def __init__( @@ -713,29 +692,6 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): exit_on_error: bool = ..., **kwargs: Any, # Accepting any additional kwargs for custom parser classes ) -> _ArgumentParserT: ... - elif sys.version_info >= (3, 9): - def add_parser( - self, - name: str, - *, - help: str | None = ..., - aliases: Sequence[str] = ..., - # Kwargs from ArgumentParser constructor - prog: str | None = ..., - usage: str | None = ..., - description: str | None = ..., - epilog: str | None = ..., - parents: Sequence[_ArgumentParserT] = ..., - formatter_class: _FormatterClass = ..., - prefix_chars: str = ..., - fromfile_prefix_chars: str | None = ..., - argument_default: Any = ..., - conflict_handler: str = ..., - add_help: bool = ..., - allow_abbrev: bool = ..., - exit_on_error: bool = ..., - **kwargs: Any, # Accepting any additional kwargs for custom parser classes - ) -> _ArgumentParserT: ... else: def add_parser( self, @@ -756,6 +712,7 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): conflict_handler: str = ..., add_help: bool = ..., allow_abbrev: bool = ..., + exit_on_error: bool = ..., **kwargs: Any, # Accepting any additional kwargs for custom parser classes ) -> _ArgumentParserT: ... 
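`BooleanOptionalAction` has been importable since 3.9, which is why the stub can now list it
in `__all__` unconditionally. A short usage sketch of the paired `--flag/--no-flag` switch it
generates (flag name invented):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--cache", action=argparse.BooleanOptionalAction, default=True)
print(parser.parse_args(["--no-cache"]).cache)  # False
print(parser.parse_args([]).cache)              # True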
diff --git a/stdlib/array.pyi b/stdlib/array.pyi index 19ec8c1e78f9..b26336f3e969 100644 --- a/stdlib/array.pyi +++ b/stdlib/array.pyi @@ -60,9 +60,6 @@ class array(MutableSequence[_T]): def tofile(self, f: SupportsWrite[bytes], /) -> None: ... def tolist(self) -> list[_T]: ... def tounicode(self) -> str: ... - if sys.version_info < (3, 9): - def fromstring(self, buffer: str | ReadableBuffer, /) -> None: ... - def tostring(self) -> bytes: ... __hash__: ClassVar[None] # type: ignore[assignment] def __len__(self) -> int: ... diff --git a/stdlib/asyncio/__init__.pyi b/stdlib/asyncio/__init__.pyi index e47f640a1f9b..c314acbea1ca 100644 --- a/stdlib/asyncio/__init__.pyi +++ b/stdlib/asyncio/__init__.pyi @@ -18,11 +18,9 @@ from .runners import * from .streams import * from .subprocess import * from .tasks import * +from .threads import * from .transports import * -if sys.version_info >= (3, 9): - from .threads import * - if sys.version_info >= (3, 11): from .taskgroups import * from .timeouts import * @@ -412,7 +410,7 @@ if sys.platform == "win32": "WindowsSelectorEventLoopPolicy", # from windows_events "WindowsProactorEventLoopPolicy", # from windows_events ) - elif sys.version_info >= (3, 9): + else: __all__ = ( "BaseEventLoop", # from base_events "Server", # from base_events @@ -499,91 +497,6 @@ if sys.platform == "win32": "WindowsSelectorEventLoopPolicy", # from windows_events "WindowsProactorEventLoopPolicy", # from windows_events ) - else: - __all__ = ( - "BaseEventLoop", # from base_events - "coroutine", # from coroutines - "iscoroutinefunction", # from coroutines - "iscoroutine", # from coroutines - "AbstractEventLoopPolicy", # from events - "AbstractEventLoop", # from events - "AbstractServer", # from events - "Handle", # from events - "TimerHandle", # from events - "get_event_loop_policy", # from events - "set_event_loop_policy", # from events - "get_event_loop", # from events - "set_event_loop", # from events - "new_event_loop", # from events - "get_child_watcher", # from events - "set_child_watcher", # from events - "_set_running_loop", # from events - "get_running_loop", # from events - "_get_running_loop", # from events - "CancelledError", # from exceptions - "InvalidStateError", # from exceptions - "TimeoutError", # from exceptions - "IncompleteReadError", # from exceptions - "LimitOverrunError", # from exceptions - "SendfileNotAvailableError", # from exceptions - "Future", # from futures - "wrap_future", # from futures - "isfuture", # from futures - "Lock", # from locks - "Event", # from locks - "Condition", # from locks - "Semaphore", # from locks - "BoundedSemaphore", # from locks - "BaseProtocol", # from protocols - "Protocol", # from protocols - "DatagramProtocol", # from protocols - "SubprocessProtocol", # from protocols - "BufferedProtocol", # from protocols - "run", # from runners - "Queue", # from queues - "PriorityQueue", # from queues - "LifoQueue", # from queues - "QueueFull", # from queues - "QueueEmpty", # from queues - "StreamReader", # from streams - "StreamWriter", # from streams - "StreamReaderProtocol", # from streams - "open_connection", # from streams - "start_server", # from streams - "create_subprocess_exec", # from subprocess - "create_subprocess_shell", # from subprocess - "Task", # from tasks - "create_task", # from tasks - "FIRST_COMPLETED", # from tasks - "FIRST_EXCEPTION", # from tasks - "ALL_COMPLETED", # from tasks - "wait", # from tasks - "wait_for", # from tasks - "as_completed", # from tasks - "sleep", # from tasks - "gather", # from tasks - 
"shield", # from tasks - "ensure_future", # from tasks - "run_coroutine_threadsafe", # from tasks - "current_task", # from tasks - "all_tasks", # from tasks - "_register_task", # from tasks - "_unregister_task", # from tasks - "_enter_task", # from tasks - "_leave_task", # from tasks - "BaseTransport", # from transports - "ReadTransport", # from transports - "WriteTransport", # from transports - "Transport", # from transports - "DatagramTransport", # from transports - "SubprocessTransport", # from transports - "SelectorEventLoop", # from windows_events - "ProactorEventLoop", # from windows_events - "IocpProactor", # from windows_events - "DefaultEventLoopPolicy", # from windows_events - "WindowsSelectorEventLoopPolicy", # from windows_events - "WindowsProactorEventLoopPolicy", # from windows_events - ) else: if sys.version_info >= (3, 14): __all__ = ( @@ -974,7 +887,7 @@ else: "ThreadedChildWatcher", # from unix_events "DefaultEventLoopPolicy", # from unix_events ) - elif sys.version_info >= (3, 9): + else: __all__ = ( "BaseEventLoop", # from base_events "Server", # from base_events @@ -1065,94 +978,6 @@ else: "ThreadedChildWatcher", # from unix_events "DefaultEventLoopPolicy", # from unix_events ) - else: - __all__ = ( - "BaseEventLoop", # from base_events - "coroutine", # from coroutines - "iscoroutinefunction", # from coroutines - "iscoroutine", # from coroutines - "AbstractEventLoopPolicy", # from events - "AbstractEventLoop", # from events - "AbstractServer", # from events - "Handle", # from events - "TimerHandle", # from events - "get_event_loop_policy", # from events - "set_event_loop_policy", # from events - "get_event_loop", # from events - "set_event_loop", # from events - "new_event_loop", # from events - "get_child_watcher", # from events - "set_child_watcher", # from events - "_set_running_loop", # from events - "get_running_loop", # from events - "_get_running_loop", # from events - "CancelledError", # from exceptions - "InvalidStateError", # from exceptions - "TimeoutError", # from exceptions - "IncompleteReadError", # from exceptions - "LimitOverrunError", # from exceptions - "SendfileNotAvailableError", # from exceptions - "Future", # from futures - "wrap_future", # from futures - "isfuture", # from futures - "Lock", # from locks - "Event", # from locks - "Condition", # from locks - "Semaphore", # from locks - "BoundedSemaphore", # from locks - "BaseProtocol", # from protocols - "Protocol", # from protocols - "DatagramProtocol", # from protocols - "SubprocessProtocol", # from protocols - "BufferedProtocol", # from protocols - "run", # from runners - "Queue", # from queues - "PriorityQueue", # from queues - "LifoQueue", # from queues - "QueueFull", # from queues - "QueueEmpty", # from queues - "StreamReader", # from streams - "StreamWriter", # from streams - "StreamReaderProtocol", # from streams - "open_connection", # from streams - "start_server", # from streams - "open_unix_connection", # from streams - "start_unix_server", # from streams - "create_subprocess_exec", # from subprocess - "create_subprocess_shell", # from subprocess - "Task", # from tasks - "create_task", # from tasks - "FIRST_COMPLETED", # from tasks - "FIRST_EXCEPTION", # from tasks - "ALL_COMPLETED", # from tasks - "wait", # from tasks - "wait_for", # from tasks - "as_completed", # from tasks - "sleep", # from tasks - "gather", # from tasks - "shield", # from tasks - "ensure_future", # from tasks - "run_coroutine_threadsafe", # from tasks - "current_task", # from tasks - "all_tasks", # from tasks - 
"_register_task", # from tasks - "_unregister_task", # from tasks - "_enter_task", # from tasks - "_leave_task", # from tasks - "BaseTransport", # from transports - "ReadTransport", # from transports - "WriteTransport", # from transports - "Transport", # from transports - "DatagramTransport", # from transports - "SubprocessTransport", # from transports - "SelectorEventLoop", # from unix_events - "AbstractChildWatcher", # from unix_events - "SafeChildWatcher", # from unix_events - "FastChildWatcher", # from unix_events - "MultiLoopChildWatcher", # from unix_events - "ThreadedChildWatcher", # from unix_events - "DefaultEventLoopPolicy", # from unix_events - ) _T_co = TypeVar("_T_co", covariant=True) diff --git a/stdlib/asyncio/base_events.pyi b/stdlib/asyncio/base_events.pyi index 9527e9d052aa..cad7dde40b01 100644 --- a/stdlib/asyncio/base_events.pyi +++ b/stdlib/asyncio/base_events.pyi @@ -15,10 +15,7 @@ from typing import IO, Any, Literal, TypeVar, overload from typing_extensions import TypeAlias, TypeVarTuple, Unpack # Keep asyncio.__all__ updated with any changes to __all__ here -if sys.version_info >= (3, 9): - __all__ = ("BaseEventLoop", "Server") -else: - __all__ = ("BaseEventLoop",) +__all__ = ("BaseEventLoop", "Server") _T = TypeVar("_T") _Ts = TypeVarTuple("_Ts") @@ -485,7 +482,7 @@ class BaseEventLoop(AbstractEventLoop): def set_debug(self, enabled: bool) -> None: ... if sys.version_info >= (3, 12): async def shutdown_default_executor(self, timeout: float | None = None) -> None: ... - elif sys.version_info >= (3, 9): + else: async def shutdown_default_executor(self) -> None: ... def __del__(self) -> None: ... diff --git a/stdlib/asyncio/events.pyi b/stdlib/asyncio/events.pyi index a9f7d24237a4..afe912d01fe1 100644 --- a/stdlib/asyncio/events.pyi +++ b/stdlib/asyncio/events.pyi @@ -138,27 +138,19 @@ class AbstractEventLoop: @abstractmethod async def shutdown_asyncgens(self) -> None: ... # Methods scheduling callbacks. All these return Handles. - if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 - @abstractmethod - def call_soon( - self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> Handle: ... - @abstractmethod - def call_later( - self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> TimerHandle: ... - @abstractmethod - def call_at( - self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> TimerHandle: ... - else: - @abstractmethod - def call_soon(self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> Handle: ... - @abstractmethod - def call_later(self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> TimerHandle: ... - @abstractmethod - def call_at(self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> TimerHandle: ... - + # "context" added in 3.9.10/3.10.2 for call_* + @abstractmethod + def call_soon( + self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> Handle: ... + @abstractmethod + def call_later( + self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> TimerHandle: ... + @abstractmethod + def call_at( + self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> TimerHandle: ... @abstractmethod def time(self) -> float: ... 
# Future methods @@ -179,15 +171,11 @@ class AbstractEventLoop: @abstractmethod def get_task_factory(self) -> _TaskFactory | None: ... # Methods for interacting with threads - if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 - @abstractmethod - def call_soon_threadsafe( - self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> Handle: ... - else: - @abstractmethod - def call_soon_threadsafe(self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> Handle: ... - + # "context" added in 3.9.10/3.10.2 + @abstractmethod + def call_soon_threadsafe( + self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> Handle: ... @abstractmethod def run_in_executor(self, executor: Executor | None, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... @abstractmethod @@ -607,9 +595,8 @@ class AbstractEventLoop: def get_debug(self) -> bool: ... @abstractmethod def set_debug(self, enabled: bool) -> None: ... - if sys.version_info >= (3, 9): - @abstractmethod - async def shutdown_default_executor(self) -> None: ... + @abstractmethod + async def shutdown_default_executor(self) -> None: ... class AbstractEventLoopPolicy: @abstractmethod diff --git a/stdlib/asyncio/locks.pyi b/stdlib/asyncio/locks.pyi index 4eef69dee5c3..17390b0c5a0e 100644 --- a/stdlib/asyncio/locks.pyi +++ b/stdlib/asyncio/locks.pyi @@ -2,7 +2,7 @@ import enum import sys from _typeshed import Unused from collections import deque -from collections.abc import Callable, Generator +from collections.abc import Callable from types import TracebackType from typing import Any, Literal, TypeVar from typing_extensions import Self @@ -23,29 +23,11 @@ else: _T = TypeVar("_T") -if sys.version_info >= (3, 9): - class _ContextManagerMixin: - async def __aenter__(self) -> None: ... - async def __aexit__( - self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None - ) -> None: ... - -else: - class _ContextManager: - def __init__(self, lock: Lock | Semaphore) -> None: ... - def __enter__(self) -> None: ... - def __exit__(self, *args: Unused) -> None: ... - - class _ContextManagerMixin: - # Apparently this exists to *prohibit* use as a context manager. - # def __enter__(self) -> NoReturn: ... see: https://github.com/python/typing/issues/1043 - # def __exit__(self, *args: Any) -> None: ... - def __iter__(self) -> Generator[Any, None, _ContextManager]: ... - def __await__(self) -> Generator[Any, None, _ContextManager]: ... - async def __aenter__(self) -> None: ... - async def __aexit__( - self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None - ) -> None: ... +class _ContextManagerMixin: + async def __aenter__(self) -> None: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: ... 
class Lock(_ContextManagerMixin, _LoopBoundMixin): _waiters: deque[Future[Any]] | None diff --git a/stdlib/asyncio/queues.pyi b/stdlib/asyncio/queues.pyi index d287fe779297..63cd98f53da3 100644 --- a/stdlib/asyncio/queues.pyi +++ b/stdlib/asyncio/queues.pyi @@ -1,10 +1,8 @@ import sys from asyncio.events import AbstractEventLoop +from types import GenericAlias from typing import Any, Generic, TypeVar -if sys.version_info >= (3, 9): - from types import GenericAlias - if sys.version_info >= (3, 10): from .mixins import _LoopBoundMixin else: @@ -48,8 +46,7 @@ class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 def get_nowait(self) -> _T: ... async def join(self) -> None: ... def task_done(self) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, type: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, type: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 13): def shutdown(self, immediate: bool = False) -> None: ... diff --git a/stdlib/asyncio/tasks.pyi b/stdlib/asyncio/tasks.pyi index f6ee109915e0..e42151213e69 100644 --- a/stdlib/asyncio/tasks.pyi +++ b/stdlib/asyncio/tasks.pyi @@ -407,10 +407,8 @@ else: if sys.version_info >= (3, 12): _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co] -elif sys.version_info >= (3, 9): - _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co] else: - _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co] + _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co] def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... diff --git a/stdlib/asyncio/unix_events.pyi b/stdlib/asyncio/unix_events.pyi index abf5d7ffd699..79f99fbe37f0 100644 --- a/stdlib/asyncio/unix_events.pyi +++ b/stdlib/asyncio/unix_events.pyi @@ -30,7 +30,7 @@ if sys.platform != "win32": "DefaultEventLoopPolicy", "EventLoop", ) - elif sys.version_info >= (3, 9): + else: # adds PidfdChildWatcher __all__ = ( "SelectorEventLoop", @@ -42,16 +42,6 @@ if sys.platform != "win32": "ThreadedChildWatcher", "DefaultEventLoopPolicy", ) - else: - __all__ = ( - "SelectorEventLoop", - "AbstractChildWatcher", - "SafeChildWatcher", - "FastChildWatcher", - "MultiLoopChildWatcher", - "ThreadedChildWatcher", - "DefaultEventLoopPolicy", - ) # This is also technically not available on Win, # but other parts of typeshed need this definition. @@ -239,16 +229,15 @@ if sys.platform != "win32": def remove_child_handler(self, pid: int) -> bool: ... def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... - if sys.version_info >= (3, 9): - class PidfdChildWatcher(AbstractChildWatcher): - def __enter__(self) -> Self: ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None - ) -> None: ... - def is_active(self) -> bool: ... - def close(self) -> None: ... - def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... - def add_child_handler( - self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... - def remove_child_handler(self, pid: int) -> bool: ... + class PidfdChildWatcher(AbstractChildWatcher): + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def is_active(self) -> bool: ... + def close(self) -> None: ... 
+ def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... diff --git a/stdlib/base64.pyi b/stdlib/base64.pyi index 8be4cfe69de0..279d74a94ebe 100644 --- a/stdlib/base64.pyi +++ b/stdlib/base64.pyi @@ -56,10 +56,6 @@ def encode(input: IO[bytes], output: IO[bytes]) -> None: ... def encodebytes(s: ReadableBuffer) -> bytes: ... def decodebytes(s: ReadableBuffer) -> bytes: ... -if sys.version_info < (3, 9): - def encodestring(s: ReadableBuffer) -> bytes: ... - def decodestring(s: ReadableBuffer) -> bytes: ... - if sys.version_info >= (3, 13): def z85encode(s: ReadableBuffer) -> bytes: ... def z85decode(s: str | ReadableBuffer) -> bytes: ... diff --git a/stdlib/bz2.pyi b/stdlib/bz2.pyi index 2f869f9697f4..3b21fbcf7117 100644 --- a/stdlib/bz2.pyi +++ b/stdlib/bz2.pyi @@ -1,10 +1,9 @@ import _compression -import sys from _bz2 import BZ2Compressor as BZ2Compressor, BZ2Decompressor as BZ2Decompressor from _compression import BaseStream from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from collections.abc import Iterable -from typing import IO, Any, Literal, Protocol, SupportsIndex, TextIO, overload +from typing import IO, Literal, Protocol, SupportsIndex, TextIO, overload from typing_extensions import Self, TypeAlias __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "open", "compress", "decompress"] @@ -94,33 +93,14 @@ def open( class BZ2File(BaseStream, IO[bytes]): def __enter__(self) -> Self: ... - if sys.version_info >= (3, 9): - @overload - def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: ... - @overload - def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", *, compresslevel: int = 9) -> None: ... - @overload - def __init__( - self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = "r", *, compresslevel: int = 9 - ) -> None: ... - else: - @overload - def __init__( - self, filename: _WritableFileobj, mode: _WriteBinaryMode, buffering: Any | None = None, compresslevel: int = 9 - ) -> None: ... - @overload - def __init__( - self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", buffering: Any | None = None, compresslevel: int = 9 - ) -> None: ... - @overload - def __init__( - self, - filename: StrOrBytesPath, - mode: _ReadBinaryMode | _WriteBinaryMode = "r", - buffering: Any | None = None, - compresslevel: int = 9, - ) -> None: ... - + @overload + def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: ... + @overload + def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", *, compresslevel: int = 9) -> None: ... + @overload + def __init__( + self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = "r", *, compresslevel: int = 9 + ) -> None: ... def read(self, size: int | None = -1) -> bytes: ... def read1(self, size: int = -1) -> bytes: ... def readline(self, size: SupportsIndex = -1) -> bytes: ... 
# type: ignore[override] diff --git a/stdlib/collections/__init__.pyi b/stdlib/collections/__init__.pyi index 0f99b5c3c67e..b9e4f84ec0b6 100644 --- a/stdlib/collections/__init__.pyi +++ b/stdlib/collections/__init__.pyi @@ -1,12 +1,10 @@ import sys from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import SupportsItems, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from types import GenericAlias from typing import Any, ClassVar, Generic, NoReturn, SupportsIndex, TypeVar, final, overload from typing_extensions import Self -if sys.version_info >= (3, 9): - from types import GenericAlias - if sys.version_info >= (3, 10): from collections.abc import ( Callable, @@ -93,20 +91,19 @@ class UserDict(MutableMapping[_KT, _VT]): @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ... - if sys.version_info >= (3, 9): - @overload - def __or__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... - @overload - def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... - @overload - def __ror__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... - @overload - def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... - # UserDict.__ior__ should be kept roughly in line with MutableMapping.update() - @overload # type: ignore[misc] - def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... - @overload - def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + @overload + def __or__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... + @overload + def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... + @overload + def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... + # UserDict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... if sys.version_info >= (3, 12): @overload def get(self, key: _KT, default: None = None) -> _VT | None: ... @@ -213,10 +210,8 @@ class UserString(Sequence[UserString]): def lstrip(self, chars: str | None = None) -> Self: ... maketrans = str.maketrans def partition(self, sep: str) -> tuple[str, str, str]: ... - if sys.version_info >= (3, 9): - def removeprefix(self, prefix: str | UserString, /) -> Self: ... - def removesuffix(self, suffix: str | UserString, /) -> Self: ... - + def removeprefix(self, prefix: str | UserString, /) -> Self: ... + def removesuffix(self, suffix: str | UserString, /) -> Self: ... def replace(self, old: str | UserString, new: str | UserString, maxsplit: int = -1) -> Self: ... def rfind(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... def rindex(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... @@ -271,8 +266,7 @@ class deque(MutableSequence[_T]): def __gt__(self, value: deque[_T], /) -> bool: ... def __ge__(self, value: deque[_T], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
class Counter(dict[_T, int], Generic[_T]): @overload @@ -387,15 +381,14 @@ class OrderedDict(dict[_KT, _VT]): @overload def pop(self, key: _KT, default: _T) -> _VT | _T: ... def __eq__(self, value: object, /) -> bool: ... - if sys.version_info >= (3, 9): - @overload - def __or__(self, value: dict[_KT, _VT], /) -> Self: ... - @overload - def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... - @overload - def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... - @overload - def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] + @overload + def __or__(self, value: dict[_KT, _VT], /) -> Self: ... + @overload + def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... + @overload + def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] class defaultdict(dict[_KT, _VT]): default_factory: Callable[[], _VT] | None @@ -435,15 +428,14 @@ class defaultdict(dict[_KT, _VT]): def __missing__(self, key: _KT, /) -> _VT: ... def __copy__(self) -> Self: ... def copy(self) -> Self: ... - if sys.version_info >= (3, 9): - @overload - def __or__(self, value: dict[_KT, _VT], /) -> Self: ... - @overload - def __or__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... - @overload - def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... - @overload - def __ror__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] + @overload + def __or__(self, value: dict[_KT, _VT], /) -> Self: ... + @overload + def __or__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... + @overload + def __ror__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] class ChainMap(MutableMapping[_KT, _VT]): maps: list[MutableMapping[_KT, _VT]] @@ -488,17 +480,16 @@ class ChainMap(MutableMapping[_KT, _VT]): @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> ChainMap[_T, _S]: ... - if sys.version_info >= (3, 9): - @overload - def __or__(self, other: Mapping[_KT, _VT]) -> Self: ... - @overload - def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... - @overload - def __ror__(self, other: Mapping[_KT, _VT]) -> Self: ... - @overload - def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... - # ChainMap.__ior__ should be kept roughly in line with MutableMapping.update() - @overload # type: ignore[misc] - def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... - @overload - def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + @overload + def __or__(self, other: Mapping[_KT, _VT]) -> Self: ... + @overload + def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, other: Mapping[_KT, _VT]) -> Self: ... + @overload + def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + # ChainMap.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... 
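The now-unconditional `__or__`/`__ror__` overloads above describe PEP 584 dict union as
`defaultdict` implements it on 3.9+, including the `Self` return when the left operand is a
subclass, which is what the `check_defaultdict.py` test added earlier asserts. A small sketch
of that behaviour, using a hypothetical subclass:

from collections import defaultdict

class DD(defaultdict):          # hypothetical subclass, mirroring CustomDefaultDictSubclass above
    pass

a = DD(int, {"x": 1})
merged = a | {"x": 2, "y": 3}
print(type(merged).__name__)    # DD -- the union preserves the subclass, matching the Self overload
print(dict(merged))             # {'x': 2, 'y': 3}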
diff --git a/stdlib/compileall.pyi b/stdlib/compileall.pyi index f35c584cedfb..a599b1b23540 100644 --- a/stdlib/compileall.pyi +++ b/stdlib/compileall.pyi @@ -42,7 +42,7 @@ if sys.version_info >= (3, 10): hardlink_dupes: bool = False, ) -> bool: ... -elif sys.version_info >= (3, 9): +else: def compile_dir( dir: StrPath, maxlevels: int | None = None, @@ -76,30 +76,6 @@ elif sys.version_info >= (3, 9): hardlink_dupes: bool = False, ) -> bool: ... -else: - def compile_dir( - dir: StrPath, - maxlevels: int = 10, - ddir: StrPath | None = None, - force: bool = False, - rx: _SupportsSearch | None = None, - quiet: int = 0, - legacy: bool = False, - optimize: int = -1, - workers: int = 1, - invalidation_mode: PycInvalidationMode | None = None, - ) -> bool: ... - def compile_file( - fullname: StrPath, - ddir: StrPath | None = None, - force: bool = False, - rx: _SupportsSearch | None = None, - quiet: int = 0, - legacy: bool = False, - optimize: int = -1, - invalidation_mode: PycInvalidationMode | None = None, - ) -> bool: ... - def compile_path( skip_curdir: bool = ..., maxlevels: int = 0, diff --git a/stdlib/concurrent/futures/_base.pyi b/stdlib/concurrent/futures/_base.pyi index 0c019457902b..7294b69567d6 100644 --- a/stdlib/concurrent/futures/_base.pyi +++ b/stdlib/concurrent/futures/_base.pyi @@ -1,15 +1,12 @@ import sys import threading from _typeshed import Unused -from collections.abc import Callable, Collection, Iterable, Iterator +from collections.abc import Callable, Iterable, Iterator from logging import Logger -from types import TracebackType +from types import GenericAlias, TracebackType from typing import Any, Final, Generic, NamedTuple, Protocol, TypeVar from typing_extensions import ParamSpec, Self -if sys.version_info >= (3, 9): - from types import GenericAlias - FIRST_COMPLETED: Final = "FIRST_COMPLETED" FIRST_EXCEPTION: Final = "FIRST_EXCEPTION" ALL_COMPLETED: Final = "ALL_COMPLETED" @@ -53,23 +50,14 @@ class Future(Generic[_T]): def set_result(self, result: _T) -> None: ... def exception(self, timeout: float | None = None) -> BaseException | None: ... def set_exception(self, exception: BaseException | None) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class Executor: - if sys.version_info >= (3, 9): - def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... - else: - def submit(self, fn: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... - + def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... def map( self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 ) -> Iterator[_T]: ... - if sys.version_info >= (3, 9): - def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ... - else: - def shutdown(self, wait: bool = True) -> None: ... - + def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -91,15 +79,9 @@ class DoneAndNotDoneFutures(NamedTuple, Generic[_T]): done: set[Future[_T]] not_done: set[Future[_T]] -if sys.version_info >= (3, 9): - def wait( - fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" - ) -> DoneAndNotDoneFutures[_T]: ... 
- -else: - def wait( - fs: Collection[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" - ) -> DoneAndNotDoneFutures[_T]: ... +def wait( + fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" +) -> DoneAndNotDoneFutures[_T]: ... class _Waiter: event: threading.Event diff --git a/stdlib/concurrent/futures/process.pyi b/stdlib/concurrent/futures/process.pyi index 97dc261be7ed..9c904f793fa9 100644 --- a/stdlib/concurrent/futures/process.pyi +++ b/stdlib/concurrent/futures/process.pyi @@ -84,7 +84,7 @@ class _SafeQueue(Queue[Future[Any]]): pending_work_items: dict[int, _WorkItem[Any]], thread_wakeup: _ThreadWakeup, ) -> None: ... - elif sys.version_info >= (3, 9): + else: def __init__( self, max_size: int | None = 0, @@ -94,10 +94,6 @@ class _SafeQueue(Queue[Future[Any]]): shutdown_lock: Lock, thread_wakeup: _ThreadWakeup, ) -> None: ... - else: - def __init__( - self, max_size: int | None = 0, *, ctx: BaseContext, pending_work_items: dict[int, _WorkItem[Any]] - ) -> None: ... def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... @@ -135,27 +131,26 @@ else: initargs: tuple[Unpack[_Ts]], ) -> None: ... -if sys.version_info >= (3, 9): - class _ExecutorManagerThread(Thread): - thread_wakeup: _ThreadWakeup - shutdown_lock: Lock - executor_reference: ref[Any] - processes: MutableMapping[int, Process] - call_queue: Queue[_CallItem] - result_queue: SimpleQueue[_ResultItem] - work_ids_queue: Queue[int] - pending_work_items: dict[int, _WorkItem[Any]] - def __init__(self, executor: ProcessPoolExecutor) -> None: ... - def run(self) -> None: ... - def add_call_item_to_queue(self) -> None: ... - def wait_result_broken_or_wakeup(self) -> tuple[Any, bool, str]: ... - def process_result_item(self, result_item: int | _ResultItem) -> None: ... - def is_shutting_down(self) -> bool: ... - def terminate_broken(self, cause: str) -> None: ... - def flag_executor_shutting_down(self) -> None: ... - def shutdown_workers(self) -> None: ... - def join_executor_internals(self) -> None: ... - def get_n_children_alive(self) -> int: ... +class _ExecutorManagerThread(Thread): + thread_wakeup: _ThreadWakeup + shutdown_lock: Lock + executor_reference: ref[Any] + processes: MutableMapping[int, Process] + call_queue: Queue[_CallItem] + result_queue: SimpleQueue[_ResultItem] + work_ids_queue: Queue[int] + pending_work_items: dict[int, _WorkItem[Any]] + def __init__(self, executor: ProcessPoolExecutor) -> None: ... + def run(self) -> None: ... + def add_call_item_to_queue(self) -> None: ... + def wait_result_broken_or_wakeup(self) -> tuple[Any, bool, str]: ... + def process_result_item(self, result_item: int | _ResultItem) -> None: ... + def is_shutting_down(self) -> bool: ... + def terminate_broken(self, cause: str) -> None: ... + def flag_executor_shutting_down(self) -> None: ... + def shutdown_workers(self) -> None: ... + def join_executor_internals(self) -> None: ... + def get_n_children_alive(self) -> int: ... _system_limits_checked: bool _system_limited: bool | None @@ -238,7 +233,6 @@ class ProcessPoolExecutor(Executor): initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]], ) -> None: ... - if sys.version_info >= (3, 9): - def _start_executor_manager_thread(self) -> None: ... + def _start_executor_manager_thread(self) -> None: ... def _adjust_process_count(self) -> None: ... 
diff --git a/stdlib/concurrent/futures/thread.pyi b/stdlib/concurrent/futures/thread.pyi index d1b7858eae02..da3e006b6f13 100644 --- a/stdlib/concurrent/futures/thread.pyi +++ b/stdlib/concurrent/futures/thread.pyi @@ -1,7 +1,7 @@ import queue -import sys from collections.abc import Callable, Iterable, Mapping, Set as AbstractSet from threading import Lock, Semaphore, Thread +from types import GenericAlias from typing import Any, Generic, TypeVar, overload from typing_extensions import TypeVarTuple, Unpack from weakref import ref @@ -16,9 +16,6 @@ _global_shutdown_lock: Lock def _python_exit() -> None: ... -if sys.version_info >= (3, 9): - from types import GenericAlias - _S = TypeVar("_S") class _WorkItem(Generic[_S]): @@ -28,8 +25,7 @@ class _WorkItem(Generic[_S]): kwargs: Mapping[str, Any] def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... def run(self) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... def _worker( executor_reference: ref[Any], diff --git a/stdlib/contextlib.pyi b/stdlib/contextlib.pyi index 08ac5a28b8b8..70d0dbdcb2f1 100644 --- a/stdlib/contextlib.pyi +++ b/stdlib/contextlib.pyi @@ -81,14 +81,9 @@ class _GeneratorContextManager( AbstractContextManager[_T_co, bool | None], ContextDecorator, ): - if sys.version_info >= (3, 9): - def __exit__( - self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None - ) -> bool | None: ... - else: - def __exit__( - self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None - ) -> bool | None: ... + def __exit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: ... diff --git a/stdlib/ctypes/__init__.pyi b/stdlib/ctypes/__init__.pyi index 459c01a60fd1..fe9c239176d2 100644 --- a/stdlib/ctypes/__init__.pyi +++ b/stdlib/ctypes/__init__.pyi @@ -26,6 +26,7 @@ from _ctypes import ( ) from _typeshed import StrPath from ctypes._endian import BigEndianStructure as BigEndianStructure, LittleEndianStructure as LittleEndianStructure +from types import GenericAlias from typing import Any, ClassVar, Generic, TypeVar, type_check_only from typing_extensions import Self, TypeAlias, deprecated @@ -35,9 +36,6 @@ if sys.platform == "win32": if sys.version_info >= (3, 11): from ctypes._endian import BigEndianUnion as BigEndianUnion, LittleEndianUnion as LittleEndianUnion -if sys.version_info >= (3, 9): - from types import GenericAlias - _T = TypeVar("_T") _DLLT = TypeVar("_DLLT", bound=CDLL) _CT = TypeVar("_CT", bound=_CData) @@ -92,8 +90,7 @@ class LibraryLoader(Generic[_DLLT]): def __getattr__(self, name: str) -> _DLLT: ... def __getitem__(self, name: str) -> _DLLT: ... def LoadLibrary(self, name: str) -> _DLLT: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... cdll: LibraryLoader[CDLL] if sys.platform == "win32": @@ -151,9 +148,7 @@ c_buffer = create_string_buffer def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: ... 
@deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.15") -def SetPointerType( - pointer: type[_Pointer[Any]], cls: Any # noqa: F811 # Redefinition of unused `pointer` from line 22 -) -> None: ... +def SetPointerType(pointer: type[_Pointer[Any]], cls: Any) -> None: ... # noqa: F811 def ARRAY(typ: _CT, len: int) -> Array[_CT]: ... # Soft Deprecated, no plans to remove if sys.platform == "win32": diff --git a/stdlib/dataclasses.pyi b/stdlib/dataclasses.pyi index 3d89b830352b..e08b1919d8e5 100644 --- a/stdlib/dataclasses.pyi +++ b/stdlib/dataclasses.pyi @@ -4,11 +4,9 @@ import types from _typeshed import DataclassInstance from builtins import type as Type # alias to avoid name clashes with fields named "type" from collections.abc import Callable, Iterable, Mapping +from types import GenericAlias from typing import Any, Generic, Literal, Protocol, TypeVar, overload -from typing_extensions import Never, TypeAlias, TypeIs - -if sys.version_info >= (3, 9): - from types import GenericAlias +from typing_extensions import Never, TypeIs _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) @@ -142,8 +140,7 @@ class Field(Generic[_T]): ) -> None: ... def __set_name__(self, owner: Type[Any], name: str) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # NOTE: Actual return type is 'Field[_T]', but we want to help type checkers # to understand the magic that happens at runtime. @@ -232,22 +229,13 @@ def is_dataclass(obj: object) -> TypeIs[DataclassInstance | type[DataclassInstan class FrozenInstanceError(AttributeError): ... -if sys.version_info >= (3, 9): - _InitVarMeta: TypeAlias = type -else: - class _InitVarMeta(type): - # Not used, instead `InitVar.__class_getitem__` is called. - # pyright (not unreasonably) thinks this is an invalid use of InitVar. - def __getitem__(self, params: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm] - -class InitVar(Generic[_T], metaclass=_InitVarMeta): +class InitVar(Generic[_T], metaclass=type): type: Type[_T] def __init__(self, type: Type[_T]) -> None: ... - if sys.version_info >= (3, 9): - @overload - def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... # pyright: ignore[reportInvalidTypeForm] - @overload - def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm] + @overload + def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... # pyright: ignore[reportInvalidTypeForm] + @overload + def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm] if sys.version_info >= (3, 12): def make_dataclass( diff --git a/stdlib/datetime.pyi b/stdlib/datetime.pyi index 4907bf4607c8..72fb5fceb1fb 100644 --- a/stdlib/datetime.pyi +++ b/stdlib/datetime.pyi @@ -6,7 +6,7 @@ from typing_extensions import CapsuleType, Self, TypeAlias, deprecated if sys.version_info >= (3, 11): __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR", "UTC") -elif sys.version_info >= (3, 9): +else: __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR") MINYEAR: Final = 1 @@ -39,18 +39,17 @@ class timezone(tzinfo): if sys.version_info >= (3, 11): UTC: timezone -if sys.version_info >= (3, 9): - # This class calls itself datetime.IsoCalendarDate. It's neither - # NamedTuple nor structseq. 
- @final - @type_check_only - class _IsoCalendarDate(tuple[int, int, int]): - @property - def year(self) -> int: ... - @property - def week(self) -> int: ... - @property - def weekday(self) -> int: ... +# This class calls itself datetime.IsoCalendarDate. It's neither +# NamedTuple nor structseq. +@final +@type_check_only +class _IsoCalendarDate(tuple[int, int, int]): + @property + def year(self) -> int: ... + @property + def week(self) -> int: ... + @property + def weekday(self) -> int: ... class date: min: ClassVar[date] @@ -106,10 +105,7 @@ class date: def __hash__(self) -> int: ... def weekday(self) -> int: ... def isoweekday(self) -> int: ... - if sys.version_info >= (3, 9): - def isocalendar(self) -> _IsoCalendarDate: ... - else: - def isocalendar(self) -> tuple[int, int, int]: ... + def isocalendar(self) -> _IsoCalendarDate: ... class time: min: ClassVar[time] diff --git a/stdlib/difflib.pyi b/stdlib/difflib.pyi index 50154d785c2f..18583a3acfe9 100644 --- a/stdlib/difflib.pyi +++ b/stdlib/difflib.pyi @@ -1,10 +1,7 @@ -import sys from collections.abc import Callable, Iterable, Iterator, Sequence +from types import GenericAlias from typing import Any, AnyStr, Generic, Literal, NamedTuple, TypeVar, overload -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "get_close_matches", "ndiff", @@ -43,19 +40,14 @@ class SequenceMatcher(Generic[_T]): def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... def set_seq1(self, a: Sequence[_T]) -> None: ... def set_seq2(self, b: Sequence[_T]) -> None: ... - if sys.version_info >= (3, 9): - def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: ... - else: - def find_longest_match(self, alo: int, ahi: int, blo: int, bhi: int) -> Match: ... - + def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: ... def get_matching_blocks(self) -> list[Match]: ... def get_opcodes(self) -> list[tuple[Literal["replace", "delete", "insert", "equal"], int, int, int, int]]: ... def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: ... def ratio(self) -> float: ... def quick_ratio(self) -> float: ... def real_quick_ratio(self) -> float: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @overload def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: ... diff --git a/stdlib/distutils/command/bdist_msi.pyi b/stdlib/distutils/command/bdist_msi.pyi index baeee7d3eccb..d677f81d1425 100644 --- a/stdlib/distutils/command/bdist_msi.pyi +++ b/stdlib/distutils/command/bdist_msi.pyi @@ -21,8 +21,7 @@ if sys.platform == "win32": boolean_options: ClassVar[list[str]] all_versions: Incomplete other_version: str - if sys.version_info >= (3, 9): - def __init__(self, *args, **kw) -> None: ... + def __init__(self, *args, **kw) -> None: ... 
bdist_dir: Incomplete plat_name: Incomplete keep_temp: int diff --git a/stdlib/encodings/raw_unicode_escape.pyi b/stdlib/encodings/raw_unicode_escape.pyi index 74abb4623fab..2887739468f2 100644 --- a/stdlib/encodings/raw_unicode_escape.pyi +++ b/stdlib/encodings/raw_unicode_escape.pyi @@ -1,5 +1,4 @@ import codecs -import sys from _typeshed import ReadableBuffer class Codec(codecs.Codec): @@ -7,28 +6,18 @@ class Codec(codecs.Codec): @staticmethod def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... # At runtime, this is codecs.raw_unicode_escape_decode - if sys.version_info >= (3, 9): - @staticmethod - def decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ... - else: - @staticmethod - def decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... + @staticmethod + def decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ... class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input: str, final: bool = False) -> bytes: ... -if sys.version_info >= (3, 9): - class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, input: str | ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... - -else: - class IncrementalDecoder(codecs.IncrementalDecoder): - def decode(self, input: str | ReadableBuffer, final: bool = False) -> str: ... +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, input: str | ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... class StreamWriter(Codec, codecs.StreamWriter): ... class StreamReader(Codec, codecs.StreamReader): - if sys.version_info >= (3, 9): - def decode(self, input: str | ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... # type: ignore[override] + def decode(self, input: str | ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... # type: ignore[override] def getregentry() -> codecs.CodecInfo: ... diff --git a/stdlib/encodings/unicode_escape.pyi b/stdlib/encodings/unicode_escape.pyi index 1e942f57916e..ceaa39a3859a 100644 --- a/stdlib/encodings/unicode_escape.pyi +++ b/stdlib/encodings/unicode_escape.pyi @@ -1,5 +1,4 @@ import codecs -import sys from _typeshed import ReadableBuffer class Codec(codecs.Codec): @@ -7,28 +6,18 @@ class Codec(codecs.Codec): @staticmethod def encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... # At runtime, this is codecs.unicode_escape_decode - if sys.version_info >= (3, 9): - @staticmethod - def decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ... - else: - @staticmethod - def decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... + @staticmethod + def decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ... class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input: str, final: bool = False) -> bytes: ... -if sys.version_info >= (3, 9): - class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, input: str | ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... - -else: - class IncrementalDecoder(codecs.IncrementalDecoder): - def decode(self, input: str | ReadableBuffer, final: bool = False) -> str: ... 
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, input: str | ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... class StreamWriter(Codec, codecs.StreamWriter): ... class StreamReader(Codec, codecs.StreamReader): - if sys.version_info >= (3, 9): - def decode(self, input: str | ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... # type: ignore[override] + def decode(self, input: str | ReadableBuffer, errors: str = "strict") -> tuple[str, int]: ... # type: ignore[override] def getregentry() -> codecs.CodecInfo: ... diff --git a/stdlib/enum.pyi b/stdlib/enum.pyi index 4a6287a712af..8c88b26a3a2f 100644 --- a/stdlib/enum.pyi +++ b/stdlib/enum.pyi @@ -100,20 +100,13 @@ class EnumMeta(type): _simple: bool = False, **kwds: Any, ) -> _typeshed.Self: ... - elif sys.version_info >= (3, 9): + else: def __new__( metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any ) -> _typeshed.Self: ... - else: - def __new__(metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict) -> _typeshed.Self: ... - - if sys.version_info >= (3, 9): - @classmethod - def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override] - else: - @classmethod - def __prepare__(metacls, cls: str, bases: tuple[type, ...]) -> _EnumDict: ... # type: ignore[override] + @classmethod + def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override] def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... if sys.version_info >= (3, 12): diff --git a/stdlib/fcntl.pyi b/stdlib/fcntl.pyi index 71078b3b4579..2fe64eb53201 100644 --- a/stdlib/fcntl.pyi +++ b/stdlib/fcntl.pyi @@ -26,8 +26,7 @@ if sys.platform != "win32": if sys.platform == "darwin": F_FULLFSYNC: int F_NOCACHE: int - if sys.version_info >= (3, 9): - F_GETPATH: int + F_GETPATH: int if sys.platform == "linux": F_SETLKW64: int F_SETSIG: int @@ -43,10 +42,9 @@ if sys.platform != "win32": F_SEAL_SEAL: int F_SEAL_SHRINK: int F_SEAL_WRITE: int - if sys.version_info >= (3, 9): - F_OFD_GETLK: Final[int] - F_OFD_SETLK: Final[int] - F_OFD_SETLKW: Final[int] + F_OFD_GETLK: Final[int] + F_OFD_SETLK: Final[int] + F_OFD_SETLKW: Final[int] if sys.version_info >= (3, 10): F_GETPIPE_SZ: int diff --git a/stdlib/filecmp.pyi b/stdlib/filecmp.pyi index cb7b94596077..a2a2b235fdad 100644 --- a/stdlib/filecmp.pyi +++ b/stdlib/filecmp.pyi @@ -1,11 +1,9 @@ import sys from _typeshed import GenericPath, StrOrBytesPath from collections.abc import Callable, Iterable, Sequence +from types import GenericAlias from typing import Any, AnyStr, Final, Generic, Literal -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] DEFAULT_IGNORES: list[str] @@ -62,7 +60,6 @@ class dircmp(Generic[AnyStr]): def phase3(self) -> None: ... def phase4(self) -> None: ... def phase4_closure(self) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... def clear_cache() -> None: ... 
diff --git a/stdlib/fileinput.pyi b/stdlib/fileinput.pyi index bf6daad0aea7..948b39ea1e1c 100644 --- a/stdlib/fileinput.pyi +++ b/stdlib/fileinput.pyi @@ -1,13 +1,10 @@ import sys from _typeshed import AnyStr_co, StrOrBytesPath from collections.abc import Callable, Iterable -from types import TracebackType +from types import GenericAlias, TracebackType from typing import IO, Any, AnyStr, Generic, Literal, Protocol, overload from typing_extensions import Self, TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "input", "close", @@ -199,8 +196,7 @@ class FileInput(Generic[AnyStr]): def fileno(self) -> int: ... def isfirstline(self) -> bool: ... def isstdin(self) -> bool: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 10): def hook_compressed( diff --git a/stdlib/fractions.pyi b/stdlib/fractions.pyi index aaa3a22087fc..4d5c2160e60a 100644 --- a/stdlib/fractions.pyi +++ b/stdlib/fractions.pyi @@ -1,24 +1,13 @@ import sys from collections.abc import Callable from decimal import Decimal -from numbers import Integral, Rational, Real +from numbers import Rational, Real from typing import Any, Literal, Protocol, SupportsIndex, overload from typing_extensions import Self, TypeAlias _ComparableNum: TypeAlias = int | float | Decimal | Real -if sys.version_info >= (3, 9): - __all__ = ["Fraction"] -else: - __all__ = ["Fraction", "gcd"] - @overload - def gcd(a: int, b: int) -> int: ... - @overload - def gcd(a: Integral, b: int) -> Integral: ... - @overload - def gcd(a: int, b: Integral) -> Integral: ... - @overload - def gcd(a: Integral, b: Integral) -> Integral: ... +__all__ = ["Fraction"] class _ConvertibleToIntegerRatio(Protocol): def as_integer_ratio(self) -> tuple[int | Rational, int | Rational]: ... diff --git a/stdlib/ftplib.pyi b/stdlib/ftplib.pyi index 3693d7c52a26..44bc2165fe0e 100644 --- a/stdlib/ftplib.pyi +++ b/stdlib/ftplib.pyi @@ -41,29 +41,17 @@ class FTP: self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... source_address: tuple[str, int] | None - if sys.version_info >= (3, 9): - def __init__( - self, - host: str = "", - user: str = "", - passwd: str = "", - acct: str = "", - timeout: float | None = ..., - source_address: tuple[str, int] | None = None, - *, - encoding: str = "utf-8", - ) -> None: ... - else: - def __init__( - self, - host: str = "", - user: str = "", - passwd: str = "", - acct: str = "", - timeout: float | None = ..., - source_address: tuple[str, int] | None = None, - ) -> None: ... - + def __init__( + self, + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + timeout: float | None = ..., + source_address: tuple[str, int] | None = None, + *, + encoding: str = "utf-8", + ) -> None: ... def connect( self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None ) -> str: ... @@ -131,7 +119,7 @@ class FTP_TLS(FTP): source_address: tuple[str, int] | None = None, encoding: str = "utf-8", ) -> None: ... - elif sys.version_info >= (3, 9): + else: def __init__( self, host: str = "", @@ -146,19 +134,6 @@ class FTP_TLS(FTP): *, encoding: str = "utf-8", ) -> None: ... 
- else: - def __init__( - self, - host: str = "", - user: str = "", - passwd: str = "", - acct: str = "", - keyfile: str | None = None, - certfile: str | None = None, - context: SSLContext | None = None, - timeout: float | None = ..., - source_address: tuple[str, int] | None = None, - ) -> None: ... ssl_version: int keyfile: str | None certfile: str | None diff --git a/stdlib/functools.pyi b/stdlib/functools.pyi index f786167e322d..d35c295754e5 100644 --- a/stdlib/functools.pyi +++ b/stdlib/functools.pyi @@ -2,12 +2,10 @@ import sys import types from _typeshed import SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sized +from types import GenericAlias from typing import Any, Generic, Literal, NamedTuple, TypedDict, TypeVar, final, overload from typing_extensions import ParamSpec, Self, TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "update_wrapper", "wraps", @@ -22,11 +20,9 @@ __all__ = [ "singledispatch", "cached_property", "singledispatchmethod", + "cache", ] -if sys.version_info >= (3, 9): - __all__ += ["cache"] - _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) _S = TypeVar("_S") @@ -46,10 +42,9 @@ class _CacheInfo(NamedTuple): maxsize: int | None currsize: int -if sys.version_info >= (3, 9): - class _CacheParameters(TypedDict): - maxsize: int - typed: bool +class _CacheParameters(TypedDict): + maxsize: int + typed: bool @final class _lru_cache_wrapper(Generic[_T]): @@ -57,9 +52,7 @@ class _lru_cache_wrapper(Generic[_T]): def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: ... def cache_info(self) -> _CacheInfo: ... def cache_clear(self) -> None: ... - if sys.version_info >= (3, 9): - def cache_parameters(self) -> _CacheParameters: ... - + def cache_parameters(self) -> _CacheParameters: ... def __copy__(self) -> _lru_cache_wrapper[_T]: ... def __deepcopy__(self, memo: Any, /) -> _lru_cache_wrapper[_T]: ... @@ -131,8 +124,7 @@ class partial(Generic[_T]): def keywords(self) -> dict[str, Any]: ... def __new__(cls, func: Callable[..., _T], /, *args: Any, **kwargs: Any) -> Self: ... def __call__(self, /, *args: Any, **kwargs: Any) -> _T: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # With protocols, this could change into a generic protocol that defines __get__ and returns _T _Descriptor: TypeAlias = Any @@ -148,8 +140,7 @@ class partialmethod(Generic[_T]): def __get__(self, obj: Any, cls: type[Any] | None = None) -> Callable[..., _T]: ... @property def __isabstractmethod__(self) -> bool: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 11): _RegType: TypeAlias = type[Any] | types.UnionType @@ -200,12 +191,9 @@ class cached_property(Generic[_T_co]): def __set_name__(self, owner: type[Any], name: str) -> None: ... # __set__ is not defined at runtime, but @cached_property is designed to be settable def __set__(self, instance: object, value: _T_co) -> None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... - -if sys.version_info >= (3, 9): - def cache(user_function: Callable[..., _T], /) -> _lru_cache_wrapper[_T]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
+def cache(user_function: Callable[..., _T], /) -> _lru_cache_wrapper[_T]: ... def _make_key( args: tuple[Hashable, ...], kwds: SupportsItems[Any, Any], diff --git a/stdlib/gc.pyi b/stdlib/gc.pyi index 9d34e0d6213a..06fb6b47c2d1 100644 --- a/stdlib/gc.pyi +++ b/stdlib/gc.pyi @@ -1,4 +1,3 @@ -import sys from collections.abc import Callable from typing import Any, Final, Literal from typing_extensions import TypeAlias @@ -28,10 +27,7 @@ def get_referrers(*objs: Any) -> list[Any]: ... def get_stats() -> list[dict[str, Any]]: ... def get_threshold() -> tuple[int, int, int]: ... def is_tracked(obj: Any, /) -> bool: ... - -if sys.version_info >= (3, 9): - def is_finalized(obj: Any, /) -> bool: ... - +def is_finalized(obj: Any, /) -> bool: ... def isenabled() -> bool: ... def set_debug(flags: int, /) -> None: ... def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ..., /) -> None: ... diff --git a/stdlib/hashlib.pyi b/stdlib/hashlib.pyi index 84666a7fa725..b32c0e992574 100644 --- a/stdlib/hashlib.pyi +++ b/stdlib/hashlib.pyi @@ -5,16 +5,22 @@ from _hashlib import ( _HashObject, openssl_md5 as md5, openssl_sha1 as sha1, + openssl_sha3_224 as sha3_224, + openssl_sha3_256 as sha3_256, + openssl_sha3_384 as sha3_384, + openssl_sha3_512 as sha3_512, openssl_sha224 as sha224, openssl_sha256 as sha256, openssl_sha384 as sha384, openssl_sha512 as sha512, + openssl_shake_128 as shake_128, + openssl_shake_256 as shake_256, pbkdf2_hmac as pbkdf2_hmac, scrypt as scrypt, ) from _typeshed import ReadableBuffer from collections.abc import Callable, Set as AbstractSet -from typing import Protocol, type_check_only +from typing import Protocol if sys.version_info >= (3, 11): __all__ = ( @@ -60,31 +66,7 @@ else: "pbkdf2_hmac", ) -if sys.version_info >= (3, 9): - def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> HASH: ... - from _hashlib import ( - openssl_sha3_224 as sha3_224, - openssl_sha3_256 as sha3_256, - openssl_sha3_384 as sha3_384, - openssl_sha3_512 as sha3_512, - openssl_shake_128 as shake_128, - openssl_shake_256 as shake_256, - ) - -else: - @type_check_only - class _VarLenHash(HASH): - def digest(self, length: int) -> bytes: ... # type: ignore[override] - def hexdigest(self, length: int) -> str: ... # type: ignore[override] - - def new(name: str, data: ReadableBuffer = b"") -> HASH: ... - # At runtime these aren't functions but classes imported from _sha3 - def sha3_224(string: ReadableBuffer = b"") -> HASH: ... - def sha3_256(string: ReadableBuffer = b"") -> HASH: ... - def sha3_384(string: ReadableBuffer = b"") -> HASH: ... - def sha3_512(string: ReadableBuffer = b"") -> HASH: ... - def shake_128(string: ReadableBuffer = b"") -> _VarLenHash: ... - def shake_256(string: ReadableBuffer = b"") -> _VarLenHash: ... +def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> HASH: ... 
algorithms_guaranteed: AbstractSet[str] algorithms_available: AbstractSet[str] diff --git a/stdlib/hmac.pyi b/stdlib/hmac.pyi index dfb574c177cd..300ed9eb26d8 100644 --- a/stdlib/hmac.pyi +++ b/stdlib/hmac.pyi @@ -1,9 +1,8 @@ -import sys -from _hashlib import _HashObject +from _hashlib import _HashObject, compare_digest as compare_digest from _typeshed import ReadableBuffer, SizedBuffer from collections.abc import Callable from types import ModuleType -from typing import AnyStr, overload +from typing import overload from typing_extensions import TypeAlias _DigestMod: TypeAlias = str | Callable[[], _HashObject] | ModuleType @@ -32,11 +31,3 @@ class HMAC: def copy(self) -> HMAC: ... def digest(key: SizedBuffer, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... - -if sys.version_info >= (3, 9): - from _hashlib import compare_digest as compare_digest -else: - @overload - def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: ... - @overload - def compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... diff --git a/stdlib/http/__init__.pyi b/stdlib/http/__init__.pyi index ef413a349125..f60c3909736d 100644 --- a/stdlib/http/__init__.pyi +++ b/stdlib/http/__init__.pyi @@ -19,8 +19,7 @@ class HTTPStatus(IntEnum): CONTINUE = 100 SWITCHING_PROTOCOLS = 101 PROCESSING = 102 - if sys.version_info >= (3, 9): - EARLY_HINTS = 103 + EARLY_HINTS = 103 OK = 200 CREATED = 201 @@ -66,16 +65,14 @@ class HTTPStatus(IntEnum): RANGE_NOT_SATISFIABLE = 416 REQUESTED_RANGE_NOT_SATISFIABLE = 416 EXPECTATION_FAILED = 417 - if sys.version_info >= (3, 9): - IM_A_TEAPOT = 418 + IM_A_TEAPOT = 418 MISDIRECTED_REQUEST = 421 if sys.version_info >= (3, 13): UNPROCESSABLE_CONTENT = 422 UNPROCESSABLE_ENTITY = 422 LOCKED = 423 FAILED_DEPENDENCY = 424 - if sys.version_info >= (3, 9): - TOO_EARLY = 425 + TOO_EARLY = 425 UPGRADE_REQUIRED = 426 PRECONDITION_REQUIRED = 428 TOO_MANY_REQUESTS = 429 diff --git a/stdlib/http/client.pyi b/stdlib/http/client.pyi index cd2fc4f5a652..9e0f61598cb8 100644 --- a/stdlib/http/client.pyi +++ b/stdlib/http/client.pyi @@ -44,8 +44,7 @@ HTTPS_PORT: int CONTINUE: Literal[100] SWITCHING_PROTOCOLS: Literal[101] PROCESSING: Literal[102] -if sys.version_info >= (3, 9): - EARLY_HINTS: Literal[103] +EARLY_HINTS: Literal[103] OK: Literal[200] CREATED: Literal[201] @@ -91,16 +90,14 @@ if sys.version_info >= (3, 13): RANGE_NOT_SATISFIABLE: Literal[416] REQUESTED_RANGE_NOT_SATISFIABLE: Literal[416] EXPECTATION_FAILED: Literal[417] -if sys.version_info >= (3, 9): - IM_A_TEAPOT: Literal[418] +IM_A_TEAPOT: Literal[418] MISDIRECTED_REQUEST: Literal[421] if sys.version_info >= (3, 13): UNPROCESSABLE_CONTENT: Literal[422] UNPROCESSABLE_ENTITY: Literal[422] LOCKED: Literal[423] FAILED_DEPENDENCY: Literal[424] -if sys.version_info >= (3, 9): - TOO_EARLY: Literal[425] +TOO_EARLY: Literal[425] UPGRADE_REQUIRED: Literal[426] PRECONDITION_REQUIRED: Literal[428] TOO_MANY_REQUESTS: Literal[429] diff --git a/stdlib/http/cookies.pyi b/stdlib/http/cookies.pyi index c4af5256b5d8..4df12e3125d4 100644 --- a/stdlib/http/cookies.pyi +++ b/stdlib/http/cookies.pyi @@ -1,11 +1,8 @@ -import sys from collections.abc import Iterable, Mapping +from types import GenericAlias from typing import Any, Generic, TypeVar, overload from typing_extensions import TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = ["CookieError", "BaseCookie", "SimpleCookie"] _DataType: TypeAlias = str | Mapping[str, str | Morsel[Any]] @@ -44,8 +41,7 @@ class Morsel(dict[str, Any], Generic[_T]): def 
OutputString(self, attrs: list[str] | None = None) -> str: ... def __eq__(self, morsel: object) -> bool: ... def __setitem__(self, K: str, V: Any) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): def __init__(self, input: _DataType | None = None) -> None: ... diff --git a/stdlib/imaplib.pyi b/stdlib/imaplib.pyi index 6a4d8b2e720a..ccee92bd5e88 100644 --- a/stdlib/imaplib.pyi +++ b/stdlib/imaplib.pyi @@ -40,13 +40,8 @@ class IMAP4: welcome: bytes capabilities: tuple[str, ...] PROTOCOL_VERSION: str - if sys.version_info >= (3, 9): - def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... - def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... - else: - def __init__(self, host: str = "", port: int = 143) -> None: ... - def open(self, host: str = "", port: int = 143) -> None: ... - + def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... + def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... def __getattr__(self, attr: str) -> Any: ... host: str port: int @@ -101,9 +96,7 @@ class IMAP4: def thread(self, threading_algorithm: str, charset: str, *search_criteria: str) -> _CommandResults: ... def uid(self, command: str, *args: str) -> _CommandResults: ... def unsubscribe(self, mailbox: str) -> _CommandResults: ... - if sys.version_info >= (3, 9): - def unselect(self) -> _CommandResults: ... - + def unselect(self) -> _CommandResults: ... def xatom(self, name: str, *args: str) -> _CommandResults: ... def print_log(self) -> None: ... @@ -115,16 +108,6 @@ class IMAP4_SSL(IMAP4): def __init__( self, host: str = "", port: int = 993, *, ssl_context: SSLContext | None = None, timeout: float | None = None ) -> None: ... - elif sys.version_info >= (3, 9): - def __init__( - self, - host: str = "", - port: int = 993, - keyfile: str | None = None, - certfile: str | None = None, - ssl_context: SSLContext | None = None, - timeout: float | None = None, - ) -> None: ... else: def __init__( self, @@ -133,14 +116,11 @@ class IMAP4_SSL(IMAP4): keyfile: str | None = None, certfile: str | None = None, ssl_context: SSLContext | None = None, + timeout: float | None = None, ) -> None: ... sslobj: SSLSocket file: IO[Any] - if sys.version_info >= (3, 9): - def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: ... - else: - def open(self, host: str = "", port: int | None = 993) -> None: ... - + def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: ... def ssl(self) -> SSLSocket: ... class IMAP4_stream(IMAP4): @@ -150,10 +130,7 @@ class IMAP4_stream(IMAP4): process: subprocess.Popen[bytes] writefile: IO[Any] readfile: IO[Any] - if sys.version_info >= (3, 9): - def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: ... - else: - def open(self, host: str | None = None, port: int | None = None) -> None: ... + def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: ... 
class _Authenticator: mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] diff --git a/stdlib/importlib/abc.pyi b/stdlib/importlib/abc.pyi index 588377d7d871..8a106b3a64d7 100644 --- a/stdlib/importlib/abc.pyi +++ b/stdlib/importlib/abc.pyi @@ -125,49 +125,48 @@ class ResourceReader(metaclass=ABCMeta): @abstractmethod def contents(self) -> Iterator[str]: ... -if sys.version_info >= (3, 9): - @runtime_checkable - class Traversable(Protocol): - @abstractmethod - def is_dir(self) -> bool: ... - @abstractmethod - def is_file(self) -> bool: ... - @abstractmethod - def iterdir(self) -> Iterator[Traversable]: ... - if sys.version_info >= (3, 11): - @abstractmethod - def joinpath(self, *descendants: str) -> Traversable: ... - else: - @abstractmethod - def joinpath(self, child: str, /) -> Traversable: ... - - # The documentation and runtime protocol allows *args, **kwargs arguments, - # but this would mean that all implementers would have to support them, - # which is not the case. - @overload - @abstractmethod - def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ... - @overload +@runtime_checkable +class Traversable(Protocol): + @abstractmethod + def is_dir(self) -> bool: ... + @abstractmethod + def is_file(self) -> bool: ... + @abstractmethod + def iterdir(self) -> Iterator[Traversable]: ... + if sys.version_info >= (3, 11): @abstractmethod - def open(self, mode: Literal["rb"]) -> IO[bytes]: ... - @property + def joinpath(self, *descendants: str) -> Traversable: ... + else: @abstractmethod - def name(self) -> str: ... - if sys.version_info >= (3, 10): - def __truediv__(self, child: str, /) -> Traversable: ... - else: - @abstractmethod - def __truediv__(self, child: str, /) -> Traversable: ... + def joinpath(self, child: str, /) -> Traversable: ... + # The documentation and runtime protocol allows *args, **kwargs arguments, + # but this would mean that all implementers would have to support them, + # which is not the case. + @overload + @abstractmethod + def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ... + @overload + @abstractmethod + def open(self, mode: Literal["rb"]) -> IO[bytes]: ... + @property + @abstractmethod + def name(self) -> str: ... + if sys.version_info >= (3, 10): + def __truediv__(self, child: str, /) -> Traversable: ... + else: @abstractmethod - def read_bytes(self) -> bytes: ... - @abstractmethod - def read_text(self, encoding: str | None = None) -> str: ... + def __truediv__(self, child: str, /) -> Traversable: ... - class TraversableResources(ResourceReader): - @abstractmethod - def files(self) -> Traversable: ... - def open_resource(self, resource: str) -> BufferedReader: ... - def resource_path(self, resource: Any) -> str: ... - def is_resource(self, path: str) -> bool: ... - def contents(self) -> Iterator[str]: ... + @abstractmethod + def read_bytes(self) -> bytes: ... + @abstractmethod + def read_text(self, encoding: str | None = None) -> str: ... + +class TraversableResources(ResourceReader): + @abstractmethod + def files(self) -> Traversable: ... + def open_resource(self, resource: str) -> BufferedReader: ... + def resource_path(self, resource: Any) -> str: ... + def is_resource(self, path: str) -> bool: ... + def contents(self) -> Iterator[str]: ... 
diff --git a/stdlib/importlib/metadata/__init__.pyi b/stdlib/importlib/metadata/__init__.pyi index 8ab7a0c4a9e8..15d8b50b09d2 100644 --- a/stdlib/importlib/metadata/__init__.pyi +++ b/stdlib/importlib/metadata/__init__.pyi @@ -71,11 +71,10 @@ class EntryPoint(_EntryPointBase): def load(self) -> Any: ... # Callable[[], Any] or an importable module @property def extras(self) -> list[str]: ... - if sys.version_info >= (3, 9): - @property - def module(self) -> str: ... - @property - def attr(self) -> str: ... + @property + def module(self) -> str: ... + @property + def attr(self) -> str: ... if sys.version_info >= (3, 10): dist: ClassVar[Distribution | None] def matches( diff --git a/stdlib/importlib/resources/__init__.pyi b/stdlib/importlib/resources/__init__.pyi index a30e6cdce5c6..88ce8f5cef48 100644 --- a/stdlib/importlib/resources/__init__.pyi +++ b/stdlib/importlib/resources/__init__.pyi @@ -2,6 +2,7 @@ import os import sys from collections.abc import Iterator from contextlib import AbstractContextManager +from importlib.abc import Traversable from pathlib import Path from types import ModuleType from typing import Any, BinaryIO, Literal, TextIO @@ -12,13 +13,18 @@ if sys.version_info >= (3, 11): else: Package: TypeAlias = str | ModuleType -if sys.version_info >= (3, 9): - from importlib.abc import Traversable - -__all__ = ["Package", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"] - -if sys.version_info >= (3, 9): - __all__ += ["as_file", "files"] +__all__ = [ + "Package", + "as_file", + "contents", + "files", + "is_resource", + "open_binary", + "open_text", + "path", + "read_binary", + "read_text", +] if sys.version_info >= (3, 10): __all__ += ["ResourceReader"] @@ -57,13 +63,12 @@ else: if sys.version_info >= (3, 11): from importlib.resources._common import as_file as as_file -elif sys.version_info >= (3, 9): +else: def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: ... if sys.version_info >= (3, 11): from importlib.resources._common import files as files - -elif sys.version_info >= (3, 9): +else: def files(package: Package) -> Traversable: ... if sys.version_info >= (3, 10): diff --git a/stdlib/inspect.pyi b/stdlib/inspect.pyi index 5bebe9bf4482..c525418c104b 100644 --- a/stdlib/inspect.pyi +++ b/stdlib/inspect.pyi @@ -616,8 +616,7 @@ class Attribute(NamedTuple): def classify_class_attrs(cls: type) -> list[Attribute]: ... -if sys.version_info >= (3, 9): - class ClassFoundException(Exception): ... +class ClassFoundException(Exception): ... if sys.version_info >= (3, 12): class BufferFlags(enum.IntFlag): diff --git a/stdlib/ipaddress.pyi b/stdlib/ipaddress.pyi index e8e81abc6f79..6883895fd219 100644 --- a/stdlib/ipaddress.pyi +++ b/stdlib/ipaddress.pyi @@ -36,9 +36,7 @@ class _BaseAddress(_IPAddressBase): def __hash__(self) -> int: ... def __int__(self) -> int: ... def __sub__(self, other: int) -> Self: ... - if sys.version_info >= (3, 9): - def __format__(self, fmt: str) -> str: ... - + def __format__(self, fmt: str) -> str: ... def __eq__(self, other: object) -> bool: ... def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): @@ -184,10 +182,8 @@ class IPv6Address(_BaseV6, _BaseAddress): def sixtofour(self) -> IPv4Address | None: ... @property def teredo(self) -> tuple[IPv4Address, IPv4Address] | None: ... - if sys.version_info >= (3, 9): - @property - def scope_id(self) -> str | None: ... - + @property + def scope_id(self) -> str | None: ... def __hash__(self) -> int: ... 
def __eq__(self, other: object) -> bool: ... diff --git a/stdlib/itertools.pyi b/stdlib/itertools.pyi index 55b0814ac5e0..d0085dd7224d 100644 --- a/stdlib/itertools.pyi +++ b/stdlib/itertools.pyi @@ -1,12 +1,10 @@ import sys from _typeshed import MaybeNone from collections.abc import Callable, Iterable, Iterator +from types import GenericAlias from typing import Any, Generic, Literal, SupportsComplex, SupportsFloat, SupportsIndex, SupportsInt, TypeVar, overload from typing_extensions import Self, TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - _T = TypeVar("_T") _S = TypeVar("_S") _N = TypeVar("_N", int, float, SupportsFloat, SupportsInt, SupportsIndex, SupportsComplex) @@ -68,8 +66,7 @@ class chain(Generic[_T]): @classmethod # We use type[Any] and not type[_S] to not lose the type inference from __iterable def from_iterable(cls: type[Any], iterable: Iterable[Iterable[_S]], /) -> chain[_S]: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class compress(Generic[_T]): def __new__(cls, data: Iterable[_T], selectors: Iterable[Any]) -> Self: ... diff --git a/stdlib/keyword.pyi b/stdlib/keyword.pyi index 960dfd2fa155..6b8bdad6beb6 100644 --- a/stdlib/keyword.pyi +++ b/stdlib/keyword.pyi @@ -1,11 +1,7 @@ -import sys from collections.abc import Sequence from typing import Final -if sys.version_info >= (3, 9): - __all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] -else: - __all__ = ["iskeyword", "kwlist"] +__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] def iskeyword(s: str, /) -> bool: ... @@ -13,9 +9,8 @@ def iskeyword(s: str, /) -> bool: ... # type it as a sequence kwlist: Final[Sequence[str]] -if sys.version_info >= (3, 9): - def issoftkeyword(s: str, /) -> bool: ... +def issoftkeyword(s: str, /) -> bool: ... - # a list at runtime, but you're not meant to mutate it; - # type it as a sequence - softkwlist: Final[Sequence[str]] +# a list at runtime, but you're not meant to mutate it; +# type it as a sequence +softkwlist: Final[Sequence[str]] diff --git a/stdlib/linecache.pyi b/stdlib/linecache.pyi index 2e050e13b621..5379a21e7d12 100644 --- a/stdlib/linecache.pyi +++ b/stdlib/linecache.pyi @@ -1,12 +1,8 @@ -import sys from collections.abc import Callable from typing import Any from typing_extensions import TypeAlias -if sys.version_info >= (3, 9): - __all__ = ["getline", "clearcache", "checkcache", "lazycache"] -else: - __all__ = ["getline", "clearcache", "checkcache"] +__all__ = ["getline", "clearcache", "checkcache", "lazycache"] _ModuleGlobals: TypeAlias = dict[str, Any] _ModuleMetadata: TypeAlias = tuple[int, float | None, list[str], str] diff --git a/stdlib/logging/__init__.pyi b/stdlib/logging/__init__.pyi index 9a4827a8f626..1d6edb0246d4 100644 --- a/stdlib/logging/__init__.pyi +++ b/stdlib/logging/__init__.pyi @@ -273,10 +273,7 @@ class Formatter: datefmt: str | None # undocumented _style: PercentStyle # undocumented default_time_format: str - if sys.version_info >= (3, 9): - default_msec_format: str | None - else: - default_msec_format: str + default_msec_format: str | None if sys.version_info >= (3, 10): def __init__( @@ -577,37 +574,20 @@ if sys.version_info >= (3, 11): def getLevelNamesMapping() -> dict[str, int]: ... def makeLogRecord(dict: Mapping[str, object]) -> LogRecord: ... 
- -if sys.version_info >= (3, 9): - def basicConfig( - *, - filename: StrPath | None = ..., - filemode: str = ..., - format: str = ..., - datefmt: str | None = ..., - style: _FormatStyle = ..., - level: _Level | None = ..., - stream: SupportsWrite[str] | None = ..., - handlers: Iterable[Handler] | None = ..., - force: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - ) -> None: ... - -else: - def basicConfig( - *, - filename: StrPath | None = ..., - filemode: str = ..., - format: str = ..., - datefmt: str | None = ..., - style: _FormatStyle = ..., - level: _Level | None = ..., - stream: SupportsWrite[str] | None = ..., - handlers: Iterable[Handler] | None = ..., - force: bool = ..., - ) -> None: ... - +def basicConfig( + *, + filename: StrPath | None = ..., + filemode: str = ..., + format: str = ..., + datefmt: str | None = ..., + style: _FormatStyle = ..., + level: _Level | None = ..., + stream: SupportsWrite[str] | None = ..., + handlers: Iterable[Handler] | None = ..., + force: bool | None = ..., + encoding: str | None = ..., + errors: str | None = ..., +) -> None: ... def shutdown(handlerList: Sequence[Any] = ...) -> None: ... # handlerList is undocumented def setLoggerClass(klass: type[Logger]) -> None: ... def captureWarnings(capture: bool) -> None: ... @@ -633,14 +613,10 @@ class FileHandler(StreamHandler[TextIOWrapper]): mode: str # undocumented encoding: str | None # undocumented delay: bool # undocumented - if sys.version_info >= (3, 9): - errors: str | None # undocumented - def __init__( - self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None - ) -> None: ... - else: - def __init__(self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False) -> None: ... - + errors: str | None # undocumented + def __init__( + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None + ) -> None: ... def _open(self) -> TextIOWrapper: ... # undocumented class NullHandler(Handler): ... diff --git a/stdlib/logging/handlers.pyi b/stdlib/logging/handlers.pyi index 7f913bd97fd7..2c7ec05afe9a 100644 --- a/stdlib/logging/handlers.pyi +++ b/stdlib/logging/handlers.pyi @@ -22,54 +22,34 @@ SYSLOG_TCP_PORT: Final[int] class WatchedFileHandler(FileHandler): dev: int # undocumented ino: int # undocumented - if sys.version_info >= (3, 9): - def __init__( - self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None - ) -> None: ... - else: - def __init__(self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False) -> None: ... - + def __init__( + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None + ) -> None: ... def _statstream(self) -> None: ... # undocumented def reopenIfNeeded(self) -> None: ... class BaseRotatingHandler(FileHandler): namer: Callable[[str], str] | None rotator: Callable[[str, str], None] | None - if sys.version_info >= (3, 9): - def __init__( - self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False, errors: str | None = None - ) -> None: ... - else: - def __init__(self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False) -> None: ... - + def __init__( + self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False, errors: str | None = None + ) -> None: ... 
def rotation_filename(self, default_name: str) -> str: ... def rotate(self, source: str, dest: str) -> None: ... class RotatingFileHandler(BaseRotatingHandler): maxBytes: int # undocumented backupCount: int # undocumented - if sys.version_info >= (3, 9): - def __init__( - self, - filename: StrPath, - mode: str = "a", - maxBytes: int = 0, - backupCount: int = 0, - encoding: str | None = None, - delay: bool = False, - errors: str | None = None, - ) -> None: ... - else: - def __init__( - self, - filename: StrPath, - mode: str = "a", - maxBytes: int = 0, - backupCount: int = 0, - encoding: str | None = None, - delay: bool = False, - ) -> None: ... - + def __init__( + self, + filename: StrPath, + mode: str = "a", + maxBytes: int = 0, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + errors: str | None = None, + ) -> None: ... def doRollover(self) -> None: ... def shouldRollover(self, record: LogRecord) -> int: ... # undocumented @@ -83,32 +63,18 @@ class TimedRotatingFileHandler(BaseRotatingHandler): dayOfWeek: int # undocumented rolloverAt: int # undocumented extMatch: Pattern[str] # undocumented - if sys.version_info >= (3, 9): - def __init__( - self, - filename: StrPath, - when: str = "h", - interval: int = 1, - backupCount: int = 0, - encoding: str | None = None, - delay: bool = False, - utc: bool = False, - atTime: datetime.time | None = None, - errors: str | None = None, - ) -> None: ... - else: - def __init__( - self, - filename: StrPath, - when: str = "h", - interval: int = 1, - backupCount: int = 0, - encoding: str | None = None, - delay: bool = False, - utc: bool = False, - atTime: datetime.time | None = None, - ) -> None: ... - + def __init__( + self, + filename: StrPath, + when: str = "h", + interval: int = 1, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + utc: bool = False, + atTime: datetime.time | None = None, + errors: str | None = None, + ) -> None: ... def doRollover(self) -> None: ... def shouldRollover(self, record: LogRecord) -> int: ... # undocumented def computeRollover(self, currentTime: int) -> int: ... # undocumented @@ -155,13 +121,10 @@ class SysLogHandler(Handler): LOG_CRON: int LOG_AUTHPRIV: int LOG_FTP: int - - if sys.version_info >= (3, 9): - LOG_NTP: int - LOG_SECURITY: int - LOG_CONSOLE: int - LOG_SOLCRON: int - + LOG_NTP: int + LOG_SECURITY: int + LOG_CONSOLE: int + LOG_SOLCRON: int LOG_LOCAL0: int LOG_LOCAL1: int LOG_LOCAL2: int @@ -248,8 +211,7 @@ class HTTPHandler(Handler): context: ssl.SSLContext | None = None, ) -> None: ... def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: ... - if sys.version_info >= (3, 9): - def getConnection(self, host: str, secure: bool) -> http.client.HTTPConnection: ... # undocumented + def getConnection(self, host: str, secure: bool) -> http.client.HTTPConnection: ... # undocumented class _QueueLike(Protocol[_T]): def get(self) -> _T: ... 
diff --git a/stdlib/mailbox.pyi b/stdlib/mailbox.pyi index a98a00a42853..dc2fbd593d67 100644 --- a/stdlib/mailbox.pyi +++ b/stdlib/mailbox.pyi @@ -4,13 +4,10 @@ import sys from _typeshed import StrPath, SupportsNoArgReadline, SupportsRead from abc import ABCMeta, abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence -from types import TracebackType +from types import GenericAlias, TracebackType from typing import IO, Any, AnyStr, Generic, Literal, Protocol, TypeVar, overload from typing_extensions import Self, TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "Mailbox", "Maildir", @@ -101,8 +98,7 @@ class Mailbox(Generic[_MessageT]): def unlock(self) -> None: ... @abstractmethod def close(self) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class Maildir(Mailbox[MaildirMessage]): colon: str @@ -251,8 +247,7 @@ class _ProxyFile(Generic[AnyStr]): def flush(self) -> None: ... @property def closed(self) -> bool: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class _PartialFile(_ProxyFile[AnyStr]): def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: ... diff --git a/stdlib/math.pyi b/stdlib/math.pyi index f73429cf6940..9e77f0cd7e06 100644 --- a/stdlib/math.pyi +++ b/stdlib/math.pyi @@ -61,13 +61,7 @@ def fmod(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... def frexp(x: _SupportsFloatOrIndex, /) -> tuple[float, int]: ... def fsum(seq: Iterable[_SupportsFloatOrIndex], /) -> float: ... def gamma(x: _SupportsFloatOrIndex, /) -> float: ... - -if sys.version_info >= (3, 9): - def gcd(*integers: SupportsIndex) -> int: ... - -else: - def gcd(x: SupportsIndex, y: SupportsIndex, /) -> int: ... - +def gcd(*integers: SupportsIndex) -> int: ... def hypot(*coordinates: _SupportsFloatOrIndex) -> float: ... def isclose( a: _SupportsFloatOrIndex, @@ -80,10 +74,7 @@ def isinf(x: _SupportsFloatOrIndex, /) -> bool: ... def isfinite(x: _SupportsFloatOrIndex, /) -> bool: ... def isnan(x: _SupportsFloatOrIndex, /) -> bool: ... def isqrt(n: SupportsIndex, /) -> int: ... - -if sys.version_info >= (3, 9): - def lcm(*integers: SupportsIndex) -> int: ... - +def lcm(*integers: SupportsIndex) -> int: ... def ldexp(x: _SupportsFloatOrIndex, i: int, /) -> float: ... def lgamma(x: _SupportsFloatOrIndex, /) -> float: ... def log(x: _SupportsFloatOrIndex, base: _SupportsFloatOrIndex = ...) -> float: ... @@ -95,7 +86,7 @@ def modf(x: _SupportsFloatOrIndex, /) -> tuple[float, float]: ... if sys.version_info >= (3, 12): def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /, *, steps: SupportsIndex | None = None) -> float: ... -elif sys.version_info >= (3, 9): +else: def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... def perm(n: SupportsIndex, k: SupportsIndex | None = None, /) -> int: ... @@ -140,9 +131,7 @@ class _SupportsTrunc(Protocol[_T_co]): def __trunc__(self) -> _T_co: ... def trunc(x: _SupportsTrunc[_T], /) -> _T: ... - -if sys.version_info >= (3, 9): - def ulp(x: _SupportsFloatOrIndex, /) -> float: ... +def ulp(x: _SupportsFloatOrIndex, /) -> float: ... if sys.version_info >= (3, 13): def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: ... 
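
The math stubs above now describe the 3.9+ API directly: gcd() is variadic, and lcm(), ulp() and nextafter() are always present. A short sketch of those signatures:

    import math

    math.gcd(12, 18, 24)      # variadic on 3.9+ -> 6
    math.lcm(4, 6, 10)        # -> 60
    math.ulp(1.0)             # spacing between 1.0 and the next representable float
    math.nextafter(1.0, 2.0)  # two-argument form; the steps= keyword is the 3.12 branch above
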
diff --git a/stdlib/multiprocessing/managers.pyi b/stdlib/multiprocessing/managers.pyi index a5190e2fcb98..37584b515909 100644 --- a/stdlib/multiprocessing/managers.pyi +++ b/stdlib/multiprocessing/managers.pyi @@ -13,7 +13,7 @@ from collections.abc import ( Sequence, Set as AbstractSet, ) -from types import TracebackType +from types import GenericAlias, TracebackType from typing import Any, AnyStr, ClassVar, Generic, SupportsIndex, TypeVar, overload from typing_extensions import Self, TypeAlias @@ -25,9 +25,6 @@ from .util import Finalize as _Finalize __all__ = ["BaseManager", "SyncManager", "BaseProxy", "Token", "SharedMemoryManager"] -if sys.version_info >= (3, 9): - from types import GenericAlias - _T = TypeVar("_T") _KT = TypeVar("_KT") _VT = TypeVar("_VT") @@ -70,8 +67,7 @@ class ValueProxy(BaseProxy, Generic[_T]): def get(self) -> _T: ... def set(self, value: _T) -> None: ... value: _T - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 13): class _BaseDictProxy(BaseProxy, MutableMapping[_KT, _VT]): diff --git a/stdlib/multiprocessing/pool.pyi b/stdlib/multiprocessing/pool.pyi index 2937d45e3b03..b79f9e77359a 100644 --- a/stdlib/multiprocessing/pool.pyi +++ b/stdlib/multiprocessing/pool.pyi @@ -1,13 +1,9 @@ -import sys from collections.abc import Callable, Iterable, Mapping from multiprocessing.context import DefaultContext, Process -from types import TracebackType +from types import GenericAlias, TracebackType from typing import Any, Final, Generic, TypeVar from typing_extensions import Self -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = ["Pool", "ThreadPool"] _S = TypeVar("_S") @@ -21,8 +17,7 @@ class ApplyResult(Generic[_T]): def wait(self, timeout: float | None = None) -> None: ... def ready(self) -> bool: ... def successful(self) -> bool: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # alias created during issue #17805 AsyncResult = ApplyResult diff --git a/stdlib/multiprocessing/queues.pyi b/stdlib/multiprocessing/queues.pyi index 581a46ea0bc8..a6b00d744c42 100644 --- a/stdlib/multiprocessing/queues.pyi +++ b/stdlib/multiprocessing/queues.pyi @@ -1,9 +1,7 @@ import sys +from types import GenericAlias from typing import Any, Generic, TypeVar -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = ["Queue", "SimpleQueue", "JoinableQueue"] _T = TypeVar("_T") @@ -31,11 +29,8 @@ class JoinableQueue(Queue[_T]): class SimpleQueue(Generic[_T]): def __init__(self, *, ctx: Any = ...) -> None: ... - if sys.version_info >= (3, 9): - def close(self) -> None: ... - + def close(self) -> None: ... def empty(self) -> bool: ... def get(self) -> _T: ... def put(self, obj: _T) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
diff --git a/stdlib/multiprocessing/shared_memory.pyi b/stdlib/multiprocessing/shared_memory.pyi index b63cedf85867..1a12812c27e4 100644 --- a/stdlib/multiprocessing/shared_memory.pyi +++ b/stdlib/multiprocessing/shared_memory.pyi @@ -1,11 +1,9 @@ import sys from collections.abc import Iterable +from types import GenericAlias from typing import Any, Generic, TypeVar, overload from typing_extensions import Self -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = ["SharedMemory", "ShareableList"] _SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) @@ -40,5 +38,4 @@ class ShareableList(Generic[_SLT]): def format(self) -> str: ... def count(self, value: _SLT) -> int: ... def index(self, value: _SLT) -> int: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/stdlib/nntplib.pyi b/stdlib/nntplib.pyi index 85dfbff1cb50..1fb1e79f69a1 100644 --- a/stdlib/nntplib.pyi +++ b/stdlib/nntplib.pyi @@ -1,7 +1,6 @@ import datetime import socket import ssl -import sys from _typeshed import Unused from builtins import list as _list # conflicts with a method named "list" from collections.abc import Iterable @@ -98,10 +97,6 @@ class NNTP: def over( self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = None ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... - if sys.version_info < (3, 9): - def xgtitle(self, group: str, *, file: _File = None) -> tuple[str, _list[tuple[str, str]]]: ... - def xpath(self, id: Any) -> tuple[str, str]: ... - def date(self) -> tuple[str, datetime.datetime]: ... def post(self, data: bytes | Iterable[bytes]) -> str: ... def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: ... 
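
ShareableList above keeps its __class_getitem__ without a version gate, so it can be parameterized for typing on 3.9+. A small runtime sketch; the .shm cleanup calls rely on the documented SharedMemory attribute, which sits outside the hunk shown:

    from multiprocessing import shared_memory

    sl = shared_memory.ShareableList([1, 2.5, "three", None])  # _SLT covers int/float/bool/str/bytes/None
    print(sl[2], sl.format)      # indexable; format describes the packed layout
    sl.shm.close()
    sl.shm.unlink()              # release the backing shared-memory block
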
diff --git a/stdlib/nt.pyi b/stdlib/nt.pyi index e1d57d09a9bd..3ed8f8af379b 100644 --- a/stdlib/nt.pyi +++ b/stdlib/nt.pyi @@ -89,14 +89,14 @@ if sys.platform == "win32": umask as umask, uname_result as uname_result, unlink as unlink, + unsetenv as unsetenv, urandom as urandom, utime as utime, waitpid as waitpid, + waitstatus_to_exitcode as waitstatus_to_exitcode, write as write, ) - if sys.version_info >= (3, 9): - from os import unsetenv as unsetenv, waitstatus_to_exitcode as waitstatus_to_exitcode if sys.version_info >= (3, 11): from os import EX_OK as EX_OK if sys.version_info >= (3, 12): diff --git a/stdlib/opcode.pyi b/stdlib/opcode.pyi index f9f76962f876..a5a3a79c323b 100644 --- a/stdlib/opcode.pyi +++ b/stdlib/opcode.pyi @@ -23,23 +23,7 @@ else: if sys.version_info >= (3, 13): __all__ += ["hasjump"] -if sys.version_info >= (3, 9): - cmp_op: tuple[Literal["<"], Literal["<="], Literal["=="], Literal["!="], Literal[">"], Literal[">="]] -else: - cmp_op: tuple[ - Literal["<"], - Literal["<="], - Literal["=="], - Literal["!="], - Literal[">"], - Literal[">="], - Literal["in"], - Literal["not in"], - Literal["is"], - Literal["is not"], - Literal["exception match"], - Literal["BAD"], - ] +cmp_op: tuple[Literal["<"], Literal["<="], Literal["=="], Literal["!="], Literal[">"], Literal[">="]] hasconst: list[int] hasname: list[int] hasjrel: list[int] diff --git a/stdlib/pathlib.pyi b/stdlib/pathlib.pyi index a18aed4ba57a..1e4d97770b7b 100644 --- a/stdlib/pathlib.pyi +++ b/stdlib/pathlib.pyi @@ -14,13 +14,10 @@ from _typeshed import ( from collections.abc import Callable, Generator, Iterator, Sequence from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from os import PathLike, stat_result -from types import TracebackType +from types import GenericAlias, TracebackType from typing import IO, Any, BinaryIO, ClassVar, Literal, overload from typing_extensions import Never, Self, deprecated -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = ["PurePath", "PurePosixPath", "PureWindowsPath", "Path", "PosixPath", "WindowsPath"] if sys.version_info >= (3, 13): @@ -68,7 +65,7 @@ class PurePath(PathLike[str]): def is_reserved(self) -> bool: ... if sys.version_info >= (3, 12): def is_relative_to(self, other: StrPath, /, *_deprecated: StrPath) -> bool: ... - elif sys.version_info >= (3, 9): + else: def is_relative_to(self, *other: StrPath) -> bool: ... if sys.version_info >= (3, 12): @@ -82,16 +79,14 @@ class PurePath(PathLike[str]): def relative_to(self, *other: StrPath) -> Self: ... def with_name(self, name: str) -> Self: ... - if sys.version_info >= (3, 9): - def with_stem(self, stem: str) -> Self: ... - + def with_stem(self, stem: str) -> Self: ... def with_suffix(self, suffix: str) -> Self: ... def joinpath(self, *other: StrPath) -> Self: ... @property def parents(self) -> Sequence[Self]: ... @property def parent(self) -> Self: ... - if sys.version_info >= (3, 9) and sys.version_info < (3, 11): + if sys.version_info < (3, 11): def __class_getitem__(cls, type: Any) -> GenericAlias: ... if sys.version_info >= (3, 12): @@ -247,8 +242,7 @@ class Path(PurePath): else: def is_mount(self) -> bool: ... - if sys.version_info >= (3, 9): - def readlink(self) -> Self: ... + def readlink(self) -> Self: ... if sys.version_info >= (3, 10): def rename(self, target: StrPath) -> Self: ... 
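
The pathlib diff above makes with_stem(), readlink() and the single-argument is_relative_to() unconditional. A minimal sketch of those methods; the paths are illustrative, and readlink() is shown only in a comment because it needs an existing symlink:

    from pathlib import Path, PurePath

    PurePath("report.txt").with_stem("summary")  # -> PurePosixPath('summary.txt') on POSIX
    Path("a/b/c").is_relative_to("a")            # -> True (the *-deprecated variant is the 3.12 branch)
    # Path("existing_symlink").readlink()        # returns the link target as a new Path
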
diff --git a/stdlib/pkgutil.pyi b/stdlib/pkgutil.pyi index 59d70779c72f..d60e9bad53ae 100644 --- a/stdlib/pkgutil.pyi +++ b/stdlib/pkgutil.pyi @@ -48,6 +48,4 @@ def walk_packages( path: Iterable[StrOrBytesPath] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None ) -> Iterator[ModuleInfo]: ... def get_data(package: str, resource: str) -> bytes | None: ... - -if sys.version_info >= (3, 9): - def resolve_name(name: str) -> Any: ... +def resolve_name(name: str) -> Any: ... diff --git a/stdlib/platform.pyi b/stdlib/platform.pyi index 73393eada02c..19fac26134eb 100644 --- a/stdlib/platform.pyi +++ b/stdlib/platform.pyi @@ -15,40 +15,29 @@ def java_ver( def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: ... def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: ... -if sys.version_info >= (3, 9): - # This class is not exposed. It calls itself platform.uname_result_base. - # At runtime it only has 5 fields. - @type_check_only - class _uname_result_base(NamedTuple): - system: str - node: str - release: str - version: str - machine: str - # This base class doesn't have this field at runtime, but claiming it - # does is the least bad way to handle the situation. Nobody really - # sees this class anyway. See #13068 - processor: str - - # uname_result emulates a 6-field named tuple, but the processor field - # is lazily evaluated rather than being passed in to the constructor. - class uname_result(_uname_result_base): - if sys.version_info >= (3, 10): - __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] +# This class is not exposed. It calls itself platform.uname_result_base. +# At runtime it only has 5 fields. +@type_check_only +class _uname_result_base(NamedTuple): + system: str + node: str + release: str + version: str + machine: str + # This base class doesn't have this field at runtime, but claiming it + # does is the least bad way to handle the situation. Nobody really + # sees this class anyway. See #13068 + processor: str - def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: ... - @property - def processor(self) -> str: ... +# uname_result emulates a 6-field named tuple, but the processor field +# is lazily evaluated rather than being passed in to the constructor. +class uname_result(_uname_result_base): + if sys.version_info >= (3, 10): + __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] -else: - # On 3.8, uname_result is actually just a regular NamedTuple. - class uname_result(NamedTuple): - system: str - node: str - release: str - version: str - machine: str - processor: str + def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: ... + @property + def processor(self) -> str: ... def uname() -> uname_result: ... def system() -> str: ... 
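
uname_result above is modelled as a 5-field named tuple plus a lazily evaluated processor property; a brief sketch of how that surfaces at runtime:

    import platform

    info = platform.uname()                    # behaves like a 6-field named tuple
    print(info.system, info.release, info.machine)
    print(info.processor)                      # computed on first access via a property, per the comment above
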
diff --git a/stdlib/plistlib.pyi b/stdlib/plistlib.pyi index 72b5398f0a52..8b39b4217eae 100644 --- a/stdlib/plistlib.pyi +++ b/stdlib/plistlib.pyi @@ -3,12 +3,10 @@ from _typeshed import ReadableBuffer from collections.abc import Mapping, MutableMapping from datetime import datetime from enum import Enum -from typing import IO, Any, ClassVar +from typing import IO, Any from typing_extensions import Self __all__ = ["InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"] -if sys.version_info < (3, 9): - __all__ += ["readPlist", "writePlist", "readPlistFromBytes", "writePlistToBytes", "Data"] class PlistFormat(Enum): FMT_XML = 1 @@ -32,28 +30,12 @@ if sys.version_info >= (3, 13): aware_datetime: bool = False, ) -> Any: ... -elif sys.version_info >= (3, 9): +else: def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... def loads( value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... ) -> Any: ... -else: - def load( - fp: IO[bytes], - *, - fmt: PlistFormat | None = None, - use_builtin_types: bool = True, - dict_type: type[MutableMapping[str, Any]] = ..., - ) -> Any: ... - def loads( - value: ReadableBuffer, - *, - fmt: PlistFormat | None = None, - use_builtin_types: bool = True, - dict_type: type[MutableMapping[str, Any]] = ..., - ) -> Any: ... - if sys.version_info >= (3, 13): def dump( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, @@ -90,18 +72,6 @@ else: sort_keys: bool = True, ) -> bytes: ... -if sys.version_info < (3, 9): - def readPlist(pathOrFile: str | IO[bytes]) -> Any: ... - def writePlist(value: Mapping[str, Any], pathOrFile: str | IO[bytes]) -> None: ... - def readPlistFromBytes(data: ReadableBuffer) -> Any: ... - def writePlistToBytes(value: Mapping[str, Any]) -> bytes: ... - -if sys.version_info < (3, 9): - class Data: - data: bytes - def __init__(self, data: bytes) -> None: ... - __hash__: ClassVar[None] # type: ignore[assignment] - class UID: data: int def __init__(self, data: int) -> None: ... 
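
With the legacy readPlist/Data API gone from the plistlib stub above, load()/loads() keep only the fmt= and dict_type= keywords on the pre-3.13 branch. A round-trip sketch:

    import plistlib

    payload = plistlib.dumps({"name": "demo", "count": 3}, fmt=plistlib.FMT_BINARY)
    restored = plistlib.loads(payload)   # fmt=None auto-detects XML vs. binary input
    assert restored == {"name": "demo", "count": 3}
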
diff --git a/stdlib/posix.pyi b/stdlib/posix.pyi index e7223842ace5..88f4135af2a7 100644 --- a/stdlib/posix.pyi +++ b/stdlib/posix.pyi @@ -6,6 +6,8 @@ if sys.platform != "win32": CLD_CONTINUED as CLD_CONTINUED, CLD_DUMPED as CLD_DUMPED, CLD_EXITED as CLD_EXITED, + CLD_KILLED as CLD_KILLED, + CLD_STOPPED as CLD_STOPPED, CLD_TRAPPED as CLD_TRAPPED, EX_CANTCREAT as EX_CANTCREAT, EX_CONFIG as EX_CONFIG, @@ -220,13 +222,11 @@ if sys.platform != "win32": wait3 as wait3, wait4 as wait4, waitpid as waitpid, + waitstatus_to_exitcode as waitstatus_to_exitcode, write as write, writev as writev, ) - if sys.version_info >= (3, 9): - from os import CLD_KILLED as CLD_KILLED, CLD_STOPPED as CLD_STOPPED, waitstatus_to_exitcode as waitstatus_to_exitcode - if sys.version_info >= (3, 10): from os import O_FSYNC as O_FSYNC @@ -330,6 +330,7 @@ if sys.platform != "win32": O_PATH as O_PATH, O_RSYNC as O_RSYNC, O_TMPFILE as O_TMPFILE, + P_PIDFD as P_PIDFD, RTLD_DEEPBIND as RTLD_DEEPBIND, SCHED_BATCH as SCHED_BATCH, SCHED_IDLE as SCHED_IDLE, @@ -342,13 +343,11 @@ if sys.platform != "win32": getxattr as getxattr, listxattr as listxattr, memfd_create as memfd_create, + pidfd_open as pidfd_open, removexattr as removexattr, setxattr as setxattr, ) - if sys.version_info >= (3, 9): - from os import P_PIDFD as P_PIDFD, pidfd_open as pidfd_open - if sys.version_info >= (3, 10): from os import ( EFD_CLOEXEC as EFD_CLOEXEC, diff --git a/stdlib/pstats.pyi b/stdlib/pstats.pyi index d41fa202cf77..c4dee1f6b8f6 100644 --- a/stdlib/pstats.pyi +++ b/stdlib/pstats.pyi @@ -2,6 +2,7 @@ import sys from _typeshed import StrOrBytesPath from collections.abc import Iterable from cProfile import Profile as _cProfile +from dataclasses import dataclass from profile import Profile from typing import IO, Any, Literal, overload from typing_extensions import Self, TypeAlias @@ -11,10 +12,7 @@ if sys.version_info >= (3, 11): else: from enum import Enum -if sys.version_info >= (3, 9): - __all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"] -else: - __all__ = ["Stats", "SortKey"] +__all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"] _Selector: TypeAlias = str | float | int @@ -42,23 +40,20 @@ else: STDNAME = "stdname" TIME = "time" -if sys.version_info >= (3, 9): - from dataclasses import dataclass - - @dataclass(unsafe_hash=True) - class FunctionProfile: - ncalls: str - tottime: float - percall_tottime: float - cumtime: float - percall_cumtime: float - file_name: str - line_number: int +@dataclass(unsafe_hash=True) +class FunctionProfile: + ncalls: str + tottime: float + percall_tottime: float + cumtime: float + percall_cumtime: float + file_name: str + line_number: int - @dataclass(unsafe_hash=True) - class StatsProfile: - total_tt: float - func_profiles: dict[str, FunctionProfile] +@dataclass(unsafe_hash=True) +class StatsProfile: + total_tt: float + func_profiles: dict[str, FunctionProfile] _SortArgDict: TypeAlias = dict[str, tuple[tuple[tuple[int, int], ...], str]] @@ -85,9 +80,7 @@ class Stats: def strip_dirs(self) -> Self: ... def calc_callees(self) -> None: ... def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> tuple[list[str], str]: ... - if sys.version_info >= (3, 9): - def get_stats_profile(self) -> StatsProfile: ... - + def get_stats_profile(self) -> StatsProfile: ... def get_print_list(self, sel_list: Iterable[_Selector]) -> tuple[int, list[str]]: ... def print_stats(self, *amount: _Selector) -> Self: ... def print_callees(self, *amount: _Selector) -> Self: ... 
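
Stats.get_stats_profile() and the two dataclasses above are now unconditional. A small sketch of the returned structure, assuming cProfile as the profiler:

    import cProfile
    import pstats

    with cProfile.Profile() as pr:           # Profile has been a context manager since 3.8
        sum(range(1000))

    profile = pstats.Stats(pr).get_stats_profile()
    print(profile.total_tt)                  # StatsProfile.total_tt
    for name, fp in list(profile.func_profiles.items())[:3]:
        print(name, fp.ncalls, fp.cumtime)   # each value is a FunctionProfile
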
diff --git a/stdlib/queue.pyi b/stdlib/queue.pyi index 4fb49cb6102b..f5d9179e079d 100644 --- a/stdlib/queue.pyi +++ b/stdlib/queue.pyi @@ -1,11 +1,9 @@ import sys from _queue import Empty as Empty, SimpleQueue as SimpleQueue from threading import Condition, Lock +from types import GenericAlias from typing import Any, Generic, TypeVar -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = ["Empty", "Full", "Queue", "PriorityQueue", "LifoQueue", "SimpleQueue"] if sys.version_info >= (3, 13): __all__ += ["ShutDown"] @@ -47,8 +45,7 @@ class Queue(Generic[_T]): def qsize(self) -> int: ... def _qsize(self) -> int: ... def task_done(self) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class PriorityQueue(Queue[_T]): queue: list[_T] diff --git a/stdlib/random.pyi b/stdlib/random.pyi index e7320369c377..83e37113a941 100644 --- a/stdlib/random.pyi +++ b/stdlib/random.pyi @@ -30,10 +30,9 @@ __all__ = [ "getrandbits", "choices", "SystemRandom", + "randbytes", ] -if sys.version_info >= (3, 9): - __all__ += ["randbytes"] if sys.version_info >= (3, 12): __all__ += ["binomialvariate"] @@ -41,25 +40,16 @@ _T = TypeVar("_T") class Random(_random.Random): VERSION: ClassVar[int] - if sys.version_info >= (3, 9): - def __init__(self, x: int | float | str | bytes | bytearray | None = None) -> None: ... # noqa: Y041 - else: - def __init__(self, x: Any = None) -> None: ... + def __init__(self, x: int | float | str | bytes | bytearray | None = None) -> None: ... # noqa: Y041 # Using other `seed` types is deprecated since 3.9 and removed in 3.11 # Ignore Y041, since random.seed doesn't treat int like a float subtype. Having an explicit # int better documents conventional usage of random.seed. - if sys.version_info >= (3, 9): - def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: ... # type: ignore[override] # noqa: Y041 - else: - def seed(self, a: Any = None, version: int = 2) -> None: ... - + def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: ... # type: ignore[override] # noqa: Y041 def getstate(self) -> tuple[Any, ...]: ... def setstate(self, state: tuple[Any, ...]) -> None: ... def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: ... def randint(self, a: int, b: int) -> int: ... - if sys.version_info >= (3, 9): - def randbytes(self, n: int) -> bytes: ... - + def randbytes(self, n: int) -> bytes: ... def choice(self, seq: SupportsLenAndGetItem[_T]) -> _T: ... def choices( self, @@ -75,12 +65,10 @@ class Random(_random.Random): def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: ... if sys.version_info >= (3, 11): def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: ... - elif sys.version_info >= (3, 9): + else: def sample( self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None ) -> list[_T]: ... - else: - def sample(self, population: Sequence[_T] | AbstractSet[_T], k: int) -> list[_T]: ... def uniform(self, a: float, b: float) -> float: ... def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: ... 
@@ -137,5 +125,4 @@ weibullvariate = _inst.weibullvariate getstate = _inst.getstate setstate = _inst.setstate getrandbits = _inst.getrandbits -if sys.version_info >= (3, 9): - randbytes = _inst.randbytes +randbytes = _inst.randbytes diff --git a/stdlib/re.pyi b/stdlib/re.pyi index fccdedae9436..f25a0a376704 100644 --- a/stdlib/re.pyi +++ b/stdlib/re.pyi @@ -4,12 +4,10 @@ import sre_constants import sys from _typeshed import MaybeNone, ReadableBuffer from collections.abc import Callable, Iterator, Mapping +from types import GenericAlias from typing import Any, AnyStr, Final, Generic, Literal, TypeVar, final, overload from typing_extensions import TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "match", "fullmatch", @@ -117,8 +115,7 @@ class Match(Generic[AnyStr]): def __getitem__(self, key: int | str, /) -> AnyStr | MaybeNone: ... def __copy__(self) -> Match[AnyStr]: ... def __deepcopy__(self, memo: Any, /) -> Match[AnyStr]: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @final class Pattern(Generic[AnyStr]): @@ -197,8 +194,7 @@ class Pattern(Generic[AnyStr]): def __deepcopy__(self, memo: Any, /) -> Pattern[AnyStr]: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # ----- re variables and constants ----- diff --git a/stdlib/signal.pyi b/stdlib/signal.pyi index 8fc853b25cc1..d50565d1c8ac 100644 --- a/stdlib/signal.pyi +++ b/stdlib/signal.pyi @@ -183,6 +183,5 @@ def valid_signals() -> set[Signals]: ... def raise_signal(signalnum: _SIGNUM, /) -> None: ... def set_wakeup_fd(fd: int, /, *, warn_on_full_buffer: bool = ...) -> int: ... -if sys.version_info >= (3, 9): - if sys.platform == "linux": - def pidfd_send_signal(pidfd: int, sig: int, siginfo: None = None, flags: int = ..., /) -> None: ... +if sys.platform == "linux": + def pidfd_send_signal(pidfd: int, sig: int, siginfo: None = None, flags: int = ..., /) -> None: ... diff --git a/stdlib/smtplib.pyi b/stdlib/smtplib.pyi index a762427bcab3..609b3e6426c4 100644 --- a/stdlib/smtplib.pyi +++ b/stdlib/smtplib.pyi @@ -185,20 +185,11 @@ class SMTP_SSL(SMTP): LMTP_PORT: int class LMTP(SMTP): - if sys.version_info >= (3, 9): - def __init__( - self, - host: str = "", - port: int = 2003, - local_hostname: str | None = None, - source_address: _SourceAddress | None = None, - timeout: float = ..., - ) -> None: ... - else: - def __init__( - self, - host: str = "", - port: int = 2003, - local_hostname: str | None = None, - source_address: _SourceAddress | None = None, - ) -> None: ... + def __init__( + self, + host: str = "", + port: int = 2003, + local_hostname: str | None = None, + source_address: _SourceAddress | None = None, + timeout: float = ..., + ) -> None: ... 
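
The random stubs above treat randbytes(), the restricted seed types and the counts= keyword of sample() as baseline features. A brief sketch:

    import random

    random.randbytes(8)                                 # module-level alias is exported unconditionally
    rng = random.Random(b"seed-bytes")                  # seeds are int/float/str/bytes/bytearray/None
    rng.sample(range(5), k=3, counts=[2, 1, 1, 1, 1])   # counts= weights the population entries
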
diff --git a/stdlib/socket.pyi b/stdlib/socket.pyi index 1c996ac32278..680c6475a3b7 100644 --- a/stdlib/socket.pyi +++ b/stdlib/socket.pyi @@ -53,12 +53,18 @@ from _socket import ( IPPROTO_TCP as IPPROTO_TCP, IPPROTO_UDP as IPPROTO_UDP, IPV6_CHECKSUM as IPV6_CHECKSUM, + IPV6_DONTFRAG as IPV6_DONTFRAG, + IPV6_HOPLIMIT as IPV6_HOPLIMIT, + IPV6_HOPOPTS as IPV6_HOPOPTS, IPV6_JOIN_GROUP as IPV6_JOIN_GROUP, IPV6_LEAVE_GROUP as IPV6_LEAVE_GROUP, IPV6_MULTICAST_HOPS as IPV6_MULTICAST_HOPS, IPV6_MULTICAST_IF as IPV6_MULTICAST_IF, IPV6_MULTICAST_LOOP as IPV6_MULTICAST_LOOP, + IPV6_PKTINFO as IPV6_PKTINFO, + IPV6_RECVRTHDR as IPV6_RECVRTHDR, IPV6_RECVTCLASS as IPV6_RECVTCLASS, + IPV6_RTHDR as IPV6_RTHDR, IPV6_TCLASS as IPV6_TCLASS, IPV6_UNICAST_HOPS as IPV6_UNICAST_HOPS, IPV6_V6ONLY as IPV6_V6ONLY, @@ -195,12 +201,18 @@ __all__ = [ "IPPROTO_TCP", "IPPROTO_UDP", "IPV6_CHECKSUM", + "IPV6_DONTFRAG", + "IPV6_HOPLIMIT", + "IPV6_HOPOPTS", "IPV6_JOIN_GROUP", "IPV6_LEAVE_GROUP", "IPV6_MULTICAST_HOPS", "IPV6_MULTICAST_IF", "IPV6_MULTICAST_LOOP", + "IPV6_PKTINFO", + "IPV6_RECVRTHDR", "IPV6_RECVTCLASS", + "IPV6_RTHDR", "IPV6_TCLASS", "IPV6_UNICAST_HOPS", "IPV6_V6ONLY", @@ -335,18 +347,6 @@ if sys.platform == "win32": "MSG_MCAST", ] -if sys.platform != "darwin" or sys.version_info >= (3, 9): - from _socket import ( - IPV6_DONTFRAG as IPV6_DONTFRAG, - IPV6_HOPLIMIT as IPV6_HOPLIMIT, - IPV6_HOPOPTS as IPV6_HOPOPTS, - IPV6_PKTINFO as IPV6_PKTINFO, - IPV6_RECVRTHDR as IPV6_RECVRTHDR, - IPV6_RTHDR as IPV6_RTHDR, - ) - - __all__ += ["IPV6_DONTFRAG", "IPV6_HOPLIMIT", "IPV6_HOPOPTS", "IPV6_PKTINFO", "IPV6_RECVRTHDR", "IPV6_RTHDR"] - if sys.platform == "darwin": from _socket import PF_SYSTEM as PF_SYSTEM, SYSPROTO_CONTROL as SYSPROTO_CONTROL @@ -490,30 +490,29 @@ if sys.platform != "win32": "MSG_NOSIGNAL", ] - if sys.platform != "darwin" or sys.version_info >= (3, 9): - from _socket import ( - IPV6_DSTOPTS as IPV6_DSTOPTS, - IPV6_NEXTHOP as IPV6_NEXTHOP, - IPV6_PATHMTU as IPV6_PATHMTU, - IPV6_RECVDSTOPTS as IPV6_RECVDSTOPTS, - IPV6_RECVHOPLIMIT as IPV6_RECVHOPLIMIT, - IPV6_RECVHOPOPTS as IPV6_RECVHOPOPTS, - IPV6_RECVPATHMTU as IPV6_RECVPATHMTU, - IPV6_RECVPKTINFO as IPV6_RECVPKTINFO, - IPV6_RTHDRDSTOPTS as IPV6_RTHDRDSTOPTS, - ) + from _socket import ( + IPV6_DSTOPTS as IPV6_DSTOPTS, + IPV6_NEXTHOP as IPV6_NEXTHOP, + IPV6_PATHMTU as IPV6_PATHMTU, + IPV6_RECVDSTOPTS as IPV6_RECVDSTOPTS, + IPV6_RECVHOPLIMIT as IPV6_RECVHOPLIMIT, + IPV6_RECVHOPOPTS as IPV6_RECVHOPOPTS, + IPV6_RECVPATHMTU as IPV6_RECVPATHMTU, + IPV6_RECVPKTINFO as IPV6_RECVPKTINFO, + IPV6_RTHDRDSTOPTS as IPV6_RTHDRDSTOPTS, + ) - __all__ += [ - "IPV6_DSTOPTS", - "IPV6_NEXTHOP", - "IPV6_PATHMTU", - "IPV6_RECVDSTOPTS", - "IPV6_RECVHOPLIMIT", - "IPV6_RECVHOPOPTS", - "IPV6_RECVPATHMTU", - "IPV6_RECVPKTINFO", - "IPV6_RTHDRDSTOPTS", - ] + __all__ += [ + "IPV6_DSTOPTS", + "IPV6_NEXTHOP", + "IPV6_PATHMTU", + "IPV6_RECVDSTOPTS", + "IPV6_RECVHOPLIMIT", + "IPV6_RECVHOPOPTS", + "IPV6_RECVPATHMTU", + "IPV6_RECVPKTINFO", + "IPV6_RTHDRDSTOPTS", + ] if sys.platform != "darwin": from _socket import SO_BINDTODEVICE as SO_BINDTODEVICE @@ -521,10 +520,9 @@ if sys.platform != "win32": __all__ += ["SO_BINDTODEVICE"] if sys.platform != "darwin" and sys.platform != "linux": - if sys.platform != "win32" or sys.version_info >= (3, 9): - from _socket import BDADDR_ANY as BDADDR_ANY, BDADDR_LOCAL as BDADDR_LOCAL, BTPROTO_RFCOMM as BTPROTO_RFCOMM + from _socket import BDADDR_ANY as BDADDR_ANY, BDADDR_LOCAL as BDADDR_LOCAL, BTPROTO_RFCOMM as BTPROTO_RFCOMM - __all__ += ["BDADDR_ANY", 
"BDADDR_LOCAL", "BTPROTO_RFCOMM"] + __all__ += ["BDADDR_ANY", "BDADDR_LOCAL", "BTPROTO_RFCOMM"] if sys.platform == "darwin" and sys.version_info >= (3, 10): from _socket import TCP_KEEPALIVE as TCP_KEEPALIVE @@ -777,7 +775,7 @@ if sys.platform == "linux": __all__ += ["CAN_RAW_ERR_FILTER"] -if sys.platform == "linux" and sys.version_info >= (3, 9): +if sys.platform == "linux": from _socket import ( CAN_J1939 as CAN_J1939, CAN_RAW_JOIN_FILTERS as CAN_RAW_JOIN_FILTERS, @@ -959,14 +957,13 @@ if sys.version_info >= (3, 12): __all__ += ["PF_DIVERT", "AF_DIVERT"] -if sys.platform != "win32" and sys.version_info >= (3, 9): +if sys.platform != "win32": __all__ += ["send_fds", "recv_fds"] -if sys.platform != "win32" or sys.version_info >= (3, 9): - if sys.platform != "linux": - __all__ += ["AF_LINK"] - if sys.platform != "darwin" and sys.platform != "linux": - __all__ += ["AF_BLUETOOTH"] +if sys.platform != "linux": + __all__ += ["AF_LINK"] +if sys.platform != "darwin" and sys.platform != "linux": + __all__ += ["AF_BLUETOOTH"] if sys.platform == "win32" and sys.version_info >= (3, 12): __all__ += ["AF_HYPERV"] @@ -980,6 +977,7 @@ if sys.platform != "win32" and sys.platform != "linux": IPPROTO_HELLO as IPPROTO_HELLO, IPPROTO_IPCOMP as IPPROTO_IPCOMP, IPPROTO_XTP as IPPROTO_XTP, + IPV6_USE_MIN_MTU as IPV6_USE_MIN_MTU, LOCAL_PEERCRED as LOCAL_PEERCRED, SCM_CREDS as SCM_CREDS, ) @@ -992,6 +990,7 @@ if sys.platform != "win32" and sys.platform != "linux": "IPPROTO_HELLO", "IPPROTO_IPCOMP", "IPPROTO_XTP", + "IPV6_USE_MIN_MTU", "LOCAL_PEERCRED", "SCM_CREDS", "AI_DEFAULT", @@ -999,10 +998,6 @@ if sys.platform != "win32" and sys.platform != "linux": "AI_V4MAPPED_CFG", "MSG_EOF", ] - if sys.platform != "darwin" or sys.version_info >= (3, 9): - from _socket import IPV6_USE_MIN_MTU as IPV6_USE_MIN_MTU - - __all__ += ["IPV6_USE_MIN_MTU"] if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": from _socket import ( @@ -1084,11 +1079,10 @@ class AddressFamily(IntEnum): AF_NETLINK = 16 AF_VSOCK = 40 AF_QIPCRTR = 42 - if sys.platform != "win32" or sys.version_info >= (3, 9): - if sys.platform != "linux": - AF_LINK = 33 - if sys.platform != "darwin" and sys.platform != "linux": - AF_BLUETOOTH = 32 + if sys.platform != "linux": + AF_LINK = 33 + if sys.platform != "darwin" and sys.platform != "linux": + AF_BLUETOOTH = 32 if sys.platform == "win32" and sys.version_info >= (3, 12): AF_HYPERV = 34 if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin" and sys.version_info >= (3, 12): @@ -1140,12 +1134,10 @@ if sys.platform == "linux": AF_VSOCK = AddressFamily.AF_VSOCK AF_QIPCRTR = AddressFamily.AF_QIPCRTR -if sys.platform != "win32" or sys.version_info >= (3, 9): - if sys.platform != "linux": - AF_LINK = AddressFamily.AF_LINK - if sys.platform != "darwin" and sys.platform != "linux": - AF_BLUETOOTH = AddressFamily.AF_BLUETOOTH - +if sys.platform != "linux": + AF_LINK = AddressFamily.AF_LINK +if sys.platform != "darwin" and sys.platform != "linux": + AF_BLUETOOTH = AddressFamily.AF_BLUETOOTH if sys.platform == "win32" and sys.version_info >= (3, 12): AF_HYPERV = AddressFamily.AF_HYPERV if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin" and sys.version_info >= (3, 12): @@ -1352,11 +1344,10 @@ class socket(_socket.socket): def fromfd(fd: SupportsIndex, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: ... 
if sys.platform != "win32": - if sys.version_info >= (3, 9): - def send_fds( - sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: Unused = 0, address: Unused = None - ) -> int: ... - def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: ... + def send_fds( + sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: Unused = 0, address: Unused = None + ) -> int: ... + def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: ... if sys.platform == "win32": def fromshare(info: bytes) -> socket: ... diff --git a/stdlib/ssl.pyi b/stdlib/ssl.pyi index 388e521c1ef5..042d0266c649 100644 --- a/stdlib/ssl.pyi +++ b/stdlib/ssl.pyi @@ -512,8 +512,6 @@ SSL_ERROR_ZERO_RETURN: SSLErrorNumber # undocumented def get_protocol_name(protocol_code: int) -> str: ... -if sys.version_info < (3, 9): - AF_INET: int PEM_FOOTER: str PEM_HEADER: str SOCK_STREAM: int diff --git a/stdlib/statistics.pyi b/stdlib/statistics.pyi index 9418bdea9d6d..6d7d3fbb4956 100644 --- a/stdlib/statistics.pyi +++ b/stdlib/statistics.pyi @@ -98,9 +98,7 @@ class NormalDist: def inv_cdf(self, p: float) -> float: ... def overlap(self, other: NormalDist) -> float: ... def quantiles(self, n: int = 4) -> list[float]: ... - if sys.version_info >= (3, 9): - def zscore(self, x: float) -> float: ... - + def zscore(self, x: float) -> float: ... def __eq__(x1, x2: object) -> bool: ... def __add__(x1, x2: float | NormalDist) -> NormalDist: ... def __sub__(x1, x2: float | NormalDist) -> NormalDist: ... diff --git a/stdlib/string.pyi b/stdlib/string.pyi index 35a76e9c8628..da752327d3f7 100644 --- a/stdlib/string.pyi +++ b/stdlib/string.pyi @@ -3,7 +3,7 @@ from _typeshed import StrOrLiteralStr from collections.abc import Iterable, Mapping, Sequence from re import Pattern, RegexFlag from typing import Any, ClassVar, overload -from typing_extensions import LiteralString, TypeAlias +from typing_extensions import LiteralString __all__ = [ "ascii_letters", @@ -32,14 +32,7 @@ whitespace: LiteralString def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: ... -if sys.version_info >= (3, 9): - _TemplateMetaclass: TypeAlias = type -else: - class _TemplateMetaclass(type): - pattern: ClassVar[str] - def __init__(cls, name: str, bases: tuple[type, ...], dct: dict[str, Any]) -> None: ... - -class Template(metaclass=_TemplateMetaclass): +class Template(metaclass=type): template: str delimiter: ClassVar[str] idpattern: ClassVar[str] diff --git a/stdlib/subprocess.pyi b/stdlib/subprocess.pyi index 21ac3a6a2798..8b72e2ec7ae2 100644 --- a/stdlib/subprocess.pyi +++ b/stdlib/subprocess.pyi @@ -1,13 +1,10 @@ import sys from _typeshed import MaybeNone, ReadableBuffer, StrOrBytesPath from collections.abc import Callable, Collection, Iterable, Mapping, Sequence -from types import TracebackType +from types import GenericAlias, TracebackType from typing import IO, Any, AnyStr, Final, Generic, Literal, TypeVar, overload from typing_extensions import Self, TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "Popen", "PIPE", @@ -87,8 +84,7 @@ class CompletedProcess(Generic[_T]): stderr: _T def __init__(self, args: _CMD, returncode: int, stdout: _T | None = None, stderr: _T | None = None) -> None: ... def check_returncode(self) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
+ def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @@ -500,7 +496,7 @@ elif sys.version_info >= (3, 10): pipesize: int = -1, ) -> CompletedProcess[Any]: ... -elif sys.version_info >= (3, 9): +else: # 3.9 adds arguments "user", "group", "extra_groups" and "umask" @overload def run( @@ -696,177 +692,6 @@ elif sys.version_info >= (3, 9): umask: int = -1, ) -> CompletedProcess[Any]: ... -else: - @overload - def run( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stdout: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - capture_output: bool = False, - check: bool = False, - encoding: str | None = None, - errors: str | None = None, - input: str | None = None, - text: Literal[True], - timeout: float | None = None, - ) -> CompletedProcess[str]: ... - @overload - def run( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stdout: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - capture_output: bool = False, - check: bool = False, - encoding: str, - errors: str | None = None, - input: str | None = None, - text: bool | None = None, - timeout: float | None = None, - ) -> CompletedProcess[str]: ... - @overload - def run( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stdout: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - capture_output: bool = False, - check: bool = False, - encoding: str | None = None, - errors: str, - input: str | None = None, - text: bool | None = None, - timeout: float | None = None, - ) -> CompletedProcess[str]: ... 
- @overload - def run( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stdout: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - *, - universal_newlines: Literal[True], - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - # where the *real* keyword only args start - capture_output: bool = False, - check: bool = False, - encoding: str | None = None, - errors: str | None = None, - input: str | None = None, - text: bool | None = None, - timeout: float | None = None, - ) -> CompletedProcess[str]: ... - @overload - def run( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stdout: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: Literal[False] | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - capture_output: bool = False, - check: bool = False, - encoding: None = None, - errors: None = None, - input: ReadableBuffer | None = None, - text: Literal[False] | None = None, - timeout: float | None = None, - ) -> CompletedProcess[bytes]: ... - @overload - def run( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stdout: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - capture_output: bool = False, - check: bool = False, - encoding: str | None = None, - errors: str | None = None, - input: _InputString | None = None, - text: bool | None = None, - timeout: float | None = None, - ) -> CompletedProcess[Any]: ... - # Same args as Popen.__init__ if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @@ -931,8 +756,7 @@ elif sys.version_info >= (3, 10): pipesize: int = -1, ) -> int: ... -elif sys.version_info >= (3, 9): - # 3.9 adds arguments "user", "group", "extra_groups" and "umask" +else: def call( args: _CMD, bufsize: int = -1, @@ -961,31 +785,6 @@ elif sys.version_info >= (3, 9): umask: int = -1, ) -> int: ... -else: - def call( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stdout: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - encoding: str | None = None, - timeout: float | None = None, - text: bool | None = None, - ) -> int: ... 
- # Same args as Popen.__init__ if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @@ -1050,8 +849,7 @@ elif sys.version_info >= (3, 10): pipesize: int = -1, ) -> int: ... -elif sys.version_info >= (3, 9): - # 3.9 adds arguments "user", "group", "extra_groups" and "umask" +else: def check_call( args: _CMD, bufsize: int = -1, @@ -1080,31 +878,6 @@ elif sys.version_info >= (3, 9): umask: int = -1, ) -> int: ... -else: - def check_call( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stdout: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - timeout: float | None = ..., - *, - encoding: str | None = None, - text: bool | None = None, - ) -> int: ... - if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @overload @@ -1479,8 +1252,7 @@ elif sys.version_info >= (3, 10): pipesize: int = -1, ) -> Any: ... # morally: -> str | bytes -elif sys.version_info >= (3, 9): - # 3.9 adds arguments "user", "group", "extra_groups" and "umask" +else: @overload def check_output( args: _CMD, @@ -1657,159 +1429,6 @@ elif sys.version_info >= (3, 9): umask: int = -1, ) -> Any: ... # morally: -> str | bytes -else: - @overload - def check_output( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - timeout: float | None = None, - input: _InputString | None = ..., - encoding: str | None = None, - errors: str | None = None, - text: Literal[True], - ) -> str: ... - @overload - def check_output( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - timeout: float | None = None, - input: _InputString | None = ..., - encoding: str, - errors: str | None = None, - text: bool | None = None, - ) -> str: ... 
- @overload - def check_output( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - timeout: float | None = None, - input: _InputString | None = ..., - encoding: str | None = None, - errors: str, - text: bool | None = None, - ) -> str: ... - @overload - def check_output( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - *, - universal_newlines: Literal[True], - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - # where the real keyword only ones start - timeout: float | None = None, - input: _InputString | None = ..., - encoding: str | None = None, - errors: str | None = None, - text: bool | None = None, - ) -> str: ... - @overload - def check_output( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: Literal[False] | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - timeout: float | None = None, - input: _InputString | None = ..., - encoding: None = None, - errors: None = None, - text: Literal[False] | None = None, - ) -> bytes: ... - @overload - def check_output( - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE = None, - stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = ..., - *, - timeout: float | None = None, - input: _InputString | None = ..., - encoding: str | None = None, - errors: str | None = None, - text: bool | None = None, - ) -> Any: ... # morally: -> str | bytes - PIPE: Final[int] STDOUT: Final[int] DEVNULL: Final[int] @@ -2223,8 +1842,7 @@ class Popen(Generic[AnyStr]): umask: int = -1, pipesize: int = -1, ) -> None: ... - elif sys.version_info >= (3, 9): - # user, group, extra_groups, umask were added in 3.9 + else: @overload def __init__( self: Popen[str], @@ -2400,158 +2018,6 @@ class Popen(Generic[AnyStr]): extra_groups: Iterable[str | int] | None = None, umask: int = -1, ) -> None: ... 
- else: - @overload - def __init__( - self: Popen[str], - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE | None = None, - stdout: _FILE | None = None, - stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any | None = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = (), - *, - text: bool | None = None, - encoding: str, - errors: str | None = None, - ) -> None: ... - @overload - def __init__( - self: Popen[str], - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE | None = None, - stdout: _FILE | None = None, - stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any | None = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = (), - *, - text: bool | None = None, - encoding: str | None = None, - errors: str, - ) -> None: ... - @overload - def __init__( - self: Popen[str], - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE | None = None, - stdout: _FILE | None = None, - stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - *, - universal_newlines: Literal[True], - startupinfo: Any | None = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = (), - # where the *real* keyword only args start - text: bool | None = None, - encoding: str | None = None, - errors: str | None = None, - ) -> None: ... - @overload - def __init__( - self: Popen[str], - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE | None = None, - stdout: _FILE | None = None, - stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any | None = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = (), - *, - text: Literal[True], - encoding: str | None = None, - errors: str | None = None, - ) -> None: ... - @overload - def __init__( - self: Popen[bytes], - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE | None = None, - stdout: _FILE | None = None, - stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: Literal[False] | None = None, - startupinfo: Any | None = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = (), - *, - text: Literal[False] | None = None, - encoding: None = None, - errors: None = None, - ) -> None: ... 
- @overload - def __init__( - self: Popen[Any], - args: _CMD, - bufsize: int = -1, - executable: StrOrBytesPath | None = None, - stdin: _FILE | None = None, - stdout: _FILE | None = None, - stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, - close_fds: bool = True, - shell: bool = False, - cwd: StrOrBytesPath | None = None, - env: _ENV | None = None, - universal_newlines: bool | None = None, - startupinfo: Any | None = None, - creationflags: int = 0, - restore_signals: bool = True, - start_new_session: bool = False, - pass_fds: Collection[int] = (), - *, - text: bool | None = None, - encoding: str | None = None, - errors: str | None = None, - ) -> None: ... def poll(self) -> int | None: ... def wait(self, timeout: float | None = None) -> int: ... @@ -2567,8 +2033,7 @@ class Popen(Generic[AnyStr]): self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def __del__(self) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # The result really is always a str. if sys.version_info >= (3, 11): diff --git a/stdlib/sunau.pyi b/stdlib/sunau.pyi index 9b051e82b64b..d81645cb5687 100644 --- a/stdlib/sunau.pyi +++ b/stdlib/sunau.pyi @@ -1,4 +1,3 @@ -import sys from _typeshed import Unused from typing import IO, Any, Literal, NamedTuple, NoReturn, overload from typing_extensions import Self, TypeAlias @@ -81,6 +80,3 @@ def open(f: _File, mode: Literal["r", "rb"]) -> Au_read: ... def open(f: _File, mode: Literal["w", "wb"]) -> Au_write: ... @overload def open(f: _File, mode: str | None = None) -> Any: ... - -if sys.version_info < (3, 9): - openfp = open diff --git a/stdlib/symtable.pyi b/stdlib/symtable.pyi index ee0a1eb2f1cb..d5f2be04b600 100644 --- a/stdlib/symtable.pyi +++ b/stdlib/symtable.pyi @@ -36,9 +36,6 @@ class SymbolTable: def is_optimized(self) -> bool: ... def is_nested(self) -> bool: ... def has_children(self) -> bool: ... - if sys.version_info < (3, 9): - def has_exec(self) -> bool: ... - def get_identifiers(self) -> dict_keys[str, int]: ... def lookup(self, name: str) -> Symbol: ... def get_symbols(self) -> list[Symbol]: ... @@ -52,9 +49,8 @@ class Function(SymbolTable): def get_nonlocals(self) -> tuple[str, ...]: ... class Class(SymbolTable): - if sys.version_info < (3, 16): - @deprecated("deprecated in Python 3.14, will be removed in Python 3.16") - def get_methods(self) -> tuple[str, ...]: ... + @deprecated("deprecated in Python 3.14, will be removed in Python 3.16") + def get_methods(self) -> tuple[str, ...]: ... class Symbol: def __init__( diff --git a/stdlib/tarfile.pyi b/stdlib/tarfile.pyi index 6a00e070aee9..51195eb98fcc 100644 --- a/stdlib/tarfile.pyi +++ b/stdlib/tarfile.pyi @@ -520,11 +520,7 @@ class TarFile: open = TarFile.open -if sys.version_info >= (3, 9): - def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: ... - -else: - def is_tarfile(name: StrOrBytesPath) -> bool: ... +def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: ... class TarError(Exception): ... class ReadError(TarError): ... 
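
Because the 3.8-only overloads above are gone, run() and friends always accept user=, group=, extra_groups= and umask=, and is_tarfile() takes a binary file object as well as a path. A sketch; the echo command assumes a POSIX environment and the archive is built in memory:

    import io
    import subprocess
    import tarfile

    proc = subprocess.run(["echo", "hi"], capture_output=True, text=True, umask=0o022)  # umask= is POSIX-only at runtime
    print(proc.stdout.strip())

    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w"):
        pass                                  # an empty archive is still a valid tar stream
    buf.seek(0)
    print(tarfile.is_tarfile(buf))            # file objects are accepted on the 3.9+ baseline
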
diff --git a/stdlib/tempfile.pyi b/stdlib/tempfile.pyi index 0c19d56fc7a6..c4861f7c6f39 100644 --- a/stdlib/tempfile.pyi +++ b/stdlib/tempfile.pyi @@ -13,13 +13,10 @@ from _typeshed import ( WriteableBuffer, ) from collections.abc import Iterable, Iterator -from types import TracebackType +from types import GenericAlias, TracebackType from typing import IO, Any, AnyStr, Generic, Literal, overload from typing_extensions import Self -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "NamedTemporaryFile", "TemporaryFile", @@ -399,8 +396,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def seekable(self) -> bool: ... def writable(self) -> bool: ... def __next__(self) -> AnyStr: ... # type: ignore[override] - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class TemporaryDirectory(Generic[AnyStr]): name: AnyStr @@ -458,8 +454,7 @@ class TemporaryDirectory(Generic[AnyStr]): def cleanup(self) -> None: ... def __enter__(self) -> AnyStr: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # The overloads overlap, but they should still work fine. @overload diff --git a/stdlib/threading.pyi b/stdlib/threading.pyi index efeea69d0234..e3965fab0e80 100644 --- a/stdlib/threading.pyi +++ b/stdlib/threading.pyi @@ -89,8 +89,6 @@ class Thread: @property def native_id(self) -> int | None: ... # only available on some platforms def is_alive(self) -> bool: ... - if sys.version_info < (3, 9): - def isAlive(self) -> bool: ... # the following methods are all deprecated def getName(self) -> str: ... def setName(self, name: str) -> None: ... @@ -134,10 +132,7 @@ class Semaphore: def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... - if sys.version_info >= (3, 9): - def release(self, n: int = 1) -> None: ... - else: - def release(self) -> None: ... + def release(self, n: int = 1) -> None: ... class BoundedSemaphore(Semaphore): ... diff --git a/stdlib/time.pyi b/stdlib/time.pyi index 71cdc4d78fdc..6d2538ea7e3e 100644 --- a/stdlib/time.pyi +++ b/stdlib/time.pyi @@ -31,7 +31,7 @@ if sys.platform == "darwin": CLOCK_UPTIME_RAW_APPROX: int CLOCK_MONOTONIC_RAW_APPROX: int -if sys.version_info >= (3, 9) and sys.platform == "linux": +if sys.platform == "linux": CLOCK_TAI: int # Constructor takes an iterable of any type, of length between 9 and 11 elements. 
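
The tempfile and threading stubs above likewise assume 3.9+, so Semaphore.release() takes a count and the temporary-file classes are subscriptable. A short sketch:

    import threading

    sem = threading.Semaphore(0)
    sem.release(3)              # release(n=...) is part of the baseline API
    for _ in range(3):
        sem.acquire()           # all three permits are available

    # For typing, TemporaryDirectory[str] and SpooledTemporaryFile[bytes] parameterization
    # now works without a version check, via __class_getitem__.
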
diff --git a/stdlib/tkinter/__init__.pyi b/stdlib/tkinter/__init__.pyi index 73c1e0400fe8..291e2fc5108f 100644 --- a/stdlib/tkinter/__init__.pyi +++ b/stdlib/tkinter/__init__.pyi @@ -13,140 +13,139 @@ if sys.version_info >= (3, 11): else: from enum import Enum -if sys.version_info >= (3, 9): - __all__ = [ - "TclError", - "NO", - "FALSE", - "OFF", - "YES", - "TRUE", - "ON", - "N", - "S", - "W", - "E", - "NW", - "SW", - "NE", - "SE", - "NS", - "EW", - "NSEW", - "CENTER", - "NONE", - "X", - "Y", - "BOTH", - "LEFT", - "TOP", - "RIGHT", - "BOTTOM", - "RAISED", - "SUNKEN", - "FLAT", - "RIDGE", - "GROOVE", - "SOLID", - "HORIZONTAL", - "VERTICAL", - "NUMERIC", - "CHAR", - "WORD", - "BASELINE", - "INSIDE", - "OUTSIDE", - "SEL", - "SEL_FIRST", - "SEL_LAST", - "END", - "INSERT", - "CURRENT", - "ANCHOR", - "ALL", - "NORMAL", - "DISABLED", - "ACTIVE", - "HIDDEN", - "CASCADE", - "CHECKBUTTON", - "COMMAND", - "RADIOBUTTON", - "SEPARATOR", - "SINGLE", - "BROWSE", - "MULTIPLE", - "EXTENDED", - "DOTBOX", - "UNDERLINE", - "PIESLICE", - "CHORD", - "ARC", - "FIRST", - "LAST", - "BUTT", - "PROJECTING", - "ROUND", - "BEVEL", - "MITER", - "MOVETO", - "SCROLL", - "UNITS", - "PAGES", - "TkVersion", - "TclVersion", - "READABLE", - "WRITABLE", - "EXCEPTION", - "EventType", - "Event", - "NoDefaultRoot", - "Variable", - "StringVar", - "IntVar", - "DoubleVar", - "BooleanVar", - "mainloop", - "getint", - "getdouble", - "getboolean", - "Misc", - "CallWrapper", - "XView", - "YView", - "Wm", - "Tk", - "Tcl", - "Pack", - "Place", - "Grid", - "BaseWidget", - "Widget", - "Toplevel", - "Button", - "Canvas", - "Checkbutton", - "Entry", - "Frame", - "Label", - "Listbox", - "Menu", - "Menubutton", - "Message", - "Radiobutton", - "Scale", - "Scrollbar", - "Text", - "OptionMenu", - "Image", - "PhotoImage", - "BitmapImage", - "image_names", - "image_types", - "Spinbox", - "LabelFrame", - "PanedWindow", - ] +__all__ = [ + "TclError", + "NO", + "FALSE", + "OFF", + "YES", + "TRUE", + "ON", + "N", + "S", + "W", + "E", + "NW", + "SW", + "NE", + "SE", + "NS", + "EW", + "NSEW", + "CENTER", + "NONE", + "X", + "Y", + "BOTH", + "LEFT", + "TOP", + "RIGHT", + "BOTTOM", + "RAISED", + "SUNKEN", + "FLAT", + "RIDGE", + "GROOVE", + "SOLID", + "HORIZONTAL", + "VERTICAL", + "NUMERIC", + "CHAR", + "WORD", + "BASELINE", + "INSIDE", + "OUTSIDE", + "SEL", + "SEL_FIRST", + "SEL_LAST", + "END", + "INSERT", + "CURRENT", + "ANCHOR", + "ALL", + "NORMAL", + "DISABLED", + "ACTIVE", + "HIDDEN", + "CASCADE", + "CHECKBUTTON", + "COMMAND", + "RADIOBUTTON", + "SEPARATOR", + "SINGLE", + "BROWSE", + "MULTIPLE", + "EXTENDED", + "DOTBOX", + "UNDERLINE", + "PIESLICE", + "CHORD", + "ARC", + "FIRST", + "LAST", + "BUTT", + "PROJECTING", + "ROUND", + "BEVEL", + "MITER", + "MOVETO", + "SCROLL", + "UNITS", + "PAGES", + "TkVersion", + "TclVersion", + "READABLE", + "WRITABLE", + "EXCEPTION", + "EventType", + "Event", + "NoDefaultRoot", + "Variable", + "StringVar", + "IntVar", + "DoubleVar", + "BooleanVar", + "mainloop", + "getint", + "getdouble", + "getboolean", + "Misc", + "CallWrapper", + "XView", + "YView", + "Wm", + "Tk", + "Tcl", + "Pack", + "Place", + "Grid", + "BaseWidget", + "Widget", + "Toplevel", + "Button", + "Canvas", + "Checkbutton", + "Entry", + "Frame", + "Label", + "Listbox", + "Menu", + "Menubutton", + "Message", + "Radiobutton", + "Scale", + "Scrollbar", + "Text", + "OptionMenu", + "Image", + "PhotoImage", + "BitmapImage", + "image_names", + "image_types", + "Spinbox", + "LabelFrame", + "PanedWindow", +] # Using anything from tkinter.font in this file means that 
'import tkinter' # seems to also load tkinter.font. That's not how it actually works, but diff --git a/stdlib/tkinter/colorchooser.pyi b/stdlib/tkinter/colorchooser.pyi index 09bc8cbb4f1e..d0d6de842656 100644 --- a/stdlib/tkinter/colorchooser.pyi +++ b/stdlib/tkinter/colorchooser.pyi @@ -1,20 +1,12 @@ -import sys from tkinter import Misc from tkinter.commondialog import Dialog from typing import ClassVar -if sys.version_info >= (3, 9): - __all__ = ["Chooser", "askcolor"] +__all__ = ["Chooser", "askcolor"] class Chooser(Dialog): command: ClassVar[str] -if sys.version_info >= (3, 9): - def askcolor( - color: str | bytes | None = None, *, initialcolor: str = ..., parent: Misc = ..., title: str = ... - ) -> tuple[None, None] | tuple[tuple[int, int, int], str]: ... - -else: - def askcolor( - color: str | bytes | None = None, *, initialcolor: str = ..., parent: Misc = ..., title: str = ... - ) -> tuple[None, None] | tuple[tuple[float, float, float], str]: ... +def askcolor( + color: str | bytes | None = None, *, initialcolor: str = ..., parent: Misc = ..., title: str = ... +) -> tuple[None, None] | tuple[tuple[int, int, int], str]: ... diff --git a/stdlib/tkinter/commondialog.pyi b/stdlib/tkinter/commondialog.pyi index d06c08df5b76..201ca13ddd9c 100644 --- a/stdlib/tkinter/commondialog.pyi +++ b/stdlib/tkinter/commondialog.pyi @@ -1,10 +1,8 @@ -import sys from _typeshed import Incomplete from collections.abc import Mapping from typing import ClassVar -if sys.version_info >= (3, 9): - __all__ = ["Dialog"] +__all__ = ["Dialog"] class Dialog: command: ClassVar[str | None] diff --git a/stdlib/tkinter/dialog.pyi b/stdlib/tkinter/dialog.pyi index b7d74c0fa71e..3dc059940964 100644 --- a/stdlib/tkinter/dialog.pyi +++ b/stdlib/tkinter/dialog.pyi @@ -1,11 +1,9 @@ -import sys from _typeshed import Incomplete from collections.abc import Mapping from tkinter import Widget from typing import Any, Final -if sys.version_info >= (3, 9): - __all__ = ["Dialog"] +__all__ = ["Dialog"] DIALOG_ICON: Final = "questhead" diff --git a/stdlib/tkinter/dnd.pyi b/stdlib/tkinter/dnd.pyi index d806be74068e..fe2961701c61 100644 --- a/stdlib/tkinter/dnd.pyi +++ b/stdlib/tkinter/dnd.pyi @@ -1,9 +1,7 @@ -import sys from tkinter import Event, Misc, Tk, Widget from typing import ClassVar, Protocol -if sys.version_info >= (3, 9): - __all__ = ["dnd_start", "DndHandler"] +__all__ = ["dnd_start", "DndHandler"] class _DndSource(Protocol): def dnd_end(self, target: Widget | None, event: Event[Misc] | None, /) -> None: ... 
diff --git a/stdlib/tkinter/filedialog.pyi b/stdlib/tkinter/filedialog.pyi index 03f89cfbe3e6..cafcf61e8635 100644 --- a/stdlib/tkinter/filedialog.pyi +++ b/stdlib/tkinter/filedialog.pyi @@ -1,25 +1,23 @@ -import sys from _typeshed import Incomplete, StrOrBytesPath from collections.abc import Iterable from tkinter import Button, Entry, Frame, Listbox, Misc, Scrollbar, StringVar, Toplevel, commondialog from typing import IO, ClassVar, Literal -if sys.version_info >= (3, 9): - __all__ = [ - "FileDialog", - "LoadFileDialog", - "SaveFileDialog", - "Open", - "SaveAs", - "Directory", - "askopenfilename", - "asksaveasfilename", - "askopenfilenames", - "askopenfile", - "askopenfiles", - "asksaveasfile", - "askdirectory", - ] +__all__ = [ + "FileDialog", + "LoadFileDialog", + "SaveFileDialog", + "Open", + "SaveAs", + "Directory", + "askopenfilename", + "asksaveasfilename", + "askopenfilenames", + "askopenfile", + "askopenfiles", + "asksaveasfile", + "askdirectory", +] dialogstates: dict[Incomplete, tuple[Incomplete, Incomplete]] diff --git a/stdlib/tkinter/font.pyi b/stdlib/tkinter/font.pyi index 3b73f982c4ca..cab97490be34 100644 --- a/stdlib/tkinter/font.pyi +++ b/stdlib/tkinter/font.pyi @@ -5,8 +5,7 @@ import tkinter from typing import Any, ClassVar, Final, Literal, TypedDict, overload from typing_extensions import TypeAlias, Unpack -if sys.version_info >= (3, 9): - __all__ = ["NORMAL", "ROMAN", "BOLD", "ITALIC", "nametofont", "Font", "families", "names"] +__all__ = ["NORMAL", "ROMAN", "BOLD", "ITALIC", "nametofont", "Font", "families", "names"] NORMAL: Final = "normal" ROMAN: Final = "roman" diff --git a/stdlib/tkinter/messagebox.pyi b/stdlib/tkinter/messagebox.pyi index 5cdfe512f9b7..902fab62ac05 100644 --- a/stdlib/tkinter/messagebox.pyi +++ b/stdlib/tkinter/messagebox.pyi @@ -1,18 +1,7 @@ -import sys from tkinter.commondialog import Dialog from typing import ClassVar, Final -if sys.version_info >= (3, 9): - __all__ = [ - "showinfo", - "showwarning", - "showerror", - "askquestion", - "askokcancel", - "askyesno", - "askyesnocancel", - "askretrycancel", - ] +__all__ = ["showinfo", "showwarning", "showerror", "askquestion", "askokcancel", "askyesno", "askyesnocancel", "askretrycancel"] ERROR: Final = "error" INFO: Final = "info" diff --git a/stdlib/trace.pyi b/stdlib/trace.pyi index 04390f119195..7e7cc1e9ac54 100644 --- a/stdlib/trace.pyi +++ b/stdlib/trace.pyi @@ -75,11 +75,7 @@ class Trace: def runctx( self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None ) -> None: ... - if sys.version_info >= (3, 9): - def runfunc(self, func: Callable[_P, _T], /, *args: _P.args, **kw: _P.kwargs) -> _T: ... - else: - def runfunc(self, func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... - + def runfunc(self, func: Callable[_P, _T], /, *args: _P.args, **kw: _P.kwargs) -> _T: ... def file_module_function_of(self, frame: types.FrameType) -> _FileModuleFunction: ... def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: ... def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: ... diff --git a/stdlib/tracemalloc.pyi b/stdlib/tracemalloc.pyi index e721e414138b..05d98ae127d8 100644 --- a/stdlib/tracemalloc.pyi +++ b/stdlib/tracemalloc.pyi @@ -69,10 +69,7 @@ class Frame: def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... 
-if sys.version_info >= (3, 9): - _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] -else: - _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple]] +_TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] class Trace: @property @@ -86,13 +83,9 @@ class Trace: def __hash__(self) -> int: ... class Traceback(Sequence[Frame]): - if sys.version_info >= (3, 9): - @property - def total_nframe(self) -> int | None: ... - def __init__(self, frames: Sequence[_FrameTuple], total_nframe: int | None = None) -> None: ... - else: - def __init__(self, frames: Sequence[_FrameTuple]) -> None: ... - + @property + def total_nframe(self) -> int | None: ... + def __init__(self, frames: Sequence[_FrameTuple], total_nframe: int | None = None) -> None: ... def format(self, limit: int | None = None, most_recent_first: bool = False) -> list[str]: ... @overload def __getitem__(self, index: SupportsIndex) -> Frame: ... diff --git a/stdlib/unittest/async_case.pyi b/stdlib/unittest/async_case.pyi index 565dd91c0fda..0b3fb9122c7b 100644 --- a/stdlib/unittest/async_case.pyi +++ b/stdlib/unittest/async_case.pyi @@ -21,5 +21,5 @@ class IsolatedAsyncioTestCase(TestCase): def addAsyncCleanup(self, func: Callable[_P, Awaitable[object]], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... if sys.version_info >= (3, 11): async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... - if sys.version_info >= (3, 9): - def __del__(self) -> None: ... + + def __del__(self) -> None: ... diff --git a/stdlib/unittest/case.pyi b/stdlib/unittest/case.pyi index 33cd556d2e3b..7d1a382a54a4 100644 --- a/stdlib/unittest/case.pyi +++ b/stdlib/unittest/case.pyi @@ -5,27 +5,12 @@ from _typeshed import SupportsDunderGE, SupportsDunderGT, SupportsDunderLE, Supp from collections.abc import Callable, Container, Iterable, Mapping, Sequence, Set as AbstractSet from contextlib import AbstractContextManager from re import Pattern -from types import TracebackType -from typing import ( - Any, - AnyStr, - ClassVar, - Final, - Generic, - NamedTuple, - NoReturn, - Protocol, - SupportsAbs, - SupportsRound, - TypeVar, - overload, -) +from types import GenericAlias, TracebackType +from typing import Any, AnyStr, Final, Generic, NoReturn, Protocol, SupportsAbs, SupportsRound, TypeVar, overload from typing_extensions import Never, ParamSpec, Self, TypeAlias +from unittest._log import _AssertLogsContext, _LoggingWatcher from warnings import WarningMessage -if sys.version_info >= (3, 9): - from types import GenericAlias - if sys.version_info >= (3, 10): from types import UnionType @@ -58,29 +43,6 @@ class _AssertRaisesBaseContext(_BaseTestCaseContext): # but it's not possible to construct an overload which expresses that def handle(self, name: str, args: list[Any], kwargs: dict[str, Any]) -> Any: ... -if sys.version_info >= (3, 9): - from unittest._log import _AssertLogsContext, _LoggingWatcher -else: - # Unused dummy for _AssertLogsContext. Starting with Python 3.10, - # this is generic over the logging watcher, but in lower versions - # the watcher is hard-coded. - _L = TypeVar("_L") - - class _LoggingWatcher(NamedTuple): - records: list[logging.LogRecord] - output: list[str] - - class _AssertLogsContext(_BaseTestCaseContext, Generic[_L]): - LOGGING_FORMAT: ClassVar[str] - logger_name: str - level: int - msg: None - def __init__(self, test_case: TestCase, logger_name: str, level: int) -> None: ... 
- def __enter__(self) -> _LoggingWatcher: ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None - ) -> bool | None: ... - def addModuleCleanup(function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... def doModuleCleanups() -> None: ... @@ -345,8 +307,7 @@ class _AssertRaisesContext(_AssertRaisesBaseContext, Generic[_E]): def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> bool: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class _AssertWarnsContext(_AssertRaisesBaseContext): warning: WarningMessage diff --git a/stdlib/unittest/mock.pyi b/stdlib/unittest/mock.pyi index 4b32f15095d6..d2664465097f 100644 --- a/stdlib/unittest/mock.pyi +++ b/stdlib/unittest/mock.pyi @@ -51,9 +51,6 @@ else: "seal", ) -if sys.version_info < (3, 9): - __version__: Final[str] - FILTER_DIR: Any class _SentinelObject: diff --git a/stdlib/urllib/parse.pyi b/stdlib/urllib/parse.pyi index 785bb9678ec7..f2fae0c3d402 100644 --- a/stdlib/urllib/parse.pyi +++ b/stdlib/urllib/parse.pyi @@ -1,11 +1,9 @@ import sys from collections.abc import Callable, Iterable, Mapping, Sequence +from types import GenericAlias from typing import Any, AnyStr, Generic, Literal, NamedTuple, TypeVar, overload from typing_extensions import TypeAlias -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "urlparse", "urlunparse", @@ -55,8 +53,7 @@ class _NetlocResultMixinBase(Generic[AnyStr]): def hostname(self) -> AnyStr | None: ... @property def port(self) -> int | None: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): ... class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): ... @@ -127,13 +124,7 @@ def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: ... @overload def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = "") -> str: ... - -if sys.version_info >= (3, 9): - def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: ... - -else: - def unquote(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... - +def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: ... def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ... def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... @overload diff --git a/stdlib/urllib/response.pyi b/stdlib/urllib/response.pyi index bbec4cacc750..65df9cdff58f 100644 --- a/stdlib/urllib/response.pyi +++ b/stdlib/urllib/response.pyi @@ -1,4 +1,3 @@ -import sys import tempfile from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable @@ -34,10 +33,8 @@ class addinfo(addbase): class addinfourl(addinfo): url: str code: int | None - if sys.version_info >= (3, 9): - @property - def status(self) -> int | None: ... - + @property + def status(self) -> int | None: ... def __init__(self, fp: IO[bytes], headers: Message, url: str, code: int | None = None) -> None: ... 
def geturl(self) -> str: ... def getcode(self) -> int | None: ... diff --git a/stdlib/uuid.pyi b/stdlib/uuid.pyi index 1be7a5ef009f..3202ae212cae 100644 --- a/stdlib/uuid.pyi +++ b/stdlib/uuid.pyi @@ -1,6 +1,5 @@ import builtins import sys -from _typeshed import Unused from enum import Enum from typing_extensions import TypeAlias @@ -65,12 +64,7 @@ class UUID: def __ge__(self, other: UUID) -> bool: ... def __hash__(self) -> builtins.int: ... -if sys.version_info >= (3, 9): - def getnode() -> int: ... - -else: - def getnode(*, getters: Unused = None) -> int: ... # undocumented - +def getnode() -> int: ... def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: ... if sys.version_info >= (3, 12): diff --git a/stdlib/venv/__init__.pyi b/stdlib/venv/__init__.pyi index 0490c35b44f2..0f71f0e073f5 100644 --- a/stdlib/venv/__init__.pyi +++ b/stdlib/venv/__init__.pyi @@ -6,8 +6,7 @@ from types import SimpleNamespace logger: logging.Logger -if sys.version_info >= (3, 9): - CORE_VENV_DEPS: tuple[str, ...] +CORE_VENV_DEPS: tuple[str, ...] class EnvBuilder: system_site_packages: bool @@ -30,17 +29,6 @@ class EnvBuilder: *, scm_ignore_files: Iterable[str] = ..., ) -> None: ... - elif sys.version_info >= (3, 9): - def __init__( - self, - system_site_packages: bool = False, - clear: bool = False, - symlinks: bool = False, - upgrade: bool = False, - with_pip: bool = False, - prompt: str | None = None, - upgrade_deps: bool = False, - ) -> None: ... else: def __init__( self, @@ -50,6 +38,7 @@ class EnvBuilder: upgrade: bool = False, with_pip: bool = False, prompt: str | None = None, + upgrade_deps: bool = False, ) -> None: ... def create(self, env_dir: StrOrBytesPath) -> None: ... @@ -65,8 +54,7 @@ class EnvBuilder: def post_setup(self, context: SimpleNamespace) -> None: ... def replace_variables(self, text: str, context: SimpleNamespace) -> str: ... # undocumented def install_scripts(self, context: SimpleNamespace, path: str) -> None: ... - if sys.version_info >= (3, 9): - def upgrade_dependencies(self, context: SimpleNamespace) -> None: ... + def upgrade_dependencies(self, context: SimpleNamespace) -> None: ... if sys.version_info >= (3, 13): def create_git_ignore_file(self, context: SimpleNamespace) -> None: ... @@ -83,17 +71,6 @@ if sys.version_info >= (3, 13): scm_ignore_files: Iterable[str] = ..., ) -> None: ... -elif sys.version_info >= (3, 9): - def create( - env_dir: StrOrBytesPath, - system_site_packages: bool = False, - clear: bool = False, - symlinks: bool = False, - with_pip: bool = False, - prompt: str | None = None, - upgrade_deps: bool = False, - ) -> None: ... - else: def create( env_dir: StrOrBytesPath, @@ -102,6 +79,7 @@ else: symlinks: bool = False, with_pip: bool = False, prompt: str | None = None, + upgrade_deps: bool = False, ) -> None: ... def main(args: Sequence[str] | None = None) -> None: ... diff --git a/stdlib/wave.pyi b/stdlib/wave.pyi index 9319d5347c79..ddc6f6bd02a5 100644 --- a/stdlib/wave.pyi +++ b/stdlib/wave.pyi @@ -1,12 +1,8 @@ -import sys from _typeshed import ReadableBuffer, Unused from typing import IO, Any, BinaryIO, Final, Literal, NamedTuple, NoReturn, overload from typing_extensions import Self, TypeAlias, deprecated -if sys.version_info >= (3, 9): - __all__ = ["open", "Error", "Wave_read", "Wave_write"] -else: - __all__ = ["open", "openfp", "Error", "Wave_read", "Wave_write"] +__all__ = ["open", "Error", "Wave_read", "Wave_write"] _File: TypeAlias = str | IO[bytes] @@ -80,6 +76,3 @@ def open(f: _File, mode: Literal["r", "rb"]) -> Wave_read: ... 
def open(f: _File, mode: Literal["w", "wb"]) -> Wave_write: ... @overload def open(f: _File, mode: str | None = None) -> Any: ... - -if sys.version_info < (3, 9): - openfp = open diff --git a/stdlib/weakref.pyi b/stdlib/weakref.pyi index 05a7b2bcda66..593eb4615c8f 100644 --- a/stdlib/weakref.pyi +++ b/stdlib/weakref.pyi @@ -1,14 +1,11 @@ -import sys from _typeshed import SupportsKeysAndGetItem from _weakref import getweakrefcount as getweakrefcount, getweakrefs as getweakrefs, proxy as proxy from _weakrefset import WeakSet as WeakSet from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping +from types import GenericAlias from typing import Any, ClassVar, Generic, TypeVar, final, overload from typing_extensions import ParamSpec, Self -if sys.version_info >= (3, 9): - from types import GenericAlias - __all__ = [ "ref", "proxy", @@ -61,8 +58,7 @@ class ReferenceType(Generic[_T]): # "weakref" def __call__(self) -> _T | None: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... ref = ReferenceType @@ -123,14 +119,13 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def update(self, other: Iterable[tuple[_KT, _VT]], /, **kwargs: _VT) -> None: ... @overload def update(self, other: None = None, /, **kwargs: _VT) -> None: ... - if sys.version_info >= (3, 9): - def __or__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... - def __ror__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... - # WeakValueDictionary.__ior__ should be kept roughly in line with MutableMapping.update() - @overload # type: ignore[misc] - def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... - @overload - def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __or__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... + # WeakValueDictionary.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class KeyedRef(ref[_T], Generic[_KT, _T]): key: _KT @@ -177,14 +172,13 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def update(self, dict: Iterable[tuple[_KT, _VT]], /, **kwargs: _VT) -> None: ... @overload def update(self, dict: None = None, /, **kwargs: _VT) -> None: ... - if sys.version_info >= (3, 9): - def __or__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... - def __ror__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... - # WeakKeyDictionary.__ior__ should be kept roughly in line with MutableMapping.update() - @overload # type: ignore[misc] - def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... - @overload - def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __or__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... 
+ # WeakKeyDictionary.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class finalize(Generic[_P, _T]): def __init__(self, obj: _T, func: Callable[_P, Any], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... diff --git a/stdlib/xml/dom/minidom.pyi b/stdlib/xml/dom/minidom.pyi index 51bbf4993657..ab2ef87e38a8 100644 --- a/stdlib/xml/dom/minidom.pyi +++ b/stdlib/xml/dom/minidom.pyi @@ -1,4 +1,3 @@ -import sys import xml.dom from _collections_abc import dict_keys, dict_values from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite @@ -88,71 +87,39 @@ class Node(xml.dom.Node): @property def localName(self) -> str | None: ... # non-null only for Element and Attr def __bool__(self) -> Literal[True]: ... - if sys.version_info >= (3, 9): - @overload - def toxml(self, encoding: str, standalone: bool | None = None) -> bytes: ... - @overload - def toxml(self, encoding: None = None, standalone: bool | None = None) -> str: ... - @overload - def toprettyxml( - self, - indent: str = "\t", - newl: str = "\n", - # Handle any case where encoding is not provided or where it is passed with None - encoding: None = None, - standalone: bool | None = None, - ) -> str: ... - @overload - def toprettyxml( - self, - indent: str, - newl: str, - # Handle cases where encoding is passed as str *positionally* - encoding: str, - standalone: bool | None = None, - ) -> bytes: ... - @overload - def toprettyxml( - self, - indent: str = "\t", - newl: str = "\n", - # Handle all cases where encoding is passed as a keyword argument; because standalone - # comes after, it will also have to be a keyword arg if encoding is - *, - encoding: str, - standalone: bool | None = None, - ) -> bytes: ... - else: - @overload - def toxml(self, encoding: str) -> bytes: ... - @overload - def toxml(self, encoding: None = None) -> str: ... - @overload - def toprettyxml( - self, - indent: str = "\t", - newl: str = "\n", - # Handle any case where encoding is not provided or where it is passed with None - encoding: None = None, - ) -> str: ... - @overload - def toprettyxml( - self, - indent: str, - newl: str, - # Handle cases where encoding is passed as str *positionally* - encoding: str, - ) -> bytes: ... - @overload - def toprettyxml( - self, - indent: str = "\t", - newl: str = "\n", - # Handle all cases where encoding is passed as a keyword argument - *, - encoding: str, - ) -> bytes: ... - + @overload + def toxml(self, encoding: str, standalone: bool | None = None) -> bytes: ... + @overload + def toxml(self, encoding: None = None, standalone: bool | None = None) -> str: ... + @overload + def toprettyxml( + self, + indent: str = "\t", + newl: str = "\n", + # Handle any case where encoding is not provided or where it is passed with None + encoding: None = None, + standalone: bool | None = None, + ) -> str: ... + @overload + def toprettyxml( + self, + indent: str, + newl: str, + # Handle cases where encoding is passed as str *positionally* + encoding: str, + standalone: bool | None = None, + ) -> bytes: ... + @overload + def toprettyxml( + self, + indent: str = "\t", + newl: str = "\n", + # Handle all cases where encoding is passed as a keyword argument; because standalone + # comes after, it will also have to be a keyword arg if encoding is + *, + encoding: str, + standalone: bool | None = None, + ) -> bytes: ... 
def hasChildNodes(self) -> bool: ... def insertBefore( # type: ignore[misc] self: _NodesWithChildren, # pyright: ignore[reportGeneralTypeIssues] @@ -657,26 +624,15 @@ class Document(Node, DocumentLS): def getElementsByTagNameNS(self, namespaceURI: str | None, localName: str) -> NodeList[Element]: ... def isSupported(self, feature: str, version: str | None) -> bool: ... def importNode(self, node: _ImportableNodeVar, deep: bool) -> _ImportableNodeVar: ... - if sys.version_info >= (3, 9): - def writexml( - self, - writer: SupportsWrite[str], - indent: str = "", - addindent: str = "", - newl: str = "", - encoding: str | None = None, - standalone: bool | None = None, - ) -> None: ... - else: - def writexml( - self, - writer: SupportsWrite[str], - indent: str = "", - addindent: str = "", - newl: str = "", - encoding: Incomplete | None = None, - ) -> None: ... - + def writexml( + self, + writer: SupportsWrite[str], + indent: str = "", + addindent: str = "", + newl: str = "", + encoding: str | None = None, + standalone: bool | None = None, + ) -> None: ... @overload def renameNode(self, n: Element, namespaceURI: str, name: str) -> Element: ... @overload diff --git a/stdlib/xml/etree/ElementInclude.pyi b/stdlib/xml/etree/ElementInclude.pyi index 10c305826453..8f20ee15a14e 100644 --- a/stdlib/xml/etree/ElementInclude.pyi +++ b/stdlib/xml/etree/ElementInclude.pyi @@ -1,4 +1,3 @@ -import sys from _typeshed import FileDescriptorOrPath from typing import Final, Literal, Protocol, overload from xml.etree.ElementTree import Element @@ -13,8 +12,7 @@ XINCLUDE: Final[str] XINCLUDE_INCLUDE: Final[str] XINCLUDE_FALLBACK: Final[str] -if sys.version_info >= (3, 9): - DEFAULT_MAX_INCLUSION_DEPTH: Final = 6 +DEFAULT_MAX_INCLUSION_DEPTH: Final = 6 class FatalIncludeError(SyntaxError): ... @@ -22,11 +20,6 @@ class FatalIncludeError(SyntaxError): ... def default_loader(href: FileDescriptorOrPath, parse: Literal["xml"], encoding: str | None = None) -> Element: ... @overload def default_loader(href: FileDescriptorOrPath, parse: Literal["text"], encoding: str | None = None) -> str: ... +def include(elem: Element, loader: _Loader | None = None, base_url: str | None = None, max_depth: int | None = 6) -> None: ... -if sys.version_info >= (3, 9): - def include(elem: Element, loader: _Loader | None = None, base_url: str | None = None, max_depth: int | None = 6) -> None: ... - - class LimitedRecursiveIncludeError(FatalIncludeError): ... - -else: - def include(elem: Element, loader: _Loader | None = None) -> None: ... +class LimitedRecursiveIncludeError(FatalIncludeError): ... diff --git a/stdlib/xml/etree/ElementTree.pyi b/stdlib/xml/etree/ElementTree.pyi index 198e1c3d6435..4c55a1a7452e 100644 --- a/stdlib/xml/etree/ElementTree.pyi +++ b/stdlib/xml/etree/ElementTree.pyi @@ -15,6 +15,7 @@ __all__ = [ "canonicalize", "fromstring", "fromstringlist", + "indent", "iselement", "iterparse", "parse", @@ -34,9 +35,6 @@ __all__ = [ "register_namespace", ] -if sys.version_info >= (3, 9): - __all__ += ["indent"] - _T = TypeVar("_T") _FileRead: TypeAlias = FileDescriptorOrPath | SupportsRead[bytes] | SupportsRead[str] _FileWriteC14N: TypeAlias = FileDescriptorOrPath | SupportsWrite[bytes] @@ -138,9 +136,6 @@ class Element(Generic[_Tag]): # Doesn't really exist in earlier versions, where __len__ is called implicitly instead @deprecated("Testing an element's truth value is deprecated.") def __bool__(self) -> bool: ... - if sys.version_info < (3, 9): - def getchildren(self) -> list[Element]: ... 
- def getiterator(self, tag: str | None = None) -> list[Element]: ... def SubElement(parent: Element, tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... def Comment(text: str | None = None) -> _CallableElement: ... @@ -165,9 +160,6 @@ class ElementTree(Generic[_Root]): def getroot(self) -> _Root: ... def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: ... def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... - if sys.version_info < (3, 9): - def getiterator(self, tag: str | None = None) -> list[Element]: ... - def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... @overload def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... @@ -254,10 +246,7 @@ def tostringlist( short_empty_elements: bool = True, ) -> list[Any]: ... def dump(elem: Element | ElementTree[Any]) -> None: ... - -if sys.version_info >= (3, 9): - def indent(tree: Element | ElementTree[Any], space: str = " ", level: int = 0) -> None: ... - +def indent(tree: Element | ElementTree[Any], space: str = " ", level: int = 0) -> None: ... def parse(source: _FileRead, parser: XMLParser[Any] | None = None) -> ElementTree[Element]: ... # This class is defined inside the body of iterparse diff --git a/stdlib/xml/sax/expatreader.pyi b/stdlib/xml/sax/expatreader.pyi index 6a68f52f0e99..012d6c03e121 100644 --- a/stdlib/xml/sax/expatreader.pyi +++ b/stdlib/xml/sax/expatreader.pyi @@ -53,11 +53,7 @@ class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator): ) -> None: ... @overload def setProperty(self, name: str, value: object) -> None: ... - if sys.version_info >= (3, 9): - def feed(self, data: str | ReadableBuffer, isFinal: bool = False) -> None: ... - else: - def feed(self, data: str | ReadableBuffer, isFinal: _BoolType = 0) -> None: ... - + def feed(self, data: str | ReadableBuffer, isFinal: bool = False) -> None: ... def flush(self) -> None: ... def close(self) -> None: ... def reset(self) -> None: ... diff --git a/stdlib/zipfile/__init__.pyi b/stdlib/zipfile/__init__.pyi index 91bc051df686..ede732c0f86a 100644 --- a/stdlib/zipfile/__init__.pyi +++ b/stdlib/zipfile/__init__.pyi @@ -30,7 +30,6 @@ _DateTuple = tuple[int, int, int, int, int, int] # noqa: Y026 _ZipFileMode = Literal["r", "w", "x", "a"] # noqa: Y026 _ReadWriteMode: TypeAlias = Literal["r", "w"] -_ReadWriteBinaryMode: TypeAlias = Literal["r", "w", "rb", "wb"] class BadZipFile(Exception): ... @@ -321,25 +320,20 @@ else: @property def stem(self) -> str: ... - if sys.version_info >= (3, 9): - @overload - def open( - self, - mode: Literal["r", "w"] = "r", - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - line_buffering: bool = ..., - write_through: bool = ..., - *, - pwd: bytes | None = None, - ) -> TextIOWrapper: ... - @overload - def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... - else: - def open( - self, mode: _ReadWriteBinaryMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False - ) -> IO[bytes]: ... + @overload + def open( + self, + mode: Literal["r", "w"] = "r", + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool = ..., + write_through: bool = ..., + *, + pwd: bytes | None = None, + ) -> TextIOWrapper: ... + @overload + def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... 
if sys.version_info >= (3, 10): def iterdir(self) -> Iterator[Self]: ... diff --git a/stdlib/zoneinfo/__init__.pyi b/stdlib/zoneinfo/__init__.pyi index fb21b00c45dc..35381758a1b7 100644 --- a/stdlib/zoneinfo/__init__.pyi +++ b/stdlib/zoneinfo/__init__.pyi @@ -1,35 +1,28 @@ -import sys from collections.abc import Iterable from datetime import datetime, timedelta, tzinfo from typing_extensions import Self +from zoneinfo._common import ZoneInfoNotFoundError as ZoneInfoNotFoundError, _IOBytes +from zoneinfo._tzpath import ( + TZPATH as TZPATH, + InvalidTZPathWarning as InvalidTZPathWarning, + available_timezones as available_timezones, + reset_tzpath as reset_tzpath, +) -# TODO: remove this version check -# In theory we shouldn't need this version check. Pyright complains about the imports -# from zoneinfo.* when run on 3.8 and 3.7 without this. Updates to typeshed's -# pyright test script are probably needed, see #11189 -if sys.version_info >= (3, 9): - from zoneinfo._common import ZoneInfoNotFoundError as ZoneInfoNotFoundError, _IOBytes - from zoneinfo._tzpath import ( - TZPATH as TZPATH, - InvalidTZPathWarning as InvalidTZPathWarning, - available_timezones as available_timezones, - reset_tzpath as reset_tzpath, - ) +__all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] - __all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] +class ZoneInfo(tzinfo): + @property + def key(self) -> str: ... + def __new__(cls, key: str) -> Self: ... + @classmethod + def no_cache(cls, key: str) -> Self: ... + @classmethod + def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: ... + @classmethod + def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: ... + def tzname(self, dt: datetime | None, /) -> str | None: ... + def utcoffset(self, dt: datetime | None, /) -> timedelta | None: ... + def dst(self, dt: datetime | None, /) -> timedelta | None: ... - class ZoneInfo(tzinfo): - @property - def key(self) -> str: ... - def __new__(cls, key: str) -> Self: ... - @classmethod - def no_cache(cls, key: str) -> Self: ... - @classmethod - def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: ... - @classmethod - def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: ... - def tzname(self, dt: datetime | None, /) -> str | None: ... - def utcoffset(self, dt: datetime | None, /) -> timedelta | None: ... - def dst(self, dt: datetime | None, /) -> timedelta | None: ... - - def __dir__() -> list[str]: ... +def __dir__() -> list[str]: ... 
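
The now-unguarded `zoneinfo` stub above describes a module that is importable on every Python version typeshed still supports. A short usage sketch; the key "Europe/Paris" is only an example, and on platforms without system tz data the third-party `tzdata` package is required:

from datetime import datetime
from zoneinfo import ZoneInfo

paris = ZoneInfo("Europe/Paris")
dt = datetime(2025, 7, 1, 12, 0, tzinfo=paris)

print(dt.tzname())     # 'CEST' (summer time)
print(dt.utcoffset())  # 2:00:00
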
From df443ae6323d0397b15075886cf9a1ddacee9e7a Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 3 Apr 2025 12:23:19 +0200 Subject: [PATCH 193/388] Migrate renovate config (#13784) --- .github/renovate.json5 | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/renovate.json5 b/.github/renovate.json5 index c48c0d253c71..abd3023ce8f2 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -28,11 +28,11 @@ enabled: false, }, { - "groupName": "most test/lint dependencies", - "matchManagers": ["pip_requirements", "pre-commit"], - "excludePackageNames": ["pytype", "pyright"], - "description": "Quarterly update of most test dependencies", - "schedule": ["every 3 months on the first day of the month"] + groupName: "most test/lint dependencies", + matchManagers: ["pip_requirements", "pre-commit"], + matchPackageNames: ["!pytype", "!pyright"], + description: "Quarterly update of most test dependencies", + schedule: ["every 3 months on the first day of the month"] }, { "groupName": "pytype and pyright", From 78ce8fd728c62427817185b06a2b7bf050e8ac93 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 3 Apr 2025 11:56:38 +0100 Subject: [PATCH 194/388] Remove more Python 3.8 cruft (#13787) --- lib/ts_utils/metadata.py | 14 ++--- lib/ts_utils/utils.py | 13 ++--- pyproject.toml | 2 +- scripts/stubsabot.py | 2 - .../stubtest_allowlists/darwin-py39.txt | 6 +-- .../@tests/stubtest_allowlists/linux-py39.txt | 6 +-- stdlib/@tests/stubtest_allowlists/py39.txt | 32 ++---------- .../@tests/stubtest_allowlists/win32-py39.txt | 6 +-- stdlib/VERSIONS | 1 - stdlib/_collections_abc.pyi | 4 +- stdlib/array.pyi | 10 ++-- stdlib/builtins.pyi | 2 +- stdlib/csv.pyi | 4 +- stdlib/encodings/mac_centeuro.pyi | 21 -------- stdlib/logging/__init__.pyi | 5 +- stdlib/types.pyi | 3 +- stdlib/typing_extensions.pyi | 51 ++++++++++--------- tests/check_typeshed_structure.py | 2 - tests/mypy_test.py | 3 +- 19 files changed, 63 insertions(+), 124 deletions(-) delete mode 100644 stdlib/encodings/mac_centeuro.pyi diff --git a/lib/ts_utils/metadata.py b/lib/ts_utils/metadata.py index 40bc42354089..f851ce536519 100644 --- a/lib/ts_utils/metadata.py +++ b/lib/ts_utils/metadata.py @@ -5,6 +5,7 @@ from __future__ import annotations +import functools import re import urllib.parse from collections.abc import Mapping @@ -19,7 +20,6 @@ from packaging.specifiers import Specifier from .paths import PYPROJECT_PATH, STUBS_PATH, distribution_path -from .utils import cache __all__ = [ "NoSuchStubError", @@ -42,7 +42,7 @@ def _is_list_of_strings(obj: object) -> TypeGuard[list[str]]: return isinstance(obj, list) and all(isinstance(item, str) for item in obj) -@cache +@functools.cache def _get_oldest_supported_python() -> str: with PYPROJECT_PATH.open("rb") as config: val = tomli.load(config)["tool"]["typeshed"]["oldest_supported_python"] @@ -79,7 +79,7 @@ def system_requirements_for_platform(self, platform: str) -> list[str]: return ret -@cache +@functools.cache def read_stubtest_settings(distribution: str) -> StubtestSettings: """Return an object describing the stubtest settings for a single stubs distribution.""" with metadata_path(distribution).open("rb") as f: @@ -187,7 +187,7 @@ class NoSuchStubError(ValueError): """Raise NoSuchStubError to indicate that a stubs/{distribution} directory doesn't exist.""" -@cache +@functools.cache def read_metadata(distribution: str) -> StubMetadata: """Return an object describing the metadata of a stub as given in the METADATA.toml file. 
@@ -328,12 +328,12 @@ class PackageDependencies(NamedTuple): external_pkgs: tuple[Requirement, ...] -@cache +@functools.cache def get_pypi_name_to_typeshed_name_mapping() -> Mapping[str, str]: return {read_metadata(stub_dir.name).stub_distribution: stub_dir.name for stub_dir in STUBS_PATH.iterdir()} -@cache +@functools.cache def read_dependencies(distribution: str) -> PackageDependencies: """Read the dependencies listed in a METADATA.toml file for a stubs package. @@ -360,7 +360,7 @@ def read_dependencies(distribution: str) -> PackageDependencies: return PackageDependencies(tuple(typeshed), tuple(external)) -@cache +@functools.cache def get_recursive_requirements(package_name: str) -> PackageDependencies: """Recursively gather dependencies for a single stubs package. diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py index 50b18df69766..4df06665e3af 100644 --- a/lib/ts_utils/utils.py +++ b/lib/ts_utils/utils.py @@ -2,10 +2,10 @@ from __future__ import annotations +import functools import re import sys from collections.abc import Iterable, Mapping -from functools import lru_cache from pathlib import Path from typing import Any, Final, NamedTuple from typing_extensions import TypeAlias @@ -26,11 +26,6 @@ def colored(text: str, color: str | None = None, **kwargs: Any) -> str: # type: PYTHON_VERSION: Final = f"{sys.version_info.major}.{sys.version_info.minor}" -# A backport of functools.cache for Python <3.9 -# This module is imported by mypy_test.py, which needs to run on 3.8 in CI -cache = lru_cache(None) - - def strip_comments(text: str) -> str: return text.split("#")[0].strip() @@ -81,7 +76,7 @@ def print_time(t: float) -> None: # ==================================================================== -@cache +@functools.cache def venv_python(venv_dir: Path) -> Path: if sys.platform == "win32": return venv_dir / "Scripts" / "python.exe" @@ -93,7 +88,7 @@ def venv_python(venv_dir: Path) -> Path: # ==================================================================== -@cache +@functools.cache def parse_requirements() -> Mapping[str, Requirement]: """Return a dictionary of requirements from the requirements file.""" with REQUIREMENTS_PATH.open(encoding="UTF-8") as requirements_file: @@ -206,7 +201,7 @@ def allowlists(distribution_name: str) -> list[str]: # ==================================================================== -@cache +@functools.cache def get_gitignore_spec() -> pathspec.PathSpec: with open(".gitignore", encoding="UTF-8") as f: return pathspec.PathSpec.from_lines("gitwildmatch", f.readlines()) diff --git a/pyproject.toml b/pyproject.toml index 72d0a4df4c9a..63e187ac1dcd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ force-exclude = ".*_pb2.pyi" [tool.ruff] line-length = 130 # Oldest supported Python version -target-version = "py38" +target-version = "py39" fix = true exclude = [ # virtual environment diff --git a/scripts/stubsabot.py b/scripts/stubsabot.py index 87f0fd18d720..1ce147b0be65 100755 --- a/scripts/stubsabot.py +++ b/scripts/stubsabot.py @@ -730,8 +730,6 @@ async def suggest_typeshed_obsolete(obsolete: Obsolete, session: aiohttp.ClientS async def main() -> None: - assert sys.version_info >= (3, 9) - parser = argparse.ArgumentParser() parser.add_argument( "--action-level", diff --git a/stdlib/@tests/stubtest_allowlists/darwin-py39.txt b/stdlib/@tests/stubtest_allowlists/darwin-py39.txt index ad9ecc3014cf..8deeed14226e 100644 --- a/stdlib/@tests/stubtest_allowlists/darwin-py39.txt +++ b/stdlib/@tests/stubtest_allowlists/darwin-py39.txt @@ -10,9 
+10,9 @@ email.utils.getaddresses email.utils.parseaddr -# ====== -# <= 3.9 -# ====== +# ======== +# 3.9 only +# ======== # Added in Python 3.9.14 sys.set_int_max_str_digits diff --git a/stdlib/@tests/stubtest_allowlists/linux-py39.txt b/stdlib/@tests/stubtest_allowlists/linux-py39.txt index 21522122b158..6c3feb290296 100644 --- a/stdlib/@tests/stubtest_allowlists/linux-py39.txt +++ b/stdlib/@tests/stubtest_allowlists/linux-py39.txt @@ -1,6 +1,6 @@ -# ====== -# <= 3.9 -# ====== +# ======== +# 3.9 only +# ======== # `eventmask` argument exists at runtime, but is not correctly recognized # while being inspected by stubtest. Fixed in Python 3.10. diff --git a/stdlib/@tests/stubtest_allowlists/py39.txt b/stdlib/@tests/stubtest_allowlists/py39.txt index b52ad6594b78..e84afe3defe7 100644 --- a/stdlib/@tests/stubtest_allowlists/py39.txt +++ b/stdlib/@tests/stubtest_allowlists/py39.txt @@ -1,8 +1,3 @@ -# ======================== -# New errors in Python 3.9 -# ======================== - - # ======== # 3.9 only # ======== @@ -16,24 +11,7 @@ collections.MappingView.__class_getitem__ hmac.HMAC.digest_cons hmac.HMAC.inner hmac.HMAC.outer - - -# =========== -# 3.9 to 3.10 -# =========== - -builtins.float.__setformat__ # Internal method for CPython test suite -typing._SpecialForm.__mro_entries__ # Exists at runtime, but missing from stubs - - -# =================================== -# Pre-existing errors from Python 3.8 -# =================================== - - -# ====== -# <= 3.9 -# ====== +xxsubtype # module missing from the stubs builtins.input # Incorrect default value in text signature, fixed in 3.10 collections.AsyncGenerator.__anext__ # async at runtime, deliberately not in the stub, see #7491 @@ -44,13 +22,13 @@ collections.ByteString # see comments in py3_common.txt collections.Callable collections.Mapping.get # Adding None to the Union messed up mypy collections.Sequence.index # Supporting None in end is not mandatory -xxsubtype # module missing from the stubs - -# ======= +# =========== # <= 3.10 -# ======= +# =========== +builtins.float.__setformat__ # Internal method for CPython test suite +typing._SpecialForm.__mro_entries__ # Exists at runtime, but missing from stubs email.contentmanager.typ gettext.install # codeset default value is ['unspecified'] so can't be specified gettext.translation # codeset default value is ['unspecified'] so can't be specified diff --git a/stdlib/@tests/stubtest_allowlists/win32-py39.txt b/stdlib/@tests/stubtest_allowlists/win32-py39.txt index 40a525566cc5..3ab6c98d454e 100644 --- a/stdlib/@tests/stubtest_allowlists/win32-py39.txt +++ b/stdlib/@tests/stubtest_allowlists/win32-py39.txt @@ -11,9 +11,9 @@ email.utils.getaddresses email.utils.parseaddr -# ====== -# <= 3.9 -# ====== +# ======== +# 3.9 only +# ======== # Added in Python 3.9.14 sys.set_int_max_str_digits diff --git a/stdlib/VERSIONS b/stdlib/VERSIONS index 7a8b950b0df7..fec56ce59e36 100644 --- a/stdlib/VERSIONS +++ b/stdlib/VERSIONS @@ -145,7 +145,6 @@ encodings.cp273: 3.4- encodings.cp858: 3.2- encodings.koi8_t: 3.5- encodings.kz1048: 3.5- -encodings.mac_centeuro: 3.0-3.8 ensurepip: 3.0- enum: 3.4- errno: 3.0- diff --git a/stdlib/_collections_abc.pyi b/stdlib/_collections_abc.pyi index 8bac0ce1dca3..b099bdd98f3c 100644 --- a/stdlib/_collections_abc.pyi +++ b/stdlib/_collections_abc.pyi @@ -1,7 +1,7 @@ import sys from abc import abstractmethod from types import MappingProxyType -from typing import ( # noqa: Y022,Y038 +from typing import ( # noqa: Y022,Y038,UP035 AbstractSet as Set, AsyncGenerator as 
AsyncGenerator, AsyncIterable as AsyncIterable, @@ -61,7 +61,7 @@ __all__ = [ "MutableSequence", ] if sys.version_info < (3, 14): - from typing import ByteString as ByteString # noqa: Y057 + from typing import ByteString as ByteString # noqa: Y057,UP035 __all__ += ["ByteString"] diff --git a/stdlib/array.pyi b/stdlib/array.pyi index b26336f3e969..bd96c9bc2d31 100644 --- a/stdlib/array.pyi +++ b/stdlib/array.pyi @@ -1,14 +1,10 @@ import sys from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite -from collections.abc import Iterable - -# pytype crashes if array inherits from collections.abc.MutableSequence instead of typing.MutableSequence -from typing import Any, ClassVar, Literal, MutableSequence, SupportsIndex, TypeVar, overload # noqa: Y022 +from collections.abc import Iterable, MutableSequence +from types import GenericAlias +from typing import Any, ClassVar, Literal, SupportsIndex, TypeVar, overload from typing_extensions import Self, TypeAlias -if sys.version_info >= (3, 12): - from types import GenericAlias - _IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"] _FloatTypeCode: TypeAlias = Literal["f", "d"] _UnicodeTypeCode: TypeAlias = Literal["u"] diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index 9129c0cba20f..b75250aad3de 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -36,7 +36,7 @@ from types import CellType, CodeType, GenericAlias, TracebackType # mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} # are imported from collections.abc in builtins.pyi -from typing import ( # noqa: Y022 +from typing import ( # noqa: Y022,UP035 IO, Any, BinaryIO, diff --git a/stdlib/csv.pyi b/stdlib/csv.pyi index 4a82de638136..2c8e7109cdfc 100644 --- a/stdlib/csv.pyi +++ b/stdlib/csv.pyi @@ -26,12 +26,10 @@ else: from _typeshed import SupportsWrite from collections.abc import Collection, Iterable, Mapping, Sequence +from types import GenericAlias from typing import Any, Generic, Literal, TypeVar, overload from typing_extensions import Self -if sys.version_info >= (3, 12): - from types import GenericAlias - __all__ = [ "QUOTE_MINIMAL", "QUOTE_ALL", diff --git a/stdlib/encodings/mac_centeuro.pyi b/stdlib/encodings/mac_centeuro.pyi deleted file mode 100644 index f62195662ce9..000000000000 --- a/stdlib/encodings/mac_centeuro.pyi +++ /dev/null @@ -1,21 +0,0 @@ -import codecs -from _codecs import _EncodingMap -from _typeshed import ReadableBuffer - -class Codec(codecs.Codec): - def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... - def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... - -class IncrementalEncoder(codecs.IncrementalEncoder): - def encode(self, input: str, final: bool = False) -> bytes: ... - -class IncrementalDecoder(codecs.IncrementalDecoder): - def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... - -class StreamWriter(Codec, codecs.StreamWriter): ... -class StreamReader(Codec, codecs.StreamReader): ... - -def getregentry() -> codecs.CodecInfo: ... 
- -decoding_table: str -encoding_table: _EncodingMap diff --git a/stdlib/logging/__init__.pyi b/stdlib/logging/__init__.pyi index 1d6edb0246d4..e555f74a81af 100644 --- a/stdlib/logging/__init__.pyi +++ b/stdlib/logging/__init__.pyi @@ -6,13 +6,10 @@ from io import TextIOWrapper from re import Pattern from string import Template from time import struct_time -from types import FrameType, TracebackType +from types import FrameType, GenericAlias, TracebackType from typing import Any, ClassVar, Final, Generic, Literal, Protocol, TextIO, TypeVar, overload from typing_extensions import Self, TypeAlias, deprecated -if sys.version_info >= (3, 11): - from types import GenericAlias - __all__ = [ "BASIC_FORMAT", "BufferingFormatter", diff --git a/stdlib/types.pyi b/stdlib/types.pyi index f89a992b72a2..537370487c8d 100644 --- a/stdlib/types.pyi +++ b/stdlib/types.pyi @@ -11,13 +11,14 @@ from collections.abc import ( Iterable, Iterator, KeysView, + Mapping, # noqa: Y022 MutableSequence, ValuesView, ) from importlib.machinery import ModuleSpec # pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping -from typing import Any, ClassVar, Literal, Mapping, TypeVar, final, overload # noqa: Y022 +from typing import Any, ClassVar, Literal, TypeVar, final, overload from typing_extensions import ParamSpec, Self, TypeAliasType, TypeVarTuple, deprecated __all__ = [ diff --git a/stdlib/typing_extensions.pyi b/stdlib/typing_extensions.pyi index 234e32e30ab6..3799f4e666e9 100644 --- a/stdlib/typing_extensions.pyi +++ b/stdlib/typing_extensions.pyi @@ -4,59 +4,60 @@ import sys import typing from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Incomplete, Unused +from collections.abc import ( + AsyncGenerator as AsyncGenerator, + AsyncIterable as AsyncIterable, + AsyncIterator as AsyncIterator, + Awaitable as Awaitable, + Collection as Collection, + Container as Container, + Coroutine as Coroutine, + Generator as Generator, + Hashable as Hashable, + ItemsView as ItemsView, + Iterable as Iterable, + Iterator as Iterator, + KeysView as KeysView, + Mapping as Mapping, + MappingView as MappingView, + MutableMapping as MutableMapping, + MutableSequence as MutableSequence, + MutableSet as MutableSet, + Reversible as Reversible, + Sequence as Sequence, + Sized as Sized, + ValuesView as ValuesView, +) from contextlib import AbstractAsyncContextManager as AsyncContextManager, AbstractContextManager as ContextManager +from re import Match as Match, Pattern as Pattern from types import GenericAlias, ModuleType -from typing import ( # noqa: Y022,Y037,Y038,Y039 +from typing import ( # noqa: Y022,Y037,Y038,Y039,UP035 IO as IO, TYPE_CHECKING as TYPE_CHECKING, AbstractSet as AbstractSet, Any as Any, AnyStr as AnyStr, - AsyncGenerator as AsyncGenerator, - AsyncIterable as AsyncIterable, - AsyncIterator as AsyncIterator, - Awaitable as Awaitable, BinaryIO as BinaryIO, Callable as Callable, ChainMap as ChainMap, ClassVar as ClassVar, - Collection as Collection, - Container as Container, - Coroutine as Coroutine, Counter as Counter, DefaultDict as DefaultDict, Deque as Deque, Dict as Dict, ForwardRef as ForwardRef, FrozenSet as FrozenSet, - Generator as Generator, Generic as Generic, - Hashable as Hashable, - ItemsView as ItemsView, - Iterable as Iterable, - Iterator as Iterator, - KeysView as KeysView, List as List, - Mapping as Mapping, - MappingView as MappingView, - Match as Match, - MutableMapping as MutableMapping, - MutableSequence as 
MutableSequence, - MutableSet as MutableSet, NoReturn as NoReturn, Optional as Optional, - Pattern as Pattern, - Reversible as Reversible, - Sequence as Sequence, Set as Set, - Sized as Sized, Text as Text, TextIO as TextIO, Tuple as Tuple, Type as Type, TypedDict as TypedDict, Union as Union, - ValuesView as ValuesView, _Alias, cast as cast, no_type_check as no_type_check, diff --git a/tests/check_typeshed_structure.py b/tests/check_typeshed_structure.py index 81adb8c74269..bcb02061e055 100755 --- a/tests/check_typeshed_structure.py +++ b/tests/check_typeshed_structure.py @@ -9,7 +9,6 @@ import os import re -import sys from pathlib import Path from ts_utils.metadata import read_metadata @@ -170,7 +169,6 @@ def check_requirement_pins() -> None: if __name__ == "__main__": - assert sys.version_info >= (3, 9), "Python 3.9+ is required to run this test" check_versions_file() check_metadata() check_requirement_pins() diff --git a/tests/mypy_test.py b/tests/mypy_test.py index bf3a234ebdd8..2eeb532d1ca6 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -393,8 +393,7 @@ def stdlib_module_name_from_path(path: Path) -> str: assert path.suffix == ".pyi" parts = list(path.parts[1:-1]) if path.parts[-1] != "__init__.pyi": - # TODO: Python 3.9+: Use removesuffix. - parts.append(path.parts[-1][:-4]) + parts.append(path.parts[-1].removesuffix(".pyi")) return ".".join(parts) From 9db363d99b4ea8973d04c39830c42fdfc31dd1ec Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 3 Apr 2025 12:12:12 +0100 Subject: [PATCH 195/388] remove outdated comment from `types.pyi` (#13788) --- stdlib/types.pyi | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/stdlib/types.pyi b/stdlib/types.pyi index 537370487c8d..fe443be27121 100644 --- a/stdlib/types.pyi +++ b/stdlib/types.pyi @@ -11,13 +11,11 @@ from collections.abc import ( Iterable, Iterator, KeysView, - Mapping, # noqa: Y022 + Mapping, MutableSequence, ValuesView, ) from importlib.machinery import ModuleSpec - -# pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping from typing import Any, ClassVar, Literal, TypeVar, final, overload from typing_extensions import ParamSpec, Self, TypeAliasType, TypeVarTuple, deprecated From acbef35d10fb599f59b94915668aaa56da54ba80 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 3 Apr 2025 19:14:05 +0200 Subject: [PATCH 196/388] Try to disable Python in github-actions (#13789) --- .github/renovate.json5 | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/.github/renovate.json5 b/.github/renovate.json5 index abd3023ce8f2..f2daa35c2a79 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -12,20 +12,14 @@ }, "packageRules": [ { - "groupName": "GitHub Actions", - "matchManagers": ["github-actions"], - "description": "Quarterly update of GitHub Action dependencies", - "schedule": ["every 3 months on the first day of the month"] - }, - { + groupName: "GitHub Actions", + matchManagers: ["github-actions"], // This package rule disables updates for `actions/setup-python` Python versions: // it's better to do these manually as there's often a reason why we can't use // the latest Python version in CI for a specific job - groupName: "Python versions", - matchManagers: ["github-actions"], - matchPackageNames: ["python"], - description: "Disable PRs updating Python versions", - enabled: false, + matchPackageNames: ["!python"], + description: "Quarterly update of GitHub Action dependencies", + schedule: ["every 3 
months on the first day of the month"] }, { groupName: "most test/lint dependencies", From c4f295197f97ef38556c3c918b779ba1acaf2c9e Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 3 Apr 2025 19:40:51 +0200 Subject: [PATCH 197/388] renovate: Try disabling Python updates (try #3) (#13790) --- .github/renovate.json5 | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/renovate.json5 b/.github/renovate.json5 index f2daa35c2a79..5b0e2d1e7956 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -7,6 +7,10 @@ "semanticCommits": "disabled", "separateMajorMinor": false, "prHourlyLimit": 10, + // This package rule disables updates for `actions/setup-python` Python versions: + // it's better to do these manually as there's often a reason why we can't use + // the latest Python version in CI for a specific job + ignoreDeps: ["python"], "pre-commit": { "enabled": true }, @@ -14,10 +18,6 @@ { groupName: "GitHub Actions", matchManagers: ["github-actions"], - // This package rule disables updates for `actions/setup-python` Python versions: - // it's better to do these manually as there's often a reason why we can't use - // the latest Python version in CI for a specific job - matchPackageNames: ["!python"], description: "Quarterly update of GitHub Action dependencies", schedule: ["every 3 months on the first day of the month"] }, From ead803add51394d5af8ea4499b3e98ba3fcfc65a Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sun, 6 Apr 2025 21:10:19 +0400 Subject: [PATCH 198/388] Mark internal stuff in allowlist for `assertpy` (#13802) Co-authored-by: Jelle Zijlstra --- stubs/assertpy/@tests/stubtest_allowlist.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/stubs/assertpy/@tests/stubtest_allowlist.txt b/stubs/assertpy/@tests/stubtest_allowlist.txt index 2de67f3589cd..57fe5e2ee114 100644 --- a/stubs/assertpy/@tests/stubtest_allowlist.txt +++ b/stubs/assertpy/@tests/stubtest_allowlist.txt @@ -1,3 +1,4 @@ +# Python 2 compatibility cruft: assertpy.collection.Iterable assertpy.contains.str_types assertpy.contains.xrange From 3f90fd645df28cd14d9b8bea0ef72682134993b0 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 7 Apr 2025 07:58:57 -0400 Subject: [PATCH 199/388] Use `GitIgnoreSpec` in `ts_utils.get_gitignore_spec` (#13797) --- lib/ts_utils/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py index 4df06665e3af..e4a687600099 100644 --- a/lib/ts_utils/utils.py +++ b/lib/ts_utils/utils.py @@ -204,7 +204,7 @@ def allowlists(distribution_name: str) -> list[str]: @functools.cache def get_gitignore_spec() -> pathspec.PathSpec: with open(".gitignore", encoding="UTF-8") as f: - return pathspec.PathSpec.from_lines("gitwildmatch", f.readlines()) + return pathspec.GitIgnoreSpec.from_lines(f) def spec_matches_path(spec: pathspec.PathSpec, path: Path) -> bool: From 90d926de48f333bd761d628e55c0fe622797d6c6 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 7 Apr 2025 16:00:39 +0400 Subject: [PATCH 200/388] Improve `dateparser` (#13796) --- .../dateparser/dateparser/calendars/hijri.pyi | 5 +- .../dateparser/calendars/hijri_parser.pyi | 15 ++-- .../dateparser/calendars/jalali.pyi | 4 +- .../dateparser/calendars/jalali_parser.pyi | 2 +- stubs/dateparser/dateparser/conf.pyi | 11 ++- .../dateparser/data/languages_info.pyi | 8 +- stubs/dateparser/dateparser/date.pyi | 36 ++++----- stubs/dateparser/dateparser/date_parser.pyi | 15 +++- .../dateparser/freshness_date_parser.pyi | 18 +++-- 
.../dateparser/languages/__init__.pyi | 2 + .../dateparser/languages/dictionary.pyi | 28 ++++--- .../dateparser/languages/loader.pyi | 7 +- .../dateparser/languages/locale.pyi | 3 +- .../dateparser/languages/validation.pyi | 11 +-- stubs/dateparser/dateparser/parser.pyi | 81 +++++++++++-------- .../dateparser/dateparser/timezone_parser.pyi | 29 +++---- stubs/dateparser/dateparser/timezones.pyi | 4 +- .../dateparser/dateparser/utils/__init__.pyi | 3 +- .../dateparser/dateparser/utils/strptime.pyi | 7 +- stubs/dateparser/dateparser_data/settings.pyi | 6 +- 20 files changed, 166 insertions(+), 129 deletions(-) diff --git a/stubs/dateparser/dateparser/calendars/hijri.pyi b/stubs/dateparser/dateparser/calendars/hijri.pyi index ef8381576622..3a134a5b0b69 100644 --- a/stubs/dateparser/dateparser/calendars/hijri.pyi +++ b/stubs/dateparser/dateparser/calendars/hijri.pyi @@ -1,6 +1,5 @@ -from typing import Any - from dateparser.calendars import CalendarBase +from dateparser.calendars.hijri_parser import hijri_parser class HijriCalendar(CalendarBase): - parser: Any + parser: type[hijri_parser] diff --git a/stubs/dateparser/dateparser/calendars/hijri_parser.pyi b/stubs/dateparser/dateparser/calendars/hijri_parser.pyi index bc4cc6cc811f..5101ef9fc8b6 100644 --- a/stubs/dateparser/dateparser/calendars/hijri_parser.pyi +++ b/stubs/dateparser/dateparser/calendars/hijri_parser.pyi @@ -1,15 +1,16 @@ -from _typeshed import Incomplete -from typing import Any +from typing import Any, SupportsIndex from dateparser.calendars import non_gregorian_parser class hijri: @classmethod - def to_gregorian(cls, year: Incomplete | None = ..., month: Incomplete | None = ..., day: Incomplete | None = ...): ... + def to_gregorian(cls, year: int | None = None, month: int | None = None, day: int | None = None) -> tuple[int, int, int]: ... @classmethod - def from_gregorian(cls, year: Incomplete | None = ..., month: Incomplete | None = ..., day: Incomplete | None = ...): ... + def from_gregorian( + cls, year: SupportsIndex | None = None, month: SupportsIndex | None = None, day: SupportsIndex | None = None + ) -> tuple[int, int, int]: ... @classmethod - def month_length(cls, year, month): ... + def month_length(cls, year: int, month: int) -> int: ... class HijriDate: year: Any @@ -19,9 +20,9 @@ class HijriDate: def weekday(self): ... class hijri_parser(non_gregorian_parser): - calendar_converter: Any + calendar_converter: type[hijri] default_year: int default_month: int default_day: int - non_gregorian_date_cls: Any + non_gregorian_date_cls: type[HijriDate] def handle_two_digit_year(self, year: int) -> int: ... diff --git a/stubs/dateparser/dateparser/calendars/jalali.pyi b/stubs/dateparser/dateparser/calendars/jalali.pyi index 355120027b94..8df2dce7228a 100644 --- a/stubs/dateparser/dateparser/calendars/jalali.pyi +++ b/stubs/dateparser/dateparser/calendars/jalali.pyi @@ -1,6 +1,6 @@ -from typing import Any +from dateparser.calendars.jalali_parser import jalali_parser from . 
import CalendarBase class JalaliCalendar(CalendarBase): - parser: Any + parser: type[jalali_parser] diff --git a/stubs/dateparser/dateparser/calendars/jalali_parser.pyi b/stubs/dateparser/dateparser/calendars/jalali_parser.pyi index 0329c1c9c3cf..4b7d2a0df27d 100644 --- a/stubs/dateparser/dateparser/calendars/jalali_parser.pyi +++ b/stubs/dateparser/dateparser/calendars/jalali_parser.pyi @@ -14,5 +14,5 @@ class jalali_parser(non_gregorian_parser): default_year: int default_month: int default_day: int - non_gregorian_date_cls: Any + non_gregorian_date_cls: type[PersianDate] def handle_two_digit_year(self, year: int) -> int: ... diff --git a/stubs/dateparser/dateparser/conf.pyi b/stubs/dateparser/dateparser/conf.pyi index 096afd732f03..7c1f85361186 100644 --- a/stubs/dateparser/dateparser/conf.pyi +++ b/stubs/dateparser/dateparser/conf.pyi @@ -1,18 +1,17 @@ -from _typeshed import Incomplete from typing import Any from typing_extensions import Self class Settings: def __new__(cls, *args, **kw) -> Self: ... - def __init__(self, settings: Incomplete | None = None) -> None: ... + def __init__(self, settings: dict[str, Any] | None = None) -> None: ... @classmethod - def get_key(cls, settings: Incomplete | None = None): ... - def replace(self, mod_settings: Incomplete | None = None, **kwds): ... + def get_key(cls, settings: dict[str, Any] | None = None) -> str: ... + def replace(self, mod_settings: dict[str, Any] | None = None, **kwds) -> Self: ... -settings: Any +settings: Settings def apply_settings(f): ... class SettingValidationError(ValueError): ... -def check_settings(settings) -> None: ... +def check_settings(settings: Settings) -> None: ... diff --git a/stubs/dateparser/dateparser/data/languages_info.pyi b/stubs/dateparser/dateparser/data/languages_info.pyi index 21a8e508d13f..92038f5150de 100644 --- a/stubs/dateparser/dateparser/data/languages_info.pyi +++ b/stubs/dateparser/dateparser/data/languages_info.pyi @@ -1,3 +1,5 @@ -language_order: list[str] -language_locale_dict: dict[str, str] -language_map: dict[str, list[str]] +from typing import Final + +language_order: Final[list[str]] +language_locale_dict: Final[dict[str, str]] +language_map: Final[dict[str, list[str]]] diff --git a/stubs/dateparser/dateparser/date.pyi b/stubs/dateparser/dateparser/date.pyi index f659361ea1f4..c26e35566218 100644 --- a/stubs/dateparser/dateparser/date.pyi +++ b/stubs/dateparser/dateparser/date.pyi @@ -1,8 +1,8 @@ import collections from collections.abc import Callable, Iterable, Iterator -from datetime import datetime +from datetime import datetime, tzinfo from re import Pattern -from typing import ClassVar, Literal, overload +from typing import ClassVar, Final, Literal, overload from typing_extensions import TypeAlias from dateparser import _Settings @@ -13,23 +13,23 @@ from dateparser.languages.locale import Locale _DetectLanguagesFunction: TypeAlias = Callable[[str, float], list[str]] _Period: TypeAlias = Literal["time", "day", "week", "month", "year"] -APOSTROPHE_LOOK_ALIKE_CHARS: list[str] -RE_NBSP: Pattern[str] -RE_SPACES: Pattern[str] -RE_TRIM_SPACES: Pattern[str] -RE_TRIM_COLONS: Pattern[str] -RE_SANITIZE_SKIP: Pattern[str] -RE_SANITIZE_RUSSIAN: Pattern[str] -RE_SANITIZE_PERIOD: Pattern[str] -RE_SANITIZE_ON: Pattern[str] -RE_SANITIZE_APOSTROPHE: Pattern[str] -RE_SEARCH_TIMESTAMP: Pattern[str] -RE_SANITIZE_CROATIAN: Pattern[str] -RE_SEARCH_NEGATIVE_TIMESTAMP: Pattern[str] +APOSTROPHE_LOOK_ALIKE_CHARS: Final[list[str]] +RE_NBSP: Final[Pattern[str]] +RE_SPACES: Final[Pattern[str]] +RE_TRIM_SPACES: 
Final[Pattern[str]] +RE_TRIM_COLONS: Final[Pattern[str]] +RE_SANITIZE_SKIP: Final[Pattern[str]] +RE_SANITIZE_RUSSIAN: Final[Pattern[str]] +RE_SANITIZE_PERIOD: Final[Pattern[str]] +RE_SANITIZE_ON: Final[Pattern[str]] +RE_SANITIZE_APOSTROPHE: Final[Pattern[str]] +RE_SEARCH_TIMESTAMP: Final[Pattern[str]] +RE_SANITIZE_CROATIAN: Final[Pattern[str]] +RE_SEARCH_NEGATIVE_TIMESTAMP: Final[Pattern[str]] def sanitize_spaces(date_string: str) -> str: ... -def date_range(begin, end, **kwargs) -> None: ... -def get_intersecting_periods(low, high, period: str = "day") -> None: ... +def date_range(begin: datetime, end: datetime, **kwargs) -> None: ... +def get_intersecting_periods(low: datetime, high: datetime, period: str = "day") -> None: ... def sanitize_date(date_string: str) -> str: ... def get_date_from_timestamp(date_string: str, settings: Settings, negative: bool = False) -> datetime | None: ... def parse_with_formats(date_string: str, date_formats: Iterable[str], settings: Settings) -> DateData: ... @@ -58,7 +58,7 @@ class _DateLocaleParser: def _try_freshness_parser(self) -> DateData | None: ... def _try_absolute_parser(self) -> DateData | None: ... def _try_nospaces_parser(self) -> DateData | None: ... - def _try_parser(self, parse_method) -> DateData | None: ... + def _try_parser(self, parse_method: Callable[[str, Settings, tzinfo | None], tuple[datetime, str]]) -> DateData | None: ... def _try_given_formats(self) -> DateData | None: ... def _get_translated_date(self) -> str: ... def _get_translated_date_with_formatting(self) -> str: ... diff --git a/stubs/dateparser/dateparser/date_parser.pyi b/stubs/dateparser/dateparser/date_parser.pyi index ed36456214e2..73a70cc9b735 100644 --- a/stubs/dateparser/dateparser/date_parser.pyi +++ b/stubs/dateparser/dateparser/date_parser.pyi @@ -1,7 +1,14 @@ -from _typeshed import Incomplete -from typing import Any +from collections.abc import Callable +from datetime import datetime, tzinfo + +from dateparser.conf import Settings class DateParser: - def parse(self, date_string, parse_method, settings: Incomplete | None = None): ... + def parse( + self, + date_string: str, + parse_method: Callable[[str, Settings, tzinfo | None], tuple[datetime, str]], + settings: Settings | None = None, + ) -> tuple[datetime, str]: ... -date_parser: Any +date_parser: DateParser diff --git a/stubs/dateparser/dateparser/freshness_date_parser.pyi b/stubs/dateparser/dateparser/freshness_date_parser.pyi index 54784064830d..93df25f3bafe 100644 --- a/stubs/dateparser/dateparser/freshness_date_parser.pyi +++ b/stubs/dateparser/dateparser/freshness_date_parser.pyi @@ -1,12 +1,16 @@ +import re from _typeshed import Incomplete -from typing import Any +from typing import Final +from zoneinfo import ZoneInfo -PATTERN: Any +from dateparser.date import DateData + +PATTERN: Final[re.Pattern[str]] class FreshnessDateDataParser: - def get_local_tz(self): ... - def parse(self, date_string, settings): ... - def get_kwargs(self, date_string): ... - def get_date_data(self, date_string, settings: Incomplete | None = None): ... + def get_local_tz(self) -> ZoneInfo: ... + def parse(self, date_string: str, settings) -> tuple[Incomplete | None, str | None]: ... + def get_kwargs(self, date_string: str) -> dict[str, float]: ... + def get_date_data(self, date_string: str, settings: Incomplete | None = None) -> DateData: ... 
-freshness_date_parser: Any +freshness_date_parser: FreshnessDateDataParser diff --git a/stubs/dateparser/dateparser/languages/__init__.pyi b/stubs/dateparser/dateparser/languages/__init__.pyi index e69de29bb2d1..2f14a878d997 100644 --- a/stubs/dateparser/dateparser/languages/__init__.pyi +++ b/stubs/dateparser/dateparser/languages/__init__.pyi @@ -0,0 +1,2 @@ +from .loader import default_loader as default_loader +from .locale import Locale as Locale diff --git a/stubs/dateparser/dateparser/languages/dictionary.pyi b/stubs/dateparser/dateparser/languages/dictionary.pyi index 6935464302da..74211e657a23 100644 --- a/stubs/dateparser/dateparser/languages/dictionary.pyi +++ b/stubs/dateparser/dateparser/languages/dictionary.pyi @@ -1,24 +1,28 @@ +import re from _typeshed import Incomplete -from typing import Any +from typing import Any, Final, overload -PARSER_HARDCODED_TOKENS: Any -PARSER_KNOWN_TOKENS: Any -ALWAYS_KEEP_TOKENS: list[str] -KNOWN_WORD_TOKENS: Any -PARENTHESES_PATTERN: Any -NUMERAL_PATTERN: Any -KEEP_TOKEN_PATTERN: Any +PARSER_HARDCODED_TOKENS: Final[list[str]] +PARSER_KNOWN_TOKENS: Final[list[str]] +ALWAYS_KEEP_TOKENS: Final[list[str]] +KNOWN_WORD_TOKENS: Final[list[str]] +PARENTHESES_PATTERN: Final[re.Pattern[str]] +NUMERAL_PATTERN: Final[re.Pattern[str]] +KEEP_TOKEN_PATTERN: Final[re.Pattern[str]] class UnknownTokenError(Exception): ... class Dictionary: info: Any - def __init__(self, locale_info, settings: Incomplete | None = None) -> None: ... + def __init__(self, locale_info: dict[str, Incomplete], settings: Incomplete | None = None) -> None: ... def __contains__(self, key): ... def __getitem__(self, key): ... def __iter__(self) -> Any: ... - def are_tokens_valid(self, tokens): ... - def split(self, string, keep_formatting: bool = False): ... + def are_tokens_valid(self, tokens: list[str]) -> bool: ... + @overload + def split(self, string: None, keep_formatting: bool = False) -> None: ... + @overload + def split(self, string: str, keep_formatting: bool = False) -> list[str]: ... class NormalizedDictionary(Dictionary): - def __init__(self, locale_info, settings: Incomplete | None = None) -> None: ... + def __init__(self, locale_info: dict[str, Incomplete], settings: Incomplete | None = None) -> None: ... diff --git a/stubs/dateparser/dateparser/languages/loader.pyi b/stubs/dateparser/dateparser/languages/loader.pyi index 36d08c63f207..8dc3a09fb94d 100644 --- a/stubs/dateparser/dateparser/languages/loader.pyi +++ b/stubs/dateparser/dateparser/languages/loader.pyi @@ -1,10 +1,11 @@ +import re from collections import OrderedDict from collections.abc import Iterator -from typing import Any +from typing import Any, Final from .locale import Locale -LOCALE_SPLIT_PATTERN: Any +LOCALE_SPLIT_PATTERN: Final[re.Pattern[str]] class LocaleDataLoader: def get_locale_map( @@ -25,4 +26,4 @@ class LocaleDataLoader: ) -> Iterator[Locale]: ... def get_locale(self, shortname: str) -> Locale: ... 
-default_loader: Any +default_loader: LocaleDataLoader diff --git a/stubs/dateparser/dateparser/languages/locale.pyi b/stubs/dateparser/dateparser/languages/locale.pyi index 5ac12c35badc..edbc1a403085 100644 --- a/stubs/dateparser/dateparser/languages/locale.pyi +++ b/stubs/dateparser/dateparser/languages/locale.pyi @@ -1,8 +1,9 @@ from re import Pattern +from typing import Final from dateparser.conf import Settings -NUMERAL_PATTERN: Pattern[str] +NUMERAL_PATTERN: Final[Pattern[str]] class Locale: shortname: str diff --git a/stubs/dateparser/dateparser/languages/validation.pyi b/stubs/dateparser/dateparser/languages/validation.pyi index 790faa6af36a..d14701860ae8 100644 --- a/stubs/dateparser/dateparser/languages/validation.pyi +++ b/stubs/dateparser/dateparser/languages/validation.pyi @@ -1,9 +1,10 @@ -from typing import Any +from _typeshed import Incomplete +from logging import Logger class LanguageValidator: - logger: Any - VALID_KEYS: Any + logger: Logger | None + VALID_KEYS: list[str] @classmethod - def get_logger(cls): ... + def get_logger(cls) -> Logger: ... @classmethod - def validate_info(cls, language_id, info): ... + def validate_info(cls, language_id, info: dict[str, Incomplete]) -> bool: ... diff --git a/stubs/dateparser/dateparser/parser.pyi b/stubs/dateparser/dateparser/parser.pyi index e95a043cb404..09b0ce79ab77 100644 --- a/stubs/dateparser/dateparser/parser.pyi +++ b/stubs/dateparser/dateparser/parser.pyi @@ -1,53 +1,64 @@ +import collections import datetime +import re from _typeshed import Incomplete -from typing import Any +from collections.abc import Callable, Generator +from io import StringIO +from typing import Any, Final, Literal, overload -NSP_COMPATIBLE: Any -MERIDIAN: Any -MICROSECOND: Any -EIGHT_DIGIT: Any -HOUR_MINUTE_REGEX: Any +from dateparser.conf import Settings -def no_space_parser_eligibile(datestring): ... -def get_unresolved_attrs(parser_object): ... +NSP_COMPATIBLE: Final[re.Pattern[str]] +MERIDIAN: Final[re.Pattern[str]] +MICROSECOND: Final[re.Pattern[str]] +EIGHT_DIGIT: Final[re.Pattern[str]] +HOUR_MINUTE_REGEX: Final[re.Pattern[str]] -date_order_chart: Any +def no_space_parser_eligibile(datestring: str) -> bool: ... +def get_unresolved_attrs( + parser_object: object, +) -> tuple[list[Literal["year", "month", "day"]], list[Literal["year", "month", "day"]]]: ... -def resolve_date_order(order, lst: Incomplete | None = None): ... +date_order_chart: Final[dict[str, str]] + +@overload +def resolve_date_order(order: str, lst: Literal[True]) -> list[str]: ... +@overload +def resolve_date_order(order: str, lst: Literal[False] | None = None) -> str: ... class _time_parser: - time_directives: Any - def __call__(self, timestring): ... + time_directives: list[str] + def __call__(self, timestring: str) -> datetime.time: ... -time_parser: Any +time_parser: _time_parser class _no_spaces_parser: - period: Any - date_formats: Any + period: dict[str, list[str]] + date_formats: dict[str, list[str]] def __init__(self, *args, **kwargs): ... @classmethod - def parse(cls, datestring, settings): ... + def parse(cls, datestring: str, settings: Settings) -> tuple[datetime.datetime, str]: ... class _parser: - alpha_directives: Any - num_directives: Any - settings: Any - tokens: Any - filtered_tokens: Any - unset_tokens: Any - day: Any - month: Any - year: Any - time: Any - auto_order: Any - ordered_num_directives: Any - def __init__(self, tokens, settings): ... 
+ alpha_directives: collections.OrderedDict[str, list[str]] + num_directives: dict[str, list[str]] + settings: Settings + tokens: list[tuple[Incomplete, Incomplete]] + filtered_tokens: list[tuple[Incomplete, Incomplete, int]] + unset_tokens: list[Incomplete] + day: int | None + month: int | None + year: int | None + time: Callable[[], datetime.time] | None + auto_order: list[str] + ordered_num_directives: collections.OrderedDict[str, list[str]] + def __init__(self, tokens, settings: Settings): ... @classmethod - def parse(cls, datestring, settings, tz: datetime.tzinfo | None = None): ... + def parse(cls, datestring: str, settings: Settings, tz: datetime.tzinfo | None = None): ... class tokenizer: - digits: str - letters: str - instream: Any - def __init__(self, ds) -> None: ... - def tokenize(self) -> None: ... + digits: Literal["0123456789:"] + letters: Literal["abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"] + instream: StringIO + def __init__(self, ds: str) -> None: ... + def tokenize(self) -> Generator[tuple[str, Literal[0, 1, 2]], Any, None]: ... diff --git a/stubs/dateparser/dateparser/timezone_parser.pyi b/stubs/dateparser/dateparser/timezone_parser.pyi index 51ed8deddd3a..0b0558d169b3 100644 --- a/stubs/dateparser/dateparser/timezone_parser.pyi +++ b/stubs/dateparser/dateparser/timezone_parser.pyi @@ -1,18 +1,19 @@ -from datetime import tzinfo -from typing import Any +import re +from collections.abc import Generator +from datetime import datetime, timedelta, tzinfo class StaticTzInfo(tzinfo): - def __init__(self, name, offset) -> None: ... - def tzname(self, dt): ... - def utcoffset(self, dt): ... - def dst(self, dt): ... - def localize(self, dt, is_dst: bool = False): ... - def __getinitargs__(self): ... + def __init__(self, name: str, offset: timedelta) -> None: ... + def tzname(self, dt) -> str: ... + def utcoffset(self, dt) -> timedelta: ... + def dst(self, dt) -> timedelta: ... + def localize(self, dt: datetime, is_dst: bool = False) -> datetime: ... + def __getinitargs__(self) -> tuple[str, timedelta]: ... -def pop_tz_offset_from_string(date_string, as_offset: bool = True): ... -def word_is_tz(word): ... -def convert_to_local_tz(datetime_obj, datetime_tz_offset): ... -def build_tz_offsets(search_regex_parts): ... -def get_local_tz_offset(): ... +def pop_tz_offset_from_string(date_string: str, as_offset: bool = True) -> tuple[str, StaticTzInfo | str | None]: ... +def word_is_tz(word: str) -> bool: ... +def convert_to_local_tz(datetime_obj: datetime, datetime_tz_offset: timedelta) -> datetime: ... +def build_tz_offsets(search_regex_parts: list[str]) -> Generator[tuple[str, dict[str, re.Pattern[str] | timedelta]]]: ... +def get_local_tz_offset() -> timedelta: ... 
-local_tz_offset: Any +local_tz_offset: timedelta diff --git a/stubs/dateparser/dateparser/timezones.pyi b/stubs/dateparser/dateparser/timezones.pyi index aeb47a82712f..c767c52dd861 100644 --- a/stubs/dateparser/dateparser/timezones.pyi +++ b/stubs/dateparser/dateparser/timezones.pyi @@ -1 +1,3 @@ -timezone_info_list: list[dict[str, list[str | tuple[str, ...]]]] +from typing import Final + +timezone_info_list: Final[list[dict[str, list[str | tuple[str, str | int]]]]] diff --git a/stubs/dateparser/dateparser/utils/__init__.pyi b/stubs/dateparser/dateparser/utils/__init__.pyi index 0bafcab4d633..573e5f1d1957 100644 --- a/stubs/dateparser/dateparser/utils/__init__.pyi +++ b/stubs/dateparser/dateparser/utils/__init__.pyi @@ -1,6 +1,7 @@ from _typeshed import Incomplete from collections import OrderedDict from collections.abc import Mapping +from logging import Logger from typing import Any def strip_braces(date_string: str) -> str: ... @@ -20,7 +21,7 @@ def get_next_leap_year(year): ... def set_correct_day_from_settings(date_obj, settings, current_day: Incomplete | None = None): ... def set_correct_month_from_settings(date_obj, settings, current_month=None): ... def registry(cls): ... -def get_logger() -> Any: ... +def get_logger() -> Logger: ... def setup_logging() -> None: ... # TODO: this needs `types-pytz` and a type-alias diff --git a/stubs/dateparser/dateparser/utils/strptime.pyi b/stubs/dateparser/dateparser/utils/strptime.pyi index 5fceb4c1a3fe..f2171fa58873 100644 --- a/stubs/dateparser/dateparser/utils/strptime.pyi +++ b/stubs/dateparser/dateparser/utils/strptime.pyi @@ -1,8 +1,9 @@ +import re from datetime import datetime -from typing import Any +from typing import Any, Final -TIME_MATCHER: Any -MS_SEARCHER: Any +TIME_MATCHER: Final[re.Pattern[str]] +MS_SEARCHER: Final[re.Pattern[str]] def patch_strptime() -> Any: ... def strptime(date_string, format) -> datetime: ... 
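For orientation, a minimal usage sketch of the `StaticTzInfo` helpers as annotated above (illustrative only, not part of the patch; the date string and offset are made-up values):

from datetime import datetime, timedelta

from dateparser.timezone_parser import StaticTzInfo, pop_tz_offset_from_string

# A fixed-offset tzinfo: the name is just a label, the timedelta does the work.
tz = StaticTzInfo("EST", timedelta(hours=-5))
aware = tz.localize(datetime(2025, 4, 7, 9, 30))  # naive datetime -> aware datetime
aware.utcoffset()                                 # timedelta(hours=-5)

# Per the annotation above, this returns the input with the recognized zone
# stripped, plus a StaticTzInfo (or None when no zone was matched).
rest, found_tz = pop_tz_offset_from_string("2025-04-07 09:30 EST")
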
diff --git a/stubs/dateparser/dateparser_data/settings.pyi b/stubs/dateparser/dateparser_data/settings.pyi index 2de84e3de707..bc4d69704e4a 100644 --- a/stubs/dateparser/dateparser_data/settings.pyi +++ b/stubs/dateparser/dateparser_data/settings.pyi @@ -1,4 +1,4 @@ -from typing import Any +from typing import Final -default_parsers: Any -settings: Any +default_parsers: Final[list[str]] +settings: Final[dict[str, str | bool | list[str] | float | int]] From 2b4b606acc4db21d501626b4e31ca855237e59dc Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 7 Apr 2025 16:01:30 +0400 Subject: [PATCH 201/388] Improve `oauthlib` (#13794) --- .../oauthlib/oauth1/rfc5849/__init__.pyi | 44 +++++++++-------- .../oauth1/rfc5849/endpoints/access_token.pyi | 4 +- .../rfc5849/endpoints/request_token.pyi | 4 +- .../oauth1/rfc5849/endpoints/resource.pyi | 4 +- .../rfc5849/endpoints/signature_only.pyi | 4 +- .../oauthlib/oauth1/rfc5849/signature.pyi | 49 +++++++++++-------- .../oauthlib/oauth1/rfc5849/utils.pyi | 25 +++++++--- .../connect/core/endpoints/userinfo.pyi | 3 +- .../core/grant_types/authorization_code.pyi | 3 +- .../openid/connect/core/grant_types/base.pyi | 4 +- .../connect/core/grant_types/dispatchers.pyi | 3 +- .../connect/core/grant_types/hybrid.pyi | 3 +- .../connect/core/grant_types/implicit.pyi | 3 +- .../core/grant_types/refresh_token.pyi | 3 +- .../openid/connect/core/request_validator.pyi | 4 +- 15 files changed, 92 insertions(+), 68 deletions(-) diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi index 6058e9d7f1a3..a98b62b667fb 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi @@ -1,24 +1,26 @@ from _typeshed import Incomplete -from typing import Any +from collections.abc import Callable +from logging import Logger +from typing import Any, Final -log: Any -SIGNATURE_HMAC_SHA1: str -SIGNATURE_HMAC_SHA256: str -SIGNATURE_HMAC_SHA512: str -SIGNATURE_HMAC: str -SIGNATURE_RSA_SHA1: str -SIGNATURE_RSA_SHA256: str -SIGNATURE_RSA_SHA512: str -SIGNATURE_RSA: str -SIGNATURE_PLAINTEXT: str -SIGNATURE_METHODS: Any -SIGNATURE_TYPE_AUTH_HEADER: str -SIGNATURE_TYPE_QUERY: str -SIGNATURE_TYPE_BODY: str -CONTENT_TYPE_FORM_URLENCODED: str +log: Logger +SIGNATURE_HMAC_SHA1: Final[str] +SIGNATURE_HMAC_SHA256: Final[str] +SIGNATURE_HMAC_SHA512: Final[str] +SIGNATURE_HMAC: Final[str] +SIGNATURE_RSA_SHA1: Final[str] +SIGNATURE_RSA_SHA256: Final[str] +SIGNATURE_RSA_SHA512: Final[str] +SIGNATURE_RSA: Final[str] +SIGNATURE_PLAINTEXT: Final[str] +SIGNATURE_METHODS: Final[tuple[str, str, str, str, str, str, str]] +SIGNATURE_TYPE_AUTH_HEADER: Final[str] +SIGNATURE_TYPE_QUERY: Final[str] +SIGNATURE_TYPE_BODY: Final[str] +CONTENT_TYPE_FORM_URLENCODED: Final[str] class Client: - SIGNATURE_METHODS: Any + SIGNATURE_METHODS: dict[str, Callable[[str, Incomplete], str]] @classmethod def register_signature_method(cls, method_name, method_callback) -> None: ... 
client_key: Any @@ -37,8 +39,8 @@ class Client: timestamp: Any def __init__( self, - client_key, - client_secret: Incomplete | None = None, + client_key: str, + client_secret: str | None = None, resource_owner_key: Incomplete | None = None, resource_owner_secret: Incomplete | None = None, callback_uri: Incomplete | None = None, @@ -58,7 +60,7 @@ class Client: self, uri, http_method: str = "GET", - body: Incomplete | None = None, - headers: Incomplete | None = None, + body: str | None = None, + headers: dict[str, str] | None = None, realm: Incomplete | None = None, ): ... diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi index b924cc7cc7f0..a9f541a2b282 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi @@ -1,9 +1,9 @@ from _typeshed import Incomplete -from typing import Any +from logging import Logger from .base import BaseEndpoint as BaseEndpoint -log: Any +log: Logger class AccessTokenEndpoint(BaseEndpoint): def create_access_token(self, request, credentials): ... diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi index 940486b28a02..dcdd497ec614 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi @@ -1,9 +1,9 @@ from _typeshed import Incomplete -from typing import Any +from logging import Logger from .base import BaseEndpoint as BaseEndpoint -log: Any +log: Logger class RequestTokenEndpoint(BaseEndpoint): def create_request_token(self, request, credentials): ... 
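To make the tightened `oauthlib.oauth1` `Client` signature in this patch concrete, a short signing sketch (illustrative only; the key, secret and URL are placeholders):

from oauthlib.oauth1 import Client

# client_key must be a str; client_secret may be omitted (str | None).
client = Client("my-key", client_secret="my-secret")

# sign() returns the uri, the headers (now carrying the OAuth Authorization
# header) and the body, matching the str / dict[str, str] annotations above.
uri, headers, body = client.sign(
    "https://api.example.com/resource?q=1",
    http_method="GET",
    headers={"Accept": "application/json"},
)
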
diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi index a7219c670789..c59cab3e82f8 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi @@ -1,9 +1,9 @@ from _typeshed import Incomplete -from typing import Any +from logging import Logger from .base import BaseEndpoint as BaseEndpoint -log: Any +log: Logger class ResourceEndpoint(BaseEndpoint): def validate_protected_resource_request( diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi index 0b0994158455..38ac0d31be9d 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi @@ -1,9 +1,9 @@ from _typeshed import Incomplete -from typing import Any +from logging import Logger from .base import BaseEndpoint as BaseEndpoint -log: Any +log: Logger class SignatureOnlyEndpoint(BaseEndpoint): def validate_request( diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi index e37b9be48439..9da09f5d07e3 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi @@ -1,33 +1,40 @@ -from _typeshed import Incomplete -from typing import Any +from _typeshed import Incomplete, Unused +from collections.abc import Iterable +from logging import Logger -log: Any +from oauthlib.common import Request, _HTTPMethod -def signature_base_string(http_method: str, base_str_uri: str, normalized_encoded_request_parameters: str) -> str: ... +log: Logger + +def signature_base_string(http_method: _HTTPMethod, base_str_uri: str, normalized_encoded_request_parameters: str) -> str: ... def base_string_uri(uri: str, host: str | None = None) -> str: ... def collect_parameters( uri_query: str = "", - body: Incomplete | None = None, - headers: Incomplete | None = None, + body: str | bytes | dict[str, str] | Iterable[tuple[str, str]] | None = None, + headers: dict[str, str] | None = None, exclude_oauth_signature: bool = True, with_realm: bool = False, -): ... -def normalize_parameters(params) -> str: ... -def sign_hmac_sha1_with_client(sig_base_str, client): ... -def verify_hmac_sha1(request, client_secret: Incomplete | None = None, resource_owner_secret: Incomplete | None = None): ... -def sign_hmac_sha1(base_string, client_secret, resource_owner_secret): ... +) -> list[tuple[str, str]]: ... +def normalize_parameters(params: dict[str, str]) -> str: ... +def sign_hmac_sha1_with_client(sig_base_str: str, client): ... +def verify_hmac_sha1( + request: Request, client_secret: Incomplete | None = None, resource_owner_secret: Incomplete | None = None +) -> bool: ... +def sign_hmac_sha1(base_string: str | bytes, client_secret, resource_owner_secret): ... def sign_hmac_sha256_with_client(sig_base_str, client): ... -def verify_hmac_sha256(request, client_secret: Incomplete | None = None, resource_owner_secret: Incomplete | None = None): ... -def sign_hmac_sha256(base_string, client_secret, resource_owner_secret): ... +def verify_hmac_sha256( + request, client_secret: Incomplete | None = None, resource_owner_secret: Incomplete | None = None +) -> bool: ... +def sign_hmac_sha256(base_string: str | bytes, client_secret, resource_owner_secret): ... 
def sign_hmac_sha512_with_client(sig_base_str: str, client): ... -def verify_hmac_sha512(request, client_secret: str | None = None, resource_owner_secret: str | None = None): ... -def sign_rsa_sha1_with_client(sig_base_str, client): ... -def verify_rsa_sha1(request, rsa_public_key: str): ... +def verify_hmac_sha512(request, client_secret: str | None = None, resource_owner_secret: str | None = None) -> bool: ... +def sign_rsa_sha1_with_client(sig_base_str: str | bytes, client): ... +def verify_rsa_sha1(request, rsa_public_key: str) -> bool: ... def sign_rsa_sha1(base_string, rsa_private_key): ... def sign_rsa_sha256_with_client(sig_base_str: str, client): ... -def verify_rsa_sha256(request, rsa_public_key: str): ... +def verify_rsa_sha256(request, rsa_public_key: str) -> bool: ... def sign_rsa_sha512_with_client(sig_base_str: str, client): ... -def verify_rsa_sha512(request, rsa_public_key: str): ... -def sign_plaintext_with_client(_signature_base_string, client): ... -def sign_plaintext(client_secret, resource_owner_secret): ... -def verify_plaintext(request, client_secret: Incomplete | None = None, resource_owner_secret: Incomplete | None = None): ... +def verify_rsa_sha512(request, rsa_public_key: str) -> bool: ... +def sign_plaintext_with_client(_signature_base_string: Unused, client) -> str: ... +def sign_plaintext(client_secret: str | None, resource_owner_secret: str | None) -> str: ... +def verify_plaintext(request, client_secret: str | None = None, resource_owner_secret: str | None = None) -> bool: ... diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/utils.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/utils.pyi index cce8ec232e67..14f91fd1434c 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/utils.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/utils.pyi @@ -1,9 +1,18 @@ -UNICODE_ASCII_CHARACTER_SET: str +from collections.abc import Callable, Iterable +from typing import Any, Final, TypeVar -def filter_params(target): ... -def filter_oauth_params(params): ... -def escape(u): ... -def unescape(u): ... -def parse_keqv_list(l): ... -def parse_http_list(u): ... -def parse_authorization_header(authorization_header): ... +_T = TypeVar("_T") + +UNICODE_ASCII_CHARACTER_SET: Final[str] + +def filter_params( + target: Callable[[dict[str, Any] | Iterable[tuple[str, Any]], _T], object], +) -> Callable[[list[str], _T], object]: ... +def filter_oauth_params( + params: dict[str, Any] | Iterable[tuple[str, Any]], +) -> list[str]: ... # we don't care about second (Any) part +def escape(u: str) -> str: ... +def unescape(u: str) -> str: ... +def parse_keqv_list(l: list[str]) -> dict[str, str]: ... +def parse_http_list(u: str) -> list[str]: ... +def parse_authorization_header(authorization_header: str) -> list[tuple[str, str]]: ... 
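A small illustration of the newly annotated oauth1 helpers (the header value below is made up):

from oauthlib.oauth1.rfc5849.utils import escape, parse_authorization_header, unescape

escape("check nonce")       # 'check%20nonce' (str in, str out)
unescape("check%20nonce")   # 'check nonce'

# Per the new annotation, this yields a list of (name, value) pairs.
params = parse_authorization_header(
    'OAuth oauth_consumer_key="abc", oauth_nonce="xyz"'
)
# e.g. [('oauth_consumer_key', 'abc'), ('oauth_nonce', 'xyz')]
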
diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi index 4e6239a084cb..1b22e896f817 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi @@ -1,9 +1,10 @@ from _typeshed import Incomplete +from logging import Logger from typing import Any from oauthlib.oauth2.rfc6749.endpoints.base import BaseEndpoint as BaseEndpoint -log: Any +log: Logger class UserInfoEndpoint(BaseEndpoint): bearer: Any diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi index c977ff3928cb..a244bccd5eb4 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi @@ -1,9 +1,10 @@ from _typeshed import Incomplete +from logging import Logger from typing import Any from .base import GrantTypeBase as GrantTypeBase -log: Any +log: Logger class AuthorizationCodeGrant(GrantTypeBase): proxy_target: Any diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi index be05e3f78c57..0e049a5bcb0a 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from typing import Any +from logging import Logger -log: Any +log: Logger class GrantTypeBase: def __getattr__(self, attr: str): ... diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi index f33ed52817fb..bfc918c0b74b 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi @@ -1,7 +1,8 @@ from _typeshed import Incomplete +from logging import Logger from typing import Any -log: Any +log: Logger class Dispatcher: default_grant: Any diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi index 042510bacdec..9c909a884835 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete +from logging import Logger from typing import Any from oauthlib.oauth2.rfc6749.errors import InvalidRequestError as InvalidRequestError @@ -6,7 +7,7 @@ from oauthlib.oauth2.rfc6749.errors import InvalidRequestError as InvalidRequest from ..request_validator import RequestValidator as RequestValidator from .base import GrantTypeBase as GrantTypeBase -log: Any +log: Logger class HybridGrant(GrantTypeBase): request_validator: Any diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi index 136f25661891..138ab6e228ee 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi @@ -1,9 +1,10 @@ from _typeshed import Incomplete +from logging import Logger from typing import Any from .base import GrantTypeBase as GrantTypeBase -log: Any 
+log: Logger class ImplicitGrant(GrantTypeBase): proxy_target: Any diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/refresh_token.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/refresh_token.pyi index 810e4f8dbfc6..cf126c21c85e 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/refresh_token.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/refresh_token.pyi @@ -1,8 +1,9 @@ from _typeshed import Incomplete +from logging import Logger from .base import GrantTypeBase -log: Incomplete +log: Logger class RefreshTokenGrant(GrantTypeBase): proxy_target: Incomplete diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi index 6790cf455374..4e29f272031d 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi @@ -1,8 +1,8 @@ -from typing import Any +from logging import Logger from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator -log: Any +log: Logger class RequestValidator(OAuth2RequestValidator): def get_authorization_code_scopes(self, client_id, code, redirect_uri, request) -> None: ... From db1663f702987c83f5290426a32043645da24f66 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 7 Apr 2025 16:02:09 +0400 Subject: [PATCH 202/388] Improve `oauthlib.oauth2.rfc6749` (#13793) --- .../rfc6749/clients/backend_application.pyi | 16 ++-- .../oauthlib/oauth2/rfc6749/clients/base.pyi | 2 +- .../rfc6749/clients/legacy_application.pyi | 40 +++++++++- .../rfc6749/clients/mobile_application.pyi | 19 ++--- .../rfc6749/clients/service_application.pyi | 62 +++++++++----- .../rfc6749/clients/web_application.pyi | 50 +++++++++--- .../rfc6749/endpoints/authorization.pyi | 22 ++--- .../oauth2/rfc6749/endpoints/base.pyi | 22 ++--- .../oauth2/rfc6749/endpoints/introspect.pyi | 11 ++- .../oauth2/rfc6749/endpoints/metadata.pyi | 8 +- .../rfc6749/endpoints/pre_configured.pyi | 10 +-- .../oauth2/rfc6749/endpoints/resource.pyi | 16 ++-- .../oauth2/rfc6749/endpoints/revocation.pyi | 11 ++- .../oauth2/rfc6749/endpoints/token.pyi | 20 ++--- .../grant_types/authorization_code.pyi | 30 ++++--- .../oauth2/rfc6749/grant_types/base.pyi | 80 ++++++++++++++----- .../grant_types/client_credentials.pyi | 13 +-- .../oauth2/rfc6749/grant_types/implicit.pyi | 22 +++-- .../rfc6749/grant_types/refresh_token.pyi | 28 +++++-- .../resource_owner_password_credentials.pyi | 13 +-- .../oauthlib/oauth2/rfc6749/parameters.pyi | 2 + 21 files changed, 334 insertions(+), 163 deletions(-) diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi index 8bc1bd5b5a36..7f8ff4894942 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi @@ -1,9 +1,15 @@ -from _typeshed import Incomplete - -from .base import Client as Client +from .base import Client class BackendApplicationClient(Client): grant_type: str def prepare_request_body( - self, body: str = "", scope: Incomplete | None = None, include_client_id: bool = False, **kwargs - ): ... 
+ self, + body: str = "", + scope: str | set[object] | tuple[object] | list[object] | None = None, + include_client_id: bool = False, + *, + code_verifier: str | None = None, + client_id: str | None = None, + client_secret: str | None = None, + **kwargs, + ) -> str: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi index dc5ae3608a70..94624d1e207e 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi @@ -27,7 +27,7 @@ class Client: state_generator: Callable[[], str] state: str | None redirect_url: str | None - code: Incomplete + code: str | None expires_in: ConvertibleToInt | None code_verifier: str | None code_challenge: str | None diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi index 81e2d0600ee6..9fe5fd41d0ff 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi @@ -1,10 +1,42 @@ from _typeshed import Incomplete +from collections.abc import Callable -from .base import Client as Client +from .base import Client, _TokenPlacement class LegacyApplicationClient(Client): grant_type: str - def __init__(self, client_id, **kwargs) -> None: ... + def __init__( + self, + client_id: str, + *, + default_token_placement: _TokenPlacement = "auth_header", + token_type: str = "Bearer", + access_token: str | None = None, + refresh_token: str | None = None, + mac_key: str | bytes | bytearray | None = None, + mac_algorithm: str | None = None, + token: dict[str, Incomplete] | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, + state: str | None = None, + redirect_url: str | None = None, + state_generator: Callable[[], str] = ..., + code_verifier: str | None = None, + code_challenge: str | None = None, + code_challenge_method: str | None = None, + **kwargs, + ) -> None: ... def prepare_request_body( - self, username, password, body: str = "", scope: Incomplete | None = None, include_client_id: bool = False, **kwargs - ): ... + self, + username: str, + password: str, + body: str = "", + scope: str | set[object] | tuple[object] | list[object] | None = None, + include_client_id: bool = False, + *, + code_verifier: str | None = None, + client_id: str | None = None, + client_secret: str | None = None, + code: str | None = None, + redirect_uri: str | None = None, + **kwargs, + ) -> str: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi index ce40e76b2f00..a6b945f79259 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi @@ -1,17 +1,18 @@ -from _typeshed import Incomplete -from typing import Any +from oauthlib.oauth2.rfc6749.tokens import OAuth2Token -from .base import Client as Client +from .base import Client class MobileApplicationClient(Client): response_type: str def prepare_request_uri( self, uri, - redirect_uri: Incomplete | None = None, - scope: Incomplete | None = None, - state: Incomplete | None = None, + redirect_uri: str | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, + state: str | None = None, **kwargs, - ): ... 
- token: Any - def parse_request_uri_response(self, uri, state: Incomplete | None = None, scope: Incomplete | None = None): ... + ) -> str: ... + token: OAuth2Token + def parse_request_uri_response( + self, uri: str, state: str | None = None, scope: str | set[object] | tuple[object] | list[object] | None = None + ) -> OAuth2Token: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi index 2c3809439039..00b375898b98 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi @@ -1,34 +1,56 @@ from _typeshed import Incomplete -from typing import Any +from collections.abc import Callable -from .base import Client as Client +from .base import Client, _TokenPlacement class ServiceApplicationClient(Client): grant_type: str - private_key: Any - subject: Any - issuer: Any - audience: Any + private_key: str | None + subject: str | None + issuer: str | None + audience: str | None def __init__( self, - client_id, - private_key: Incomplete | None = None, - subject: Incomplete | None = None, - issuer: Incomplete | None = None, - audience: Incomplete | None = None, + client_id: str, + private_key: str | None = None, + subject: str | None = None, + issuer: str | None = None, + audience: str | None = None, + *, + default_token_placement: _TokenPlacement = "auth_header", + token_type: str = "Bearer", + access_token: str | None = None, + refresh_token: str | None = None, + mac_key: str | bytes | bytearray | None = None, + mac_algorithm: str | None = None, + token: dict[str, Incomplete] | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, + state: str | None = None, + redirect_url: str | None = None, + state_generator: Callable[[], str] = ..., + code_verifier: str | None = None, + code_challenge: str | None = None, + code_challenge_method: str | None = None, **kwargs, ) -> None: ... def prepare_request_body( self, - private_key: Incomplete | None = None, - subject: Incomplete | None = None, - issuer: Incomplete | None = None, - audience: Incomplete | None = None, - expires_at: Incomplete | None = None, - issued_at: Incomplete | None = None, - extra_claims: Incomplete | None = None, + private_key: str | None = None, + subject: str | None = None, + issuer: str | None = None, + audience: str | None = None, + expires_at: float | None = None, + issued_at: float | None = None, + extra_claims: dict[str, Incomplete] | None = None, body: str = "", - scope: Incomplete | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, include_client_id: bool = False, + *, + not_before: int | None = None, + jwt_id: str | None = None, + client_id: str | None = None, + client_secret: str | None = None, + code: str | None = None, + redirect_uri: str | None = None, **kwargs, - ): ... + ) -> str: ... 
diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi index ab025bb04e83..53d4d600b9c1 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi @@ -1,29 +1,53 @@ from _typeshed import Incomplete -from typing import Any +from collections.abc import Callable -from .base import Client as Client +from .base import Client, _TokenPlacement class WebApplicationClient(Client): grant_type: str - code: Any - def __init__(self, client_id, code: Incomplete | None = None, **kwargs) -> None: ... + code: str | None + def __init__( + self, + client_id: str, + code: str | None = None, + *, + default_token_placement: _TokenPlacement = "auth_header", + token_type: str = "Bearer", + access_token: str | None = None, + refresh_token: str | None = None, + mac_key: str | bytes | bytearray | None = None, + mac_algorithm: str | None = None, + token: dict[str, Incomplete] | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, + state: str | None = None, + redirect_url: str | None = None, + state_generator: Callable[[], str] = ..., + code_verifier: str | None = None, + code_challenge: str | None = None, + code_challenge_method: str | None = None, + **kwargs, + ) -> None: ... def prepare_request_uri( self, - uri, - redirect_uri: Incomplete | None = None, - scope: Incomplete | None = None, - state: Incomplete | None = None, + uri: str, + redirect_uri: str | None = None, + scope: str | set[object] | tuple[object] | list[object] | None = None, + state: str | None = None, code_challenge: str | None = None, code_challenge_method: str | None = "plain", **kwargs, - ): ... + ) -> str: ... def prepare_request_body( self, - code: Incomplete | None = None, - redirect_uri: Incomplete | None = None, + code: str | None = None, + redirect_uri: str | None = None, body: str = "", include_client_id: bool = True, code_verifier: str | None = None, + *, + scope: str | set[object] | tuple[object] | list[object] | None = None, + client_id: str | None = None, + client_secret: str | None = None, **kwargs, - ): ... - def parse_request_uri_response(self, uri, state: Incomplete | None = None): ... + ) -> str: ... + def parse_request_uri_response(self, uri: str, state: str | None = None) -> dict[str, str]: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi index c0f59544443d..f99aec2fa801 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi @@ -1,12 +1,14 @@ from _typeshed import Incomplete -from typing import Any +from logging import Logger -from .base import BaseEndpoint as BaseEndpoint +from oauthlib.common import _HTTPMethod -log: Any +from .base import BaseEndpoint + +log: Logger class AuthorizationEndpoint(BaseEndpoint): - def __init__(self, default_response_type, default_token_type, response_types) -> None: ... + def __init__(self, default_response_type, default_token_type, response_types: dict[str, Incomplete]) -> None: ... @property def response_types(self): ... @property @@ -17,13 +19,13 @@ class AuthorizationEndpoint(BaseEndpoint): def default_token_type(self): ... 
def create_authorization_response( self, - uri, - http_method: str = "GET", - body: Incomplete | None = None, - headers: Incomplete | None = None, + uri: str, + http_method: _HTTPMethod = "GET", + body: str | None = None, + headers: dict[str, str] | None = None, scopes: Incomplete | None = None, - credentials: Incomplete | None = None, + credentials: dict[str, Incomplete] | None = None, ): ... def validate_authorization_request( - self, uri, http_method: str = "GET", body: Incomplete | None = None, headers: Incomplete | None = None + self, uri: str, http_method: _HTTPMethod = "GET", body: str | None = None, headers: dict[str, str] | None = None ): ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/base.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/base.pyi index a7d18663ee87..7a5be82caa62 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/base.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/base.pyi @@ -1,20 +1,24 @@ -from typing import Any +from _typeshed import Incomplete +from collections.abc import Callable, Sequence +from logging import Logger -log: Any +log: Logger class BaseEndpoint: def __init__(self) -> None: ... @property - def valid_request_methods(self): ... + def valid_request_methods(self) -> Sequence[str] | None: ... @valid_request_methods.setter - def valid_request_methods(self, valid_request_methods) -> None: ... + def valid_request_methods(self, valid_request_methods: Sequence[str] | None) -> None: ... @property - def available(self): ... + def available(self) -> bool: ... @available.setter - def available(self, available) -> None: ... + def available(self, available: bool) -> None: ... @property - def catch_errors(self): ... + def catch_errors(self) -> bool: ... @catch_errors.setter - def catch_errors(self, catch_errors) -> None: ... + def catch_errors(self, catch_errors: bool) -> None: ... -def catch_errors_and_unavailability(f): ... +def catch_errors_and_unavailability( + f: Callable[..., tuple[dict[str, Incomplete], str, int]], +) -> Callable[..., tuple[dict[str, Incomplete], str, int]]: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi index 2a933125e8c8..56a65c6b3bcd 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi @@ -1,9 +1,12 @@ from _typeshed import Incomplete +from logging import Logger from typing import Any -from .base import BaseEndpoint as BaseEndpoint +from oauthlib.common import Request, _HTTPMethod -log: Any +from .base import BaseEndpoint + +log: Logger class IntrospectEndpoint(BaseEndpoint): valid_token_types: Any @@ -12,6 +15,6 @@ class IntrospectEndpoint(BaseEndpoint): supported_token_types: Any def __init__(self, request_validator, supported_token_types: Incomplete | None = None) -> None: ... def create_introspect_response( - self, uri, http_method: str = "POST", body: Incomplete | None = None, headers: Incomplete | None = None + self, uri: str, http_method: _HTTPMethod = "POST", body: str | None = None, headers: dict[str, str] | None = None ): ... - def validate_introspect_request(self, request) -> None: ... + def validate_introspect_request(self, request: Request) -> None: ... 
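The `Callable[..., tuple[dict[str, Incomplete], str, int]]` on `catch_errors_and_unavailability` above spells out oauthlib's usual `(headers, body, status)` return triple for `create_*_response` methods. The `BaseEndpoint` properties are runtime-checked attributes; a tiny sketch of what the new `bool` / `Sequence[str]` annotations describe (illustrative only, not part of the patch):

from oauthlib.oauth2.rfc6749.endpoints.base import BaseEndpoint

ep = BaseEndpoint()
ep.catch_errors = True                 # setter accepts a bool only
ep.available = False                   # likewise a bool
ep.valid_request_methods = ("POST",)   # a sequence of method names, or None
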
diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi index 39494768f56c..c758bd01008e 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi @@ -1,9 +1,9 @@ -from _typeshed import Incomplete +from logging import Logger from typing import Any -from .base import BaseEndpoint as BaseEndpoint +from .base import BaseEndpoint -log: Any +log: Logger class MetadataEndpoint(BaseEndpoint): raise_errors: Any @@ -12,7 +12,7 @@ class MetadataEndpoint(BaseEndpoint): claims: Any def __init__(self, endpoints, claims={}, raise_errors: bool = True) -> None: ... def create_metadata_response( - self, uri, http_method: str = "GET", body: Incomplete | None = None, headers: Incomplete | None = None + self, uri: str, http_method: str = "GET", body: str | None = None, headers: dict[str, str] | None = None ): ... def validate_metadata( self, array, key, is_required: bool = False, is_list: bool = False, is_url: bool = False, is_issuer: bool = False diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi index 35bd25c7e3bf..c6f6b442fa3f 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi @@ -1,11 +1,11 @@ from _typeshed import Incomplete from typing import Any -from .authorization import AuthorizationEndpoint as AuthorizationEndpoint -from .introspect import IntrospectEndpoint as IntrospectEndpoint -from .resource import ResourceEndpoint as ResourceEndpoint -from .revocation import RevocationEndpoint as RevocationEndpoint -from .token import TokenEndpoint as TokenEndpoint +from .authorization import AuthorizationEndpoint +from .introspect import IntrospectEndpoint +from .resource import ResourceEndpoint +from .revocation import RevocationEndpoint +from .token import TokenEndpoint class Server(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, ResourceEndpoint, RevocationEndpoint): auth_grant: Any diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi index 7bc1e22a9212..34c2f39eb08b 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi @@ -1,9 +1,11 @@ from _typeshed import Incomplete -from typing import Any +from logging import Logger -from .base import BaseEndpoint as BaseEndpoint +from oauthlib.common import Request, _HTTPMethod -log: Any +from .base import BaseEndpoint + +log: Logger class ResourceEndpoint(BaseEndpoint): def __init__(self, default_token, token_types) -> None: ... @@ -16,9 +18,9 @@ class ResourceEndpoint(BaseEndpoint): def verify_request( self, uri, - http_method: str = "GET", - body: Incomplete | None = None, - headers: Incomplete | None = None, + http_method: _HTTPMethod = "GET", + body: str | None = None, + headers: dict[str, str] | None = None, scopes: Incomplete | None = None, ): ... - def find_token_type(self, request): ... + def find_token_type(self, request: Request): ... 
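The `verify_request` annotations above describe the usual resource-protection call. A hedged call-shape sketch, assuming `provider` is an already-configured oauthlib server such as `WebApplicationServer(your_validator)` (`provider`, `your_validator`, the URL, token and scope are placeholder names, not part of the patch):

# Returns a (valid, request) pair: a bool plus the parsed oauthlib Request.
valid, request = provider.verify_request(
    "https://api.example.com/photos",            # uri: str
    http_method="GET",                           # _HTTPMethod
    body=None,                                   # str | None
    headers={"Authorization": "Bearer abc123"},  # dict[str, str] | None
    scopes=["photos:read"],
)
if not valid:
    ...  # reject the call, e.g. with HTTP 401
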
diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi index 3fb860c8bda1..6000e99e125a 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi @@ -1,9 +1,12 @@ from _typeshed import Incomplete +from logging import Logger from typing import Any -from .base import BaseEndpoint as BaseEndpoint +from oauthlib.common import Request, _HTTPMethod -log: Any +from .base import BaseEndpoint + +log: Logger class RevocationEndpoint(BaseEndpoint): valid_token_types: Any @@ -15,6 +18,6 @@ class RevocationEndpoint(BaseEndpoint): self, request_validator, supported_token_types: Incomplete | None = None, enable_jsonp: bool = False ) -> None: ... def create_revocation_response( - self, uri, http_method: str = "POST", body: Incomplete | None = None, headers: Incomplete | None = None + self, uri: str, http_method: _HTTPMethod = "POST", body: str | None = None, headers: dict[str, str] | None = None ): ... - def validate_revocation_request(self, request) -> None: ... + def validate_revocation_request(self, request: Request) -> None: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi index 5d73cc7a8a03..ffce66d48ae2 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi @@ -1,12 +1,14 @@ from _typeshed import Incomplete -from typing import Any +from logging import Logger -from .base import BaseEndpoint as BaseEndpoint +from oauthlib.common import Request, _HTTPMethod -log: Any +from .base import BaseEndpoint + +log: Logger class TokenEndpoint(BaseEndpoint): - valid_request_methods: Any + valid_request_methods: tuple[str] def __init__(self, default_grant_type, default_token_type, grant_types) -> None: ... @property def grant_types(self): ... @@ -18,12 +20,12 @@ class TokenEndpoint(BaseEndpoint): def default_token_type(self): ... def create_token_response( self, - uri, - http_method: str = "POST", - body: Incomplete | None = None, - headers: Incomplete | None = None, + uri: str, + http_method: _HTTPMethod = "POST", + body: str | None = None, + headers: dict[str, str] | None = None, credentials: Incomplete | None = None, grant_type_for_scope: Incomplete | None = None, claims: Incomplete | None = None, ): ... - def validate_token_request(self, request) -> None: ... + def validate_token_request(self, request: Request) -> None: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.pyi index 28cd3018e7fc..560dd3d63abf 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.pyi @@ -1,18 +1,24 @@ -from typing import Any +from _typeshed import Incomplete +from logging import Logger -from .base import GrantTypeBase as GrantTypeBase +from oauthlib.common import Request -log: Any +from ..tokens import TokenBase +from .base import GrantTypeBase -def code_challenge_method_s256(verifier, challenge): ... -def code_challenge_method_plain(verifier, challenge): ... +log: Logger + +def code_challenge_method_s256(verifier: str, challenge: str) -> bool: ... +def code_challenge_method_plain(verifier: str, challenge: str) -> bool: ... 
class AuthorizationCodeGrant(GrantTypeBase): default_response_mode: str - response_types: Any - def create_authorization_code(self, request): ... - def create_authorization_response(self, request, token_handler): ... - def create_token_response(self, request, token_handler): ... - def validate_authorization_request(self, request): ... - def validate_token_request(self, request) -> None: ... - def validate_code_challenge(self, challenge, challenge_method, verifier): ... + response_types: list[str] + def create_authorization_code(self, request: Request) -> dict[str, str]: ... + def create_authorization_response( + self, request: Request, token_handler: TokenBase + ) -> tuple[dict[str, str], None, int | None]: ... + def create_token_response(self, request: Request, token_handler: TokenBase) -> tuple[dict[str, str], str, int | None]: ... + def validate_authorization_request(self, request: Request) -> tuple[Incomplete, dict[str, Incomplete]]: ... + def validate_token_request(self, request: Request) -> None: ... + def validate_code_challenge(self, challenge: str, challenge_method: str, verifier: str) -> bool: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/base.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/base.pyi index 16955dff3612..f8f35c9827fd 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/base.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/base.pyi @@ -1,32 +1,68 @@ from _typeshed import Incomplete -from typing import Any +from collections.abc import Callable, Iterable +from itertools import chain +from logging import Logger +from typing import TypeVar +from typing_extensions import TypeAlias -log: Any +from oauthlib.common import Request + +from ..request_validator import RequestValidator +from ..tokens import TokenBase + +log: Logger + +_T = TypeVar("_T") +_AuthValidator: TypeAlias = Callable[[Request], dict[str, Incomplete]] +_TokenValidator: TypeAlias = Callable[[Request], None] +_CodeModifier: TypeAlias = Callable[[dict[str, str], TokenBase | None, Request | None], dict[str, str]] +_TokenModifier: TypeAlias = Callable[[dict[str, Incomplete], TokenBase | None, Request | None], dict[str, Incomplete]] class ValidatorsContainer: - pre_auth: Any - post_auth: Any - pre_token: Any - post_token: Any - def __init__(self, post_auth, post_token, pre_auth, pre_token) -> None: ... + pre_auth: Iterable[_AuthValidator] + post_auth: Iterable[_AuthValidator] + pre_token: Iterable[_TokenValidator] + post_token: Iterable[_TokenValidator] + def __init__( + self, + post_auth: Iterable[_AuthValidator], + post_token: Iterable[_TokenValidator], + pre_auth: Iterable[_AuthValidator], + pre_token: Iterable[_TokenValidator], + ) -> None: ... @property - def all_pre(self): ... + def all_pre(self) -> chain[_AuthValidator | _TokenValidator]: ... @property - def all_post(self): ... + def all_post(self) -> chain[_AuthValidator | _TokenValidator]: ... class GrantTypeBase: - error_uri: Any - request_validator: Any + error_uri: str | None + request_validator: RequestValidator | None default_response_mode: str refresh_token: bool - response_types: Any - def __init__(self, request_validator: Incomplete | None = None, **kwargs) -> None: ... - def register_response_type(self, response_type) -> None: ... - def register_code_modifier(self, modifier) -> None: ... - def register_token_modifier(self, modifier) -> None: ... - def create_authorization_response(self, request, token_handler) -> None: ... 
- def create_token_response(self, request, token_handler) -> None: ... - def add_token(self, token, token_handler, request): ... - def validate_grant_type(self, request) -> None: ... - def validate_scopes(self, request) -> None: ... - def prepare_authorization_response(self, request, token, headers, body, status): ... + response_types: list[str] + def __init__( + self, + request_validator: RequestValidator | None = None, + *, + post_auth: Iterable[_AuthValidator] | None = None, + post_token: Iterable[_TokenValidator] | None = None, + pre_auth: Iterable[_AuthValidator] | None = None, + pre_token: Iterable[_TokenValidator] | None = None, + **kwargs, + ) -> None: ... + def register_response_type(self, response_type: str) -> None: ... + def register_code_modifier(self, modifier: _CodeModifier) -> None: ... + def register_token_modifier(self, modifier: _TokenModifier) -> None: ... + def create_authorization_response( + self, request: Request, token_handler: TokenBase + ) -> tuple[dict[str, str], str | None, int | None]: ... + def create_token_response( + self, request: Request, token_handler: TokenBase + ) -> tuple[dict[str, str], str | None, int | None]: ... + def add_token(self, token: dict[str, _T], token_handler: TokenBase, request: Request) -> dict[str, _T]: ... + def validate_grant_type(self, request: Request) -> None: ... + def validate_scopes(self, request: Request) -> None: ... + def prepare_authorization_response( + self, request: Request, token: dict[str, Incomplete], headers: dict[str, str], body: str | None, status: int | None + ) -> tuple[dict[str, str], str | None, int | None]: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/client_credentials.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/client_credentials.pyi index 0d33d8298c1a..d500ef4d4655 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/client_credentials.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/client_credentials.pyi @@ -1,9 +1,12 @@ -from typing import Any +from logging import Logger -from .base import GrantTypeBase as GrantTypeBase +from oauthlib.common import Request -log: Any +from ..tokens import TokenBase +from .base import GrantTypeBase + +log: Logger class ClientCredentialsGrant(GrantTypeBase): - def create_token_response(self, request, token_handler): ... - def validate_token_request(self, request) -> None: ... + def create_token_response(self, request: Request, token_handler: TokenBase) -> tuple[dict[str, str], str, int | None]: ... + def validate_token_request(self, request: Request) -> None: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/implicit.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/implicit.pyi index 613ac0a050d0..e17902e4b361 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/implicit.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/implicit.pyi @@ -1,13 +1,19 @@ -from typing import Any +from _typeshed import Incomplete +from logging import Logger -from .base import GrantTypeBase as GrantTypeBase +from oauthlib.common import Request -log: Any +from ..tokens import TokenBase +from .base import GrantTypeBase + +log: Logger class ImplicitGrant(GrantTypeBase): - response_types: Any + response_types: list[str] grant_allows_refresh_token: bool - def create_authorization_response(self, request, token_handler): ... - def create_token_response(self, request, token_handler): ... - def validate_authorization_request(self, request): ... - def validate_token_request(self, request): ... 
+ def create_authorization_response( + self, request: Request, token_handler: TokenBase + ) -> tuple[dict[str, str], str | None, int]: ... + def create_token_response(self, request: Request, token_handler: TokenBase) -> tuple[dict[str, str], str | None, int]: ... + def validate_authorization_request(self, request: Request) -> tuple[Incomplete, dict[str, Incomplete]]: ... + def validate_token_request(self, request: Request) -> tuple[Incomplete, dict[str, Incomplete]]: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.pyi index 61a5eacd1e84..6d26d66cba39 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.pyi @@ -1,11 +1,25 @@ -from _typeshed import Incomplete -from typing import Any +from collections.abc import Iterable +from logging import Logger -from .base import GrantTypeBase as GrantTypeBase +from oauthlib.common import Request -log: Any +from ..request_validator import RequestValidator +from ..tokens import TokenBase +from .base import GrantTypeBase, _AuthValidator, _TokenValidator + +log: Logger class RefreshTokenGrant(GrantTypeBase): - def __init__(self, request_validator: Incomplete | None = None, issue_new_refresh_tokens: bool = True, **kwargs) -> None: ... - def create_token_response(self, request, token_handler): ... - def validate_token_request(self, request) -> None: ... + def __init__( + self, + request_validator: RequestValidator | None = None, + issue_new_refresh_tokens: bool = True, + *, + post_auth: Iterable[_AuthValidator] | None = None, + post_token: Iterable[_TokenValidator] | None = None, + pre_auth: Iterable[_AuthValidator] | None = None, + pre_token: Iterable[_TokenValidator] | None = None, + **kwargs, + ) -> None: ... + def create_token_response(self, request: Request, token_handler: TokenBase) -> tuple[dict[str, str], str, int | None]: ... + def validate_token_request(self, request: Request) -> None: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.pyi index 347fa4569b5e..ad407436f188 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.pyi @@ -1,9 +1,12 @@ -from typing import Any +from logging import Logger -from .base import GrantTypeBase as GrantTypeBase +from oauthlib.common import Request -log: Any +from ..tokens import TokenBase +from .base import GrantTypeBase + +log: Logger class ResourceOwnerPasswordCredentialsGrant(GrantTypeBase): - def create_token_response(self, request, token_handler): ... - def validate_token_request(self, request) -> None: ... + def create_token_response(self, request: Request, token_handler: TokenBase) -> tuple[dict[str, str], str, int | None]: ... + def validate_token_request(self, request: Request) -> None: ... 
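To make the new `_AuthValidator`/`_TokenValidator` aliases concrete, here is a hedged sketch of registering a custom pre-authorization hook on a grant type. `log_pre_auth` and `DemoValidator` are illustrative names, not part of oauthlib or these stubs.

    from oauthlib.common import Request
    from oauthlib.oauth2 import RequestValidator
    from oauthlib.oauth2.rfc6749.grant_types import AuthorizationCodeGrant

    def log_pre_auth(request: Request) -> dict[str, str]:
        # shaped like _AuthValidator: Callable[[Request], dict[str, Incomplete]]
        return {"audit_client_id": request.client_id or ""}

    class DemoValidator(RequestValidator):
        ...  # placeholder; a real validator implements the abstract hooks

    grant = AuthorizationCodeGrant(DemoValidator(), pre_auth=[log_pre_auth])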
diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi index 78420ebe0496..0d8fdfcc0987 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi @@ -24,6 +24,8 @@ def prepare_token_request( scope: str | set[object] | tuple[object] | list[object] | None = None, client_id: str | None = None, client_secret: str | None = None, + code: str | None = None, + redirect_uri: str | None = None, **kwargs, ) -> str: ... def prepare_token_revocation_request( From f7c903b1ce98646eecba01a7398530dd9a75ba7f Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Mon, 7 Apr 2025 14:03:17 +0200 Subject: [PATCH 203/388] Make the type-parameter of `ctypes.py_object` optional (#13760) --- stdlib/ctypes/__init__.pyi | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stdlib/ctypes/__init__.pyi b/stdlib/ctypes/__init__.pyi index fe9c239176d2..a7e9c1ce068c 100644 --- a/stdlib/ctypes/__init__.pyi +++ b/stdlib/ctypes/__init__.pyi @@ -27,8 +27,8 @@ from _ctypes import ( from _typeshed import StrPath from ctypes._endian import BigEndianStructure as BigEndianStructure, LittleEndianStructure as LittleEndianStructure from types import GenericAlias -from typing import Any, ClassVar, Generic, TypeVar, type_check_only -from typing_extensions import Self, TypeAlias, deprecated +from typing import Any, ClassVar, Generic, type_check_only +from typing_extensions import Self, TypeAlias, TypeVar, deprecated if sys.platform == "win32": from _ctypes import FormatError as FormatError, get_last_error as get_last_error, set_last_error as set_last_error @@ -36,7 +36,7 @@ if sys.platform == "win32": if sys.version_info >= (3, 11): from ctypes._endian import BigEndianUnion as BigEndianUnion, LittleEndianUnion as LittleEndianUnion -_T = TypeVar("_T") +_T = TypeVar("_T", default=Any) _DLLT = TypeVar("_DLLT", bound=CDLL) _CT = TypeVar("_CT", bound=_CData) From fc0d90405d6a1fe375a4df9957a42cbd1dec9f82 Mon Sep 17 00:00:00 2001 From: Joren Hammudoglu Date: Mon, 7 Apr 2025 14:16:11 +0200 Subject: [PATCH 204/388] `_type_` class attributes in `ctypes`, and fix `ctypes.wintypes.BYTE` (#13777) --- stdlib/ctypes/__init__.pyi | 140 +++++++++++++++++++++++++------------ stdlib/ctypes/wintypes.pyi | 12 +++- 2 files changed, 104 insertions(+), 48 deletions(-) diff --git a/stdlib/ctypes/__init__.pyi b/stdlib/ctypes/__init__.pyi index a7e9c1ce068c..a7e19483301c 100644 --- a/stdlib/ctypes/__init__.pyi +++ b/stdlib/ctypes/__init__.pyi @@ -27,8 +27,8 @@ from _ctypes import ( from _typeshed import StrPath from ctypes._endian import BigEndianStructure as BigEndianStructure, LittleEndianStructure as LittleEndianStructure from types import GenericAlias -from typing import Any, ClassVar, Generic, type_check_only -from typing_extensions import Self, TypeAlias, TypeVar, deprecated +from typing import Any, ClassVar, Generic, Literal, TypeVar, type_check_only +from typing_extensions import Self, TypeAlias, deprecated if sys.platform == "win32": from _ctypes import FormatError as FormatError, get_last_error as get_last_error, set_last_error as set_last_error @@ -186,73 +186,121 @@ if sys.platform == "win32": def wstring_at(ptr: _CVoidConstPLike, size: int = -1) -> str: ... -class c_byte(_SimpleCData[int]): ... +class py_object(_CanCastTo, _SimpleCData[_T]): + _type_: ClassVar[Literal["O"]] + +class c_bool(_SimpleCData[bool]): + _type_: ClassVar[Literal["?"]] + def __init__(self, value: bool = ...) -> None: ... 
+ +class c_byte(_SimpleCData[int]): + _type_: ClassVar[Literal["b"]] + +class c_ubyte(_SimpleCData[int]): + _type_: ClassVar[Literal["B"]] + +class c_short(_SimpleCData[int]): + _type_: ClassVar[Literal["h"]] + +class c_ushort(_SimpleCData[int]): + _type_: ClassVar[Literal["H"]] + +class c_long(_SimpleCData[int]): + _type_: ClassVar[Literal["l"]] + +class c_ulong(_SimpleCData[int]): + _type_: ClassVar[Literal["L"]] + +class c_int(_SimpleCData[int]): # can be an alias for c_long + _type_: ClassVar[Literal["i", "l"]] + +class c_uint(_SimpleCData[int]): # can be an alias for c_ulong + _type_: ClassVar[Literal["I", "L"]] + +class c_longlong(_SimpleCData[int]): # can be an alias for c_long + _type_: ClassVar[Literal["q", "l"]] + +class c_ulonglong(_SimpleCData[int]): # can be an alias for c_ulong + _type_: ClassVar[Literal["Q", "L"]] + +c_int8 = c_byte +c_uint8 = c_ubyte + +class c_int16(_SimpleCData[int]): # can be an alias for c_short or c_int + _type_: ClassVar[Literal["h", "i"]] + +class c_uint16(_SimpleCData[int]): # can be an alias for c_ushort or c_uint + _type_: ClassVar[Literal["H", "I"]] + +class c_int32(_SimpleCData[int]): # can be an alias for c_int or c_long + _type_: ClassVar[Literal["i", "l"]] + +class c_uint32(_SimpleCData[int]): # can be an alias for c_uint or c_ulong + _type_: ClassVar[Literal["I", "L"]] + +class c_int64(_SimpleCData[int]): # can be an alias for c_long or c_longlong + _type_: ClassVar[Literal["l", "q"]] + +class c_uint64(_SimpleCData[int]): # can be an alias for c_ulong or c_ulonglong + _type_: ClassVar[Literal["L", "Q"]] + +class c_ssize_t(_SimpleCData[int]): # alias for c_int, c_long, or c_longlong + _type_: ClassVar[Literal["i", "l", "q"]] + +class c_size_t(_SimpleCData[int]): # alias for c_uint, c_ulong, or c_ulonglong + _type_: ClassVar[Literal["I", "L", "Q"]] + +class c_float(_SimpleCData[float]): + _type_: ClassVar[Literal["f"]] + +class c_double(_SimpleCData[float]): + _type_: ClassVar[Literal["d"]] + +class c_longdouble(_SimpleCData[float]): # can be an alias for c_double + _type_: ClassVar[Literal["d", "g"]] + +if sys.version_info >= (3, 14): + class c_float_complex(_SimpleCData[complex]): + _type_: ClassVar[Literal["E"]] + + class c_double_complex(_SimpleCData[complex]): + _type_: ClassVar[Literal["C"]] + + class c_longdouble_complex(_SimpleCData[complex]): + _type_: ClassVar[Literal["F"]] class c_char(_SimpleCData[bytes]): + _type_: ClassVar[Literal["c"]] def __init__(self, value: int | bytes | bytearray = ...) -> None: ... class c_char_p(_PointerLike, _SimpleCData[bytes | None]): + _type_: ClassVar[Literal["z"]] def __init__(self, value: int | bytes | None = ...) -> None: ... @classmethod def from_param(cls, value: Any, /) -> Self | _CArgObject: ... -class c_double(_SimpleCData[float]): ... -class c_longdouble(_SimpleCData[float]): ... # can be an alias for c_double -class c_float(_SimpleCData[float]): ... -class c_int(_SimpleCData[int]): ... # can be an alias for c_long -class c_long(_SimpleCData[int]): ... -class c_longlong(_SimpleCData[int]): ... # can be an alias for c_long -class c_short(_SimpleCData[int]): ... -class c_size_t(_SimpleCData[int]): ... # alias for c_uint, c_ulong, or c_ulonglong -class c_ssize_t(_SimpleCData[int]): ... # alias for c_int, c_long, or c_longlong -class c_ubyte(_SimpleCData[int]): ... -class c_uint(_SimpleCData[int]): ... # can be an alias for c_ulong -class c_ulong(_SimpleCData[int]): ... -class c_ulonglong(_SimpleCData[int]): ... # can be an alias for c_ulong -class c_ushort(_SimpleCData[int]): ... 
- class c_void_p(_PointerLike, _SimpleCData[int | None]): + _type_: ClassVar[Literal["P"]] @classmethod def from_param(cls, value: Any, /) -> Self | _CArgObject: ... c_voidp = c_void_p # backwards compatibility (to a bug) -class c_wchar(_SimpleCData[str]): ... - -c_int8 = c_byte - -# these are actually dynamic aliases for c_short, c_int, c_long, or c_longlong -class c_int16(_SimpleCData[int]): ... -class c_int32(_SimpleCData[int]): ... -class c_int64(_SimpleCData[int]): ... - -c_uint8 = c_ubyte - -# these are actually dynamic aliases for c_ushort, c_uint, c_ulong, or c_ulonglong -class c_uint16(_SimpleCData[int]): ... -class c_uint32(_SimpleCData[int]): ... -class c_uint64(_SimpleCData[int]): ... +class c_wchar(_SimpleCData[str]): + _type_: ClassVar[Literal["u"]] class c_wchar_p(_PointerLike, _SimpleCData[str | None]): + _type_: ClassVar[Literal["Z"]] def __init__(self, value: int | str | None = ...) -> None: ... @classmethod def from_param(cls, value: Any, /) -> Self | _CArgObject: ... -class c_bool(_SimpleCData[bool]): - def __init__(self, value: bool = ...) -> None: ... - if sys.platform == "win32": - class HRESULT(_SimpleCData[int]): ... # TODO: undocumented + class HRESULT(_SimpleCData[int]): # TODO: undocumented + _type_: ClassVar[Literal["l"]] if sys.version_info >= (3, 12): # At runtime, this is an alias for either c_int32 or c_int64, - # which are themselves an alias for one of c_short, c_int, c_long, or c_longlong + # which are themselves an alias for one of c_int, c_long, or c_longlong # This covers all our bases. - c_time_t: type[c_int32 | c_int64 | c_short | c_int | c_long | c_longlong] - -class py_object(_CanCastTo, _SimpleCData[_T]): ... - -if sys.version_info >= (3, 14): - class c_float_complex(_SimpleCData[complex]): ... - class c_double_complex(_SimpleCData[complex]): ... - class c_longdouble_complex(_SimpleCData[complex]): ... 
+ c_time_t: type[c_int32 | c_int64 | c_int | c_long | c_longlong] diff --git a/stdlib/ctypes/wintypes.pyi b/stdlib/ctypes/wintypes.pyi index e938d8f22957..63f117787aa0 100644 --- a/stdlib/ctypes/wintypes.pyi +++ b/stdlib/ctypes/wintypes.pyi @@ -1,10 +1,10 @@ +import sys from _ctypes import _CArgObject, _CField from ctypes import ( Array, Structure, _Pointer, _SimpleCData, - c_byte, c_char, c_char_p, c_double, @@ -24,7 +24,15 @@ from ctypes import ( from typing import Any, TypeVar from typing_extensions import Self, TypeAlias -BYTE = c_byte +if sys.version_info >= (3, 12): + from ctypes import c_ubyte + + BYTE = c_ubyte +else: + from ctypes import c_byte + + BYTE = c_byte + WORD = c_ushort DWORD = c_ulong CHAR = c_char From c6e6323e1ee028cbf83c42f8e46112b1da6308da Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 7 Apr 2025 16:21:51 +0400 Subject: [PATCH 205/388] Improve `passlib.utils` (#13798) --- stubs/passlib/passlib/__init__.pyi | 3 ++ stubs/passlib/passlib/utils/__init__.pyi | 60 ++++++++++++++---------- stubs/passlib/passlib/utils/binary.pyi | 45 +++++++++--------- 3 files changed, 63 insertions(+), 45 deletions(-) diff --git a/stubs/passlib/passlib/__init__.pyi b/stubs/passlib/passlib/__init__.pyi index e69de29bb2d1..c5dd95466063 100644 --- a/stubs/passlib/passlib/__init__.pyi +++ b/stubs/passlib/passlib/__init__.pyi @@ -0,0 +1,3 @@ +from typing import Final + +__version__: Final[str] diff --git a/stubs/passlib/passlib/utils/__init__.pyi b/stubs/passlib/passlib/utils/__init__.pyi index 05c444674fa7..0e1c25103bca 100644 --- a/stubs/passlib/passlib/utils/__init__.pyi +++ b/stubs/passlib/passlib/utils/__init__.pyi @@ -1,8 +1,9 @@ +import random import timeit -from _typeshed import Incomplete -from collections.abc import Generator +from _typeshed import ReadableBuffer +from collections.abc import Iterable from hmac import compare_digest -from typing import Any +from typing import Final, Literal, SupportsBytes, SupportsIndex, overload from passlib.utils.compat import JYTHON as JYTHON @@ -34,9 +35,9 @@ __all__ = [ "has_salt_info", ] -sys_bits: Any +sys_bits: Final[int] unix_crypt_schemes: list[str] -rounds_cost_values: Any +rounds_cost_values: Final[list[str]] class SequenceMixin: def __getitem__(self, idx): ... @@ -47,29 +48,40 @@ class SequenceMixin: consteq = compare_digest -def str_consteq(left, right): ... -def saslprep(source, param: str = "value"): ... -def render_bytes(source, *args): ... -def xor_bytes(left, right): ... -def is_same_codec(left, right): ... -def is_ascii_safe(source): ... -def to_bytes(source, encoding: str = "utf-8", param: str = "value", source_encoding: Incomplete | None = None): ... -def to_unicode(source, encoding: str = "utf-8", param: str = "value"): ... -def to_native_str(source, encoding: str = "utf-8", param: str = "value"): ... +def str_consteq(left: str | bytes, right: str | bytes) -> bool: ... +def splitcomma(source: str, sep: str = ",") -> list[str]: ... +def saslprep(source: str, param: str = "value") -> str: ... +def render_bytes(source: str | bytes, *args: str | bytes) -> bytes: ... +def bytes_to_int(value: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer) -> int: ... +def int_to_bytes(value: int, count: SupportsIndex) -> bytes: ... +def xor_bytes( + left: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, + right: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, +) -> bytes: ... +def repeat_string(source: str | bytes, size: int) -> str | bytes: ... +def is_ascii_codec(codec: str) -> bool: ... 
+def is_same_codec(left: str, right: str) -> bool: ... +def is_ascii_safe(source: str | bytes) -> bool: ... +def to_bytes(source: str | bytes, encoding: str = "utf-8", param: str = "value", source_encoding: str | None = None) -> bytes: ... +def to_unicode(source: str | bytes, encoding: str = "utf-8", param: str = "value") -> str: ... +def to_native_str(source: str | bytes, encoding: str = "utf-8", param: str = "value") -> str: ... has_crypt: bool -def safe_crypt(secret, hash) -> None: ... -def test_crypt(secret, hash): ... +def safe_crypt(secret: str | bytes, hash: str | bytes) -> str | None: ... +def test_crypt(secret: str | bytes, hash: str) -> bool: ... timer = timeit.default_timer tick = timer -rng: Any +rng: random.Random -def getrandbytes(rng, count) -> Generator[None, None, Any]: ... -def getrandstr(rng, charset, count) -> Generator[None, None, Any]: ... -def generate_password(size: int = 10, charset=...): ... -def is_crypt_handler(obj): ... -def is_crypt_context(obj): ... -def has_rounds_info(handler): ... -def has_salt_info(handler): ... +@overload +def getrandbytes(rng: random.Random, count: None) -> Literal[b""]: ... +@overload +def getrandbytes(rng: random.Random, count) -> bytes: ... +def getrandstr(rng: random.Random, charset: str, count: int) -> str: ... +def generate_password(size: int = 10, charset: str = ...) -> str: ... +def is_crypt_handler(obj) -> bool: ... +def is_crypt_context(obj) -> bool: ... +def has_rounds_info(handler) -> bool: ... +def has_salt_info(handler) -> bool: ... diff --git a/stubs/passlib/passlib/utils/binary.pyi b/stubs/passlib/passlib/utils/binary.pyi index 64b5d5d8c465..2c1336b5131d 100644 --- a/stubs/passlib/passlib/utils/binary.pyi +++ b/stubs/passlib/passlib/utils/binary.pyi @@ -1,23 +1,26 @@ -from _typeshed import Incomplete -from typing import Any +from _typeshed import ReadableBuffer +from logging import Logger +from typing import Any, Final -BASE64_CHARS: Any -AB64_CHARS: Any -HASH64_CHARS: Any -BCRYPT_CHARS: Any -PADDED_BASE64_CHARS: Any -HEX_CHARS: Any -UPPER_HEX_CHARS: Any -LOWER_HEX_CHARS: Any -ALL_BYTE_VALUES: Any +log: Logger -def compile_byte_translation(mapping, source: Incomplete | None = None): ... -def b64s_encode(data): ... -def b64s_decode(data): ... -def ab64_encode(data): ... -def ab64_decode(data): ... -def b32encode(source): ... -def b32decode(source): ... +BASE64_CHARS: Final[str] +AB64_CHARS: Final[str] +HASH64_CHARS: Final[str] +BCRYPT_CHARS: Final[str] +PADDED_BASE64_CHARS: Final[str] +HEX_CHARS: Final[str] +UPPER_HEX_CHARS: Final[str] +LOWER_HEX_CHARS: Final[str] +ALL_BYTE_VALUES: Final[bytes] + +def compile_byte_translation(mapping: dict[str | bytes | int, str | bytes], source: bytes | None = None) -> bytes: ... +def b64s_encode(data: ReadableBuffer) -> bytes: ... +def b64s_decode(data: str | ReadableBuffer) -> bytes: ... +def ab64_encode(data: ReadableBuffer) -> bytes: ... +def ab64_decode(data: str | ReadableBuffer) -> bytes: ... +def b32encode(source: ReadableBuffer) -> str: ... +def b32decode(source: str | bytes) -> bytes: ... class Base64Engine: bytemap: Any @@ -46,9 +49,9 @@ class LazyBase64Engine(Base64Engine): def __init__(self, *args, **kwds) -> None: ... def __getattribute__(self, attr: str): ... 
-h64: Any -h64big: Any -bcrypt64: Any +h64: Base64Engine +h64big: Base64Engine +bcrypt64: Base64Engine __all__ = [ # constants From 89ec61cb31a5f2e86943e8eeb5f789c4e5cf9453 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 7 Apr 2025 16:34:05 +0400 Subject: [PATCH 206/388] Improve `Authlib` (#13801) --- stubs/Authlib/authlib/__init__.pyi | 10 ++++-- stubs/Authlib/authlib/common/encoding.pyi | 36 +++++++++++++------ stubs/Authlib/authlib/common/errors.pyi | 27 ++++++-------- stubs/Authlib/authlib/common/security.pyi | 4 ++- stubs/Authlib/authlib/common/urls.pyi | 8 ++--- stubs/Authlib/authlib/consts.pyi | 14 ++++---- stubs/Authlib/authlib/deprecate.pyi | 6 +--- stubs/Authlib/authlib/jose/__init__.pyi | 6 ++-- stubs/Authlib/authlib/jose/rfc7519/claims.pyi | 4 +-- stubs/Authlib/authlib/jose/util.pyi | 10 ++++-- 10 files changed, 70 insertions(+), 55 deletions(-) diff --git a/stubs/Authlib/authlib/__init__.pyi b/stubs/Authlib/authlib/__init__.pyi index d1c285f4e6d6..a254fc53981c 100644 --- a/stubs/Authlib/authlib/__init__.pyi +++ b/stubs/Authlib/authlib/__init__.pyi @@ -1,4 +1,8 @@ -from .consts import homepage, version +from typing import Final -__version__ = version -__homepage__ = homepage +from .consts import author, homepage, version + +__version__: Final = version +__homepage__: Final = homepage +__author__: Final = author +__license__: Final = "BSD-3-Clause" diff --git a/stubs/Authlib/authlib/common/encoding.pyi b/stubs/Authlib/authlib/common/encoding.pyi index e76f84f033e8..0cddc1311100 100644 --- a/stubs/Authlib/authlib/common/encoding.pyi +++ b/stubs/Authlib/authlib/common/encoding.pyi @@ -1,10 +1,26 @@ -def to_bytes(x, charset: str = "utf-8", errors: str = "strict") -> bytes | None: ... -def to_unicode(x, charset: str = "utf-8", errors: str = "strict") -> str | None: ... -def to_native(x, encoding: str = "ascii"): ... -def json_loads(s): ... -def json_dumps(data, ensure_ascii: bool = False): ... -def urlsafe_b64decode(s): ... -def urlsafe_b64encode(s): ... -def base64_to_int(s): ... -def int_to_base64(num): ... -def json_b64encode(text): ... +from _typeshed import ReadableBuffer +from collections.abc import Iterable +from typing import Any, SupportsBytes, SupportsIndex, overload + +@overload +def to_bytes(x: None, charset: str = "utf-8", errors: str = "strict") -> None: ... +@overload +def to_bytes( + x: str | bytes | float | Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, + charset: str = "utf-8", + errors: str = "strict", +) -> bytes: ... +@overload +def to_unicode(x: None, charset: str = "utf-8", errors: str = "strict") -> None: ... +@overload +def to_unicode(x: object, charset: str = "utf-8", errors: str = "strict") -> str: ... +def to_native(x: str | bytes, encoding: str = "ascii") -> str: ... +def json_loads(s: str | bytes | bytearray) -> Any: ... # returns json.loads() +def json_dumps(data: Any, ensure_ascii: bool = False) -> str: ... # data pass to json.dumps() +def urlsafe_b64decode(s: bytes) -> bytes: ... +def urlsafe_b64encode(s: ReadableBuffer) -> bytes: ... +def base64_to_int(s: str | bytes | float | Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer) -> int: ... +def int_to_base64(num: int) -> str: ... +def json_b64encode( + text: str | bytes | float | Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, +) -> bytes: ... 
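A short, illustrative round trip through the encoding helpers annotated above (the input values are arbitrary):

    from authlib.common.encoding import json_b64encode, to_bytes, to_unicode, urlsafe_b64encode

    data = to_bytes("héllo")          # bytes; per the overloads, to_bytes(None) stays None
    text = to_unicode(data)           # back to str
    token = urlsafe_b64encode(data)   # URL-safe base64, returned as bytes
    blob = json_b64encode('{"sub": "123"}')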
diff --git a/stubs/Authlib/authlib/common/errors.pyi b/stubs/Authlib/authlib/common/errors.pyi index 89a45b2d9b4a..c6b1276d2571 100644 --- a/stubs/Authlib/authlib/common/errors.pyi +++ b/stubs/Authlib/authlib/common/errors.pyi @@ -1,26 +1,21 @@ -from _typeshed import Incomplete +from typing import Literal class AuthlibBaseError(Exception): - error: Incomplete + error: str | None description: str - uri: Incomplete - def __init__( - self, error: Incomplete | None = None, description: Incomplete | None = None, uri: Incomplete | None = None - ) -> None: ... + uri: str | None + def __init__(self, error: str | None = None, description: str | None = None, uri: str | None = None) -> None: ... class AuthlibHTTPError(AuthlibBaseError): status_code: int def __init__( - self, - error: Incomplete | None = None, - description: Incomplete | None = None, - uri: Incomplete | None = None, - status_code: Incomplete | None = None, + self, error: str | None = None, description: str | None = None, uri: str | None = None, status_code: int | None = None ) -> None: ... - def get_error_description(self): ... - def get_body(self): ... - def get_headers(self): ... - uri: Incomplete - def __call__(self, uri: Incomplete | None = None): ... + def get_error_description(self) -> str: ... + def get_body(self) -> list[tuple[Literal["error", "error_description", "error_uri"], str | None]]: ... + def get_headers(self) -> list[tuple[str, str]]: ... + def __call__( + self, uri: str | None = None + ) -> tuple[int, dict[Literal["error", "error_description", "error_uri"], str | None], list[tuple[str, str]]]: ... class ContinueIteration(AuthlibBaseError): ... diff --git a/stubs/Authlib/authlib/common/security.pyi b/stubs/Authlib/authlib/common/security.pyi index d69563f0e0cb..dc6786cbcb57 100644 --- a/stubs/Authlib/authlib/common/security.pyi +++ b/stubs/Authlib/authlib/common/security.pyi @@ -1,4 +1,6 @@ -UNICODE_ASCII_CHARACTER_SET: str +from typing import Final + +UNICODE_ASCII_CHARACTER_SET: Final[str] def generate_token(length: int = 30, chars: str = ...) -> str: ... def is_secure_transport(uri: str) -> bool: ... 
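And a small usage sketch for the security helpers typed above; the redirect URI is a made-up example.

    from authlib.common.security import generate_token, is_secure_transport

    state = generate_token(48)   # random 48-character token drawn from the ASCII charset
    redirect_uri = "https://client.example/cb"
    if not is_secure_transport(redirect_uri):
        raise RuntimeError("refusing to send credentials over an insecure transport")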
diff --git a/stubs/Authlib/authlib/common/urls.pyi b/stubs/Authlib/authlib/common/urls.pyi index 1d9bffd42459..457993f01006 100644 --- a/stubs/Authlib/authlib/common/urls.pyi +++ b/stubs/Authlib/authlib/common/urls.pyi @@ -1,10 +1,10 @@ -from collections.abc import Collection from re import Pattern +from typing import Final from typing_extensions import TypeAlias -always_safe: str -urlencoded: Collection[str] -INVALID_HEX_PATTERN: Pattern[str] +always_safe: Final[str] +urlencoded: Final[set[str]] +INVALID_HEX_PATTERN: Final[Pattern[str]] _ExplodedQueryString: TypeAlias = list[tuple[str, str]] diff --git a/stubs/Authlib/authlib/consts.pyi b/stubs/Authlib/authlib/consts.pyi index ab047d25406a..6ec14d37c50c 100644 --- a/stubs/Authlib/authlib/consts.pyi +++ b/stubs/Authlib/authlib/consts.pyi @@ -1,8 +1,8 @@ -from _typeshed import Incomplete +from typing import Final -name: str -version: str -author: str -homepage: str -default_user_agent: Incomplete -default_json_headers: Incomplete +name: Final[str] +version: Final[str] +author: Final[str] +homepage: Final[str] +default_user_agent: Final[str] +default_json_headers: Final[list[tuple[str, str]]] diff --git a/stubs/Authlib/authlib/deprecate.pyi b/stubs/Authlib/authlib/deprecate.pyi index c4c5a11b59ce..9f7f18218194 100644 --- a/stubs/Authlib/authlib/deprecate.pyi +++ b/stubs/Authlib/authlib/deprecate.pyi @@ -1,7 +1,3 @@ -from _typeshed import Incomplete - class AuthlibDeprecationWarning(DeprecationWarning): ... -def deprecate( - message, version: Incomplete | None = None, link_uid: Incomplete | None = None, link_file: Incomplete | None = None -) -> None: ... +def deprecate(message: str, version: str | None = None, link_uid: str | None = None, link_file: str | None = None) -> None: ... diff --git a/stubs/Authlib/authlib/jose/__init__.pyi b/stubs/Authlib/authlib/jose/__init__.pyi index e1c930f3cb2d..3377c3dbdef7 100644 --- a/stubs/Authlib/authlib/jose/__init__.pyi +++ b/stubs/Authlib/authlib/jose/__init__.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from .errors import JoseError as JoseError from .rfc7515 import ( JsonWebSignature as JsonWebSignature, @@ -18,6 +16,8 @@ from .rfc7518 import ECKey as ECKey, OctKey as OctKey, RSAKey as RSAKey from .rfc7519 import BaseClaims as BaseClaims, JsonWebToken as JsonWebToken, JWTClaims as JWTClaims from .rfc8037 import OKPKey as OKPKey +jwt: JsonWebToken + __all__ = [ "JoseError", "JsonWebSignature", @@ -40,5 +40,3 @@ __all__ = [ "JWTClaims", "jwt", ] - -jwt: Incomplete diff --git a/stubs/Authlib/authlib/jose/rfc7519/claims.pyi b/stubs/Authlib/authlib/jose/rfc7519/claims.pyi index e195536a74b4..63e206f11115 100644 --- a/stubs/Authlib/authlib/jose/rfc7519/claims.pyi +++ b/stubs/Authlib/authlib/jose/rfc7519/claims.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete class BaseClaims(dict[str, object]): - REGISTERED_CLAIMS: Incomplete + REGISTERED_CLAIMS: list[str] header: Incomplete options: Incomplete params: Incomplete @@ -10,7 +10,7 @@ class BaseClaims(dict[str, object]): def get_registered_claims(self): ... class JWTClaims(BaseClaims): - REGISTERED_CLAIMS: Incomplete + REGISTERED_CLAIMS: list[str] def validate(self, now: Incomplete | None = None, leeway: int = 0) -> None: ... def validate_iss(self) -> None: ... def validate_sub(self) -> None: ... 
diff --git a/stubs/Authlib/authlib/jose/util.pyi b/stubs/Authlib/authlib/jose/util.pyi index ef9dc7671b67..780229067d4f 100644 --- a/stubs/Authlib/authlib/jose/util.pyi +++ b/stubs/Authlib/authlib/jose/util.pyi @@ -1,3 +1,7 @@ -def extract_header(header_segment, error_cls): ... -def extract_segment(segment, error_cls, name: str = "payload"): ... -def ensure_dict(s, structure_name): ... +from _typeshed import Incomplete + +from authlib.common.errors import AuthlibBaseError + +def extract_header(header_segment: bytes, error_cls: AuthlibBaseError) -> dict[Incomplete, Incomplete]: ... +def extract_segment(segment: bytes, error_cls: AuthlibBaseError, name: str = "payload") -> bytes: ... +def ensure_dict(s: object, structure_name: str) -> dict[Incomplete, Incomplete]: ... From 72af9668f9bd8a13a78e9dabdf1a4cae12be6483 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 7 Apr 2025 22:05:06 +0100 Subject: [PATCH 207/388] [pre-commit.ci] pre-commit autoupdate (#13805) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/astral-sh/ruff-pre-commit: v0.11.2 → v0.11.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.11.2...v0.11.4) * Update requirements-tests.txt --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Alex Waygood --- .pre-commit-config.yaml | 2 +- requirements-tests.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5cd487b841f9..6ed7dede571a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: args: [--fix=lf] - id: check-case-conflict - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.2 # must match requirements-tests.txt + rev: v0.11.4 # must match requirements-tests.txt hooks: - id: ruff name: Run ruff on stubs, tests and scripts diff --git a/requirements-tests.txt b/requirements-tests.txt index 06b306e663b7..a5cac406bbb2 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -13,7 +13,7 @@ packaging==24.2 pathspec>=0.11.1 pre-commit # Required by create_baseline_stubs.py. Must match .pre-commit-config.yaml. 
-ruff==0.11.2 +ruff==0.11.4 stubdefaulter==0.1.0 termcolor>=2.3 tomli==2.2.1 From ba2552ef42ebb5526b0ee139beed81bd58ccc169 Mon Sep 17 00:00:00 2001 From: Brian Schubert Date: Thu, 10 Apr 2025 17:19:45 -0400 Subject: [PATCH 208/388] Fix stdlib stubtest for latest Python patch releases (#13812) --- .../@tests/stubtest_allowlists/darwin-py313.txt | 5 ----- stdlib/@tests/stubtest_allowlists/py310.txt | 5 +++++ stdlib/@tests/stubtest_allowlists/py311.txt | 5 +++++ stdlib/@tests/stubtest_allowlists/py39.txt | 4 ++++ stdlib/_curses.pyi | 15 ++++++++------- stdlib/_socket.pyi | 2 +- stdlib/email/_header_value_parser.pyi | 5 +++-- stdlib/importlib/resources/__init__.pyi | 3 ++- stdlib/multiprocessing/resource_tracker.pyi | 3 +++ stdlib/socket.pyi | 2 +- stdlib/tokenize.pyi | 2 +- 11 files changed, 33 insertions(+), 18 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/darwin-py313.txt b/stdlib/@tests/stubtest_allowlists/darwin-py313.txt index d76c31595ec8..bb7f4bba4c2f 100644 --- a/stdlib/@tests/stubtest_allowlists/darwin-py313.txt +++ b/stdlib/@tests/stubtest_allowlists/darwin-py313.txt @@ -2,9 +2,4 @@ # >= 3.13 # ======= -# Depends on HAVE_NCURSESW and how we install CPython, -# should be removed when 3.13 will be officially released: -_?curses.unget_wch -_?curses.window.get_wch - (mmap.MAP_32BIT)? # Exists locally on MacOS but not on GitHub diff --git a/stdlib/@tests/stubtest_allowlists/py310.txt b/stdlib/@tests/stubtest_allowlists/py310.txt index 9f6ce2cf82e6..d2d31ee8ef53 100644 --- a/stdlib/@tests/stubtest_allowlists/py310.txt +++ b/stdlib/@tests/stubtest_allowlists/py310.txt @@ -290,3 +290,8 @@ sunau.Au_write.initfp threading.Lock # Factory function at runtime, but that wouldn't let us use it in type hints types.SimpleNamespace.__init__ # class doesn't accept positional arguments but has default C signature typing_extensions\.Annotated # Undocumented implementation details + +# Incompatible changes introduced in Python 3.10.17 +# (Remove once 3.10.17 becomes available for all platforms) +(email._header_value_parser.get_encoded_word)? +(email._header_value_parser.make_quoted_pairs)? diff --git a/stdlib/@tests/stubtest_allowlists/py311.txt b/stdlib/@tests/stubtest_allowlists/py311.txt index 2b7e1b3e527c..92a7543862d7 100644 --- a/stdlib/@tests/stubtest_allowlists/py311.txt +++ b/stdlib/@tests/stubtest_allowlists/py311.txt @@ -254,3 +254,8 @@ sunau.Au_write.initfp threading.Lock # Factory function at runtime, but that wouldn't let us use it in type hints types.SimpleNamespace.__init__ # class doesn't accept positional arguments but has default C signature typing_extensions\.Annotated # Undocumented implementation details + +# Incompatible changes introduced in Python 3.11.12 +# (Remove once 3.11.12 becomes available for all platforms) +(email._header_value_parser.get_encoded_word)? +(email._header_value_parser.make_quoted_pairs)? 
diff --git a/stdlib/@tests/stubtest_allowlists/py39.txt b/stdlib/@tests/stubtest_allowlists/py39.txt index e84afe3defe7..04e9978a1ed6 100644 --- a/stdlib/@tests/stubtest_allowlists/py39.txt +++ b/stdlib/@tests/stubtest_allowlists/py39.txt @@ -241,3 +241,7 @@ sunau.Au_write.initfp threading.Lock # Factory function at runtime, but that wouldn't let us use it in type hints types.SimpleNamespace.__init__ # class doesn't accept positional arguments but has default C signature typing_extensions\.Annotated # Undocumented implementation details + +# Incompatible changes introduced in Python 3.9.22 +# (Remove once 3.9.22 becomes available for all platforms) +(email._header_value_parser.get_encoded_word)? diff --git a/stdlib/_curses.pyi b/stdlib/_curses.pyi index 23dead01e6ca..d7820c72c090 100644 --- a/stdlib/_curses.pyi +++ b/stdlib/_curses.pyi @@ -95,13 +95,14 @@ BUTTON4_DOUBLE_CLICKED: int BUTTON4_PRESSED: int BUTTON4_RELEASED: int BUTTON4_TRIPLE_CLICKED: int -# Darwin ncurses doesn't provide BUTTON5_* constants -if sys.version_info >= (3, 10) and sys.platform != "darwin": - BUTTON5_PRESSED: int - BUTTON5_RELEASED: int - BUTTON5_CLICKED: int - BUTTON5_DOUBLE_CLICKED: int - BUTTON5_TRIPLE_CLICKED: int +# Darwin ncurses doesn't provide BUTTON5_* constants prior to 3.12.10 and 3.13.3 +if sys.version_info >= (3, 10): + if sys.version_info >= (3, 12) or sys.platform != "darwin": + BUTTON5_PRESSED: int + BUTTON5_RELEASED: int + BUTTON5_CLICKED: int + BUTTON5_DOUBLE_CLICKED: int + BUTTON5_TRIPLE_CLICKED: int BUTTON_ALT: int BUTTON_CTRL: int BUTTON_SHIFT: int diff --git a/stdlib/_socket.pyi b/stdlib/_socket.pyi index 1a25fc6b13a8..5399f4edf010 100644 --- a/stdlib/_socket.pyi +++ b/stdlib/_socket.pyi @@ -78,7 +78,7 @@ if sys.platform == "win32": SO_EXCLUSIVEADDRUSE: int if sys.platform != "win32": SO_REUSEPORT: int - if sys.platform != "darwin": + if sys.platform != "darwin" or sys.version_info >= (3, 13): SO_BINDTODEVICE: int if sys.platform != "win32" and sys.platform != "darwin": diff --git a/stdlib/email/_header_value_parser.pyi b/stdlib/email/_header_value_parser.pyi index a4c2d8b1a92e..f4e9ca68d6a9 100644 --- a/stdlib/email/_header_value_parser.pyi +++ b/stdlib/email/_header_value_parser.pyi @@ -22,7 +22,8 @@ NLSET: Final[set[str]] # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 SPECIALSNL: Final[set[str]] -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 10): + # Added in Python 3.10.17, 3.11.12, 3.12.9, 3.13.2 (may still be backported to 3.9) def make_quoted_pairs(value: Any) -> str: ... def quote_string(value: Any) -> str: ... @@ -349,7 +350,7 @@ ListSeparator: Final[ValueTerminal] RouteComponentMarker: Final[ValueTerminal] def get_fws(value: str) -> tuple[WhiteSpaceTerminal, str]: ... -def get_encoded_word(value: str) -> tuple[EncodedWord, str]: ... +def get_encoded_word(value: str, terminal_type: str = "vtext") -> tuple[EncodedWord, str]: ... def get_unstructured(value: str) -> UnstructuredTokenList: ... def get_qp_ctext(value: str) -> tuple[WhiteSpaceTerminal, str]: ... def get_qcontent(value: str) -> tuple[ValueTerminal, str]: ... 
diff --git a/stdlib/importlib/resources/__init__.pyi b/stdlib/importlib/resources/__init__.pyi index 88ce8f5cef48..2cf6366b6cb3 100644 --- a/stdlib/importlib/resources/__init__.pyi +++ b/stdlib/importlib/resources/__init__.pyi @@ -37,11 +37,12 @@ if sys.version_info < (3, 11): elif sys.version_info < (3, 13): Resource: TypeAlias = str -if sys.version_info >= (3, 13): +if sys.version_info >= (3, 12): from importlib.resources._common import Anchor as Anchor __all__ += ["Anchor"] +if sys.version_info >= (3, 13): from importlib.resources._functional import ( contents as contents, is_resource as is_resource, diff --git a/stdlib/multiprocessing/resource_tracker.pyi b/stdlib/multiprocessing/resource_tracker.pyi index 61da7fdf1ceb..cb2f27a62861 100644 --- a/stdlib/multiprocessing/resource_tracker.pyi +++ b/stdlib/multiprocessing/resource_tracker.pyi @@ -1,3 +1,4 @@ +import sys from _typeshed import FileDescriptorOrPath from collections.abc import Sized @@ -8,6 +9,8 @@ class ResourceTracker: def ensure_running(self) -> None: ... def register(self, name: Sized, rtype: str) -> None: ... def unregister(self, name: Sized, rtype: str) -> None: ... + if sys.version_info >= (3, 12): + def __del__(self) -> None: ... _resource_tracker: ResourceTracker ensure_running = _resource_tracker.ensure_running diff --git a/stdlib/socket.pyi b/stdlib/socket.pyi index 680c6475a3b7..ff89dcc72209 100644 --- a/stdlib/socket.pyi +++ b/stdlib/socket.pyi @@ -514,7 +514,7 @@ if sys.platform != "win32": "IPV6_RTHDRDSTOPTS", ] - if sys.platform != "darwin": + if sys.platform != "darwin" or sys.version_info >= (3, 13): from _socket import SO_BINDTODEVICE as SO_BINDTODEVICE __all__ += ["SO_BINDTODEVICE"] diff --git a/stdlib/tokenize.pyi b/stdlib/tokenize.pyi index a1c4b412da83..86e87704eb02 100644 --- a/stdlib/tokenize.pyi +++ b/stdlib/tokenize.pyi @@ -125,7 +125,7 @@ class Untokenizer: prev_col: int encoding: str | None def add_whitespace(self, start: _Position) -> None: ... - if sys.version_info >= (3, 13): + if sys.version_info >= (3, 12): def add_backslash_continuation(self, start: _Position) -> None: ... def untokenize(self, iterable: Iterable[_Token]) -> str: ... From cddf6f2ab6e01dfba188262c13e74003d3235a84 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 11 Apr 2025 02:10:49 +0200 Subject: [PATCH 209/388] Update croniter to 6.0.0 (#13548) Make class `croniter` generic over `ret_type` and tighten return types for some methods. --- stubs/croniter/METADATA.toml | 2 +- stubs/croniter/croniter/__init__.pyi | 11 ++ stubs/croniter/croniter/croniter.pyi | 204 +++++++++++++++++++++------ 3 files changed, 170 insertions(+), 47 deletions(-) diff --git a/stubs/croniter/METADATA.toml b/stubs/croniter/METADATA.toml index 2dc7e1c08c70..b0539637d669 100644 --- a/stubs/croniter/METADATA.toml +++ b/stubs/croniter/METADATA.toml @@ -1,2 +1,2 @@ -version = "5.0.1" +version = "6.0.0" upstream_repository = "https://github.com/pallets-eco/croniter" diff --git a/stubs/croniter/croniter/__init__.pyi b/stubs/croniter/croniter/__init__.pyi index f9bdbea7d109..97fa561af5c4 100644 --- a/stubs/croniter/croniter/__init__.pyi +++ b/stubs/croniter/croniter/__init__.pyi @@ -1,11 +1,22 @@ +from . 
import croniter as croniter_m from .croniter import ( + DAY_FIELD as DAY_FIELD, + HOUR_FIELD as HOUR_FIELD, + MINUTE_FIELD as MINUTE_FIELD, + MONTH_FIELD as MONTH_FIELD, OVERFLOW32B_MODE as OVERFLOW32B_MODE, + SECOND_FIELD as SECOND_FIELD, + UTC_DT as UTC_DT, + YEAR_FIELD as YEAR_FIELD, CroniterBadCronError as CroniterBadCronError, CroniterBadDateError as CroniterBadDateError, CroniterBadTypeRangeError as CroniterBadTypeRangeError, + CroniterError as CroniterError, CroniterNotAlphaError as CroniterNotAlphaError, CroniterUnsupportedSyntaxError as CroniterUnsupportedSyntaxError, croniter as croniter, croniter_range as croniter_range, datetime_to_timestamp as datetime_to_timestamp, ) + +cron_m = croniter_m diff --git a/stubs/croniter/croniter/croniter.pyi b/stubs/croniter/croniter/croniter.pyi index e92a633debeb..9d9dfe102bd2 100644 --- a/stubs/croniter/croniter/croniter.pyi +++ b/stubs/croniter/croniter/croniter.pyi @@ -1,22 +1,46 @@ import datetime from _typeshed import Unused from collections import OrderedDict -from collections.abc import Iterator +from collections.abc import Generator from re import Match, Pattern -from typing import Any, Final, Literal, overload +from typing import Any, Final, Generic, Literal, Protocol, TypeVar, overload from typing_extensions import Never, Self, TypeAlias -_RetType: TypeAlias = type[float | datetime.datetime] +_R_co = TypeVar("_R_co", float, datetime.datetime, default=float, covariant=True) +_R2_co = TypeVar("_R2_co", float, datetime.datetime, covariant=True) _Expressions: TypeAlias = list[str] # fixed-length list of 5 or 6 strings +class _AllIter(Protocol[_R_co]): + @overload + def __call__( + self, ret_type: type[_R2_co], start_time: float | datetime.datetime | None = None, update_current: bool | None = None + ) -> Generator[_R2_co]: ... + @overload + def __call__( + self, ret_type: None = None, start_time: float | datetime.datetime | None = None, update_current: bool | None = None + ) -> Generator[_R_co]: ... + def is_32bit() -> bool: ... 
OVERFLOW32B_MODE: Final[bool] +UTC_DT: Final[datetime.timezone] EPOCH: Final[datetime.datetime] M_ALPHAS: Final[dict[str, int]] DOW_ALPHAS: Final[dict[str, int]] -ALPHAS: Final[dict[str, int]] + +MINUTE_FIELD: Final = 0 +HOUR_FIELD: Final = 1 +DAY_FIELD: Final = 2 +MONTH_FIELD: Final = 3 +DOW_FIELD: Final = 4 +SECOND_FIELD: Final = 5 +YEAR_FIELD: Final = 6 + +UNIX_FIELDS: Final[tuple[int, int, int, int, int]] +SECOND_FIELDS: Final[tuple[int, int, int, int, int, int]] +YEAR_FIELDS: Final[tuple[int, int, int, int, int, int, int]] + step_search_re: Final[Pattern[str]] only_int_re: Final[Pattern[str]] @@ -26,26 +50,18 @@ star_or_int_re: Final[Pattern[str]] special_dow_re: Final[Pattern[str]] re_star: Final[Pattern[str]] hash_expression_re: Final[Pattern[str]] -MINUTE_FIELD: Final = 0 -HOUR_FIELD: Final = 1 -DAY_FIELD: Final = 2 -MONTH_FIELD: Final = 3 -DOW_FIELD: Final = 4 -SECOND_FIELD: Final = 5 + +CRON_FIELDS: Final[dict[str | int, tuple[int, ...]]] UNIX_CRON_LEN: Final = 5 -YEAR_FIELD: Final = 6 SECOND_CRON_LEN: Final = 6 YEAR_CRON_LEN: Final = 7 -SECOND_FIELDS: Final[tuple[int, int, int, int, int, int]] -UNIX_FIELDS: Final[tuple[int, int, int, int, int]] -YEAR_FIELDS: Final[tuple[int, int, int, int, int, int, int]] -CRON_FIELDS: Final[dict[str | int, tuple[int, ...]]] VALID_LEN_EXPRESSION: Final[set[int]] +TIMESTAMP_TO_DT_CACHE: Final[dict[tuple[float, str], datetime.datetime]] EXPRESSIONS: dict[tuple[str, bytes], _Expressions] - -UTC_DT: Final[datetime.timezone] +MARKER: object def timedelta_to_seconds(td: datetime.timedelta) -> float: ... +def datetime_to_timestamp(d: datetime.datetime) -> float: ... class CroniterError(ValueError): ... class CroniterBadTypeRangeError(TypeError): ... @@ -54,9 +70,7 @@ class CroniterUnsupportedSyntaxError(CroniterBadCronError): ... class CroniterBadDateError(CroniterError): ... class CroniterNotAlphaError(CroniterError): ... -def datetime_to_timestamp(d: datetime.datetime) -> float: ... - -class croniter(Iterator[Any]): +class croniter(Generic[_R_co]): MONTHS_IN_YEAR: Final = 12 RANGES: Final[ tuple[ @@ -102,8 +116,8 @@ class croniter(Iterator[Any]): ] ] LEN_MEANS_ALL: Final[tuple[int, int, int, int, int, int, int]] - bad_length: Final[str] + second_at_beginning: bool tzinfo: datetime.tzinfo | None # Initialized to None, but immediately set to a float. @@ -113,13 +127,56 @@ class croniter(Iterator[Any]): expanded: list[list[str]] nth_weekday_of_month: dict[str, set[int]] + fields: tuple[int, ...] expressions: _Expressions + @overload + def __new__( + cls, + expr_format: str, + start_time: float | datetime.datetime | None = None, + ret_type: type[float] = ..., + day_or: bool = True, + max_years_between_matches: int | None = None, + is_prev: bool = False, + hash_id: str | bytes | None = None, + implement_cron_bug: bool = False, + second_at_beginning: bool | None = None, + expand_from_start_time: bool = False, + ) -> croniter[float]: ... + @overload + def __new__( + cls, + expr_format: str, + start_time: float | datetime.datetime | None, + ret_type: type[datetime.datetime], + day_or: bool = True, + max_years_between_matches: int | None = None, + is_prev: bool = False, + hash_id: str | bytes | None = None, + implement_cron_bug: bool = False, + second_at_beginning: bool | None = None, + expand_from_start_time: bool = False, + ) -> croniter[datetime.datetime]: ... 
+ @overload + def __new__( + cls, + expr_format: str, + *, + ret_type: type[datetime.datetime], + day_or: bool = True, + max_years_between_matches: int | None = None, + is_prev: bool = False, + hash_id: str | bytes | None = None, + implement_cron_bug: bool = False, + second_at_beginning: bool | None = None, + expand_from_start_time: bool = False, + ) -> croniter[datetime.datetime]: ... def __init__( self, expr_format: str, start_time: float | datetime.datetime | None = None, - ret_type: _RetType | None = ..., + ret_type: type[_R_co] = ..., day_or: bool = True, max_years_between_matches: int | None = None, is_prev: bool = False, @@ -128,44 +185,74 @@ class croniter(Iterator[Any]): second_at_beginning: bool | None = None, expand_from_start_time: bool = False, ) -> None: ... - # Most return value depend on ret_type, which can be passed in both as a method argument and as - # a constructor argument. + @overload + def get_next( + self, ret_type: type[_R2_co], start_time: float | datetime.datetime | None = None, update_current: bool = True + ) -> _R_co: ... + @overload def get_next( - self, ret_type: _RetType | None = None, start_time: float | datetime.datetime | None = None, update_current: bool = True - ) -> Any: ... + self, ret_type: None = None, start_time: float | datetime.datetime | None = None, update_current: bool = True + ) -> _R_co: ... + @overload + def get_prev( + self, ret_type: type[_R2_co], start_time: float | datetime.datetime | None = None, update_current: bool = True + ) -> _R2_co: ... + @overload def get_prev( - self, ret_type: _RetType | None = None, start_time: float | datetime.datetime | None = None, update_current: bool = True - ) -> Any: ... - def get_current(self, ret_type: _RetType | None = None) -> Any: ... + self, ret_type: None = None, start_time: float | datetime.datetime | None = None, update_current: bool = True + ) -> _R_co: ... + @overload + def get_current(self, ret_type: type[_R2_co]) -> _R2_co: ... + @overload + def get_current(self, ret_type: None = None) -> _R_co: ... def set_current(self, start_time: float | datetime.datetime | None, force: bool = True) -> float: ... + @staticmethod + def datetime_to_timestamp(d: datetime.datetime) -> float: ... + def timestamp_to_datetime(self, timestamp: float, tzinfo: datetime.tzinfo | None = ...) -> datetime.datetime: ... + @staticmethod + def timedelta_to_seconds(td: datetime.timedelta) -> float: ... + @overload + def all_next( + self, ret_type: type[_R2_co], start_time: float | datetime.datetime | None = None, update_current: bool | None = None + ) -> Generator[_R2_co]: ... + @overload + def all_next( + self, ret_type: None = None, start_time: float | datetime.datetime | None = None, update_current: bool | None = None + ) -> Generator[_R_co]: ... + @overload + def all_prev( + self, ret_type: type[_R2_co], start_time: float | datetime.datetime | None = None, update_current: bool | None = None + ) -> Generator[_R2_co]: ... + @overload + def all_prev( + self, ret_type: None = None, start_time: float | datetime.datetime | None = None, update_current: bool | None = None + ) -> Generator[_R_co]: ... + def iter(self, *args: Unused, **kwargs: Unused) -> _AllIter[_R_co]: ... def __iter__(self) -> Self: ... + @overload def next( self, - ret_type: _RetType | None = None, + ret_type: type[_R2_co], start_time: float | datetime.datetime | None = None, is_prev: bool | None = None, update_current: bool | None = None, - ) -> Any: ... 
- __next__ = next - def all_next( - self, - ret_type: _RetType | None = None, - start_time: float | datetime.datetime | None = None, - update_current: bool | None = None, - ) -> Iterator[Any]: ... - def all_prev( + ) -> _R2_co: ... + @overload + def next( self, - ret_type: _RetType | None = None, + ret_type: None = None, start_time: float | datetime.datetime | None = None, + is_prev: bool | None = None, update_current: bool | None = None, - ) -> Iterator[Any]: ... - def iter(self, ret_type: _RetType | None = ...) -> Iterator[Any]: ... - def is_leap(self, year: int) -> bool: ... + ) -> _R_co: ... + __next__ = next + @staticmethod + def is_leap(year: int) -> bool: ... @classmethod def value_alias( cls, val: int, - field: Literal[0, 1, 2, 3, 4, 5, 6], + field_index: Literal[0, 1, 2, 3, 4, 5, 6], len_expressions: int | list[Any] | dict[Any, Any] | tuple[Any, ...] | set[Any] = 5, ) -> int: ... @classmethod @@ -198,17 +285,42 @@ class croniter(Iterator[Any]): second_at_beginning: bool = False, ) -> bool: ... +@overload def croniter_range( start: float | datetime.datetime, stop: float | datetime.datetime, expr_format: str, - ret_type: _RetType | None = None, + ret_type: type[_R2_co], + day_or: bool = True, + exclude_ends: bool = False, + _croniter: type[croniter] | None = None, + second_at_beginning: bool = False, + expand_from_start_time: bool = False, +) -> Generator[_R2_co, None, None]: ... +@overload +def croniter_range( + start: float, + stop: float | datetime.datetime, + expr_format: str, + ret_type: None = None, + day_or: bool = True, + exclude_ends: bool = False, + _croniter: type[croniter] | None = None, + second_at_beginning: bool = False, + expand_from_start_time: bool = False, +) -> Generator[float, None, None]: ... +@overload +def croniter_range( + start: datetime.datetime, + stop: float | datetime.datetime, + expr_format: str, + ret_type: None = None, day_or: bool = True, exclude_ends: bool = False, _croniter: type[croniter] | None = None, second_at_beginning: bool = False, expand_from_start_time: bool = False, -) -> Iterator[Any]: ... +) -> Generator[datetime.datetime, None, None]: ... class HashExpander: cron: croniter From 7a5953fa03ac4cf334874d147f583e6a0b158082 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 11 Apr 2025 02:44:40 +0200 Subject: [PATCH 210/388] Update dependency pyright to v1.1.399 (#13813) --- requirements-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index a5cac406bbb2..8c6fc56cca4d 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,7 +1,7 @@ # Type checkers that we test our stubs against. These should always # be pinned to a specific version to make failure reproducible. 
mypy==1.15.0 -pyright==1.1.398 +pyright==1.1.399 # pytype can be installed on Windows, but requires building wheels, let's not do that on the CI pytype==2024.10.11; platform_system != "Windows" and python_version >= "3.10" and python_version < "3.13" From 907f288d7a2a183a492cb16f537d0198f7ff8dad Mon Sep 17 00:00:00 2001 From: Edward Peek <44851039+edwardpeek-crown-public@users.noreply.github.com> Date: Fri, 11 Apr 2025 21:48:34 +1200 Subject: [PATCH 211/388] Fix incorrect keyword-only arguments in tarfile.open() (#13814) --- stdlib/@tests/test_cases/check_tarfile.py | 4 ++ stdlib/tarfile.pyi | 58 +++++++++++++++++++++++ 2 files changed, 62 insertions(+) diff --git a/stdlib/@tests/test_cases/check_tarfile.py b/stdlib/@tests/test_cases/check_tarfile.py index 54510a3d7626..815a6350c837 100644 --- a/stdlib/@tests/test_cases/check_tarfile.py +++ b/stdlib/@tests/test_cases/check_tarfile.py @@ -11,3 +11,7 @@ # Test with invalid preset values tarfile.open("test.tar.xz", "w:xz", preset=-1) # type: ignore tarfile.open("test.tar.xz", "w:xz", preset=10) # type: ignore + +# Test pipe modes +tarfile.open("test.tar.xz", "r|*") +tarfile.open("test.tar.xz", mode="r|*") diff --git a/stdlib/tarfile.pyi b/stdlib/tarfile.pyi index 51195eb98fcc..cd31b101c886 100644 --- a/stdlib/tarfile.pyi +++ b/stdlib/tarfile.pyi @@ -304,6 +304,25 @@ class TarFile: ) -> Self: ... @overload @classmethod + def open( + cls, + name: StrOrBytesPath | ReadableBuffer | None, + mode: Literal["r|*", "r|", "r|gz", "r|bz2", "r|xz"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod def open( cls, name: StrOrBytesPath | ReadableBuffer | None = None, @@ -323,6 +342,25 @@ class TarFile: ) -> Self: ... @overload @classmethod + def open( + cls, + name: StrOrBytesPath | WriteableBuffer | None, + mode: Literal["w|", "w|xz"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod def open( cls, name: StrOrBytesPath | WriteableBuffer | None = None, @@ -342,6 +380,26 @@ class TarFile: ) -> Self: ... @overload @classmethod + def open( + cls, + name: StrOrBytesPath | WriteableBuffer | None, + mode: Literal["w|gz", "w|bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, + ) -> Self: ... 
+ @overload + @classmethod def open( cls, name: StrOrBytesPath | WriteableBuffer | None = None, From 78ad3b286f6fb32a3fca0f35ba4904233083e101 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 11 Apr 2025 16:54:34 +0400 Subject: [PATCH 212/388] Complete stubs for `click-web` (#13806) --- .../click_web/resources/__init__.pyi | 0 .../click_web/resources/cmd_exec.pyi | 24 +++++-- stubs/click-web/click_web/resources/index.pyi | 5 +- .../click_web/resources/input_fields.pyi | 64 ++++++++++++++----- stubs/click-web/click_web/web_click_types.pyi | 10 +-- 5 files changed, 76 insertions(+), 27 deletions(-) create mode 100644 stubs/click-web/click_web/resources/__init__.pyi diff --git a/stubs/click-web/click_web/resources/__init__.pyi b/stubs/click-web/click_web/resources/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/click-web/click_web/resources/cmd_exec.pyi b/stubs/click-web/click_web/resources/cmd_exec.pyi index 1c616c6782b6..b1121b450b8d 100644 --- a/stubs/click-web/click_web/resources/cmd_exec.pyi +++ b/stubs/click-web/click_web/resources/cmd_exec.pyi @@ -1,4 +1,6 @@ import logging +from collections.abc import Generator +from typing import ClassVar, Final from flask import Response @@ -6,19 +8,20 @@ from .input_fields import FieldId logger: logging.Logger | None -HTML_HEAD: str -HTML_TAIL: str +HTML_HEAD: Final[str] +HTML_TAIL: Final[str] class Executor: - RAW_CMD_PATH: str + RAW_CMD_PATH: ClassVar[str] + returncode: int | None def __init__(self) -> None: ... def exec(self, command_path: str) -> Response: ... def _exec_raw(self, command: list[str]) -> Response: ... # undocumented def _exec_html(self, command_path: str) -> Response: ... # undocumented - def _run_script_and_generate_stream(self) -> None: ... # undocumented + def _run_script_and_generate_stream(self) -> Generator[str]: ... # undocumented def _create_cmd_header(self, commands: list[CmdPart]) -> str: ... # undocumented - def _create_result_footer(self) -> str: ... # undocumented + def _create_result_footer(self) -> Generator[str]: ... # undocumented def _get_download_link(field_info: FieldFileInfo) -> str: ... # undocumented @@ -30,6 +33,7 @@ class CommandLineRaw: def after_script_executed(self) -> None: ... class CommandLineForm: + command_line_bulder: FormToCommandLineBuilder def __init__(self, script_file_path: str, commands: list[str]) -> None: ... def append(self, part: str, secret: bool = False) -> None: ... def get_commandline(self, obfuscate: bool = False) -> list[str]: ... @@ -49,6 +53,11 @@ class FormToCommandLineBuilder: def _process_option(self, field_info: FieldInfo) -> None: ... class FieldInfo: + param: FieldId + key: str + is_file: bool + cmd_opt: str + generate_download_link: bool @staticmethod def factory(key: str) -> FieldInfo: ... def __init__(self, param: FieldId) -> None: ... @@ -58,6 +67,10 @@ class FieldInfo: def __eq__(self, other: object) -> bool: ... class FieldFileInfo(FieldInfo): + mode: str + generate_download_link: bool + link_name: str + file_path: str def __init__(self, fimeta: FieldId) -> None: ... def before_script_execute(self) -> None: ... @classmethod @@ -65,6 +78,7 @@ class FieldFileInfo(FieldInfo): def save(self) -> None: ... class FieldOutFileInfo(FieldFileInfo): + file_suffix: str def __init__(self, fimeta: FieldId) -> None: ... def save(self) -> None: ... 
diff --git a/stubs/click-web/click_web/resources/index.pyi b/stubs/click-web/click_web/resources/index.pyi index c932a5a7e7b0..c0f3aebbbdf8 100644 --- a/stubs/click-web/click_web/resources/index.pyi +++ b/stubs/click-web/click_web/resources/index.pyi @@ -1,6 +1,9 @@ +from collections import OrderedDict from typing import Any import click def index() -> str: ... -def _click_to_tree(ctx: click.Context, node: click.Command, ancestors: list[click.Command] | None = None) -> dict[str, Any]: ... +def _click_to_tree( + ctx: click.Context, node: click.Command, ancestors: list[click.Command] | None = None +) -> OrderedDict[str, Any]: ... diff --git a/stubs/click-web/click_web/resources/input_fields.pyi b/stubs/click-web/click_web/resources/input_fields.pyi index 65b3018982eb..92e2289e9b85 100644 --- a/stubs/click-web/click_web/resources/input_fields.pyi +++ b/stubs/click-web/click_web/resources/input_fields.pyi @@ -1,9 +1,18 @@ -from typing import Any +from typing import Any, ClassVar, Final import click +from click_web.web_click_types import EmailParamType, PasswordParamType, TextAreaParamType class FieldId: - SEPARATOR: str + SEPARATOR: ClassVar[str] + command_index: int + param_index: int + param_type: str + click_type: str + nargs: int + form_type: str + name: str + key: str def __init__( self, @@ -22,7 +31,11 @@ class FieldId: class NotSupported(ValueError): ... class BaseInput: - param_type_cls: type | None + param_type_cls: type[click.types.ParamType] | None + ctx: click.Context + param: click.Parameter + command_index: int + param_index: int def __init__(self, ctx: click.Context, param: click.Parameter, command_index: int, param_index: int) -> None: ... def is_supported(self) -> bool: ... @property @@ -32,18 +45,37 @@ class BaseInput: def _to_cmd_line_name(self, name: str) -> str: ... def _build_name(self, name: str): ... -class ChoiceInput(BaseInput): ... -class FlagInput(BaseInput): ... -class IntInput(BaseInput): ... -class FloatInput(BaseInput): ... -class FolderInput(BaseInput): ... -class FileInput(BaseInput): ... -class EmailInput(BaseInput): ... -class PasswordInput(BaseInput): ... -class TextAreaInput(BaseInput): ... -class DefaultInput(BaseInput): ... - -INPUT_TYPES: list[type] -_DEFAULT_INPUT: list[type] +class ChoiceInput(BaseInput): + param_type_cls: type[click.Choice] + +class FlagInput(BaseInput): + param_type_cls: None + +class IntInput(BaseInput): + param_type_cls: type[click.types.IntParamType] + +class FloatInput(BaseInput): + param_type_cls: type[click.types.FloatParamType] + +class FolderInput(BaseInput): + param_type_cls: None + +class FileInput(BaseInput): + param_type_cls: None + +class EmailInput(BaseInput): + param_type_cls: type[EmailParamType] + +class PasswordInput(BaseInput): + param_type_cls: type[PasswordParamType] + +class TextAreaInput(BaseInput): + param_type_cls: type[TextAreaParamType] + +class DefaultInput(BaseInput): + param_type_cls: type[click.ParamType] + +INPUT_TYPES: Final[list[type[BaseInput]]] +_DEFAULT_INPUT: Final[list[type[DefaultInput]]] def get_input_field(ctx: click.Context, param: click.Parameter, command_index, param_index) -> dict[str, Any]: ... 
diff --git a/stubs/click-web/click_web/web_click_types.pyi b/stubs/click-web/click_web/web_click_types.pyi index 1546f7e7d7d1..b0e9e378fa5a 100644 --- a/stubs/click-web/click_web/web_click_types.pyi +++ b/stubs/click-web/click_web/web_click_types.pyi @@ -1,17 +1,17 @@ import re -import typing as t +from typing import ClassVar import click class EmailParamType(click.ParamType): - EMAIL_REGEX: re.Pattern[str] - def convert(self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None) -> t.Any: ... + EMAIL_REGEX: ClassVar[re.Pattern[str]] + def convert(self, value: str, param: click.Parameter | None, ctx: click.Context | None) -> str | None: ... class PasswordParamType(click.ParamType): - def convert(self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None) -> t.Any: ... + def convert(self, value: str, param: click.Parameter | None, ctx: click.Context | None) -> str | None: ... class TextAreaParamType(click.ParamType): - def convert(self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None) -> t.Any: ... + def convert(self, value: str, param: click.Parameter | None, ctx: click.Context | None) -> str | None: ... EMAIL_TYPE: EmailParamType PASSWORD_TYPE: PasswordParamType From e59461ce1a9e849d05dec945e8ae0f68770b292f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 11 Apr 2025 22:36:36 -0700 Subject: [PATCH 213/388] [stubsabot] Bump Markdown to 3.8.* (#13819) Release: https://pypi.org/pypi/Markdown/3.8 Homepage: https://Python-Markdown.github.io/ Repository: https://github.com/Python-Markdown/markdown Typeshed stubs: https://github.com/python/typeshed/tree/main/stubs/Markdown Changelog: https://python-markdown.github.io/changelog/ Diff: https://github.com/Python-Markdown/markdown/compare/3.7...3.8 Stubsabot analysis of the diff between the two releases: - 0 public Python files have been added. - 0 files included in typeshed's stubs have been deleted. - 12 files included in typeshed's stubs have been modified or renamed. - Total lines of Python code added: 730. - Total lines of Python code deleted: 103. If stubtest fails for this PR: - Leave this PR open (as a reminder, and to prevent stubsabot from opening another PR) - Fix stubtest failures in another PR, then close this PR Note that you will need to close and re-open the PR in order to trigger CI Co-authored-by: stubsabot <> --- stubs/Markdown/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/Markdown/METADATA.toml b/stubs/Markdown/METADATA.toml index 7720828f4533..68fead4ecf6b 100644 --- a/stubs/Markdown/METADATA.toml +++ b/stubs/Markdown/METADATA.toml @@ -1,4 +1,4 @@ -version = "3.7.*" +version = "3.8.*" upstream_repository = "https://github.com/Python-Markdown/markdown" partial_stub = true From e68c80e88932f0a77e464de3bfcd6c3c9cb0385e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mika=C3=ABl=20Capelle?= Date: Sat, 12 Apr 2025 19:09:31 +0200 Subject: [PATCH 214/388] Fix typing of Pickler.persistent_id and Unpickler.persistent_load for Python < 3.13. 
(#13818) --- stdlib/@tests/stubtest_allowlists/py310.txt | 6 ++++++ stdlib/@tests/stubtest_allowlists/py311.txt | 6 ++++++ stdlib/@tests/stubtest_allowlists/py312.txt | 6 ++++++ stdlib/@tests/stubtest_allowlists/py39.txt | 6 ++++++ stdlib/_pickle.pyi | 15 ++++++--------- 5 files changed, 30 insertions(+), 9 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py310.txt b/stdlib/@tests/stubtest_allowlists/py310.txt index d2d31ee8ef53..73bc3da9eb6e 100644 --- a/stdlib/@tests/stubtest_allowlists/py310.txt +++ b/stdlib/@tests/stubtest_allowlists/py310.txt @@ -295,3 +295,9 @@ typing_extensions\.Annotated # Undocumented implementation details # (Remove once 3.10.17 becomes available for all platforms) (email._header_value_parser.get_encoded_word)? (email._header_value_parser.make_quoted_pairs)? + +# These methods have no default implementation for Python < 3.13. +_pickle.Pickler.persistent_id +_pickle.Unpickler.persistent_load +pickle.Pickler.persistent_id +pickle.Unpickler.persistent_load diff --git a/stdlib/@tests/stubtest_allowlists/py311.txt b/stdlib/@tests/stubtest_allowlists/py311.txt index 92a7543862d7..655e603c06d1 100644 --- a/stdlib/@tests/stubtest_allowlists/py311.txt +++ b/stdlib/@tests/stubtest_allowlists/py311.txt @@ -259,3 +259,9 @@ typing_extensions\.Annotated # Undocumented implementation details # (Remove once 3.11.12 becomes available for all platforms) (email._header_value_parser.get_encoded_word)? (email._header_value_parser.make_quoted_pairs)? + +# These methods have no default implementation for Python < 3.13. +_pickle.Pickler.persistent_id +_pickle.Unpickler.persistent_load +pickle.Pickler.persistent_id +pickle.Unpickler.persistent_load diff --git a/stdlib/@tests/stubtest_allowlists/py312.txt b/stdlib/@tests/stubtest_allowlists/py312.txt index aaea7009760b..3222fed0d8aa 100644 --- a/stdlib/@tests/stubtest_allowlists/py312.txt +++ b/stdlib/@tests/stubtest_allowlists/py312.txt @@ -228,3 +228,9 @@ sunau.Au_write.initfp threading.Lock # Factory function at runtime, but that wouldn't let us use it in type hints types.SimpleNamespace.__init__ # class doesn't accept positional arguments but has default C signature typing_extensions\.Annotated # Undocumented implementation details + +# These methods have no default implementation for Python < 3.13. +_pickle.Pickler.persistent_id +_pickle.Unpickler.persistent_load +pickle.Pickler.persistent_id +pickle.Unpickler.persistent_load diff --git a/stdlib/@tests/stubtest_allowlists/py39.txt b/stdlib/@tests/stubtest_allowlists/py39.txt index 04e9978a1ed6..a912b6f85b5b 100644 --- a/stdlib/@tests/stubtest_allowlists/py39.txt +++ b/stdlib/@tests/stubtest_allowlists/py39.txt @@ -245,3 +245,9 @@ typing_extensions\.Annotated # Undocumented implementation details # Incompatible changes introduced in Python 3.9.22 # (Remove once 3.9.22 becomes available for all platforms) (email._header_value_parser.get_encoded_word)? + +# These methods have no default implementation for Python < 3.13. 
+_pickle.Pickler.persistent_id +_pickle.Unpickler.persistent_load +pickle.Pickler.persistent_id +pickle.Unpickler.persistent_load diff --git a/stdlib/_pickle.pyi b/stdlib/_pickle.pyi index 50bbb6bc16cd..8e8afb600efa 100644 --- a/stdlib/_pickle.pyi +++ b/stdlib/_pickle.pyi @@ -1,4 +1,3 @@ -import sys from _typeshed import ReadableBuffer, SupportsWrite from collections.abc import Callable, Iterable, Iterator, Mapping from pickle import PickleBuffer as PickleBuffer @@ -75,10 +74,9 @@ class Pickler: def memo(self, value: PicklerMemoProxy | dict[int, tuple[int, Any]]) -> None: ... def dump(self, obj: Any, /) -> None: ... def clear_memo(self) -> None: ... - if sys.version_info >= (3, 13): - def persistent_id(self, obj: Any, /) -> Any: ... - else: - persistent_id: Callable[[Any], Any] + + # this method has no default implementation for Python < 3.13 + def persistent_id(self, obj: Any, /) -> Any: ... @type_check_only class UnpicklerMemoProxy: @@ -101,7 +99,6 @@ class Unpickler: def memo(self, value: UnpicklerMemoProxy | dict[int, tuple[int, Any]]) -> None: ... def load(self) -> Any: ... def find_class(self, module_name: str, global_name: str, /) -> Any: ... - if sys.version_info >= (3, 13): - def persistent_load(self, pid: Any, /) -> Any: ... - else: - persistent_load: Callable[[Any], Any] + + # this method has no default implementation for Python < 3.13 + def persistent_load(self, pid: Any, /) -> Any: ... From 7eed9d17652de91f02ac86bd464a8c45e601d902 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sat, 12 Apr 2025 13:10:09 -0400 Subject: [PATCH 215/388] Import names from typing directly rather than importing module (#13761) --- .pre-commit-config.yaml | 2 +- pyproject.toml | 9 +++++++++ stdlib/@tests/test_cases/check_re.py | 12 ++++++------ .../test_cases/typing/check_regression_issue_9296.py | 8 ++++---- stdlib/_typeshed/__init__.pyi | 7 +++---- stdlib/typing_extensions.pyi | 10 +++++----- stubs/click-default-group/click_default_group.pyi | 4 ++-- stubs/click-log/click_log/options.pyi | 11 +++++------ stubs/click-web/click_web/web_click_types.pyi | 10 ++++++---- stubs/corus/corus/third/WikiExtractor.pyi | 4 ++-- 10 files changed, 43 insertions(+), 34 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6ed7dede571a..bf2fbc3f7488 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: name: Run ruff on the test cases args: - "--exit-non-zero-on-fix" - - "--select=FA,I,RUF100" + - "--select=FA,I,ICN001,RUF100" - "--no-force-exclude" - "--unsafe-fixes" files: '.*test_cases/.+\.py$' diff --git a/pyproject.toml b/pyproject.toml index 63e187ac1dcd..5d6bd434156b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,6 +83,8 @@ select = [ "FURB169", # Compare the identities of `{object}` and None instead of their respective types "FURB177", # Prefer `Path.cwd()` over `Path().resolve()` for current-directory lookups "FURB187", # Use of assignment of `reversed` on list `{name}` + # Used for lint.flake8-import-conventions.aliases + "ICN001", # `{name}` should be imported as `{asname}` # Autofixable flake8-use-pathlib only "PTH201", # Do not pass the current directory explicitly to `Path` "PTH210", # Invalid suffix passed to `.with_suffix()` @@ -217,6 +219,8 @@ ignore = [ "PLC0414", # Import alias does not rename original package ] "*_pb2.pyi" = [ + # Special autogenerated typing --> typing_extensions aliases + "ICN001", # `{name}` should be imported as `{asname}` # Leave the docstrings as-is, matching source "D", # pydocstyle # See comment on 
black's force-exclude config above @@ -226,6 +230,11 @@ ignore = [ [tool.ruff.lint.pydocstyle] convention = "pep257" # https://docs.astral.sh/ruff/settings/#lint_pydocstyle_convention +[tool.ruff.lint.flake8-import-conventions.aliases] +# Prevent aliasing these, as it causes false-negatives for certain rules +typing_extensions = "typing_extensions" +typing = "typing" + [tool.ruff.lint.isort] split-on-trailing-comma = false combine-as-imports = true diff --git a/stdlib/@tests/test_cases/check_re.py b/stdlib/@tests/test_cases/check_re.py index b6ab2b0d59d2..dee87b474fe2 100644 --- a/stdlib/@tests/test_cases/check_re.py +++ b/stdlib/@tests/test_cases/check_re.py @@ -2,18 +2,18 @@ import mmap import re -import typing as t +from typing import AnyStr, Match, Optional from typing_extensions import assert_type def check_search(str_pat: re.Pattern[str], bytes_pat: re.Pattern[bytes]) -> None: - assert_type(str_pat.search("x"), t.Optional[t.Match[str]]) - assert_type(bytes_pat.search(b"x"), t.Optional[t.Match[bytes]]) - assert_type(bytes_pat.search(bytearray(b"x")), t.Optional[t.Match[bytes]]) - assert_type(bytes_pat.search(mmap.mmap(0, 10)), t.Optional[t.Match[bytes]]) + assert_type(str_pat.search("x"), Optional[Match[str]]) + assert_type(bytes_pat.search(b"x"), Optional[Match[bytes]]) + assert_type(bytes_pat.search(bytearray(b"x")), Optional[Match[bytes]]) + assert_type(bytes_pat.search(mmap.mmap(0, 10)), Optional[Match[bytes]]) -def check_search_with_AnyStr(pattern: re.Pattern[t.AnyStr], string: t.AnyStr) -> re.Match[t.AnyStr]: +def check_search_with_AnyStr(pattern: re.Pattern[AnyStr], string: AnyStr) -> re.Match[AnyStr]: """See issue #9591""" match = pattern.search(string) if match is None: diff --git a/stdlib/@tests/test_cases/typing/check_regression_issue_9296.py b/stdlib/@tests/test_cases/typing/check_regression_issue_9296.py index 34c5631aeb1a..23beaa87ae05 100644 --- a/stdlib/@tests/test_cases/typing/check_regression_issue_9296.py +++ b/stdlib/@tests/test_cases/typing/check_regression_issue_9296.py @@ -1,15 +1,15 @@ from __future__ import annotations -import typing as t +from typing import Any, KeysView, TypeVar -KT = t.TypeVar("KT") +KT = TypeVar("KT") -class MyKeysView(t.KeysView[KT]): +class MyKeysView(KeysView[KT]): pass -d: dict[t.Any, t.Any] = {} +d: dict[Any, Any] = {} dict_keys = type(d.keys()) # This should not cause an error like `Member "register" is unknown`: diff --git a/stdlib/_typeshed/__init__.pyi b/stdlib/_typeshed/__init__.pyi index 99d21b67360a..a503637998d0 100644 --- a/stdlib/_typeshed/__init__.pyi +++ b/stdlib/_typeshed/__init__.pyi @@ -3,7 +3,6 @@ # See the README.md file in this directory for more information. import sys -import typing_extensions from collections.abc import Awaitable, Callable, Iterable, Sequence, Set as AbstractSet, Sized from dataclasses import Field from os import PathLike @@ -23,7 +22,7 @@ from typing import ( final, overload, ) -from typing_extensions import Buffer, LiteralString, TypeAlias +from typing_extensions import Buffer, LiteralString, Self as _Self, TypeAlias _KT = TypeVar("_KT") _KT_co = TypeVar("_KT_co", covariant=True) @@ -329,9 +328,9 @@ class structseq(Generic[_T_co]): # The second parameter will accept a dict of any kind without raising an exception, # but only has any meaning if you supply it a dict where the keys are strings. # https://github.com/python/typeshed/pull/6560#discussion_r767149830 - def __new__(cls, sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> typing_extensions.Self: ... 
+ def __new__(cls, sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> _Self: ... if sys.version_info >= (3, 13): - def __replace__(self, **kwargs: Any) -> typing_extensions.Self: ... + def __replace__(self, **kwargs: Any) -> _Self: ... # Superset of typing.AnyStr that also includes LiteralString AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) # noqa: Y001 diff --git a/stdlib/typing_extensions.pyi b/stdlib/typing_extensions.pyi index 3799f4e666e9..bad5fae880c0 100644 --- a/stdlib/typing_extensions.pyi +++ b/stdlib/typing_extensions.pyi @@ -1,7 +1,6 @@ import abc import enum import sys -import typing from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Incomplete, Unused from collections.abc import ( @@ -57,6 +56,7 @@ from typing import ( # noqa: Y022,Y037,Y038,Y039,UP035 Tuple as Tuple, Type as Type, TypedDict as TypedDict, + TypeVar as _TypeVar, Union as Union, _Alias, cast as cast, @@ -195,10 +195,10 @@ __all__ = [ "CapsuleType", ] -_T = typing.TypeVar("_T") -_F = typing.TypeVar("_F", bound=Callable[..., Any]) -_TC = typing.TypeVar("_TC", bound=type[object]) -_T_co = typing.TypeVar("_T_co", covariant=True) # Any type covariant containers. +_T = _TypeVar("_T") +_F = _TypeVar("_F", bound=Callable[..., Any]) +_TC = _TypeVar("_TC", bound=type[object]) +_T_co = _TypeVar("_T_co", covariant=True) # Any type covariant containers. class _Final: ... # This should be imported from typing but that breaks pytype diff --git a/stubs/click-default-group/click_default_group.pyi b/stubs/click-default-group/click_default_group.pyi index 5c73c4dd8db4..83541f3cfbbf 100644 --- a/stubs/click-default-group/click_default_group.pyi +++ b/stubs/click-default-group/click_default_group.pyi @@ -1,5 +1,5 @@ -import typing as t from _typeshed import Incomplete +from collections.abc import Sequence import click @@ -23,7 +23,7 @@ class DefaultCommandFormatter: formatter: click.HelpFormatter mark: str def __init__(self, group: click.Group, formatter: click.HelpFormatter, mark: str = ...) -> None: ... - def write_dl(self, rows: t.Sequence[tuple[str, str]], col_max: int = 30, col_spacing: int = -2) -> None: ... + def write_dl(self, rows: Sequence[tuple[str, str]], col_max: int = 30, col_spacing: int = -2) -> None: ... def __getattr__(self, attr: str) -> Incomplete: ... # __getattr__ used to ala-derive from click.HelpFormatter: # indent_increment: int diff --git a/stubs/click-log/click_log/options.pyi b/stubs/click-log/click_log/options.pyi index e1310ab8ea44..f5fa67d96eac 100644 --- a/stubs/click-log/click_log/options.pyi +++ b/stubs/click-log/click_log/options.pyi @@ -1,12 +1,11 @@ import logging -import typing as t +from collections.abc import Callable +from typing import Any, TypeVar from typing_extensions import TypeAlias import click -_AnyCallable: TypeAlias = t.Callable[..., t.Any] -_FC = t.TypeVar("_FC", bound=_AnyCallable | click.Command) +_AnyCallable: TypeAlias = Callable[..., Any] +_FC = TypeVar("_FC", bound=_AnyCallable | click.Command) -def simple_verbosity_option( - logger: logging.Logger | str | None = None, *names: str, **kwargs: t.Any -) -> t.Callable[[_FC], _FC]: ... +def simple_verbosity_option(logger: logging.Logger | str | None = None, *names: str, **kwargs: Any) -> Callable[[_FC], _FC]: ... 
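The import rewrites in this patch all follow the aliasing rule added to pyproject.toml above; roughly, the convention amounts to the sketch below (the `parse` function is made up purely for illustration, not taken from any of the touched files):

    # Flagged by ICN001 under the aliases configured above:
    #     import typing as t
    #     def parse(value: t.Any) -> t.Optional[str]: ...

    # Preferred: import the names from typing directly.
    from typing import Any, Optional

    def parse(value: Any) -> Optional[str]: ...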
diff --git a/stubs/click-web/click_web/web_click_types.pyi b/stubs/click-web/click_web/web_click_types.pyi index b0e9e378fa5a..d3a0d9bcc116 100644 --- a/stubs/click-web/click_web/web_click_types.pyi +++ b/stubs/click-web/click_web/web_click_types.pyi @@ -1,17 +1,19 @@ import re -from typing import ClassVar +from typing import ClassVar, TypeVar import click +_T = TypeVar("_T") + class EmailParamType(click.ParamType): EMAIL_REGEX: ClassVar[re.Pattern[str]] - def convert(self, value: str, param: click.Parameter | None, ctx: click.Context | None) -> str | None: ... + def convert(self, value: str, param: click.Parameter | None, ctx: click.Context | None) -> str: ... class PasswordParamType(click.ParamType): - def convert(self, value: str, param: click.Parameter | None, ctx: click.Context | None) -> str | None: ... + def convert(self, value: _T, param: click.Parameter | None, ctx: click.Context | None) -> _T: ... class TextAreaParamType(click.ParamType): - def convert(self, value: str, param: click.Parameter | None, ctx: click.Context | None) -> str | None: ... + def convert(self, value: _T, param: click.Parameter | None, ctx: click.Context | None) -> _T: ... EMAIL_TYPE: EmailParamType PASSWORD_TYPE: PasswordParamType diff --git a/stubs/corus/corus/third/WikiExtractor.pyi b/stubs/corus/corus/third/WikiExtractor.pyi index b8ee370c8a26..bda32a5d62a1 100644 --- a/stubs/corus/corus/third/WikiExtractor.pyi +++ b/stubs/corus/corus/third/WikiExtractor.pyi @@ -1,4 +1,3 @@ -import typing from _typeshed import Incomplete from collections.abc import Generator from math import ( @@ -14,6 +13,7 @@ from math import ( tan as tan, trunc as trunc, ) +from typing import TypeVar PY2: Incomplete text_type = str @@ -52,7 +52,7 @@ quote_quote: Incomplete spaces: Incomplete dots: Incomplete -_T = typing.TypeVar("_T") +_T = TypeVar("_T") class Template(list[_T]): @classmethod From 828f4dc3e152f3102a64d0068130862d2f72d676 Mon Sep 17 00:00:00 2001 From: Justine Krejcha Date: Sat, 12 Apr 2025 10:27:16 -0700 Subject: [PATCH 216/388] jwcrypto: type most of the rest of `JWT` and `JWKSet.generate` function (#13807) --- stubs/jwcrypto/jwcrypto/jwk.pyi | 34 +++++++++++++++++++++++++++------ stubs/jwcrypto/jwcrypto/jwt.pyi | 22 ++++++++++----------- 2 files changed, 39 insertions(+), 17 deletions(-) diff --git a/stubs/jwcrypto/jwcrypto/jwk.pyi b/stubs/jwcrypto/jwcrypto/jwk.pyi index 9b3d912c3686..9ec8c3866a41 100644 --- a/stubs/jwcrypto/jwcrypto/jwk.pyi +++ b/stubs/jwcrypto/jwcrypto/jwk.pyi @@ -1,7 +1,7 @@ from collections.abc import Callable, Sequence from enum import Enum from typing import Any, Literal, NamedTuple, TypeVar, overload -from typing_extensions import Self, deprecated +from typing_extensions import Self, TypeAlias, deprecated from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import ec, rsa @@ -46,7 +46,8 @@ class _X448_CURVE(NamedTuple): pubkey: UnimplementedOKPCurveKey privkey: UnimplementedOKPCurveKey -JWKTypesRegistry: dict[str, str] +_JWKKeyTypeSupported: TypeAlias = Literal["oct", "RSA", "EC", "OKP"] +JWKTypesRegistry: dict[_JWKKeyTypeSupported, str] class ParmType(Enum): name = "A string with a name" # pyright: ignore[reportAssignmentType] @@ -63,8 +64,12 @@ class JWKParameter(NamedTuple): JWKValuesRegistry: dict[str, dict[str, JWKParameter]] JWKParamsRegistry: dict[str, JWKParameter] JWKEllipticCurveRegistry: dict[str, str] -JWKUseRegistry: dict[str, str] -JWKOperationsRegistry: dict[str, str] +_JWKUseSupported: TypeAlias = Literal["sig", "enc"] 
+JWKUseRegistry: dict[_JWKUseSupported, str] +_JWKOperationSupported: TypeAlias = Literal[ + "sign", "verify", "encrypt", "decrypt", "wrapKey", "unwrapKey", "deriveKey", "deriveBits" +] +JWKOperationsRegistry: dict[_JWKOperationSupported, str] JWKpycaCurveMap: dict[str, str] IANANamedInformationHashAlgorithmRegistry: dict[ str, @@ -98,9 +103,26 @@ class InvalidJWKValue(JWException): ... class JWK(dict[str, Any]): def __init__(self, **kwargs) -> None: ... + # `kty` and the other keyword arguments are passed as `params` to the called generator + # function. The possible arguments depend on the value of `kty`. + # TODO: Add overloads for the individual `kty` values. + @classmethod + @overload + def generate( + cls, + *, + kty: Literal["RSA"], + public_exponent: int | None = None, + size: int | None = None, + kid: str | None = None, + alg: str | None = None, + use: _JWKUseSupported | None = None, + key_ops: list[_JWKOperationSupported] | None = None, + ) -> Self: ... @classmethod - def generate(cls, **kwargs) -> Self: ... - def generate_key(self, **params) -> None: ... + @overload + def generate(cls, *, kty: _JWKKeyTypeSupported, **kwargs) -> Self: ... + def generate_key(self, *, kty: _JWKKeyTypeSupported, **kwargs) -> None: ... def import_key(self, **kwargs) -> None: ... @classmethod def from_json(cls, key) -> Self: ... diff --git a/stubs/jwcrypto/jwcrypto/jwt.pyi b/stubs/jwcrypto/jwcrypto/jwt.pyi index 0ede9c5c633f..eb3f062e2a85 100644 --- a/stubs/jwcrypto/jwcrypto/jwt.pyi +++ b/stubs/jwcrypto/jwcrypto/jwt.pyi @@ -1,6 +1,6 @@ from _typeshed import Incomplete from collections.abc import Mapping -from typing import Any +from typing import Any, SupportsInt from typing_extensions import deprecated from jwcrypto.common import JWException, JWKeyNotFound @@ -49,31 +49,31 @@ class JWT: @header.setter def header(self, h: dict[str, Any] | str) -> None: ... @property - def claims(self): ... + def claims(self) -> str: ... @claims.setter - def claims(self, data) -> None: ... + def claims(self, data: str) -> None: ... @property def token(self): ... @token.setter def token(self, t) -> None: ... @property - def leeway(self): ... + def leeway(self) -> int: ... @leeway.setter - def leeway(self, lwy) -> None: ... + def leeway(self, lwy: SupportsInt) -> None: ... @property - def validity(self): ... + def validity(self) -> int: ... @validity.setter - def validity(self, v) -> None: ... + def validity(self, v: SupportsInt) -> None: ... @property def expected_type(self): ... @expected_type.setter def expected_type(self, v) -> None: ... def norm_typ(self, val): ... - def make_signed_token(self, key) -> None: ... - def make_encrypted_token(self, key) -> None: ... - def validate(self, key) -> None: ... + def make_signed_token(self, key: JWK) -> None: ... + def make_encrypted_token(self, key: JWK) -> None: ... + def validate(self, key: JWK | JWKSet) -> None: ... def deserialize(self, jwt, key: Incomplete | None = None) -> None: ... - def serialize(self, compact: bool = True): ... + def serialize(self, compact: bool = True) -> str: ... @classmethod def from_jose_token(cls, token): ... def __eq__(self, other: object) -> bool: ... 
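The overloads and return types annotated above describe the usual sign-and-serialize flow end to end. As a rough usage sketch (the RS256/2048-bit choices and the claim payload are placeholders, not taken from the patch):

    from jwcrypto.jwk import JWK
    from jwcrypto.jwt import JWT

    # Resolves through the new kty="RSA" overload of JWK.generate().
    key = JWK.generate(kty="RSA", size=2048)

    # header accepts a dict per the annotated setter; claims is typed as str.
    token = JWT(header={"alg": "RS256"}, claims='{"sub": "example-user"}')
    token.make_signed_token(key)
    signed: str = token.serialize()  # serialize() is now typed as returning str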
From d3197ed0ee20a96ca233ba8e125d1b5359d1bea3 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sat, 12 Apr 2025 21:48:13 +0400 Subject: [PATCH 217/388] Bump auth0-python to 4.9.* (#13803) --- stubs/auth0-python/METADATA.toml | 2 +- .../authentication/async_token_verifier.pyi | 8 +++----- .../auth0/authentication/base.pyi | 17 +++++++++-------- .../auth0/authentication/get_token.pyi | 8 ++++++++ .../auth0/authentication/token_verifier.pyi | 6 +++--- .../auth0/authentication/users.pyi | 6 +++--- stubs/auth0-python/auth0/exceptions.pyi | 10 +++++----- stubs/auth0-python/auth0/management/users.pyi | 2 ++ stubs/auth0-python/auth0/rest.pyi | 19 ++++++++++--------- stubs/auth0-python/auth0/rest_async.pyi | 5 ++--- 10 files changed, 46 insertions(+), 37 deletions(-) diff --git a/stubs/auth0-python/METADATA.toml b/stubs/auth0-python/METADATA.toml index 86faedcae1e1..8ef32734a656 100644 --- a/stubs/auth0-python/METADATA.toml +++ b/stubs/auth0-python/METADATA.toml @@ -1,3 +1,3 @@ -version = "4.8.*" +version = "4.9.*" upstream_repository = "https://github.com/auth0/auth0-python" requires = ["cryptography", "types-requests"] diff --git a/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi b/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi index a65856b039d0..3880c4a6c7e2 100644 --- a/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi +++ b/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from .. import TokenValidationError as TokenValidationError from ..rest_async import AsyncRestClient as AsyncRestClient from .token_verifier import ( @@ -18,9 +16,9 @@ class AsyncJwksFetcher(JwksFetcher): async def get_key(self, key_id: str): ... class AsyncTokenVerifier(TokenVerifier): - iss: Incomplete - aud: Incomplete - leeway: Incomplete + iss: str + aud: str + leeway: int def __init__( self, signature_verifier: AsyncAsymmetricSignatureVerifier, issuer: str, audience: str, leeway: int = 0 ) -> None: ... diff --git a/stubs/auth0-python/auth0/authentication/base.pyi b/stubs/auth0-python/auth0/authentication/base.pyi index e67c3c414e00..30c6021462cf 100644 --- a/stubs/auth0-python/auth0/authentication/base.pyi +++ b/stubs/auth0-python/auth0/authentication/base.pyi @@ -1,20 +1,21 @@ from _typeshed import Incomplete +from typing import Final from auth0.rest import RestClient as RestClient, RestClientOptions as RestClientOptions from auth0.types import RequestData as RequestData from .client_authentication import add_client_authentication as add_client_authentication -UNKNOWN_ERROR: str +UNKNOWN_ERROR: Final[str] class AuthenticationBase: - domain: Incomplete - client_id: Incomplete - client_secret: Incomplete - client_assertion_signing_key: Incomplete - client_assertion_signing_alg: Incomplete - protocol: Incomplete - client: Incomplete + domain: str + client_id: str + client_secret: str | None + client_assertion_signing_key: str | None + client_assertion_signing_alg: str | None + protocol: str + client: RestClient def __init__( self, domain: str, diff --git a/stubs/auth0-python/auth0/authentication/get_token.pyi b/stubs/auth0-python/auth0/authentication/get_token.pyi index 64457306c40f..02590cf34a17 100644 --- a/stubs/auth0-python/auth0/authentication/get_token.pyi +++ b/stubs/auth0-python/auth0/authentication/get_token.pyi @@ -19,3 +19,11 @@ class GetToken(AuthenticationBase): def refresh_token(self, refresh_token: str, scope: str = "", grant_type: str = "refresh_token"): ... 
def passwordless_login(self, username: str, otp: str, realm: str, scope: str, audience: str): ... def backchannel_login(self, auth_req_id: str, grant_type: str = "urn:openid:params:grant-type:ciba"): ... + def access_token_for_connection( + self, + subject_token_type: str, + subject_token: str, + requested_token_type: str, + connection: str | None = None, + grant_type: str = ..., + ): ... diff --git a/stubs/auth0-python/auth0/authentication/token_verifier.pyi b/stubs/auth0-python/auth0/authentication/token_verifier.pyi index c8c38ca3b0db..171e55501f9b 100644 --- a/stubs/auth0-python/auth0/authentication/token_verifier.pyi +++ b/stubs/auth0-python/auth0/authentication/token_verifier.pyi @@ -20,9 +20,9 @@ class AsymmetricSignatureVerifier(SignatureVerifier): def __init__(self, jwks_url: str, algorithm: str = "RS256", cache_ttl: int = ...) -> None: ... class TokenVerifier: - iss: Incomplete - aud: Incomplete - leeway: Incomplete + iss: str + aud: str + leeway: int def __init__(self, signature_verifier: SignatureVerifier, issuer: str, audience: str, leeway: int = 0) -> None: ... def verify( self, token: str, nonce: str | None = None, max_age: int | None = None, organization: str | None = None diff --git a/stubs/auth0-python/auth0/authentication/users.pyi b/stubs/auth0-python/auth0/authentication/users.pyi index c5a15db5221b..c986cbc2fcae 100644 --- a/stubs/auth0-python/auth0/authentication/users.pyi +++ b/stubs/auth0-python/auth0/authentication/users.pyi @@ -4,8 +4,8 @@ from auth0.rest import RestClient as RestClient, RestClientOptions as RestClient from auth0.types import TimeoutType as TimeoutType class Users: - domain: Incomplete - protocol: Incomplete - client: Incomplete + domain: str + protocol: str + client: RestClient def __init__(self, domain: str, telemetry: bool = True, timeout: TimeoutType = 5.0, protocol: str = "https") -> None: ... def userinfo(self, access_token: str) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/exceptions.pyi b/stubs/auth0-python/auth0/exceptions.pyi index f500aad795bc..22a62ca6d727 100644 --- a/stubs/auth0-python/auth0/exceptions.pyi +++ b/stubs/auth0-python/auth0/exceptions.pyi @@ -1,14 +1,14 @@ from _typeshed import Incomplete class Auth0Error(Exception): - status_code: Incomplete - error_code: Incomplete - message: Incomplete - content: Incomplete + status_code: int + error_code: str + message: str + content: Incomplete | None def __init__(self, status_code: int, error_code: str, message: str, content: Incomplete | None = None) -> None: ... class RateLimitError(Auth0Error): - reset_at: Incomplete + reset_at: int def __init__(self, error_code: str, message: str, reset_at: int) -> None: ... class TokenValidationError(Exception): ... diff --git a/stubs/auth0-python/auth0/management/users.pyi b/stubs/auth0-python/auth0/management/users.pyi index 6ba1283d69cf..e2e159b5755f 100644 --- a/stubs/auth0-python/auth0/management/users.pyi +++ b/stubs/auth0-python/auth0/management/users.pyi @@ -115,3 +115,5 @@ class Users: async def delete_authentication_methods_async(self, user_id: str): ... def delete_authentication_method_by_id(self, user_id: str, authentication_method_id: str): ... async def delete_authentication_method_by_id_async(self, user_id: str, authentication_method_id: str): ... + def list_tokensets(self, id: str, page: int = 0, per_page: int = 25, include_totals: bool = True): ... + def delete_tokenset_by_id(self, user_id: str, tokenset_id: str): ... 
diff --git a/stubs/auth0-python/auth0/rest.pyi b/stubs/auth0-python/auth0/rest.pyi index 131dd1d75fa1..5b12d48e7713 100644 --- a/stubs/auth0-python/auth0/rest.pyi +++ b/stubs/auth0-python/auth0/rest.pyi @@ -1,25 +1,26 @@ from _typeshed import Incomplete from collections.abc import Mapping +from typing import Final import requests from auth0.exceptions import Auth0Error as Auth0Error, RateLimitError as RateLimitError from auth0.rest_async import RequestsResponse as RequestsResponse from auth0.types import RequestData as RequestData, TimeoutType as TimeoutType -UNKNOWN_ERROR: str +UNKNOWN_ERROR: Final[str] class RestClientOptions: - telemetry: Incomplete - timeout: Incomplete - retries: Incomplete + telemetry: bool + timeout: TimeoutType + retries: int def __init__(self, telemetry: bool = True, timeout: TimeoutType = 5.0, retries: int = 3) -> None: ... class RestClient: - options: Incomplete - jwt: Incomplete - base_headers: Incomplete - telemetry: Incomplete - timeout: Incomplete + options: RestClientOptions + jwt: str | None + base_headers: dict[str, str] + telemetry: bool + timeout: TimeoutType def __init__( self, jwt: str | None, telemetry: bool = True, timeout: TimeoutType = 5.0, options: RestClientOptions | None = None ) -> None: ... diff --git a/stubs/auth0-python/auth0/rest_async.pyi b/stubs/auth0-python/auth0/rest_async.pyi index 7bc59c471611..fec31cd6c515 100644 --- a/stubs/auth0-python/auth0/rest_async.pyi +++ b/stubs/auth0-python/auth0/rest_async.pyi @@ -13,7 +13,6 @@ from .rest import ( class AsyncRestClient(RestClient): timeout: Incomplete - def __init__(self, *args, **kwargs) -> None: ... def set_session(self, session) -> None: ... async def get(self, url: str, params: dict[str, Incomplete] | None = None, headers: dict[str, str] | None = None): ... async def post(self, url: str, data: RequestData | None = None, headers: dict[str, str] | None = None): ... @@ -23,7 +22,7 @@ class AsyncRestClient(RestClient): async def delete(self, url: str, params: dict[str, Incomplete] | None = None, data: RequestData | None = None): ... class RequestsResponse: - status_code: Incomplete + status_code: int headers: Incomplete - text: Incomplete + text: str def __init__(self, response, text: str) -> None: ... 
From a1312d7b9bb653d5f79de72cdb36af0996a84883 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sun, 13 Apr 2025 02:06:02 +0400 Subject: [PATCH 218/388] Mark `Flask-Cors` as complete (#13820) --- stubs/Flask-Cors/METADATA.toml | 4 ---- stubs/Flask-Cors/flask_cors/__init__.pyi | 2 ++ 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/stubs/Flask-Cors/METADATA.toml b/stubs/Flask-Cors/METADATA.toml index 0d7c47f44a07..81c6b80b9b93 100644 --- a/stubs/Flask-Cors/METADATA.toml +++ b/stubs/Flask-Cors/METADATA.toml @@ -2,7 +2,3 @@ version = "5.0.*" upstream_repository = "https://github.com/corydolphin/flask-cors" # Requires a version of flask with a `py.typed` file requires = ["Flask>=2.0.0"] -partial_stub = true - -[tool.stubtest] -ignore_missing_stub = true diff --git a/stubs/Flask-Cors/flask_cors/__init__.pyi b/stubs/Flask-Cors/flask_cors/__init__.pyi index b0962a7393b2..e4d434834d33 100644 --- a/stubs/Flask-Cors/flask_cors/__init__.pyi +++ b/stubs/Flask-Cors/flask_cors/__init__.pyi @@ -5,3 +5,5 @@ from .extension import CORS as CORS from .version import __version__ as __version__ rootlogger: Logger + +__all__ = ["CORS", "__version__", "cross_origin"] From f8fe77ffb1a3cdc730bb20ab4f4aa5c21cca1ae9 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 14 Apr 2025 13:00:45 +0400 Subject: [PATCH 219/388] Complete `flake8-builtins` (#13824) --- stubs/flake8-builtins/METADATA.toml | 5 +---- stubs/flake8-builtins/flake8_builtins.pyi | 6 +++--- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/stubs/flake8-builtins/METADATA.toml b/stubs/flake8-builtins/METADATA.toml index 7375501f8e41..39b5d88a6f9f 100644 --- a/stubs/flake8-builtins/METADATA.toml +++ b/stubs/flake8-builtins/METADATA.toml @@ -1,6 +1,3 @@ version = "2.5.*" upstream_repository = "https://github.com/gforcada/flake8-builtins" -partial_stub = true - -[tool.stubtest] -ignore_missing_stub = true +requires = ["types-flake8"] diff --git a/stubs/flake8-builtins/flake8_builtins.pyi b/stubs/flake8-builtins/flake8_builtins.pyi index f0c1a68fae85..96043f368ba7 100644 --- a/stubs/flake8-builtins/flake8_builtins.pyi +++ b/stubs/flake8-builtins/flake8_builtins.pyi @@ -1,12 +1,12 @@ import ast from argparse import Namespace -from binascii import Incomplete from collections.abc import Iterator from typing import ClassVar from typing_extensions import TypeAlias +from flake8.options.manager import OptionManager + _Error: TypeAlias = tuple[int, int, str, type[BuiltinsChecker]] -_OptionManager: TypeAlias = Incomplete # flake8.options.manager.OptionManager class BuiltinsChecker: name: ClassVar[str] @@ -24,7 +24,7 @@ class BuiltinsChecker: def __init__(self, tree: ast.AST, filename: str) -> None: ... @classmethod - def add_options(cls, option_manager: _OptionManager) -> None: ... + def add_options(cls, option_manager: OptionManager) -> None: ... @classmethod def parse_options(cls, options: Namespace) -> None: ... def run(self) -> Iterator[_Error]: ... 
From cd2edb2589c22bbb790c9c7e4b2660094026d492 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 14 Apr 2025 13:04:59 +0400 Subject: [PATCH 220/388] Annotate `markdown.util.get_installed_extensions` (#13822) --- stubs/Markdown/markdown/util.pyi | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/stubs/Markdown/markdown/util.pyi b/stubs/Markdown/markdown/util.pyi index d1160266b30c..672c31a2449c 100644 --- a/stubs/Markdown/markdown/util.pyi +++ b/stubs/Markdown/markdown/util.pyi @@ -1,3 +1,4 @@ +import sys from collections.abc import Iterator from re import Pattern from typing import Final, Generic, TypedDict, TypeVar, overload @@ -18,7 +19,13 @@ HTML_PLACEHOLDER_RE: Final[Pattern[str]] TAG_PLACEHOLDER: Final[str] RTL_BIDI_RANGES: Final[tuple[tuple[str, str], tuple[str, str]]] -def get_installed_extensions(): ... +if sys.version_info >= (3, 10): + from importlib import metadata + def get_installed_extensions() -> metadata.EntryPoints: ... + +else: + def get_installed_extensions(): ... + def deprecated(message: str, stacklevel: int = 2): ... @overload def parseBoolValue(value: str) -> bool: ... From b8e1b5d7a49cb03d0b90b37b1c3969a8ee8ca23b Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 14 Apr 2025 11:11:06 +0200 Subject: [PATCH 221/388] Simplify and fix `urllib.parse.urlencode()` (#13815) Remove overloads and type vars. Introduce a protocol for the `quote_via` argument. This means that the interface accepted by the supplied `quote_via` is stricter, and is not dependent on the actual supplied types in the `query` argument, but must work with all possible query types. --- .../@tests/test_cases/urllib/check_parse.py | 12 +++++ stdlib/urllib/parse.pyi | 46 ++++++++----------- 2 files changed, 32 insertions(+), 26 deletions(-) create mode 100644 stdlib/@tests/test_cases/urllib/check_parse.py diff --git a/stdlib/@tests/test_cases/urllib/check_parse.py b/stdlib/@tests/test_cases/urllib/check_parse.py new file mode 100644 index 000000000000..f464f6341fdc --- /dev/null +++ b/stdlib/@tests/test_cases/urllib/check_parse.py @@ -0,0 +1,12 @@ +from __future__ import annotations + +from urllib.parse import quote, quote_plus, urlencode + +urlencode({"a": "b"}, quote_via=quote) +urlencode({b"a": b"b"}, quote_via=quote) +urlencode({"a": b"b"}, quote_via=quote) +urlencode({b"a": "b"}, quote_via=quote) +mixed_dict: dict[str | bytes, str | bytes] = {} +urlencode(mixed_dict, quote_via=quote) + +urlencode({"a": "b"}, quote_via=quote_plus) diff --git a/stdlib/urllib/parse.pyi b/stdlib/urllib/parse.pyi index f2fae0c3d402..a5ed616d25af 100644 --- a/stdlib/urllib/parse.pyi +++ b/stdlib/urllib/parse.pyi @@ -1,7 +1,7 @@ import sys -from collections.abc import Callable, Iterable, Mapping, Sequence +from collections.abc import Iterable, Mapping, Sequence from types import GenericAlias -from typing import Any, AnyStr, Generic, Literal, NamedTuple, TypeVar, overload +from typing import Any, AnyStr, Generic, Literal, NamedTuple, Protocol, overload, type_check_only from typing_extensions import TypeAlias __all__ = [ @@ -132,38 +132,32 @@ def urldefrag(url: str) -> DefragResult: ... @overload def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... -_Q = TypeVar("_Q", bound=str | Iterable[int]) +# The values are passed through `str()` (unless they are bytes), so anything is valid. 
_QueryType: TypeAlias = ( - Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]] + Mapping[str, object] + | Mapping[bytes, object] + | Mapping[str | bytes, object] + | Mapping[str, Sequence[object]] + | Mapping[bytes, Sequence[object]] + | Mapping[str | bytes, Sequence[object]] + | Sequence[tuple[str | bytes, object]] + | Sequence[tuple[str | bytes, Sequence[object]]] ) -@overload -def urlencode( - query: _QueryType, - doseq: bool = False, - safe: str = "", - encoding: str | None = None, - errors: str | None = None, - quote_via: Callable[[AnyStr, str, str, str], str] = ..., -) -> str: ... -@overload -def urlencode( - query: _QueryType, - doseq: bool, - safe: _Q, - encoding: str | None = None, - errors: str | None = None, - quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., -) -> str: ... -@overload +@type_check_only +class _QuoteVia(Protocol): + @overload + def __call__(self, string: str, safe: str | bytes, encoding: str, errors: str, /) -> str: ... + @overload + def __call__(self, string: bytes, safe: str | bytes, /) -> str: ... + def urlencode( query: _QueryType, doseq: bool = False, - *, - safe: _Q, + safe: str | bytes = "", encoding: str | None = None, errors: str | None = None, - quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., + quote_via: _QuoteVia = ..., ) -> str: ... def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: ... @overload From 3f22a63029cb90686ff6c08814fe074e6885dafe Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 14 Apr 2025 14:44:17 +0400 Subject: [PATCH 222/388] Deprecate `ssl` methods for NPN (#13823) --- stdlib/ssl.pyi | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/stdlib/ssl.pyi b/stdlib/ssl.pyi index 042d0266c649..9fbf5e8dfa84 100644 --- a/stdlib/ssl.pyi +++ b/stdlib/ssl.pyi @@ -28,7 +28,7 @@ from _ssl import ( from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from collections.abc import Callable, Iterable from typing import Any, Literal, NamedTuple, TypedDict, overload, type_check_only -from typing_extensions import Never, Self, TypeAlias +from typing_extensions import Never, Self, TypeAlias, deprecated if sys.version_info >= (3, 13): from _ssl import HAS_PSK as HAS_PSK @@ -369,7 +369,12 @@ class SSLSocket(socket.socket): def compression(self) -> str | None: ... def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... def selected_alpn_protocol(self) -> str | None: ... - def selected_npn_protocol(self) -> str | None: ... + if sys.version_info >= (3, 10): + @deprecated("Deprecated in 3.10. Use ALPN instead.") + def selected_npn_protocol(self) -> str | None: ... + else: + def selected_npn_protocol(self) -> str | None: ... + def accept(self) -> tuple[SSLSocket, socket._RetAddress]: ... def unwrap(self) -> socket.socket: ... def version(self) -> str | None: ... @@ -434,7 +439,12 @@ class SSLContext(_SSLContext): def set_default_verify_paths(self) -> None: ... def set_ciphers(self, cipherlist: str, /) -> None: ... def set_alpn_protocols(self, alpn_protocols: Iterable[str]) -> None: ... - def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None: ... + if sys.version_info >= (3, 10): + @deprecated("Deprecated in 3.10. Use ALPN instead.") + def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None: ... + else: + def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None: ... 
+ def set_servername_callback(self, server_name_callback: _SrvnmeCbType | None) -> None: ... def load_dh_params(self, path: str, /) -> None: ... def set_ecdh_curve(self, name: str, /) -> None: ... @@ -475,7 +485,12 @@ class SSLObject: @overload def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... def selected_alpn_protocol(self) -> str | None: ... - def selected_npn_protocol(self) -> str | None: ... + if sys.version_info >= (3, 10): + @deprecated("Deprecated in 3.10. Use ALPN instead.") + def selected_npn_protocol(self) -> str | None: ... + else: + def selected_npn_protocol(self) -> str | None: ... + def cipher(self) -> tuple[str, str, int] | None: ... def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... def compression(self) -> str | None: ... From abc3e5c9edb2e76cfce2bfd7a7d7bcc6b9722df8 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 15 Apr 2025 13:59:05 +0400 Subject: [PATCH 223/388] Improve `flake8-bugbear` (#13829) --- .../@tests/stubtest_allowlist.txt | 38 ++- stubs/flake8-bugbear/METADATA.toml | 4 - stubs/flake8-bugbear/bugbear.pyi | 281 +++++++++++++++++- 3 files changed, 311 insertions(+), 12 deletions(-) diff --git a/stubs/flake8-bugbear/@tests/stubtest_allowlist.txt b/stubs/flake8-bugbear/@tests/stubtest_allowlist.txt index 82f069091409..a6bfcd8a7896 100644 --- a/stubs/flake8-bugbear/@tests/stubtest_allowlist.txt +++ b/stubs/flake8-bugbear/@tests/stubtest_allowlist.txt @@ -1 +1,37 @@ -bugbear.BugBearChecker.__getattr__ +# Autogenerated methods using @attr.* decorators +bugbear.B040CaughtException.__attrs_attrs__ +bugbear.B040CaughtException.__attrs_own_setattr__ +bugbear.B040CaughtException.__match_args__ +bugbear.B041VariableKeyType.__attrs_attrs__ +bugbear.B041VariableKeyType.__match_args__ +bugbear.BugBearChecker.__attrs_attrs__ +bugbear.BugBearChecker.__ge__ +bugbear.BugBearChecker.__gt__ +bugbear.BugBearChecker.__le__ +bugbear.BugBearChecker.__lt__ +bugbear.BugBearChecker.__match_args__ +bugbear.BugBearVisitor.__attrs_attrs__ +bugbear.BugBearVisitor.__ge__ +bugbear.BugBearVisitor.__gt__ +bugbear.BugBearVisitor.__le__ +bugbear.BugBearVisitor.__lt__ +bugbear.BugBearVisitor.__match_args__ +bugbear.NameFinder.__attrs_attrs__ +bugbear.NameFinder.__ge__ +bugbear.NameFinder.__gt__ +bugbear.NameFinder.__le__ +bugbear.NameFinder.__lt__ +bugbear.NameFinder.__match_args__ +bugbear.NamedExprFinder.__attrs_attrs__ +bugbear.NamedExprFinder.__ge__ +bugbear.NamedExprFinder.__gt__ +bugbear.NamedExprFinder.__le__ +bugbear.NamedExprFinder.__lt__ +bugbear.NamedExprFinder.__match_args__ +# >= Python 3.13 +bugbear.B040CaughtException.__replace__ +bugbear.B041VariableKeyType.__replace__ +bugbear.BugBearChecker.__replace__ +bugbear.BugBearVisitor.__replace__ +bugbear.NameFinder.__replace__ +bugbear.NamedExprFinder.__replace__ diff --git a/stubs/flake8-bugbear/METADATA.toml b/stubs/flake8-bugbear/METADATA.toml index 34969cb41329..13d13490adac 100644 --- a/stubs/flake8-bugbear/METADATA.toml +++ b/stubs/flake8-bugbear/METADATA.toml @@ -1,6 +1,2 @@ version = "24.12.12" upstream_repository = "https://github.com/PyCQA/flake8-bugbear" -partial_stub = true - -[tool.stubtest] -ignore_missing_stub = true diff --git a/stubs/flake8-bugbear/bugbear.pyi b/stubs/flake8-bugbear/bugbear.pyi index d0b61fa80849..3e708239592e 100644 --- a/stubs/flake8-bugbear/bugbear.pyi +++ b/stubs/flake8-bugbear/bugbear.pyi @@ -1,19 +1,38 @@ import argparse import ast +import sys from _typeshed import Incomplete -from collections.abc import Sequence -from typing import Any +from collections.abc 
import Callable, Generator, Iterable, Sequence +from functools import partial +from logging import Logger +from typing import Any, ClassVar, Final, Literal, NamedTuple, overload + +__version__: Final[str] +LOG: Logger +CONTEXTFUL_NODES: Final[tuple[type[ast.AST], ...]] +FUNCTION_NODES: Final[tuple[type[ast.AST], ...]] +B908_pytest_functions: Final[set[str]] +B908_unittest_methods: Final[set[str]] +B902_default_decorators: Final[set[str]] + +class Context(NamedTuple): + node: ast.AST + stack: list[str] class BugBearChecker: - name: str - version: str + name: ClassVar[str] + version: ClassVar[str] tree: ast.AST | None filename: str lines: Sequence[str] | None max_line_length: int visitor: ast.NodeVisitor options: argparse.Namespace | None - def run(self) -> None: ... + def run(self) -> Generator[error]: ... + def gen_line_based_checks(self) -> Generator[error]: ... + @classmethod + def adapt_error(cls, e: error) -> tuple[int, int, str, type[BugBearChecker]]: ... + def load_file(self) -> None: ... @staticmethod def add_options(optmanager: Any) -> None: ... def __init__( @@ -24,6 +43,254 @@ class BugBearChecker: max_line_length: int = ..., options: argparse.Namespace | None = ..., ) -> None: ... - def __getattr__(self, name: str) -> Incomplete: ... # incomplete (other attributes are normally not accessed) + def should_warn(self, code: str) -> bool: ... + +def names_from_assignments(assign_target: ast.AST) -> Generator[str]: ... +def children_in_scope(node: ast.AST) -> Generator[ast.AST]: ... +def walk_list(nodes: Iterable[ast.AST]) -> Generator[ast.AST]: ... + +class ExceptBaseExceptionVisitor(ast.NodeVisitor): + root: ast.ExceptHandler + def __init__(self, except_node: ast.ExceptHandler) -> None: ... + def re_raised(self) -> bool: ... + def visit_Raise(self, node: ast.Raise) -> Incomplete | None: ... + def visit_ExceptHandler(self, node: ast.ExceptHandler) -> Incomplete | None: ... + +class B040CaughtException: + name: str + has_note: bool + def __init__(self, name: str, has_note: bool) -> None: ... + +class B041UnhandledKeyType: ... + +class B041VariableKeyType: + name: str + def __init__(self, name: str) -> None: ... + +class BugBearVisitor(ast.NodeVisitor): + NODE_WINDOW_SIZE: ClassVar[int] = 4 + in_trystar: str + def __init__( + self, + filename: str, + lines: Sequence[str] | None, + b008_b039_extend_immutable_calls: set[str] = ..., + b902_classmethod_decorators: set[str] = ..., + node_window: list[ast.AST] = ..., + errors: list[error] = ..., + contexts: list[Context] = ..., + b040_caught_exception: B040CaughtException | None = None, + in_trystar: str = "", + ) -> None: ... + @property + def node_stack(self) -> list[Context]: ... + def in_class_init(self) -> bool: ... + def visit_Return(self, node: ast.Return) -> None: ... + def visit_Yield(self, node: ast.Yield) -> None: ... + def visit_YieldFrom(self, node: ast.YieldFrom) -> None: ... + def visit(self, node: ast.AST) -> None: ... + def visit_ExceptHandler(self, node: ast.ExceptHandler) -> None: ... + def visit_UAdd(self, node: ast.UAdd) -> None: ... + def visit_Call(self, node: ast.Call) -> None: ... + def visit_Module(self, node: ast.Module) -> None: ... + def visit_Assign(self, node: ast.Assign) -> None: ... + def visit_For(self, node: ast.For) -> None: ... + def visit_AsyncFor(self, node: ast.AsyncFor) -> None: ... + def visit_While(self, node: ast.While) -> None: ... + def visit_ListComp(self, node: ast.ListComp) -> None: ... + def visit_SetComp(self, node: ast.SetComp) -> None: ... 
+ def visit_DictComp(self, node: ast.DictComp) -> None: ... + def visit_GeneratorExp(self, node: ast.GeneratorExp) -> None: ... + def visit_Assert(self, node: ast.Assert) -> None: ... + def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None: ... + def visit_FunctionDef(self, node: ast.FunctionDef) -> None: ... + def visit_ClassDef(self, node: ast.ClassDef) -> None: ... + def visit_Try(self, node: ast.Try) -> None: ... + if sys.version_info >= (3, 11): + def visit_TryStar(self, node: ast.TryStar) -> None: ... + else: + def visit_TryStar(self, node: ast.Try) -> None: ... + + def visit_Compare(self, node: ast.Compare) -> None: ... + def visit_Raise(self, node: ast.Raise) -> None: ... + def visit_With(self, node: ast.With) -> None: ... + def visit_JoinedStr(self, node: ast.JoinedStr) -> None: ... + def visit_AnnAssign(self, node: ast.AnnAssign) -> None: ... + def visit_Import(self, node: ast.Import) -> None: ... + def visit_ImportFrom(self, node: ast.ImportFrom) -> None: ... + def visit_Set(self, node: ast.Set) -> None: ... + def visit_Dict(self, node: ast.Dict) -> None: ... + def check_for_b041(self, node: ast.Dict) -> None: ... + def check_for_b005(self, node: ast.Import | ast.ImportFrom | ast.Call) -> None: ... + def check_for_b006_and_b008(self, node: ast.FunctionDef | ast.AsyncFunctionDef) -> None: ... + def check_for_b039(self, node: ast.Call) -> None: ... + def check_for_b007(self, node: ast.For | ast.AsyncFor) -> None: ... + def check_for_b011(self, node: ast.Assert) -> None: ... + if sys.version_info >= (3, 11): + def check_for_b012(self, node: ast.Try | ast.TryStar) -> None: ... + else: + def check_for_b012(self, node: ast.Try) -> None: ... + + def check_for_b013_b014_b029_b030(self, node: ast.ExceptHandler) -> list[str]: ... + def check_for_b015(self, node: ast.Compare) -> None: ... + def check_for_b016(self, node: ast.Raise) -> None: ... + def check_for_b017(self, node: ast.With | ast.AsyncWith) -> None: ... + def check_for_b019(self, node: ast.FunctionDef | ast.AsyncFunctionDef) -> None: ... + def check_for_b020(self, node: ast.For | ast.AsyncFor | ast.comprehension) -> None: ... + def check_for_b023(self, loop_node: ast.For | ast.AsyncFor | ast.comprehension) -> None: ... + def check_for_b024_and_b027(self, node: ast.ClassDef) -> None: ... + def check_for_b026(self, call: ast.Call) -> None: ... + def check_for_b031(self, loop_node: ast.For | ast.AsyncFor) -> None: ... + def check_for_b035(self, node: ast.DictComp) -> None: ... + def check_for_b040_add_note(self, node: ast.Attribute) -> bool: ... + def check_for_b040_usage(self, node: ast.expr | None) -> None: ... + def check_for_b904(self, node: ast.Raise) -> None: ... + def walk_function_body( + self, node: ast.FunctionDef | ast.AsyncFunctionDef + ) -> tuple[ast.FunctionDef | ast.AsyncFunctionDef, ast.stmt]: ... + def check_for_b901(self, node: ast.FunctionDef | ast.AsyncFunctionDef) -> None: ... + @overload + @classmethod + def find_decorator_name(cls, d: ast.Name | ast.Attribute | ast.Call) -> str: ... + @overload + @classmethod + def find_decorator_name(cls, d: ast.AST) -> str | None: ... + def check_for_b902(self, node: ast.FunctionDef | ast.AsyncFunctionDef) -> None: ... + def check_for_b903(self, node: ast.ClassDef) -> None: ... + def check_for_b018(self, node: ast.Expr) -> None: ... + def check_for_b021(self, node: ast.AsyncFunctionDef | ast.FunctionDef | ast.ClassDef | ast.Module) -> None: ... + def check_for_b022(self, node: ast.With | ast.AsyncWith) -> None: ... 
+ def check_for_b908(self, node: ast.With) -> None: ... + def check_for_b025(self, node: ast.Try) -> None: ... + def check_for_b905(self, node: ast.Call) -> None: ... + def check_for_b906(self, node: ast.FunctionDef) -> None: ... + def check_for_b907(self, node: ast.JoinedStr) -> None: ... + def check_for_b028(self, node: ast.Call) -> None: ... + def check_for_b032(self, node: ast.AnnAssign) -> None: ... + def check_for_b033(self, node: ast.Set | ast.List | ast.Tuple) -> None: ... + def check_for_b034(self, node: ast.Call) -> None: ... + def check_for_b909(self, node: ast.For) -> None: ... + def check_for_b910(self, node: ast.Call) -> None: ... + def check_for_b911(self, node: ast.Call) -> None: ... + +def compose_call_path(node: ast.expr) -> Generator[str]: ... +def is_name(node: ast.expr, name: str) -> bool: ... + +class B909Checker(ast.NodeVisitor): + MUTATING_FUNCTIONS: ClassVar[tuple[str, ...]] + name: str + key: str + mutations: dict[int, list[ast.AST]] + def __init__(self, name: str, key: str) -> None: ... + def visit_Assign(self, node: ast.Assign) -> None: ... + def visit_AugAssign(self, node: ast.AugAssign) -> None: ... + def visit_Delete(self, node: ast.Delete) -> None: ... + def visit_Call(self, node: ast.Call) -> None: ... + def visit_If(self, node: ast.If) -> None: ... + def visit(self, node: ast.AST | list[ast.AST]) -> Any: ... + +class NameFinder(ast.NodeVisitor): + names: dict[str, list[ast.Name]] + def __init__(self, names: dict[str, list[ast.Name]] = ...) -> None: ... + def visit_Name(self, node: ast.Name) -> None: ... + def visit(self, node: ast.AST | list[ast.AST]) -> Any: ... + +class NamedExprFinder(ast.NodeVisitor): + names: dict[str, list[ast.Name]] + def __init__(self, names: dict[str, list[ast.Name]] = ...) -> None: ... + def visit_NamedExpr(self, node: ast.NamedExpr) -> None: ... + def visit(self, node: ast.AST | list[ast.AST]) -> Any: ... + +class FunctionDefDefaultsVisitor(ast.NodeVisitor): + def __init__( + self, + error_code_calls: partial[error], + error_code_literals: partial[error], + b008_b039_extend_immutable_calls: set[str] | None = None, + ) -> None: ... + def visit_mutable_literal_or_comprehension( + self, node: ast.List | ast.Dict | ast.Set | ast.ListComp | ast.DictComp | ast.SetComp + ) -> None: ... + def visit_Call(self, node: ast.Call) -> None: ... + def visit_Lambda(self, node: ast.Lambda) -> None: ... + def visit(self, node: ast.AST | list[ast.AST]) -> None: ... + +class B020NameFinder(NameFinder): + def visit_GeneratorExp(self, node: ast.GeneratorExp) -> None: ... + def visit_ListComp(self, node: ast.ListComp) -> None: ... + def visit_DictComp(self, node: ast.DictComp) -> None: ... + def visit_comprehension(self, node: ast.comprehension) -> None: ... + def visit_Lambda(self, node: ast.Lambda) -> None: ... + +class error(NamedTuple): + lineno: int + col: int + message: str + type: type[BugBearChecker] + vars: tuple[Incomplete] -def __getattr__(name: str) -> Incomplete: ... 
+Error: Callable[..., partial[error]] +B001: partial[error] +B002: partial[error] +B003: partial[error] +B004: partial[error] +B005: partial[error] +B005_METHODS: Final[set[str]] +B006: partial[error] +B006_MUTABLE_LITERALS: Final[tuple[Literal["Dict"], Literal["List"], Literal["Set"]]] +B006_MUTABLE_COMPREHENSIONS: Final[tuple[Literal["ListComp"], Literal["DictComp"], Literal["SetComp"]]] +B006_MUTABLE_CALLS: Final[set[str]] +B007: partial[error] +B008: partial[error] +B008_IMMUTABLE_CALLS: Final[set[str]] +B009: partial[error] +B010: partial[error] +B011: partial[error] +B012: partial[error] +B013: partial[error] +B014: partial[error] +B014_REDUNDANT_EXCEPTIONS: Final[dict[Literal["OSError", "ValueError"], set[str]]] +B015: partial[error] +B016: partial[error] +B017: partial[error] +B018: partial[error] +B019: partial[error] +B019_CACHES: Final[set[str]] +B020: partial[error] +B021: partial[error] +B022: partial[error] +B023: partial[error] +B024: partial[error] +B025: partial[error] +B026: partial[error] +B027: partial[error] +B028: partial[error] +B029: partial[error] +B030: partial[error] +B031: partial[error] +B032: partial[error] +B033: partial[error] +B034: partial[error] +B035: partial[error] +B036: partial[error] +B037: partial[error] +B039: partial[error] +B040: partial[error] +B041: partial[error] +B901: partial[error] +B902: partial[error] +B902_IMPLICIT_CLASSMETHODS: Final[set[str]] +B902_SELF: Final[list[str]] +B902_CLS: Final[list[str]] +B902_METACLS: Final[list[str]] +B903: partial[error] +B904: partial[error] +B905: partial[error] +B906: partial[error] +B907: partial[error] +B908: partial[error] +B909: partial[error] +B910: partial[error] +B911: partial[error] +B950: partial[error] +disabled_by_default: Final[list[str]] From 8b359d1de4bfa61098f57264a06cbb8012cd0813 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 15 Apr 2025 15:31:53 +0400 Subject: [PATCH 224/388] Improve `docker.utils` (#13808) --- stubs/docker/docker/__init__.pyi | 4 +- stubs/docker/docker/auth.pyi | 63 +++++++++------ stubs/docker/docker/utils/build.pyi | 55 +++++++------ stubs/docker/docker/utils/config.pyi | 18 +++-- stubs/docker/docker/utils/decorators.pyi | 9 ++- stubs/docker/docker/utils/fnmatch.pyi | 6 +- stubs/docker/docker/utils/json_stream.pyi | 6 +- stubs/docker/docker/utils/ports.pyi | 8 +- stubs/docker/docker/utils/socket.pyi | 33 +++++--- stubs/docker/docker/utils/utils.pyi | 98 ++++++++++++++++------- 10 files changed, 188 insertions(+), 112 deletions(-) diff --git a/stubs/docker/docker/__init__.pyi b/stubs/docker/docker/__init__.pyi index 06c9d2dbc87f..29d1744d9092 100644 --- a/stubs/docker/docker/__init__.pyi +++ b/stubs/docker/docker/__init__.pyi @@ -1,7 +1,9 @@ +from typing import Final + from .api import APIClient as APIClient from .client import DockerClient as DockerClient, from_env as from_env from .context import Context as Context, ContextAPI as ContextAPI from .tls import TLSConfig as TLSConfig from .version import __version__ as __version__ -__title__: str +__title__: Final[str] diff --git a/stubs/docker/docker/auth.pyi b/stubs/docker/docker/auth.pyi index f181edfd32e8..c5edb95d7dc5 100644 --- a/stubs/docker/docker/auth.pyi +++ b/stubs/docker/docker/auth.pyi @@ -1,40 +1,53 @@ -from _typeshed import Incomplete +from _typeshed import FileDescriptorOrPath, Incomplete, ReadableBuffer +from collections.abc import Mapping, MutableMapping +from logging import Logger +from typing import Final +from typing_extensions import Self -INDEX_NAME: str -INDEX_URL: Incomplete 
-TOKEN_USERNAME: str -log: Incomplete +INDEX_NAME: Final[str] +INDEX_URL: Final[str] +TOKEN_USERNAME: Final[str] +log: Logger -def resolve_repository_name(repo_name): ... -def resolve_index_name(index_name): ... -def get_config_header(client, registry): ... -def split_repo_name(repo_name): ... -def get_credential_store(authconfig, registry): ... +def resolve_repository_name(repo_name: str) -> tuple[str, str]: ... +def resolve_index_name(index_name: str) -> str: ... +def get_config_header(client, registry) -> bytes | None: ... +def split_repo_name(repo_name: str) -> tuple[str, str]: ... +def get_credential_store(authconfig: AuthConfig | MutableMapping[str, Incomplete], registry: str | None): ... class AuthConfig(dict[str, Incomplete]): - def __init__(self, dct, credstore_env: Incomplete | None = None) -> None: ... + def __init__(self, dct: MutableMapping[str, Incomplete], credstore_env: Incomplete | None = None) -> None: ... @classmethod - def parse_auth(cls, entries, raise_on_error: bool = False): ... + def parse_auth( + cls, entries: Mapping[str, dict[Incomplete, Incomplete]], raise_on_error: bool = False + ) -> dict[str, Incomplete]: ... @classmethod - def load_config(cls, config_path, config_dict, credstore_env: Incomplete | None = None): ... + def load_config( + cls, + config_path: FileDescriptorOrPath | None, + config_dict: dict[str, Incomplete] | None, + credstore_env: Incomplete | None = None, + ) -> Self: ... @property - def auths(self): ... + def auths(self) -> dict[str, Incomplete]: ... @property def creds_store(self): ... @property def cred_helpers(self): ... @property - def is_empty(self): ... - def resolve_authconfig(self, registry: Incomplete | None = None): ... - def get_credential_store(self, registry): ... + def is_empty(self) -> bool: ... + def resolve_authconfig(self, registry: str | None = None): ... + def get_credential_store(self, registry: str | None): ... def get_all_credentials(self): ... - def add_auth(self, reg, data) -> None: ... + def add_auth(self, reg: str, data) -> None: ... -def resolve_authconfig(authconfig, registry: Incomplete | None = None, credstore_env: Incomplete | None = None): ... -def convert_to_hostname(url): ... -def decode_auth(auth): ... -def encode_header(auth): ... -def parse_auth(entries, raise_on_error: bool = False): ... +def resolve_authconfig(authconfig, registry: str | None = None, credstore_env: Incomplete | None = None): ... +def convert_to_hostname(url: str) -> str: ... +def decode_auth(auth: str | ReadableBuffer) -> tuple[str, str]: ... +def encode_header(auth) -> bytes: ... +def parse_auth(entries: Mapping[str, dict[Incomplete, Incomplete]], raise_on_error: bool = False): ... def load_config( - config_path: Incomplete | None = None, config_dict: Incomplete | None = None, credstore_env: Incomplete | None = None -): ... + config_path: FileDescriptorOrPath | None = None, + config_dict: dict[str, Incomplete] | None = None, + credstore_env: Incomplete | None = None, +) -> AuthConfig: ... diff --git a/stubs/docker/docker/utils/build.pyi b/stubs/docker/docker/utils/build.pyi index db2fb02f476b..57100aec43b6 100644 --- a/stubs/docker/docker/utils/build.pyi +++ b/stubs/docker/docker/utils/build.pyi @@ -1,38 +1,43 @@ -from _typeshed import Incomplete +import io +from _typeshed import Incomplete, StrOrBytesPath, StrPath +from collections.abc import Generator, Iterable, MutableSequence +from os import PathLike +from tarfile import _Fileobj +from tempfile import _TemporaryFileWrapper def match_tag(tag: str) -> bool: ... 
def tar( - path, - exclude: Incomplete | None = None, - dockerfile: Incomplete | None = None, - fileobj: Incomplete | None = None, + path: PathLike[str], + exclude: list[str] | None = None, + dockerfile: tuple[str | None, str | None] | None = None, + fileobj: _Fileobj | None = None, gzip: bool = False, -): ... -def exclude_paths(root, patterns, dockerfile: Incomplete | None = None): ... -def build_file_list(root): ... +) -> _TemporaryFileWrapper[bytes] | _Fileobj: ... +def exclude_paths(root: StrPath, patterns: MutableSequence[str], dockerfile: str | None = None) -> set[str]: ... +def build_file_list(root: str) -> list[str]: ... def create_archive( - root, - files: Incomplete | None = None, - fileobj: Incomplete | None = None, + root: str, + files: Iterable[str] | None = None, + fileobj: _Fileobj | None = None, gzip: bool = False, extra_files: Incomplete | None = None, -): ... -def mkbuildcontext(dockerfile): ... -def split_path(p): ... -def normalize_slashes(p): ... -def walk(root, patterns, default: bool = True): ... +) -> _TemporaryFileWrapper[bytes] | _Fileobj: ... +def mkbuildcontext(dockerfile: io.IOBase | StrOrBytesPath) -> _TemporaryFileWrapper[bytes]: ... +def split_path(p: str) -> list[str]: ... +def normalize_slashes(p: str) -> str: ... +def walk(root: StrPath, patterns: Iterable[str], default: bool = True) -> Generator[str]: ... class PatternMatcher: - patterns: Incomplete - def __init__(self, patterns) -> None: ... - def matches(self, filepath): ... - def walk(self, root): ... + patterns: list[Pattern] + def __init__(self, patterns: Iterable[str]) -> None: ... + def matches(self, filepath: PathLike[str]) -> bool: ... + def walk(self, root: StrPath) -> Generator[str]: ... class Pattern: exclusion: bool - dirs: Incomplete - cleaned_pattern: Incomplete - def __init__(self, pattern_str) -> None: ... + dirs: list[str] + cleaned_pattern: str + def __init__(self, pattern_str: str) -> None: ... @classmethod - def normalize(cls, p): ... - def match(self, filepath): ... + def normalize(cls, p: str) -> list[str]: ... + def match(self, filepath: str) -> bool: ... diff --git a/stubs/docker/docker/utils/config.pyi b/stubs/docker/docker/utils/config.pyi index 68aaa9368656..50941696e163 100644 --- a/stubs/docker/docker/utils/config.pyi +++ b/stubs/docker/docker/utils/config.pyi @@ -1,10 +1,12 @@ -from _typeshed import Incomplete +from _typeshed import FileDescriptorOrPath +from logging import Logger +from typing import Final -DOCKER_CONFIG_FILENAME: Incomplete -LEGACY_DOCKER_CONFIG_FILENAME: str -log: Incomplete +DOCKER_CONFIG_FILENAME: Final[str] +LEGACY_DOCKER_CONFIG_FILENAME: Final[str] +log: Logger -def find_config_file(config_path: Incomplete | None = None): ... -def config_path_from_environment(): ... -def home_dir(): ... -def load_general_config(config_path: Incomplete | None = None): ... +def find_config_file(config_path: FileDescriptorOrPath | None = None) -> FileDescriptorOrPath | None: ... +def config_path_from_environment() -> str | None: ... +def home_dir() -> str: ... +def load_general_config(config_path: FileDescriptorOrPath | None = None): ... diff --git a/stubs/docker/docker/utils/decorators.pyi b/stubs/docker/docker/utils/decorators.pyi index 528f512276e0..d79c8888999b 100644 --- a/stubs/docker/docker/utils/decorators.pyi +++ b/stubs/docker/docker/utils/decorators.pyi @@ -1,3 +1,6 @@ -def check_resource(resource_name): ... -def minimum_version(version): ... -def update_headers(f): ... 
+from _typeshed import Incomplete +from collections.abc import Callable + +def check_resource(resource_name: str): ... +def minimum_version(version: str): ... +def update_headers(f: Callable[..., Incomplete]): ... diff --git a/stubs/docker/docker/utils/fnmatch.pyi b/stubs/docker/docker/utils/fnmatch.pyi index 98a2a4c6101e..631a71ee5c0a 100644 --- a/stubs/docker/docker/utils/fnmatch.pyi +++ b/stubs/docker/docker/utils/fnmatch.pyi @@ -1,5 +1,5 @@ __all__ = ["fnmatch", "fnmatchcase", "translate"] -def fnmatch(name, pat): ... -def fnmatchcase(name, pat): ... -def translate(pat): ... +def fnmatch(name: str, pat: str) -> bool: ... +def fnmatchcase(name: str, pat: str) -> bool: ... +def translate(pat: str) -> str: ... diff --git a/stubs/docker/docker/utils/json_stream.pyi b/stubs/docker/docker/utils/json_stream.pyi index 883144018241..4805785eb27b 100644 --- a/stubs/docker/docker/utils/json_stream.pyi +++ b/stubs/docker/docker/utils/json_stream.pyi @@ -6,10 +6,10 @@ from docker._types import JSON json_decoder: json.JSONDecoder -def stream_as_text(stream: Iterator[str | bytes]) -> Generator[str, None, None]: ... +def stream_as_text(stream: Iterator[str | bytes]) -> Generator[str]: ... def json_splitter(buffer: str) -> tuple[JSON, str] | None: ... -def json_stream(stream: Iterator[str]) -> Generator[JSON, None, None]: ... +def json_stream(stream: Iterator[str]) -> Generator[JSON]: ... def line_splitter(buffer: str, separator: str = "\n") -> tuple[str, str] | None: ... def split_buffer( stream: Iterator[str | bytes], splitter: Callable[[str], tuple[str, str]] | None = None, decoder: Callable[[str], Any] = ... -) -> Generator[Any, None, None]: ... +) -> Generator[Any]: ... diff --git a/stubs/docker/docker/utils/ports.pyi b/stubs/docker/docker/utils/ports.pyi index 421c1ea51e31..ffc6615f36b4 100644 --- a/stubs/docker/docker/utils/ports.pyi +++ b/stubs/docker/docker/utils/ports.pyi @@ -1,9 +1,11 @@ +import re from _typeshed import Incomplete +from typing import Final -PORT_SPEC: Incomplete +PORT_SPEC: Final[re.Pattern[str]] def add_port_mapping(port_bindings, internal_port, external) -> None: ... def add_port(port_bindings, internal_port_range, external_range) -> None: ... -def build_port_bindings(ports): ... +def build_port_bindings(ports) -> dict[Incomplete, Incomplete]: ... def port_range(start, end, proto, randomly_available_port: bool = False): ... -def split_port(port): ... +def split_port(port: object) -> tuple[Incomplete, Incomplete]: ... diff --git a/stubs/docker/docker/utils/socket.pyi b/stubs/docker/docker/utils/socket.pyi index 87b84b277be0..3914c3f6268d 100644 --- a/stubs/docker/docker/utils/socket.pyi +++ b/stubs/docker/docker/utils/socket.pyi @@ -1,18 +1,29 @@ -from _typeshed import Incomplete -from collections.abc import Generator +from _typeshed import Incomplete, ReadableBuffer +from collections.abc import Generator, Iterable +from typing import Final, Literal, TypeVar, overload -STDOUT: int -STDERR: int +_T = TypeVar("_T") + +STDOUT: Final = 1 +STDERR: Final = 2 class SocketError(Exception): ... -NPIPE_ENDED: int +NPIPE_ENDED: Final = 109 def read(socket, n: int = 4096): ... -def read_exactly(socket, n): ... -def next_frame_header(socket): ... +def read_exactly(socket, n: int) -> bytes: ... +def next_frame_header(socket) -> tuple[Incomplete, int]: ... def frames_iter(socket, tty): ... -def frames_iter_no_tty(socket) -> Generator[Incomplete, None, None]: ... -def frames_iter_tty(socket) -> Generator[Incomplete, None, None]: ... 
-def consume_socket_output(frames, demux: bool = False): ... -def demux_adaptor(stream_id, data): ... +def frames_iter_no_tty(socket) -> Generator[tuple[str | Incomplete, str | bytes | Incomplete]]: ... +def frames_iter_tty(socket) -> Generator[Incomplete]: ... +@overload +def consume_socket_output( + frames: Iterable[tuple[Incomplete, Incomplete]], demux: Literal[True] +) -> tuple[Incomplete, Incomplete]: ... +@overload +def consume_socket_output(frames: Iterable[ReadableBuffer], demux: Literal[False] = False) -> bytes: ... +@overload +def demux_adaptor(stream_id: Literal[1], data: _T) -> tuple[_T, None]: ... +@overload +def demux_adaptor(stream_id: Literal[2], data: _T) -> tuple[None, _T]: ... diff --git a/stubs/docker/docker/utils/utils.pyi b/stubs/docker/docker/utils/utils.pyi index a7815ab01e89..4ac647e39c15 100644 --- a/stubs/docker/docker/utils/utils.pyi +++ b/stubs/docker/docker/utils/utils.pyi @@ -1,34 +1,72 @@ -from _typeshed import Incomplete -from typing import NamedTuple +import datetime +from _typeshed import FileDescriptorOrPath, Incomplete, ReadableBuffer +from collections.abc import Iterable, Mapping +from shlex import _ShlexInstream +from typing import Literal, NamedTuple, NoReturn, TypedDict, TypeVar, overload, type_check_only +from typing_extensions import deprecated + +from ..tls import TLSConfig + +_T = TypeVar("_T") +_K = TypeVar("_K") +_V = TypeVar("_V") + +@type_check_only +class _EnvKWArgs(TypedDict, total=False): + base_url: str + tls: TLSConfig class URLComponents(NamedTuple): - scheme: Incomplete - netloc: Incomplete - url: Incomplete - params: Incomplete - query: Incomplete - fragment: Incomplete + scheme: str | None + netloc: str | None + url: str + params: str | None + query: str | None + fragment: str | None -def create_ipam_pool(*args, **kwargs) -> None: ... -def create_ipam_config(*args, **kwargs) -> None: ... -def decode_json_header(header): ... -def compare_version(v1, v2): ... -def version_lt(v1, v2): ... -def version_gte(v1, v2): ... -def convert_port_bindings(port_bindings): ... -def convert_volume_binds(binds): ... -def convert_tmpfs_mounts(tmpfs): ... -def convert_service_networks(networks): ... -def parse_repository_tag(repo_name): ... -def parse_host(addr, is_win32: bool = False, tls: bool = False): ... -def parse_devices(devices): ... -def kwargs_from_env(environment: Incomplete | None = None): ... -def convert_filters(filters): ... -def datetime_to_timestamp(dt): ... -def parse_bytes(s): ... +@deprecated("utils.create_ipam_pool has been removed. Please use a docker.types.IPAMPool object instead.") +def create_ipam_pool(*args, **kwargs) -> NoReturn: ... +@deprecated("utils.create_ipam_config has been removed. Please use a docker.types.IPAMConfig object instead.") +def create_ipam_config(*args, **kwargs) -> NoReturn: ... +def decode_json_header(header: str | ReadableBuffer): ... +def compare_version(v1: str, v2: str) -> Literal[0, -1, 1]: ... +def version_lt(v1: str, v2: str) -> bool: ... +def version_gte(v1: str, v2: str) -> bool: ... +def convert_port_bindings( + port_bindings: Mapping[object, Incomplete], # keys are converted using str() +) -> dict[str, list[dict[str, str]]]: ... +@overload +def convert_volume_binds(binds: list[_T]) -> list[_T]: ... +@overload +def convert_volume_binds(binds: Mapping[str | bytes, Incomplete]) -> list[str]: ... +@overload +def convert_tmpfs_mounts(tmpfs: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def convert_tmpfs_mounts(tmpfs: list[str]) -> dict[str, str]: ... 
+@overload +def convert_service_networks(networks: None) -> None: ... +@overload +def convert_service_networks(networks: list[str] | list[dict[str, str]] | list[str | dict[str, str]]) -> list[dict[str, str]]: ... +def parse_repository_tag(repo_name: str) -> tuple[str, str | None]: ... +@overload +def parse_host(addr: None, is_win32: Literal[True], tls: bool = False) -> Literal["npipe:////./pipe/docker_engine"]: ... +@overload +def parse_host( + addr: None, is_win32: Literal[False] = False, tls: bool = False +) -> Literal["http+unix:///var/run/docker.sock"]: ... +@overload +def parse_host(addr: str | None, is_win32: bool = False, tls: bool = False) -> str | bytes: ... +def parse_devices(devices: Iterable[str | dict[str, Incomplete]]) -> list[dict[str, Incomplete]]: ... +def kwargs_from_env(environment: Mapping[str, Incomplete] | None = None) -> _EnvKWArgs: ... +def convert_filters(filters) -> str: ... +def datetime_to_timestamp(dt: datetime.datetime) -> int: ... +def parse_bytes(s: float | str) -> float: ... def normalize_links(links): ... -def parse_env_file(env_file): ... -def split_command(command): ... -def format_environment(environment): ... -def format_extra_hosts(extra_hosts, task: bool = False): ... -def create_host_config(self, *args, **kwargs) -> None: ... +def parse_env_file(env_file: FileDescriptorOrPath) -> dict[str, str]: ... +def split_command(command: str | _ShlexInstream) -> list[str]: ... +def format_environment(environment: Mapping[str, object | None]) -> list[str]: ... +def format_extra_hosts( + extra_hosts: Mapping[object, object], task: bool = False # keys and values are converted to str +) -> list[str]: ... +@deprecated("utils.create_host_config has been removed. Please use a docker.types.HostConfig object instead.") +def create_host_config(self, *args, **kwargs) -> NoReturn: ... 
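As a quick illustration of the `Literal`-keyed overloads introduced for `docker.utils.socket` above: a type checker can now narrow the shape of the returned frame from the stream-id argument alone, and narrow `consume_socket_output` to `bytes` when `demux=False`. A minimal sketch of how that reads from the caller's side, assuming the `docker` package is installed; the byte payloads below are invented for illustration, not output from a real container:

from docker.utils.socket import STDERR, STDOUT, consume_socket_output, demux_adaptor

# STDOUT is Final = 1, so this call matches the Literal[1] overload and the
# result is typed as tuple[bytes, None]; STDERR (2) gives tuple[None, bytes].
out_frame = demux_adaptor(STDOUT, b"Step 1/3 : FROM python:3.12")
err_frame = demux_adaptor(STDERR, b"warning: using cache")

# With demux=False the Literal[False] overload types the result as plain bytes:
# the frames are simply concatenated.
merged = consume_socket_output([b"hello ", b"world"], demux=False)
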
From debbe471acd7344add2b46f58a566ccfd1234a96 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 15 Apr 2025 07:52:05 -0400 Subject: [PATCH 225/388] auth0-python: Cleanup re-exports leftovers from stubgen (#13828) --- .../@tests/stubtest_allowlist.txt | 7 - stubs/auth0-python/auth0/__init__.pyi | 6 +- stubs/auth0-python/auth0/asyncify.pyi | 10 +- .../auth0/authentication/__init__.pyi | 16 +-- .../authentication/async_token_verifier.pyi | 8 +- .../authentication/back_channel_login.pyi | 2 +- .../auth0/authentication/base.pyi | 6 +- .../auth0/authentication/database.pyi | 2 +- .../auth0/authentication/delegated.pyi | 2 +- .../auth0/authentication/enterprise.pyi | 2 +- .../auth0/authentication/get_token.pyi | 2 +- .../auth0/authentication/passwordless.pyi | 2 +- .../pushed_authorization_requests.pyi | 2 +- .../auth0/authentication/revoke_token.pyi | 2 +- .../auth0/authentication/social.pyi | 2 +- .../auth0/authentication/token_verifier.pyi | 2 - .../auth0/authentication/users.pyi | 4 +- .../auth0/management/__init__.pyi | 60 ++++----- .../auth0-python/auth0/management/actions.pyi | 4 +- .../auth0/management/async_auth0.pyi | 5 +- .../auth0/management/attack_protection.pyi | 4 +- stubs/auth0-python/auth0/management/auth0.pyi | 124 +++++++++--------- .../auth0/management/blacklists.pyi | 4 +- .../auth0/management/branding.pyi | 4 +- .../auth0/management/client_credentials.pyi | 4 +- .../auth0/management/client_grants.pyi | 4 +- .../auth0-python/auth0/management/clients.pyi | 4 +- .../auth0/management/connections.pyi | 4 +- .../auth0/management/custom_domains.pyi | 4 +- .../auth0/management/device_credentials.pyi | 4 +- .../auth0/management/email_templates.pyi | 4 +- .../auth0-python/auth0/management/emails.pyi | 4 +- .../auth0-python/auth0/management/grants.pyi | 4 +- .../auth0/management/guardian.pyi | 4 +- stubs/auth0-python/auth0/management/hooks.pyi | 4 +- stubs/auth0-python/auth0/management/jobs.pyi | 4 +- .../auth0/management/log_streams.pyi | 4 +- stubs/auth0-python/auth0/management/logs.pyi | 4 +- .../auth0/management/organizations.pyi | 4 +- .../auth0-python/auth0/management/prompts.pyi | 4 +- .../auth0/management/resource_servers.pyi | 4 +- stubs/auth0-python/auth0/management/roles.pyi | 4 +- stubs/auth0-python/auth0/management/rules.pyi | 4 +- .../auth0/management/rules_configs.pyi | 4 +- stubs/auth0-python/auth0/management/stats.pyi | 4 +- .../auth0-python/auth0/management/tenants.pyi | 4 +- .../auth0-python/auth0/management/tickets.pyi | 4 +- .../auth0/management/user_blocks.pyi | 4 +- stubs/auth0-python/auth0/management/users.pyi | 4 +- .../auth0/management/users_by_email.pyi | 4 +- stubs/auth0-python/auth0/rest.pyi | 5 +- stubs/auth0-python/auth0/rest_async.pyi | 11 +- 52 files changed, 184 insertions(+), 218 deletions(-) diff --git a/stubs/auth0-python/@tests/stubtest_allowlist.txt b/stubs/auth0-python/@tests/stubtest_allowlist.txt index 8264245ada21..e83dc7be660e 100644 --- a/stubs/auth0-python/@tests/stubtest_allowlist.txt +++ b/stubs/auth0-python/@tests/stubtest_allowlist.txt @@ -5,13 +5,6 @@ auth0\.test.* auth0\..*_async # Inconsistently implemented, ommitted -auth0.asyncify.AsyncRestClient.file_post -auth0.authentication.async_token_verifier.AsyncRestClient.file_post auth0.management.Auth0\..* auth0.rest_async.AsyncRestClient.file_post auth0.authentication.async_token_verifier.AsyncTokenVerifier.verify - -# TYPE_CHECKING override makes these show up wrong -auth0.management.async_auth0.RestClientOptions -auth0.management.auth0.RestClientOptions -auth0.rest.RequestsResponse 
diff --git a/stubs/auth0-python/auth0/__init__.pyi b/stubs/auth0-python/auth0/__init__.pyi index 3dfa028c921c..48e8b1da8e62 100644 --- a/stubs/auth0-python/auth0/__init__.pyi +++ b/stubs/auth0-python/auth0/__init__.pyi @@ -1,7 +1,3 @@ -from auth0.exceptions import ( - Auth0Error as Auth0Error, - RateLimitError as RateLimitError, - TokenValidationError as TokenValidationError, -) +from auth0.exceptions import Auth0Error, RateLimitError, TokenValidationError __all__ = ("Auth0Error", "RateLimitError", "TokenValidationError") diff --git a/stubs/auth0-python/auth0/asyncify.pyi b/stubs/auth0-python/auth0/asyncify.pyi index 37c0503bc273..c4109958aad6 100644 --- a/stubs/auth0-python/auth0/asyncify.pyi +++ b/stubs/auth0-python/auth0/asyncify.pyi @@ -1,6 +1,6 @@ -from auth0.authentication import Users as Users -from auth0.authentication.base import AuthenticationBase as AuthenticationBase -from auth0.rest import RestClientOptions as RestClientOptions -from auth0.rest_async import AsyncRestClient as AsyncRestClient +from typing import TypeVar -def asyncify(cls): ... +_T = TypeVar("_T") + +# See note in stubs/auth0-python/@tests/stubtest_allowlist.txt about _async methods +def asyncify(cls: type[_T]) -> type[_T]: ... diff --git a/stubs/auth0-python/auth0/authentication/__init__.pyi b/stubs/auth0-python/auth0/authentication/__init__.pyi index d6263712af40..24384af11585 100644 --- a/stubs/auth0-python/auth0/authentication/__init__.pyi +++ b/stubs/auth0-python/auth0/authentication/__init__.pyi @@ -1,10 +1,10 @@ -from .database import Database as Database -from .delegated import Delegated as Delegated -from .enterprise import Enterprise as Enterprise -from .get_token import GetToken as GetToken -from .passwordless import Passwordless as Passwordless -from .revoke_token import RevokeToken as RevokeToken -from .social import Social as Social -from .users import Users as Users +from .database import Database +from .delegated import Delegated +from .enterprise import Enterprise +from .get_token import GetToken +from .passwordless import Passwordless +from .revoke_token import RevokeToken +from .social import Social +from .users import Users __all__ = ("Database", "Delegated", "Enterprise", "GetToken", "Passwordless", "RevokeToken", "Social", "Users") diff --git a/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi b/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi index 3880c4a6c7e2..27064c21b5f2 100644 --- a/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi +++ b/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi @@ -1,10 +1,4 @@ -from .. import TokenValidationError as TokenValidationError -from ..rest_async import AsyncRestClient as AsyncRestClient -from .token_verifier import ( - AsymmetricSignatureVerifier as AsymmetricSignatureVerifier, - JwksFetcher as JwksFetcher, - TokenVerifier as TokenVerifier, -) +from .token_verifier import AsymmetricSignatureVerifier, JwksFetcher, TokenVerifier class AsyncAsymmetricSignatureVerifier(AsymmetricSignatureVerifier): def __init__(self, jwks_url: str, algorithm: str = "RS256") -> None: ... 
diff --git a/stubs/auth0-python/auth0/authentication/back_channel_login.pyi b/stubs/auth0-python/auth0/authentication/back_channel_login.pyi index 7e45855fa264..e59412243dc7 100644 --- a/stubs/auth0-python/auth0/authentication/back_channel_login.pyi +++ b/stubs/auth0-python/auth0/authentication/back_channel_login.pyi @@ -1,4 +1,4 @@ -from .base import AuthenticationBase as AuthenticationBase +from .base import AuthenticationBase class BackChannelLogin(AuthenticationBase): def back_channel_login(self, binding_message: str, login_hint: str, scope: str, **kwargs): ... diff --git a/stubs/auth0-python/auth0/authentication/base.pyi b/stubs/auth0-python/auth0/authentication/base.pyi index 30c6021462cf..ca603c1dabb3 100644 --- a/stubs/auth0-python/auth0/authentication/base.pyi +++ b/stubs/auth0-python/auth0/authentication/base.pyi @@ -1,10 +1,8 @@ from _typeshed import Incomplete from typing import Final -from auth0.rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from auth0.types import RequestData as RequestData - -from .client_authentication import add_client_authentication as add_client_authentication +from auth0.rest import RestClient +from auth0.types import RequestData UNKNOWN_ERROR: Final[str] diff --git a/stubs/auth0-python/auth0/authentication/database.pyi b/stubs/auth0-python/auth0/authentication/database.pyi index e08752c27897..f5ed2a558afc 100644 --- a/stubs/auth0-python/auth0/authentication/database.pyi +++ b/stubs/auth0-python/auth0/authentication/database.pyi @@ -1,6 +1,6 @@ from _typeshed import Incomplete -from .base import AuthenticationBase as AuthenticationBase +from .base import AuthenticationBase class Database(AuthenticationBase): def signup( diff --git a/stubs/auth0-python/auth0/authentication/delegated.pyi b/stubs/auth0-python/auth0/authentication/delegated.pyi index cedc50b05e6d..5be5035f3ac9 100644 --- a/stubs/auth0-python/auth0/authentication/delegated.pyi +++ b/stubs/auth0-python/auth0/authentication/delegated.pyi @@ -1,4 +1,4 @@ -from .base import AuthenticationBase as AuthenticationBase +from .base import AuthenticationBase class Delegated(AuthenticationBase): def get_token( diff --git a/stubs/auth0-python/auth0/authentication/enterprise.pyi b/stubs/auth0-python/auth0/authentication/enterprise.pyi index a438ec9a0fad..0b205ebdb83f 100644 --- a/stubs/auth0-python/auth0/authentication/enterprise.pyi +++ b/stubs/auth0-python/auth0/authentication/enterprise.pyi @@ -1,4 +1,4 @@ -from .base import AuthenticationBase as AuthenticationBase +from .base import AuthenticationBase class Enterprise(AuthenticationBase): def saml_metadata(self): ... diff --git a/stubs/auth0-python/auth0/authentication/get_token.pyi b/stubs/auth0-python/auth0/authentication/get_token.pyi index 02590cf34a17..5308543ca245 100644 --- a/stubs/auth0-python/auth0/authentication/get_token.pyi +++ b/stubs/auth0-python/auth0/authentication/get_token.pyi @@ -1,4 +1,4 @@ -from .base import AuthenticationBase as AuthenticationBase +from .base import AuthenticationBase class GetToken(AuthenticationBase): def authorization_code(self, code: str, redirect_uri: str | None, grant_type: str = "authorization_code"): ... 
diff --git a/stubs/auth0-python/auth0/authentication/passwordless.pyi b/stubs/auth0-python/auth0/authentication/passwordless.pyi index 841c6f4fcc9c..aac13339a26c 100644 --- a/stubs/auth0-python/auth0/authentication/passwordless.pyi +++ b/stubs/auth0-python/auth0/authentication/passwordless.pyi @@ -1,4 +1,4 @@ -from .base import AuthenticationBase as AuthenticationBase +from .base import AuthenticationBase class Passwordless(AuthenticationBase): def email(self, email: str, send: str = "link", auth_params: dict[str, str] | None = None): ... diff --git a/stubs/auth0-python/auth0/authentication/pushed_authorization_requests.pyi b/stubs/auth0-python/auth0/authentication/pushed_authorization_requests.pyi index 67bdc0074e35..6d0f0193c2ae 100644 --- a/stubs/auth0-python/auth0/authentication/pushed_authorization_requests.pyi +++ b/stubs/auth0-python/auth0/authentication/pushed_authorization_requests.pyi @@ -1,4 +1,4 @@ -from .base import AuthenticationBase as AuthenticationBase +from .base import AuthenticationBase class PushedAuthorizationRequests(AuthenticationBase): def pushed_authorization_request(self, response_type: str, redirect_uri: str, **kwargs): ... diff --git a/stubs/auth0-python/auth0/authentication/revoke_token.pyi b/stubs/auth0-python/auth0/authentication/revoke_token.pyi index 7190be6c6535..8a26618fdbe7 100644 --- a/stubs/auth0-python/auth0/authentication/revoke_token.pyi +++ b/stubs/auth0-python/auth0/authentication/revoke_token.pyi @@ -1,4 +1,4 @@ -from .base import AuthenticationBase as AuthenticationBase +from .base import AuthenticationBase class RevokeToken(AuthenticationBase): def revoke_refresh_token(self, token: str): ... diff --git a/stubs/auth0-python/auth0/authentication/social.pyi b/stubs/auth0-python/auth0/authentication/social.pyi index b63c702fd934..c17a225c507b 100644 --- a/stubs/auth0-python/auth0/authentication/social.pyi +++ b/stubs/auth0-python/auth0/authentication/social.pyi @@ -1,4 +1,4 @@ -from .base import AuthenticationBase as AuthenticationBase +from .base import AuthenticationBase class Social(AuthenticationBase): def login(self, access_token: str, connection: str, scope: str = "openid"): ... diff --git a/stubs/auth0-python/auth0/authentication/token_verifier.pyi b/stubs/auth0-python/auth0/authentication/token_verifier.pyi index 171e55501f9b..9cec21490065 100644 --- a/stubs/auth0-python/auth0/authentication/token_verifier.pyi +++ b/stubs/auth0-python/auth0/authentication/token_verifier.pyi @@ -1,8 +1,6 @@ from _typeshed import Incomplete from typing import ClassVar -from auth0.exceptions import TokenValidationError as TokenValidationError - class SignatureVerifier: DISABLE_JWT_CHECKS: ClassVar[dict[str, bool]] def __init__(self, algorithm: str) -> None: ... 
diff --git a/stubs/auth0-python/auth0/authentication/users.pyi b/stubs/auth0-python/auth0/authentication/users.pyi index c986cbc2fcae..3484f5f98e78 100644 --- a/stubs/auth0-python/auth0/authentication/users.pyi +++ b/stubs/auth0-python/auth0/authentication/users.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from auth0.rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from auth0.types import TimeoutType as TimeoutType +from auth0.rest import RestClient +from auth0.types import TimeoutType class Users: domain: str diff --git a/stubs/auth0-python/auth0/management/__init__.pyi b/stubs/auth0-python/auth0/management/__init__.pyi index 37f4aa7c2067..942a94fa070a 100644 --- a/stubs/auth0-python/auth0/management/__init__.pyi +++ b/stubs/auth0-python/auth0/management/__init__.pyi @@ -1,33 +1,33 @@ -from .actions import Actions as Actions -from .attack_protection import AttackProtection as AttackProtection -from .auth0 import Auth0 as Auth0 -from .blacklists import Blacklists as Blacklists -from .branding import Branding as Branding -from .client_credentials import ClientCredentials as ClientCredentials -from .client_grants import ClientGrants as ClientGrants -from .clients import Clients as Clients -from .connections import Connections as Connections -from .custom_domains import CustomDomains as CustomDomains -from .device_credentials import DeviceCredentials as DeviceCredentials -from .email_templates import EmailTemplates as EmailTemplates -from .emails import Emails as Emails -from .grants import Grants as Grants -from .guardian import Guardian as Guardian -from .hooks import Hooks as Hooks -from .jobs import Jobs as Jobs -from .log_streams import LogStreams as LogStreams -from .logs import Logs as Logs -from .organizations import Organizations as Organizations -from .resource_servers import ResourceServers as ResourceServers -from .roles import Roles as Roles -from .rules import Rules as Rules -from .rules_configs import RulesConfigs as RulesConfigs -from .stats import Stats as Stats -from .tenants import Tenants as Tenants -from .tickets import Tickets as Tickets -from .user_blocks import UserBlocks as UserBlocks -from .users import Users as Users -from .users_by_email import UsersByEmail as UsersByEmail +from .actions import Actions +from .attack_protection import AttackProtection +from .auth0 import Auth0 +from .blacklists import Blacklists +from .branding import Branding +from .client_credentials import ClientCredentials +from .client_grants import ClientGrants +from .clients import Clients +from .connections import Connections +from .custom_domains import CustomDomains +from .device_credentials import DeviceCredentials +from .email_templates import EmailTemplates +from .emails import Emails +from .grants import Grants +from .guardian import Guardian +from .hooks import Hooks +from .jobs import Jobs +from .log_streams import LogStreams +from .logs import Logs +from .organizations import Organizations +from .resource_servers import ResourceServers +from .roles import Roles +from .rules import Rules +from .rules_configs import RulesConfigs +from .stats import Stats +from .tenants import Tenants +from .tickets import Tickets +from .user_blocks import UserBlocks +from .users import Users +from .users_by_email import UsersByEmail __all__ = ( "Auth0", diff --git a/stubs/auth0-python/auth0/management/actions.pyi b/stubs/auth0-python/auth0/management/actions.pyi index bce092dff25e..8c8179c20c82 100644 --- a/stubs/auth0-python/auth0/management/actions.pyi +++ 
b/stubs/auth0-python/auth0/management/actions.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Actions: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/async_auth0.pyi b/stubs/auth0-python/auth0/management/async_auth0.pyi index 6b3f78886ca1..5af87901a440 100644 --- a/stubs/auth0-python/auth0/management/async_auth0.pyi +++ b/stubs/auth0-python/auth0/management/async_auth0.pyi @@ -1,10 +1,7 @@ from types import TracebackType from typing_extensions import Self -from auth0.rest import RestClientOptions as RestClientOptions - -from ..asyncify import asyncify as asyncify -from .auth0 import Auth0 as Auth0 +from auth0.rest import RestClientOptions class AsyncAuth0: def __init__(self, domain: str, token: str, rest_options: RestClientOptions | None = None) -> None: ... diff --git a/stubs/auth0-python/auth0/management/attack_protection.pyi b/stubs/auth0-python/auth0/management/attack_protection.pyi index d84ecc180939..d87701e522c7 100644 --- a/stubs/auth0-python/auth0/management/attack_protection.pyi +++ b/stubs/auth0-python/auth0/management/attack_protection.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class AttackProtection: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/auth0.pyi b/stubs/auth0-python/auth0/management/auth0.pyi index 14f7e64caa38..f3d7d22ddb26 100644 --- a/stubs/auth0-python/auth0/management/auth0.pyi +++ b/stubs/auth0-python/auth0/management/auth0.pyi @@ -1,67 +1,65 @@ -from _typeshed import Incomplete +from auth0.rest import RestClientOptions -from auth0.rest import RestClientOptions as RestClientOptions - -from .actions import Actions as Actions -from .attack_protection import AttackProtection as AttackProtection -from .blacklists import Blacklists as Blacklists -from .branding import Branding as Branding -from .client_credentials import ClientCredentials as ClientCredentials -from .client_grants import ClientGrants as ClientGrants -from .clients import Clients as Clients -from .connections import Connections as Connections -from .custom_domains import CustomDomains as CustomDomains -from .device_credentials import DeviceCredentials as DeviceCredentials -from .email_templates import EmailTemplates as EmailTemplates -from .emails import Emails as Emails -from .grants import Grants as Grants -from .guardian import Guardian as Guardian -from .hooks import Hooks as Hooks -from .jobs import Jobs as Jobs -from .log_streams import LogStreams as LogStreams -from .logs import Logs as Logs -from .organizations import Organizations as Organizations -from .prompts import Prompts as Prompts -from .resource_servers import ResourceServers as ResourceServers -from .roles import Roles as Roles -from .rules import Rules as Rules -from .rules_configs import RulesConfigs as RulesConfigs -from .stats import Stats as Stats -from .tenants import Tenants as Tenants -from .tickets import Tickets as Tickets -from .user_blocks import UserBlocks as UserBlocks -from .users import Users as Users -from .users_by_email import UsersByEmail as UsersByEmail +from .actions import Actions +from .attack_protection import AttackProtection +from .blacklists 
import Blacklists +from .branding import Branding +from .client_credentials import ClientCredentials +from .client_grants import ClientGrants +from .clients import Clients +from .connections import Connections +from .custom_domains import CustomDomains +from .device_credentials import DeviceCredentials +from .email_templates import EmailTemplates +from .emails import Emails +from .grants import Grants +from .guardian import Guardian +from .hooks import Hooks +from .jobs import Jobs +from .log_streams import LogStreams +from .logs import Logs +from .organizations import Organizations +from .prompts import Prompts +from .resource_servers import ResourceServers +from .roles import Roles +from .rules import Rules +from .rules_configs import RulesConfigs +from .stats import Stats +from .tenants import Tenants +from .tickets import Tickets +from .user_blocks import UserBlocks +from .users import Users +from .users_by_email import UsersByEmail class Auth0: - actions: Incomplete - attack_protection: Incomplete - blacklists: Incomplete - branding: Incomplete - client_credentials: Incomplete - client_grants: Incomplete - clients: Incomplete - connections: Incomplete - custom_domains: Incomplete - device_credentials: Incomplete - email_templates: Incomplete - emails: Incomplete - grants: Incomplete - guardian: Incomplete - hooks: Incomplete - jobs: Incomplete - log_streams: Incomplete - logs: Incomplete - organizations: Incomplete - prompts: Incomplete - resource_servers: Incomplete - roles: Incomplete - rules_configs: Incomplete - rules: Incomplete - stats: Incomplete - tenants: Incomplete - tickets: Incomplete - user_blocks: Incomplete - users_by_email: Incomplete - users: Incomplete + actions: Actions + attack_protection: AttackProtection + blacklists: Blacklists + branding: Branding + client_credentials: ClientCredentials + client_grants: ClientGrants + clients: Clients + connections: Connections + custom_domains: CustomDomains + device_credentials: DeviceCredentials + email_templates: EmailTemplates + emails: Emails + grants: Grants + guardian: Guardian + hooks: Hooks + jobs: Jobs + log_streams: LogStreams + logs: Logs + organizations: Organizations + prompts: Prompts + resource_servers: ResourceServers + roles: Roles + rules_configs: RulesConfigs + rules: Rules + stats: Stats + tenants: Tenants + tickets: Tickets + user_blocks: UserBlocks + users_by_email: UsersByEmail + users: Users def __init__(self, domain: str, token: str, rest_options: RestClientOptions | None = None) -> None: ... 
diff --git a/stubs/auth0-python/auth0/management/blacklists.pyi b/stubs/auth0-python/auth0/management/blacklists.pyi index 9baf22376176..b2793ec1272d 100644 --- a/stubs/auth0-python/auth0/management/blacklists.pyi +++ b/stubs/auth0-python/auth0/management/blacklists.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Blacklists: url: Incomplete diff --git a/stubs/auth0-python/auth0/management/branding.pyi b/stubs/auth0-python/auth0/management/branding.pyi index 71be39cf4b6b..8350d1a8267f 100644 --- a/stubs/auth0-python/auth0/management/branding.pyi +++ b/stubs/auth0-python/auth0/management/branding.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Branding: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/client_credentials.pyi b/stubs/auth0-python/auth0/management/client_credentials.pyi index 895f79bcc5ee..107534317423 100644 --- a/stubs/auth0-python/auth0/management/client_credentials.pyi +++ b/stubs/auth0-python/auth0/management/client_credentials.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class ClientCredentials: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/client_grants.pyi b/stubs/auth0-python/auth0/management/client_grants.pyi index bae2e974df3c..1938b8c2f29c 100644 --- a/stubs/auth0-python/auth0/management/client_grants.pyi +++ b/stubs/auth0-python/auth0/management/client_grants.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class ClientGrants: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/clients.pyi b/stubs/auth0-python/auth0/management/clients.pyi index 1144a33b3dff..21358fc59ddf 100644 --- a/stubs/auth0-python/auth0/management/clients.pyi +++ b/stubs/auth0-python/auth0/management/clients.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Clients: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/connections.pyi b/stubs/auth0-python/auth0/management/connections.pyi index a7b53124c8e6..69caf1afa4ee 100644 --- a/stubs/auth0-python/auth0/management/connections.pyi +++ b/stubs/auth0-python/auth0/management/connections.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Connections: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/custom_domains.pyi b/stubs/auth0-python/auth0/management/custom_domains.pyi index 84b0a1859cb0..e6e05a81e5cc 
100644 --- a/stubs/auth0-python/auth0/management/custom_domains.pyi +++ b/stubs/auth0-python/auth0/management/custom_domains.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class CustomDomains: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/device_credentials.pyi b/stubs/auth0-python/auth0/management/device_credentials.pyi index 3d1f8e4f3a1b..a89bfa418fe0 100644 --- a/stubs/auth0-python/auth0/management/device_credentials.pyi +++ b/stubs/auth0-python/auth0/management/device_credentials.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class DeviceCredentials: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/email_templates.pyi b/stubs/auth0-python/auth0/management/email_templates.pyi index 9a4e8919da65..17e9b9c6c11d 100644 --- a/stubs/auth0-python/auth0/management/email_templates.pyi +++ b/stubs/auth0-python/auth0/management/email_templates.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class EmailTemplates: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/emails.pyi b/stubs/auth0-python/auth0/management/emails.pyi index f681b1ea9150..24421140bd5b 100644 --- a/stubs/auth0-python/auth0/management/emails.pyi +++ b/stubs/auth0-python/auth0/management/emails.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Emails: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/grants.pyi b/stubs/auth0-python/auth0/management/grants.pyi index e7946afc5a65..7354186fd494 100644 --- a/stubs/auth0-python/auth0/management/grants.pyi +++ b/stubs/auth0-python/auth0/management/grants.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Grants: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/guardian.pyi b/stubs/auth0-python/auth0/management/guardian.pyi index 19e46a0e238e..4614fb344f4c 100644 --- a/stubs/auth0-python/auth0/management/guardian.pyi +++ b/stubs/auth0-python/auth0/management/guardian.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Guardian: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/hooks.pyi b/stubs/auth0-python/auth0/management/hooks.pyi index 37ceddff41b6..18d7f5c63e9a 100644 --- a/stubs/auth0-python/auth0/management/hooks.pyi +++ b/stubs/auth0-python/auth0/management/hooks.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest 
import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Hooks: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/jobs.pyi b/stubs/auth0-python/auth0/management/jobs.pyi index 7ac7699525ba..bd55f89399f3 100644 --- a/stubs/auth0-python/auth0/management/jobs.pyi +++ b/stubs/auth0-python/auth0/management/jobs.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Jobs: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/log_streams.pyi b/stubs/auth0-python/auth0/management/log_streams.pyi index 85dcd922690a..9990a570ee3a 100644 --- a/stubs/auth0-python/auth0/management/log_streams.pyi +++ b/stubs/auth0-python/auth0/management/log_streams.pyi @@ -1,8 +1,8 @@ from _typeshed import Incomplete from builtins import list as _list -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class LogStreams: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/logs.pyi b/stubs/auth0-python/auth0/management/logs.pyi index a5ac9e4c74b7..800778d2dbf5 100644 --- a/stubs/auth0-python/auth0/management/logs.pyi +++ b/stubs/auth0-python/auth0/management/logs.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Logs: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/organizations.pyi b/stubs/auth0-python/auth0/management/organizations.pyi index 9dc1205b3e43..eb750ff79fd0 100644 --- a/stubs/auth0-python/auth0/management/organizations.pyi +++ b/stubs/auth0-python/auth0/management/organizations.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Organizations: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/prompts.pyi b/stubs/auth0-python/auth0/management/prompts.pyi index 37376d758e3c..5b11913678f4 100644 --- a/stubs/auth0-python/auth0/management/prompts.pyi +++ b/stubs/auth0-python/auth0/management/prompts.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Prompts: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/resource_servers.pyi b/stubs/auth0-python/auth0/management/resource_servers.pyi index 8e6ca07b0b9d..cb8e175b34dd 100644 --- a/stubs/auth0-python/auth0/management/resource_servers.pyi +++ b/stubs/auth0-python/auth0/management/resource_servers.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType 
class ResourceServers: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/roles.pyi b/stubs/auth0-python/auth0/management/roles.pyi index 1cb8b1ff9b34..5c288c09a5da 100644 --- a/stubs/auth0-python/auth0/management/roles.pyi +++ b/stubs/auth0-python/auth0/management/roles.pyi @@ -1,8 +1,8 @@ from _typeshed import Incomplete from builtins import list as _list -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Roles: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/rules.pyi b/stubs/auth0-python/auth0/management/rules.pyi index 1eac99eb60f7..6cee7f6e151d 100644 --- a/stubs/auth0-python/auth0/management/rules.pyi +++ b/stubs/auth0-python/auth0/management/rules.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Rules: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/rules_configs.pyi b/stubs/auth0-python/auth0/management/rules_configs.pyi index f276a18d9a36..5f2f361d5f46 100644 --- a/stubs/auth0-python/auth0/management/rules_configs.pyi +++ b/stubs/auth0-python/auth0/management/rules_configs.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class RulesConfigs: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/stats.pyi b/stubs/auth0-python/auth0/management/stats.pyi index f3128be12eaf..3cff2985672c 100644 --- a/stubs/auth0-python/auth0/management/stats.pyi +++ b/stubs/auth0-python/auth0/management/stats.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Stats: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/tenants.pyi b/stubs/auth0-python/auth0/management/tenants.pyi index 12a4df7ad40a..3df24bf63374 100644 --- a/stubs/auth0-python/auth0/management/tenants.pyi +++ b/stubs/auth0-python/auth0/management/tenants.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Tenants: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/tickets.pyi b/stubs/auth0-python/auth0/management/tickets.pyi index 236596687f29..4c0dfaf2c879 100644 --- a/stubs/auth0-python/auth0/management/tickets.pyi +++ b/stubs/auth0-python/auth0/management/tickets.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Tickets: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/user_blocks.pyi b/stubs/auth0-python/auth0/management/user_blocks.pyi index 7417dd514e59..f6dbb555097f 100644 --- 
a/stubs/auth0-python/auth0/management/user_blocks.pyi +++ b/stubs/auth0-python/auth0/management/user_blocks.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class UserBlocks: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/users.pyi b/stubs/auth0-python/auth0/management/users.pyi index e2e159b5755f..2a744f2db6fe 100644 --- a/stubs/auth0-python/auth0/management/users.pyi +++ b/stubs/auth0-python/auth0/management/users.pyi @@ -1,8 +1,8 @@ from _typeshed import Incomplete from builtins import list as _list -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class Users: domain: Incomplete diff --git a/stubs/auth0-python/auth0/management/users_by_email.pyi b/stubs/auth0-python/auth0/management/users_by_email.pyi index 177fe7b092bf..34b166a046cb 100644 --- a/stubs/auth0-python/auth0/management/users_by_email.pyi +++ b/stubs/auth0-python/auth0/management/users_by_email.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete -from ..rest import RestClient as RestClient, RestClientOptions as RestClientOptions -from ..types import TimeoutType as TimeoutType +from ..rest import RestClientOptions +from ..types import TimeoutType class UsersByEmail: domain: Incomplete diff --git a/stubs/auth0-python/auth0/rest.pyi b/stubs/auth0-python/auth0/rest.pyi index 5b12d48e7713..474716ed553b 100644 --- a/stubs/auth0-python/auth0/rest.pyi +++ b/stubs/auth0-python/auth0/rest.pyi @@ -3,9 +3,8 @@ from collections.abc import Mapping from typing import Final import requests -from auth0.exceptions import Auth0Error as Auth0Error, RateLimitError as RateLimitError -from auth0.rest_async import RequestsResponse as RequestsResponse -from auth0.types import RequestData as RequestData, TimeoutType as TimeoutType +from auth0.rest_async import RequestsResponse +from auth0.types import RequestData, TimeoutType UNKNOWN_ERROR: Final[str] diff --git a/stubs/auth0-python/auth0/rest_async.pyi b/stubs/auth0-python/auth0/rest_async.pyi index fec31cd6c515..e3f5c5be615b 100644 --- a/stubs/auth0-python/auth0/rest_async.pyi +++ b/stubs/auth0-python/auth0/rest_async.pyi @@ -1,15 +1,8 @@ from _typeshed import Incomplete -from auth0.exceptions import RateLimitError as RateLimitError -from auth0.types import RequestData as RequestData +from auth0.types import RequestData -from .rest import ( - EmptyResponse as EmptyResponse, - JsonResponse as JsonResponse, - PlainResponse as PlainResponse, - Response as Response, - RestClient as RestClient, -) +from .rest import RestClient class AsyncRestClient(RestClient): timeout: Incomplete From aee949ed8b01dfd9941a6e974e0a6f8ec5898ae2 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 15 Apr 2025 08:25:33 -0400 Subject: [PATCH 226/388] auth0-python solve stubtest_allowlist entries (#13827) --- stubs/auth0-python/@tests/stubtest_allowlist.txt | 8 ++++---- .../auth0/authentication/async_token_verifier.pyi | 5 +++++ stubs/auth0-python/auth0/rest_async.pyi | 4 +++- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/stubs/auth0-python/@tests/stubtest_allowlist.txt b/stubs/auth0-python/@tests/stubtest_allowlist.txt index e83dc7be660e..374bc1d561b6 100644 --- a/stubs/auth0-python/@tests/stubtest_allowlist.txt +++ 
b/stubs/auth0-python/@tests/stubtest_allowlist.txt @@ -1,10 +1,10 @@ # Omit tests auth0\.test.* -# Omit _async functions because they aren't present in the code +# Omit _async functions because they aren't present at runtime +# The way these stubs are currently implemented is that we pretend all classes have async methods +# Even though in reality, users need to call `auth0.asyncify.asyncify` to generate async subclasses auth0\..*_async # Inconsistently implemented, ommitted -auth0.management.Auth0\..* -auth0.rest_async.AsyncRestClient.file_post -auth0.authentication.async_token_verifier.AsyncTokenVerifier.verify +auth0\.management\.Auth0\..* diff --git a/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi b/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi index 27064c21b5f2..5b18760b06b2 100644 --- a/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi +++ b/stubs/auth0-python/auth0/authentication/async_token_verifier.pyi @@ -1,3 +1,5 @@ +from _typeshed import Incomplete + from .token_verifier import AsymmetricSignatureVerifier, JwksFetcher, TokenVerifier class AsyncAsymmetricSignatureVerifier(AsymmetricSignatureVerifier): @@ -17,3 +19,6 @@ class AsyncTokenVerifier(TokenVerifier): self, signature_verifier: AsyncAsymmetricSignatureVerifier, issuer: str, audience: str, leeway: int = 0 ) -> None: ... def set_session(self, session) -> None: ... + async def verify( # type: ignore[override] # Differs from supertype + self, token: str, nonce: str | None = None, max_age: int | None = None, organization: str | None = None + ) -> dict[str, Incomplete]: ... diff --git a/stubs/auth0-python/auth0/rest_async.pyi b/stubs/auth0-python/auth0/rest_async.pyi index e3f5c5be615b..56dc43e76804 100644 --- a/stubs/auth0-python/auth0/rest_async.pyi +++ b/stubs/auth0-python/auth0/rest_async.pyi @@ -9,7 +9,9 @@ class AsyncRestClient(RestClient): def set_session(self, session) -> None: ... async def get(self, url: str, params: dict[str, Incomplete] | None = None, headers: dict[str, str] | None = None): ... async def post(self, url: str, data: RequestData | None = None, headers: dict[str, str] | None = None): ... - async def file_post(self, *args, **kwargs): ... + async def file_post( # type: ignore[override] # Differs from supertype + self, url: str, data: dict[str, Incomplete], files: dict[str, Incomplete] + ): ... async def patch(self, url: str, data: RequestData | None = None): ... async def put(self, url: str, data: RequestData | None = None): ... async def delete(self, url: str, params: dict[str, Incomplete] | None = None, data: RequestData | None = None): ... From 02470aad007f7ff6d99138fa8a2a85a34ba9dae9 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 15 Apr 2025 18:09:33 +0200 Subject: [PATCH 227/388] Document visit method return type (#13831) --- stdlib/ast.pyi | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/stdlib/ast.pyi b/stdlib/ast.pyi index 90c6d2ff0e68..1a3d3e97d11e 100644 --- a/stdlib/ast.pyi +++ b/stdlib/ast.pyi @@ -1893,8 +1893,12 @@ if sys.version_info >= (3, 14): def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: ... class NodeVisitor: + # All visit methods below can be overwritten by subclasses and return an + # arbitrary value, which is passed to the caller. def visit(self, node: AST) -> Any: ... def generic_visit(self, node: AST) -> Any: ... + # The following visit methods are not defined on NodeVisitor, but can + # be implemented by subclasses and are called during a visit if defined. 
def visit_Module(self, node: Module) -> Any: ... def visit_Interactive(self, node: Interactive) -> Any: ... def visit_Expression(self, node: Expression) -> Any: ... From 0411f5c1762f37323652c6dc32d8e4a007f81a52 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 15 Apr 2025 23:08:24 +0200 Subject: [PATCH 228/388] Update authlib to 1.5.* (#13540) --- stubs/Authlib/@tests/stubtest_allowlist.txt | 5 +--- stubs/Authlib/METADATA.toml | 2 +- stubs/Authlib/authlib/consts.pyi | 2 +- stubs/Authlib/authlib/jose/rfc7519/claims.pyi | 11 +++---- .../Authlib/authlib/oauth2/rfc7523/client.pyi | 13 +++++---- .../Authlib/authlib/oauth2/rfc7591/claims.pyi | 5 +++- .../authlib/oauth2/rfc7591/endpoint.pyi | 12 ++++---- .../authlib/oauth2/rfc7592/endpoint.pyi | 10 +++---- .../Authlib/authlib/oauth2/rfc9068/claims.pyi | 1 - .../authlib/oauth2/rfc9207/__init__.pyi | 3 ++ .../authlib/oauth2/rfc9207/parameter.pyi | 4 +++ stubs/Authlib/authlib/oidc/core/claims.pyi | 3 -- .../Authlib/authlib/oidc/core/grants/util.pyi | 1 + .../authlib/oidc/registration/__init__.pyi | 3 ++ .../authlib/oidc/registration/claims.pyi | 29 +++++++++++++++++++ 15 files changed, 69 insertions(+), 35 deletions(-) create mode 100644 stubs/Authlib/authlib/oauth2/rfc9207/__init__.pyi create mode 100644 stubs/Authlib/authlib/oauth2/rfc9207/parameter.pyi create mode 100644 stubs/Authlib/authlib/oidc/registration/__init__.pyi create mode 100644 stubs/Authlib/authlib/oidc/registration/claims.pyi diff --git a/stubs/Authlib/@tests/stubtest_allowlist.txt b/stubs/Authlib/@tests/stubtest_allowlist.txt index a28af110d1af..e23991b2554b 100644 --- a/stubs/Authlib/@tests/stubtest_allowlist.txt +++ b/stubs/Authlib/@tests/stubtest_allowlist.txt @@ -1,3 +1,4 @@ +# TODO: check these entries authlib.jose.ECKey.PRIVATE_KEY_CLS authlib.jose.ECKey.PUBLIC_KEY_CLS authlib.jose.RSAKey.PRIVATE_KEY_CLS @@ -29,8 +30,4 @@ authlib.oauth2.rfc7521.AssertionClient.oauth_error_class authlib.oauth2.rfc7521.client.AssertionClient.oauth_error_class authlib.oauth2.rfc7523.JWTBearerTokenValidator.token_cls authlib.oauth2.rfc7523.validator.JWTBearerTokenValidator.token_cls -authlib.oauth2.rfc7591.ClientRegistrationEndpoint.claims_class -authlib.oauth2.rfc7591.endpoint.ClientRegistrationEndpoint.claims_class -authlib.oauth2.rfc7592.ClientConfigurationEndpoint.claims_class -authlib.oauth2.rfc7592.endpoint.ClientConfigurationEndpoint.claims_class authlib.oauth2.rfc9068.claims.JWTAccessTokenClaims.validate diff --git a/stubs/Authlib/METADATA.toml b/stubs/Authlib/METADATA.toml index d45cede42927..006051a0d7d0 100644 --- a/stubs/Authlib/METADATA.toml +++ b/stubs/Authlib/METADATA.toml @@ -1,4 +1,4 @@ -version = "1.4.*" +version = "1.5.0" upstream_repository = "https://github.com/lepture/authlib" requires = ["cryptography"] partial_stub = true diff --git a/stubs/Authlib/authlib/consts.pyi b/stubs/Authlib/authlib/consts.pyi index 6ec14d37c50c..f8ec0e9e5e3b 100644 --- a/stubs/Authlib/authlib/consts.pyi +++ b/stubs/Authlib/authlib/consts.pyi @@ -1,6 +1,6 @@ from typing import Final -name: Final[str] +name: Final = "Authlib" version: Final[str] author: Final[str] homepage: Final[str] diff --git a/stubs/Authlib/authlib/jose/rfc7519/claims.pyi b/stubs/Authlib/authlib/jose/rfc7519/claims.pyi index 63e206f11115..fc77d9392b4b 100644 --- a/stubs/Authlib/authlib/jose/rfc7519/claims.pyi +++ b/stubs/Authlib/authlib/jose/rfc7519/claims.pyi @@ -1,16 +1,17 @@ from _typeshed import Incomplete +from typing import Any, ClassVar -class BaseClaims(dict[str, object]): - REGISTERED_CLAIMS: list[str] +class 
BaseClaims(dict[str, Any]): # dict values are key-dependent + REGISTERED_CLAIMS: ClassVar[list[str]] header: Incomplete options: Incomplete params: Incomplete def __init__(self, payload, header, options: Incomplete | None = None, params: Incomplete | None = None) -> None: ... - def __getattr__(self, key): ... - def get_registered_claims(self): ... + # TODO: Adds an attribute for each key in REGISTERED_CLAIMS + def __getattr__(self, key: str): ... + def get_registered_claims(self) -> dict[str, Incomplete]: ... class JWTClaims(BaseClaims): - REGISTERED_CLAIMS: list[str] def validate(self, now: Incomplete | None = None, leeway: int = 0) -> None: ... def validate_iss(self) -> None: ... def validate_sub(self) -> None: ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7523/client.pyi b/stubs/Authlib/authlib/oauth2/rfc7523/client.pyi index 7e5d18b6c823..50b6ae797299 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7523/client.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7523/client.pyi @@ -1,12 +1,13 @@ -from _typeshed import Incomplete +from typing import Final -ASSERTION_TYPE: str +ASSERTION_TYPE: Final[str] class JWTBearerClientAssertion: - CLIENT_ASSERTION_TYPE = ASSERTION_TYPE - CLIENT_AUTH_METHOD: str - token_url: Incomplete - def __init__(self, token_url, validate_jti: bool = True) -> None: ... + CLIENT_ASSERTION_TYPE: Final[str] + CLIENT_AUTH_METHOD: Final[str] + token_url: str + leeway: int + def __init__(self, token_url: str, validate_jti: bool = True, leeway: int = 60) -> None: ... def __call__(self, query_client, request): ... def create_claims_options(self): ... def process_assertion_claims(self, assertion, resolve_key): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7591/claims.pyi b/stubs/Authlib/authlib/oauth2/rfc7591/claims.pyi index 8145438834dd..e79d2fddca8f 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7591/claims.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7591/claims.pyi @@ -1,9 +1,10 @@ from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any from authlib.jose import BaseClaims class ClientMetadataClaims(BaseClaims): - REGISTERED_CLAIMS: Incomplete def validate(self) -> None: ... def validate_redirect_uris(self) -> None: ... def validate_token_endpoint_auth_method(self) -> None: ... @@ -20,3 +21,5 @@ class ClientMetadataClaims(BaseClaims): def validate_jwks(self) -> None: ... def validate_software_id(self) -> None: ... def validate_software_version(self) -> None: ... + @classmethod + def get_claims_options(cls, metadata: Mapping[str, Incomplete]) -> dict[str, Any]: ... # dict values are key-dependent diff --git a/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi b/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi index 36e0bf6a2c70..c0af6f0c57ea 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi @@ -1,18 +1,16 @@ from _typeshed import Incomplete - -from authlib.oauth2.rfc7591 import ClientMetadataClaims +from typing import Final class ClientRegistrationEndpoint: - ENDPOINT_NAME: str - claims_class = ClientMetadataClaims + ENDPOINT_NAME: Final = "client_registration" software_statement_alg_values_supported: Incomplete server: Incomplete - def __init__(self, server) -> None: ... - def __call__(self, request): ... + claims_classes: list[type[Incomplete]] + def __init__(self, server: Incomplete | None = None, claims_classes: list[type[Incomplete]] | None = None) -> None: ... + def __call__(self, request) -> dict[Incomplete, Incomplete]: ... 
def create_registration_response(self, request): ... def extract_client_metadata(self, request): ... def extract_software_statement(self, software_statement, request): ... - def get_claims_options(self): ... def generate_client_info(self): ... def generate_client_registration_info(self, client, request) -> None: ... def create_endpoint_request(self, request): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi b/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi index e5cf2ac32311..9c393a221416 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi @@ -1,12 +1,11 @@ from _typeshed import Incomplete - -from authlib.oauth2.rfc7591 import ClientMetadataClaims +from typing import Final class ClientConfigurationEndpoint: - ENDPOINT_NAME: str - claims_class = ClientMetadataClaims + ENDPOINT_NAME: Final = "client_configuration" server: Incomplete - def __init__(self, server) -> None: ... + claims_classes: list[type[Incomplete]] + def __init__(self, server: Incomplete | None = None, claims_classes: list[type[Incomplete]] | None = None) -> None: ... def __call__(self, request): ... def create_configuration_response(self, request): ... def create_endpoint_request(self, request): ... @@ -14,7 +13,6 @@ class ClientConfigurationEndpoint: def create_delete_client_response(self, client, request): ... def create_update_client_response(self, client, request): ... def extract_client_metadata(self, request): ... - def get_claims_options(self): ... def introspect_client(self, client): ... def generate_client_registration_info(self, client, request) -> None: ... def authenticate_token(self, request) -> None: ... diff --git a/stubs/Authlib/authlib/oauth2/rfc9068/claims.pyi b/stubs/Authlib/authlib/oauth2/rfc9068/claims.pyi index 405ba4c103e9..6351408041fa 100644 --- a/stubs/Authlib/authlib/oauth2/rfc9068/claims.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc9068/claims.pyi @@ -3,7 +3,6 @@ from _typeshed import Incomplete from authlib.jose import JWTClaims class JWTAccessTokenClaims(JWTClaims): - REGISTERED_CLAIMS: Incomplete def validate(self, now: Incomplete | None = None, leeway: int = 0, **kwargs) -> None: ... def validate_typ(self) -> None: ... def validate_client_id(self): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc9207/__init__.pyi b/stubs/Authlib/authlib/oauth2/rfc9207/__init__.pyi new file mode 100644 index 000000000000..f0d14fa1c1f4 --- /dev/null +++ b/stubs/Authlib/authlib/oauth2/rfc9207/__init__.pyi @@ -0,0 +1,3 @@ +from .parameter import IssuerParameter as IssuerParameter + +__all__ = ["IssuerParameter"] diff --git a/stubs/Authlib/authlib/oauth2/rfc9207/parameter.pyi b/stubs/Authlib/authlib/oauth2/rfc9207/parameter.pyi new file mode 100644 index 000000000000..8e06d4bb7b46 --- /dev/null +++ b/stubs/Authlib/authlib/oauth2/rfc9207/parameter.pyi @@ -0,0 +1,4 @@ +class IssuerParameter: + def __call__(self, grant) -> None: ... + def add_issuer_parameter(self, hook_type: str, response) -> None: ... + def get_issuer(self) -> str | None: ... 
diff --git a/stubs/Authlib/authlib/oidc/core/claims.pyi b/stubs/Authlib/authlib/oidc/core/claims.pyi index b4452f6d636d..96342e7ff69e 100644 --- a/stubs/Authlib/authlib/oidc/core/claims.pyi +++ b/stubs/Authlib/authlib/oidc/core/claims.pyi @@ -16,17 +16,14 @@ class IDToken(JWTClaims): class CodeIDToken(IDToken): RESPONSE_TYPES: Incomplete - REGISTERED_CLAIMS: Incomplete class ImplicitIDToken(IDToken): RESPONSE_TYPES: Incomplete ESSENTIAL_CLAIMS: Incomplete - REGISTERED_CLAIMS: Incomplete def validate_at_hash(self) -> None: ... class HybridIDToken(ImplicitIDToken): RESPONSE_TYPES: Incomplete - REGISTERED_CLAIMS: Incomplete def validate(self, now: Incomplete | None = None, leeway: int = 0) -> None: ... def validate_c_hash(self) -> None: ... diff --git a/stubs/Authlib/authlib/oidc/core/grants/util.pyi b/stubs/Authlib/authlib/oidc/core/grants/util.pyi index d93f15590e43..9800380f4659 100644 --- a/stubs/Authlib/authlib/oidc/core/grants/util.pyi +++ b/stubs/Authlib/authlib/oidc/core/grants/util.pyi @@ -14,5 +14,6 @@ def generate_id_token( nonce: str | None = None, auth_time: int | None = None, code: str | None = None, + kid: str | None = None, ) -> str: ... def create_response_mode_response(redirect_uri, params, response_mode): ... diff --git a/stubs/Authlib/authlib/oidc/registration/__init__.pyi b/stubs/Authlib/authlib/oidc/registration/__init__.pyi new file mode 100644 index 000000000000..e0fca8da8da3 --- /dev/null +++ b/stubs/Authlib/authlib/oidc/registration/__init__.pyi @@ -0,0 +1,3 @@ +from .claims import ClientMetadataClaims as ClientMetadataClaims + +__all__ = ["ClientMetadataClaims"] diff --git a/stubs/Authlib/authlib/oidc/registration/claims.pyi b/stubs/Authlib/authlib/oidc/registration/claims.pyi new file mode 100644 index 000000000000..f2dd67de8278 --- /dev/null +++ b/stubs/Authlib/authlib/oidc/registration/claims.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from collections.abc import Mapping + +from authlib.jose import BaseClaims + +class ClientMetadataClaims(BaseClaims): + def validate(self) -> None: ... + # The "cls" argument is called "self" in the actual implementation, + # but stubtest will not allow that. + @classmethod + def get_claims_options(cls, metadata: Mapping[str, Incomplete]) -> dict[str, Incomplete]: ... + def validate_token_endpoint_auth_signing_alg(self) -> None: ... + def validate_application_type(self) -> None: ... + def validate_sector_identifier_uri(self) -> None: ... + def validate_subject_type(self) -> None: ... + def validate_id_token_signed_response_alg(self) -> None: ... + def validate_id_token_encrypted_response_alg(self) -> None: ... + def validate_id_token_encrypted_response_enc(self) -> None: ... + def validate_userinfo_signed_response_alg(self) -> None: ... + def validate_userinfo_encrypted_response_alg(self) -> None: ... + def validate_userinfo_encrypted_response_enc(self) -> None: ... + def validate_default_max_age(self) -> None: ... + def validate_require_auth_time(self) -> None: ... + def validate_default_acr_values(self) -> None: ... + def validate_initiate_login_uri(self) -> None: ... + def validate_request_object_signing_alg(self) -> None: ... + def validate_request_object_encryption_alg(self) -> None: ... + def validate_request_object_encryption_enc(self) -> None: ... + def validate_request_uris(self) -> None: ... 
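The reworked BaseClaims stub in the Authlib patch above is typed as dict[str, Any] with a dynamic __getattr__ because registered claim names double as attributes at runtime. Below is a minimal illustrative sketch of that attribute-delegation pattern; it is not Authlib's actual implementation, and the class and claim names are invented for the example.

from typing import Any

class ClaimsSketch(dict[str, Any]):
    # Hypothetical stand-in for a claims mapping: names listed in
    # REGISTERED_CLAIMS are exposed as attributes that read from the dict.
    REGISTERED_CLAIMS = ["iss", "sub", "aud", "exp"]

    def __getattr__(self, key: str) -> Any:
        if key in self.REGISTERED_CLAIMS:
            return self.get(key)
        raise AttributeError(key)

    def get_registered_claims(self) -> dict[str, Any]:
        return {k: self[k] for k in self.REGISTERED_CLAIMS if k in self}

claims = ClaimsSketch({"iss": "https://issuer.example", "sub": "user-1"})
assert claims.iss == "https://issuer.example"  # attribute access via __getattr__
assert claims["sub"] == "user-1"               # plain dict access still works

This is why the stub can only annotate __getattr__ loosely (see its TODO comment): the set of generated attributes depends on each subclass's REGISTERED_CLAIMS.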
From d314de77f89037843acafd1153f5f669c23fd4f1 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 09:10:46 +0200 Subject: [PATCH 229/388] [stubsabot] Bump greenlet to 3.2.* (#13836) Release: https://pypi.org/pypi/greenlet/3.2.0 Homepage: https://greenlet.readthedocs.io/ Repository: https://github.com/python-greenlet/greenlet Typeshed stubs: https://github.com/python/typeshed/tree/main/stubs/greenlet Changelog: https://greenlet.readthedocs.io/en/latest/changes.html Diff: https://github.com/python-greenlet/greenlet/compare/3.1.1...3.2.0 Stubsabot analysis of the diff between the two releases: - Total lines of Python code added: 29. - Total lines of Python code deleted: 18. If stubtest fails for this PR: - Leave this PR open (as a reminder, and to prevent stubsabot from opening another PR) - Fix stubtest failures in another PR, then close this PR Note that you will need to close and re-open the PR in order to trigger CI Co-authored-by: stubsabot <> --- stubs/greenlet/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/greenlet/METADATA.toml b/stubs/greenlet/METADATA.toml index 1ba4fc2c8fe5..7b3f60b4c20c 100644 --- a/stubs/greenlet/METADATA.toml +++ b/stubs/greenlet/METADATA.toml @@ -1,2 +1,2 @@ -version = "3.1.*" +version = "3.2.*" upstream_repository = "https://github.com/python-greenlet/greenlet" From d9d546c3e12604ff105c0093f1911f8acda74e9d Mon Sep 17 00:00:00 2001 From: CatBraaain <84499939+CatBraaain@users.noreply.github.com> Date: Wed, 16 Apr 2025 18:11:32 +0900 Subject: [PATCH 230/388] [tqdm] Add return type to tqdm.asyncio.gather (#13839) --- stubs/tqdm/tqdm/asyncio.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/tqdm/tqdm/asyncio.pyi b/stubs/tqdm/tqdm/asyncio.pyi index ec11fe561c69..78161f0d4c72 100644 --- a/stubs/tqdm/tqdm/asyncio.pyi +++ b/stubs/tqdm/tqdm/asyncio.pyi @@ -81,7 +81,7 @@ class tqdm_asyncio(std_tqdm[_T]): nrows: int | None = ..., colour: str | None = ..., delay: float | None = ..., - ): ... + ) -> list[_T]: ... @overload def __init__( self, From 013f215543a9535a5b154f00d4181133dcf1c5d3 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 16 Apr 2025 05:32:19 -0400 Subject: [PATCH 231/388] Add all classes in `stubs/pyinstaller/PyInstaller/utils/win32/versioninfo.pyi` (#13833) --- .../@tests/test_cases/check_versioninfo.py | 63 +++++++++++++++++ .../PyInstaller/utils/win32/versioninfo.pyi | 68 ++++++++++++++++--- 2 files changed, 121 insertions(+), 10 deletions(-) create mode 100644 stubs/pyinstaller/@tests/test_cases/check_versioninfo.py diff --git a/stubs/pyinstaller/@tests/test_cases/check_versioninfo.py b/stubs/pyinstaller/@tests/test_cases/check_versioninfo.py new file mode 100644 index 000000000000..dc16ebe27d98 --- /dev/null +++ b/stubs/pyinstaller/@tests/test_cases/check_versioninfo.py @@ -0,0 +1,63 @@ +from PyInstaller.utils.win32.versioninfo import ( + FixedFileInfo, + StringFileInfo, + StringStruct, + StringTable, + VarFileInfo, + VarStruct, + VSVersionInfo, +) + +# Everything below this line is the content from running `pyi-grab_version python3` +# ============================================================================== + +# UTF-8 +# +# For more details about fixed file info 'ffi' see: +# http://msdn.microsoft.com/en-us/library/ms646997.aspx +VSVersionInfo( + ffi=FixedFileInfo( + # filevers and prodvers should be always a tuple with four items: (1, 2, 3, 4) + # Set not needed items to zero 0. 
+ filevers=(3, 13, 1150, 1013), + prodvers=(3, 13, 1150, 1013), + # Contains a bitmask that specifies the valid bits 'flags'r + mask=0x3F, + # Contains a bitmask that specifies the Boolean attributes of the file. + flags=0x0, + # The operating system for which this file was designed. + # 0x4 - NT and there is no need to change it. + OS=0x4, + # The general type of file. + # 0x1 - the file is an application. + fileType=0x2, + # The function of the file. + # 0x0 - the function is not defined for this fileType + subtype=0x0, + # Creation date and time stamp. + date=(0, 0), + ), + kids=[ + StringFileInfo( + [ + StringTable( + "000004b0", + [ + StringStruct("CompanyName", "Python Software Foundation"), + StringStruct("FileDescription", "Python Core"), + StringStruct("FileVersion", "3.13.1"), + StringStruct("InternalName", "Python DLL"), + StringStruct( + "LegalCopyright", + "Copyright © 2001-2024 Python Software Foundation. Copyright © 2000 BeOpen.com. Copyright © 1995-2001 CNRI. Copyright © 1991-1995 SMC.", + ), + StringStruct("OriginalFilename", "python3.dll"), + StringStruct("ProductName", "Python"), + StringStruct("ProductVersion", "3.13.1"), + ], + ) + ] + ), + VarFileInfo([VarStruct("Translation", [0, 1200])]), + ], +) diff --git a/stubs/pyinstaller/PyInstaller/utils/win32/versioninfo.pyi b/stubs/pyinstaller/PyInstaller/utils/win32/versioninfo.pyi index 6a6c89f51b68..f4af6b33c4ac 100644 --- a/stubs/pyinstaller/PyInstaller/utils/win32/versioninfo.pyi +++ b/stubs/pyinstaller/PyInstaller/utils/win32/versioninfo.pyi @@ -1,28 +1,31 @@ -from _typeshed import SliceableBuffer +from _typeshed import SliceableBuffer, Unused from collections.abc import Sequence -from typing import Literal, Protocol +from typing import Any, Protocol, type_check_only from typing_extensions import TypeAlias _FourIntSequence: TypeAlias = Sequence[int] _TwoIntSequence: TypeAlias = Sequence[int] +@type_check_only class _Kid(Protocol): def toRaw(self) -> bytes: ... def __str__(self, indent: str = "", /) -> str: ... -# Used by other types referenced in https://pyinstaller.org/en/stable/spec-files.html#spec-file-operation +# All the classes below are used in version_file_info generated by `pyi-grab_version` +# See: https://pyinstaller.org/en/stable/usage.html#capturing-windows-version-data + +# VSVersionInfo is also by other types referenced in https://pyinstaller.org/en/stable/spec-files.html#spec-file-operation class VSVersionInfo: ffi: FixedFileInfo | None kids: list[_Kid] def __init__(self, ffi: FixedFileInfo | None = None, kids: list[_Kid] | None = None) -> None: ... def fromRaw(self, data: SliceableBuffer) -> int: ... def toRaw(self) -> bytes: ... - def __eq__(self, other: object) -> bool: ... def __str__(self, indent: str = "") -> str: ... class FixedFileInfo: - sig: Literal[0xFEEF04BD] - strucVersion: Literal[0x10000] + sig: int + strucVersion: int fileVersionMS: int fileVersionLS: int productVersionMS: int @@ -36,16 +39,61 @@ class FixedFileInfo: fileDateLS: int def __init__( self, - filevers: _FourIntSequence = ..., - prodvers: _FourIntSequence = ..., + filevers: _FourIntSequence = (0, 0, 0, 0), + prodvers: _FourIntSequence = (0, 0, 0, 0), mask: int = 0x3F, flags: int = 0x0, OS: int = 0x40004, fileType: int = 0x1, subtype: int = 0x0, - date: _TwoIntSequence = ..., + date: _TwoIntSequence = (0, 0), ) -> None: ... def fromRaw(self, data: SliceableBuffer, i: int) -> int: ... def toRaw(self) -> bytes: ... - def __eq__(self, other: object) -> bool: ... def __str__(self, indent: str = "") -> str: ... 
+ +class StringFileInfo: + name: str + kids: list[_Kid] + def __init__(self, kids: list[_Kid] | None = None) -> None: ... + def fromRaw(self, sublen: Unused, vallen: Unused, name: str, data: SliceableBuffer, i: int, limit: int) -> int: ... + def toRaw(self) -> bytes: ... + def __str__(self, indent: str = "") -> str: ... + +class StringTable: + name: str + kids: list[_Kid] + def __init__(self, name: str | None = None, kids: list[_Kid] | None = None) -> None: ... + def fromRaw(self, data: SliceableBuffer, i: int, limit: int) -> int: ... + def toRaw(self) -> bytes: ... + def __str__(self, indent: str = "") -> str: ... + +class StringStruct: + name: str + val: str + def __init__(self, name: str | None = None, val: str | None = None) -> None: ... + def fromRaw(self, data: SliceableBuffer, i: int, limit: int) -> int: ... + def toRaw(self) -> bytes: ... + def __str__(self, indent: Unused = "") -> str: ... + +class VarFileInfo: + kids: list[_Kid] + def __init__(self, kids: list[_Kid] | None = None) -> None: ... + sublen: int + vallen: int + name: str + def fromRaw(self, sublen: int, vallen: int, name: str, data: SliceableBuffer, i: int, limit: int) -> int: ... + wType: int + def toRaw(self) -> bytes: ... + def __str__(self, indent: str = "") -> str: ... + +class VarStruct: + name: str + kids: list[Any] # Whatever can be passed to struct.pack + def __init__(self, name: str | None = None, kids: list[Any] | None = None) -> None: ... + def fromRaw(self, data: SliceableBuffer, i: int, limit: Unused) -> int: ... + wValueLength: int + wType: int + sublen: int + def toRaw(self) -> bytes: ... + def __str__(self, indent: Unused = "") -> str: ... From 0abf848c2ecb9099a0759b5175edab832cdd4ce4 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Wed, 16 Apr 2025 11:43:06 +0200 Subject: [PATCH 232/388] Remove obsolete type alias _CursesWindow (#13841) --- stdlib/curses/__init__.pyi | 5 ----- 1 file changed, 5 deletions(-) diff --git a/stdlib/curses/__init__.pyi b/stdlib/curses/__init__.pyi index edc64a00cd39..5c157fd7c2f6 100644 --- a/stdlib/curses/__init__.pyi +++ b/stdlib/curses/__init__.pyi @@ -23,11 +23,6 @@ COLOR_PAIRS: int def wrapper(func: Callable[Concatenate[window, _P], _T], /, *arg: _P.args, **kwds: _P.kwargs) -> _T: ... -# typeshed used the name _CursesWindow for the underlying C class before -# it was mapped to the name 'window' in 3.8. -# Kept here as a legacy alias in case any third-party code is relying on it. -_CursesWindow = window - # At runtime this class is unexposed and calls itself curses.ncurses_version. # That name would conflict with the actual curses.ncurses_version, which is # an instance of this class. 
From 35932d900689abccb370313284b4daf6b090de21 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Wed, 16 Apr 2025 11:43:31 +0200 Subject: [PATCH 233/388] Remove obsolete mentions of Python 3.8 (#13842) --- stdlib/email/_header_value_parser.pyi | 4 ++-- stdlib/email/_policybase.pyi | 6 +++--- stdlib/email/errors.pyi | 2 +- stdlib/email/policy.pyi | 6 +++--- stdlib/email/utils.pyi | 4 ++-- stdlib/sys/__init__.pyi | 6 +++--- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/stdlib/email/_header_value_parser.pyi b/stdlib/email/_header_value_parser.pyi index f4e9ca68d6a9..a8abfead9217 100644 --- a/stdlib/email/_header_value_parser.pyi +++ b/stdlib/email/_header_value_parser.pyi @@ -17,9 +17,9 @@ TOKEN_ENDS: Final[set[str]] ASPECIALS: Final[set[str]] ATTRIBUTE_ENDS: Final[set[str]] EXTENDED_ATTRIBUTE_ENDS: Final[set[str]] -# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +# Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 NLSET: Final[set[str]] -# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +# Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 SPECIALSNL: Final[set[str]] if sys.version_info >= (3, 10): diff --git a/stdlib/email/_policybase.pyi b/stdlib/email/_policybase.pyi index f5dbbd96da14..5266609e597f 100644 --- a/stdlib/email/_policybase.pyi +++ b/stdlib/email/_policybase.pyi @@ -23,7 +23,7 @@ class _PolicyBase(Generic[_MessageT]): raise_on_defect: bool mangle_from_: bool message_factory: _MessageFactory[_MessageT] | None - # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool def __init__( @@ -35,7 +35,7 @@ class _PolicyBase(Generic[_MessageT]): raise_on_defect: bool = False, mangle_from_: bool = ..., # default depends on sub-class message_factory: _MessageFactory[_MessageT] | None = None, - # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = True, ) -> None: ... def clone( @@ -47,7 +47,7 @@ class _PolicyBase(Generic[_MessageT]): raise_on_defect: bool = ..., mangle_from_: bool = ..., message_factory: _MessageFactory[_MessageT] | None = ..., - # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = ..., ) -> Self: ... def __add__(self, other: Policy) -> Self: ... diff --git a/stdlib/email/errors.pyi b/stdlib/email/errors.pyi index f105576c5ee4..b501a5866556 100644 --- a/stdlib/email/errors.pyi +++ b/stdlib/email/errors.pyi @@ -7,7 +7,7 @@ class BoundaryError(MessageParseError): ... class MultipartConversionError(MessageError, TypeError): ... class CharsetError(MessageError): ... -# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +# Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 class HeaderWriteError(MessageError): ... 
class MessageDefect(ValueError): diff --git a/stdlib/email/policy.pyi b/stdlib/email/policy.pyi index 5b145bcf2318..5005483edf86 100644 --- a/stdlib/email/policy.pyi +++ b/stdlib/email/policy.pyi @@ -24,7 +24,7 @@ class EmailPolicy(Policy[_MessageT]): raise_on_defect: bool = ..., mangle_from_: bool = ..., message_factory: None = None, - # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = ..., utf8: bool = ..., refold_source: str = ..., @@ -41,7 +41,7 @@ class EmailPolicy(Policy[_MessageT]): raise_on_defect: bool = ..., mangle_from_: bool = ..., message_factory: _MessageFactory[_MessageT] | None = ..., - # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = ..., utf8: bool = ..., refold_source: str = ..., @@ -62,7 +62,7 @@ class EmailPolicy(Policy[_MessageT]): raise_on_defect: bool = ..., mangle_from_: bool = ..., message_factory: _MessageFactory[_MessageT] | None = ..., - # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 + # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = ..., utf8: bool = ..., refold_source: str = ..., diff --git a/stdlib/email/utils.pyi b/stdlib/email/utils.pyi index dc3eecb5ef7f..efc32a7abce2 100644 --- a/stdlib/email/utils.pyi +++ b/stdlib/email/utils.pyi @@ -30,11 +30,11 @@ _PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None def quote(str: str) -> str: ... def unquote(str: str) -> str: ... -# `strict` parameter added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +# `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: ... def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: ... -# `strict` parameter added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +# `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: ... @overload def parsedate(data: None) -> None: ... diff --git a/stdlib/sys/__init__.pyi b/stdlib/sys/__init__.pyi index a2cca3509a9c..2d894674c4af 100644 --- a/stdlib/sys/__init__.pyi +++ b/stdlib/sys/__init__.pyi @@ -96,7 +96,7 @@ flags: _flags # This can be re-visited when typeshed drops support for 3.10, # at which point all supported versions will include int_max_str_digits # in all patch versions. -# 3.8 and 3.9 are 15 or 16-tuple +# 3.9 is 15 or 16-tuple # 3.10 is 16 or 17-tuple # 3.11+ is an 18-tuple. @final @@ -184,7 +184,7 @@ class _flags(_UninstantiableStructseq, tuple[int, ...]): # Whether or not this exists on lower versions of Python # may depend on which patch release you're using # (it was backported to all Python versions on 3.8+ as a security fix) - # Added in: 3.8.14, 3.9.14, 3.10.7 + # Added in: 3.9.14, 3.10.7 # and present in all versions of 3.11 and later. @property def int_max_str_digits(self) -> int: ... @@ -448,7 +448,7 @@ if sys.platform == "win32": def get_coroutine_origin_tracking_depth() -> int: ... def set_coroutine_origin_tracking_depth(depth: int) -> None: ... -# The following two functions were added in 3.11.0, 3.10.7, 3.9.14, and 3.8.14, +# The following two functions were added in 3.11.0, 3.10.7, and 3.9.14, # as part of the response to CVE-2020-10735 def set_int_max_str_digits(maxdigits: int) -> None: ... 
def get_int_max_str_digits() -> int: ... From 70e4e3f4b5b69f306468e55adfbb0cac143718bf Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Thu, 17 Apr 2025 04:54:53 +0400 Subject: [PATCH 234/388] Bump pyinstaller to 6.13.* (#13843) --- stubs/pyinstaller/METADATA.toml | 2 +- stubs/pyinstaller/PyInstaller/compat.pyi | 1 + stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/stubs/pyinstaller/METADATA.toml b/stubs/pyinstaller/METADATA.toml index be1bf721aeab..6ff992eac0af 100644 --- a/stubs/pyinstaller/METADATA.toml +++ b/stubs/pyinstaller/METADATA.toml @@ -1,2 +1,2 @@ -version = "6.12.*" +version = "6.13.*" upstream_repository = "https://github.com/pyinstaller/pyinstaller" diff --git a/stubs/pyinstaller/PyInstaller/compat.pyi b/stubs/pyinstaller/PyInstaller/compat.pyi index 05a1440e1954..81193e19ee58 100644 --- a/stubs/pyinstaller/PyInstaller/compat.pyi +++ b/stubs/pyinstaller/PyInstaller/compat.pyi @@ -29,6 +29,7 @@ is_openbsd: Final[bool] is_hpux: Final[bool] is_unix: Final[bool] is_musl: Final[bool] +is_termux: Final[bool] is_macos_11_compat: Final[bool] is_macos_11_native: Final[bool] is_macos_11: Final[bool] diff --git a/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi b/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi index 17f0ddf5c4ac..bab6ffebfa1b 100644 --- a/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi +++ b/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi @@ -52,7 +52,7 @@ def collect_system_data_files( path: str, destdir: StrPath | None = None, include_py_files: bool = False ) -> list[tuple[str, str]]: ... def copy_metadata(package_name: str, recursive: bool = False) -> list[tuple[str, str]]: ... -def get_installer(module: str) -> str | None: ... +def get_installer(dist_name: str) -> str | None: ... 
def collect_all( package_name: str, include_py_files: bool = True, From ed3347505b8983622035173e634e331ada26e6bc Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Thu, 17 Apr 2025 07:54:27 +0400 Subject: [PATCH 235/388] Bump shapely to 2.1.* (#13847) --- stubs/shapely/@tests/stubtest_allowlist.txt | 2 +- stubs/shapely/METADATA.toml | 2 +- stubs/shapely/shapely/_geometry.pyi | 35 +++-- stubs/shapely/shapely/_ragged_array.pyi | 2 +- stubs/shapely/shapely/algorithms/cga.pyi | 3 - .../shapely/shapely/algorithms/polylabel.pyi | 16 --- stubs/shapely/shapely/constructive.pyi | 135 +++++++++++++++--- stubs/shapely/shapely/coordinates.pyi | 22 ++- stubs/shapely/shapely/creation.pyi | 69 +++++++-- stubs/shapely/shapely/decorators.pyi | 3 +- stubs/shapely/shapely/geometry/base.pyi | 14 +- stubs/shapely/shapely/geometry/point.pyi | 2 + stubs/shapely/shapely/io.pyi | 33 +++-- stubs/shapely/shapely/lib.pyi | 12 ++ stubs/shapely/shapely/ops.pyi | 24 ++-- stubs/shapely/shapely/predicates.pyi | 56 +++++--- stubs/shapely/shapely/set_operations.pyi | 14 +- 17 files changed, 323 insertions(+), 121 deletions(-) diff --git a/stubs/shapely/@tests/stubtest_allowlist.txt b/stubs/shapely/@tests/stubtest_allowlist.txt index 2d094265aee0..fd759a27a630 100644 --- a/stubs/shapely/@tests/stubtest_allowlist.txt +++ b/stubs/shapely/@tests/stubtest_allowlist.txt @@ -1,4 +1,4 @@ -shapely\.geometry\.conftest +shapely\.conftest shapely\.tests.* shapely\._typing # stubs only module diff --git a/stubs/shapely/METADATA.toml b/stubs/shapely/METADATA.toml index e12eb3f77a61..023f0b416b42 100644 --- a/stubs/shapely/METADATA.toml +++ b/stubs/shapely/METADATA.toml @@ -1,4 +1,4 @@ -version = "2.0.*" +version = "2.1.*" # Requires a version of numpy with a `py.typed` file requires = ["numpy>=1.20"] upstream_repository = "https://github.com/shapely/shapely" diff --git a/stubs/shapely/shapely/_geometry.pyi b/stubs/shapely/shapely/_geometry.pyi index 874dc42ab291..eb2cdab6cec9 100644 --- a/stubs/shapely/shapely/_geometry.pyi +++ b/stubs/shapely/shapely/_geometry.pyi @@ -13,28 +13,29 @@ from .lib import Geometry __all__ = [ "GeometryType", - "get_type_id", - "get_dimensions", + "force_2d", + "force_3d", "get_coordinate_dimension", + "get_dimensions", + "get_exterior_ring", + "get_geometry", + "get_interior_ring", + "get_m", "get_num_coordinates", + "get_num_geometries", + "get_num_interior_rings", + "get_num_points", + "get_parts", + "get_point", + "get_precision", + "get_rings", "get_srid", - "set_srid", + "get_type_id", "get_x", "get_y", "get_z", - "get_exterior_ring", - "get_num_points", - "get_num_interior_rings", - "get_num_geometries", - "get_point", - "get_interior_ring", - "get_geometry", - "get_parts", - "get_rings", - "get_precision", "set_precision", - "force_2d", - "force_3d", + "set_srid", ] _PrecisionMode: TypeAlias = Literal["valid_output", "pointwise", "keep_collapsed", 0, 1, 2] @@ -89,6 +90,10 @@ def get_z(point: Geometry | None, **kwargs) -> float: ... @overload def get_z(point: OptGeoArrayLikeSeq, **kwargs) -> NDArray[np.float64]: ... @overload +def get_m(point: Geometry | None, **kwargs) -> float: ... +@overload +def get_m(point: OptGeoArrayLikeSeq, **kwargs) -> NDArray[np.float64]: ... +@overload def get_point(geometry: LineString, index: SupportsIndex, **kwargs) -> Point | Any: ... @overload def get_point(geometry: Point | Polygon | BaseMultipartGeometry | None, index: SupportsIndex, **kwargs) -> None: ... 
diff --git a/stubs/shapely/shapely/_ragged_array.pyi b/stubs/shapely/shapely/_ragged_array.pyi index c79038f46559..c4d60165b8f5 100644 --- a/stubs/shapely/shapely/_ragged_array.pyi +++ b/stubs/shapely/shapely/_ragged_array.pyi @@ -5,7 +5,7 @@ from ._geometry import GeometryType from ._typing import ArrayLike, ArrayLikeSeq, GeoArray, OptGeoArrayLikeSeq def to_ragged_array( - geometries: OptGeoArrayLikeSeq, include_z: bool | None = None + geometries: OptGeoArrayLikeSeq, include_z: bool | None = None, include_m: bool | None = None ) -> tuple[GeometryType, NDArray[np.float64], tuple[NDArray[np.int64], ...]]: ... def from_ragged_array( geometry_type: GeometryType, coords: ArrayLike[float], offsets: ArrayLikeSeq[int] | None = None diff --git a/stubs/shapely/shapely/algorithms/cga.pyi b/stubs/shapely/shapely/algorithms/cga.pyi index 32f5765f35e6..d3d7ea164660 100644 --- a/stubs/shapely/shapely/algorithms/cga.pyi +++ b/stubs/shapely/shapely/algorithms/cga.pyi @@ -1,6 +1,3 @@ -from collections.abc import Callable - from ..geometry import LinearRing def signed_area(ring: LinearRing) -> float: ... -def is_ccw_impl(name: None = None) -> Callable[[LinearRing], bool]: ... diff --git a/stubs/shapely/shapely/algorithms/polylabel.pyi b/stubs/shapely/shapely/algorithms/polylabel.pyi index 8ef31b9916b6..5d9777c30eab 100644 --- a/stubs/shapely/shapely/algorithms/polylabel.pyi +++ b/stubs/shapely/shapely/algorithms/polylabel.pyi @@ -1,19 +1,3 @@ -from ..errors import TopologicalError as TopologicalError from ..geometry import Point, Polygon -class Cell: - x: float - y: float - h: float - centroid: Point - distance: float - max_distance: float - def __init__(self, x: float, y: float, h: float, polygon: Polygon) -> None: ... - def __lt__(self, other: Cell) -> bool: ... - def __le__(self, other: Cell) -> bool: ... - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... - def __gt__(self, other: Cell) -> bool: ... - def __ge__(self, other: Cell) -> bool: ... - def polylabel(polygon: Polygon, tolerance: float = 1.0) -> Point: ... 
diff --git a/stubs/shapely/shapely/constructive.pyi b/stubs/shapely/shapely/constructive.pyi index f022b0cd717c..36dc537dd586 100644 --- a/stubs/shapely/shapely/constructive.pyi +++ b/stubs/shapely/shapely/constructive.pyi @@ -1,5 +1,6 @@ from collections.abc import Sequence from typing import Any, Literal, SupportsIndex, overload +from typing_extensions import TypeAlias from ._enum import ParamEnum from ._typing import ArrayLike, ArrayLikeSeq, GeoArray, OptGeoArrayLike, OptGeoArrayLikeSeq, OptGeoT @@ -12,32 +13,38 @@ __all__ = [ "BufferJoinStyle", "boundary", "buffer", - "offset_curve", + "build_area", "centroid", "clip_by_rect", "concave_hull", + "constrained_delaunay_triangles", "convex_hull", "delaunay_triangles", - "segmentize", "envelope", "extract_unique_points", - "build_area", "make_valid", - "normalize", + "maximum_inscribed_circle", + "minimum_bounding_circle", + "minimum_clearance_line", + "minimum_rotated_rectangle", "node", + "normalize", + "offset_curve", + "orient_polygons", + "oriented_envelope", "point_on_surface", "polygonize", "polygonize_full", "remove_repeated_points", "reverse", + "segmentize", "simplify", "snap", "voronoi_polygons", - "oriented_envelope", - "minimum_rotated_rectangle", - "minimum_bounding_circle", ] +_Method: TypeAlias = Literal["linework", "structure"] + class BufferCapStyle(ParamEnum): round = 1 flat = 2 @@ -229,6 +236,14 @@ def delaunay_triangles( geometry: OptGeoArrayLikeSeq, tolerance: ArrayLike[float] = 0.0, only_edges: ArrayLike[bool] = False, **kwargs ) -> GeoArray: ... @overload +def constrained_delaunay_triangles(geometry: Geometry, **kwargs) -> GeometryCollection: ... +@overload +def constrained_delaunay_triangles(geometry: None, **kwargs) -> None: ... +@overload +def constrained_delaunay_triangles(geometry: Geometry | None, **kwargs) -> GeometryCollection | None: ... +@overload +def constrained_delaunay_triangles(geometry: OptGeoArrayLikeSeq | OptGeoArrayLike, **kwargs) -> GeoArray: ... +@overload def envelope(geometry: Point, **kwargs) -> Point: ... @overload def envelope(geometry: Geometry, **kwargs) -> BaseGeometry: ... @@ -255,13 +270,37 @@ def build_area(geometry: Geometry | None, **kwargs) -> BaseGeometry | None: ... @overload def build_area(geometry: OptGeoArrayLikeSeq, **kwargs) -> GeoArray: ... @overload -def make_valid(geometry: Geometry, **kwargs) -> BaseGeometry: ... +def make_valid(geometry: Geometry, *, method: _Method = "linework", keep_collapsed: bool = True, **kwargs) -> BaseGeometry: ... +@overload +def make_valid(geometry: None, *, method: _Method = "linework", keep_collapsed: bool = True, **kwargs) -> None: ... +@overload +def make_valid( + geometry: Geometry | None, *, method: Literal["structure"], keep_collapsed: bool = True, **kwargs +) -> BaseGeometry | None: ... +@overload +def make_valid( + geometry: Geometry | None, *, method: Literal["linework"], keep_collapsed: Literal[True], **kwargs +) -> BaseGeometry | None: ... +@overload +def make_valid( + geometry: OptGeoArrayLikeSeq, *, method: Literal["structure"], keep_collapsed: bool = True, **kwargs +) -> GeoArray: ... +@overload +def make_valid( + geometry: OptGeoArrayLikeSeq, *, method: Literal["linework"], keep_collapsed: Literal[True], **kwargs +) -> GeoArray: ... +@overload +def minimum_clearance_line(geometry: Point, **kwargs) -> Point: ... @overload -def make_valid(geometry: None, **kwargs) -> None: ... +def minimum_clearance_line(geometry: LineString | Polygon | BaseMultipartGeometry, **kwargs) -> Polygon: ... 
@overload -def make_valid(geometry: Geometry | None, **kwargs) -> BaseGeometry | None: ... +def minimum_clearance_line(geometry: Geometry, **kwargs) -> Polygon | Point: ... @overload -def make_valid(geometry: OptGeoArrayLikeSeq, **kwargs) -> GeoArray: ... +def minimum_clearance_line(geometry: None, **kwargs) -> None: ... +@overload +def minimum_clearance_line(geometry: Geometry | None, **kwargs) -> Polygon | Point | None: ... +@overload +def minimum_clearance_line(geometry: OptGeoArrayLikeSeq, **kwargs) -> GeoArray: ... @overload def normalize(geometry: OptGeoT, **kwargs) -> OptGeoT: ... @overload @@ -335,27 +374,53 @@ def snap(geometry: OptGeoArrayLike, reference: OptGeoArrayLikeSeq, tolerance: Ar def snap(geometry: OptGeoArrayLike, reference: OptGeoArrayLike, tolerance: ArrayLikeSeq[float], **kwargs) -> GeoArray: ... @overload def voronoi_polygons( - geometry: Geometry, tolerance: float = 0.0, extend_to: Geometry | None = None, only_edges: Literal[False] = False, **kwargs + geometry: Geometry, + tolerance: float = 0.0, + extend_to: Geometry | None = None, + only_edges: Literal[False] = False, + ordered: bool = False, + **kwargs, ) -> GeometryCollection[Polygon]: ... @overload def voronoi_polygons( - geometry: Geometry, tolerance: float, extend_to: Geometry | None, only_edges: Literal[True], **kwargs + geometry: Geometry, tolerance: float, extend_to: Geometry | None, only_edges: Literal[True], ordered: bool = False, **kwargs ) -> LineString | MultiLineString: ... @overload def voronoi_polygons( - geometry: Geometry, tolerance: float = 0.0, extend_to: Geometry | None = None, *, only_edges: Literal[True], **kwargs + geometry: Geometry, + tolerance: float = 0.0, + extend_to: Geometry | None = None, + *, + only_edges: Literal[True], + ordered: bool = False, + **kwargs, ) -> LineString | MultiLineString: ... @overload def voronoi_polygons( - geometry: Geometry, tolerance: float = 0.0, extend_to: Geometry | None = None, only_edges: bool = False, **kwargs + geometry: Geometry, + tolerance: float = 0.0, + extend_to: Geometry | None = None, + only_edges: bool = False, + ordered: bool = False, + **kwargs, ) -> GeometryCollection[Polygon] | LineString | MultiLineString: ... @overload def voronoi_polygons( - geometry: None, tolerance: float = 0.0, extend_to: Geometry | None = None, only_edges: bool = False, **kwargs + geometry: None, + tolerance: float = 0.0, + extend_to: Geometry | None = None, + only_edges: bool = False, + ordered: bool = False, + **kwargs, ) -> None: ... @overload def voronoi_polygons( - geometry: Geometry | None, tolerance: float = 0.0, extend_to: Geometry | None = None, only_edges: bool = False, **kwargs + geometry: Geometry | None, + tolerance: float = 0.0, + extend_to: Geometry | None = None, + only_edges: bool = False, + ordered: bool = False, + **kwargs, ) -> GeometryCollection[Polygon] | LineString | MultiLineString | None: ... @overload def voronoi_polygons( @@ -363,6 +428,7 @@ def voronoi_polygons( tolerance: ArrayLike[float] = 0.0, extend_to: OptGeoArrayLike = None, only_edges: ArrayLike[bool] = False, + ordered: bool = False, **kwargs, ) -> GeoArray: ... @overload @@ -371,6 +437,7 @@ def voronoi_polygons( tolerance: ArrayLikeSeq[float], extend_to: OptGeoArrayLike = None, only_edges: ArrayLike[bool] = False, + ordered: bool = False, **kwargs, ) -> GeoArray: ... @overload @@ -379,6 +446,7 @@ def voronoi_polygons( tolerance: ArrayLike[float], extend_to: OptGeoArrayLikeSeq, only_edges: ArrayLike[bool] = False, + ordered: bool = False, **kwargs, ) -> GeoArray: ... 
 @overload
@@ -388,11 +456,17 @@ def voronoi_polygons(
     *,
     extend_to: OptGeoArrayLikeSeq,
     only_edges: ArrayLike[bool] = False,
+    ordered: bool = False,
     **kwargs,
 ) -> GeoArray: ...
 @overload
 def voronoi_polygons(
-    geometry: OptGeoArrayLike, tolerance: ArrayLike[float], extend_to: OptGeoArrayLike, only_edges: ArrayLikeSeq[bool], **kwargs
+    geometry: OptGeoArrayLike,
+    tolerance: ArrayLike[float],
+    extend_to: OptGeoArrayLike,
+    only_edges: ArrayLikeSeq[bool],
+    ordered: bool = False,
+    **kwargs,
 ) -> GeoArray: ...
 @overload
 def voronoi_polygons(
@@ -401,6 +475,7 @@ def voronoi_polygons(
     extend_to: OptGeoArrayLike = None,
     *,
     only_edges: ArrayLikeSeq[bool],
+    ordered: bool = False,
     **kwargs,
 ) -> GeoArray: ...
 @overload
@@ -428,3 +503,27 @@ def minimum_bounding_circle(geometry: None, **kwargs) -> None: ...
 def minimum_bounding_circle(geometry: Geometry | None, **kwargs) -> Polygon | Point | None: ...
 @overload
 def minimum_bounding_circle(geometry: OptGeoArrayLikeSeq, **kwargs) -> GeoArray: ...
+@overload
+def maximum_inscribed_circle(geometry: Point, tolerance: float | None = None, **kwargs) -> Point: ...
+@overload
+def maximum_inscribed_circle(
+    geometry: LineString | Polygon | BaseMultipartGeometry, tolerance: float | None = None, **kwargs
+) -> Polygon: ...
+@overload
+def maximum_inscribed_circle(geometry: Geometry, tolerance: float | None = None, **kwargs) -> Polygon | Point: ...
+@overload
+def maximum_inscribed_circle(geometry: None, tolerance: float | None = None, **kwargs) -> None: ...
+@overload
+def maximum_inscribed_circle(geometry: Geometry | None, tolerance: float | None = None, **kwargs) -> Polygon | Point | None: ...
+@overload
+def maximum_inscribed_circle(geometry: OptGeoArrayLikeSeq, tolerance: ArrayLike[float] | None = None, **kwargs) -> GeoArray: ...
+@overload
+def orient_polygons(geometry: Point, *, exterior_cw: bool = False, **kwargs) -> Point: ...
+@overload
+def orient_polygons(geometry: Geometry, *, exterior_cw: bool = False, **kwargs) -> BaseGeometry: ...
+@overload
+def orient_polygons(geometry: None, *, exterior_cw: bool = False, **kwargs) -> None: ...
+@overload
+def orient_polygons(geometry: Geometry | None, *, exterior_cw: bool = False, **kwargs) -> BaseGeometry | None: ...
+@overload
+def orient_polygons(geometry: OptGeoArrayLikeSeq, *, exterior_cw: bool = False, **kwargs) -> GeoArray: ...
diff --git a/stubs/shapely/shapely/coordinates.pyi b/stubs/shapely/shapely/coordinates.pyi
index 7aa57afcefd9..bb8026c9d0c1 100644
--- a/stubs/shapely/shapely/coordinates.pyi
+++ b/stubs/shapely/shapely/coordinates.pyi
@@ -10,32 +10,40 @@ __all__ = ["transform", "count_coordinates", "get_coordinates", "set_coordinates
 
 @overload
 def transform(
-    geometry: OptGeoT, transformation: Callable[[NDArray[np.float64]], NDArray[np.float64]], include_z: bool = False
+    geometry: OptGeoT,
+    transformation: Callable[[NDArray[np.float64]], NDArray[np.float64]],
+    include_z: bool = False,
+    *,
+    interleaved: bool = True,
 ) -> OptGeoT: ...
 @overload
 def transform(
-    geometry: OptGeoArrayLikeSeq, transformation: Callable[[NDArray[np.float64]], NDArray[np.float64]], include_z: bool = False
+    geometry: OptGeoArrayLikeSeq,
+    transformation: Callable[[NDArray[np.float64]], NDArray[np.float64]],
+    include_z: bool = False,
+    *,
+    interleaved: bool = True,
 ) -> GeoArray: ...
 def count_coordinates(geometry: OptGeoArrayLike) -> int: ...
@overload def get_coordinates( - geometry: OptGeoArrayLike, include_z: bool = False, return_index: Literal[False] = False + geometry: OptGeoArrayLike, include_z: bool = False, return_index: Literal[False] = False, *, include_m: bool = False ) -> NDArray[np.float64]: ... @overload def get_coordinates( - geometry: OptGeoArrayLike, include_z: bool = False, *, return_index: Literal[True] + geometry: OptGeoArrayLike, include_z: bool = False, *, return_index: Literal[True], include_m: bool = False ) -> tuple[NDArray[np.float64], NDArray[np.int64]]: ... @overload def get_coordinates( - geometry: OptGeoArrayLike, include_z: bool, return_index: Literal[True] + geometry: OptGeoArrayLike, include_z: bool, return_index: Literal[True], *, include_m: bool = False ) -> tuple[NDArray[np.float64], NDArray[np.int64]]: ... @overload def get_coordinates( - geometry: OptGeoArrayLike, include_z: bool = False, *, return_index: bool + geometry: OptGeoArrayLike, include_z: bool = False, *, return_index: bool, include_m: bool = False ) -> NDArray[np.float64] | tuple[NDArray[np.float64], NDArray[np.int64]]: ... @overload def get_coordinates( - geometry: OptGeoArrayLike, include_z: bool, return_index: bool + geometry: OptGeoArrayLike, include_z: bool, return_index: bool, *, include_m: bool = False ) -> NDArray[np.float64] | tuple[NDArray[np.float64], NDArray[np.int64]]: ... @overload def set_coordinates(geometry: GeoT, coordinates: ArrayLikeSeq[float]) -> GeoT: ... diff --git a/stubs/shapely/shapely/creation.pyi b/stubs/shapely/shapely/creation.pyi index b3f3e4382dc8..db0a7f75f0f5 100644 --- a/stubs/shapely/shapely/creation.pyi +++ b/stubs/shapely/shapely/creation.pyi @@ -1,9 +1,11 @@ from collections.abc import Sequence from typing import Literal, SupportsIndex, overload +from typing_extensions import TypeAlias import numpy as np from numpy.typing import NDArray +from ._enum import ParamEnum from ._geometry import GeometryType from ._typing import ArrayLike, ArrayLikeSeq, GeoArray, OptGeoArrayLike, OptGeoArrayLikeSeq from .geometry import GeometryCollection, LinearRing, LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon @@ -24,13 +26,34 @@ __all__ = [ "empty", ] +class HandleNaN(ParamEnum): + allow = 0 + skip = 1 + error = 2 + +_HandleNaN: TypeAlias = Literal[0, 1, 2] | HandleNaN + @overload def points( - coords: float, y: float, z: float | None = None, indices: None = None, out: None = None, **kwargs # acts as x + coords: float, + y: float, + z: float | None = None, + indices: None = None, + *, + handle_nan: _HandleNaN = 0, + out: None = None, + **kwargs, # acts as x ) -> Point: ... @overload def points( - coords: Sequence[float], y: None = None, z: None = None, indices: None = None, out: None = None, **kwargs # acts as x, y[, z] + coords: Sequence[float], + y: None = None, + z: None = None, + indices: None = None, + *, + handle_nan: _HandleNaN = 0, + out: None = None, + **kwargs, # acts as x, y[, z] ) -> Point: ... @overload def points( @@ -38,6 +61,8 @@ def points( y: Sequence[float], # must be (y1, y2, ...) z: Sequence[float] | None = None, indices: ArrayLikeSeq[int] | None = None, + *, + handle_nan: _HandleNaN = 0, out: NDArray[np.object_] | None = None, **kwargs, ) -> GeoArray: ... @@ -47,6 +72,8 @@ def points( y: None = None, z: None = None, indices: ArrayLikeSeq[int] | None = None, + *, + handle_nan: _HandleNaN = 0, out: NDArray[np.object_] | None = None, **kwargs, ) -> GeoArray: ... 
@@ -56,6 +83,8 @@ def points( y: ArrayLike[float], z: ArrayLike[float] | None = None, indices: ArrayLikeSeq[int] | None = None, + *, + handle_nan: _HandleNaN = 0, out: NDArray[np.object_] | None = None, **kwargs, ) -> Point | GeoArray: ... @@ -65,6 +94,8 @@ def points( y: ArrayLike[float] | None = None, z: ArrayLike[float] | None = None, indices: ArrayLikeSeq[int] | None = None, + *, + handle_nan: _HandleNaN = 0, out: NDArray[np.object_] | None = None, **kwargs, ) -> Point | GeoArray: ... @@ -74,6 +105,8 @@ def linestrings( y: Sequence[float], z: Sequence[float] | None = None, indices: None = None, + *, + handle_nan: _HandleNaN = 0, out: None = None, **kwargs, ) -> LineString: ... @@ -83,6 +116,8 @@ def linestrings( y: None = None, z: None = None, indices: None = None, + *, + handle_nan: _HandleNaN = 0, out: None = None, **kwargs, ) -> LineString: ... @@ -92,6 +127,8 @@ def linestrings( y: None = None, z: None = None, indices: ArrayLikeSeq[int] | None = None, + *, + handle_nan: _HandleNaN = 0, out: NDArray[np.object_] | None = None, **kwargs, ) -> GeoArray: ... @@ -101,6 +138,8 @@ def linestrings( y: ArrayLikeSeq[float] | None = None, z: ArrayLikeSeq[float] | None = None, indices: ArrayLikeSeq[int] | None = None, + *, + handle_nan: _HandleNaN = 0, out: NDArray[np.object_] | None = None, **kwargs, ) -> LineString | GeoArray: ... @@ -110,6 +149,8 @@ def linearrings( y: Sequence[float], z: Sequence[float] | None = None, indices: None = None, + *, + handle_nan: _HandleNaN = 0, out: None = None, **kwargs, ) -> LinearRing: ... @@ -119,6 +160,8 @@ def linearrings( y: None = None, z: None = None, indices: None = None, + *, + handle_nan: _HandleNaN = 0, out: None = None, **kwargs, ) -> LinearRing: ... @@ -128,6 +171,8 @@ def linearrings( y: None = None, z: None = None, indices: ArrayLikeSeq[int] | None = None, + *, + handle_nan: _HandleNaN = 0, out: NDArray[np.object_] | None = None, **kwargs, ) -> GeoArray: ... @@ -137,6 +182,8 @@ def linearrings( y: ArrayLikeSeq[float] | None = None, z: ArrayLikeSeq[float] | None = None, indices: ArrayLikeSeq[int] | None = None, + *, + handle_nan: _HandleNaN = 0, out: NDArray[np.object_] | None = None, **kwargs, ) -> LinearRing | GeoArray: ... @@ -145,6 +192,7 @@ def polygons( geometries: LinearRing | Sequence[Sequence[float]] | None, holes: ArrayLikeSeq[float] | OptGeoArrayLikeSeq | None = None, indices: None = None, + *, out: None = None, **kwargs, ) -> Polygon: ... @@ -153,6 +201,7 @@ def polygons( geometries: Sequence[LinearRing | Sequence[Sequence[float]] | None], holes: ArrayLikeSeq[float] | OptGeoArrayLikeSeq | None = None, indices: ArrayLikeSeq[int] | None = None, + *, out: NDArray[np.object_] | None = None, **kwargs, ) -> GeoArray: ... @@ -161,6 +210,7 @@ def polygons( geometries: ArrayLikeSeq[float] | OptGeoArrayLikeSeq, holes: ArrayLikeSeq[float] | OptGeoArrayLikeSeq | None = None, indices: ArrayLikeSeq[int] | None = None, + *, out: NDArray[np.object_] | None = None, **kwargs, ) -> Polygon | GeoArray: ... @@ -177,48 +227,51 @@ def box( ) -> GeoArray: ... @overload def multipoints( - geometries: Sequence[Point | Sequence[float] | None], indices: None = None, out: None = None, **kwargs + geometries: Sequence[Point | Sequence[float] | None], indices: None = None, *, out: None = None, **kwargs ) -> MultiPoint: ... @overload def multipoints( geometries: Sequence[Sequence[Point | Sequence[float] | None]], indices: ArrayLikeSeq[int] | None = None, + *, out: NDArray[np.object_] | None = None, **kwargs, ) -> GeoArray: ... 
@overload def multipoints( - geometries: OptGeoArrayLikeSeq, indices: ArrayLikeSeq[int] | None = None, out: NDArray[np.object_] | None = None, **kwargs + geometries: OptGeoArrayLikeSeq, indices: ArrayLikeSeq[int] | None = None, *, out: NDArray[np.object_] | None = None, **kwargs ) -> MultiPoint | GeoArray: ... @overload def multilinestrings( - geometries: Sequence[LineString | Sequence[Sequence[float]] | None], indices: None = None, out: None = None, **kwargs + geometries: Sequence[LineString | Sequence[Sequence[float]] | None], indices: None = None, *, out: None = None, **kwargs ) -> MultiLineString: ... @overload def multilinestrings( geometries: Sequence[Sequence[LineString | Sequence[Sequence[float]] | None]], indices: ArrayLikeSeq[int] | None = None, + *, out: NDArray[np.object_] | None = None, **kwargs, ) -> GeoArray: ... @overload def multilinestrings( - geometries: OptGeoArrayLikeSeq, indices: ArrayLikeSeq[int] | None = None, out: NDArray[np.object_] | None = None, **kwargs + geometries: OptGeoArrayLikeSeq, indices: ArrayLikeSeq[int] | None = None, *, out: NDArray[np.object_] | None = None, **kwargs ) -> MultiLineString | GeoArray: ... @overload def multipolygons( - geometries: Sequence[Polygon | Sequence[Sequence[float]] | None], indices: None = None, out: None = None, **kwargs + geometries: Sequence[Polygon | Sequence[Sequence[float]] | None], indices: None = None, *, out: None = None, **kwargs ) -> MultiPolygon: ... @overload def multipolygons( geometries: Sequence[Sequence[Polygon | Sequence[Sequence[float]] | None]], indices: ArrayLikeSeq[int] | None = None, + *, out: NDArray[np.object_] | None = None, **kwargs, ) -> GeoArray: ... @overload def multipolygons( - geometries: OptGeoArrayLikeSeq, indices: ArrayLikeSeq[int] | None = None, out: NDArray[np.object_] | None = None, **kwargs + geometries: OptGeoArrayLikeSeq, indices: ArrayLikeSeq[int] | None = None, *, out: NDArray[np.object_] | None = None, **kwargs ) -> MultiPolygon | GeoArray: ... @overload def geometrycollections( diff --git a/stubs/shapely/shapely/decorators.pyi b/stubs/shapely/shapely/decorators.pyi index d9149d60e09b..2d0aa31dd4b7 100644 --- a/stubs/shapely/shapely/decorators.pyi +++ b/stubs/shapely/shapely/decorators.pyi @@ -1,4 +1,4 @@ -from collections.abc import Callable +from collections.abc import Callable, Iterable from typing import TypeVar _F = TypeVar("_F", bound=Callable[..., object]) @@ -9,3 +9,4 @@ class requires_geos: def __call__(self, func: _F) -> _F: ... def multithreading_enabled(func: _F) -> _F: ... +def deprecate_positional(should_be_kwargs: Iterable[str], category: type[Warning] = ...) -> Callable[..., object]: ... diff --git a/stubs/shapely/shapely/geometry/base.pyi b/stubs/shapely/shapely/geometry/base.pyi index ac8b1ca31411..75da5ee5e05d 100644 --- a/stubs/shapely/shapely/geometry/base.pyi +++ b/stubs/shapely/shapely/geometry/base.pyi @@ -158,6 +158,8 @@ class BaseGeometry(Geometry): @property def has_z(self) -> bool: ... @property + def has_m(self) -> bool: ... + @property def is_empty(self) -> bool: ... @property def is_ring(self) -> bool: ... @@ -224,13 +226,15 @@ class BaseGeometry(Geometry): @overload def dwithin(self, other: OptGeoArrayLike, distance: ArrayLikeSeq[float]) -> NDArray[np.bool_]: ... @overload - def equals_exact(self, other: Geometry | None, tolerance: float) -> bool: ... + def equals_exact(self, other: Geometry | None, tolerance: float = 0.0, *, normalize: Literal[False] = False) -> bool: ... 
@overload - def equals_exact(self, other: OptGeoArrayLikeSeq, tolerance: float) -> NDArray[np.bool_]: ... + def equals_exact( + self, other: OptGeoArrayLikeSeq, tolerance: float = 0.0, *, normalize: bool = False + ) -> NDArray[np.bool_]: ... @overload - def equals_exact(self, other: OptGeoArrayLike, tolerance: ArrayLikeSeq[float]) -> NDArray[np.bool_]: ... - @deprecated("Method 'almost_equals()' is deprecated. Use method 'equals_exact()' instead.") - def almost_equals(self, other: OptGeoArrayLike, decimal: int = 6) -> bool | NDArray[np.bool_]: ... + def equals_exact( + self, other: OptGeoArrayLike, tolerance: ArrayLikeSeq[float], *, normalize: bool = False + ) -> NDArray[np.bool_]: ... @overload def relate_pattern(self, other: Geometry | None, pattern: str) -> bool: ... @overload diff --git a/stubs/shapely/shapely/geometry/point.pyi b/stubs/shapely/shapely/geometry/point.pyi index 29973bdce918..e9ac4e18169e 100644 --- a/stubs/shapely/shapely/geometry/point.pyi +++ b/stubs/shapely/shapely/geometry/point.pyi @@ -25,6 +25,8 @@ class Point(BaseGeometry): def y(self) -> float: ... @property def z(self) -> float: ... + @property + def m(self) -> float: ... def svg(self, scale_factor: float = 1.0, fill_color: str | None = None, opacity: float | None = None) -> str: ... # type: ignore[override] # more precise base overrides @property diff --git a/stubs/shapely/shapely/io.pyi b/stubs/shapely/shapely/io.pyi index 27b667e5347e..9b840b4874b8 100644 --- a/stubs/shapely/shapely/io.pyi +++ b/stubs/shapely/shapely/io.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete from typing import Literal, overload +from typing_extensions import TypeAlias import numpy as np from numpy.typing import NDArray @@ -12,6 +13,8 @@ from .lib import Geometry __all__ = ["from_geojson", "from_ragged_array", "from_wkb", "from_wkt", "to_geojson", "to_ragged_array", "to_wkb", "to_wkt"] +_OutputDimension: TypeAlias = Literal[2, 3, 4] + # Mypy and stubtest aren't happy with the following definition and # raise is a reserved keyword, so we cannot use the class syntax of enums # DecodingErrorOptions = ParamEnum("DecodingErrorOptions", {"ignore": 0, "warn": 1, "raise": 2}) @@ -23,18 +26,28 @@ class WKBFlavorOptions(ParamEnum): @overload def to_wkt( - geometry: None, rounding_precision: int = 6, trim: bool = True, output_dimension: int = 3, old_3d: bool = False, **kwargs + geometry: None, + rounding_precision: int = 6, + trim: bool = True, + output_dimension: _OutputDimension | None = None, + old_3d: bool = False, + **kwargs, ) -> None: ... @overload def to_wkt( - geometry: Geometry, rounding_precision: int = 6, trim: bool = True, output_dimension: int = 3, old_3d: bool = False, **kwargs + geometry: Geometry, + rounding_precision: int = 6, + trim: bool = True, + output_dimension: _OutputDimension | None = None, + old_3d: bool = False, + **kwargs, ) -> str: ... @overload def to_wkt( geometry: OptGeoArrayLikeSeq, rounding_precision: int = 6, trim: bool = True, - output_dimension: int = 3, + output_dimension: _OutputDimension | None = None, old_3d: bool = False, **kwargs, ) -> NDArray[np.str_]: ... 
@@ -42,7 +55,7 @@ def to_wkt( def to_wkb( geometry: None, hex: bool = False, - output_dimension: int = 3, + output_dimension: _OutputDimension | None = None, byte_order: int = -1, include_srid: bool = False, flavor: Literal["iso", "extended"] = "extended", @@ -52,7 +65,7 @@ def to_wkb( def to_wkb( geometry: Geometry, hex: Literal[False] = False, - output_dimension: int = 3, + output_dimension: _OutputDimension | None = None, byte_order: int = -1, include_srid: bool = False, flavor: Literal["iso", "extended"] = "extended", @@ -62,7 +75,7 @@ def to_wkb( def to_wkb( geometry: Geometry, hex: Literal[True], - output_dimension: int = 3, + output_dimension: _OutputDimension | None = None, byte_order: int = -1, include_srid: bool = False, flavor: Literal["iso", "extended"] = "extended", @@ -72,7 +85,7 @@ def to_wkb( def to_wkb( geometry: Geometry, hex: bool, - output_dimension: int = 3, + output_dimension: _OutputDimension | None = None, byte_order: int = -1, include_srid: bool = False, flavor: Literal["iso", "extended"] = "extended", @@ -82,7 +95,7 @@ def to_wkb( def to_wkb( geometry: OptGeoArrayLikeSeq, hex: Literal[False] = False, - output_dimension: int = 3, + output_dimension: _OutputDimension | None = None, byte_order: int = -1, include_srid: bool = False, flavor: Literal["iso", "extended"] = "extended", @@ -92,7 +105,7 @@ def to_wkb( def to_wkb( geometry: OptGeoArrayLikeSeq, hex: Literal[True], - output_dimension: int = 3, + output_dimension: _OutputDimension | None = None, byte_order: int = -1, include_srid: bool = False, flavor: Literal["iso", "extended"] = "extended", @@ -102,7 +115,7 @@ def to_wkb( def to_wkb( geometry: OptGeoArrayLikeSeq, hex: bool, - output_dimension: int = 3, + output_dimension: _OutputDimension | None = None, byte_order: int = -1, include_srid: bool = False, flavor: Literal["iso", "extended"] = "extended", diff --git a/stubs/shapely/shapely/lib.pyi b/stubs/shapely/shapely/lib.pyi index 4a123b9116e8..a3b3d82bb9f2 100644 --- a/stubs/shapely/shapely/lib.pyi +++ b/stubs/shapely/shapely/lib.pyi @@ -13,10 +13,14 @@ build_area: np.ufunc centroid: np.ufunc clip_by_rect: np.ufunc concave_hull: np.ufunc +constrained_delaunay_triangles: np.ufunc contains: np.ufunc contains_properly: np.ufunc contains_xy: np.ufunc convex_hull: np.ufunc +coverage_invalid_edges: np.ufunc +coverage_is_valid: np.ufunc +coverage_simplify: np.ufunc coverage_union: np.ufunc covered_by: np.ufunc covers: np.ufunc @@ -27,11 +31,13 @@ destroy_prepared: np.ufunc difference: np.ufunc difference_prec: np.ufunc disjoint: np.ufunc +disjoint_subset_union: np.ufunc distance: np.ufunc dwithin: np.ufunc envelope: np.ufunc equals: np.ufunc equals_exact: np.ufunc +equals_identical: np.ufunc extract_unique_points: np.ufunc force_2d: np.ufunc force_3d: np.ufunc @@ -45,6 +51,7 @@ get_dimensions: np.ufunc get_exterior_ring: np.ufunc get_geometry: np.ufunc get_interior_ring: np.ufunc +get_m: np.ufunc get_num_coordinates: np.ufunc get_num_geometries: np.ufunc get_num_interior_rings: np.ufunc @@ -53,6 +60,7 @@ get_point: np.ufunc get_precision: np.ufunc get_srid: np.ufunc get_type_id: np.ufunc +has_m: np.ufunc get_x: np.ufunc get_y: np.ufunc get_z: np.ufunc @@ -85,12 +93,16 @@ line_merge_directed: np.ufunc linearrings: np.ufunc linestrings: np.ufunc make_valid: np.ufunc +make_valid_with_params: np.ufunc +maximum_inscribed_circle: np.ufunc minimum_bounding_circle: np.ufunc minimum_bounding_radius: np.ufunc minimum_clearance: np.ufunc +minimum_clearance_line: np.ufunc node: np.ufunc normalize: np.ufunc offset_curve: np.ufunc 
+orient_polygons: np.ufunc oriented_envelope: np.ufunc overlaps: np.ufunc point_on_surface: np.ufunc diff --git a/stubs/shapely/shapely/ops.pyi b/stubs/shapely/shapely/ops.pyi index a03a5f43298c..f1f188bcb9b1 100644 --- a/stubs/shapely/shapely/ops.pyi +++ b/stubs/shapely/shapely/ops.pyi @@ -1,6 +1,5 @@ from collections.abc import Callable, Iterable from typing import Any, Literal, overload -from typing_extensions import deprecated from ._typing import GeoT, OptGeoArrayLike, SupportsGeoInterface from .algorithms.polylabel import polylabel as polylabel @@ -10,23 +9,22 @@ from .geometry.linestring import _ConvertibleToLineString from .lib import Geometry __all__ = [ - "cascaded_union", + "clip_by_rect", "linemerge", + "nearest_points", "operator", + "orient", "polygonize", "polygonize_full", + "shared_paths", + "snap", + "split", + "substring", "transform", - "unary_union", "triangulate", - "voronoi_diagram", - "split", - "nearest_points", + "unary_union", "validate", - "snap", - "shared_paths", - "clip_by_rect", - "orient", - "substring", + "voronoi_diagram", ] class CollectionOperator: @@ -47,8 +45,6 @@ class CollectionOperator: def linemerge( self, lines: MultiLineString | BaseMultipartGeometry | Iterable[_ConvertibleToLineString], directed: bool = False ) -> LineString | MultiLineString: ... - @deprecated("The `cascaded_union()` function is deprecated. Use `unary_union()` instead.") - def cascaded_union(self, geoms: OptGeoArrayLike) -> BaseGeometry: ... def unary_union(self, geoms: OptGeoArrayLike) -> BaseGeometry: ... operator: CollectionOperator @@ -58,8 +54,6 @@ linemerge = operator.linemerge unary_union = operator.unary_union # This is also an alias to operator method but we want to mark it as deprecated -@deprecated("The `cascaded_union()` function is deprecated. Use `unary_union()` instead.") -def cascaded_union(geoms: OptGeoArrayLike) -> BaseGeometry: ... @overload # edges false def triangulate(geom: Geometry, tolerance: float = 0.0, edges: Literal[False] = False) -> list[Polygon]: ... @overload # edges true (keyword) diff --git a/stubs/shapely/shapely/predicates.pyi b/stubs/shapely/shapely/predicates.pyi index ddf792261fa3..3b4fe369b980 100644 --- a/stubs/shapely/shapely/predicates.pyi +++ b/stubs/shapely/shapely/predicates.pyi @@ -9,7 +9,21 @@ from .geometry.base import BaseGeometry from .lib import Geometry __all__ = [ + "contains", + "contains_properly", + "contains_xy", + "covered_by", + "covers", + "crosses", + "disjoint", + "dwithin", + "equals", + "equals_exact", + "equals_identical", + "has_m", "has_z", + "intersects", + "intersects_xy", "is_ccw", "is_closed", "is_empty", @@ -21,23 +35,11 @@ __all__ = [ "is_valid", "is_valid_input", "is_valid_reason", - "crosses", - "contains", - "contains_xy", - "contains_properly", - "covered_by", - "covers", - "disjoint", - "dwithin", - "equals", - "intersects", - "intersects_xy", "overlaps", - "touches", - "within", - "equals_exact", "relate", "relate_pattern", + "touches", + "within", ] @overload @@ -45,6 +47,10 @@ def has_z(geometry: Geometry | None, **kwargs) -> bool: ... @overload def has_z(geometry: OptGeoArrayLikeSeq, **kwargs) -> NDArray[np.bool_]: ... @overload +def has_m(geometry: Geometry | None, **kwargs) -> bool: ... +@overload +def has_m(geometry: OptGeoArrayLikeSeq, **kwargs) -> NDArray[np.bool_]: ... +@overload def is_ccw(geometry: Geometry | None, **kwargs) -> bool: ... @overload def is_ccw(geometry: OptGeoArrayLikeSeq, **kwargs) -> NDArray[np.bool_]: ... 
@@ -163,13 +169,27 @@ def within(a: OptGeoArrayLikeSeq, b: OptGeoArrayLike, **kwargs) -> NDArray[np.bo @overload def within(a: OptGeoArrayLike, b: OptGeoArrayLikeSeq, **kwargs) -> NDArray[np.bool_]: ... @overload -def equals_exact(a: Geometry | None, b: Geometry | None, tolerance: float = 0.0, **kwargs) -> bool: ... +def equals_exact( + a: Geometry | None, b: Geometry | None, tolerance: float = 0.0, *, normalize: bool = False, **kwargs +) -> bool: ... +@overload +def equals_exact( + a: OptGeoArrayLike, b: OptGeoArrayLike, tolerance: ArrayLikeSeq[float], *, normalize: bool = False, **kwargs +) -> NDArray[np.bool_]: ... +@overload +def equals_exact( + a: OptGeoArrayLikeSeq, b: OptGeoArrayLike, tolerance: ArrayLike[float] = 0.0, *, normalize: bool = False, **kwargs +) -> NDArray[np.bool_]: ... +@overload +def equals_exact( + a: OptGeoArrayLike, b: OptGeoArrayLikeSeq, tolerance: ArrayLike[float] = 0.0, *, normalize: bool = False, **kwargs +) -> NDArray[np.bool_]: ... @overload -def equals_exact(a: OptGeoArrayLike, b: OptGeoArrayLike, tolerance: ArrayLikeSeq[float], **kwargs) -> NDArray[np.bool_]: ... +def equals_identical(a: Geometry | None, b: Geometry | None, **kwargs) -> bool: ... @overload -def equals_exact(a: OptGeoArrayLikeSeq, b: OptGeoArrayLike, tolerance: ArrayLike[float] = 0.0, **kwargs) -> NDArray[np.bool_]: ... +def equals_identical(a: OptGeoArrayLikeSeq, b: OptGeoArrayLike, **kwargs) -> NDArray[np.bool_]: ... @overload -def equals_exact(a: OptGeoArrayLike, b: OptGeoArrayLikeSeq, tolerance: ArrayLike[float] = 0.0, **kwargs) -> NDArray[np.bool_]: ... +def equals_identical(a: OptGeoArrayLike, b: OptGeoArrayLikeSeq, **kwargs) -> NDArray[np.bool_]: ... @overload def relate(a: Geometry | None, b: None, **kwargs) -> None: ... @overload diff --git a/stubs/shapely/shapely/set_operations.pyi b/stubs/shapely/shapely/set_operations.pyi index d44c034f4abd..ccc6d3c8e91c 100644 --- a/stubs/shapely/shapely/set_operations.pyi +++ b/stubs/shapely/shapely/set_operations.pyi @@ -5,7 +5,11 @@ from .geometry.base import BaseGeometry from .lib import Geometry __all__ = [ + "coverage_union", + "coverage_union_all", "difference", + "disjoint_subset_union", + "disjoint_subset_union_all", "intersection", "intersection_all", "symmetric_difference", @@ -13,8 +17,6 @@ __all__ = [ "unary_union", "union", "union_all", - "coverage_union", - "coverage_union_all", ] @overload @@ -84,3 +86,11 @@ def coverage_union(a: OptGeoArrayLike, b: OptGeoArrayLike, *, axis: int, **kwarg def coverage_union_all(geometries: OptGeoArrayLike, axis: None = None, **kwargs) -> BaseGeometry: ... @overload def coverage_union_all(geometries: OptGeoArrayLikeSeq, axis: int, **kwargs) -> BaseGeometry | GeoArray: ... +@overload +def disjoint_subset_union(a: OptGeoArrayLike, b: OptGeoArrayLike, *, axis: None = None, **kwargs) -> BaseGeometry: ... +@overload +def disjoint_subset_union(a: OptGeoArrayLike, b: OptGeoArrayLike, *, axis: int, **kwargs) -> BaseGeometry | GeoArray: ... +@overload +def disjoint_subset_union_all(geometries: OptGeoArrayLike, *, axis: None = None, **kwargs) -> BaseGeometry: ... +@overload +def disjoint_subset_union_all(geometries: OptGeoArrayLikeSeq, *, axis: int, **kwargs) -> BaseGeometry | GeoArray: ... 
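
The shapely hunks above track functions that shapely 2.1 exposes at the top level (constrained_delaunay_triangles, maximum_inscribed_circle, minimum_clearance_line, orient_polygons, equals_identical, disjoint_subset_union, the ordered flag on voronoi_polygons). A minimal usage sketch of a few of the newly stubbed signatures, assuming shapely >= 2.1 at runtime; the coordinates and variable names below are illustrative only and are not taken from the stubs or their tests:

from shapely import Polygon, constrained_delaunay_triangles, equals_exact, orient_polygons

square = Polygon([(0, 0), (0, 2), (2, 2), (2, 0)])

# constrained_delaunay_triangles(Geometry) returns a GeometryCollection per the new overloads
triangles = constrained_delaunay_triangles(square)

# exterior_cw is keyword-only, matching the orient_polygons stub
cw_square = orient_polygons(square, exterior_cw=True)

# equals_exact now also accepts the keyword-only normalize flag
same = equals_exact(square, cw_square, tolerance=0.0, normalize=True)
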
From 4d3a178bd9082aa7331bd41649a89104bca8a45b Mon Sep 17 00:00:00 2001 From: Noelle Leigh <5957867+noelleleigh@users.noreply.github.com> Date: Fri, 18 Apr 2025 15:32:16 -0400 Subject: [PATCH 236/388] `stdlib/unittest/mock.pyi`: Improve `_Call` types (#13845) This fixes a number of issues related to `_Call` in `stdlib/unittest/mock.pyi`: - `_Call.__new__()`, `_Call.__init__()`: The `parent` argument should be another `_Call` or `None`. - `_Call.name` doesn't exist. - `_Call.parent` doesn't exist. - `_Call.from_kall` doesn't exist. - [`NonCallableMock.call_args`][0] should be a `_Call` or `None`. [0]: https://docs.python.org/3/library/unittest.mock.html#unittest.mock.Mock.call_args --- stdlib/unittest/mock.pyi | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/stdlib/unittest/mock.pyi b/stdlib/unittest/mock.pyi index d2664465097f..9e353900f2d7 100644 --- a/stdlib/unittest/mock.pyi +++ b/stdlib/unittest/mock.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import MaybeNone from collections.abc import Awaitable, Callable, Coroutine, Iterable, Mapping, Sequence from contextlib import _GeneratorContextManager from types import TracebackType @@ -69,16 +70,13 @@ _CallValue: TypeAlias = str | tuple[Any, ...] | Mapping[str, Any] | _ArgsKwargs class _Call(tuple[Any, ...]): def __new__( - cls, value: _CallValue = (), name: str | None = "", parent: Any | None = None, two: bool = False, from_kall: bool = True + cls, value: _CallValue = (), name: str | None = "", parent: _Call | None = None, two: bool = False, from_kall: bool = True ) -> Self: ... - name: Any - parent: Any - from_kall: Any def __init__( self, value: _CallValue = (), name: str | None = None, - parent: Any | None = None, + parent: _Call | None = None, two: bool = False, from_kall: bool = True, ) -> None: ... @@ -162,7 +160,7 @@ class NonCallableMock(Base, Any): side_effect: Any called: bool call_count: int - call_args: Any + call_args: _Call | MaybeNone call_args_list: _CallList mock_calls: _CallList def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... 
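
The behaviour that the tightened `_Call` annotations describe is easy to check interactively; the snippet below only illustrates what the new types promise and is not part of the patch:

from unittest.mock import Mock, call

m = Mock()
assert m.call_args is None                    # no call yet, so call_args is None

m(1, key="value")
assert m.call_args == call(1, key="value")    # now a _Call instance
args, kwargs = m.call_args                    # _Call is a tuple subclass
assert args == (1,) and kwargs == {"key": "value"}
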
From 7c5727482c42af3531bb1770a3853a2e1e38b77f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 13:54:39 -0400 Subject: [PATCH 237/388] [stubsabot] Bump setuptools to 79.0.* (#13860) Co-authored-by: stubsabot <> --- stubs/setuptools/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index d8a876935bfe..7532fd1a3dce 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,4 +1,4 @@ -version = "78.1.*" +version = "79.0.*" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ From e318a981234cc0ab1f2820830b8ca7c71d8458d9 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 22 Apr 2025 03:58:53 -0400 Subject: [PATCH 238/388] Properly mark deprecated methods in threading (#13861) --- stdlib/threading.pyi | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/stdlib/threading.pyi b/stdlib/threading.pyi index e3965fab0e80..99f5c8d2a516 100644 --- a/stdlib/threading.pyi +++ b/stdlib/threading.pyi @@ -5,6 +5,7 @@ from _typeshed import ProfileFunction, TraceFunction from collections.abc import Callable, Iterable, Mapping from types import TracebackType from typing import Any, TypeVar, final +from typing_extensions import deprecated _T = TypeVar("_T") @@ -44,9 +45,11 @@ if sys.version_info >= (3, 12): _profile_hook: ProfileFunction | None def active_count() -> int: ... -def activeCount() -> int: ... # deprecated alias for active_count() +@deprecated("Use active_count() instead") +def activeCount() -> int: ... def current_thread() -> Thread: ... -def currentThread() -> Thread: ... # deprecated alias for current_thread() +@deprecated("Use current_thread() instead") +def currentThread() -> Thread: ... def get_ident() -> int: ... def enumerate() -> list[Thread]: ... def main_thread() -> Thread: ... @@ -89,11 +92,14 @@ class Thread: @property def native_id(self) -> int | None: ... # only available on some platforms def is_alive(self) -> bool: ... - # the following methods are all deprecated - def getName(self) -> str: ... - def setName(self, name: str) -> None: ... + @deprecated("Get the daemon attribute instead") def isDaemon(self) -> bool: ... + @deprecated("Set the daemon attribute instead") def setDaemon(self, daemonic: bool) -> None: ... + @deprecated("Use the name attribute instead") + def getName(self) -> str: ... + @deprecated("Use the name attribute instead") + def setName(self, name: str) -> None: ... class _DummyThread(Thread): def __init__(self) -> None: ... @@ -124,7 +130,8 @@ class Condition: def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... def notify(self, n: int = 1) -> None: ... def notify_all(self) -> None: ... - def notifyAll(self) -> None: ... # deprecated alias for notify_all() + @deprecated("Use notify_all() instead") + def notifyAll(self) -> None: ... class Semaphore: _value: int @@ -138,7 +145,8 @@ class BoundedSemaphore(Semaphore): ... class Event: def is_set(self) -> bool: ... - def isSet(self) -> bool: ... # deprecated alias for is_set() + @deprecated("Use is_set() instead") + def isSet(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... def wait(self, timeout: float | None = None) -> bool: ... 
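
With `@deprecated` on the aliases, PEP 702-aware type checkers now flag the camelCase spellings directly, so migrating is a mechanical rename. A small sketch, for illustration only:

import threading

# Spellings reported as deprecated after this change:
#   threading.activeCount(), threading.currentThread(),
#   Event.isSet(), Condition.notifyAll(), Thread.getName()/setName()/isDaemon()/setDaemon()

count = threading.active_count()
current = threading.current_thread()

event = threading.Event()
if not event.is_set():
    event.set()
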
From 33019f8d81970bdf955bafe4e81ad07137e92ba6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 22 Apr 2025 22:02:25 -0700 Subject: [PATCH 239/388] [stubsabot] Bump mypy-extensions to 1.1.* (#13868) Release: https://pypi.org/pypi/mypy-extensions/1.1.0 Homepage: https://github.com/python/mypy_extensions Repository: https://github.com/python/mypy_extensions Typeshed stubs: https://github.com/python/typeshed/tree/main/stubs/mypy-extensions Diff: https://github.com/python/mypy_extensions/compare/1.0.0...1.1.0 Stubsabot analysis of the diff between the two releases: - 0 public Python files have been added. - 0 files included in typeshed's stubs have been deleted. - 1 file included in typeshed's stubs has been modified or renamed: `mypy_extensions.py`. - Total lines of Python code added: 103. - Total lines of Python code deleted: 80. If stubtest fails for this PR: - Leave this PR open (as a reminder, and to prevent stubsabot from opening another PR) - Fix stubtest failures in another PR, then close this PR Note that you will need to close and re-open the PR in order to trigger CI Co-authored-by: stubsabot <> --- stubs/mypy-extensions/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/mypy-extensions/METADATA.toml b/stubs/mypy-extensions/METADATA.toml index b0da8c5ae8ad..7d329474c424 100644 --- a/stubs/mypy-extensions/METADATA.toml +++ b/stubs/mypy-extensions/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.0.*" +version = "1.1.*" upstream_repository = "https://github.com/python/mypy_extensions" From 72934aee8d729170cc973ddcaa7dbb36b3d4f87d Mon Sep 17 00:00:00 2001 From: sobolevn Date: Thu, 24 Apr 2025 01:37:20 +0300 Subject: [PATCH 240/388] Deprecate several things in `mypy_extensions` (#13874) See https://github.com/python/mypy_extensions/compare/1.0.0...1.1.0 on what was deprecated. --- stubs/mypy-extensions/mypy_extensions.pyi | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/stubs/mypy-extensions/mypy_extensions.pyi b/stubs/mypy-extensions/mypy_extensions.pyi index 0bba94c3407e..78659fb9794d 100644 --- a/stubs/mypy-extensions/mypy_extensions.pyi +++ b/stubs/mypy-extensions/mypy_extensions.pyi @@ -3,7 +3,7 @@ from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Unused from collections.abc import Mapping from typing import Any, ClassVar, Generic, TypeVar, overload, type_check_only -from typing_extensions import Never, Self +from typing_extensions import Never, Self, deprecated _T = TypeVar("_T") _U = TypeVar("_U") @@ -11,6 +11,7 @@ _U = TypeVar("_U") # Internal mypy fallback type for all typed dicts (does not exist at runtime) # N.B. Keep this mostly in sync with typing(_extensions)._TypedDict @type_check_only +@deprecated("mypy_extensions._TypedDict package is deprecated") class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): __total__: ClassVar[bool] # Unlike typing(_extensions).TypedDict, @@ -37,6 +38,7 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): # supposedly incompatible definitions of `__or__` and `__ior__`: def __ior__(self, value: Self, /) -> Self: ... # type: ignore[misc] +@deprecated("mypy_extensions.TypedDict is deprecated, use typing.TypedDict instead") def TypedDict(typename: str, fields: dict[str, type[Any]], total: bool = ...) -> type[dict[str, Any]]: ... @overload def Arg(type: _T, name: str | None = ...) -> _T: ... @@ -65,6 +67,7 @@ def KwArg() -> Any: ... 
# Return type that indicates a function does not return. # Deprecated: Use typing.NoReturn instead. +@deprecated("mypy_extensions.NoReturn is deprecated, use typing.NoReturn or typing.Never instead") class NoReturn: ... # This is consistent with implementation. Usage intends for this as From 91c31b92670f34ecab1514c24b665ad8562e2d86 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Thu, 24 Apr 2025 14:08:21 +0400 Subject: [PATCH 241/388] Complete `flake8-docstrings` (#13872) --- stubs/flake8-docstrings/METADATA.toml | 5 +---- stubs/flake8-docstrings/flake8_docstrings.pyi | 17 ++++++++++------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/stubs/flake8-docstrings/METADATA.toml b/stubs/flake8-docstrings/METADATA.toml index 10586ebd1aad..8d4722b27d0c 100644 --- a/stubs/flake8-docstrings/METADATA.toml +++ b/stubs/flake8-docstrings/METADATA.toml @@ -1,6 +1,3 @@ version = "1.7.*" upstream_repository = "https://github.com/pycqa/flake8-docstrings" -partial_stub = true - -[tool.stubtest] -ignore_missing_stub = true +requires = ["types-flake8"] diff --git a/stubs/flake8-docstrings/flake8_docstrings.pyi b/stubs/flake8-docstrings/flake8_docstrings.pyi index 49d96ed6e0ee..0c9f9344a561 100644 --- a/stubs/flake8-docstrings/flake8_docstrings.pyi +++ b/stubs/flake8-docstrings/flake8_docstrings.pyi @@ -1,21 +1,24 @@ import argparse import ast -from _typeshed import Incomplete from collections.abc import Generator, Iterable -from typing import Any, ClassVar +from typing import Any, ClassVar, Final, Literal +from typing_extensions import Self + +from flake8.options.manager import OptionManager + +__version__: Final[str] +__all__ = ("pep257Checker",) class pep257Checker: name: ClassVar[str] version: ClassVar[str] tree: ast.AST filename: str - checker: Any + checker: Any # actual type: pep257.ConventionChecker source: str def __init__(self, tree: ast.AST, filename: str, lines: Iterable[str]) -> None: ... @classmethod - def add_options(cls, parser: Any) -> None: ... + def add_options(cls, parser: OptionManager) -> None: ... @classmethod def parse_options(cls, options: argparse.Namespace) -> None: ... - def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... - -def __getattr__(name: str) -> Incomplete: ... + def run(self) -> Generator[tuple[int, Literal[0], str, type[Self]]]: ... From 8823039d2fed8b6f8cbf28234386eb2cfda95477 Mon Sep 17 00:00:00 2001 From: sobolevn Date: Thu, 24 Apr 2025 13:08:36 +0300 Subject: [PATCH 242/388] Update `mypy_extensions` deprecation messages (#13876) CC @AlexWaygood --- stubs/mypy-extensions/mypy_extensions.pyi | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stubs/mypy-extensions/mypy_extensions.pyi b/stubs/mypy-extensions/mypy_extensions.pyi index 78659fb9794d..d7e593b971bc 100644 --- a/stubs/mypy-extensions/mypy_extensions.pyi +++ b/stubs/mypy-extensions/mypy_extensions.pyi @@ -11,7 +11,7 @@ _U = TypeVar("_U") # Internal mypy fallback type for all typed dicts (does not exist at runtime) # N.B. Keep this mostly in sync with typing(_extensions)._TypedDict @type_check_only -@deprecated("mypy_extensions._TypedDict package is deprecated") +@deprecated("Use typing._TypedDict instead") class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): __total__: ClassVar[bool] # Unlike typing(_extensions).TypedDict, @@ -38,7 +38,7 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): # supposedly incompatible definitions of `__or__` and `__ior__`: def __ior__(self, value: Self, /) -> Self: ... 
# type: ignore[misc] -@deprecated("mypy_extensions.TypedDict is deprecated, use typing.TypedDict instead") +@deprecated("Use typing.TypedDict or typing_extensions.TypedDict instead") def TypedDict(typename: str, fields: dict[str, type[Any]], total: bool = ...) -> type[dict[str, Any]]: ... @overload def Arg(type: _T, name: str | None = ...) -> _T: ... @@ -67,7 +67,7 @@ def KwArg() -> Any: ... # Return type that indicates a function does not return. # Deprecated: Use typing.NoReturn instead. -@deprecated("mypy_extensions.NoReturn is deprecated, use typing.NoReturn or typing.Never instead") +@deprecated("Use typing.NoReturn or typing.Never instead") class NoReturn: ... # This is consistent with implementation. Usage intends for this as From 2fc76d81f78e68f6c0dba0bf302808fc226e65d7 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Thu, 24 Apr 2025 15:08:01 +0400 Subject: [PATCH 243/388] Mark `flake8-rst-docstrings` as complete (#13867) --- stubs/flake8-rst-docstrings/METADATA.toml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/stubs/flake8-rst-docstrings/METADATA.toml b/stubs/flake8-rst-docstrings/METADATA.toml index 2b76dde36f88..039b40c8b0e6 100644 --- a/stubs/flake8-rst-docstrings/METADATA.toml +++ b/stubs/flake8-rst-docstrings/METADATA.toml @@ -1,6 +1,2 @@ version = "0.3.*" upstream_repository = "https://github.com/peterjc/flake8-rst-docstrings" -partial_stub = true - -[tool.stubtest] -ignore_missing_stub = true From 9e4b8d9fd8427afb04cd487be9fefcfa65b5eafc Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Thu, 24 Apr 2025 15:10:38 +0400 Subject: [PATCH 244/388] Mark some `importlib` interfaces as deprecated (#13866) --- stdlib/_frozen_importlib_external.pyi | 5 ++++- stdlib/importlib/abc.pyi | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/stdlib/_frozen_importlib_external.pyi b/stdlib/_frozen_importlib_external.pyi index 386cf20808e4..463b4087f6b6 100644 --- a/stdlib/_frozen_importlib_external.pyi +++ b/stdlib/_frozen_importlib_external.pyi @@ -36,7 +36,10 @@ def spec_from_file_location( loader: LoaderProtocol | None = None, submodule_search_locations: list[str] | None = ..., ) -> importlib.machinery.ModuleSpec | None: ... - +@deprecated( + "Deprecated as of Python 3.6: Use site configuration instead. " + "Future versions of Python may not enable this finder by default." +) class WindowsRegistryFinder(importlib.abc.MetaPathFinder): if sys.version_info < (3, 12): @classmethod diff --git a/stdlib/importlib/abc.pyi b/stdlib/importlib/abc.pyi index 8a106b3a64d7..3016a3a43b36 100644 --- a/stdlib/importlib/abc.pyi +++ b/stdlib/importlib/abc.pyi @@ -8,6 +8,7 @@ from importlib import _bootstrap_external from importlib.machinery import ModuleSpec from io import BufferedReader from typing import IO, Any, Literal, Protocol, overload, runtime_checkable +from typing_extensions import deprecated if sys.version_info >= (3, 11): __all__ = [ @@ -38,6 +39,7 @@ else: if sys.version_info < (3, 12): class Finder(metaclass=ABCMeta): ... +@deprecated("Deprecated as of Python 3.7: Use importlib.resources.abc.TraversableResources instead.") class ResourceLoader(Loader): @abstractmethod def get_data(self, path: str) -> bytes: ... @@ -58,6 +60,7 @@ class ExecutionLoader(InspectLoader): def get_filename(self, fullname: str) -> str: ... 
class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLoader, metaclass=ABCMeta): # type: ignore[misc] # incompatible definitions of source_to_code in the base classes + @deprecated("Deprecated as of Python 3.3: Use importlib.resources.abc.SourceLoader.path_stats instead.") def path_mtime(self, path: str) -> float: ... def set_data(self, path: str, data: bytes) -> None: ... def get_source(self, fullname: str) -> str | None: ... From 03cfb9fe863680b8775b01594c63d24075a6b22a Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Thu, 24 Apr 2025 15:11:45 +0400 Subject: [PATCH 245/388] Deprecate undocumented `pydoc.ispackage` (#13865) --- stdlib/pydoc.pyi | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/stdlib/pydoc.pyi b/stdlib/pydoc.pyi index 144f782acad5..f14b9d1bb699 100644 --- a/stdlib/pydoc.pyi +++ b/stdlib/pydoc.pyi @@ -6,7 +6,7 @@ from collections.abc import Callable, Container, Mapping, MutableMapping from reprlib import Repr from types import MethodType, ModuleType, TracebackType from typing import IO, Any, AnyStr, Final, NoReturn, Protocol, TypeVar -from typing_extensions import TypeGuard +from typing_extensions import TypeGuard, deprecated __all__ = ["help"] @@ -31,7 +31,14 @@ def stripid(text: str) -> str: ... def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: ... def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: ... -def ispackage(path: str) -> bool: ... + +if sys.version_info >= (3, 13): + @deprecated("Deprecated in Python 3.13.") + def ispackage(path: str) -> bool: ... + +else: + def ispackage(path: str) -> bool: ... + def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: ... def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = {}) -> str | None: ... 
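
Callers of the undocumented `pydoc.ispackage()` will now see the deprecation on 3.13+. Since the function only checks a directory for an `__init__.py`/`__init__.pyc`, a private helper is an easy replacement; `_is_package` below is a hypothetical stand-in written for illustration, not an API offered by pydoc or by this patch:

import os


def _is_package(path: str) -> bool:
    # Mirrors what pydoc.ispackage() has done: a directory containing __init__.py or __init__.pyc
    if not os.path.isdir(path):
        return False
    return any(os.path.isfile(os.path.join(path, "__init__" + ext)) for ext in (".py", ".pyc"))


print(_is_package("./some_project/mypkg"))  # illustrative path
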
From e29feb182c8c26f37e50bc7e2a07a1f65eaf04c6 Mon Sep 17 00:00:00 2001 From: Andrew Riachi Date: Thu, 24 Apr 2025 06:13:15 -0500 Subject: [PATCH 246/388] [PyScreeze] Fix locateCenterOnScreen optional arguments (#13852) --- stubs/PyScreeze/pyscreeze/__init__.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stubs/PyScreeze/pyscreeze/__init__.pyi b/stubs/PyScreeze/pyscreeze/__init__.pyi index 538982f05ab9..1405bfba0f23 100644 --- a/stubs/PyScreeze/pyscreeze/__init__.pyi +++ b/stubs/PyScreeze/pyscreeze/__init__.pyi @@ -127,7 +127,7 @@ def locateAllOnScreen( def locateCenterOnScreen( image: str | Image.Image | _MatLike, *, - minSearchTime: float, + minSearchTime: float = 0, grayscale: bool | None = None, limit: Unused = 1, region: tuple[int, int, int, int] | None = None, @@ -140,7 +140,7 @@ def locateCenterOnScreen( def locateCenterOnScreen( image: str | Image.Image, *, - minSearchTime: float, + minSearchTime: float = 0, grayscale: bool | None = None, limit: Unused = 1, region: tuple[int, int, int, int] | None = None, From 9f7afa0924527cdb38b1ccf36b542de2f0a6503a Mon Sep 17 00:00:00 2001 From: Julen Costa Watanabe <62463972+julencw@users.noreply.github.com> Date: Thu, 24 Apr 2025 13:56:57 +0200 Subject: [PATCH 247/388] [tensorflow] Fix literal value for `pad` function (#13877) --- stubs/tensorflow/tensorflow/__init__.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/tensorflow/tensorflow/__init__.pyi b/stubs/tensorflow/tensorflow/__init__.pyi index 873a2b860f0e..298bc5d834e0 100644 --- a/stubs/tensorflow/tensorflow/__init__.pyi +++ b/stubs/tensorflow/tensorflow/__init__.pyi @@ -417,7 +417,7 @@ def reshape(tensor: TensorCompatible, shape: ShapeLike | Tensor, name: str | Non def pad( tensor: TensorCompatible, paddings: Tensor | IntArray | Iterable[Iterable[int]], - mode: Literal["CONSTANT", "constant", "REFLECT", "reflect", "SYMMETRIC", "symmectric"] = "CONSTANT", + mode: Literal["CONSTANT", "constant", "REFLECT", "reflect", "SYMMETRIC", "symmetric"] = "CONSTANT", constant_values: ScalarTensorCompatible = 0, name: str | None = None, ) -> Tensor: ... 
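
The literal fix above makes the lowercase spelling "symmetric" acceptable to the stubs, matching TensorFlow's case-insensitive handling of the mode argument. A quick sketch, assuming TensorFlow 2.x is installed; tensor values are illustrative:

import tensorflow as tf

x = tf.constant([[1, 2], [3, 4]])
paddings = [[1, 1], [1, 1]]

a = tf.pad(x, paddings, mode="SYMMETRIC")
b = tf.pad(x, paddings, mode="symmetric")  # previously rejected because the stub expected "symmectric"
assert bool(tf.reduce_all(a == b))
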
From e29a37deeefc912bd1b4a042f13e7e3f8dce94a1 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Thu, 24 Apr 2025 15:58:30 +0400 Subject: [PATCH 248/388] Expand args and kwargs for `click_default_group` methods (#13873) --- pyrightconfig.stricter.json | 1 - stubs/click-default-group/METADATA.toml | 2 +- .../click_default_group.pyi | 85 ++++++++++++++----- 3 files changed, 64 insertions(+), 24 deletions(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 54d64e5f1529..1a9951c48fd5 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -33,7 +33,6 @@ "stubs/braintree", "stubs/caldav", "stubs/cffi", - "stubs/click-default-group", "stubs/click-web", "stubs/corus", "stubs/dateparser", diff --git a/stubs/click-default-group/METADATA.toml b/stubs/click-default-group/METADATA.toml index 6475e9bc71da..713e657a8635 100644 --- a/stubs/click-default-group/METADATA.toml +++ b/stubs/click-default-group/METADATA.toml @@ -1,4 +1,4 @@ version = "1.2.*" +upstream_repository = "https://github.com/click-contrib/click-default-group" # requires a version of click with a py.typed requires = ["click>=8.0.0"] -upstream_repository = "https://github.com/click-contrib/click-default-group" diff --git a/stubs/click-default-group/click_default_group.pyi b/stubs/click-default-group/click_default_group.pyi index 83541f3cfbbf..c8b46e4dabd7 100644 --- a/stubs/click-default-group/click_default_group.pyi +++ b/stubs/click-default-group/click_default_group.pyi @@ -1,42 +1,83 @@ -from _typeshed import Incomplete -from collections.abc import Sequence +from collections.abc import Callable, MutableMapping, Sequence +from typing import Any, Final, Literal, overload +from typing_extensions import deprecated import click __all__ = ["DefaultGroup"] -__version__: str +__version__: Final[str] class DefaultGroup(click.Group): ignore_unknown_options: bool default_cmd_name: str | None default_if_no_args: bool - def __init__(self, *args, **kwargs) -> None: ... + # type hints were taken from click lib + def __init__( + self, + name: str | None = None, + commands: MutableMapping[str, click.Command] | Sequence[click.Command] | None = None, + *, + ignore_unknown_options: Literal[True] | None = True, + default_cmd_name: str | None = None, + default_if_no_args: bool = False, + invoke_without_command: bool = False, + no_args_is_help: bool | None = None, + subcommand_metavar: str | None = None, + chain: bool = False, + result_callback: Callable[..., Any] | None = None, # Any is specified in click lib + context_settings: MutableMapping[str, Any] | None = None, # Any is specified in click lib + callback: Callable[..., Any] | None = None, # Any is specified in click lib + params: list[click.Parameter] | None = None, + help: str | None = None, + epilog: str | None = None, + short_help: str | None = None, + options_metavar: str | None = "[OPTIONS]", + add_help_option: bool = True, + hidden: bool = False, + deprecated: bool = False, + ) -> None: ... def set_default_command(self, command: click.Command) -> None: ... def parse_args(self, ctx: click.Context, args: list[str]) -> list[str]: ... def get_command(self, ctx: click.Context, cmd_name: str) -> click.Command | None: ... def resolve_command(self, ctx: click.Context, args: list[str]) -> tuple[str | None, click.Command | None, list[str]]: ... def format_commands(self, ctx: click.Context, formatter: click.HelpFormatter) -> None: ... - def command(self, *args, **kwargs) -> click.Command: ... 
# incomplete + @overload + def command( + self, + __func: Callable[..., Any], + /, + *, + name: str | None = ..., + cls: type[click.Command] | None = ..., + default: Literal[False] = False, + ) -> click.Command: ... + @overload + @deprecated("Use default param of `DefaultGroup` or `set_default_command()` instead") + def command( + self, + __func: Callable[..., Any], + /, + *, + name: str | None = ..., + cls: type[click.Command] | None = ..., + default: Literal[True], + ) -> click.Command: ... + @overload + def command( + self, *, name: str | None = ..., cls: type[click.Command] | None = ..., default: Literal[False] = False + ) -> Callable[[Callable[..., Any]], click.Command]: ... + @overload + @deprecated("Use default param of `DefaultGroup` or `set_default_command()` instead") + def command( + self, *, name: str | None = ..., cls: type[click.Command] | None = ..., default: Literal[True] + ) -> Callable[[Callable[..., Any]], click.Command]: ... + @overload + def command(self, *args: Any, **kwargs: Any) -> Callable[[Callable[..., Any]], click.Command] | click.Command: ... class DefaultCommandFormatter: group: click.Group formatter: click.HelpFormatter mark: str - def __init__(self, group: click.Group, formatter: click.HelpFormatter, mark: str = ...) -> None: ... + def __init__(self, group: click.Group, formatter: click.HelpFormatter, mark: str = "*") -> None: ... def write_dl(self, rows: Sequence[tuple[str, str]], col_max: int = 30, col_spacing: int = -2) -> None: ... - def __getattr__(self, attr: str) -> Incomplete: ... - # __getattr__ used to ala-derive from click.HelpFormatter: - # indent_increment: int - # width: int | None - # current_indent: int - # buffer: t.List[str] - # def write(self, string: str) -> None: ... - # def indent(self) -> None: ... - # def dedent(self) -> None: ... - # def write_usage(self, prog: str, args: str = ..., prefix: str | None = ...) -> None: ... - # def write_heading(self, heading: str) -> None: ... - # def write_paragraph(self) -> None: ... - # def write_text(self, text: str) -> None: ... - # def section(self, name: str) -> t.Iterator[None]: ... - # def indentation(self) -> t.Iterator[None]: ... - # def getvalue(self) -> str: ... + def __getattr__(self, attr: str) -> Any: ... 
# attribute access is forwarded to click.HelpFormatter From 348f41ccf8103395991e25379aec6edd3b9707f3 Mon Sep 17 00:00:00 2001 From: David Gilman Date: Thu, 24 Apr 2025 15:12:12 -0700 Subject: [PATCH 249/388] [auth0-python] Add async functions to AsyncAuth0 (#13799) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Avasam --- .../auth0/management/async_auth0.pyi | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/stubs/auth0-python/auth0/management/async_auth0.pyi b/stubs/auth0-python/auth0/management/async_auth0.pyi index 5af87901a440..f73b2a97fbd7 100644 --- a/stubs/auth0-python/auth0/management/async_auth0.pyi +++ b/stubs/auth0-python/auth0/management/async_auth0.pyi @@ -3,6 +3,37 @@ from typing_extensions import Self from auth0.rest import RestClientOptions +from .actions import Actions +from .attack_protection import AttackProtection +from .blacklists import Blacklists +from .branding import Branding +from .client_credentials import ClientCredentials +from .client_grants import ClientGrants +from .clients import Clients +from .connections import Connections +from .custom_domains import CustomDomains +from .device_credentials import DeviceCredentials +from .email_templates import EmailTemplates +from .emails import Emails +from .grants import Grants +from .guardian import Guardian +from .hooks import Hooks +from .jobs import Jobs +from .log_streams import LogStreams +from .logs import Logs +from .organizations import Organizations +from .prompts import Prompts +from .resource_servers import ResourceServers +from .roles import Roles +from .rules import Rules +from .rules_configs import RulesConfigs +from .stats import Stats +from .tenants import Tenants +from .tickets import Tickets +from .user_blocks import UserBlocks +from .users import Users +from .users_by_email import UsersByEmail + class AsyncAuth0: def __init__(self, domain: str, token: str, rest_options: RestClientOptions | None = None) -> None: ... def set_session(self, session) -> None: ... @@ -10,3 +41,36 @@ class AsyncAuth0: async def __aexit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... 
+ + # Same attributes as Auth0 + # See note in stubs/auth0-python/@tests/stubtest_allowlist.txt about _async methods + actions: Actions + attack_protection: AttackProtection + blacklists: Blacklists + branding: Branding + client_credentials: ClientCredentials + client_grants: ClientGrants + clients: Clients + connections: Connections + custom_domains: CustomDomains + device_credentials: DeviceCredentials + email_templates: EmailTemplates + emails: Emails + grants: Grants + guardian: Guardian + hooks: Hooks + jobs: Jobs + log_streams: LogStreams + logs: Logs + organizations: Organizations + prompts: Prompts + resource_servers: ResourceServers + roles: Roles + rules_configs: RulesConfigs + rules: Rules + stats: Stats + tenants: Tenants + tickets: Tickets + user_blocks: UserBlocks + users_by_email: UsersByEmail + users: Users From 9470e7a2fcc3f285dc33faf125b0e634504509f6 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 25 Apr 2025 07:47:55 +0100 Subject: [PATCH 250/388] Update dependency pyright to v1.1.400 (#13882) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- requirements-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 8c6fc56cca4d..a81e7cf21f89 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,7 +1,7 @@ # Type checkers that we test our stubs against. These should always # be pinned to a specific version to make failure reproducible. mypy==1.15.0 -pyright==1.1.399 +pyright==1.1.400 # pytype can be installed on Windows, but requires building wheels, let's not do that on the CI pytype==2024.10.11; platform_system != "Windows" and python_version >= "3.10" and python_version < "3.13" From 91791cee36781246e2993e024969617272c68e3a Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 25 Apr 2025 12:26:21 +0400 Subject: [PATCH 251/388] Complete `ttkthemes` (#13858) --- pyrightconfig.stricter.json | 1 - stdlib/tkinter/__init__.pyi | 1 + stubs/ttkthemes/ttkthemes/_imgops.pyi | 9 +++- stubs/ttkthemes/ttkthemes/_utils.pyi | 13 ++--- stubs/ttkthemes/ttkthemes/_widget.pyi | 2 +- stubs/ttkthemes/ttkthemes/themed_style.pyi | 2 +- stubs/ttkthemes/ttkthemes/themed_tk.pyi | 62 +++++++++++++++++++--- 7 files changed, 71 insertions(+), 19 deletions(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 1a9951c48fd5..7e9a54bca9af 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -90,7 +90,6 @@ "stubs/shapely", "stubs/tensorflow", "stubs/tqdm", - "stubs/ttkthemes", "stubs/vobject", "stubs/workalendar", "stubs/wurlitzer", diff --git a/stdlib/tkinter/__init__.pyi b/stdlib/tkinter/__init__.pyi index 291e2fc5108f..dcac61d77e0a 100644 --- a/stdlib/tkinter/__init__.pyi +++ b/stdlib/tkinter/__init__.pyi @@ -977,6 +977,7 @@ class Tk(Misc, Wm): sync: bool = False, use: str | None = None, ) -> None: ... + # Keep this in sync with ttktheme.ThemedTk. See issue #13858 @overload def configure( self, diff --git a/stubs/ttkthemes/ttkthemes/_imgops.pyi b/stubs/ttkthemes/ttkthemes/_imgops.pyi index 1c0b15f1ee7e..3322a352372a 100644 --- a/stubs/ttkthemes/ttkthemes/_imgops.pyi +++ b/stubs/ttkthemes/ttkthemes/_imgops.pyi @@ -1,2 +1,7 @@ -def shift_hue(image, hue): ... -def make_transparent(image): ... 
+from typing import Any +from typing_extensions import TypeAlias + +_Image: TypeAlias = Any # actually PIL.Image, but not worth adding a dependency + +def shift_hue(image: _Image, hue: float) -> _Image: ... +def make_transparent(image: _Image) -> _Image: ... diff --git a/stubs/ttkthemes/ttkthemes/_utils.pyi b/stubs/ttkthemes/ttkthemes/_utils.pyi index 20fc39fae22e..0c05434a63d1 100644 --- a/stubs/ttkthemes/ttkthemes/_utils.pyi +++ b/stubs/ttkthemes/ttkthemes/_utils.pyi @@ -1,7 +1,8 @@ -from _typeshed import Incomplete +from _typeshed import FileDescriptorOrPath, StrOrBytesPath +from contextlib import AbstractContextManager -def temporary_chdir(new_dir) -> None: ... -def get_file_directory(): ... -def get_temp_directory(): ... -def get_themes_directory(theme_name: Incomplete | None = None, png: bool = False): ... -def create_directory(directory): ... +def temporary_chdir(new_dir: FileDescriptorOrPath) -> AbstractContextManager[None]: ... +def get_file_directory() -> str: ... +def get_temp_directory() -> str: ... +def get_themes_directory(theme_name: str | None = None, png: bool = False) -> str: ... +def create_directory(directory: StrOrBytesPath) -> StrOrBytesPath: ... diff --git a/stubs/ttkthemes/ttkthemes/_widget.pyi b/stubs/ttkthemes/ttkthemes/_widget.pyi index 891e7b747aeb..d5bf4d3ee151 100644 --- a/stubs/ttkthemes/ttkthemes/_widget.pyi +++ b/stubs/ttkthemes/ttkthemes/_widget.pyi @@ -7,7 +7,7 @@ class ThemedWidget: PACKAGES: ClassVar[dict[str, str]] tk: _tkinter.TkappType png_support: bool - def __init__(self, tk_interpreter, gif_override: bool = False) -> None: ... + def __init__(self, tk_interpreter: _tkinter.TkappType, gif_override: bool = False) -> None: ... def set_theme(self, theme_name: str) -> None: ... def get_themes(self) -> list[str]: ... @property diff --git a/stubs/ttkthemes/ttkthemes/themed_style.pyi b/stubs/ttkthemes/ttkthemes/themed_style.pyi index e880f0718dc2..5821b620b155 100644 --- a/stubs/ttkthemes/ttkthemes/themed_style.pyi +++ b/stubs/ttkthemes/ttkthemes/themed_style.pyi @@ -5,7 +5,7 @@ from ._widget import ThemedWidget class ThemedStyle(ttk.Style, ThemedWidget): def __init__( - self, master: tkinter.Misc | None = ..., *, theme: str | None = ..., gif_override: bool | None = ..., **kwargs + self, master: tkinter.Misc | None = ..., *, theme: str | None = None, gif_override: bool | None = False ) -> None: ... # theme_use() can't return None (differs from ttk.Style) def theme_use(self, theme_name: str | None = None) -> str: ... # type: ignore[override] diff --git a/stubs/ttkthemes/ttkthemes/themed_tk.pyi b/stubs/ttkthemes/ttkthemes/themed_tk.pyi index 8bf6ac4c6e5f..5b826ad71b3b 100644 --- a/stubs/ttkthemes/ttkthemes/themed_tk.pyi +++ b/stubs/ttkthemes/ttkthemes/themed_tk.pyi @@ -1,5 +1,5 @@ import tkinter -from _typeshed import Incomplete +from typing import Any from ._widget import ThemedWidget @@ -21,10 +21,56 @@ class ThemedTk(tkinter.Tk, ThemedWidget): background: bool | None = ..., # old alias for themebg gif_override: bool = ..., ) -> None: ... - def set_theme(self, theme_name, toplevel: bool | None = None, themebg: bool | None = None) -> None: ... - # TODO: currently no good way to say "use the same big list of kwargs as parent class but also add these" - def config(self, kw: Incomplete | None = None, **kwargs): ... # type: ignore[override] - def cget(self, k): ... - def configure(self, kw: Incomplete | None = None, **kwargs): ... # type: ignore[override] - def __getitem__(self, k): ... - def __setitem__(self, k, v) -> None: ... 
+ def set_theme(self, theme_name: str, toplevel: bool | None = None, themebg: bool | None = None) -> None: ... + # Keep this in sync with tkinter.Tk + def config( # type: ignore[override] + self, + kw: dict[str, Any] | None = None, + *, + themebg: bool | None = ..., + toplevel: bool | None = ..., + theme: str | None = ..., + background: str = ..., + bd: tkinter._ScreenUnits = ..., + bg: str = ..., + border: tkinter._ScreenUnits = ..., + borderwidth: tkinter._ScreenUnits = ..., + cursor: tkinter._Cursor = ..., + height: tkinter._ScreenUnits = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: tkinter._ScreenUnits = ..., + menu: tkinter.Menu = ..., + padx: tkinter._ScreenUnits = ..., + pady: tkinter._ScreenUnits = ..., + relief: tkinter._Relief = ..., + takefocus: tkinter._TakeFocusValue = ..., + width: tkinter._ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + def cget(self, k: str) -> Any: ... + def configure( # type: ignore[override] + self, + kw: dict[str, Any] | None = None, + *, + themebg: bool | None = ..., + toplevel: bool | None = ..., + theme: str | None = ..., + background: str = ..., + bd: tkinter._ScreenUnits = ..., + bg: str = ..., + border: tkinter._ScreenUnits = ..., + borderwidth: tkinter._ScreenUnits = ..., + cursor: tkinter._Cursor = ..., + height: tkinter._ScreenUnits = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., + highlightthickness: tkinter._ScreenUnits = ..., + menu: tkinter.Menu = ..., + padx: tkinter._ScreenUnits = ..., + pady: tkinter._ScreenUnits = ..., + relief: tkinter._Relief = ..., + takefocus: tkinter._TakeFocusValue = ..., + width: tkinter._ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + def __getitem__(self, k: str) -> Any: ... + def __setitem__(self, k: str, v: Any) -> None: ... From 6a04a32317b708c8a8a607edbd732dd88d8ad26c Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 25 Apr 2025 04:38:36 -0400 Subject: [PATCH 252/388] pywin32: Fix various arguments (#13846) --- stubs/pywin32/_win32typing.pyi | 2 +- stubs/pywin32/win32/win32api.pyi | 4 ++-- stubs/pywin32/win32/win32gui.pyi | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/stubs/pywin32/_win32typing.pyi b/stubs/pywin32/_win32typing.pyi index 0078f78f40e7..63777e1e05d5 100644 --- a/stubs/pywin32/_win32typing.pyi +++ b/stubs/pywin32/_win32typing.pyi @@ -4923,7 +4923,7 @@ class PyCColorDialog: def DoModal(self): ... def GetSavedCustomColors(self): ... def SetCurrentColor(self, color, /) -> None: ... - def SetCustomColors(self) -> None: ... + def SetCustomColors(self, colors: Sequence[int], /) -> None: ... def GetCustomColors(self) -> tuple[Incomplete, ...]: ... class PyCComboBox: diff --git a/stubs/pywin32/win32/win32api.pyi b/stubs/pywin32/win32/win32api.pyi index 28941dbdbb90..1577e5e95c6d 100644 --- a/stubs/pywin32/win32/win32api.pyi +++ b/stubs/pywin32/win32/win32api.pyi @@ -200,7 +200,7 @@ def LoadLibrary(fileName: str, /): ... def LoadLibraryEx(fileName: str, handle: int, handle1, /) -> int: ... def LoadResource(handle: int, _type: _win32typing.PyResourceId, name: _win32typing.PyResourceId, language, /) -> str: ... def LoadString(handle: int, stringId, numChars: int = ..., /) -> str: ... -def MessageBeep(type: int, /): ... +def MessageBeep(type: int = 0, /): ... def MessageBox(hwnd: int | None, message: str, title: str | None = ..., style=..., language=..., /) -> int: ... def MonitorFromPoint(pt: tuple[Incomplete, Incomplete], Flags: int = ...) -> int: ... 
def MonitorFromRect(rc: _win32typing.PyRECT | tuple[int, int, int, int], Flags: int = ...) -> int: ... @@ -268,7 +268,7 @@ def SetCursorPos(arg: tuple[Incomplete, Incomplete], /) -> None: ... def SetDllDirectory(PathName: str, /) -> None: ... def SetErrorMode(errorMode, /): ... def SetFileAttributes(pathName: str, attrs, /): ... -def SetLastError(): ... +def SetLastError(errVal: int, /): ... def SetSysColors(Elements, RgbValues, /) -> None: ... def SetLocalTime(SystemTime: TimeType, /) -> None: ... def SetSystemTime(year, month, dayOfWeek, day, hour, minute, second, millseconds, /): ... diff --git a/stubs/pywin32/win32/win32gui.pyi b/stubs/pywin32/win32/win32gui.pyi index 99e95b327887..9d58f6c72979 100644 --- a/stubs/pywin32/win32/win32gui.pyi +++ b/stubs/pywin32/win32/win32gui.pyi @@ -152,7 +152,7 @@ def AlphaBlend( /, ) -> None: ... def MessageBox(parent: _win32typing.PyHANDLE | int | None, text: str, caption: str, flags, /): ... -def MessageBeep(_type, /) -> None: ... +def MessageBeep(type, /) -> None: ... def CreateWindow( className: str | _win32typing.PyResourceId, windowTitle: str | None, From 0f2bf2cb875bd03e8d7cee2e472a7c1e1b2d2714 Mon Sep 17 00:00:00 2001 From: Blake Williams Date: Fri, 25 Apr 2025 18:39:43 +1000 Subject: [PATCH 253/388] Import gRPC stubs from the grpc-stubs project (#11204) --- pyrightconfig.stricter.json | 1 + stubs/grpcio/@tests/stubtest_allowlist.txt | 11 + stubs/grpcio/@tests/test_cases/check_aio.py | 25 + .../test_cases/check_aio_multi_callable.py | 37 + stubs/grpcio/@tests/test_cases/check_grpc.py | 46 ++ .../test_cases/check_handler_inheritance.py | 36 + .../@tests/test_cases/check_multi_callable.py | 35 + .../@tests/test_cases/check_reflection.py | 9 + .../@tests/test_cases/check_reflection_aio.py | 9 + .../@tests/test_cases/check_register.py | 14 + .../test_cases/check_server_interceptor.py | 22 + .../grpcio/@tests/test_cases/check_status.py | 8 + stubs/grpcio/METADATA.toml | 15 + stubs/grpcio/grpc/__init__.pyi | 640 ++++++++++++++++++ stubs/grpcio/grpc/aio/__init__.pyi | 455 +++++++++++++ stubs/grpcio/grpc_channelz/__init__.pyi | 3 + stubs/grpcio/grpc_channelz/v1/__init__.pyi | 3 + stubs/grpcio/grpc_channelz/v1/_servicer.pyi | 25 + stubs/grpcio/grpc_channelz/v1/channelz.pyi | 3 + .../grpcio/grpc_channelz/v1/channelz_pb2.pyi | 16 + .../grpc_channelz/v1/channelz_pb2_grpc.pyi | 3 + stubs/grpcio/grpc_health/__init__.pyi | 3 + stubs/grpcio/grpc_health/v1/__init__.pyi | 3 + stubs/grpcio/grpc_health/v1/health.pyi | 34 + stubs/grpcio/grpc_health/v1/health_pb2.pyi | 3 + .../grpcio/grpc_health/v1/health_pb2_grpc.pyi | 6 + stubs/grpcio/grpc_reflection/__init__.pyi | 3 + .../grpc_reflection/v1alpha/__init__.pyi | 3 + .../grpcio/grpc_reflection/v1alpha/_base.pyi | 6 + .../grpc_reflection/v1alpha/reflection.pyi | 23 + .../v1alpha/reflection_pb2.pyi | 3 + stubs/grpcio/grpc_status/__init__.pyi | 3 + stubs/grpcio/grpc_status/rpc_status.pyi | 13 + 33 files changed, 1519 insertions(+) create mode 100644 stubs/grpcio/@tests/stubtest_allowlist.txt create mode 100644 stubs/grpcio/@tests/test_cases/check_aio.py create mode 100644 stubs/grpcio/@tests/test_cases/check_aio_multi_callable.py create mode 100644 stubs/grpcio/@tests/test_cases/check_grpc.py create mode 100644 stubs/grpcio/@tests/test_cases/check_handler_inheritance.py create mode 100644 stubs/grpcio/@tests/test_cases/check_multi_callable.py create mode 100644 stubs/grpcio/@tests/test_cases/check_reflection.py create mode 100644 stubs/grpcio/@tests/test_cases/check_reflection_aio.py create mode 100644 
stubs/grpcio/@tests/test_cases/check_register.py create mode 100644 stubs/grpcio/@tests/test_cases/check_server_interceptor.py create mode 100644 stubs/grpcio/@tests/test_cases/check_status.py create mode 100644 stubs/grpcio/METADATA.toml create mode 100644 stubs/grpcio/grpc/__init__.pyi create mode 100644 stubs/grpcio/grpc/aio/__init__.pyi create mode 100644 stubs/grpcio/grpc_channelz/__init__.pyi create mode 100644 stubs/grpcio/grpc_channelz/v1/__init__.pyi create mode 100644 stubs/grpcio/grpc_channelz/v1/_servicer.pyi create mode 100644 stubs/grpcio/grpc_channelz/v1/channelz.pyi create mode 100644 stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi create mode 100644 stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi create mode 100644 stubs/grpcio/grpc_health/__init__.pyi create mode 100644 stubs/grpcio/grpc_health/v1/__init__.pyi create mode 100644 stubs/grpcio/grpc_health/v1/health.pyi create mode 100644 stubs/grpcio/grpc_health/v1/health_pb2.pyi create mode 100644 stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi create mode 100644 stubs/grpcio/grpc_reflection/__init__.pyi create mode 100644 stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi create mode 100644 stubs/grpcio/grpc_reflection/v1alpha/_base.pyi create mode 100644 stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi create mode 100644 stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi create mode 100644 stubs/grpcio/grpc_status/__init__.pyi create mode 100644 stubs/grpcio/grpc_status/rpc_status.pyi diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 7e9a54bca9af..776f3d7df704 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -44,6 +44,7 @@ "stubs/gdb", "stubs/geopandas", "stubs/google-cloud-ndb", + "stubs/grpcio/grpc/__init__.pyi", "stubs/hdbcli/hdbcli/dbapi.pyi", "stubs/html5lib", "stubs/httplib2", diff --git a/stubs/grpcio/@tests/stubtest_allowlist.txt b/stubs/grpcio/@tests/stubtest_allowlist.txt new file mode 100644 index 000000000000..dad6fb3e8ec4 --- /dev/null +++ b/stubs/grpcio/@tests/stubtest_allowlist.txt @@ -0,0 +1,11 @@ +# Error: is not present at runtime +# ============================= +# Error class attributes that aren't defined. +grpc.RpcError.code +grpc.RpcError.details +grpc.RpcError.trailing_metadata + +# Error: is inconsistent +# ============================= +# Stub class is incomplete. 
+grpc_reflection.v1alpha._base.BaseReflectionServicer.__init__ diff --git a/stubs/grpcio/@tests/test_cases/check_aio.py b/stubs/grpcio/@tests/test_cases/check_aio.py new file mode 100644 index 000000000000..2eef7eec05f4 --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_aio.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from typing import Any, cast +from typing_extensions import assert_type + +import grpc.aio + +# Interceptor casts +client_interceptors: list[grpc.aio.ClientInterceptor] = [] +grpc.aio.insecure_channel("target", interceptors=client_interceptors) + +server_interceptors: list[grpc.aio.ServerInterceptor[Any, Any]] = [] +grpc.aio.server(interceptors=server_interceptors) + + +# Metadata +async def metadata() -> None: + metadata = await cast(grpc.aio.Call, None).initial_metadata() + assert_type(metadata["foo"], grpc.aio._MetadataValue) + for k in metadata: + assert_type(k, str) + + for k, v in metadata.items(): + assert_type(k, str) + assert_type(v, grpc.aio._MetadataValue) diff --git a/stubs/grpcio/@tests/test_cases/check_aio_multi_callable.py b/stubs/grpcio/@tests/test_cases/check_aio_multi_callable.py new file mode 100644 index 000000000000..36fb5870380b --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_aio_multi_callable.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from typing import Protocol, cast +from typing_extensions import assert_type + +import grpc.aio + + +class DummyRequest: + pass + + +class DummyReply: + pass + + +class DummyServiceStub(Protocol): + UnaryUnary: grpc.aio.UnaryUnaryMultiCallable[DummyRequest, DummyReply] + UnaryStream: grpc.aio.UnaryStreamMultiCallable[DummyRequest, DummyReply] + StreamUnary: grpc.aio.StreamUnaryMultiCallable[DummyRequest, DummyReply] + StreamStream: grpc.aio.StreamStreamMultiCallable[DummyRequest, DummyReply] + + +stub = cast(DummyServiceStub, None) +req = DummyRequest() + + +async def async_context() -> None: + assert_type(await stub.UnaryUnary(req), DummyReply) + + async for resp in stub.UnaryStream(req): + assert_type(resp, DummyReply) + + assert_type(await stub.StreamUnary(iter([req])), DummyReply) + + async for resp in stub.StreamStream(iter([req])): + assert_type(resp, DummyReply) diff --git a/stubs/grpcio/@tests/test_cases/check_grpc.py b/stubs/grpcio/@tests/test_cases/check_grpc.py new file mode 100644 index 000000000000..e413ba974930 --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_grpc.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from typing import Optional, cast +from typing_extensions import assert_type + +import grpc + +# Channel options: +assert_type(grpc.insecure_channel("target", ()), grpc.Channel) +assert_type(grpc.insecure_channel("target", (("a", "b"),)), grpc.Channel) +assert_type(grpc.insecure_channel("target", (("a", "b"), ("c", "d"))), grpc.Channel) + +# Local channel credentials: +creds = grpc.local_channel_credentials(grpc.LocalConnectionType.LOCAL_TCP) +assert_type(creds, grpc.ChannelCredentials) + +# Other credential types: +assert_type(grpc.alts_channel_credentials(), grpc.ChannelCredentials) +assert_type(grpc.alts_server_credentials(), grpc.ServerCredentials) +assert_type(grpc.compute_engine_channel_credentials(grpc.CallCredentials("")), grpc.ChannelCredentials) +assert_type(grpc.insecure_server_credentials(), grpc.ServerCredentials) + +# XDS credentials: +assert_type( + grpc.xds_channel_credentials(grpc.local_channel_credentials(grpc.LocalConnectionType.LOCAL_TCP)), grpc.ChannelCredentials +) 
+assert_type(grpc.xds_server_credentials(grpc.insecure_server_credentials()), grpc.ServerCredentials) + +# Channel ready future +channel = grpc.insecure_channel("target", ()) +assert_type(grpc.channel_ready_future(channel).result(), None) + +# Channel options supports list: +assert_type(grpc.insecure_channel("target", []), grpc.Channel) +assert_type(grpc.insecure_channel("target", [("a", "b")]), grpc.Channel) +assert_type(grpc.insecure_channel("target", [("a", "b"), ("c", "d")]), grpc.Channel) + +# Client call details optionals: +call_details = grpc.ClientCallDetails() +assert_type(call_details.method, str) +assert_type(call_details.timeout, Optional[float]) + +# Call iterator +call_iter = cast(grpc._CallIterator[str], None) +for call in call_iter: + assert_type(call, str) diff --git a/stubs/grpcio/@tests/test_cases/check_handler_inheritance.py b/stubs/grpcio/@tests/test_cases/check_handler_inheritance.py new file mode 100644 index 000000000000..72cefce0bd41 --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_handler_inheritance.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from typing import cast +from typing_extensions import assert_type + +import grpc + + +class Request: + pass + + +class Response: + pass + + +def unary_unary_call(rq: Request, ctx: grpc.ServicerContext) -> Response: + assert_type(rq, Request) + return Response() + + +class ServiceHandler(grpc.ServiceRpcHandler[Request, Response]): + def service_name(self) -> str: + return "hello" + + def service(self, handler_call_details: grpc.HandlerCallDetails) -> grpc.RpcMethodHandler[Request, Response] | None: + rpc = grpc.RpcMethodHandler[Request, Response]() + rpc.unary_unary = unary_unary_call + return rpc + + +h = ServiceHandler() +ctx = cast(grpc.ServicerContext, None) +svc = h.service(grpc.HandlerCallDetails()) +if svc is not None and svc.unary_unary is not None: + svc.unary_unary(Request(), ctx) diff --git a/stubs/grpcio/@tests/test_cases/check_multi_callable.py b/stubs/grpcio/@tests/test_cases/check_multi_callable.py new file mode 100644 index 000000000000..254d60b2a228 --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_multi_callable.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from typing import Protocol, cast +from typing_extensions import assert_type + +import grpc + + +class DummyRequest: + pass + + +class DummyReply: + pass + + +class DummyServiceStub(Protocol): + UnaryUnary: grpc.UnaryUnaryMultiCallable[DummyRequest, DummyReply] + UnaryStream: grpc.UnaryStreamMultiCallable[DummyRequest, DummyReply] + StreamUnary: grpc.StreamUnaryMultiCallable[DummyRequest, DummyReply] + StreamStream: grpc.StreamStreamMultiCallable[DummyRequest, DummyReply] + + +stub = cast(DummyServiceStub, None) +req = DummyRequest() + +assert_type(stub.UnaryUnary(req), DummyReply) + +for resp in stub.UnaryStream(req): + assert_type(resp, DummyReply) + +assert_type(stub.StreamUnary(iter([req])), DummyReply) + +for resp in stub.StreamStream(iter([req])): + assert_type(resp, DummyReply) diff --git a/stubs/grpcio/@tests/test_cases/check_reflection.py b/stubs/grpcio/@tests/test_cases/check_reflection.py new file mode 100644 index 000000000000..5287283960a8 --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_reflection.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +from typing import cast + +import grpc +from grpc_reflection.v1alpha.reflection import enable_server_reflection + +server = cast(grpc.Server, None) +enable_server_reflection(["foo"], server, None) diff --git 
a/stubs/grpcio/@tests/test_cases/check_reflection_aio.py b/stubs/grpcio/@tests/test_cases/check_reflection_aio.py new file mode 100644 index 000000000000..80d4054cc123 --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_reflection_aio.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +from typing import cast + +import grpc.aio +from grpc_reflection.v1alpha.reflection import enable_server_reflection + +server = cast(grpc.aio.Server, None) +enable_server_reflection(["foo"], server, None) diff --git a/stubs/grpcio/@tests/test_cases/check_register.py b/stubs/grpcio/@tests/test_cases/check_register.py new file mode 100644 index 000000000000..b68daa523e7a --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_register.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from typing import Any + +import grpc + + +@grpc.Call.register +class CallProxy: + def __init__(self, target: grpc.Call) -> None: + self._target = target + + def __getattr__(self, name: str) -> Any: + return getattr(self._target, name) diff --git a/stubs/grpcio/@tests/test_cases/check_server_interceptor.py b/stubs/grpcio/@tests/test_cases/check_server_interceptor.py new file mode 100644 index 000000000000..9a84f37e41d9 --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_server_interceptor.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from collections.abc import Callable + +import grpc + + +class Request: + pass + + +class Response: + pass + + +class NoopInterceptor(grpc.ServerInterceptor[Request, Response]): + def intercept_service( + self, + continuation: Callable[[grpc.HandlerCallDetails], grpc.RpcMethodHandler[Request, Response] | None], + handler_call_details: grpc.HandlerCallDetails, + ) -> grpc.RpcMethodHandler[Request, Response] | None: + return continuation(handler_call_details) diff --git a/stubs/grpcio/@tests/test_cases/check_status.py b/stubs/grpcio/@tests/test_cases/check_status.py new file mode 100644 index 000000000000..b9e1776b68cf --- /dev/null +++ b/stubs/grpcio/@tests/test_cases/check_status.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from grpc import Status +from grpc_status import to_status + +# XXX: to_status actually expects a "google.rpc.status.Status", +# but the stubs for that aren't present yet. +status: Status = to_status(None) diff --git a/stubs/grpcio/METADATA.toml b/stubs/grpcio/METADATA.toml new file mode 100644 index 000000000000..9c94f65fb2ef --- /dev/null +++ b/stubs/grpcio/METADATA.toml @@ -0,0 +1,15 @@ +version = "1.*" +upstream_repository = "https://github.com/grpc/grpc" +partial_stub = true +requires = [ + "types-protobuf", +] + +[tool.stubtest] +ignore_missing_stub = true +stubtest_requirements = [ + "grpcio-channelz", + "grpcio-health-checking", + "grpcio-reflection", + "grpcio-status", +] diff --git a/stubs/grpcio/grpc/__init__.pyi b/stubs/grpcio/grpc/__init__.pyi new file mode 100644 index 000000000000..8f370e86a0e6 --- /dev/null +++ b/stubs/grpcio/grpc/__init__.pyi @@ -0,0 +1,640 @@ +import abc +import enum +import threading +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from concurrent import futures +from types import ModuleType, TracebackType +from typing import Any, Generic, NoReturn, Protocol, TypeVar, type_check_only +from typing_extensions import Self, TypeAlias + +__version__: str + +# This class encodes an uninhabited type, requiring use of explicit casts or ignores +# in order to satisfy type checkers. This allows grpc-stubs to add proper stubs +# later, allowing those overrides to be removed. 
+# The alternative is Any, but a future replacement of Any with a proper type +# would result in type errors where previously the type checker was happy, which +# we want to avoid. Forcing the user to use overrides provides forwards-compatibility. +@type_check_only +class _PartialStubMustCastOrIgnore: ... + +# XXX: Early attempts to tame this used literals for all the keys (gRPC is +# a bit segfaulty and doesn't adequately validate the option keys), but that +# didn't quite work out. Maybe it's something we can come back to? +_OptionKeyValue: TypeAlias = tuple[str, Any] +_Options: TypeAlias = Sequence[_OptionKeyValue] + +class Compression(enum.IntEnum): + NoCompression = 0 + Deflate = 1 + Gzip = 2 + +@enum.unique +class LocalConnectionType(enum.Enum): + UDS = 0 + LOCAL_TCP = 1 + +# XXX: not documented, needs more investigation. +# Some evidence: +# - https://github.com/grpc/grpc/blob/0e1984effd7e977ef18f1ad7fde7d10a2a153e1d/src/python/grpcio_tests/tests/unit/_metadata_test.py#L71 +# - https://github.com/grpc/grpc/blob/0e1984effd7e977ef18f1ad7fde7d10a2a153e1d/src/python/grpcio_tests/tests/unit/_metadata_test.py#L58 +# - https://github.com/grpc/grpc/blob/0e1984effd7e977ef18f1ad7fde7d10a2a153e1d/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py#L66 +_Metadata: TypeAlias = tuple[tuple[str, str | bytes], ...] + +_TRequest = TypeVar("_TRequest") +_TResponse = TypeVar("_TResponse") + +# XXX: These are probably the SerializeToTring/FromString pb2 methods, but +# this needs further investigation +@type_check_only +class _RequestSerializer(Protocol): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +@type_check_only +class _RequestDeserializer(Protocol): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +@type_check_only +class _ResponseSerializer(Protocol): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +@type_check_only +class _ResponseDeserializer(Protocol): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +# Future Interfaces: + +class FutureTimeoutError(Exception): ... +class FutureCancelledError(Exception): ... + +_TFutureValue = TypeVar("_TFutureValue") + +class Future(abc.ABC, Generic[_TFutureValue]): + @abc.abstractmethod + def add_done_callback(self, fn: Callable[[Future[_TFutureValue]], None]) -> None: ... + @abc.abstractmethod + def cancel(self) -> bool: ... + @abc.abstractmethod + def cancelled(self) -> bool: ... + @abc.abstractmethod + def done(self) -> bool: ... + @abc.abstractmethod + def exception(self, timeout: float | None = ...) -> Exception | None: ... + @abc.abstractmethod + def result(self, timeout: float | None = ...) -> _TFutureValue: ... + @abc.abstractmethod + def running(self) -> bool: ... + + # FIXME: unsure of the exact return type here. Is it a traceback.StackSummary? + @abc.abstractmethod + def traceback(self, timeout: float | None = ...) -> Any: ... + +# Create Client: + +def insecure_channel(target: str, options: _Options | None = ..., compression: Compression | None = ...) -> Channel: ... +def secure_channel( + target: str, credentials: ChannelCredentials, options: _Options | None = ..., compression: Compression | None = ... +) -> Channel: ... 
+ +_Interceptor: TypeAlias = ( + UnaryUnaryClientInterceptor[_TRequest, _TResponse] + | UnaryStreamClientInterceptor[_TRequest, _TResponse] + | StreamUnaryClientInterceptor[_TRequest, _TResponse] + | StreamStreamClientInterceptor[_TRequest, _TResponse] +) + +def intercept_channel(channel: Channel, *interceptors: _Interceptor[_TRequest, _TResponse]) -> Channel: ... + +# Create Client Credentials: + +def ssl_channel_credentials( + root_certificates: bytes | None = ..., private_key: bytes | None = ..., certificate_chain: bytes | None = ... +) -> ChannelCredentials: ... +def local_channel_credentials(local_connect_type: LocalConnectionType = ...) -> ChannelCredentials: ... +def metadata_call_credentials(metadata_plugin: AuthMetadataPlugin, name: str | None = ...) -> CallCredentials: ... +def access_token_call_credentials(access_token: str) -> CallCredentials: ... +def alts_channel_credentials(service_accounts: Sequence[str] | None = ...) -> ChannelCredentials: ... +def compute_engine_channel_credentials(call_credentials: CallCredentials) -> ChannelCredentials: ... +def xds_channel_credentials(fallback_credentials: ChannelCredentials | None = ...) -> ChannelCredentials: ... + +# GRPC docs say there should be at least two: +def composite_call_credentials(creds1: CallCredentials, creds2: CallCredentials, *rest: CallCredentials) -> CallCredentials: ... + +# Compose a ChannelCredentials and one or more CallCredentials objects. +def composite_channel_credentials( + channel_credentials: ChannelCredentials, call_credentials: CallCredentials, *rest: CallCredentials +) -> ChannelCredentials: ... + +# Create Server: + +def server( + thread_pool: futures.ThreadPoolExecutor, + handlers: list[GenericRpcHandler[Any, Any]] | None = ..., + interceptors: list[ServerInterceptor[Any, Any]] | None = ..., + options: _Options | None = ..., + maximum_concurrent_rpcs: int | None = ..., + compression: Compression | None = ..., + xds: bool = ..., +) -> Server: ... + +# Create Server Credentials: + +_CertificateChainPair: TypeAlias = tuple[bytes, bytes] + +def ssl_server_credentials( + private_key_certificate_chain_pairs: list[_CertificateChainPair], + root_certificates: bytes | None = ..., + require_client_auth: bool = ..., +) -> ServerCredentials: ... +def local_server_credentials(local_connect_type: LocalConnectionType = ...) -> ServerCredentials: ... +def ssl_server_certificate_configuration( + private_key_certificate_chain_pairs: list[_CertificateChainPair], root_certificates: bytes | None = ... +) -> ServerCertificateConfiguration: ... +def dynamic_ssl_server_credentials( + initial_certificate_configuration: ServerCertificateConfiguration, + certificate_configuration_fetcher: Callable[[], ServerCertificateConfiguration], + require_client_authentication: bool = ..., +) -> ServerCredentials: ... +def alts_server_credentials() -> ServerCredentials: ... +def insecure_server_credentials() -> ServerCredentials: ... +def xds_server_credentials(fallback_credentials: ServerCredentials) -> ServerCredentials: ... + +# RPC Method Handlers: + +# XXX: This is probably what appears in the add_FooServicer_to_server function +# in the _pb2_grpc files that get generated, which points to the FooServicer +# handler functions that get generated, which look like this: +# +# def FloobDoob(self, request, context): +# return response +# +@type_check_only +class _Behaviour(Protocol): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... 
+ +def unary_unary_rpc_method_handler( + behavior: _Behaviour, + request_deserializer: _RequestDeserializer | None = ..., + response_serializer: _ResponseSerializer | None = ..., +) -> RpcMethodHandler[Any, Any]: ... +def unary_stream_rpc_method_handler( + behavior: _Behaviour, + request_deserializer: _RequestDeserializer | None = ..., + response_serializer: _ResponseSerializer | None = ..., +) -> RpcMethodHandler[Any, Any]: ... +def stream_unary_rpc_method_handler( + behavior: _Behaviour, + request_deserializer: _RequestDeserializer | None = ..., + response_serializer: _ResponseSerializer | None = ..., +) -> RpcMethodHandler[Any, Any]: ... +def stream_stream_rpc_method_handler( + behavior: _Behaviour, + request_deserializer: _RequestDeserializer | None = ..., + response_serializer: _ResponseSerializer | None = ..., +) -> RpcMethodHandler[Any, Any]: ... +def method_handlers_generic_handler( + service: str, method_handlers: dict[str, RpcMethodHandler[Any, Any]] +) -> GenericRpcHandler[Any, Any]: ... + +# Channel Ready Future: + +def channel_ready_future(channel: Channel) -> Future[None]: ... + +# Channel Connectivity: + +class ChannelConnectivity(enum.Enum): + IDLE = (0, "idle") + CONNECTING = (1, "connecting") + READY = (2, "ready") + TRANSIENT_FAILURE = (3, "transient failure") + SHUTDOWN = (4, "shutdown") + +# gRPC Status Code: + +class Status(abc.ABC): + code: StatusCode + + # XXX: misnamed property, does not align with status.proto, where it is called 'message': + details: str + + trailing_metadata: _Metadata + +# https://grpc.github.io/grpc/core/md_doc_statuscodes.html +class StatusCode(enum.Enum): + OK = (0, "ok") + CANCELLED = (1, "cancelled") + UNKNOWN = (2, "unknown") + INVALID_ARGUMENT = (3, "invalid argument") + DEADLINE_EXCEEDED = (4, "deadline exceeded") + NOT_FOUND = (5, "not found") + ALREADY_EXISTS = (6, "already exists") + PERMISSION_DENIED = (7, "permission denied") + RESOURCE_EXHAUSTED = (8, "resource exhausted") + FAILED_PRECONDITION = (9, "failed precondition") + ABORTED = (10, "aborted") + OUT_OF_RANGE = (11, "out of range") + UNIMPLEMENTED = (12, "unimplemented") + INTERNAL = (13, "internal") + UNAVAILABLE = (14, "unavailable") + DATA_LOSS = (15, "data loss") + UNAUTHENTICATED = (16, "unauthenticated") + +# Channel Object: + +class Channel(abc.ABC): + @abc.abstractmethod + def close(self) -> None: ... + @abc.abstractmethod + def stream_stream( + self, + method: str, + request_serializer: _RequestSerializer | None = ..., + response_deserializer: _ResponseDeserializer | None = ..., + ) -> StreamStreamMultiCallable[Any, Any]: ... + @abc.abstractmethod + def stream_unary( + self, + method: str, + request_serializer: _RequestSerializer | None = ..., + response_deserializer: _ResponseDeserializer | None = ..., + ) -> StreamUnaryMultiCallable[Any, Any]: ... + @abc.abstractmethod + def subscribe(self, callback: Callable[[ChannelConnectivity], None], try_to_connect: bool = ...) -> None: ... + @abc.abstractmethod + def unary_stream( + self, + method: str, + request_serializer: _RequestSerializer | None = ..., + response_deserializer: _ResponseDeserializer | None = ..., + ) -> UnaryStreamMultiCallable[Any, Any]: ... + @abc.abstractmethod + def unary_unary( + self, + method: str, + request_serializer: _RequestSerializer | None = ..., + response_deserializer: _ResponseDeserializer | None = ..., + ) -> UnaryUnaryMultiCallable[Any, Any]: ... + @abc.abstractmethod + def unsubscribe(self, callback: Callable[[ChannelConnectivity], None]) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + +# Server Object: + +class Server(abc.ABC): + @abc.abstractmethod + def add_generic_rpc_handlers(self, generic_rpc_handlers: Iterable[GenericRpcHandler[Any, Any]]) -> None: ... + + # Returns an integer port on which server will accept RPC requests. + @abc.abstractmethod + def add_insecure_port(self, address: str) -> int: ... + + # Returns an integer port on which server will accept RPC requests. + @abc.abstractmethod + def add_secure_port(self, address: str, server_credentials: ServerCredentials) -> int: ... + @abc.abstractmethod + def start(self) -> None: ... + + # Grace period is in seconds. + @abc.abstractmethod + def stop(self, grace: float | None) -> threading.Event: ... + + # Block current thread until the server stops. Returns a bool + # indicates if the operation times out. Timeout is in seconds. + def wait_for_termination(self, timeout: float | None = ...) -> bool: ... + +# Authentication & Authorization Objects: + +# This class has no supported interface +class ChannelCredentials: + def __init__(self, credentials) -> None: ... + +# This class has no supported interface +class CallCredentials: + def __init__(self, credentials) -> None: ... + +class AuthMetadataContext(abc.ABC): + service_url: str + method_name: str + +class AuthMetadataPluginCallback(abc.ABC): + def __call__(self, metadata: _Metadata, error: Exception | None) -> None: ... + +class AuthMetadataPlugin(abc.ABC): + def __call__(self, context: AuthMetadataContext, callback: AuthMetadataPluginCallback) -> None: ... + +# This class has no supported interface +class ServerCredentials: + def __init__(self, credentials) -> None: ... + +# This class has no supported interface +class ServerCertificateConfiguration: + def __init__(self, certificate_configuration) -> None: ... + +# gRPC Exceptions: + +@type_check_only +class _Metadatum: + key: str + value: bytes + +# FIXME: There is scant documentation about what is actually available in this type. +# The properties here are the properties observed in the wild, and may be inaccurate. +# A better source to confirm their presence needs to be found at some point. +class RpcError(Exception): + def code(self) -> StatusCode: ... + + # misnamed property, does not align with status.proto, where it is called 'message': + def details(self) -> str | None: ... + + # XXX: This has a slightly different return type to all the other metadata: + def trailing_metadata(self) -> tuple[_Metadatum, ...]: ... + +# Shared Context: + +class RpcContext(abc.ABC): + @abc.abstractmethod + def add_callback(self, callback: Callable[[], None]) -> bool: ... + @abc.abstractmethod + def cancel(self) -> bool: ... + @abc.abstractmethod + def is_active(self) -> bool: ... + @abc.abstractmethod + def time_remaining(self) -> float: ... + +# Client-Side Context: + +class Call(RpcContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def code(self) -> StatusCode: ... + + # misnamed property, does not align with status.proto, where it is called 'message': + @abc.abstractmethod + def details(self) -> str: ... + @abc.abstractmethod + def initial_metadata(self) -> _Metadata: ... + @abc.abstractmethod + def trailing_metadata(self) -> _Metadata: ... + +# Client-Side Interceptor: + +class ClientCallDetails(abc.ABC): + method: str + timeout: float | None + metadata: _Metadata | None + credentials: CallCredentials | None + + # "This is an EXPERIMENTAL argument. 
An optional flag t enable wait for ready mechanism." + wait_for_ready: bool | None + + compression: Compression | None + +# An object that is both a Call for the RPC and a Future. In the event of +# RPC completion, the return Call-Future's result value will be the +# response message of the RPC. Should the event terminate with non-OK +# status, the returned Call-Future's exception value will be an RpcError. +# +@type_check_only +class _CallFuture(Call, Future[_TResponse], metaclass=abc.ABCMeta): ... + +class UnaryUnaryClientInterceptor(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def intercept_unary_unary( + self, + # FIXME: decode these cryptic runes to confirm the typing mystery of + # this callable's signature that was left for us by past civilisations: + # + # continuation - A function that proceeds with the invocation by + # executing the next interceptor in chain or invoking the actual RPC + # on the underlying Channel. It is the interceptor's responsibility + # to call it if it decides to move the RPC forward. The interceptor + # can use response_future = continuation(client_call_details, + # request) to continue with the RPC. continuation returns an object + # that is both a Call for the RPC and a Future. In the event of RPC + # completion, the return Call-Future's result value will be the + # response message of the RPC. Should the event terminate with non-OK + # status, the returned Call-Future's exception value will be an + # RpcError. + # + continuation: Callable[[ClientCallDetails, _TRequest], _CallFuture[_TResponse]], + client_call_details: ClientCallDetails, + request: _TRequest, + ) -> _CallFuture[_TResponse]: ... + +@type_check_only +class _CallIterator(Call, Generic[_TResponse], metaclass=abc.ABCMeta): + def __iter__(self) -> Iterator[_TResponse]: ... + +class UnaryStreamClientInterceptor(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def intercept_unary_stream( + self, + continuation: Callable[[ClientCallDetails, _TRequest], _CallIterator[_TResponse]], + client_call_details: ClientCallDetails, + request: _TRequest, + ) -> _CallIterator[_TResponse]: ... + +class StreamUnaryClientInterceptor(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def intercept_stream_unary( + self, + continuation: Callable[[ClientCallDetails, _TRequest], _CallFuture[_TResponse]], + client_call_details: ClientCallDetails, + request_iterator: Iterator[_TRequest], + ) -> _CallFuture[_TResponse]: ... + +class StreamStreamClientInterceptor(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def intercept_stream_stream( + self, + continuation: Callable[[ClientCallDetails, _TRequest], _CallIterator[_TResponse]], + client_call_details: ClientCallDetails, + request_iterator: Iterator[_TRequest], + ) -> _CallIterator[_TResponse]: ... + +# Service-Side Context: + +class ServicerContext(RpcContext, metaclass=abc.ABCMeta): + # misnamed parameter 'details', does not align with status.proto, where it is called 'message': + @abc.abstractmethod + def abort(self, code: StatusCode, details: str) -> NoReturn: ... + @abc.abstractmethod + def abort_with_status(self, status: Status) -> NoReturn: ... + + # FIXME: The docs say "A map of strings to an iterable of bytes for each auth property". + # Does that mean 'bytes' (which is iterable), or 'Iterable[bytes]'? + @abc.abstractmethod + def auth_context(self) -> Mapping[str, bytes]: ... + def disable_next_message_compression(self) -> None: ... 
+ @abc.abstractmethod + def invocation_metadata(self) -> _Metadata: ... + @abc.abstractmethod + def peer(self) -> str: ... + @abc.abstractmethod + def peer_identities(self) -> Iterable[bytes] | None: ... + @abc.abstractmethod + def peer_identity_key(self) -> str | None: ... + @abc.abstractmethod + def send_initial_metadata(self, initial_metadata: _Metadata) -> None: ... + @abc.abstractmethod + def set_code(self, code: StatusCode) -> None: ... + def set_compression(self, compression: Compression) -> None: ... + @abc.abstractmethod + def set_trailing_metadata(self, trailing_metadata: _Metadata) -> None: ... + + # misnamed function 'details', does not align with status.proto, where it is called 'message': + @abc.abstractmethod + def set_details(self, details: str) -> None: ... + def trailing_metadata(self) -> _Metadata: ... + +# Service-Side Handler: + +class RpcMethodHandler(abc.ABC, Generic[_TRequest, _TResponse]): + request_streaming: bool + response_streaming: bool + + # XXX: not clear from docs whether this is optional or not + request_deserializer: _RequestDeserializer | None + + # XXX: not clear from docs whether this is optional or not + response_serializer: _ResponseSerializer | None + + unary_unary: Callable[[_TRequest, ServicerContext], _TResponse] | None + + unary_stream: Callable[[_TRequest, ServicerContext], Iterator[_TResponse]] | None + + stream_unary: Callable[[Iterator[_TRequest], ServicerContext], _TResponse] | None + + stream_stream: Callable[[Iterator[_TRequest], ServicerContext], Iterator[_TResponse]] | None + +class HandlerCallDetails(abc.ABC): + method: str + invocation_metadata: _Metadata + +class GenericRpcHandler(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def service(self, handler_call_details: HandlerCallDetails) -> RpcMethodHandler[_TRequest, _TResponse] | None: ... + +class ServiceRpcHandler(GenericRpcHandler[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + def service_name(self) -> str: ... + +# Service-Side Interceptor: + +class ServerInterceptor(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def intercept_service( + self, + continuation: Callable[[HandlerCallDetails], RpcMethodHandler[_TRequest, _TResponse] | None], + handler_call_details: HandlerCallDetails, + ) -> RpcMethodHandler[_TRequest, _TResponse] | None: ... + +# Multi-Callable Interfaces: + +class UnaryUnaryMultiCallable(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def __call__( + self, + request: _TRequest, + timeout: float | None = ..., + metadata: _Metadata | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> _TResponse: ... + @abc.abstractmethod + def future( + self, + request: _TRequest, + timeout: float | None = ..., + metadata: _Metadata | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> _CallFuture[_TResponse]: ... 
+ @abc.abstractmethod + def with_call( + self, + request: _TRequest, + timeout: float | None = ..., + metadata: _Metadata | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + # FIXME: Return value is documented as "The response value for the RPC and a Call value for the RPC"; + # this is slightly unclear so this return type is a best-effort guess. + ) -> tuple[_TResponse, Call]: ... + +class UnaryStreamMultiCallable(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def __call__( + self, + request: _TRequest, + timeout: float | None = ..., + metadata: _Metadata | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> _CallIterator[_TResponse]: ... + +class StreamUnaryMultiCallable(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def __call__( + self, + request_iterator: Iterator[_TRequest], + timeout: float | None = ..., + metadata: _Metadata | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> _TResponse: ... + @abc.abstractmethod + def future( + self, + request_iterator: Iterator[_TRequest], + timeout: float | None = ..., + metadata: _Metadata | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> _CallFuture[_TResponse]: ... + @abc.abstractmethod + def with_call( + self, + request_iterator: Iterator[_TRequest], + timeout: float | None = ..., + metadata: _Metadata | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + # FIXME: Return value is documented as "The response value for the RPC and a Call value for the RPC"; + # this is slightly unclear so this return type is a best-effort guess. + ) -> tuple[_TResponse, Call]: ... + +class StreamStreamMultiCallable(abc.ABC, Generic[_TRequest, _TResponse]): + @abc.abstractmethod + def __call__( + self, + request_iterator: Iterator[_TRequest], + timeout: float | None = ..., + metadata: _Metadata | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> _CallIterator[_TResponse]: ... + +# Runtime Protobuf Parsing: + +def protos(protobuf_path: str) -> ModuleType: ... +def services(protobuf_path: str) -> ModuleType: ... +def protos_and_services(protobuf_path: str) -> tuple[ModuleType, ModuleType]: ... 
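
# --- Editorial aside (not part of the patch) ---------------------------------
# The synchronous grpc/__init__.pyi stub above types both the client-side
# multi-callables and the server-side handler helpers
# (unary_unary_rpc_method_handler, method_handlers_generic_handler, server).
# As a quick illustration of how those pieces are meant to fit together under
# these annotations, here is a minimal typing sketch. The "demo.Echo" service
# name, the EchoRequest/EchoReply classes and the echo() behaviour are invented
# for the example; real projects would normally use the generated
# add_*Servicer_to_server helpers and pb2 serializers instead.

from concurrent import futures

import grpc


class EchoRequest: ...


class EchoReply: ...


def echo(request: EchoRequest, context: grpc.ServicerContext) -> EchoReply:
    # unary-unary behaviour: one request object in, one response object out
    return EchoReply()


# No serializers are registered here, so at runtime the behaviour would in fact
# receive raw bytes; the point is only to show the typed wiring of the API.
handler = grpc.method_handlers_generic_handler(
    "demo.Echo",
    {"Echo": grpc.unary_unary_rpc_method_handler(echo)},
)
server = grpc.server(futures.ThreadPoolExecutor(max_workers=4), handlers=[handler])
server.add_insecure_port("localhost:50051")
server.start()
server.wait_for_termination()

# --- End of editorial aside ---------------------------------------------------
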
diff --git a/stubs/grpcio/grpc/aio/__init__.pyi b/stubs/grpcio/grpc/aio/__init__.pyi new file mode 100644 index 000000000000..8a9cb36bb9e6 --- /dev/null +++ b/stubs/grpcio/grpc/aio/__init__.pyi @@ -0,0 +1,455 @@ +import abc +import asyncio +from _typeshed import Incomplete +from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable, Generator, Iterable, Iterator, Mapping, Sequence +from concurrent import futures +from types import TracebackType +from typing import Any, Generic, NoReturn, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias + +from grpc import ( + CallCredentials, + ChannelConnectivity, + ChannelCredentials, + Compression, + GenericRpcHandler, + HandlerCallDetails, + RpcError, + RpcMethodHandler, + ServerCredentials, + StatusCode, + _Options, +) + +_TRequest = TypeVar("_TRequest") +_TResponse = TypeVar("_TResponse") + +# Exceptions: + +class BaseError(Exception): ... +class UsageError(BaseError): ... +class AbortError(BaseError): ... +class InternalError(BaseError): ... + +class AioRpcError(RpcError): + def __init__( + self, + code: StatusCode, + initial_metadata: Metadata, + trailing_metadata: Metadata, + details: str | None = ..., + debug_error_string: str | None = ..., + ) -> None: ... + + # FIXME: confirm if these are present in the parent type. The remaining + # methods already exist. + def debug_error_string(self) -> str: ... + def initial_metadata(self) -> Metadata: ... + +# Create Client: + +class ClientInterceptor(metaclass=abc.ABCMeta): ... + +def insecure_channel( + target: str, + options: _Options | None = ..., + compression: Compression | None = ..., + interceptors: Sequence[ClientInterceptor] | None = ..., +) -> Channel: ... +def secure_channel( + target: str, + credentials: ChannelCredentials, + options: _Options | None = ..., + compression: Compression | None = ..., + interceptors: Sequence[ClientInterceptor] | None = ..., +) -> Channel: ... + +# Create Server: + +def server( + migration_thread_pool: futures.Executor | None = ..., + handlers: Sequence[GenericRpcHandler[Any, Any]] | None = ..., + interceptors: Sequence[ServerInterceptor[Any, Any]] | None = ..., + options: _Options | None = ..., + maximum_concurrent_rpcs: int | None = ..., + compression: Compression | None = ..., +) -> Server: ... + +# Channel Object: + +# XXX: The docs suggest these type signatures for aio, but not for non-async, +# and it's unclear why; +# https://grpc.github.io/grpc/python/grpc_asyncio.html#grpc.aio.Channel.stream_stream +_RequestSerializer: TypeAlias = Callable[[Any], bytes] +_ResponseDeserializer: TypeAlias = Callable[[bytes], Any] + +class Channel(abc.ABC): + @abc.abstractmethod + async def close(self, grace: float | None = ...) -> None: ... + @abc.abstractmethod + def get_state(self, try_to_connect: bool = ...) -> ChannelConnectivity: ... + @abc.abstractmethod + async def wait_for_state_change(self, last_observed_state: ChannelConnectivity) -> None: ... + @abc.abstractmethod + def stream_stream( + self, + method: str, + request_serializer: _RequestSerializer | None = ..., + response_deserializer: _ResponseDeserializer | None = ..., + ) -> StreamStreamMultiCallable[Any, Any]: ... + @abc.abstractmethod + def stream_unary( + self, + method: str, + request_serializer: _RequestSerializer | None = ..., + response_deserializer: _ResponseDeserializer | None = ..., + ) -> StreamUnaryMultiCallable[Any, Any]: ... 
+ @abc.abstractmethod + def unary_stream( + self, + method: str, + request_serializer: _RequestSerializer | None = ..., + response_deserializer: _ResponseDeserializer | None = ..., + ) -> UnaryStreamMultiCallable[Any, Any]: ... + @abc.abstractmethod + def unary_unary( + self, + method: str, + request_serializer: _RequestSerializer | None = ..., + response_deserializer: _ResponseDeserializer | None = ..., + ) -> UnaryUnaryMultiCallable[Any, Any]: ... + @abc.abstractmethod + async def __aenter__(self) -> Self: ... + @abc.abstractmethod + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + @abc.abstractmethod + async def channel_ready(self) -> None: ... + +# Server Object: + +class Server(metaclass=abc.ABCMeta): + @abc.abstractmethod + def add_generic_rpc_handlers(self, generic_rpc_handlers: Iterable[GenericRpcHandler[Any, Any]]) -> None: ... + + # Returns an integer port on which server will accept RPC requests. + @abc.abstractmethod + def add_insecure_port(self, address: str) -> int: ... + + # Returns an integer port on which server will accept RPC requests. + @abc.abstractmethod + def add_secure_port(self, address: str, server_credentials: ServerCredentials) -> int: ... + @abc.abstractmethod + async def start(self) -> None: ... + + # Grace period is in seconds. + @abc.abstractmethod + async def stop(self, grace: float | None) -> None: ... + + # Returns a bool indicates if the operation times out. Timeout is in seconds. + @abc.abstractmethod + async def wait_for_termination(self, timeout: float | None = ...) -> bool: ... + +# Client-Side Context: + +_DoneCallbackType: TypeAlias = Callable[[Any], None] +_EOFType: TypeAlias = object + +class RpcContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def cancelled(self) -> bool: ... + @abc.abstractmethod + def done(self) -> bool: ... + @abc.abstractmethod + def time_remaining(self) -> float | None: ... + @abc.abstractmethod + def cancel(self) -> bool: ... + @abc.abstractmethod + def add_done_callback(self, callback: _DoneCallbackType) -> None: ... + +class Call(RpcContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + async def initial_metadata(self) -> Metadata: ... + @abc.abstractmethod + async def trailing_metadata(self) -> Metadata: ... + @abc.abstractmethod + async def code(self) -> StatusCode: ... + @abc.abstractmethod + async def details(self) -> str: ... + @abc.abstractmethod + async def wait_for_connection(self) -> None: ... + +class UnaryUnaryCall(Call, Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + def __await__(self) -> Generator[None, None, _TResponse]: ... + +class UnaryStreamCall(Call, Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + def __aiter__(self) -> AsyncIterator[_TResponse]: ... + @abc.abstractmethod + async def read(self) -> _EOFType | _TResponse: ... + +class StreamUnaryCall(Call, Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + async def write(self, request: _TRequest) -> None: ... + @abc.abstractmethod + async def done_writing(self) -> None: ... + @abc.abstractmethod + def __await__(self) -> Generator[None, None, _TResponse]: ... + +class StreamStreamCall(Call, Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + def __aiter__(self) -> AsyncIterator[_TResponse]: ... + @abc.abstractmethod + async def read(self) -> _EOFType | _TResponse: ... 
+ @abc.abstractmethod + async def write(self, request: _TRequest) -> None: ... + @abc.abstractmethod + async def done_writing(self) -> None: ... + +# Service-Side Context: + +@type_check_only +class _DoneCallback(Generic[_TRequest, _TResponse]): + def __call__(self, ctx: ServicerContext[_TRequest, _TResponse]) -> None: ... + +class ServicerContext(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + async def abort(self, code: StatusCode, details: str = ..., trailing_metadata: _MetadataType = ...) -> NoReturn: ... + @abc.abstractmethod + async def read(self) -> _TRequest: ... + @abc.abstractmethod + async def write(self, message: _TResponse) -> None: ... + @abc.abstractmethod + async def send_initial_metadata(self, initial_metadata: _MetadataType) -> None: ... + def add_done_callback(self, callback: _DoneCallback[_TRequest, _TResponse]) -> None: ... + @abc.abstractmethod + def set_trailing_metadata(self, trailing_metadata: _MetadataType) -> None: ... + @abc.abstractmethod + def invocation_metadata(self) -> Metadata | None: ... + @abc.abstractmethod + def set_code(self, code: StatusCode) -> None: ... + @abc.abstractmethod + def set_details(self, details: str) -> None: ... + @abc.abstractmethod + def set_compression(self, compression: Compression) -> None: ... + @abc.abstractmethod + def disable_next_message_compression(self) -> None: ... + @abc.abstractmethod + def peer(self) -> str: ... + @abc.abstractmethod + def peer_identities(self) -> Iterable[bytes] | None: ... + @abc.abstractmethod + def peer_identity_key(self) -> str | None: ... + @abc.abstractmethod + def auth_context(self) -> Mapping[str, Iterable[bytes]]: ... + def time_remaining(self) -> float: ... + def trailing_metadata(self) -> Metadata: ... + def code(self) -> StatusCode: ... + def details(self) -> str: ... + def cancelled(self) -> bool: ... + def done(self) -> bool: ... + +# Client-Side Interceptor: + +class ClientCallDetails(abc.ABC): + def __init__( + self, + method: str, + timeout: float | None, + metadata: Metadata | None, + credentials: CallCredentials | None, + wait_for_ready: bool | None, + ) -> None: ... + + method: str + timeout: float | None + metadata: Metadata | None + credentials: CallCredentials | None + + # "This is an EXPERIMENTAL argument. An optional flag t enable wait for ready mechanism." + wait_for_ready: bool | None + + # As at 1.53.0, this is not supported in aio: + # compression: Compression | None + +@type_check_only +class _InterceptedCall(Generic[_TRequest, _TResponse]): + def __init__(self, interceptors_task: asyncio.Task[Any]) -> None: ... + def __del__(self) -> None: ... + def cancel(self) -> bool: ... + def cancelled(self) -> bool: ... + def done(self) -> bool: ... + def add_done_callback(self, callback: _DoneCallback[_TRequest, _TResponse]) -> None: ... + def time_remaining(self) -> float | None: ... + async def initial_metadata(self) -> Metadata | None: ... + async def trailing_metadata(self) -> Metadata | None: ... + async def code(self) -> StatusCode: ... + async def details(self) -> str: ... + async def debug_error_string(self) -> str | None: ... + async def wait_for_connection(self) -> None: ... + +class InterceptedUnaryUnaryCall(_InterceptedCall[_TRequest, _TResponse], metaclass=abc.ABCMeta): + def __await__(self) -> Generator[Incomplete, None, _TResponse]: ... 
+ def __init__( + self, + interceptors: Sequence[UnaryUnaryClientInterceptor[_TRequest, _TResponse]], + request: _TRequest, + timeout: float | None, + metadata: Metadata, + credentials: CallCredentials | None, + wait_for_ready: bool | None, + channel: Channel, + method: bytes, + request_serializer: _RequestSerializer, + response_deserializer: _ResponseDeserializer, + loop: asyncio.AbstractEventLoop, + ) -> None: ... + + # pylint: disable=too-many-arguments + async def _invoke( + self, + interceptors: Sequence[UnaryUnaryClientInterceptor[_TRequest, _TResponse]], + method: bytes, + timeout: float | None, + metadata: Metadata | None, + credentials: CallCredentials | None, + wait_for_ready: bool | None, + request: _TRequest, + request_serializer: _RequestSerializer, + response_deserializer: _ResponseDeserializer, + ) -> UnaryUnaryCall[_TRequest, _TResponse]: ... + def time_remaining(self) -> float | None: ... + +class UnaryUnaryClientInterceptor(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + async def intercept_unary_unary( + self, + # XXX: See equivalent function in grpc types for notes about continuation: + continuation: Callable[[ClientCallDetails, _TRequest], UnaryUnaryCall[_TRequest, _TResponse]], + client_call_details: ClientCallDetails, + request: _TRequest, + ) -> _TResponse: ... + +class UnaryStreamClientInterceptor(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + async def intercept_unary_stream( + self, + continuation: Callable[[ClientCallDetails, _TRequest], UnaryStreamCall[_TRequest, _TResponse]], + client_call_details: ClientCallDetails, + request: _TRequest, + ) -> AsyncIterable[_TResponse] | UnaryStreamCall[_TRequest, _TResponse]: ... + +class StreamUnaryClientInterceptor(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + async def intercept_stream_unary( + self, + continuation: Callable[[ClientCallDetails, _TRequest], StreamUnaryCall[_TRequest, _TResponse]], + client_call_details: ClientCallDetails, + request_iterator: AsyncIterable[_TRequest] | Iterable[_TRequest], + ) -> AsyncIterable[_TResponse] | UnaryStreamCall[_TRequest, _TResponse]: ... + +class StreamStreamClientInterceptor(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + async def intercept_stream_stream( + self, + continuation: Callable[[ClientCallDetails, _TRequest], StreamStreamCall[_TRequest, _TResponse]], + client_call_details: ClientCallDetails, + request_iterator: AsyncIterable[_TRequest] | Iterable[_TRequest], + ) -> AsyncIterable[_TResponse] | StreamStreamCall[_TRequest, _TResponse]: ... + +# Server-Side Interceptor: + +class ServerInterceptor(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + async def intercept_service( + self, + continuation: Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler[_TRequest, _TResponse]]], + handler_call_details: HandlerCallDetails, + ) -> RpcMethodHandler[_TRequest, _TResponse]: ... + +# Multi-Callable Interfaces: + +class UnaryUnaryMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + def __call__( + self, + request: _TRequest, + *, + timeout: float | None = ..., + metadata: _MetadataType | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> UnaryUnaryCall[_TRequest, _TResponse]: ... 
+ +class UnaryStreamMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + def __call__( + self, + request: _TRequest, + *, + timeout: float | None = ..., + metadata: _MetadataType | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> UnaryStreamCall[_TRequest, _TResponse]: ... + +class StreamUnaryMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + def __call__( + self, + request_iterator: AsyncIterator[_TRequest] | Iterator[_TRequest] | None = None, + timeout: float | None = ..., + metadata: _MetadataType | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> StreamUnaryCall[_TRequest, _TResponse]: ... + +class StreamStreamMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): + @abc.abstractmethod + def __call__( + self, + request_iterator: AsyncIterator[_TRequest] | Iterator[_TRequest] | None = None, + timeout: float | None = ..., + metadata: _MetadataType | None = ..., + credentials: CallCredentials | None = ..., + # FIXME: optional bool seems weird, but that's what the docs suggest + wait_for_ready: bool | None = ..., + compression: Compression | None = ..., + ) -> StreamStreamCall[_TRequest, _TResponse]: ... + +# Metadata: + +_MetadataKey: TypeAlias = str +_MetadataValue: TypeAlias = str | bytes +_MetadatumType: TypeAlias = tuple[_MetadataKey, _MetadataValue] +_MetadataType: TypeAlias = Metadata | Sequence[_MetadatumType] +_T = TypeVar("_T") + +class Metadata(Mapping[_MetadataKey, _MetadataValue]): + def __init__(self, *args: tuple[_MetadataKey, _MetadataValue]) -> None: ... + @classmethod + def from_tuple(cls, raw_metadata: tuple[_MetadataKey, _MetadataValue]) -> Metadata: ... + def add(self, key: _MetadataKey, value: _MetadataValue) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _MetadataKey) -> _MetadataValue: ... + def __setitem__(self, key: _MetadataKey, value: _MetadataValue) -> None: ... + def __delitem__(self, key: _MetadataKey) -> None: ... + def delete_all(self, key: _MetadataKey) -> None: ... + def __iter__(self) -> Iterator[_MetadataKey]: ... + @overload + def get(self, key: _MetadataKey) -> _MetadataValue | None: ... + @overload + def get(self, key: _MetadataKey, default: _T) -> _MetadataValue | _T: ... + def get_all(self, key: _MetadataKey) -> list[_MetadataValue]: ... + def set_all(self, key: _MetadataKey, values: list[_MetadataValue]) -> None: ... + def __contains__(self, key: object) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __add__(self, other: Any) -> Metadata: ... diff --git a/stubs/grpcio/grpc_channelz/__init__.pyi b/stubs/grpcio/grpc_channelz/__init__.pyi new file mode 100644 index 000000000000..0f6820f054ea --- /dev/null +++ b/stubs/grpcio/grpc_channelz/__init__.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_channelz/v1/__init__.pyi b/stubs/grpcio/grpc_channelz/v1/__init__.pyi new file mode 100644 index 000000000000..0f6820f054ea --- /dev/null +++ b/stubs/grpcio/grpc_channelz/v1/__init__.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... 
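
The async channel surface stubbed above is consumed roughly like this; a minimal sketch, assuming a reachable server, with the target address, method path, and request bytes as illustrative placeholders rather than anything defined in these stubs:

    import asyncio

    from grpc import aio

    async def main() -> None:
        async with aio.insecure_channel("localhost:50051") as channel:
            # Channel.unary_unary() returns a UnaryUnaryMultiCallable; invoking it starts
            # the RPC and yields an awaitable UnaryUnaryCall.
            call = channel.unary_unary("/example.Echo/Echo")(
                b"ping",
                metadata=aio.Metadata(("x-request-id", "123")),
                timeout=5.0,
            )
            response = await call
            print(response, await call.code())

    asyncio.run(main())
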
diff --git a/stubs/grpcio/grpc_channelz/v1/_servicer.pyi b/stubs/grpcio/grpc_channelz/v1/_servicer.pyi new file mode 100644 index 000000000000..f8b7ba089b47 --- /dev/null +++ b/stubs/grpcio/grpc_channelz/v1/_servicer.pyi @@ -0,0 +1,25 @@ +import grpc_channelz.v1.channelz_pb2 as _channelz_pb2 +import grpc_channelz.v1.channelz_pb2_grpc as _channelz_pb2_grpc +from grpc import ServicerContext + +class ChannelzServicer(_channelz_pb2_grpc.ChannelzServicer): + @staticmethod + def GetTopChannels( + request: _channelz_pb2.GetTopChannelsRequest, context: ServicerContext + ) -> _channelz_pb2.GetTopChannelsResponse: ... + @staticmethod + def GetServers(request: _channelz_pb2.GetServersRequest, context: ServicerContext) -> _channelz_pb2.GetServersResponse: ... + @staticmethod + def GetServer(request: _channelz_pb2.GetServerRequest, context: ServicerContext) -> _channelz_pb2.GetServerResponse: ... + @staticmethod + def GetServerSockets( + request: _channelz_pb2.GetServerSocketsRequest, context: ServicerContext + ) -> _channelz_pb2.GetServerSocketsResponse: ... + @staticmethod + def GetChannel(request: _channelz_pb2.GetChannelRequest, context: ServicerContext) -> _channelz_pb2.GetChannelResponse: ... + @staticmethod + def GetSubchannel( + request: _channelz_pb2.GetSubchannelRequest, context: ServicerContext + ) -> _channelz_pb2.GetSubchannelResponse: ... + @staticmethod + def GetSocket(request: _channelz_pb2.GetSocketRequest, context: ServicerContext) -> _channelz_pb2.GetSocketResponse: ... diff --git a/stubs/grpcio/grpc_channelz/v1/channelz.pyi b/stubs/grpcio/grpc_channelz/v1/channelz.pyi new file mode 100644 index 000000000000..f5aa790e028d --- /dev/null +++ b/stubs/grpcio/grpc_channelz/v1/channelz.pyi @@ -0,0 +1,3 @@ +from grpc import Server + +def add_channelz_servicer(server: Server) -> None: ... diff --git a/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi b/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi new file mode 100644 index 000000000000..fdc2ff0ee872 --- /dev/null +++ b/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete + +GetTopChannelsRequest = Incomplete +GetTopChannelsResponse = Incomplete +GetServersRequest = Incomplete +GetServersResponse = Incomplete +GetServerRequest = Incomplete +GetServerResponse = Incomplete +GetServerSocketsRequest = Incomplete +GetServerSocketsResponse = Incomplete +GetChannelRequest = Incomplete +GetChannelResponse = Incomplete +GetSubchannelRequest = Incomplete +GetSubchannelResponse = Incomplete +GetSocketRequest = Incomplete +GetSocketResponse = Incomplete diff --git a/stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi b/stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi new file mode 100644 index 000000000000..0b9716abcf75 --- /dev/null +++ b/stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +ChannelzServicer = Incomplete diff --git a/stubs/grpcio/grpc_health/__init__.pyi b/stubs/grpcio/grpc_health/__init__.pyi new file mode 100644 index 000000000000..0f6820f054ea --- /dev/null +++ b/stubs/grpcio/grpc_health/__init__.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_health/v1/__init__.pyi b/stubs/grpcio/grpc_health/v1/__init__.pyi new file mode 100644 index 000000000000..0f6820f054ea --- /dev/null +++ b/stubs/grpcio/grpc_health/v1/__init__.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... 
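
A minimal sketch of wiring the channelz service typed above into a server; the port and thread-pool size are illustrative:

    from concurrent import futures

    import grpc
    from grpc_channelz.v1 import channelz

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    channelz.add_channelz_servicer(server)  # stubbed above as accepting any grpc.Server
    server.add_insecure_port("localhost:50052")
    server.start()
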
diff --git a/stubs/grpcio/grpc_health/v1/health.pyi b/stubs/grpcio/grpc_health/v1/health.pyi new file mode 100644 index 000000000000..7ea7dc5d0b50 --- /dev/null +++ b/stubs/grpcio/grpc_health/v1/health.pyi @@ -0,0 +1,34 @@ +from concurrent import futures +from typing import Any, Protocol + +from grpc import ServicerContext +from grpc_health.v1 import health_pb2 as _health_pb2, health_pb2_grpc as _health_pb2_grpc + +SERVICE_NAME: str +OVERALL_HEALTH: str + +class _Watcher: + def __init__(self) -> None: ... + def __iter__(self) -> _Watcher: ... + def next(self) -> _health_pb2.HealthCheckResponse: ... + def __next__(self) -> _health_pb2.HealthCheckResponse: ... + def add(self, response: _health_pb2.HealthCheckResponse) -> None: ... + def close(self) -> None: ... + +# FIXME: This needs further investigation +class _SendResponseCallback(Protocol): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +class HealthServicer(_health_pb2_grpc.HealthServicer): + def __init__( + self, experimental_non_blocking: bool = ..., experimental_thread_pool: futures.ThreadPoolExecutor | None = ... + ) -> None: ... + def Check(self, request: _health_pb2.HealthCheckRequest, context: ServicerContext) -> _health_pb2.HealthCheckResponse: ... + def Watch( + self, + request: _health_pb2.HealthCheckRequest, + context: ServicerContext, + send_response_callback: _SendResponseCallback | None = ..., + ) -> _health_pb2.HealthCheckResponse: ... + def set(self, service: str, status: _health_pb2.HealthCheckResponse.ServingStatus) -> None: ... + def enter_graceful_shutdown(self) -> None: ... diff --git a/stubs/grpcio/grpc_health/v1/health_pb2.pyi b/stubs/grpcio/grpc_health/v1/health_pb2.pyi new file mode 100644 index 000000000000..0f6820f054ea --- /dev/null +++ b/stubs/grpcio/grpc_health/v1/health_pb2.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi b/stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi new file mode 100644 index 000000000000..8a29ae2fab5b --- /dev/null +++ b/stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... + +# FIXME: Incomplete +class HealthServicer: ... diff --git a/stubs/grpcio/grpc_reflection/__init__.pyi b/stubs/grpcio/grpc_reflection/__init__.pyi new file mode 100644 index 000000000000..0f6820f054ea --- /dev/null +++ b/stubs/grpcio/grpc_reflection/__init__.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi b/stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi new file mode 100644 index 000000000000..0f6820f054ea --- /dev/null +++ b/stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_reflection/v1alpha/_base.pyi b/stubs/grpcio/grpc_reflection/v1alpha/_base.pyi new file mode 100644 index 000000000000..00704b5062ef --- /dev/null +++ b/stubs/grpcio/grpc_reflection/v1alpha/_base.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... + +# FIXME: Incomplete +class BaseReflectionServicer: ... 
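
A minimal sketch of the health-checking servicer typed above; the service name and port are illustrative, and add_HealthServicer_to_server comes from the generated health_pb2_grpc module, which these stubs only expose as Incomplete:

    from concurrent import futures

    import grpc
    from grpc_health.v1 import health, health_pb2, health_pb2_grpc

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    health_servicer = health.HealthServicer()
    health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server)
    health_servicer.set("example.Echo", health_pb2.HealthCheckResponse.SERVING)
    server.add_insecure_port("localhost:50053")
    server.start()
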
diff --git a/stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi b/stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi new file mode 100644 index 000000000000..cf75c363f198 --- /dev/null +++ b/stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi @@ -0,0 +1,23 @@ +import typing_extensions +from _typeshed import Incomplete +from collections.abc import Iterable + +import grpc +from google.protobuf import descriptor_pool +from grpc import aio +from grpc_reflection.v1alpha import reflection_pb2 as _reflection_pb2 +from grpc_reflection.v1alpha._base import BaseReflectionServicer + +SERVICE_NAME: str + +_AnyServer: typing_extensions.TypeAlias = grpc.Server | aio.Server +_AnyServicerContext: typing_extensions.TypeAlias = grpc.ServicerContext | aio.ServicerContext[Incomplete, Incomplete] + +class ReflectionServicer(BaseReflectionServicer): + def ServerReflectionInfo( + self, request_iterator: Iterable[_reflection_pb2.ServerReflectionRequest], context: _AnyServicerContext + ) -> None: ... + +def enable_server_reflection( + service_names: Iterable[str], server: _AnyServer, pool: descriptor_pool.DescriptorPool | None = ... +) -> None: ... diff --git a/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi b/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi new file mode 100644 index 000000000000..0f6820f054ea --- /dev/null +++ b/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_status/__init__.pyi b/stubs/grpcio/grpc_status/__init__.pyi new file mode 100644 index 000000000000..0f6820f054ea --- /dev/null +++ b/stubs/grpcio/grpc_status/__init__.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_status/rpc_status.pyi b/stubs/grpcio/grpc_status/rpc_status.pyi new file mode 100644 index 000000000000..abf6b8761f3c --- /dev/null +++ b/stubs/grpcio/grpc_status/rpc_status.pyi @@ -0,0 +1,13 @@ +from typing import Any + +import grpc + +# XXX: don't yet know how to add a stub for google.rpc.status_pb2.Status +# without affecting other stuff; may need to make a stub-only package for +# google.rpc as well. + +# Returns a google.rpc.status.Status message corresponding to a given grpc.Call. +def from_call(call: grpc.Call) -> Any: ... + +# Convert a google.rpc.status.Status message to grpc.Status. +def to_status(status: Any) -> grpc.Status: ... From 58a2d0647f06c3944fc1682a1238cdebff90686a Mon Sep 17 00:00:00 2001 From: Thanos <111999343+Sachaa-Thanasius@users.noreply.github.com> Date: Fri, 25 Apr 2025 06:01:45 -0400 Subject: [PATCH 254/388] Add missing `_optimize` parameter to `importlib.machinery.SourceFileLoader` method. (#13880) --- stdlib/_frozen_importlib_external.pyi | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/stdlib/_frozen_importlib_external.pyi b/stdlib/_frozen_importlib_external.pyi index 463b4087f6b6..edad50a8d858 100644 --- a/stdlib/_frozen_importlib_external.pyi +++ b/stdlib/_frozen_importlib_external.pyi @@ -121,6 +121,13 @@ class FileLoader: class SourceFileLoader(importlib.abc.FileLoader, FileLoader, importlib.abc.SourceLoader, SourceLoader): # type: ignore[misc] # incompatible method arguments in base classes def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: ... def path_stats(self, path: str) -> Mapping[str, Any]: ... 
+ def source_to_code( # type: ignore[override] # incompatible with InspectLoader.source_to_code + self, + data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, + path: ReadableBuffer | StrPath, + *, + _optimize: int = -1, + ) -> types.CodeType: ... class SourcelessFileLoader(importlib.abc.FileLoader, FileLoader, _LoaderBasics): def get_code(self, fullname: str) -> types.CodeType | None: ... From da855b3b1aab102434d3e44534cae34c68ba8f80 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 25 Apr 2025 12:16:28 +0200 Subject: [PATCH 255/388] [fpdf2] Update to 2.8.3 (#13871) --- stubs/fpdf2/METADATA.toml | 2 +- stubs/fpdf2/fpdf/_fonttools_shims.pyi | 5 +- stubs/fpdf2/fpdf/enums.pyi | 16 ++++ stubs/fpdf2/fpdf/fonts.pyi | 52 ++++++++----- stubs/fpdf2/fpdf/fpdf.pyi | 58 ++++++++++++-- stubs/fpdf2/fpdf/graphics_state.pyi | 12 ++- stubs/fpdf2/fpdf/line_break.pyi | 12 ++- stubs/fpdf2/fpdf/outline.pyi | 5 +- stubs/fpdf2/fpdf/output.pyi | 41 +++++++++- stubs/fpdf2/fpdf/pattern.pyi | 104 ++++++++++++++++++++++++++ stubs/fpdf2/fpdf/prefs.pyi | 23 +++++- stubs/fpdf2/fpdf/syntax.pyi | 7 ++ stubs/fpdf2/fpdf/table.pyi | 19 ++++- stubs/fpdf2/fpdf/text_region.pyi | 3 + stubs/fpdf2/fpdf/util.pyi | 2 +- 15 files changed, 320 insertions(+), 41 deletions(-) create mode 100644 stubs/fpdf2/fpdf/pattern.pyi diff --git a/stubs/fpdf2/METADATA.toml b/stubs/fpdf2/METADATA.toml index 180d317a993e..8ffd02df0c12 100644 --- a/stubs/fpdf2/METADATA.toml +++ b/stubs/fpdf2/METADATA.toml @@ -1,4 +1,4 @@ -version = "2.8.2" +version = "2.8.3" upstream_repository = "https://github.com/PyFPDF/fpdf2" requires = ["Pillow>=10.3.0"] diff --git a/stubs/fpdf2/fpdf/_fonttools_shims.pyi b/stubs/fpdf2/fpdf/_fonttools_shims.pyi index 95c6c6d339da..f68d651014d6 100644 --- a/stubs/fpdf2/fpdf/_fonttools_shims.pyi +++ b/stubs/fpdf2/fpdf/_fonttools_shims.pyi @@ -2,7 +2,7 @@ from abc import ABCMeta, abstractmethod from collections.abc import Mapping from logging import Logger -from typing import Protocol +from typing import Any, Protocol from typing_extensions import TypeAlias # from fonttools.ttLib.ttGlyphSet @@ -13,6 +13,9 @@ class _TTGlyph(Protocol): _TTGlyphSet: TypeAlias = Mapping[str, _TTGlyph] # Simplified for our needs +# fonttools.ttLib.TTFont +_TTFont: TypeAlias = Any # noqa: Y047 + # from fontTools.misc.loggingTools class LogMixin: diff --git a/stubs/fpdf2/fpdf/enums.pyi b/stubs/fpdf2/fpdf/enums.pyi index 20441ee7c4a5..f9f7fb8ce4ee 100644 --- a/stubs/fpdf2/fpdf/enums.pyi +++ b/stubs/fpdf2/fpdf/enums.pyi @@ -56,6 +56,7 @@ class TextEmphasis(CoerciveIntFlag): B = 1 I = 2 U = 4 + S = 8 @property def style(self) -> str: ... @@ -296,6 +297,11 @@ class TextDirection(CoerciveEnum): TTB = "TTB" BTT = "BTT" +class OutputIntentSubType(CoerciveEnum): + PDFX = "GTS_PDFX" + PDFA = "GTS_PDFA1" + ISOPDF = "ISO_PDFE1" + class PageLabelStyle(CoerciveEnum): NUMBER = "D" UPPER_ROMAN = "R" @@ -322,3 +328,13 @@ class PageOrientation(CoerciveEnum): @classmethod def coerce(cls, value: Self | str) -> Self: ... 
# type: ignore[override] + +class PDFResourceType(Enum): + EXT_G_STATE = "ExtGState" + COLOR_SPACE = "ColorSpace" + PATTERN = "Pattern" + SHADDING = "Shading" + X_OBJECT = "XObject" + FONT = "Font" + PROC_SET = "ProcSet" + PROPERTIES = "Properties" diff --git a/stubs/fpdf2/fpdf/fonts.pyi b/stubs/fpdf2/fpdf/fonts.pyi index dc793aaed1af..adb41f2412ea 100644 --- a/stubs/fpdf2/fpdf/fonts.pyi +++ b/stubs/fpdf2/fpdf/fonts.pyi @@ -1,14 +1,19 @@ import dataclasses -from _typeshed import Incomplete +from _typeshed import Incomplete, Unused +from collections import defaultdict from collections.abc import Generator from dataclasses import dataclass +from logging import Logger from typing import Final, overload from typing_extensions import Self, deprecated +from ._fonttools_shims import _TTFont from .drawing import DeviceGray, DeviceRGB, Number from .enums import Align, TextEmphasis from .syntax import PDFObject +LOGGER: Logger + # Only defined if harfbuzz is installed. class HarfBuzzFont(Incomplete): # derives from uharfbuzz.Font def __deepcopy__(self, _memo: object) -> Self: ... @@ -73,37 +78,48 @@ class TitleStyle(TextStyle): ... __pdoc__: Final[dict[str, bool]] -class _FontMixin: +class CoreFont: i: int type: str name: str up: int ut: int + sp: int + ss: int cw: int fontkey: str emphasis: TextEmphasis - def encode_text(self, text: str): ... - -class CoreFont(_FontMixin): def __init__(self, fpdf, fontkey: str, style: int) -> None: ... - def get_text_width(self, text: str, font_size_pt: int, _): ... + def get_text_width(self, text: str, font_size_pt: int, _: Unused) -> float: ... + def encode_text(self, text: str) -> str: ... -class TTFFont(_FontMixin): +class TTFFont: + i: int + type: str ttffile: Incomplete - ttfont: Incomplete - scale: Incomplete - desc: Incomplete + fontkey: str + ttfont: _TTFont + scale: float + desc: PDFFontDescriptor + cw: defaultdict[str, int] cmap: Incomplete - glyph_ids: Incomplete - missing_glyphs: Incomplete - subset: Incomplete + glyph_ids: dict[Incomplete, Incomplete] + missing_glyphs: list[Incomplete] + name: str + up: int + ut: int + sp: int + ss: int + emphasis: TextEmphasis + subset: SubsetMap hbfont: HarfBuzzFont | None # Not always defined. def __init__(self, fpdf, font_file_path, fontkey: str, style: int) -> None: ... def close(self) -> None: ... - def get_text_width(self, text: str, font_size_pt: int, text_shaping_parms): ... - def shaped_text_width(self, text: str, font_size_pt: int, text_shaping_parms): ... - def perform_harfbuzz_shaping(self, text: str, font_size_pt: int, text_shaping_parms): ... - def shape_text(self, text: str, font_size_pt: int, text_shaping_parms): ... + def get_text_width(self, text: str, font_size_pt: int, text_shaping_params): ... + def shaped_text_width(self, text: str, font_size_pt: int, text_shaping_params): ... + def perform_harfbuzz_shaping(self, text: str, font_size_pt: int, text_shaping_params): ... + def encode_text(self, text: str) -> str: ... + def shape_text(self, text: str, font_size_pt: int, text_shaping_params): ... class PDFFontDescriptor(PDFObject): type: Incomplete @@ -128,7 +144,7 @@ class Glyph: class SubsetMap: font: TTFFont - def __init__(self, font: TTFFont, identities: list[int]) -> None: ... + def __init__(self, font: TTFFont) -> None: ... def __len__(self) -> int: ... def items(self) -> Generator[Incomplete, None, None]: ... def pick(self, unicode: int): ... 
diff --git a/stubs/fpdf2/fpdf/fpdf.pyi b/stubs/fpdf2/fpdf/fpdf.pyi index 5bd1b679f6e4..2d5bd0f8ebd5 100644 --- a/stubs/fpdf2/fpdf/fpdf.pyi +++ b/stubs/fpdf2/fpdf/fpdf.pyi @@ -23,6 +23,7 @@ from .enums import ( EncryptionMethod, FileAttachmentAnnotationName, MethodReturnValue, + OutputIntentSubType, PageLabelStyle, PageLayout, PageMode, @@ -53,7 +54,7 @@ from .image_datastructures import ( VectorImageInfo as VectorImageInfo, _TextAlign, ) -from .output import OutputProducer, PDFPage +from .output import OutputProducer, PDFICCProfile, PDFPage from .recorder import FPDFRecorder from .structure_tree import StructureTreeBuilder from .syntax import DestinationXYZ @@ -77,7 +78,35 @@ __all__ = [ _Orientation: TypeAlias = Literal["", "portrait", "p", "P", "landscape", "l", "L"] _Format: TypeAlias = Literal["", "a3", "A3", "a4", "A4", "a5", "A5", "letter", "Letter", "legal", "Legal"] _FontStyle: TypeAlias = Literal["", "B", "I", "BI"] -_FontStyles: TypeAlias = Literal["", "B", "I", "U", "BU", "UB", "BI", "IB", "IU", "UI", "BIU", "BUI", "IBU", "IUB", "UBI", "UIB"] +_FontStyles: TypeAlias = Literal[ + "", + "B", + "I", + "U", + "S", + "BU", + "UB", + "BI", + "IB", + "IU", + "UI", + "BS", + "SB", + "IS", + "SI", + "BIU", + "BUI", + "IBU", + "IUB", + "UBI", + "UIB", + "BIS", + "BSI", + "IBS", + "ISB", + "SBI", + "SIB", +] FPDF_VERSION: Final[str] PAGE_FORMATS: dict[_Format, tuple[float, float]] @@ -88,12 +117,14 @@ class ToCPlaceholder(NamedTuple): y: int page_orientation: str pages: int = 1 + reset_page_indices: bool = True def get_page_format(format: _Format | tuple[float, float], k: float | None = None) -> tuple[float, float]: ... class FPDF(GraphicsStateMixin): MARKDOWN_BOLD_MARKER: ClassVar[str] MARKDOWN_ITALICS_MARKER: ClassVar[str] + MARKDOWN_STRIKETHROUGH_MARKER: ClassVar[str] MARKDOWN_UNDERLINE_MARKER: ClassVar[str] MARKDOWN_ESCAPE_CHARACTER: ClassVar[str] MARKDOWN_LINK_REGEX: ClassVar[Pattern[str]] @@ -145,7 +176,6 @@ class FPDF(GraphicsStateMixin): compress: bool pdf_version: str creation_date: datetime.datetime - graphics_style_names_per_page_number: dict[int, set[str]] buffer: bytearray | None @@ -179,6 +209,19 @@ class FPDF(GraphicsStateMixin): def is_ttf_font(self) -> bool: ... @property def page_mode(self) -> PageMode: ... + @page_mode.setter + def page_mode(self, page_mode: PageMode) -> None: ... + @property + def output_intents(self): ... + def add_output_intent( + self, + subtype: OutputIntentSubType, + output_condition_identifier: str | None = None, + output_condition: str | None = None, + registry_name: str | None = None, + dest_output_profile: PDFICCProfile | None = None, + info: str | None = None, + ) -> None: ... @property def epw(self) -> float: ... @property @@ -336,6 +379,7 @@ class FPDF(GraphicsStateMixin): closed: bool = False, style: RenderStyle | Literal["D", "F", "DF", "FD"] | None = None, ) -> None: ... + def use_pattern(self, shading) -> _GeneratorContextManager[None]: ... def add_font( self, family: str | None = None, @@ -343,7 +387,7 @@ class FPDF(GraphicsStateMixin): fname: str | PurePath | None = None, uni: bool | Literal["DEPRECATED"] = "DEPRECATED", ) -> None: ... - def set_font(self, family: str | None = None, style: _FontStyles = "", size: int = 0) -> None: ... + def set_font(self, family: str | None = None, style: _FontStyles | TextEmphasis = "", size: int = 0) -> None: ... def set_font_size(self, size: float) -> None: ... def set_char_spacing(self, spacing: float) -> None: ... def set_stretching(self, stretching: float) -> None: ... 
@@ -618,7 +662,11 @@ class FPDF(GraphicsStateMixin): def unbreakable(self) -> _GeneratorContextManager[FPDFRecorder]: ... def offset_rendering(self) -> _GeneratorContextManager[FPDFRecorder]: ... def insert_toc_placeholder( - self, render_toc_function: Callable[[FPDF, list[OutlineSection]], object], pages: int = 1, allow_extra_pages: bool = False + self, + render_toc_function: Callable[[FPDF, list[OutlineSection]], object], + pages: int = 1, + allow_extra_pages: bool = False, + reset_page_indices: bool = True, ) -> None: ... def set_section_title_styles( self, diff --git a/stubs/fpdf2/fpdf/graphics_state.pyi b/stubs/fpdf2/fpdf/graphics_state.pyi index cbc0048ab446..8fe7e5e1b723 100644 --- a/stubs/fpdf2/fpdf/graphics_state.pyi +++ b/stubs/fpdf2/fpdf/graphics_state.pyi @@ -1,4 +1,4 @@ -from typing import Any, ClassVar, Literal, TypedDict, type_check_only +from typing import Any, ClassVar, Final, Literal, TypedDict, type_check_only from .drawing import DeviceGray, DeviceRGB from .enums import TextMode @@ -36,6 +36,10 @@ class GraphicsStateMixin: @underline.setter def underline(self, v: bool) -> None: ... @property + def strikethrough(self) -> bool: ... + @strikethrough.setter + def strikethrough(self, v: bool) -> None: ... + @property def font_style(self) -> str: ... @font_style.setter def font_style(self, v: str) -> None: ... @@ -64,6 +68,10 @@ class GraphicsStateMixin: @current_font.setter def current_font(self, v: dict[str, Any]) -> None: ... @property + def current_font_is_set_on_page(self) -> bool: ... + @current_font_is_set_on_page.setter + def current_font_is_set_on_page(self, v: bool) -> None: ... + @property def dash_pattern(self) -> dict[str, float]: ... @dash_pattern.setter def dash_pattern(self, v: dict[str, float]) -> None: ... @@ -116,3 +124,5 @@ class GraphicsStateMixin: @text_shaping.setter def text_shaping(self, v: _TextShaping | None) -> None: ... def font_face(self) -> FontFace: ... + +__pdoc__: Final[dict[str, bool]] diff --git a/stubs/fpdf2/fpdf/line_break.pyi b/stubs/fpdf2/fpdf/line_break.pyi index 90c4678df1c9..6d48b958b9c5 100644 --- a/stubs/fpdf2/fpdf/line_break.pyi +++ b/stubs/fpdf2/fpdf/line_break.pyi @@ -43,7 +43,9 @@ class Fragment: @property def text_mode(self): ... @property - def underline(self): ... + def underline(self) -> bool: ... + @property + def strikethrough(self) -> bool: ... @property def draw_color(self): ... @property @@ -90,6 +92,7 @@ class TextLine(NamedTuple): max_width: float trailing_nl: bool = False trailing_form_feed: bool = False + indent: float = 0 def get_ordered_fragments(self) -> tuple[Fragment, ...]: ... class SpaceHint(NamedTuple): @@ -114,13 +117,14 @@ class HyphenHint(NamedTuple): class CurrentLine: max_width: float - print_sh: Incomplete + print_sh: bool + indent: float fragments: list[Fragment] height: int number_of_spaces: int space_break_hint: Incomplete hyphen_break_hint: Incomplete - def __init__(self, max_width: float, print_sh: bool = False) -> None: ... + def __init__(self, max_width: float, print_sh: bool = False, indent: float = 0) -> None: ... @property def width(self) -> float: ... def add_character( @@ -150,6 +154,7 @@ class MultiLineBreak: fragment_index: int character_index: int idx_last_forced_break: int | None + first_line_indent: float def __init__( self, fragments: Sequence[Fragment], @@ -160,5 +165,6 @@ class MultiLineBreak: wrapmode: WrapMode = ..., line_height: float = 1.0, skip_leading_spaces: bool = False, + first_line_indent: float = 0, ) -> None: ... def get_line(self) -> TextLine: ... 
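
A minimal sketch of the strikethrough emphasis that the fpdf.pyi and enums.pyi changes in this commit now allow in set_font styles; the font, text, and output path are illustrative, and it assumes fpdf2 >= 2.8.3 at runtime:

    from fpdf import FPDF

    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Helvetica", style="S", size=12)  # "S" is newly accepted by _FontStyles
    pdf.cell(text="struck-through text")
    pdf.output("strikethrough.pdf")
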
diff --git a/stubs/fpdf2/fpdf/outline.pyi b/stubs/fpdf2/fpdf/outline.pyi index d3bee41be5dc..52ceb7eff68d 100644 --- a/stubs/fpdf2/fpdf/outline.pyi +++ b/stubs/fpdf2/fpdf/outline.pyi @@ -1,13 +1,14 @@ from _typeshed import Incomplete from collections.abc import Generator, Iterable -from typing import NamedTuple +from dataclasses import dataclass from .fonts import TextStyle from .fpdf import FPDF from .structure_tree import StructElem from .syntax import Destination, PDFObject, PDFString -class OutlineSection(NamedTuple): +@dataclass +class OutlineSection: name: str level: int page_number: int diff --git a/stubs/fpdf2/fpdf/output.pyi b/stubs/fpdf2/fpdf/output.pyi index db06865eee0e..630472ca0abd 100644 --- a/stubs/fpdf2/fpdf/output.pyi +++ b/stubs/fpdf2/fpdf/output.pyi @@ -5,7 +5,7 @@ from typing import Final from .annotations import AnnotationDict from .encryption import StandardSecurityHandler -from .enums import PageLabelStyle +from .enums import OutputIntentSubType, PageLabelStyle, PDFResourceType from .fpdf import FPDF from .image_datastructures import RasterImageInfo from .line_break import TotalPagesSubstitutionFragment @@ -86,6 +86,7 @@ class PDFCatalog(PDFObject): metadata: Incomplete | None names: Incomplete | None outlines: Incomplete | None + output_intents: Incomplete | None struct_tree_root: Incomplete | None def __init__( self, @@ -100,7 +101,9 @@ class PDFResources(PDFObject): font: Incomplete x_object: Incomplete ext_g_state: Incomplete - def __init__(self, proc_set, font, x_object, ext_g_state) -> None: ... + shading: Incomplete + pattern: Incomplete + def __init__(self, proc_set, font, x_object, ext_g_state, shading, pattern) -> None: ... class PDFFontStream(PDFContentStream): length1: int @@ -135,7 +138,7 @@ class PDFXObject(PDFContentStream): decode_parms: Incomplete | None = None, ) -> None: ... -class PDFICCPObject(PDFContentStream): +class PDFICCProfile(PDFContentStream): n: Incomplete alternate: Name def __init__(self, contents: bytes, n, alternate: str) -> None: ... @@ -164,7 +167,8 @@ class PDFPage(PDFObject): resources: Incomplete | None parent: Incomplete | None def __init__(self, duration: Incomplete | None, transition, contents, index) -> None: ... - def index(self): ... + def index(self) -> int: ... + def set_index(self, i: int) -> None: ... def dimensions(self) -> tuple[float | None, float | None]: ... def set_dimensions(self, width_pt: float | None, height_pt: float | None) -> None: ... def set_page_label(self, previous_page_label: PDFPageLabel, page_label: PDFPageLabel) -> None: ... @@ -192,6 +196,35 @@ class PDFXrefAndTrailer(ContentWithoutID): def __init__(self, output_builder) -> None: ... def serialize(self, _security_handler: StandardSecurityHandler | None = None) -> str: ... +class OutputIntentDictionary: + type: Name + s: Name + output_condition_identifier: PDFString | None + output_condition: PDFString | None + registry_name: PDFString | None + dest_output_profile: Incomplete | None + info: PDFString | None + + def __init__( + self, + subtype: OutputIntentSubType | str, + output_condition_identifier: str, + output_condition: str | None = None, + registry_name: str | None = None, + dest_output_profile: PDFICCProfile | None = None, + info: str | None = None, + ) -> None: ... + def serialize(self, _security_handler: StandardSecurityHandler | None = None, _obj_id: Incomplete | None = None): ... 
+ +class ResourceCatalog: + resources: defaultdict[PDFResourceType, dict[Incomplete, Incomplete]] + resources_per_page: defaultdict[tuple[int, PDFResourceType], set[Incomplete]] + + def add(self, resource_type: PDFResourceType, resource, page_number: int) -> Incomplete | None: ... + def get_items(self, resource_type: PDFResourceType): ... + def get_resources_per_page(self, page_number: int, resource_type: PDFResourceType): ... + def get_used_resources(self, resource_type: PDFResourceType) -> set[Incomplete]: ... + class OutputProducer: fpdf: FPDF pdf_objs: list[Incomplete] diff --git a/stubs/fpdf2/fpdf/pattern.pyi b/stubs/fpdf2/fpdf/pattern.pyi new file mode 100644 index 000000000000..c58a3ea4e82c --- /dev/null +++ b/stubs/fpdf2/fpdf/pattern.pyi @@ -0,0 +1,104 @@ +from _typeshed import Incomplete +from abc import ABC +from collections.abc import Iterable +from typing import Final, Literal + +from .drawing import DeviceCMYK, DeviceGray, DeviceRGB +from .fpdf import FPDF +from .syntax import Name, PDFObject + +class Pattern(PDFObject): + type: Name + pattern_type: int + def __init__(self, shading: LinearGradient | RadialGradient) -> None: ... + @property + def shading(self) -> str: ... + +class Type2Function(PDFObject): + function_type: Final = 2 + domain: str + c0: str + c1: str + n: int + def __init__(self, color_1, color_2) -> None: ... + +class Type3Function(PDFObject): + function_type: Final = 3 + domain: str + bounds: str + encode: str + n: int + + def __init__(self, functions: Iterable[Incomplete], bounds: Iterable[Incomplete]) -> None: ... + @property + def functions(self) -> str: ... + +class Shading(PDFObject): + shading_type: Literal[2, 3] + background: str | None + color_space: Name + coords: list[int] + function: str + extend: str + def __init__( + self, + shading_type: Literal[2, 3], + background: DeviceRGB | DeviceGray | DeviceCMYK | None, + color_space: str, + coords: list[int], + function: Type2Function | Type3Function, + extend_before: bool, + extend_after: bool, + ) -> None: ... + +class Gradient(ABC): + color_space: str + colors: list[Incomplete] + background: Incomplete | None + extend_before: Incomplete + extend_after: Incomplete + bounds: Incomplete + functions: Incomplete + pattern: Pattern + coords: Incomplete | None + shading_type: int + + def __init__(self, colors, background, extend_before, extend_after, bounds): ... + def get_shading_object(self) -> Shading: ... + def get_pattern(self) -> Pattern: ... + +class LinearGradient(Gradient): + coords: list[str] + shading_type: int + def __init__( + self, + fpdf: FPDF, + from_x: float, + from_y: float, + to_x: float, + to_y: float, + colors: list[Incomplete], + background: Incomplete | None = None, + extend_before: bool = False, + extend_after: bool = False, + bounds: list[int] | None = None, + ) -> None: ... + +class RadialGradient(Gradient): + coords: list[str] + shading_type: int + def __init__( + self, + fpdf: FPDF, + start_circle_x: float, + start_circle_y: float, + start_circle_radius: float, + end_circle_x: float, + end_circle_y: float, + end_circle_radius: float, + colors: list[Incomplete], + background=None, + extend_before: bool = False, + extend_after: bool = False, + bounds: list[int] | None = None, + ): ... 
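
A minimal sketch of the new pattern.pyi gradient API combined with FPDF.use_pattern; the coordinates are illustrative, and passing hex color strings is an assumption based on the fpdf2 documentation rather than on these stubs, which type colors as list[Incomplete]:

    from fpdf import FPDF
    from fpdf.pattern import LinearGradient

    pdf = FPDF()
    pdf.add_page()
    gradient = LinearGradient(pdf, from_x=0, from_y=0, to_x=pdf.w, to_y=0, colors=["#C33764", "#1D2671"])
    with pdf.use_pattern(gradient):
        # Shapes drawn inside the context manager are filled with the active gradient.
        pdf.rect(x=10, y=10, w=100, h=50, style="FD")
    pdf.output("gradient.pdf")
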
diff --git a/stubs/fpdf2/fpdf/prefs.pyi b/stubs/fpdf2/fpdf/prefs.pyi index 541ecad54d36..f4d4899686e9 100644 --- a/stubs/fpdf2/fpdf/prefs.pyi +++ b/stubs/fpdf2/fpdf/prefs.pyi @@ -1,3 +1,6 @@ +from _typeshed import Incomplete +from typing import Literal + from .enums import Duplex, PageBoundaries, PageMode, TextDirection class ViewerPreferences: @@ -6,9 +9,6 @@ class ViewerPreferences: hide_window_u_i: bool fit_window: bool center_window: bool - display_doc_title: bool - num_copies: int | None - print_page_range: list[int] | None def __init__( self, hide_toolbar: bool = False, @@ -26,16 +26,29 @@ class ViewerPreferences: view_clip: PageBoundaries | None = None, print_area: PageBoundaries | None = None, print_clip: PageBoundaries | None = None, + print_scaling: Incomplete | None = None, ) -> None: ... @property def non_full_screen_page_mode(self) -> PageMode | None: ... @non_full_screen_page_mode.setter def non_full_screen_page_mode(self, page_mode: PageMode | str | None) -> None: ... @property + def num_copies(self) -> int | None: ... + @num_copies.setter + def num_copies(self, num_copies: int | None) -> None: ... + @property + def print_page_range(self) -> list[int] | None: ... + @print_page_range.setter + def print_page_range(self, print_page_range: list[int] | None) -> None: ... + @property def direction(self) -> TextDirection | None: ... @direction.setter def direction(self, direction: TextDirection | str | None) -> None: ... @property + def display_doc_title(self) -> bool: ... + @display_doc_title.setter + def display_doc_title(self, display_doc_title: bool) -> None: ... + @property def duplex(self) -> Duplex | None: ... @duplex.setter def duplex(self, duplex: Duplex | str | None) -> None: ... @@ -55,4 +68,8 @@ class ViewerPreferences: def print_clip(self) -> PageBoundaries | None: ... @print_clip.setter def print_clip(self, view_area: PageBoundaries | str | None) -> None: ... + @property + def print_scaling(self) -> Literal["None", "AppDefault"] | None: ... + @print_scaling.setter + def print_scaling(self, print_scaling: Literal["None", "AppDefault"] | None) -> None: ... def serialize(self) -> str: ... diff --git a/stubs/fpdf2/fpdf/syntax.pyi b/stubs/fpdf2/fpdf/syntax.pyi index 59d44648c47e..62daa150bafd 100644 --- a/stubs/fpdf2/fpdf/syntax.pyi +++ b/stubs/fpdf2/fpdf/syntax.pyi @@ -77,3 +77,10 @@ class DestinationXYZ(Destination): page_ref: Incomplete | None def __init__(self, page: int, top: float, left: float = 0, zoom: float | Literal["null"] = "null") -> None: ... def serialize(self) -> str: ... + def replace( + self, + page: Incomplete | None = None, + top: float | None = None, + left: float | None = None, + zoom: float | Literal["null"] | None = None, + ) -> DestinationXYZ: ... diff --git a/stubs/fpdf2/fpdf/table.pyi b/stubs/fpdf2/fpdf/table.pyi index c29ae9cd2607..28e0313bfabf 100644 --- a/stubs/fpdf2/fpdf/table.pyi +++ b/stubs/fpdf2/fpdf/table.pyi @@ -52,15 +52,30 @@ class Table: outer_border_width: float | None = None, num_heading_rows: int = 1, repeat_headings: TableHeadingsDisplay | int = 1, + min_row_height: Incomplete | None = None, ) -> None: ... - def row(self, cells: Iterable[str] = (), style: FontFace | None = None) -> Row: ... + def row( + self, + cells: Iterable[str] = (), + style: FontFace | None = None, + v_align: VAlign | str | None = None, + min_height: Incomplete | None = None, + ) -> Row: ... def render(self) -> None: ... def get_cell_border(self, i: int, j: int, cell: Cell) -> str | Literal[0, 1]: ... 
class Row: cells: list[Cell] style: FontFace - def __init__(self, table: Table, style: FontFace | None = None) -> None: ... + v_align: VAlign | None + min_height: Incomplete | None + def __init__( + self, + table: Table, + style: FontFace | None = None, + v_align: VAlign | str | None = None, + min_height: Incomplete | None = None, + ) -> None: ... @property def cols_count(self) -> int: ... @property diff --git a/stubs/fpdf2/fpdf/text_region.pyi b/stubs/fpdf2/fpdf/text_region.pyi index 6cc4396a3aa3..76d7b4c69173 100644 --- a/stubs/fpdf2/fpdf/text_region.pyi +++ b/stubs/fpdf2/fpdf/text_region.pyi @@ -41,6 +41,7 @@ class Paragraph: skip_leading_spaces: bool wrapmode: Incomplete bullet: Bullet | None + first_line_indent: float def __init__( self, @@ -54,6 +55,7 @@ class Paragraph: bullet_string: str = "", skip_leading_spaces: bool = False, wrapmode: WrapMode | None = None, + first_line_indent: float = 0, ) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, traceback) -> None: ... @@ -134,6 +136,7 @@ class ParagraphCollectorMixin: bullet_string: str = "", bullet_r_margin: float | None = None, wrapmode: WrapMode | None = None, + first_line_indent: float = 0, ) -> Paragraph: ... def end_paragraph(self) -> None: ... def image( diff --git a/stubs/fpdf2/fpdf/util.pyi b/stubs/fpdf2/fpdf/util.pyi index b8d0a8d57752..f94c5861ad2c 100644 --- a/stubs/fpdf2/fpdf/util.pyi +++ b/stubs/fpdf2/fpdf/util.pyi @@ -26,7 +26,7 @@ def convert_unit( ROMAN_NUMERAL_MAP: Final[tuple[tuple[str, int], ...]] -def int2roman(n: int) -> str: ... +def int2roman(n: int | None) -> str: ... def int_to_letters(n: int) -> str: ... def print_mem_usage(prefix: str) -> None: ... def get_mem_usage(prefix: str) -> str: ... From 5c3634fd22816f9973fa3b889cda2ac4d56c2085 Mon Sep 17 00:00:00 2001 From: thomas-whaley-poco <153152967+thomas-whaley-poco@users.noreply.github.com> Date: Fri, 25 Apr 2025 23:20:40 +1200 Subject: [PATCH 256/388] Fix generics in NetworkX (#13864) --- .../algorithms/shortest_paths/unweighted.pyi | 8 ++--- .../algorithms/shortest_paths/weighted.pyi | 32 +++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi index 8250e0f2c229..5e12a5322da0 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi @@ -11,14 +11,14 @@ def single_target_shortest_path_length(G: Graph[_Node], target: _Node, cutoff: i @_dispatchable def all_pairs_shortest_path_length(G: Graph[_Node], cutoff: int | None = None) -> Generator[Incomplete, None, None]: ... @_dispatchable -def bidirectional_shortest_path(G: Graph[_Node], source: str, target: str): ... +def bidirectional_shortest_path(G: Graph[_Node], source: _Node, target: _Node): ... @_dispatchable -def single_source_shortest_path(G: Graph[_Node], source: str, cutoff: int | None = None): ... +def single_source_shortest_path(G: Graph[_Node], source: _Node, cutoff: int | None = None): ... @_dispatchable -def single_target_shortest_path(G: Graph[_Node], target: str, cutoff: int | None = None): ... +def single_target_shortest_path(G: Graph[_Node], target: _Node, cutoff: int | None = None): ... @_dispatchable def all_pairs_shortest_path(G: Graph[_Node], cutoff: int | None = None) -> Generator[Incomplete, None, None]: ... 
@_dispatchable def predecessor( - G: Graph[_Node], source: str, target: str | None = None, cutoff: int | None = None, return_seen: bool | None = None + G: Graph[_Node], source: _Node, target: _Node | None = None, cutoff: int | None = None, return_seen: bool | None = None ): ... diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi index 8c898e28bff5..dd933aef97a5 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi @@ -15,8 +15,8 @@ def dijkstra_path( @_dispatchable def dijkstra_path_length( G: Graph[_Node], - source: str, - target: str, + source: _Node, + target: _Node, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ): ... @_dispatchable @@ -29,15 +29,15 @@ def single_source_dijkstra_path( @_dispatchable def single_source_dijkstra_path_length( G: Graph[_Node], - source: str, + source: _Node, cutoff: float | None = None, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ): ... @_dispatchable def single_source_dijkstra( G: Graph[_Node], - source: str, - target: str | None = None, + source: _Node, + target: _Node | None = None, cutoff: float | None = None, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ): ... @@ -59,14 +59,14 @@ def multi_source_dijkstra_path_length( def multi_source_dijkstra( G: Graph[_Node], sources, - target: str | None = None, + target: _Node | None = None, cutoff: float | None = None, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ): ... @_dispatchable def dijkstra_predecessor_and_distance( G: Graph[_Node], - source: str, + source: _Node, cutoff: float | None = None, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ): ... @@ -91,8 +91,8 @@ def all_pairs_dijkstra_path( @_dispatchable def bellman_ford_predecessor_and_distance( G: Graph[_Node], - source: str, - target: str | None = None, + source: _Node, + target: _Node | None = None, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", heuristic: bool = False, ): ... @@ -106,8 +106,8 @@ def bellman_ford_path( @_dispatchable def bellman_ford_path_length( G: Graph[_Node], - source: str, - target: str, + source: _Node, + target: _Node, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ): ... @_dispatchable @@ -116,13 +116,13 @@ def single_source_bellman_ford_path( ): ... @_dispatchable def single_source_bellman_ford_path_length( - G: Graph[_Node], source: str, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" + G: Graph[_Node], source: _Node, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" ): ... @_dispatchable def single_source_bellman_ford( G: Graph[_Node], - source: str, - target: str | None = None, + source: _Node, + target: _Node | None = None, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight", ): ... @_dispatchable @@ -135,7 +135,7 @@ def all_pairs_bellman_ford_path( ) -> Generator[Incomplete, None, None]: ... 
@_dispatchable def goldberg_radzik( - G: Graph[_Node], source: str, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" + G: Graph[_Node], source: _Node, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" ): ... @_dispatchable def negative_edge_cycle( @@ -145,7 +145,7 @@ def negative_edge_cycle( ): ... @_dispatchable def find_negative_cycle( - G: Graph[_Node], source: str, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" + G: Graph[_Node], source: _Node, weight: str | Callable[[Any, Any, SupportsGetItem[str, Any]], float | None] | None = "weight" ): ... @_dispatchable def bidirectional_dijkstra( From 9d5f6cba5ee823ad433c1d1f89ddf39d7c3632cc Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 25 Apr 2025 15:57:12 +0400 Subject: [PATCH 257/388] Improve `braintree` (#13862) --- stubs/braintree/braintree/__init__.pyi | 9 +++ stubs/braintree/braintree/add_on.pyi | 2 +- stubs/braintree/braintree/add_on_gateway.pyi | 4 +- stubs/braintree/braintree/address.pyi | 12 +-- stubs/braintree/braintree/address_gateway.pyi | 14 +++- .../braintree/braintree/android_pay_card.pyi | 6 +- stubs/braintree/braintree/apple_pay_card.pyi | 2 +- .../braintree/braintree/apple_pay_gateway.pyi | 7 +- stubs/braintree/braintree/client_token.pyi | 2 +- stubs/braintree/braintree/configuration.pyi | 80 ++++++++++++------- .../braintree/credentials_parser.pyi | 18 ++--- stubs/braintree/braintree/credit_card.pyi | 34 ++++---- .../braintree/credit_card_gateway.pyi | 27 ++++--- .../braintree/credit_card_verification.pyi | 13 +-- .../credit_card_verification_gateway.pyi | 11 ++- stubs/braintree/braintree/customer.pyi | 35 ++++++-- .../braintree/braintree/customer_gateway.pyi | 17 ++-- stubs/braintree/braintree/disbursement.pyi | 4 +- .../braintree/disbursement_detail.pyi | 2 +- stubs/braintree/braintree/discount.pyi | 2 +- .../braintree/braintree/discount_gateway.pyi | 4 +- stubs/braintree/braintree/dispute.pyi | 16 ++-- .../braintree/dispute_details/evidence.pyi | 4 +- .../dispute_details/paypal_message.pyi | 4 +- .../dispute_details/status_history.pyi | 4 +- stubs/braintree/braintree/dispute_gateway.pyi | 18 +++-- stubs/braintree/braintree/document_upload.pyi | 6 +- .../braintree/document_upload_gateway.pyi | 5 +- stubs/braintree/braintree/error_result.pyi | 5 +- stubs/braintree/braintree/errors.pyi | 9 ++- .../braintree/europe_bank_account.pyi | 2 +- .../braintree/exceptions/__init__.pyi | 3 + .../braintree/exchange_rate_quote_gateway.pyi | 8 +- .../braintree/exchange_rate_quote_input.pyi | 2 +- .../braintree/exchange_rate_quote_payload.pyi | 9 ++- .../braintree/exchange_rate_quote_request.pyi | 8 +- stubs/braintree/braintree/masterpass_card.pyi | 4 +- .../braintree/merchant_account_gateway.pyi | 16 ++-- .../braintree/braintree/merchant_gateway.pyi | 5 +- .../braintree/meta_checkout_card.pyi | 4 +- .../braintree/meta_checkout_token.pyi | 4 +- stubs/braintree/braintree/montary_amount.pyi | 4 +- stubs/braintree/braintree/oauth_gateway.pyi | 11 ++- stubs/braintree/braintree/payment_method.pyi | 32 ++++++-- .../braintree/payment_method_gateway.pyi | 16 ++-- .../braintree/payment_method_nonce.pyi | 6 +- .../payment_method_nonce_gateway.pyi | 8 +- .../braintree/payment_method_parser.pyi | 32 +++++++- stubs/braintree/braintree/paypal_account.pyi | 10 ++- .../braintree/paypal_account_gateway.pyi | 10 ++- .../braintree/paypal_payment_resource.pyi | 4 +- .../braintree/braintree/successful_result.pyi | 4 +- 
.../transaction_line_item_gateway.pyi | 4 +- .../braintree/unknown_payment_method.pyi | 2 +- stubs/braintree/braintree/us_bank_account.pyi | 8 +- .../braintree/us_bank_account_gateway.pyi | 4 +- .../us_bank_account_verification.pyi | 11 ++- .../us_bank_account_verification_gateway.pyi | 11 ++- stubs/braintree/braintree/util/constants.pyi | 7 +- stubs/braintree/braintree/util/crypto.pyi | 20 ++++- .../braintree/util/datetime_parser.pyi | 4 +- stubs/braintree/braintree/util/generator.pyi | 25 +++++- .../braintree/util/graphql_client.pyi | 24 +++++- stubs/braintree/braintree/util/http.pyi | 29 ++++--- stubs/braintree/braintree/util/parser.pyi | 6 +- stubs/braintree/braintree/util/xml_util.pyi | 6 +- .../braintree/validation_error_collection.pyi | 21 ++--- .../braintree/visa_checkout_card.pyi | 4 +- .../braintree/webhook_notification.pyi | 4 +- .../webhook_notification_gateway.pyi | 6 +- stubs/braintree/braintree/webhook_testing.pyi | 4 +- .../braintree/webhook_testing_gateway.pyi | 2 +- 72 files changed, 531 insertions(+), 249 deletions(-) diff --git a/stubs/braintree/braintree/__init__.pyi b/stubs/braintree/braintree/__init__.pyi index 30ed226bba2e..1c360cd4ff34 100644 --- a/stubs/braintree/braintree/__init__.pyi +++ b/stubs/braintree/braintree/__init__.pyi @@ -7,6 +7,7 @@ from braintree.amex_express_checkout_card import AmexExpressCheckoutCard as Amex from braintree.android_pay_card import AndroidPayCard as AndroidPayCard from braintree.apple_pay_card import ApplePayCard as ApplePayCard from braintree.apple_pay_gateway import ApplePayGateway as ApplePayGateway +from braintree.blik_alias import BlikAlias as BlikAlias from braintree.braintree_gateway import BraintreeGateway as BraintreeGateway from braintree.client_token import ClientToken as ClientToken from braintree.configuration import Configuration as Configuration @@ -32,11 +33,14 @@ from braintree.dispute import Dispute as Dispute from braintree.dispute_search import DisputeSearch as DisputeSearch from braintree.document_upload import DocumentUpload as DocumentUpload from braintree.document_upload_gateway import DocumentUploadGateway as DocumentUploadGateway +from braintree.enriched_customer_data import EnrichedCustomerData as EnrichedCustomerData from braintree.environment import Environment as Environment from braintree.error_codes import ErrorCodes as ErrorCodes from braintree.error_result import ErrorResult as ErrorResult from braintree.errors import Errors as Errors from braintree.europe_bank_account import EuropeBankAccount as EuropeBankAccount +from braintree.graphql import * +from braintree.liability_shift import LiabilityShift as LiabilityShift from braintree.local_payment_completed import LocalPaymentCompleted as LocalPaymentCompleted from braintree.local_payment_reversed import LocalPaymentReversed as LocalPaymentReversed from braintree.merchant import Merchant as Merchant @@ -46,6 +50,9 @@ from braintree.oauth_access_revocation import OAuthAccessRevocation as OAuthAcce from braintree.partner_merchant import PartnerMerchant as PartnerMerchant from braintree.payment_instrument_type import PaymentInstrumentType as PaymentInstrumentType from braintree.payment_method import PaymentMethod as PaymentMethod +from braintree.payment_method_customer_data_updated_metadata import ( + PaymentMethodCustomerDataUpdatedMetadata as PaymentMethodCustomerDataUpdatedMetadata, +) from braintree.payment_method_nonce import PaymentMethodNonce as PaymentMethodNonce from braintree.payment_method_parser import parse_payment_method as 
parse_payment_method from braintree.paypal_account import PayPalAccount as PayPalAccount @@ -57,6 +64,7 @@ from braintree.resource_collection import ResourceCollection as ResourceCollecti from braintree.risk_data import RiskData as RiskData from braintree.samsung_pay_card import SamsungPayCard as SamsungPayCard from braintree.search import Search as Search +from braintree.sepa_direct_debit_account import SepaDirectDebitAccount as SepaDirectDebitAccount from braintree.settlement_batch_summary import SettlementBatchSummary as SettlementBatchSummary from braintree.signature_service import SignatureService as SignatureService from braintree.status_event import StatusEvent as StatusEvent @@ -77,6 +85,7 @@ from braintree.unknown_payment_method import UnknownPaymentMethod as UnknownPaym from braintree.us_bank_account import UsBankAccount as UsBankAccount from braintree.validation_error_collection import ValidationErrorCollection as ValidationErrorCollection from braintree.venmo_account import VenmoAccount as VenmoAccount +from braintree.venmo_profile_data import VenmoProfileData as VenmoProfileData from braintree.version import Version as Version from braintree.webhook_notification import WebhookNotification as WebhookNotification from braintree.webhook_notification_gateway import WebhookNotificationGateway as WebhookNotificationGateway diff --git a/stubs/braintree/braintree/add_on.pyi b/stubs/braintree/braintree/add_on.pyi index 5af205d8564c..76f5079c16ac 100644 --- a/stubs/braintree/braintree/add_on.pyi +++ b/stubs/braintree/braintree/add_on.pyi @@ -2,4 +2,4 @@ from braintree.modification import Modification class AddOn(Modification): @staticmethod - def all(): ... + def all() -> list[AddOn]: ... diff --git a/stubs/braintree/braintree/add_on_gateway.pyi b/stubs/braintree/braintree/add_on_gateway.pyi index 702cf587b9c3..95bd15bd8c46 100644 --- a/stubs/braintree/braintree/add_on_gateway.pyi +++ b/stubs/braintree/braintree/add_on_gateway.pyi @@ -1,7 +1,9 @@ from _typeshed import Incomplete +from braintree.add_on import AddOn + class AddOnGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def all(self): ... + def all(self) -> list[AddOn]: ... diff --git a/stubs/braintree/braintree/address.pyi b/stubs/braintree/braintree/address.pyi index 529f190ccc2b..f90932b60fd1 100644 --- a/stubs/braintree/braintree/address.pyi +++ b/stubs/braintree/braintree/address.pyi @@ -14,14 +14,14 @@ class Address(Resource): PickupInStore: Final = "pickup_in_store" @staticmethod - def create(params: Incomplete | None = None): ... + def create(params: dict[str, Incomplete] | None = None): ... @staticmethod - def delete(customer_id, address_id): ... + def delete(customer_id: str, address_id: str): ... @staticmethod - def find(customer_id, address_id): ... + def find(customer_id: str, address_id: str): ... @staticmethod - def update(customer_id, address_id, params: Incomplete | None = None): ... + def update(customer_id: str, address_id: str, params: dict[str, Incomplete] | None = None): ... @staticmethod - def create_signature(): ... + def create_signature() -> list[str | dict[str, list[str]]]: ... @staticmethod - def update_signature(): ... + def update_signature() -> list[str | dict[str, list[str]]]: ... 
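
A minimal sketch of the Address calls whose parameter and return types are tightened above; it assumes the gateway has already been configured, and the customer id and address fields are placeholders:

    import braintree

    result = braintree.Address.create({
        "customer_id": "a_customer_id",
        "first_name": "Jen",
        "street_address": "1 E Main St",
        "locality": "Chicago",
        "region": "IL",
        "postal_code": "60622",
    })
    if result.is_success:
        address = braintree.Address.find("a_customer_id", result.address.id)
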
diff --git a/stubs/braintree/braintree/address_gateway.pyi b/stubs/braintree/braintree/address_gateway.pyi index 9a189131ddde..fc7735b016b4 100644 --- a/stubs/braintree/braintree/address_gateway.pyi +++ b/stubs/braintree/braintree/address_gateway.pyi @@ -1,10 +1,16 @@ from _typeshed import Incomplete +from braintree.address import Address +from braintree.error_result import ErrorResult +from braintree.successful_result import SuccessfulResult + class AddressGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def create(self, params: Incomplete | None = None): ... - def delete(self, customer_id, address_id): ... - def find(self, customer_id, address_id): ... - def update(self, customer_id, address_id, params: Incomplete | None = None): ... + def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... + def delete(self, customer_id: str, address_id: str) -> SuccessfulResult: ... + def find(self, customer_id: str, address_id: str) -> Address: ... + def update( + self, customer_id: str, address_id: str, params: dict[str, Incomplete] | None = None + ) -> SuccessfulResult | ErrorResult | None: ... diff --git a/stubs/braintree/braintree/android_pay_card.pyi b/stubs/braintree/braintree/android_pay_card.pyi index dc7d3d2833fa..41e05930a283 100644 --- a/stubs/braintree/braintree/android_pay_card.pyi +++ b/stubs/braintree/braintree/android_pay_card.pyi @@ -14,8 +14,8 @@ class AndroidPayCard(Resource): @property def card_type(self): ... @staticmethod - def signature(): ... + def signature() -> list[str | dict[str, list[str]]]: ... @staticmethod - def card_signature(): ... + def card_signature() -> list[str | dict[str, list[str]]]: ... @staticmethod - def network_token_signature(): ... + def network_token_signature() -> list[str | dict[str, list[str]]]: ... diff --git a/stubs/braintree/braintree/apple_pay_card.pyi b/stubs/braintree/braintree/apple_pay_card.pyi index 81ab542626b9..86c6fc015d4f 100644 --- a/stubs/braintree/braintree/apple_pay_card.pyi +++ b/stubs/braintree/braintree/apple_pay_card.pyi @@ -16,4 +16,4 @@ class ApplePayCard(Resource): @property def expiration_date(self): ... @staticmethod - def signature(): ... + def signature() -> list[str | dict[str, list[str]]]: ... diff --git a/stubs/braintree/braintree/apple_pay_gateway.pyi b/stubs/braintree/braintree/apple_pay_gateway.pyi index e37310688ee1..55477a1eb30b 100644 --- a/stubs/braintree/braintree/apple_pay_gateway.pyi +++ b/stubs/braintree/braintree/apple_pay_gateway.pyi @@ -1,9 +1,12 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.successful_result import SuccessfulResult + class ApplePayGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def register_domain(self, domain): ... - def unregister_domain(self, domain): ... + def register_domain(self, domain: str) -> SuccessfulResult | ErrorResult | None: ... + def unregister_domain(self, domain: str) -> SuccessfulResult: ... def registered_domains(self): ... diff --git a/stubs/braintree/braintree/client_token.pyi b/stubs/braintree/braintree/client_token.pyi index 0bd366e4fc0b..214c90c4be3d 100644 --- a/stubs/braintree/braintree/client_token.pyi +++ b/stubs/braintree/braintree/client_token.pyi @@ -4,4 +4,4 @@ class ClientToken: @staticmethod def generate(params: Incomplete | None = None, gateway: Incomplete | None = None): ... @staticmethod - def generate_signature(): ... 
+ def generate_signature() -> list[str | dict[str, list[str]]]: ... diff --git a/stubs/braintree/braintree/configuration.pyi b/stubs/braintree/braintree/configuration.pyi index 3f4d08c92214..30af8103bc43 100644 --- a/stubs/braintree/braintree/configuration.pyi +++ b/stubs/braintree/braintree/configuration.pyi @@ -1,45 +1,69 @@ from _typeshed import Incomplete +from braintree.braintree_gateway import BraintreeGateway +from braintree.util.graphql_client import GraphQLClient +from braintree.util.http import Http + class Configuration: @staticmethod - def configure(environment, merchant_id, public_key, private_key, **kwargs) -> None: ... + def configure( + environment, + merchant_id: str, + public_key: str, + private_key: str, + *, + http_strategy: Incomplete | None = None, + timeout: int = 60, + wrap_http_exceptions: bool = False, + ) -> None: ... @staticmethod - def for_partner(environment, partner_id, public_key, private_key, **kwargs): ... + def for_partner( + environment, + partner_id: str, + public_key: str, + private_key: str, + *, + http_strategy: Incomplete | None = None, + timeout: int = 60, + wrap_http_exceptions: bool = False, + ) -> Configuration: ... @staticmethod - def gateway(): ... + def gateway() -> BraintreeGateway: ... @staticmethod - def instantiate(): ... + def instantiate() -> Configuration: ... @staticmethod - def api_version(): ... + def api_version() -> str: ... @staticmethod - def graphql_api_version(): ... + def graphql_api_version() -> str: ... environment: Incomplete - merchant_id: Incomplete - public_key: Incomplete - private_key: Incomplete - client_id: Incomplete - client_secret: Incomplete - access_token: Incomplete - timeout: Incomplete - wrap_http_exceptions: Incomplete + merchant_id: str | None + public_key: str | None + private_key: str | None + client_id: str | None + client_secret: str | None + access_token: str | None + timeout: int + wrap_http_exceptions: bool def __init__( self, environment: Incomplete | None = None, - merchant_id: Incomplete | None = None, - public_key: Incomplete | None = None, - private_key: Incomplete | None = None, - client_id: Incomplete | None = None, - client_secret: Incomplete | None = None, - access_token: Incomplete | None = None, + merchant_id: str | None = None, + public_key: str | None = None, + private_key: str | None = None, + client_id: str | None = None, + client_secret: str | None = None, + access_token: str | None = None, *args, - **kwargs, + timeout: int = 60, + wrap_http_exceptions: bool = False, + http_strategy: Incomplete | None = None, ) -> None: ... - def base_merchant_path(self): ... - def base_url(self): ... - def graphql_base_url(self): ... - def http(self): ... - def graphql_client(self): ... + def base_merchant_path(self) -> str: ... + def base_url(self) -> str: ... + def graphql_base_url(self) -> str: ... + def http(self) -> Http: ... + def graphql_client(self) -> GraphQLClient: ... def http_strategy(self): ... - def has_client_credentials(self): ... + def has_client_credentials(self) -> bool: ... def assert_has_client_credentials(self) -> None: ... - def has_access_token(self): ... + def has_access_token(self) -> bool: ... 
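For illustration, a minimal sketch of how the newly annotated Configuration API is called; Environment.Sandbox is the SDK's usual constant and is assumed here (it is not part of this hunk), and the credential strings are placeholders:

    import braintree

    braintree.Configuration.configure(
        braintree.Environment.Sandbox,   # assumed SDK constant, not part of this diff
        merchant_id="my_merchant_id",    # placeholder credentials
        public_key="my_public_key",
        private_key="my_private_key",
        timeout=30,                      # keyword-only int per the new stub
        wrap_http_exceptions=True,       # keyword-only bool per the new stub
    )

    config = braintree.Configuration.instantiate()  # -> Configuration
    gateway = braintree.Configuration.gateway()     # -> BraintreeGateway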
diff --git a/stubs/braintree/braintree/credentials_parser.pyi b/stubs/braintree/braintree/credentials_parser.pyi index 49dae345e38e..bc24ea29893f 100644 --- a/stubs/braintree/braintree/credentials_parser.pyi +++ b/stubs/braintree/braintree/credentials_parser.pyi @@ -1,15 +1,15 @@ -from _typeshed import Incomplete +from braintree.environment import Environment class CredentialsParser: - client_id: Incomplete - client_secret: Incomplete - access_token: Incomplete + client_id: str | None + client_secret: str | None + access_token: str | None + environment: Environment | None + merchant_id: str def __init__( - self, client_id: Incomplete | None = None, client_secret: Incomplete | None = None, access_token: Incomplete | None = None + self, client_id: str | None = None, client_secret: str | None = None, access_token: str | None = None ) -> None: ... - environment: Incomplete def parse_client_credentials(self) -> None: ... - merchant_id: Incomplete def parse_access_token(self) -> None: ... - def get_environment(self, credential): ... - def get_merchant_id(self, credential): ... + def get_environment(self, credential: str) -> Environment | None: ... + def get_merchant_id(self, credential: str) -> str: ... diff --git a/stubs/braintree/braintree/credit_card.pyi b/stubs/braintree/braintree/credit_card.pyi index a419ef74d736..a38d46c58036 100644 --- a/stubs/braintree/braintree/credit_card.pyi +++ b/stubs/braintree/braintree/credit_card.pyi @@ -1,11 +1,15 @@ from _typeshed import Incomplete +from datetime import date, datetime from enum import Enum -from typing import Final +from typing import Final, Literal from braintree.address import Address from braintree.credit_card_verification import CreditCardVerification +from braintree.error_result import ErrorResult from braintree.resource import Resource +from braintree.resource_collection import ResourceCollection from braintree.subscription import Subscription +from braintree.successful_result import SuccessfulResult class CreditCard(Resource): class CardType: @@ -56,31 +60,33 @@ class CreditCard(Resource): ProductId: type[CardTypeIndicator] PrepaidReloadable: type[CardTypeIndicator] @staticmethod - def create(params: Incomplete | None = None): ... + def create(params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def update(credit_card_token, params: Incomplete | None = None): ... + def update(credit_card_token: str, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def delete(credit_card_token): ... + def delete(credit_card_token: str) -> SuccessfulResult: ... @staticmethod - def expired(): ... + def expired() -> ResourceCollection: ... @staticmethod - def expiring_between(start_date, end_date): ... + def expiring_between(start_date: date | datetime, end_date: date | datetime) -> ResourceCollection: ... @staticmethod - def find(credit_card_token): ... + def find(credit_card_token: str) -> CreditCard: ... @staticmethod - def from_nonce(nonce): ... + def from_nonce(nonce: str) -> CreditCard: ... @staticmethod - def create_signature(): ... + def create_signature() -> list[str | dict[str, list[str]] | dict[str, list[str | dict[str, list[str]]]]]: ... @staticmethod - def update_signature(): ... + def update_signature() -> list[str | dict[str, list[str]] | dict[str, list[str | dict[str, list[str]]]]]: ... @staticmethod - def signature(type): ... 
+ def signature( + type: Literal["create", "update", "update_via_customer"], + ) -> list[str | dict[str, list[str]] | dict[str, list[str | dict[str, list[str]]]]]: ... is_expired = expired billing_address: Address | None subscriptions: list[Subscription] verification: CreditCardVerification - def __init__(self, gateway, attributes): ... + def __init__(self, gateway, attributes) -> None: ... @property - def expiration_date(self): ... + def expiration_date(self) -> str | None: ... @property - def masked_number(self): ... + def masked_number(self) -> str: ... diff --git a/stubs/braintree/braintree/credit_card_gateway.pyi b/stubs/braintree/braintree/credit_card_gateway.pyi index d6e6700a62bc..7ec3fac2cb04 100644 --- a/stubs/braintree/braintree/credit_card_gateway.pyi +++ b/stubs/braintree/braintree/credit_card_gateway.pyi @@ -1,14 +1,23 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, Unused +from datetime import date, datetime +from typing import NoReturn + +from braintree.credit_card import CreditCard +from braintree.error_result import ErrorResult +from braintree.resource_collection import ResourceCollection +from braintree.successful_result import SuccessfulResult class CreditCardGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def create(self, params: Incomplete | None = None): ... - def delete(self, credit_card_token): ... - def expired(self): ... - def expiring_between(self, start_date, end_date): ... - def find(self, credit_card_token): ... - def forward(self, credit_card_token, receiving_merchant_id) -> None: ... - def from_nonce(self, nonce): ... - def update(self, credit_card_token, params: Incomplete | None = None): ... + def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... + def delete(self, credit_card_token: str) -> SuccessfulResult: ... + def expired(self) -> ResourceCollection: ... + def expiring_between(self, start_date: date | datetime, end_date: date | datetime) -> ResourceCollection: ... + def find(self, credit_card_token: str) -> CreditCard: ... + def forward(self, credit_card_token: Unused, receiving_merchant_id: Unused) -> NoReturn: ... + def from_nonce(self, nonce: str) -> CreditCard: ... + def update( + self, credit_card_token: str, params: dict[str, Incomplete] | None = None + ) -> SuccessfulResult | ErrorResult | None: ... diff --git a/stubs/braintree/braintree/credit_card_verification.pyi b/stubs/braintree/braintree/credit_card_verification.pyi index 092066789194..74f584866ca6 100644 --- a/stubs/braintree/braintree/credit_card_verification.pyi +++ b/stubs/braintree/braintree/credit_card_verification.pyi @@ -3,7 +3,10 @@ from decimal import Decimal from typing import Final from braintree.attribute_getter import AttributeGetter +from braintree.error_result import ErrorResult +from braintree.resource_collection import ResourceCollection from braintree.risk_data import RiskData +from braintree.successful_result import SuccessfulResult from braintree.three_d_secure_info import ThreeDSecureInfo class CreditCardVerification(AttributeGetter): @@ -23,11 +26,11 @@ class CreditCardVerification(AttributeGetter): three_d_secure_info: ThreeDSecureInfo | None def __init__(self, gateway, attributes) -> None: ... @staticmethod - def find(verification_id): ... + def find(verification_id: str) -> CreditCardVerification: ... @staticmethod - def search(*query): ... + def search(*query) -> ResourceCollection: ... @staticmethod - def create(params): ... 
+ def create(params) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def create_signature(): ... - def __eq__(self, other): ... + def create_signature() -> list[dict[str, list[str | dict[str, list[str]]]] | dict[str, list[str]] | str]: ... + def __eq__(self, other: object) -> bool: ... diff --git a/stubs/braintree/braintree/credit_card_verification_gateway.pyi b/stubs/braintree/braintree/credit_card_verification_gateway.pyi index 461c9754aae9..fa108beb5f2d 100644 --- a/stubs/braintree/braintree/credit_card_verification_gateway.pyi +++ b/stubs/braintree/braintree/credit_card_verification_gateway.pyi @@ -1,9 +1,14 @@ from _typeshed import Incomplete +from braintree.credit_card_verification import CreditCardVerification +from braintree.error_result import ErrorResult +from braintree.resource_collection import ResourceCollection +from braintree.successful_result import SuccessfulResult + class CreditCardVerificationGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def find(self, verification_id): ... - def search(self, *query): ... - def create(self, params): ... + def find(self, verification_id: str) -> CreditCardVerification: ... + def search(self, *query) -> ResourceCollection: ... + def create(self, params: dict[str, Incomplete] | None) -> SuccessfulResult | ErrorResult | None: ... diff --git a/stubs/braintree/braintree/customer.pyi b/stubs/braintree/braintree/customer.pyi index de81f5a7d223..22f5532ae8fb 100644 --- a/stubs/braintree/braintree/customer.pyi +++ b/stubs/braintree/braintree/customer.pyi @@ -5,32 +5,51 @@ from braintree.amex_express_checkout_card import AmexExpressCheckoutCard from braintree.android_pay_card import AndroidPayCard from braintree.apple_pay_card import ApplePayCard from braintree.credit_card import CreditCard +from braintree.error_result import ErrorResult from braintree.europe_bank_account import EuropeBankAccount from braintree.masterpass_card import MasterpassCard from braintree.paypal_account import PayPalAccount from braintree.resource import Resource +from braintree.resource_collection import ResourceCollection from braintree.samsung_pay_card import SamsungPayCard +from braintree.successful_result import SuccessfulResult from braintree.us_bank_account import UsBankAccount from braintree.venmo_account import VenmoAccount from braintree.visa_checkout_card import VisaCheckoutCard class Customer(Resource): @staticmethod - def all(): ... + def all() -> ResourceCollection: ... @staticmethod - def create(params: Incomplete | None = None): ... + def create(params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def delete(customer_id): ... + def delete(customer_id: str) -> SuccessfulResult: ... @staticmethod - def find(customer_id, association_filter_id: Incomplete | None = None): ... + def find(customer_id: str, association_filter_id: str | None = None) -> Customer: ... @staticmethod - def search(*query): ... + def search(*query) -> ResourceCollection: ... @staticmethod - def update(customer_id, params: Incomplete | None = None): ... + def update(customer_id: str, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def create_signature(): ... 
+ def create_signature() -> ( + list[ + str + | dict[str, list[str]] + | dict[str, list[str | dict[str, list[str]] | dict[str, list[str | dict[str, list[str]]]]]] + | dict[str, list[str | dict[str, list[str]]]] + | dict[str, list[dict[str, list[str | dict[str, list[str | dict[str, list[str]]]]]]]] + ] + ): ... @staticmethod - def update_signature(): ... + def update_signature() -> ( + list[ + str + | dict[str, list[str]] + | dict[str, list[str | dict[str, list[str]] | dict[str, list[str | dict[str, list[str]]]]]] + | dict[str, list[str | dict[str, list[str]]]] + | dict[str, list[dict[str, list[str | dict[str, list[str | dict[str, list[str]]]]]]]] + ] + ): ... payment_methods: list[Resource] credit_cards: list[CreditCard] addresses: list[Address] diff --git a/stubs/braintree/braintree/customer_gateway.pyi b/stubs/braintree/braintree/customer_gateway.pyi index acfad5e5b21a..18558178134c 100644 --- a/stubs/braintree/braintree/customer_gateway.pyi +++ b/stubs/braintree/braintree/customer_gateway.pyi @@ -1,12 +1,17 @@ from _typeshed import Incomplete +from braintree.customer import Customer +from braintree.error_result import ErrorResult +from braintree.resource_collection import ResourceCollection +from braintree.successful_result import SuccessfulResult + class CustomerGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def all(self): ... - def create(self, params: Incomplete | None = None): ... - def delete(self, customer_id): ... - def find(self, customer_id, association_filter_id: Incomplete | None = None): ... - def search(self, *query): ... - def update(self, customer_id, params: Incomplete | None = None): ... + def all(self) -> ResourceCollection: ... + def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... + def delete(self, customer_id: str) -> SuccessfulResult: ... + def find(self, customer_id: str, association_filter_id: str | None = None) -> Customer: ... + def search(self, *query) -> ResourceCollection: ... + def update(self, customer_id: str, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... diff --git a/stubs/braintree/braintree/disbursement.pyi b/stubs/braintree/braintree/disbursement.pyi index d6734ec5f682..f406b972cb12 100644 --- a/stubs/braintree/braintree/disbursement.pyi +++ b/stubs/braintree/braintree/disbursement.pyi @@ -13,5 +13,5 @@ class Disbursement(Resource): merchant_account: MerchantAccount def __init__(self, gateway, attributes) -> None: ... def transactions(self): ... - def is_credit(self): ... - def is_debit(self): ... + def is_credit(self) -> bool: ... + def is_debit(self) -> bool: ... diff --git a/stubs/braintree/braintree/disbursement_detail.pyi b/stubs/braintree/braintree/disbursement_detail.pyi index 6b8db77c82dd..8de304b964fb 100644 --- a/stubs/braintree/braintree/disbursement_detail.pyi +++ b/stubs/braintree/braintree/disbursement_detail.pyi @@ -7,4 +7,4 @@ class DisbursementDetail(AttributeGetter): settlement_currency_exchange_rate: Decimal | None def __init__(self, attributes) -> None: ... @property - def is_valid(self): ... + def is_valid(self) -> bool: ... diff --git a/stubs/braintree/braintree/discount.pyi b/stubs/braintree/braintree/discount.pyi index 9491e551e134..0078843f6265 100644 --- a/stubs/braintree/braintree/discount.pyi +++ b/stubs/braintree/braintree/discount.pyi @@ -2,4 +2,4 @@ from braintree.modification import Modification class Discount(Modification): @staticmethod - def all(): ... 
+ def all() -> list[Discount]: ... diff --git a/stubs/braintree/braintree/discount_gateway.pyi b/stubs/braintree/braintree/discount_gateway.pyi index 4f519ea355eb..df1da780a7c2 100644 --- a/stubs/braintree/braintree/discount_gateway.pyi +++ b/stubs/braintree/braintree/discount_gateway.pyi @@ -1,7 +1,9 @@ from _typeshed import Incomplete +from braintree.discount import Discount + class DiscountGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def all(self): ... + def all(self) -> list[Discount]: ... diff --git a/stubs/braintree/braintree/dispute.pyi b/stubs/braintree/braintree/dispute.pyi index c598dde408ee..fe129e8c17cc 100644 --- a/stubs/braintree/braintree/dispute.pyi +++ b/stubs/braintree/braintree/dispute.pyi @@ -4,6 +4,8 @@ from typing import Final from braintree.attribute_getter import AttributeGetter from braintree.dispute_details import DisputeEvidence, DisputePayPalMessage, DisputeStatusHistory +from braintree.error_result import ErrorResult +from braintree.successful_result import SuccessfulResult from braintree.transaction_details import TransactionDetails class Dispute(AttributeGetter): @@ -50,19 +52,19 @@ class Dispute(AttributeGetter): NoProtection: Final = "No Protection" @staticmethod - def accept(id): ... + def accept(id: str) -> SuccessfulResult | ErrorResult: ... @staticmethod - def add_file_evidence(dispute_id, document_upload_id): ... + def add_file_evidence(dispute_id: str, document_upload_id) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def add_text_evidence(id, content_or_request): ... + def add_text_evidence(id: str, content_or_request) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def finalize(id): ... + def finalize(id: str) -> SuccessfulResult | ErrorResult: ... @staticmethod - def find(id): ... + def find(id: str) -> Dispute: ... @staticmethod - def remove_evidence(id, evidence_id): ... + def remove_evidence(id: str, evidence_id: str) -> SuccessfulResult | ErrorResult: ... @staticmethod - def search(*query): ... + def search(*query) -> SuccessfulResult: ... amount: Decimal | None amount_disputed: Decimal | None amount_won: Decimal | None diff --git a/stubs/braintree/braintree/dispute_details/evidence.pyi b/stubs/braintree/braintree/dispute_details/evidence.pyi index 6afdb2190463..a7dd9100d4d1 100644 --- a/stubs/braintree/braintree/dispute_details/evidence.pyi +++ b/stubs/braintree/braintree/dispute_details/evidence.pyi @@ -1,4 +1,6 @@ +from typing import Any + from braintree.attribute_getter import AttributeGetter class DisputeEvidence(AttributeGetter): - def __init__(self, attributes) -> None: ... + def __init__(self, attributes: dict[str, Any] | None) -> None: ... diff --git a/stubs/braintree/braintree/dispute_details/paypal_message.pyi b/stubs/braintree/braintree/dispute_details/paypal_message.pyi index f765e00a203d..7d5d8a2441e5 100644 --- a/stubs/braintree/braintree/dispute_details/paypal_message.pyi +++ b/stubs/braintree/braintree/dispute_details/paypal_message.pyi @@ -1,4 +1,6 @@ +from typing import Any + from braintree.attribute_getter import AttributeGetter class DisputePayPalMessage(AttributeGetter): - def __init__(self, attributes) -> None: ... + def __init__(self, attributes: dict[str, Any] | None) -> None: ... 
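A short, hedged sketch of the Dispute annotations above; the dispute id is a placeholder and the calls assume Configuration.configure(...) has already been run:

    import braintree

    dispute = braintree.Dispute.find("my_dispute_id")    # -> Dispute
    result = braintree.Dispute.accept("my_dispute_id")   # -> SuccessfulResult | ErrorResult
    if result.is_success:
        print("dispute accepted")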
diff --git a/stubs/braintree/braintree/dispute_details/status_history.pyi b/stubs/braintree/braintree/dispute_details/status_history.pyi index c5191976f0fc..dd7466ace617 100644 --- a/stubs/braintree/braintree/dispute_details/status_history.pyi +++ b/stubs/braintree/braintree/dispute_details/status_history.pyi @@ -1,4 +1,6 @@ +from typing import Any + from braintree.attribute_getter import AttributeGetter class DisputeStatusHistory(AttributeGetter): - def __init__(self, attributes) -> None: ... + def __init__(self, attributes: dict[str, Any] | None) -> None: ... diff --git a/stubs/braintree/braintree/dispute_gateway.pyi b/stubs/braintree/braintree/dispute_gateway.pyi index d681355f60db..4c0a5d66481f 100644 --- a/stubs/braintree/braintree/dispute_gateway.pyi +++ b/stubs/braintree/braintree/dispute_gateway.pyi @@ -1,14 +1,18 @@ from _typeshed import Incomplete +from braintree.dispute import Dispute +from braintree.error_result import ErrorResult +from braintree.successful_result import SuccessfulResult + class DisputeGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def accept(self, dispute_id): ... - def add_file_evidence(self, dispute_id, document_upload_id_or_request): ... - def add_text_evidence(self, dispute_id, content_or_request): ... - def finalize(self, dispute_id): ... - def find(self, dispute_id): ... - def remove_evidence(self, dispute_id, evidence_id): ... + def accept(self, dispute_id: str) -> SuccessfulResult | ErrorResult: ... + def add_file_evidence(self, dispute_id: str, document_upload_id_or_request) -> SuccessfulResult | ErrorResult | None: ... + def add_text_evidence(self, dispute_id: str, content_or_request) -> SuccessfulResult | ErrorResult | None: ... + def finalize(self, dispute_id: str) -> SuccessfulResult | ErrorResult: ... + def find(self, dispute_id: str) -> Dispute: ... + def remove_evidence(self, dispute_id: str, evidence_id: int) -> SuccessfulResult | ErrorResult: ... search_criteria: dict[Incomplete, Incomplete] - def search(self, *query): ... + def search(self, *query) -> SuccessfulResult: ... diff --git a/stubs/braintree/braintree/document_upload.pyi b/stubs/braintree/braintree/document_upload.pyi index 9a7a8b450539..155b5596711e 100644 --- a/stubs/braintree/braintree/document_upload.pyi +++ b/stubs/braintree/braintree/document_upload.pyi @@ -1,14 +1,16 @@ from _typeshed import Incomplete from typing import Final +from braintree.error_result import ErrorResult from braintree.resource import Resource +from braintree.successful_result import SuccessfulResult class DocumentUpload(Resource): class Kind: EvidenceDocument: Final = "evidence_document" @staticmethod - def create(params: Incomplete | None = None): ... + def create(params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult: ... @staticmethod - def create_signature(): ... + def create_signature() -> list[str]: ... def __init__(self, gateway, attributes) -> None: ... diff --git a/stubs/braintree/braintree/document_upload_gateway.pyi b/stubs/braintree/braintree/document_upload_gateway.pyi index 78daa75d0116..5dcf510fee1b 100644 --- a/stubs/braintree/braintree/document_upload_gateway.pyi +++ b/stubs/braintree/braintree/document_upload_gateway.pyi @@ -1,7 +1,10 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.successful_result import SuccessfulResult + class DocumentUploadGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... 
- def create(self, params: Incomplete | None = None): ... + def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult: ... diff --git a/stubs/braintree/braintree/error_result.pyi b/stubs/braintree/braintree/error_result.pyi index 6916a42e62cd..9f23d30fa60a 100644 --- a/stubs/braintree/braintree/error_result.pyi +++ b/stubs/braintree/braintree/error_result.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete +from typing import Literal from braintree.credit_card_verification import CreditCardVerification from braintree.errors import Errors @@ -14,6 +15,6 @@ class ErrorResult: transaction: Transaction subscription: Subscription merchant_account: Plan - def __init__(self, gateway, attributes) -> None: ... + def __init__(self, gateway, attributes: dict[str, Incomplete]) -> None: ... @property - def is_success(self): ... + def is_success(self) -> Literal[False]: ... diff --git a/stubs/braintree/braintree/errors.pyi b/stubs/braintree/braintree/errors.pyi index a48e45c8ac7a..84c2ffb55742 100644 --- a/stubs/braintree/braintree/errors.pyi +++ b/stubs/braintree/braintree/errors.pyi @@ -1,10 +1,13 @@ +from _typeshed import Incomplete + +from braintree.validation_error import ValidationError from braintree.validation_error_collection import ValidationErrorCollection class Errors: errors: ValidationErrorCollection size = errors.deep_size - def __init__(self, data) -> None: ... + def __init__(self, data: dict[str, Incomplete]) -> None: ... @property - def deep_errors(self): ... - def for_object(self, key): ... + def deep_errors(self) -> list[ValidationError]: ... + def for_object(self, key: str) -> ValidationErrorCollection: ... def __len__(self) -> int: ... diff --git a/stubs/braintree/braintree/europe_bank_account.pyi b/stubs/braintree/braintree/europe_bank_account.pyi index eb842eac29bd..f9b9632ee832 100644 --- a/stubs/braintree/braintree/europe_bank_account.pyi +++ b/stubs/braintree/braintree/europe_bank_account.pyi @@ -8,4 +8,4 @@ class EuropeBankAccount(Resource): Consumer: Final = "consumer" @staticmethod - def signature(): ... + def signature() -> list[str]: ... 
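A hedged sketch of walking validation errors with the ErrorResult/Errors annotations above; it assumes the usual ErrorResult.errors attribute and ValidationError.code/.message fields from the braintree API, which are not part of this hunk, and placeholder ids:

    import braintree

    result = braintree.Customer.update("my_customer_id", {"email": "not-an-email"})
    if isinstance(result, braintree.ErrorResult):   # is_success is Literal[False] here
        for error in result.errors.deep_errors:     # -> list[ValidationError]
            print(error.code, error.message)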
diff --git a/stubs/braintree/braintree/exceptions/__init__.pyi b/stubs/braintree/braintree/exceptions/__init__.pyi index 343f22a04e8a..38284711b57c 100644 --- a/stubs/braintree/braintree/exceptions/__init__.pyi +++ b/stubs/braintree/braintree/exceptions/__init__.pyi @@ -8,6 +8,9 @@ from braintree.exceptions.not_found_error import NotFoundError as NotFoundError from braintree.exceptions.request_timeout_error import RequestTimeoutError as RequestTimeoutError from braintree.exceptions.server_error import ServerError as ServerError from braintree.exceptions.service_unavailable_error import ServiceUnavailableError as ServiceUnavailableError +from braintree.exceptions.test_operation_performed_in_production_error import ( + TestOperationPerformedInProductionError as TestOperationPerformedInProductionError, +) from braintree.exceptions.too_many_requests_error import TooManyRequestsError as TooManyRequestsError from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError from braintree.exceptions.upgrade_required_error import UpgradeRequiredError as UpgradeRequiredError diff --git a/stubs/braintree/braintree/exchange_rate_quote_gateway.pyi b/stubs/braintree/braintree/exchange_rate_quote_gateway.pyi index 769faaa9e03d..98449f0872c1 100644 --- a/stubs/braintree/braintree/exchange_rate_quote_gateway.pyi +++ b/stubs/braintree/braintree/exchange_rate_quote_gateway.pyi @@ -1,9 +1,13 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.exchange_rate_quote_payload import ExchangeRateQuotePayload +from braintree.successful_result import SuccessfulResult + class ExchangeRateQuoteGateway: gateway: Incomplete config: Incomplete graphql_client: Incomplete def __init__(self, gateway, graphql_client: Incomplete | None = None) -> None: ... - exchange_rate_quote_payload: Incomplete - def generate(self, request): ... + exchange_rate_quote_payload: ExchangeRateQuotePayload + def generate(self, request) -> SuccessfulResult | ErrorResult | None: ... diff --git a/stubs/braintree/braintree/exchange_rate_quote_input.pyi b/stubs/braintree/braintree/exchange_rate_quote_input.pyi index 8538d18c7818..43ca18533dbc 100644 --- a/stubs/braintree/braintree/exchange_rate_quote_input.pyi +++ b/stubs/braintree/braintree/exchange_rate_quote_input.pyi @@ -6,4 +6,4 @@ class ExchangeRateQuoteInput(AttributeGetter): parent: Incomplete def __init__(self, parent, attributes) -> None: ... def done(self): ... - def to_graphql_variables(self): ... + def to_graphql_variables(self) -> dict[str, Incomplete]: ... diff --git a/stubs/braintree/braintree/exchange_rate_quote_payload.pyi b/stubs/braintree/braintree/exchange_rate_quote_payload.pyi index 1069e0611f4a..39a7ed103bfb 100644 --- a/stubs/braintree/braintree/exchange_rate_quote_payload.pyi +++ b/stubs/braintree/braintree/exchange_rate_quote_payload.pyi @@ -1,6 +1,9 @@ from _typeshed import Incomplete +from collections.abc import Mapping + +from braintree.exchange_rate_quote import ExchangeRateQuote class ExchangeRateQuotePayload: - quotes: Incomplete - def __init__(self, data) -> None: ... - def get_quotes(self): ... + quotes: list[ExchangeRateQuote] + def __init__(self, data: Mapping[str, Incomplete]) -> None: ... + def get_quotes(self) -> list[ExchangeRateQuote]: ... 
diff --git a/stubs/braintree/braintree/exchange_rate_quote_request.pyi b/stubs/braintree/braintree/exchange_rate_quote_request.pyi index 1e0d04d4c727..e5eec778b32f 100644 --- a/stubs/braintree/braintree/exchange_rate_quote_request.pyi +++ b/stubs/braintree/braintree/exchange_rate_quote_request.pyi @@ -1,7 +1,9 @@ from _typeshed import Incomplete +from braintree.exchange_rate_quote_input import ExchangeRateQuoteInput + class ExchangeRateQuoteRequest: - quotes: Incomplete + quotes: list[ExchangeRateQuoteInput] def __init__(self) -> None: ... - def add_exchange_rate_quote_input(self, attributes): ... - def to_graphql_variables(self): ... + def add_exchange_rate_quote_input(self, attributes) -> ExchangeRateQuoteInput: ... + def to_graphql_variables(self) -> dict[str, Incomplete]: ... diff --git a/stubs/braintree/braintree/masterpass_card.pyi b/stubs/braintree/braintree/masterpass_card.pyi index 80be2912c25b..c50678e419b7 100644 --- a/stubs/braintree/braintree/masterpass_card.pyi +++ b/stubs/braintree/braintree/masterpass_card.pyi @@ -7,6 +7,6 @@ class MasterpassCard(Resource): subscriptions: list[Subscription] def __init__(self, gateway, attributes) -> None: ... @property - def expiration_date(self): ... + def expiration_date(self) -> str: ... @property - def masked_number(self): ... + def masked_number(self) -> str: ... diff --git a/stubs/braintree/braintree/merchant_account_gateway.pyi b/stubs/braintree/braintree/merchant_account_gateway.pyi index da1f6c868dcb..b467975c03e5 100644 --- a/stubs/braintree/braintree/merchant_account_gateway.pyi +++ b/stubs/braintree/braintree/merchant_account_gateway.pyi @@ -1,11 +1,17 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.merchant_account import MerchantAccount +from braintree.successful_result import SuccessfulResult + class MerchantAccountGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def create(self, params: Incomplete | None = None): ... - def update(self, merchant_account_id, params: Incomplete | None = None): ... - def find(self, merchant_account_id): ... - def create_for_currency(self, params: Incomplete | None = None): ... - def all(self): ... + def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... + def update( + self, merchant_account_id: str, params: dict[str, Incomplete] | None = None + ) -> SuccessfulResult | ErrorResult | None: ... + def find(self, merchant_account_id: str) -> MerchantAccount: ... + def create_for_currency(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... + def all(self) -> SuccessfulResult: ... diff --git a/stubs/braintree/braintree/merchant_gateway.pyi b/stubs/braintree/braintree/merchant_gateway.pyi index 143ec45a31c6..48dcbad24312 100644 --- a/stubs/braintree/braintree/merchant_gateway.pyi +++ b/stubs/braintree/braintree/merchant_gateway.pyi @@ -1,7 +1,10 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.successful_result import SuccessfulResult + class MerchantGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def create(self, params): ... + def create(self, params: dict[str, Incomplete] | None) -> SuccessfulResult | ErrorResult: ... 
diff --git a/stubs/braintree/braintree/meta_checkout_card.pyi b/stubs/braintree/braintree/meta_checkout_card.pyi index e943e94b7ae5..7ee4304b9cfa 100644 --- a/stubs/braintree/braintree/meta_checkout_card.pyi +++ b/stubs/braintree/braintree/meta_checkout_card.pyi @@ -3,6 +3,6 @@ from braintree.resource import Resource class MetaCheckoutCard(Resource): def __init__(self, gateway, attributes) -> None: ... @property - def expiration_date(self): ... + def expiration_date(self) -> str | None: ... @property - def masked_number(self): ... + def masked_number(self) -> str: ... diff --git a/stubs/braintree/braintree/meta_checkout_token.pyi b/stubs/braintree/braintree/meta_checkout_token.pyi index a39f6f9156b3..a9d7bece4d1c 100644 --- a/stubs/braintree/braintree/meta_checkout_token.pyi +++ b/stubs/braintree/braintree/meta_checkout_token.pyi @@ -3,6 +3,6 @@ from braintree.resource import Resource class MetaCheckoutToken(Resource): def __init__(self, gateway, attributes) -> None: ... @property - def expiration_date(self): ... + def expiration_date(self) -> str | None: ... @property - def masked_number(self): ... + def masked_number(self) -> str: ... diff --git a/stubs/braintree/braintree/montary_amount.pyi b/stubs/braintree/braintree/montary_amount.pyi index 1ffe9c20f19d..98f6a9555de5 100644 --- a/stubs/braintree/braintree/montary_amount.pyi +++ b/stubs/braintree/braintree/montary_amount.pyi @@ -1,7 +1,7 @@ -from _typeshed import Incomplete +from decimal import Decimal from braintree.attribute_getter import AttributeGetter class MontaryAmount(AttributeGetter): - value: Incomplete + value: Decimal def __init__(self, attributes) -> None: ... diff --git a/stubs/braintree/braintree/oauth_gateway.pyi b/stubs/braintree/braintree/oauth_gateway.pyi index b79b96e8d0c5..53e5af4f3d3f 100644 --- a/stubs/braintree/braintree/oauth_gateway.pyi +++ b/stubs/braintree/braintree/oauth_gateway.pyi @@ -1,10 +1,13 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.successful_result import SuccessfulResult + class OAuthGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def create_token_from_code(self, params): ... - def create_token_from_refresh_token(self, params): ... - def revoke_access_token(self, access_token): ... - def connect_url(self, raw_params): ... + def create_token_from_code(self, params: dict[str, Incomplete]) -> SuccessfulResult | ErrorResult: ... + def create_token_from_refresh_token(self, params: dict[str, Incomplete]) -> SuccessfulResult | ErrorResult: ... + def revoke_access_token(self, access_token: str) -> type[SuccessfulResult] | ErrorResult: ... + def connect_url(self, raw_params: dict[str, Incomplete]) -> str: ... diff --git a/stubs/braintree/braintree/payment_method.pyi b/stubs/braintree/braintree/payment_method.pyi index a9f1dd8b39e8..b87060d671d1 100644 --- a/stubs/braintree/braintree/payment_method.pyi +++ b/stubs/braintree/braintree/payment_method.pyi @@ -1,21 +1,37 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult from braintree.resource import Resource +from braintree.successful_result import SuccessfulResult class PaymentMethod(Resource): @staticmethod - def create(params: Incomplete | None = None): ... + def create(params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult: ... @staticmethod - def find(payment_method_token): ... + def find(payment_method_token: str) -> Resource: ... @staticmethod - def update(payment_method_token, params): ... 
+ def update(payment_method_token: str, params) -> SuccessfulResult | ErrorResult: ... @staticmethod - def delete(payment_method_token, options: Incomplete | None = None): ... + def delete(payment_method_token: str, options: Incomplete | None = None) -> SuccessfulResult: ... @staticmethod - def create_signature(): ... + def create_signature() -> ( + list[ + str + | dict[str, list[str | dict[str, list[str]]]] + | dict[str, list[str | dict[str, list[str]] | dict[str, list[str | dict[str, list[str | dict[str, list[str]]]]]]]] + | dict[str, list[str]] + ] + ): ... @staticmethod - def signature(type): ... + def signature( + type: str, + ) -> list[ + str + | dict[str, list[str | dict[str, list[str]]]] + | dict[str, list[str | dict[str, list[str]] | dict[str, list[str | dict[str, list[str | dict[str, list[str]]]]]]]] + | dict[str, list[str]] + ]: ... @staticmethod - def update_signature(): ... + def update_signature() -> list[str | dict[str, list[str | dict[str, list[str]]]] | dict[str, list[str]]]: ... @staticmethod - def delete_signature(): ... + def delete_signature() -> list[str]: ... diff --git a/stubs/braintree/braintree/payment_method_gateway.pyi b/stubs/braintree/braintree/payment_method_gateway.pyi index 8ecfde7e51c9..7d8ab1876484 100644 --- a/stubs/braintree/braintree/payment_method_gateway.pyi +++ b/stubs/braintree/braintree/payment_method_gateway.pyi @@ -1,13 +1,17 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.resource import Resource +from braintree.successful_result import SuccessfulResult + class PaymentMethodGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def create(self, params: Incomplete | None = None): ... - def find(self, payment_method_token): ... - def update(self, payment_method_token, params): ... - def delete(self, payment_method_token, options: Incomplete | None = None): ... + def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult: ... + def find(self, payment_method_token: str) -> Resource: ... + def update(self, payment_method_token: str, params) -> SuccessfulResult | ErrorResult: ... + def delete(self, payment_method_token: str, options: Incomplete | None = None) -> SuccessfulResult: ... options: dict[str, Incomplete] - def grant(self, payment_method_token, options: Incomplete | None = None): ... - def revoke(self, payment_method_token): ... + def grant(self, payment_method_token: str, options: Incomplete | None = None) -> SuccessfulResult | ErrorResult: ... + def revoke(self, payment_method_token: str) -> SuccessfulResult | ErrorResult: ... diff --git a/stubs/braintree/braintree/payment_method_nonce.pyi b/stubs/braintree/braintree/payment_method_nonce.pyi index f3e0c4be2902..9848d09b7fe8 100644 --- a/stubs/braintree/braintree/payment_method_nonce.pyi +++ b/stubs/braintree/braintree/payment_method_nonce.pyi @@ -1,14 +1,16 @@ from _typeshed import Incomplete from braintree.bin_data import BinData +from braintree.error_result import ErrorResult from braintree.resource import Resource +from braintree.successful_result import SuccessfulResult from braintree.three_d_secure_info import ThreeDSecureInfo class PaymentMethodNonce(Resource): @staticmethod - def create(payment_method_token, params={}): ... + def create(payment_method_token: str, params={}) -> SuccessfulResult | ErrorResult: ... @staticmethod - def find(payment_method_nonce): ... + def find(payment_method_nonce: str) -> PaymentMethodNonce: ... 
three_d_secure_info: ThreeDSecureInfo | None authentication_insight: Incomplete bin_data: BinData diff --git a/stubs/braintree/braintree/payment_method_nonce_gateway.pyi b/stubs/braintree/braintree/payment_method_nonce_gateway.pyi index 44196c96b994..df513455ac4d 100644 --- a/stubs/braintree/braintree/payment_method_nonce_gateway.pyi +++ b/stubs/braintree/braintree/payment_method_nonce_gateway.pyi @@ -1,8 +1,12 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.payment_method_nonce import PaymentMethodNonce +from braintree.successful_result import SuccessfulResult + class PaymentMethodNonceGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def create(self, payment_method_token, params=...): ... - def find(self, payment_method_nonce): ... + def create(self, payment_method_token: str, params=...) -> SuccessfulResult | ErrorResult: ... + def find(self, payment_method_nonce: str) -> PaymentMethodNonce: ... diff --git a/stubs/braintree/braintree/payment_method_parser.pyi b/stubs/braintree/braintree/payment_method_parser.pyi index 1c8b84170cd5..9881d41f3a9c 100644 --- a/stubs/braintree/braintree/payment_method_parser.pyi +++ b/stubs/braintree/braintree/payment_method_parser.pyi @@ -1 +1,31 @@ -def parse_payment_method(gateway, attributes): ... +from braintree.amex_express_checkout_card import AmexExpressCheckoutCard +from braintree.android_pay_card import AndroidPayCard +from braintree.apple_pay_card import ApplePayCard +from braintree.credit_card import CreditCard +from braintree.europe_bank_account import EuropeBankAccount +from braintree.masterpass_card import MasterpassCard +from braintree.paypal_account import PayPalAccount +from braintree.samsung_pay_card import SamsungPayCard +from braintree.sepa_direct_debit_account import SepaDirectDebitAccount +from braintree.unknown_payment_method import UnknownPaymentMethod +from braintree.us_bank_account import UsBankAccount +from braintree.venmo_account import VenmoAccount +from braintree.visa_checkout_card import VisaCheckoutCard + +def parse_payment_method( + gateway, attributes +) -> ( + PayPalAccount + | CreditCard + | EuropeBankAccount + | ApplePayCard + | AndroidPayCard + | AmexExpressCheckoutCard + | SepaDirectDebitAccount + | VenmoAccount + | UsBankAccount + | VisaCheckoutCard + | MasterpassCard + | SamsungPayCard + | UnknownPaymentMethod +): ... diff --git a/stubs/braintree/braintree/paypal_account.pyi b/stubs/braintree/braintree/paypal_account.pyi index 07805a1921b9..c24ebba39820 100644 --- a/stubs/braintree/braintree/paypal_account.pyi +++ b/stubs/braintree/braintree/paypal_account.pyi @@ -1,16 +1,18 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult from braintree.resource import Resource from braintree.subscription import Subscription +from braintree.successful_result import SuccessfulResult class PayPalAccount(Resource): @staticmethod - def find(paypal_account_token): ... + def find(paypal_account_token: str) -> PayPalAccount | None: ... @staticmethod - def delete(paypal_account_token): ... + def delete(paypal_account_token: str) -> SuccessfulResult: ... @staticmethod - def update(paypal_account_token, params: Incomplete | None = None): ... + def update(paypal_account_token: str, params: Incomplete | None = None) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def signature(): ... + def signature() -> list[str | dict[str, list[str]]]: ... 
subscriptions: list[Subscription] def __init__(self, gateway, attributes) -> None: ... diff --git a/stubs/braintree/braintree/paypal_account_gateway.pyi b/stubs/braintree/braintree/paypal_account_gateway.pyi index d90586089eed..471601f27e67 100644 --- a/stubs/braintree/braintree/paypal_account_gateway.pyi +++ b/stubs/braintree/braintree/paypal_account_gateway.pyi @@ -1,9 +1,13 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.paypal_account import PayPalAccount +from braintree.successful_result import SuccessfulResult + class PayPalAccountGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def find(self, paypal_account_token): ... - def delete(self, paypal_account_token): ... - def update(self, paypal_account_token, params: Incomplete | None = None): ... + def find(self, paypal_account_token: str) -> PayPalAccount | None: ... + def delete(self, paypal_account_token: str) -> SuccessfulResult: ... + def update(self, paypal_account_token: str, params: Incomplete | None = None) -> SuccessfulResult | ErrorResult | None: ... diff --git a/stubs/braintree/braintree/paypal_payment_resource.pyi b/stubs/braintree/braintree/paypal_payment_resource.pyi index 4c2297db78e2..81b5e5e3d987 100644 --- a/stubs/braintree/braintree/paypal_payment_resource.pyi +++ b/stubs/braintree/braintree/paypal_payment_resource.pyi @@ -1,10 +1,12 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult from braintree.resource import Resource +from braintree.successful_result import SuccessfulResult class PayPalPaymentResource(Resource): def __init__(self, gateway, attributes) -> None: ... @staticmethod - def update(request): ... + def update(request) -> SuccessfulResult | ErrorResult: ... @staticmethod def update_signature() -> list[Incomplete]: ... diff --git a/stubs/braintree/braintree/successful_result.pyi b/stubs/braintree/braintree/successful_result.pyi index 036b144ffbcc..962d23484093 100644 --- a/stubs/braintree/braintree/successful_result.pyi +++ b/stubs/braintree/braintree/successful_result.pyi @@ -1,5 +1,7 @@ +from typing import Literal + from braintree.attribute_getter import AttributeGetter class SuccessfulResult(AttributeGetter): @property - def is_success(self): ... + def is_success(self) -> Literal[True]: ... diff --git a/stubs/braintree/braintree/transaction_line_item_gateway.pyi b/stubs/braintree/braintree/transaction_line_item_gateway.pyi index 796f9daf2651..dcf1e5b87505 100644 --- a/stubs/braintree/braintree/transaction_line_item_gateway.pyi +++ b/stubs/braintree/braintree/transaction_line_item_gateway.pyi @@ -1,7 +1,9 @@ from _typeshed import Incomplete +from braintree.transaction_line_item import TransactionLineItem + class TransactionLineItemGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def find_all(self, transaction_id): ... + def find_all(self, transaction_id: str) -> list[TransactionLineItem]: ... diff --git a/stubs/braintree/braintree/unknown_payment_method.pyi b/stubs/braintree/braintree/unknown_payment_method.pyi index bd5da6081bdd..9551e3225685 100644 --- a/stubs/braintree/braintree/unknown_payment_method.pyi +++ b/stubs/braintree/braintree/unknown_payment_method.pyi @@ -1,4 +1,4 @@ from braintree.resource import Resource class UnknownPaymentMethod(Resource): - def image_url(self): ... + def image_url(self) -> str: ... 
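To show the Literal-typed is_success flags in context, a minimal sketch using PaymentMethod.create; the parameter keys follow Braintree's documented payment-method API and are not part of this diff, and the values are placeholders:

    import braintree

    # assumes Configuration.configure(...) was called earlier
    result = braintree.PaymentMethod.create({
        "customer_id": "my_customer_id",
        "payment_method_nonce": "fake-valid-nonce",
    })
    # SuccessfulResult.is_success is Literal[True], ErrorResult.is_success is
    # Literal[False]; the runtime check itself is unchanged.
    if result.is_success:
        print("payment method created")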
diff --git a/stubs/braintree/braintree/us_bank_account.pyi b/stubs/braintree/braintree/us_bank_account.pyi index b4a14d836a9d..538a965c739d 100644 --- a/stubs/braintree/braintree/us_bank_account.pyi +++ b/stubs/braintree/braintree/us_bank_account.pyi @@ -1,14 +1,16 @@ from braintree.ach_mandate import AchMandate +from braintree.error_result import ErrorResult from braintree.resource import Resource +from braintree.successful_result import SuccessfulResult from braintree.us_bank_account_verification import UsBankAccountVerification class UsBankAccount(Resource): @staticmethod - def find(token): ... + def find(token: str) -> UsBankAccount | None: ... @staticmethod - def sale(token, transactionRequest): ... + def sale(token: str, transactionRequest) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def signature(): ... + def signature() -> list[str]: ... ach_mandate: AchMandate | None verifications: list[UsBankAccountVerification] def __init__(self, gateway, attributes) -> None: ... diff --git a/stubs/braintree/braintree/us_bank_account_gateway.pyi b/stubs/braintree/braintree/us_bank_account_gateway.pyi index c20be885acfa..8e3643b7f868 100644 --- a/stubs/braintree/braintree/us_bank_account_gateway.pyi +++ b/stubs/braintree/braintree/us_bank_account_gateway.pyi @@ -1,7 +1,9 @@ from _typeshed import Incomplete +from braintree.us_bank_account import UsBankAccount + class UsBankAccountGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def find(self, us_bank_account_token): ... + def find(self, us_bank_account_token: str) -> UsBankAccount | None: ... diff --git a/stubs/braintree/braintree/us_bank_account_verification.pyi b/stubs/braintree/braintree/us_bank_account_verification.pyi index af0e3fbd395c..506fb48a7aae 100644 --- a/stubs/braintree/braintree/us_bank_account_verification.pyi +++ b/stubs/braintree/braintree/us_bank_account_verification.pyi @@ -1,6 +1,9 @@ from typing import Final from braintree.attribute_getter import AttributeGetter +from braintree.error_result import ErrorResult +from braintree.resource_collection import ResourceCollection +from braintree.successful_result import SuccessfulResult from braintree.us_bank_account import UsBankAccount class UsBankAccountVerification(AttributeGetter): @@ -24,9 +27,9 @@ class UsBankAccountVerification(AttributeGetter): us_bank_account: UsBankAccount | None def __init__(self, gateway, attributes) -> None: ... @staticmethod - def confirm_micro_transfer_amounts(verification_id, amounts): ... + def confirm_micro_transfer_amounts(verification_id: str, amounts) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def find(verification_id): ... + def find(verification_id: str) -> UsBankAccountVerification: ... @staticmethod - def search(*query): ... - def __eq__(self, other): ... + def search(*query) -> ResourceCollection: ... + def __eq__(self, other: object) -> bool: ... 
diff --git a/stubs/braintree/braintree/us_bank_account_verification_gateway.pyi b/stubs/braintree/braintree/us_bank_account_verification_gateway.pyi index 08bc17af0343..5522b67d2f3a 100644 --- a/stubs/braintree/braintree/us_bank_account_verification_gateway.pyi +++ b/stubs/braintree/braintree/us_bank_account_verification_gateway.pyi @@ -1,9 +1,14 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.resource_collection import ResourceCollection +from braintree.successful_result import SuccessfulResult +from braintree.us_bank_account_verification import UsBankAccountVerification + class UsBankAccountVerificationGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def confirm_micro_transfer_amounts(self, verification_id, amounts): ... - def find(self, verification_id): ... - def search(self, *query): ... + def confirm_micro_transfer_amounts(self, verification_id: str, amounts) -> SuccessfulResult | ErrorResult | None: ... + def find(self, verification_id: str) -> UsBankAccountVerification: ... + def search(self, *query) -> ResourceCollection: ... diff --git a/stubs/braintree/braintree/util/constants.pyi b/stubs/braintree/braintree/util/constants.pyi index 1ec390b709f0..17cdc02f6ba8 100644 --- a/stubs/braintree/braintree/util/constants.pyi +++ b/stubs/braintree/braintree/util/constants.pyi @@ -1,4 +1,7 @@ +from _typeshed import Incomplete +from typing import Any + class Constants: @staticmethod - def get_all_constant_values_from_class(klass): ... - def get_all_enum_values(enum_class): ... + def get_all_constant_values_from_class(klass: object) -> list[Any]: ... # Any taken from klass.__dict__ + def get_all_enum_values(enum_class) -> list[Incomplete]: ... diff --git a/stubs/braintree/braintree/util/crypto.pyi b/stubs/braintree/braintree/util/crypto.pyi index 6ea4a83dc232..6a477c8bc480 100644 --- a/stubs/braintree/braintree/util/crypto.pyi +++ b/stubs/braintree/braintree/util/crypto.pyi @@ -1,9 +1,23 @@ +from _typeshed import ReadableBuffer +from collections.abc import Iterable +from typing import Literal, overload + text_type = str class Crypto: @staticmethod - def sha1_hmac_hash(secret_key, content): ... + def sha1_hmac_hash(secret_key: str | ReadableBuffer, content: str | ReadableBuffer | None) -> str: ... + @staticmethod + def sha256_hmac_hash(secret_key: str | ReadableBuffer, content: str | ReadableBuffer | None) -> str: ... + @overload + @staticmethod + def secure_compare(left: None, right: Iterable[str | bytes | bytearray]) -> Literal[False]: ... + @overload + @staticmethod + def secure_compare(left: Iterable[str | bytes | bytearray], right: None) -> Literal[False]: ... + @overload @staticmethod - def sha256_hmac_hash(secret_key, content): ... + def secure_compare(left: None, right: None) -> Literal[False]: ... + @overload @staticmethod - def secure_compare(left, right): ... + def secure_compare(left: Iterable[str | bytes | bytearray], right: Iterable[str | bytes | bytearray]) -> bool: ... diff --git a/stubs/braintree/braintree/util/datetime_parser.pyi b/stubs/braintree/braintree/util/datetime_parser.pyi index eb12ffa4057c..c2341864097a 100644 --- a/stubs/braintree/braintree/util/datetime_parser.pyi +++ b/stubs/braintree/braintree/util/datetime_parser.pyi @@ -1 +1,3 @@ -def parse_datetime(timestamp): ... +from datetime import datetime + +def parse_datetime(timestamp: str) -> datetime: ... 
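A small sketch of the Crypto helpers as now annotated, including how the secure_compare overloads resolve; the key and payload strings are placeholders:

    from braintree.util.crypto import Crypto

    digest = Crypto.sha256_hmac_hash("secret_key", "payload")   # -> str
    same = Crypto.secure_compare(digest, digest)                # -> bool
    never = Crypto.secure_compare(None, digest)                 # overload: -> Literal[False]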
diff --git a/stubs/braintree/braintree/util/generator.pyi b/stubs/braintree/braintree/util/generator.pyi index fe7d4f3a4d62..cfd6c564f19c 100644 --- a/stubs/braintree/braintree/util/generator.pyi +++ b/stubs/braintree/braintree/util/generator.pyi @@ -1,10 +1,27 @@ -from _typeshed import Incomplete +import datetime +import decimal +from collections.abc import Iterable, Mapping +from typing_extensions import TypeAlias integer_types = int text_type = str binary_type = bytes +_XMLValue: TypeAlias = ( + str + | bytes + | int + | bool + | decimal.Decimal + | Iterable[_XMLValue] + | Mapping[str, _XMLValue] + | datetime.datetime + | datetime.date + | None +) +_XML: TypeAlias = Mapping[str, _XMLValue] + class Generator: - dict: dict[Incomplete, Incomplete] - def __init__(self, dict) -> None: ... - def generate(self): ... + dict: _XML + def __init__(self, dict: _XML) -> None: ... + def generate(self) -> str: ... diff --git a/stubs/braintree/braintree/util/graphql_client.pyi b/stubs/braintree/braintree/util/graphql_client.pyi index e17cab425d00..e893ae0d1e8f 100644 --- a/stubs/braintree/braintree/util/graphql_client.pyi +++ b/stubs/braintree/braintree/util/graphql_client.pyi @@ -1,18 +1,34 @@ from _typeshed import Incomplete +from collections.abc import Iterable from typing import TypedDict +from braintree.configuration import Configuration +from braintree.environment import Environment from braintree.util.http import Http +class _Extension(TypedDict): + errorClass: Incomplete + legacyCode: int | None + +class _Error(TypedDict): + attribute: str | None + code: int | None + message: str | None + extensions: _Extension | None + class _ValidationErrors(TypedDict): - errors: Incomplete + errors: Iterable[_Error] + +class _Response(TypedDict): + errors: Iterable[_Error] | None class GraphQLClient(Http): @staticmethod - def raise_exception_for_graphql_error(response) -> None: ... + def raise_exception_for_graphql_error(response: _Response) -> None: ... graphql_headers: dict[str, str] - def __init__(self, config: Incomplete | None = None, environment: Incomplete | None = None) -> None: ... + def __init__(self, config: Configuration | None = None, environment: Environment | None = None) -> None: ... def query(self, definition, variables: Incomplete | None = None, operation_name: Incomplete | None = None): ... @staticmethod def get_validation_errors(response) -> _ValidationErrors | None: ... @staticmethod - def get_validation_error_code(error) -> Incomplete | None: ... + def get_validation_error_code(error: _Error) -> int | None: ... diff --git a/stubs/braintree/braintree/util/http.pyi b/stubs/braintree/braintree/util/http.pyi index 841b8455c9c5..f061e7ab892d 100644 --- a/stubs/braintree/braintree/util/http.pyi +++ b/stubs/braintree/braintree/util/http.pyi @@ -1,6 +1,9 @@ from _typeshed import Incomplete from typing import Final +from braintree.configuration import Configuration +from braintree.environment import Environment + class Http: class ContentType: Xml: Final = "application/xml" @@ -8,16 +11,18 @@ class Http: Json: Final = "application/json" @staticmethod - def is_error_status(status): ... + def is_error_status(status: int) -> bool: ... @staticmethod - def raise_exception_from_status(status, message: Incomplete | None = None) -> None: ... - config: Incomplete - environment: Incomplete - def __init__(self, config, environment: Incomplete | None = None) -> None: ... - def post(self, path, params: Incomplete | None = None): ... - def delete(self, path): ... - def get(self, path): ... 
- def put(self, path, params: Incomplete | None = None): ... - def post_multipart(self, path, files, params: Incomplete | None = None): ... - def http_do(self, http_verb, path, headers, request_body): ... - def handle_exception(self, exception) -> None: ... + def raise_exception_from_status(status: int, message: str | None = None) -> None: ... + config: Configuration + environment: Environment + def __init__(self, config: Configuration, environment: Environment | None = None) -> None: ... + def post(self, path: str, params: dict[str, Incomplete] | None = None): ... + def delete(self, path: str): ... + def get(self, path: str): ... + def put(self, path: str, params: dict[str, Incomplete] | None = None): ... + def post_multipart(self, path: str, files, params: dict[str, Incomplete] | None = None): ... + def http_do( + self, http_verb: str, path: str, headers: dict[str, Incomplete], request_body: str | tuple[str, Incomplete] | None + ) -> list[int | str]: ... + def handle_exception(self, exception: IOError) -> None: ... diff --git a/stubs/braintree/braintree/util/parser.pyi b/stubs/braintree/braintree/util/parser.pyi index 7214087d72fe..3798ec84dd65 100644 --- a/stubs/braintree/braintree/util/parser.pyi +++ b/stubs/braintree/braintree/util/parser.pyi @@ -1,8 +1,10 @@ from xml.dom.minidom import Document +from .generator import _XML + binary_type = bytes class Parser: doc: Document - def __init__(self, xml) -> None: ... - def parse(self): ... + def __init__(self, xml: str | bytes) -> None: ... + def parse(self) -> _XML: ... diff --git a/stubs/braintree/braintree/util/xml_util.pyi b/stubs/braintree/braintree/util/xml_util.pyi index 6e643fbd516f..eb340ea8c570 100644 --- a/stubs/braintree/braintree/util/xml_util.pyi +++ b/stubs/braintree/braintree/util/xml_util.pyi @@ -1,5 +1,7 @@ +from .generator import _XML + class XmlUtil: @staticmethod - def xml_from_dict(dict): ... + def xml_from_dict(dict: _XML) -> str: ... @staticmethod - def dict_from_xml(xml): ... + def dict_from_xml(xml: str | bytes) -> _XML: ... diff --git a/stubs/braintree/braintree/validation_error_collection.pyi b/stubs/braintree/braintree/validation_error_collection.pyi index 9c4e6044b4d6..d0d677c5c3f8 100644 --- a/stubs/braintree/braintree/validation_error_collection.pyi +++ b/stubs/braintree/braintree/validation_error_collection.pyi @@ -1,18 +1,21 @@ from _typeshed import Incomplete +from typing_extensions import Self + +from braintree.validation_error import ValidationError class ValidationErrorCollection: data: dict[str, Incomplete] - def __init__(self, data: Incomplete | None = None) -> None: ... + def __init__(self, data: dict[str, Incomplete] | None = None) -> None: ... @property - def deep_errors(self): ... - def for_index(self, index): ... - def for_object(self, nested_key): ... - def on(self, attribute): ... + def deep_errors(self) -> list[ValidationError]: ... + def for_index(self, index: int | str) -> Self: ... + def for_object(self, nested_key: str) -> Self: ... + def on(self, attribute: str) -> list[ValidationError]: ... @property - def deep_size(self): ... + def deep_size(self) -> int: ... @property - def errors(self): ... + def errors(self) -> list[ValidationError]: ... @property - def size(self): ... - def __getitem__(self, index): ... + def size(self) -> int: ... + def __getitem__(self, index: int) -> ValidationError: ... def __len__(self) -> int: ... 
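The stub-only _XML alias spells out the dict shape shared by the generator, parser and xml_util stubs above. A rough round-trip sketch, assuming the braintree package is installed (the payload is made up, and _XML itself is not imported because it exists only in the stubs):

    from braintree.util.xml_util import XmlUtil

    payload = {"customer": {"id": "abc123", "active": True, "balance": 0}}
    xml = XmlUtil.xml_from_dict(payload)    # accepts an _XML mapping, returns str
    roundtrip = XmlUtil.dict_from_xml(xml)  # parses back into an _XML mapping
    print(roundtrip)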
diff --git a/stubs/braintree/braintree/visa_checkout_card.pyi b/stubs/braintree/braintree/visa_checkout_card.pyi index 58ceeb6b7073..911e6fd6505b 100644 --- a/stubs/braintree/braintree/visa_checkout_card.pyi +++ b/stubs/braintree/braintree/visa_checkout_card.pyi @@ -9,6 +9,6 @@ class VisaCheckoutCard(Resource): verification: CreditCardVerification def __init__(self, gateway, attributes): ... @property - def expiration_date(self): ... + def expiration_date(self) -> str: ... @property - def masked_number(self): ... + def masked_number(self) -> str: ... diff --git a/stubs/braintree/braintree/webhook_notification.pyi b/stubs/braintree/braintree/webhook_notification.pyi index 541006d65aeb..476818075c5b 100644 --- a/stubs/braintree/braintree/webhook_notification.pyi +++ b/stubs/braintree/braintree/webhook_notification.pyi @@ -68,9 +68,9 @@ class WebhookNotification(Resource): TransactionSettlementDeclined: Final = "transaction_settlement_declined" @staticmethod - def parse(signature, payload): ... + def parse(signature: str, payload: str) -> WebhookNotification: ... @staticmethod - def verify(challenge): ... + def verify(challenge: str) -> str: ... source_merchant_id: Incomplete subscription: Subscription merchant_account: MerchantAccount diff --git a/stubs/braintree/braintree/webhook_notification_gateway.pyi b/stubs/braintree/braintree/webhook_notification_gateway.pyi index 7f7a98325fe3..671fde3deab9 100644 --- a/stubs/braintree/braintree/webhook_notification_gateway.pyi +++ b/stubs/braintree/braintree/webhook_notification_gateway.pyi @@ -1,10 +1,12 @@ from _typeshed import Incomplete +from braintree.webhook_notification import WebhookNotification + text_type = str class WebhookNotificationGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def parse(self, signature, payload): ... - def verify(self, challenge): ... + def parse(self, signature: str, payload: str) -> WebhookNotification: ... + def verify(self, challenge: str) -> str: ... diff --git a/stubs/braintree/braintree/webhook_testing.pyi b/stubs/braintree/braintree/webhook_testing.pyi index 0e10d7a672ff..d47691caa553 100644 --- a/stubs/braintree/braintree/webhook_testing.pyi +++ b/stubs/braintree/braintree/webhook_testing.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - class WebhookTesting: @staticmethod - def sample_notification(kind, id, source_merchant_id: Incomplete | None = None): ... + def sample_notification(kind: str, id: str, source_merchant_id: str | None = None) -> dict[str, str | bytes]: ... diff --git a/stubs/braintree/braintree/webhook_testing_gateway.pyi b/stubs/braintree/braintree/webhook_testing_gateway.pyi index b348d1d74622..8bb417416bd8 100644 --- a/stubs/braintree/braintree/webhook_testing_gateway.pyi +++ b/stubs/braintree/braintree/webhook_testing_gateway.pyi @@ -4,4 +4,4 @@ class WebhookTestingGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def sample_notification(self, kind, id, source_merchant_id: Incomplete | None = None): ... + def sample_notification(self, kind: str, id: str, source_merchant_id: str | None = None) -> dict[str, str | bytes]: ... 
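With parse and verify annotated, a webhook endpoint gets concrete types end to end. A hypothetical handler (the gateway configuration, signature and payload are assumed to come from the surrounding application):

    import braintree

    def handle_webhook(signature: str, payload: str) -> str:
        # parse() is now typed to return WebhookNotification, so .kind resolves
        # without a cast.
        notification = braintree.WebhookNotification.parse(signature, payload)
        return notification.kind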
From dfcf748553300320508ee2f90c37dd96171807ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tam=C3=A1s=20PEREGI?= Date: Sun, 27 Apr 2025 07:03:42 +0200 Subject: [PATCH 258/388] Fix return type of win32wnet.WNetOpenEnum (#13890) --- stubs/pywin32/win32/win32wnet.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/pywin32/win32/win32wnet.pyi b/stubs/pywin32/win32/win32wnet.pyi index 61fe4147a3f7..6fd4e9b0e432 100644 --- a/stubs/pywin32/win32/win32wnet.pyi +++ b/stubs/pywin32/win32/win32wnet.pyi @@ -19,7 +19,7 @@ def WNetAddConnection3( Flags: int = ..., ) -> None: ... def WNetCancelConnection2(name: str, flags, force, /) -> None: ... -def WNetOpenEnum(scope, _type, usage, resource: _win32typing.PyNETRESOURCE, /) -> int: ... +def WNetOpenEnum(scope, _type, usage, resource: _win32typing.PyNETRESOURCE, /) -> _win32typing.PyHANDLE: ... def WNetCloseEnum(handle: _win32typing.PyHANDLE, /) -> None: ... def WNetEnumResource(handle: _win32typing.PyHANDLE, maxExtries: int = ..., /) -> list[_win32typing.PyNETRESOURCE]: ... def WNetGetUser(connection: str | None = ..., /) -> str: ... From f82441de7cee40ba9713bf4b3e2ef9b46a7c0cc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= Date: Sun, 27 Apr 2025 10:23:34 +0200 Subject: [PATCH 259/388] Make AsyncExitStack generic on `__aexit__` (#13888) --- stdlib/contextlib.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stdlib/contextlib.pyi b/stdlib/contextlib.pyi index 70d0dbdcb2f1..4663b448c79c 100644 --- a/stdlib/contextlib.pyi +++ b/stdlib/contextlib.pyi @@ -179,7 +179,7 @@ class AsyncExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): async def __aenter__(self) -> Self: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / - ) -> bool: ... + ) -> _ExitT_co: ... if sys.version_info >= (3, 10): class nullcontext(AbstractContextManager[_T, None], AbstractAsyncContextManager[_T, None]): From d8fc16a1f1d856f61188ea882bfa0c725c7e2a87 Mon Sep 17 00:00:00 2001 From: tmlnv <108088921+tmlnv@users.noreply.github.com> Date: Sun, 27 Apr 2025 17:38:05 +0300 Subject: [PATCH 260/388] Add None to SubsegmentContextManager.__enter__ return type (#13892) --- stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi index ad398ae983c9..634e32b12a84 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi @@ -20,7 +20,7 @@ class SubsegmentContextManager: subsegment: Subsegment def __init__(self, recorder: AWSXRayRecorder, name: Incomplete | None = None, **subsegment_kwargs) -> None: ... def __call__(self, wrapped, instance, args: list[Any], kwargs: dict[str, Any]): ... - def __enter__(self) -> Subsegment: ... + def __enter__(self) -> Subsegment | None: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... 
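The Subsegment | None return above matches the recorder's behaviour when there is no active segment to attach to, in which case the context manager can yield None. A hedged usage sketch (the subsegment name and annotation are invented, and the recorder is assumed to be configured elsewhere):

    from aws_xray_sdk.core import xray_recorder

    with xray_recorder.in_subsegment("downstream-call") as subsegment:
        # __enter__ is typed Subsegment | None, so guard before using it.
        if subsegment is not None:
            subsegment.put_annotation("attempt", 1)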
From c6d7b746ed07403e59f4e7c197cff40810d8427d Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 28 Apr 2025 02:43:46 -0400 Subject: [PATCH 261/388] Remove Any in `stubs/grpcio/grpc_status/rpc_status.pyi` (#13895) --- pyrightconfig.stricter.json | 1 + stubs/grpcio/@tests/test_cases/check_status.py | 8 -------- stubs/grpcio/grpc_status/rpc_status.pyi | 10 ++-------- 3 files changed, 3 insertions(+), 16 deletions(-) delete mode 100644 stubs/grpcio/@tests/test_cases/check_status.py diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 776f3d7df704..1cba7f4512b1 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -45,6 +45,7 @@ "stubs/geopandas", "stubs/google-cloud-ndb", "stubs/grpcio/grpc/__init__.pyi", + "stubs/grpcio/grpc_status/rpc_status.pyi", "stubs/hdbcli/hdbcli/dbapi.pyi", "stubs/html5lib", "stubs/httplib2", diff --git a/stubs/grpcio/@tests/test_cases/check_status.py b/stubs/grpcio/@tests/test_cases/check_status.py deleted file mode 100644 index b9e1776b68cf..000000000000 --- a/stubs/grpcio/@tests/test_cases/check_status.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import annotations - -from grpc import Status -from grpc_status import to_status - -# XXX: to_status actually expects a "google.rpc.status.Status", -# but the stubs for that aren't present yet. -status: Status = to_status(None) diff --git a/stubs/grpcio/grpc_status/rpc_status.pyi b/stubs/grpcio/grpc_status/rpc_status.pyi index abf6b8761f3c..090716d5cfe9 100644 --- a/stubs/grpcio/grpc_status/rpc_status.pyi +++ b/stubs/grpcio/grpc_status/rpc_status.pyi @@ -1,13 +1,7 @@ -from typing import Any - import grpc -# XXX: don't yet know how to add a stub for google.rpc.status_pb2.Status -# without affecting other stuff; may need to make a stub-only package for -# google.rpc as well. - # Returns a google.rpc.status.Status message corresponding to a given grpc.Call. -def from_call(call: grpc.Call) -> Any: ... +def from_call(call: grpc.Call): ... # Convert a google.rpc.status.Status message to grpc.Status. -def to_status(status: Any) -> grpc.Status: ... +def to_status(status) -> grpc.Status: ... 
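to_status still expects a google.rpc.status_pb2.Status message even though the parameter is left untyped here. A sketch of the usual server-side pattern, assuming grpcio-status and googleapis-common-protos are installed (the error message is invented):

    import grpc
    from google.rpc import code_pb2, status_pb2
    from grpc_status import rpc_status

    def abort_not_found(context: grpc.ServicerContext) -> None:
        detail = status_pb2.Status(code=code_pb2.NOT_FOUND, message="requested row does not exist")
        # to_status is annotated to return grpc.Status, which abort_with_status accepts.
        context.abort_with_status(rpc_status.to_status(detail))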
From 197ea4abd0169737c00417fc016849c068cb0fdd Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 28 Apr 2025 06:24:26 -0400 Subject: [PATCH 262/388] Apply mypy-tests custom config to other mypy-based tests (#13825) --- lib/ts_utils/metadata.py | 1 + lib/ts_utils/mypy.py | 64 +++++++++++++ lib/ts_utils/utils.py | 34 ++++++- pyproject.toml | 5 ++ tests/mypy_test.py | 122 ++++++------------------- tests/regr_test.py | 112 ++++++++++++----------- tests/stubtest_third_party.py | 165 ++++++++++++++++++---------------- 7 files changed, 276 insertions(+), 227 deletions(-) create mode 100644 lib/ts_utils/mypy.py diff --git a/lib/ts_utils/metadata.py b/lib/ts_utils/metadata.py index f851ce536519..ec30f9301425 100644 --- a/lib/ts_utils/metadata.py +++ b/lib/ts_utils/metadata.py @@ -166,6 +166,7 @@ def is_obsolete(self) -> bool: "tool", "partial_stub", "requires_python", + "mypy-tests", } ) _KNOWN_METADATA_TOOL_FIELDS: Final = { diff --git a/lib/ts_utils/mypy.py b/lib/ts_utils/mypy.py new file mode 100644 index 000000000000..7fc050b155d1 --- /dev/null +++ b/lib/ts_utils/mypy.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +from collections.abc import Generator, Iterable +from contextlib import contextmanager +from typing import Any, NamedTuple + +import tomli + +from ts_utils.metadata import metadata_path +from ts_utils.utils import NamedTemporaryFile, TemporaryFileWrapper + + +class MypyDistConf(NamedTuple): + module_name: str + values: dict[str, dict[str, Any]] + + +# The configuration section in the metadata file looks like the following, with multiple module sections possible +# [mypy-tests] +# [mypy-tests.yaml] +# module_name = "yaml" +# [mypy-tests.yaml.values] +# disallow_incomplete_defs = true +# disallow_untyped_defs = true + + +def mypy_configuration_from_distribution(distribution: str) -> list[MypyDistConf]: + with metadata_path(distribution).open("rb") as f: + data = tomli.load(f) + + # TODO: This could be added to ts_utils.metadata + mypy_tests_conf: dict[str, dict[str, Any]] = data.get("mypy-tests", {}) + if not mypy_tests_conf: + return [] + + def validate_configuration(section_name: str, mypy_section: dict[str, Any]) -> MypyDistConf: + assert isinstance(mypy_section, dict), f"{section_name} should be a section" + module_name = mypy_section.get("module_name") + + assert module_name is not None, f"{section_name} should have a module_name key" + assert isinstance(module_name, str), f"{section_name} should be a key-value pair" + + assert "values" in mypy_section, f"{section_name} should have a values section" + values: dict[str, dict[str, Any]] = mypy_section["values"] + assert isinstance(values, dict), "values should be a section" + return MypyDistConf(module_name, values.copy()) + + assert isinstance(mypy_tests_conf, dict), "mypy-tests should be a section" + return [validate_configuration(section_name, mypy_section) for section_name, mypy_section in mypy_tests_conf.items()] + + +@contextmanager +def temporary_mypy_config_file(configurations: Iterable[MypyDistConf]) -> Generator[TemporaryFileWrapper[str]]: + temp = NamedTemporaryFile("w+") + try: + for dist_conf in configurations: + temp.write(f"[mypy-{dist_conf.module_name}]\n") + for k, v in dist_conf.values.items(): + temp.write(f"{k} = {v}\n") + temp.write("[mypy]\n") + temp.flush() + yield temp + finally: + temp.close() diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py index e4a687600099..fba574d7557f 100644 --- a/lib/ts_utils/utils.py +++ b/lib/ts_utils/utils.py @@ -3,16 +3,24 @@ from __future__ import annotations 
import functools +import os import re import sys +import tempfile from collections.abc import Iterable, Mapping from pathlib import Path -from typing import Any, Final, NamedTuple +from types import MethodType +from typing import TYPE_CHECKING, Any, Final, NamedTuple from typing_extensions import TypeAlias import pathspec from packaging.requirements import Requirement +from .paths import REQUIREMENTS_PATH, STDLIB_PATH, STUBS_PATH, TEST_CASES_DIR, allowlists_path, test_cases_path + +if TYPE_CHECKING: + from _typeshed import OpenTextMode + try: from termcolor import colored as colored # pyright: ignore[reportAssignmentType] except ImportError: @@ -21,8 +29,6 @@ def colored(text: str, color: str | None = None, **kwargs: Any) -> str: # type: return text -from .paths import REQUIREMENTS_PATH, STDLIB_PATH, STUBS_PATH, TEST_CASES_DIR, allowlists_path, test_cases_path - PYTHON_VERSION: Final = f"{sys.version_info.major}.{sys.version_info.minor}" @@ -196,6 +202,26 @@ def allowlists(distribution_name: str) -> list[str]: return ["stubtest_allowlist.txt", platform_allowlist] +# Re-exposing as a public name to avoid many pyright reportPrivateUsage +TemporaryFileWrapper = tempfile._TemporaryFileWrapper # pyright: ignore[reportPrivateUsage] + +# We need to work around a limitation of tempfile.NamedTemporaryFile on Windows +# For details, see https://github.com/python/typeshed/pull/13620#discussion_r1990185997 +# Python 3.12 added a cross-platform solution with `tempfile.NamedTemporaryFile("w+", delete_on_close=False)` +if sys.platform != "win32": + NamedTemporaryFile = tempfile.NamedTemporaryFile # noqa: TID251 +else: + + def NamedTemporaryFile(mode: OpenTextMode) -> TemporaryFileWrapper[str]: # noqa: N802 + def close(self: TemporaryFileWrapper[str]) -> None: + TemporaryFileWrapper.close(self) # pyright: ignore[reportUnknownMemberType] + os.remove(self.name) + + temp = tempfile.NamedTemporaryFile(mode, delete=False) # noqa: SIM115, TID251 + temp.close = MethodType(close, temp) # type: ignore[method-assign] + return temp + + # ==================================================================== # Parsing .gitignore # ==================================================================== @@ -215,7 +241,7 @@ def spec_matches_path(spec: pathspec.PathSpec, path: Path) -> bool: # ==================================================================== -# mypy/stubtest call +# stubtest call # ==================================================================== diff --git a/pyproject.toml b/pyproject.toml index 5d6bd434156b..b3e1a5821102 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -139,6 +139,8 @@ select = [ "TC005", # Found empty type-checking block # "TC008", # TODO: Enable when out of preview "TC010", # Invalid string member in `X | Y`-style union type + # Used for lint.flake8-import-conventions.aliases + "TID251", # `{name}` is banned: {message} ] extend-safe-fixes = [ "UP036", # Remove unnecessary `sys.version_info` blocks @@ -235,6 +237,9 @@ convention = "pep257" # https://docs.astral.sh/ruff/settings/#lint_pydocstyle_co typing_extensions = "typing_extensions" typing = "typing" +[tool.ruff.lint.flake8-tidy-imports.banned-api] +"tempfile.NamedTemporaryFile".msg = "Use `ts_util.util.NamedTemporaryFile` instead." 
+ [tool.ruff.lint.isort] split-on-trailing-comma = false combine-as-imports = true diff --git a/tests/mypy_test.py b/tests/mypy_test.py index 2eeb532d1ca6..84c8fa1467a8 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -5,14 +5,12 @@ import argparse import concurrent.futures -import functools import os import subprocess import sys import tempfile import time from collections import defaultdict -from collections.abc import Generator from dataclasses import dataclass from enum import Enum from itertools import product @@ -21,10 +19,10 @@ from typing import Annotated, Any, NamedTuple from typing_extensions import TypeAlias -import tomli from packaging.requirements import Requirement -from ts_utils.metadata import PackageDependencies, get_recursive_requirements, metadata_path, read_metadata +from ts_utils.metadata import PackageDependencies, get_recursive_requirements, read_metadata +from ts_utils.mypy import MypyDistConf, mypy_configuration_from_distribution, temporary_mypy_config_file from ts_utils.paths import STDLIB_PATH, STUBS_PATH, TESTS_DIR, TS_BASE_PATH, distribution_path from ts_utils.utils import ( PYTHON_VERSION, @@ -46,24 +44,6 @@ print_error("Cannot import mypy. Did you install it?") sys.exit(1) -# We need to work around a limitation of tempfile.NamedTemporaryFile on Windows -# For details, see https://github.com/python/typeshed/pull/13620#discussion_r1990185997 -# Python 3.12 added a workaround with `tempfile.NamedTemporaryFile("w+", delete_on_close=False)` -if sys.platform != "win32": - _named_temporary_file = functools.partial(tempfile.NamedTemporaryFile, "w+") -else: - from contextlib import contextmanager - - @contextmanager - def _named_temporary_file() -> Generator[tempfile._TemporaryFileWrapper[str]]: # pyright: ignore[reportPrivateUsage] - temp = tempfile.NamedTemporaryFile("w+", delete=False) # noqa: SIM115 - try: - yield temp - finally: - temp.close() - os.remove(temp.name) - - SUPPORTED_VERSIONS = ["3.13", "3.12", "3.11", "3.10", "3.9"] SUPPORTED_PLATFORMS = ("linux", "win32", "darwin") DIRECTORIES_TO_TEST = [STDLIB_PATH, STUBS_PATH] @@ -177,49 +157,20 @@ def add_files(files: list[Path], module: Path, args: TestConfig) -> None: files.extend(sorted(file for file in module.rglob("*.pyi") if match(file, args))) -class MypyDistConf(NamedTuple): - module_name: str - values: dict[str, dict[str, Any]] - - -# The configuration section in the metadata file looks like the following, with multiple module sections possible -# [mypy-tests] -# [mypy-tests.yaml] -# module_name = "yaml" -# [mypy-tests.yaml.values] -# disallow_incomplete_defs = true -# disallow_untyped_defs = true - - -def add_configuration(configurations: list[MypyDistConf], distribution: str) -> None: - with metadata_path(distribution).open("rb") as f: - data = tomli.load(f) - - # TODO: This could be added to ts_utils.metadata, but is currently unused - mypy_tests_conf: dict[str, dict[str, Any]] = data.get("mypy-tests", {}) - if not mypy_tests_conf: - return - - assert isinstance(mypy_tests_conf, dict), "mypy-tests should be a section" - for section_name, mypy_section in mypy_tests_conf.items(): - assert isinstance(mypy_section, dict), f"{section_name} should be a section" - module_name = mypy_section.get("module_name") - - assert module_name is not None, f"{section_name} should have a module_name key" - assert isinstance(module_name, str), f"{section_name} should be a key-value pair" - - assert "values" in mypy_section, f"{section_name} should have a values section" - values: dict[str, dict[str, Any]] = 
mypy_section["values"] - assert isinstance(values, dict), "values should be a section" - - configurations.append(MypyDistConf(module_name, values.copy())) - - class MypyResult(Enum): SUCCESS = 0 FAILURE = 1 CRASH = 2 + @staticmethod + def from_process_result(result: subprocess.CompletedProcess[Any]) -> MypyResult: + if result.returncode == 0: + return MypyResult.SUCCESS + elif result.returncode == 1: + return MypyResult.FAILURE + else: + return MypyResult.CRASH + def run_mypy( args: TestConfig, @@ -234,15 +185,7 @@ def run_mypy( env_vars = dict(os.environ) if mypypath is not None: env_vars["MYPYPATH"] = mypypath - - with _named_temporary_file() as temp: - temp.write("[mypy]\n") - for dist_conf in configurations: - temp.write(f"[mypy-{dist_conf.module_name}]\n") - for k, v in dist_conf.values.items(): - temp.write(f"{k} = {v}\n") - temp.flush() - + with temporary_mypy_config_file(configurations) as temp: flags = [ "--python-version", args.version, @@ -278,29 +221,23 @@ def run_mypy( if args.verbose: print(colored(f"running {' '.join(mypy_command)}", "blue")) result = subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars, check=False) - if result.returncode: - print_error(f"failure (exit code {result.returncode})\n") - if result.stdout: - print_error(result.stdout) - if result.stderr: - print_error(result.stderr) - if non_types_dependencies and args.verbose: - print("Ran with the following environment:") - subprocess.run(["uv", "pip", "freeze"], env={**os.environ, "VIRTUAL_ENV": str(venv_dir)}, check=False) - print() - else: - print_success_msg() - if result.returncode == 0: - return MypyResult.SUCCESS - elif result.returncode == 1: - return MypyResult.FAILURE - else: - return MypyResult.CRASH + if result.returncode: + print_error(f"failure (exit code {result.returncode})\n") + if result.stdout: + print_error(result.stdout) + if result.stderr: + print_error(result.stderr) + if non_types_dependencies and args.verbose: + print("Ran with the following environment:") + subprocess.run(["uv", "pip", "freeze"], env={**os.environ, "VIRTUAL_ENV": str(venv_dir)}, check=False) + print() + else: + print_success_msg() + + return MypyResult.from_process_result(result) -def add_third_party_files( - distribution: str, files: list[Path], args: TestConfig, configurations: list[MypyDistConf], seen_dists: set[str] -) -> None: +def add_third_party_files(distribution: str, files: list[Path], args: TestConfig, seen_dists: set[str]) -> None: typeshed_reqs = get_recursive_requirements(distribution).typeshed_pkgs if distribution in seen_dists: return @@ -311,7 +248,6 @@ def add_third_party_files( if name.startswith("."): continue add_files(files, (root / name), args) - add_configuration(configurations, distribution) class TestResult(NamedTuple): @@ -328,9 +264,9 @@ def test_third_party_distribution( and the second element is the number of checked files. 
""" files: list[Path] = [] - configurations: list[MypyDistConf] = [] seen_dists: set[str] = set() - add_third_party_files(distribution, files, args, configurations, seen_dists) + add_third_party_files(distribution, files, args, seen_dists) + configurations = mypy_configuration_from_distribution(distribution) if not files and args.filter: return TestResult(MypyResult.SUCCESS, 0) diff --git a/tests/regr_test.py b/tests/regr_test.py index fc4e48c55ff6..32bfb259390c 100755 --- a/tests/regr_test.py +++ b/tests/regr_test.py @@ -22,6 +22,7 @@ from typing_extensions import TypeAlias from ts_utils.metadata import get_recursive_requirements, read_metadata +from ts_utils.mypy import mypy_configuration_from_distribution, temporary_mypy_config_file from ts_utils.paths import STDLIB_PATH, TEST_CASES_DIR, TS_BASE_PATH, distribution_path from ts_utils.utils import ( PYTHON_VERSION, @@ -169,62 +170,71 @@ def run_testcases( env_vars = dict(os.environ) new_test_case_dir = tempdir / TEST_CASES_DIR - # "--enable-error-code ignore-without-code" is purposefully omitted. - # See https://github.com/python/typeshed/pull/8083 - flags = [ - "--python-version", - version, - "--show-traceback", - "--no-error-summary", - "--platform", - platform, - "--strict", - "--pretty", - # Avoid race conditions when reading the cache - # (https://github.com/python/typeshed/issues/11220) - "--no-incremental", - # Not useful for the test cases - "--disable-error-code=empty-body", - ] - if package.is_stdlib: - python_exe = sys.executable - custom_typeshed = TS_BASE_PATH - flags.append("--no-site-packages") + configurations = [] else: - custom_typeshed = tempdir / TYPESHED - env_vars["MYPYPATH"] = os.pathsep.join(map(str, custom_typeshed.glob("stubs/*"))) - has_non_types_dependencies = (tempdir / VENV_DIR).exists() - if has_non_types_dependencies: - python_exe = str(venv_python(tempdir / VENV_DIR)) - else: + configurations = mypy_configuration_from_distribution(package.name) + + with temporary_mypy_config_file(configurations) as temp: + + # "--enable-error-code ignore-without-code" is purposefully omitted. 
+ # See https://github.com/python/typeshed/pull/8083 + flags = [ + "--python-version", + version, + "--show-traceback", + "--no-error-summary", + "--platform", + platform, + "--strict", + "--pretty", + "--config-file", + temp.name, + # Avoid race conditions when reading the cache + # (https://github.com/python/typeshed/issues/11220) + "--no-incremental", + # Not useful for the test cases + "--disable-error-code=empty-body", + ] + + if package.is_stdlib: python_exe = sys.executable + custom_typeshed = TS_BASE_PATH flags.append("--no-site-packages") - - flags.extend(["--custom-typeshed-dir", str(custom_typeshed)]) - - # If the test-case filename ends with -py39, - # only run the test if --python-version was set to 3.9 or higher (for example) - for path in new_test_case_dir.rglob("*.py"): - if match := re.fullmatch(r".*-py3(\d{1,2})", path.stem): - minor_version_required = int(match[1]) - assert f"3.{minor_version_required}" in SUPPORTED_VERSIONS - python_minor_version = int(version.split(".")[1]) - if minor_version_required > python_minor_version: - continue - flags.append(str(path)) - - mypy_command = [python_exe, "-m", "mypy", *flags] - if verbosity is Verbosity.VERBOSE: - description = f"{package.name}/{version}/{platform}" - msg = f"{description}: {mypy_command=}\n" - if "MYPYPATH" in env_vars: - msg += f"{description}: {env_vars['MYPYPATH']=}" else: - msg += f"{description}: MYPYPATH not set" - msg += "\n" - verbose_log(msg) - return subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars, check=False) + custom_typeshed = tempdir / TYPESHED + env_vars["MYPYPATH"] = os.pathsep.join(map(str, custom_typeshed.glob("stubs/*"))) + has_non_types_dependencies = (tempdir / VENV_DIR).exists() + if has_non_types_dependencies: + python_exe = str(venv_python(tempdir / VENV_DIR)) + else: + python_exe = sys.executable + flags.append("--no-site-packages") + + flags.extend(["--custom-typeshed-dir", str(custom_typeshed)]) + + # If the test-case filename ends with -py39, + # only run the test if --python-version was set to 3.9 or higher (for example) + for path in new_test_case_dir.rglob("*.py"): + if match := re.fullmatch(r".*-py3(\d{1,2})", path.stem): + minor_version_required = int(match[1]) + assert f"3.{minor_version_required}" in SUPPORTED_VERSIONS + python_minor_version = int(version.split(".")[1]) + if minor_version_required > python_minor_version: + continue + flags.append(str(path)) + + mypy_command = [python_exe, "-m", "mypy", *flags] + if verbosity is Verbosity.VERBOSE: + description = f"{package.name}/{version}/{platform}" + msg = f"{description}: {mypy_command=}\n" + if "MYPYPATH" in env_vars: + msg += f"{description}: {env_vars['MYPYPATH']=}" + else: + msg += f"{description}: MYPYPATH not set" + msg += "\n" + verbose_log(msg) + return subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars, check=False) @dataclass(frozen=True) diff --git a/tests/stubtest_third_party.py b/tests/stubtest_third_party.py index 8b8cb6265dfd..0530f6279628 100755 --- a/tests/stubtest_third_party.py +++ b/tests/stubtest_third_party.py @@ -16,6 +16,7 @@ from typing import NoReturn from ts_utils.metadata import NoSuchStubError, get_recursive_requirements, read_metadata +from ts_utils.mypy import mypy_configuration_from_distribution, temporary_mypy_config_file from ts_utils.paths import STUBS_PATH, allowlists_path, tests_path from ts_utils.utils import ( PYTHON_VERSION, @@ -95,89 +96,95 @@ def run_stubtest( print_command_failure("Failed to install", e) return False - 
ignore_missing_stub = ["--ignore-missing-stub"] if stubtest_settings.ignore_missing_stub else [] - packages_to_check = [d.name for d in dist.iterdir() if d.is_dir() and d.name.isidentifier()] - modules_to_check = [d.stem for d in dist.iterdir() if d.is_file() and d.suffix == ".pyi"] - stubtest_cmd = [ - python_exe, - "-m", - "mypy.stubtest", - # Use --custom-typeshed-dir in case we make linked changes to stdlib or _typeshed - "--custom-typeshed-dir", - str(dist.parent.parent), - *ignore_missing_stub, - *packages_to_check, - *modules_to_check, - *allowlist_stubtest_arguments(dist_name), - ] + mypy_configuration = mypy_configuration_from_distribution(dist_name) + with temporary_mypy_config_file(mypy_configuration) as temp: + ignore_missing_stub = ["--ignore-missing-stub"] if stubtest_settings.ignore_missing_stub else [] + packages_to_check = [d.name for d in dist.iterdir() if d.is_dir() and d.name.isidentifier()] + modules_to_check = [d.stem for d in dist.iterdir() if d.is_file() and d.suffix == ".pyi"] + stubtest_cmd = [ + python_exe, + "-m", + "mypy.stubtest", + "--mypy-config-file", + temp.name, + # Use --custom-typeshed-dir in case we make linked changes to stdlib or _typeshed + "--custom-typeshed-dir", + str(dist.parent.parent), + *ignore_missing_stub, + *packages_to_check, + *modules_to_check, + *allowlist_stubtest_arguments(dist_name), + ] + + stubs_dir = dist.parent + mypypath_items = [str(dist)] + [str(stubs_dir / pkg.name) for pkg in requirements.typeshed_pkgs] + mypypath = os.pathsep.join(mypypath_items) + # For packages that need a display, we need to pass at least $DISPLAY + # to stubtest. $DISPLAY is set by xvfb-run in CI. + # + # It seems that some other environment variables are needed too, + # because the CI fails if we pass only os.environ["DISPLAY"]. I didn't + # "bisect" to see which variables are actually needed. + stubtest_env = os.environ | {"MYPYPATH": mypypath, "MYPY_FORCE_COLOR": "1"} + + # Perform some black magic in order to run stubtest inside uWSGI + if dist_name == "uWSGI": + if not setup_uwsgi_stubtest_command(dist, venv_dir, stubtest_cmd): + return False + + if dist_name == "gdb": + if not setup_gdb_stubtest_command(venv_dir, stubtest_cmd): + return False + + try: + subprocess.run(stubtest_cmd, env=stubtest_env, check=True, capture_output=True) + except subprocess.CalledProcessError as e: + print_time(time() - t) + print_error("fail") + + print_divider() + print("Commands run:") + print_commands(pip_cmd, stubtest_cmd, mypypath) + + print_divider() + print("Command output:\n") + print_command_output(e) + + print_divider() + print("Python version: ", end="", flush=True) + ret = subprocess.run([sys.executable, "-VV"], capture_output=True, check=False) + print_command_output(ret) - stubs_dir = dist.parent - mypypath_items = [str(dist)] + [str(stubs_dir / pkg.name) for pkg in requirements.typeshed_pkgs] - mypypath = os.pathsep.join(mypypath_items) - # For packages that need a display, we need to pass at least $DISPLAY - # to stubtest. $DISPLAY is set by xvfb-run in CI. - # - # It seems that some other environment variables are needed too, - # because the CI fails if we pass only os.environ["DISPLAY"]. I didn't - # "bisect" to see which variables are actually needed. 
- stubtest_env = os.environ | {"MYPYPATH": mypypath, "MYPY_FORCE_COLOR": "1"} - - # Perform some black magic in order to run stubtest inside uWSGI - if dist_name == "uWSGI": - if not setup_uwsgi_stubtest_command(dist, venv_dir, stubtest_cmd): - return False + print("\nRan with the following environment:") + ret = subprocess.run([pip_exe, "freeze", "--all"], capture_output=True, check=False) + print_command_output(ret) + if keep_tmp_dir: + print("Path to virtual environment:", venv_dir, flush=True) + + print_divider() + main_allowlist_path = allowlists_path(dist_name) / "stubtest_allowlist.txt" + if main_allowlist_path.exists(): + print(f'To fix "unused allowlist" errors, remove the corresponding entries from {main_allowlist_path}') + print() + else: + print(f"Re-running stubtest with --generate-allowlist.\nAdd the following to {main_allowlist_path}:") + ret = subprocess.run( + [*stubtest_cmd, "--generate-allowlist"], env=stubtest_env, capture_output=True, check=False + ) + print_command_output(ret) + + print_divider() + print(f"Upstream repository: {metadata.upstream_repository}") + print(f"Typeshed source code: https://github.com/python/typeshed/tree/main/stubs/{dist.name}") + + print_divider() - if dist_name == "gdb": - if not setup_gdb_stubtest_command(venv_dir, stubtest_cmd): return False - - try: - subprocess.run(stubtest_cmd, env=stubtest_env, check=True, capture_output=True) - except subprocess.CalledProcessError as e: - print_time(time() - t) - print_error("fail") - - print_divider() - print("Commands run:") - print_commands(pip_cmd, stubtest_cmd, mypypath) - - print_divider() - print("Command output:\n") - print_command_output(e) - - print_divider() - print("Python version: ", end="", flush=True) - ret = subprocess.run([sys.executable, "-VV"], capture_output=True, check=False) - print_command_output(ret) - - print("\nRan with the following environment:") - ret = subprocess.run([pip_exe, "freeze", "--all"], capture_output=True, check=False) - print_command_output(ret) - if keep_tmp_dir: - print("Path to virtual environment:", venv_dir, flush=True) - - print_divider() - main_allowlist_path = allowlists_path(dist_name) / "stubtest_allowlist.txt" - if main_allowlist_path.exists(): - print(f'To fix "unused allowlist" errors, remove the corresponding entries from {main_allowlist_path}') - print() else: - print(f"Re-running stubtest with --generate-allowlist.\nAdd the following to {main_allowlist_path}:") - ret = subprocess.run([*stubtest_cmd, "--generate-allowlist"], env=stubtest_env, capture_output=True, check=False) - print_command_output(ret) - - print_divider() - print(f"Upstream repository: {metadata.upstream_repository}") - print(f"Typeshed source code: https://github.com/python/typeshed/tree/main/stubs/{dist.name}") - - print_divider() - - return False - else: - print_time(time() - t) - print_success_msg() - if keep_tmp_dir: - print_info(f"Virtual environment kept at: {venv_dir}") + print_time(time() - t) + print_success_msg() + if keep_tmp_dir: + print_info(f"Virtual environment kept at: {venv_dir}") finally: if not keep_tmp_dir: rmtree(venv_dir) From 17ab6fd203281af02969d6040a09183f88ebc67b Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 28 Apr 2025 14:27:03 +0400 Subject: [PATCH 263/388] Bump setuptools to 80.0.* (#13898) --- stubs/setuptools/METADATA.toml | 2 +- .../setuptools/setuptools/command/develop.pyi | 40 +++++------- .../setuptools/command/easy_install.pyi | 4 +- .../setuptools/setuptools/command/install.pyi | 4 +- stubs/setuptools/setuptools/sandbox.pyi | 63 
------------------- 5 files changed, 20 insertions(+), 93 deletions(-) delete mode 100644 stubs/setuptools/setuptools/sandbox.pyi diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index 7532fd1a3dce..07aa716d5608 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,4 +1,4 @@ -version = "79.0.*" +version = "80.0.*" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ diff --git a/stubs/setuptools/setuptools/command/develop.pyi b/stubs/setuptools/setuptools/command/develop.pyi index 87b85692fa4d..afad2e9a4568 100644 --- a/stubs/setuptools/setuptools/command/develop.pyi +++ b/stubs/setuptools/setuptools/command/develop.pyi @@ -1,34 +1,24 @@ from _typeshed import Incomplete from typing import ClassVar +from typing_extensions import deprecated -from pkg_resources import Distribution +from setuptools import Command +from setuptools.warnings import SetuptoolsDeprecationWarning -from .. import namespaces -from .easy_install import easy_install - -class develop(namespaces.DevelopInstaller, easy_install): - description: str +class develop(Command): user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] - command_consumes_arguments: bool - multi_version: bool - def run(self) -> None: ... # type: ignore[override] - uninstall: Incomplete - egg_path: Incomplete - setup_path: Incomplete - always_copy_from: str + install_dir: Incomplete + no_deps: bool + user: bool + prefix: Incomplete + index_url: Incomplete + def run(self) -> None: ... + @deprecated( + "develop command is deprecated. Please avoid running `setup.py` and `develop`. " + "Instead, use standards-based tools like pip or uv." + ) def initialize_options(self) -> None: ... - args: list[Incomplete] - egg_link: str - egg_base: Incomplete - dist: Distribution def finalize_options(self) -> None: ... - def install_for_development(self) -> None: ... - def uninstall_link(self) -> None: ... - def install_egg_scripts(self, dist): ... - def install_wrapper_scripts(self, dist): ... -class VersionlessRequirement: - def __init__(self, dist) -> None: ... - def __getattr__(self, name: str): ... - def as_requirement(self): ... +class DevelopDeprecationWarning(SetuptoolsDeprecationWarning): ... diff --git a/stubs/setuptools/setuptools/command/easy_install.pyi b/stubs/setuptools/setuptools/command/easy_install.pyi index 0255102d8d8b..0f79115b0e3b 100644 --- a/stubs/setuptools/setuptools/command/easy_install.pyi +++ b/stubs/setuptools/setuptools/command/easy_install.pyi @@ -56,7 +56,7 @@ class easy_install(Command): def finalize_options(self) -> None: ... def expand_basedirs(self) -> None: ... def expand_dirs(self) -> None: ... - def run(self, show_deprecation: bool = True) -> None: ... + def run(self, show_deprecation: bool = True) -> NoReturn: ... def pseudo_tempname(self): ... def warn_deprecated_options(self) -> None: ... def check_site_dir(self) -> None: ... @@ -83,7 +83,7 @@ class easy_install(Command): def install_wheel(self, wheel_path, tmpdir): ... def installation_report(self, req, dist, what: str = "Installed") -> str: ... def report_editable(self, spec, setup_script): ... - def run_setup(self, setup_script, setup_base, args) -> None: ... + def run_setup(self, setup_script, setup_base, args) -> NoReturn: ... def build_and_install(self, setup_script, setup_base): ... def update_pth(self, dist) -> None: ... def unpack_progress(self, src, dst): ... 
diff --git a/stubs/setuptools/setuptools/command/install.pyi b/stubs/setuptools/setuptools/command/install.pyi index 064c11a6dd69..c449715c9dbf 100644 --- a/stubs/setuptools/setuptools/command/install.pyi +++ b/stubs/setuptools/setuptools/command/install.pyi @@ -1,6 +1,6 @@ from _typeshed import Incomplete from collections.abc import Callable -from typing import Any, ClassVar +from typing import Any, ClassVar, NoReturn from setuptools.dist import Distribution @@ -20,4 +20,4 @@ class install(orig.install): extra_dirs: str def handle_extra_path(self): ... def run(self): ... - def do_egg_install(self) -> None: ... + def do_egg_install(self) -> NoReturn: ... diff --git a/stubs/setuptools/setuptools/sandbox.pyi b/stubs/setuptools/setuptools/sandbox.pyi deleted file mode 100644 index ee5935c056ec..000000000000 --- a/stubs/setuptools/setuptools/sandbox.pyi +++ /dev/null @@ -1,63 +0,0 @@ -import sys -from types import TracebackType -from typing import ClassVar -from typing_extensions import Self - -from ._distutils.errors import DistutilsError - -__all__ = ["AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup"] - -class UnpickleableException(Exception): - @staticmethod - def dump(type, exc): ... - -class ExceptionSaver: - def __enter__(self) -> Self: ... - def __exit__(self, type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> bool: ... - def resume(self) -> None: ... - -def run_setup(setup_script, args): ... - -class AbstractSandbox: - def __enter__(self) -> None: ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None - ) -> None: ... - def run(self, func): ... - # Dynamically created - if sys.platform == "win32": - def startfile(self, path, *args, **kw): ... - else: - def chown(self, path, *args, **kw): ... - def chroot(self, path, *args, **kw): ... - def lchown(self, path, *args, **kw): ... - def mkfifo(self, path, *args, **kw): ... - def mknod(self, path, *args, **kw): ... - def pathconf(self, path, *args, **kw): ... - - def access(self, path, *args, **kw): ... - def chdir(self, path, *args, **kw): ... - def chmod(self, path, *args, **kw): ... - def getcwd(self, *args, **kw): ... - def link(self, src, dst, *args, **kw): ... - def listdir(self, path, *args, **kw): ... - def lstat(self, path, *args, **kw): ... - def mkdir(self, path, *args, **kw): ... - def open(self, path, *args, **kw): ... - def readlink(self, path, *args, **kw): ... - def remove(self, path, *args, **kw): ... - def rename(self, src, dst, *args, **kw): ... - def rmdir(self, path, *args, **kw): ... - def stat(self, path, *args, **kw): ... - def symlink(self, src, dst, *args, **kw): ... - def unlink(self, path, *args, **kw): ... - def utime(self, path, *args, **kw): ... - -class DirectorySandbox(AbstractSandbox): - write_ops: ClassVar[dict[str, None]] - def __init__(self, sandbox, exceptions=...) -> None: ... - def tmpnam(self) -> None: ... - def open(self, file, flags, mode: int = 511, *args, **kw): ... 
# type: ignore[override] - -class SandboxViolation(DistutilsError): - tmpl: str From 87369e61c321b3e4fe826e288802397a473eb568 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 28 Apr 2025 15:34:51 +0400 Subject: [PATCH 264/388] Improve `pywin32.isapi` (#13889) --- stubs/pywin32/isapi/__init__.pyi | 10 +- stubs/pywin32/isapi/install.pyi | 45 +++--- stubs/pywin32/isapi/isapicon.pyi | 170 ++++++++++----------- stubs/pywin32/isapi/simple.pyi | 2 +- stubs/pywin32/isapi/threaded_extension.pyi | 22 +-- 5 files changed, 126 insertions(+), 123 deletions(-) diff --git a/stubs/pywin32/isapi/__init__.pyi b/stubs/pywin32/isapi/__init__.pyi index bf54f831793f..107d2b2c9e26 100644 --- a/stubs/pywin32/isapi/__init__.pyi +++ b/stubs/pywin32/isapi/__init__.pyi @@ -1,10 +1,8 @@ -from _typeshed import Incomplete - class ISAPIError(Exception): - errno: Incomplete - strerror: Incomplete - funcname: Incomplete - def __init__(self, errno, strerror: Incomplete | None = None, funcname: Incomplete | None = None) -> None: ... + errno: int + strerror: str | None + funcname: str | None + def __init__(self, errno: int, strerror: str | None = None, funcname: str | None = None) -> None: ... class FilterError(ISAPIError): ... class ExtensionError(ISAPIError): ... diff --git a/stubs/pywin32/isapi/install.pyi b/stubs/pywin32/isapi/install.pyi index 7f224506f6d7..ae3a54a2402f 100644 --- a/stubs/pywin32/isapi/install.pyi +++ b/stubs/pywin32/isapi/install.pyi @@ -1,6 +1,9 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsGetItem, Unused +from collections.abc import Callable, Iterable, Mapping +from optparse import OptionParser +from typing import Final, Literal -this_dir: Incomplete +this_dir: str class FilterParameters: Name: Incomplete @@ -33,8 +36,8 @@ class VirtualDirParameters: ScriptMapUpdate: str Server: Incomplete def __init__(self, **kw) -> None: ... - def is_root(self): ... - def split_path(self): ... + def is_root(self) -> bool: ... + def split_path(self) -> list[str]: ... class ScriptMapParams: Extension: Incomplete @@ -56,43 +59,43 @@ class ISAPIParameters: verbose: int -def log(level, what) -> None: ... +def log(level: int, what: object) -> None: ... class InstallationError(Exception): ... class ItemNotFound(InstallationError): ... class ConfigurationError(InstallationError): ... -def FindPath(options, server, name): ... -def LocateWebServerPath(description): ... -def GetWebServer(description: Incomplete | None = None): ... +def FindPath(options, server: str | bytes | bytearray, name: str) -> str: ... +def LocateWebServerPath(description: str): ... +def GetWebServer(description: str | None = None): ... def LoadWebServer(path): ... -def FindWebServer(options, server_desc): ... -def split_path(path): ... +def FindWebServer(options, server_desc: str | bytes | bytearray | None) -> str: ... +def split_path(path: str) -> list[str]: ... def CreateDirectory(params, options): ... -def AssignScriptMaps(script_maps, target, update: str = "replace") -> None: ... +def AssignScriptMaps(script_maps: Iterable[ScriptMapParams], target, update: str = "replace") -> None: ... def get_unique_items(sequence, reference): ... def CreateISAPIFilter(filterParams, options): ... def DeleteISAPIFilter(filterParams, options) -> None: ... def AddExtensionFiles(params, options) -> None: ... def DeleteExtensionFileRecords(params, options) -> None: ... -def CheckLoaderModule(dll_name) -> None: ... +def CheckLoaderModule(dll_name: StrOrBytesPath) -> None: ... 
def Install(params, options) -> None: ... def RemoveDirectory(params, options) -> None: ... def RemoveScriptMaps(vd_params, options) -> None: ... def Uninstall(params, options) -> None: ... -def GetLoaderModuleName(mod_name, check_module: Incomplete | None = None): ... -def InstallModule(conf_module_name, params, options, log=...) -> None: ... -def UninstallModule(conf_module_name, params, options, log=...) -> None: ... +def GetLoaderModuleName(mod_name: StrPath, check_module: bool | None = None) -> str: ... +def InstallModule(conf_module_name: StrPath, params, options, log: Callable[[int, str], Unused] = ...) -> None: ... +def UninstallModule(conf_module_name: StrPath, params, options, log: Callable[[int, str], Unused] = ...) -> None: ... -standard_arguments: Incomplete +standard_arguments: Final[dict[Literal["install", "remove"], Callable[..., Incomplete]]] -def build_usage(handler_map): ... +def build_usage(handler_map: Mapping[str, object]) -> str: ... def MergeStandardOptions(options, params) -> None: ... def HandleCommandLine( params, - argv: Incomplete | None = None, - conf_module_name: Incomplete | None = None, + argv: SupportsGetItem[int, str] | None = None, + conf_module_name: str | None = None, default_arg: str = "install", - opt_parser: Incomplete | None = None, - custom_arg_handlers={}, + opt_parser: OptionParser | None = None, + custom_arg_handlers: Mapping[str, object] = {}, ) -> None: ... diff --git a/stubs/pywin32/isapi/isapicon.pyi b/stubs/pywin32/isapi/isapicon.pyi index 33dc5d623dcd..33ee18759595 100644 --- a/stubs/pywin32/isapi/isapicon.pyi +++ b/stubs/pywin32/isapi/isapicon.pyi @@ -1,86 +1,86 @@ -from _typeshed import Incomplete +from typing import Final -HTTP_CONTINUE: int -HTTP_SWITCHING_PROTOCOLS: int -HTTP_PROCESSING: int -HTTP_OK: int -HTTP_CREATED: int -HTTP_ACCEPTED: int -HTTP_NON_AUTHORITATIVE: int -HTTP_NO_CONTENT: int -HTTP_RESET_CONTENT: int -HTTP_PARTIAL_CONTENT: int -HTTP_MULTI_STATUS: int -HTTP_MULTIPLE_CHOICES: int -HTTP_MOVED_PERMANENTLY: int -HTTP_MOVED_TEMPORARILY: int -HTTP_SEE_OTHER: int -HTTP_NOT_MODIFIED: int -HTTP_USE_PROXY: int -HTTP_TEMPORARY_REDIRECT: int -HTTP_BAD_REQUEST: int -HTTP_UNAUTHORIZED: int -HTTP_PAYMENT_REQUIRED: int -HTTP_FORBIDDEN: int -HTTP_NOT_FOUND: int -HTTP_METHOD_NOT_ALLOWED: int -HTTP_NOT_ACCEPTABLE: int -HTTP_PROXY_AUTHENTICATION_REQUIRED: int -HTTP_REQUEST_TIME_OUT: int -HTTP_CONFLICT: int -HTTP_GONE: int -HTTP_LENGTH_REQUIRED: int -HTTP_PRECONDITION_FAILED: int -HTTP_REQUEST_ENTITY_TOO_LARGE: int -HTTP_REQUEST_URI_TOO_LARGE: int -HTTP_UNSUPPORTED_MEDIA_TYPE: int -HTTP_RANGE_NOT_SATISFIABLE: int -HTTP_EXPECTATION_FAILED: int -HTTP_UNPROCESSABLE_ENTITY: int -HTTP_INTERNAL_SERVER_ERROR: int -HTTP_NOT_IMPLEMENTED: int -HTTP_BAD_GATEWAY: int -HTTP_SERVICE_UNAVAILABLE: int -HTTP_GATEWAY_TIME_OUT: int -HTTP_VERSION_NOT_SUPPORTED: int -HTTP_VARIANT_ALSO_VARIES: int -HSE_STATUS_SUCCESS: int -HSE_STATUS_SUCCESS_AND_KEEP_CONN: int -HSE_STATUS_PENDING: int -HSE_STATUS_ERROR: int -SF_NOTIFY_SECURE_PORT: int -SF_NOTIFY_NONSECURE_PORT: int -SF_NOTIFY_READ_RAW_DATA: int -SF_NOTIFY_PREPROC_HEADERS: int -SF_NOTIFY_AUTHENTICATION: int -SF_NOTIFY_URL_MAP: int -SF_NOTIFY_ACCESS_DENIED: int -SF_NOTIFY_SEND_RESPONSE: int -SF_NOTIFY_SEND_RAW_DATA: int -SF_NOTIFY_LOG: int -SF_NOTIFY_END_OF_REQUEST: int -SF_NOTIFY_END_OF_NET_SESSION: int -SF_NOTIFY_ORDER_HIGH: int -SF_NOTIFY_ORDER_MEDIUM: int -SF_NOTIFY_ORDER_LOW: int -SF_NOTIFY_ORDER_DEFAULT: int -SF_NOTIFY_ORDER_MASK: Incomplete -SF_STATUS_REQ_FINISHED: int -SF_STATUS_REQ_FINISHED_KEEP_CONN: 
Incomplete -SF_STATUS_REQ_NEXT_NOTIFICATION: Incomplete -SF_STATUS_REQ_HANDLED_NOTIFICATION: Incomplete -SF_STATUS_REQ_ERROR: Incomplete -SF_STATUS_REQ_READ_NEXT: Incomplete -HSE_IO_SYNC: int -HSE_IO_ASYNC: int -HSE_IO_DISCONNECT_AFTER_SEND: int -HSE_IO_SEND_HEADERS: int -HSE_IO_NODELAY: int -HSE_IO_FINAL_SEND: int -HSE_IO_CACHE_RESPONSE: int -HSE_EXEC_URL_NO_HEADERS: int -HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR: int -HSE_EXEC_URL_IGNORE_VALIDATION_AND_RANGE: int -HSE_EXEC_URL_DISABLE_CUSTOM_ERROR: int -HSE_EXEC_URL_SSI_CMD: int -HSE_EXEC_URL_HTTP_CACHE_ELIGIBLE: int +HTTP_CONTINUE: Final = 100 +HTTP_SWITCHING_PROTOCOLS: Final = 101 +HTTP_PROCESSING: Final = 102 +HTTP_OK: Final = 200 +HTTP_CREATED: Final = 201 +HTTP_ACCEPTED: Final = 202 +HTTP_NON_AUTHORITATIVE: Final = 203 +HTTP_NO_CONTENT: Final = 204 +HTTP_RESET_CONTENT: Final = 205 +HTTP_PARTIAL_CONTENT: Final = 206 +HTTP_MULTI_STATUS: Final = 207 +HTTP_MULTIPLE_CHOICES: Final = 300 +HTTP_MOVED_PERMANENTLY: Final = 301 +HTTP_MOVED_TEMPORARILY: Final = 302 +HTTP_SEE_OTHER: Final = 303 +HTTP_NOT_MODIFIED: Final = 304 +HTTP_USE_PROXY: Final = 305 +HTTP_TEMPORARY_REDIRECT: Final = 307 +HTTP_BAD_REQUEST: Final = 400 +HTTP_UNAUTHORIZED: Final = 401 +HTTP_PAYMENT_REQUIRED: Final = 402 +HTTP_FORBIDDEN: Final = 403 +HTTP_NOT_FOUND: Final = 404 +HTTP_METHOD_NOT_ALLOWED: Final = 405 +HTTP_NOT_ACCEPTABLE: Final = 406 +HTTP_PROXY_AUTHENTICATION_REQUIRED: Final = 407 +HTTP_REQUEST_TIME_OUT: Final = 408 +HTTP_CONFLICT: Final = 409 +HTTP_GONE: Final = 410 +HTTP_LENGTH_REQUIRED: Final = 411 +HTTP_PRECONDITION_FAILED: Final = 412 +HTTP_REQUEST_ENTITY_TOO_LARGE: Final = 413 +HTTP_REQUEST_URI_TOO_LARGE: Final = 414 +HTTP_UNSUPPORTED_MEDIA_TYPE: Final = 415 +HTTP_RANGE_NOT_SATISFIABLE: Final = 416 +HTTP_EXPECTATION_FAILED: Final = 417 +HTTP_UNPROCESSABLE_ENTITY: Final = 422 +HTTP_INTERNAL_SERVER_ERROR: Final = 500 +HTTP_NOT_IMPLEMENTED: Final = 501 +HTTP_BAD_GATEWAY: Final = 502 +HTTP_SERVICE_UNAVAILABLE: Final = 503 +HTTP_GATEWAY_TIME_OUT: Final = 504 +HTTP_VERSION_NOT_SUPPORTED: Final = 505 +HTTP_VARIANT_ALSO_VARIES: Final = 506 +HSE_STATUS_SUCCESS: Final = 1 +HSE_STATUS_SUCCESS_AND_KEEP_CONN: Final = 2 +HSE_STATUS_PENDING: Final = 3 +HSE_STATUS_ERROR: Final = 4 +SF_NOTIFY_SECURE_PORT: Final = 0x00000001 +SF_NOTIFY_NONSECURE_PORT: Final = 0x00000002 +SF_NOTIFY_READ_RAW_DATA: Final = 0x00008000 +SF_NOTIFY_PREPROC_HEADERS: Final = 0x00004000 +SF_NOTIFY_AUTHENTICATION: Final = 0x00002000 +SF_NOTIFY_URL_MAP: Final = 0x00001000 +SF_NOTIFY_ACCESS_DENIED: Final = 0x00000800 +SF_NOTIFY_SEND_RESPONSE: Final = 0x00000040 +SF_NOTIFY_SEND_RAW_DATA: Final = 0x00000400 +SF_NOTIFY_LOG: Final = 0x00000200 +SF_NOTIFY_END_OF_REQUEST: Final = 0x00000080 +SF_NOTIFY_END_OF_NET_SESSION: Final = 0x00000100 +SF_NOTIFY_ORDER_HIGH: Final = 0x00080000 +SF_NOTIFY_ORDER_MEDIUM: Final = 0x00040000 +SF_NOTIFY_ORDER_LOW: Final = 0x00020000 +SF_NOTIFY_ORDER_DEFAULT: Final = SF_NOTIFY_ORDER_LOW +SF_NOTIFY_ORDER_MASK: Final = 917504 +SF_STATUS_REQ_FINISHED: Final = 134217728 +SF_STATUS_REQ_FINISHED_KEEP_CONN: Final = 134217729 +SF_STATUS_REQ_NEXT_NOTIFICATION: Final = 134217730 +SF_STATUS_REQ_HANDLED_NOTIFICATION: Final = 134217731 +SF_STATUS_REQ_ERROR: Final = 134217732 +SF_STATUS_REQ_READ_NEXT: Final = 134217733 +HSE_IO_SYNC: Final = 0x00000001 +HSE_IO_ASYNC: Final = 0x00000002 +HSE_IO_DISCONNECT_AFTER_SEND: Final = 0x00000004 +HSE_IO_SEND_HEADERS: Final = 0x00000008 +HSE_IO_NODELAY: Final = 0x00001000 +HSE_IO_FINAL_SEND: Final = 0x00000010 +HSE_IO_CACHE_RESPONSE: Final = 0x00000020 
+HSE_EXEC_URL_NO_HEADERS: Final = 0x02 +HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR: Final = 0x04 +HSE_EXEC_URL_IGNORE_VALIDATION_AND_RANGE: Final = 0x10 +HSE_EXEC_URL_DISABLE_CUSTOM_ERROR: Final = 0x20 +HSE_EXEC_URL_SSI_CMD: Final = 0x40 +HSE_EXEC_URL_HTTP_CACHE_ELIGIBLE: Final = 0x80 diff --git a/stubs/pywin32/isapi/simple.pyi b/stubs/pywin32/isapi/simple.pyi index 594f30bcd7d1..5d913c510d35 100644 --- a/stubs/pywin32/isapi/simple.pyi +++ b/stubs/pywin32/isapi/simple.pyi @@ -1,6 +1,6 @@ class SimpleExtension: def GetExtensionVersion(self, vi) -> None: ... - def HttpExtensionProc(self, control_block) -> None: ... + def HttpExtensionProc(self, control_block) -> int | None: ... def TerminateExtension(self, status) -> None: ... class SimpleFilter: diff --git a/stubs/pywin32/isapi/threaded_extension.pyi b/stubs/pywin32/isapi/threaded_extension.pyi index 0a463a6470e5..ead5c5ddcb1e 100644 --- a/stubs/pywin32/isapi/threaded_extension.pyi +++ b/stubs/pywin32/isapi/threaded_extension.pyi @@ -1,26 +1,28 @@ import threading -from _typeshed import Incomplete +from _typeshed import Unused +from collections.abc import Callable +from typing import Final import isapi.simple -ISAPI_REQUEST: int -ISAPI_SHUTDOWN: int +ISAPI_REQUEST: Final = 1 +ISAPI_SHUTDOWN: Final = 2 class WorkerThread(threading.Thread): running: bool - io_req_port: Incomplete - extension: Incomplete - def __init__(self, extension, io_req_port) -> None: ... + io_req_port: int + extension: ThreadPoolExtension + def __init__(self, extension: ThreadPoolExtension, io_req_port: int) -> None: ... def call_handler(self, cblock) -> None: ... class ThreadPoolExtension(isapi.simple.SimpleExtension): max_workers: int worker_shutdown_wait: int - workers: Incomplete - dispatch_map: Incomplete - io_req_port: Incomplete + workers: list[WorkerThread] + dispatch_map: dict[int, Callable[..., Unused]] + io_req_port: int def GetExtensionVersion(self, vi) -> None: ... - def HttpExtensionProc(self, control_block): ... + def HttpExtensionProc(self, control_block) -> int: ... def TerminateExtension(self, status) -> None: ... def DispatchConnection(self, errCode, bytes, key, overlapped) -> None: ... def Dispatch(self, ecb) -> None: ... From 9d9b19a84510beb82c6e5389818c67f12fae2bac Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 28 Apr 2025 15:41:28 +0400 Subject: [PATCH 265/388] Deprecate `TarInfo.tarfile` attribute (#13894) --- stdlib/tarfile.pyi | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/stdlib/tarfile.pyi b/stdlib/tarfile.pyi index cd31b101c886..50546be8e4bb 100644 --- a/stdlib/tarfile.pyi +++ b/stdlib/tarfile.pyi @@ -7,7 +7,7 @@ from collections.abc import Callable, Iterable, Iterator, Mapping from gzip import _ReadableFileobj as _GzipReadableFileobj, _WritableFileobj as _GzipWritableFileobj from types import TracebackType from typing import IO, ClassVar, Literal, Protocol, overload -from typing_extensions import Self, TypeAlias +from typing_extensions import Self, TypeAlias, deprecated __all__ = [ "TarFile", @@ -622,7 +622,6 @@ class TarInfo: offset: int offset_data: int sparse: bytes | None - tarfile: TarFile | None mode: int type: bytes linkname: str @@ -632,6 +631,16 @@ class TarInfo: gname: str pax_headers: Mapping[str, str] def __init__(self, name: str = "") -> None: ... + if sys.version_info >= (3, 13): + @property + @deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.16") + def tarfile(self) -> TarFile | None: ... 
+ @tarfile.setter + @deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.16") + def tarfile(self) -> None: ... + else: + tarfile: TarFile | None + @classmethod def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... @classmethod From 97718d0e786d043f961f2313dfc52e2134b9640f Mon Sep 17 00:00:00 2001 From: vidhyavijayan3 <149491240+vidhyavijayan3@users.noreply.github.com> Date: Mon, 28 Apr 2025 17:14:40 +0530 Subject: [PATCH 266/388] Fix base class compatibility for IntFlag in Python 3.11+ to resolve inverted type issue (#13854) --- stdlib/enum.pyi | 2 ++ 1 file changed, 2 insertions(+) diff --git a/stdlib/enum.pyi b/stdlib/enum.pyi index 8c88b26a3a2f..26f198867113 100644 --- a/stdlib/enum.pyi +++ b/stdlib/enum.pyi @@ -299,6 +299,7 @@ if sys.version_info >= (3, 11): def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... def __xor__(self, other: int) -> Self: ... + def __invert__(self) -> Self: ... __ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ @@ -309,6 +310,7 @@ else: def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... def __xor__(self, other: int) -> Self: ... + def __invert__(self) -> Self: ... __ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ From 7192c1e7c9e375f755c497431c6d648ba4e2f440 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 28 Apr 2025 14:01:44 +0200 Subject: [PATCH 267/388] Mark ExifRead as obsolete (#13899) --- stubs/ExifRead/METADATA.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/stubs/ExifRead/METADATA.toml b/stubs/ExifRead/METADATA.toml index bf04067ade5c..b65bbc46611c 100644 --- a/stubs/ExifRead/METADATA.toml +++ b/stubs/ExifRead/METADATA.toml @@ -1,2 +1,3 @@ version = "3.0.*" upstream_repository = "https://github.com/ianare/exif-py" +obsolete_since = "3.1.0" # Released on 2025-04-25 From 1420e501d7196deec1e1d1b47949af6d8c4ada58 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 28 Apr 2025 16:05:34 +0400 Subject: [PATCH 268/388] [hvac] Clarify and improve some annotations (#13886) --- stubs/hvac/hvac/aws_utils.pyi | 4 +++- stubs/hvac/hvac/exceptions.pyi | 7 +++---- stubs/hvac/hvac/utils.pyi | 6 ++++-- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/stubs/hvac/hvac/aws_utils.pyi b/stubs/hvac/hvac/aws_utils.pyi index 134fd4b2f9ad..a63a1b022c81 100644 --- a/stubs/hvac/hvac/aws_utils.pyi +++ b/stubs/hvac/hvac/aws_utils.pyi @@ -1,9 +1,11 @@ +import requests + class SigV4Auth: access_key: str secret_key: str session_token: str | None region: str def __init__(self, access_key: str, secret_key: str, session_token: str | None = None, region: str = "us-east-1") -> None: ... - def add_auth(self, request) -> None: ... + def add_auth(self, request: requests.PreparedRequest) -> None: ... def generate_sigv4_auth_request(header_value: str | None = None): ... 
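For context, a minimal usage sketch of the `SigV4Auth.add_auth` signature annotated above (illustrative only, not part of the patch; the key values and URL are placeholders):

```python
# Illustrative sketch: exercising hvac.aws_utils.SigV4Auth.add_auth with a
# requests.PreparedRequest, matching the annotation above.  Key values and
# the URL are placeholders.
import requests
from hvac.aws_utils import SigV4Auth

auth = SigV4Auth(access_key="AKIA...", secret_key="...", region="us-east-1")
prepared = requests.Request("POST", "https://sts.amazonaws.com/").prepare()
auth.add_auth(prepared)  # signs the request in place by adding SigV4 headers; returns None
```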
diff --git a/stubs/hvac/hvac/exceptions.pyi b/stubs/hvac/hvac/exceptions.pyi index 9bf16de0939b..67c2ed53b81a 100644 --- a/stubs/hvac/hvac/exceptions.pyi +++ b/stubs/hvac/hvac/exceptions.pyi @@ -1,9 +1,8 @@ from collections.abc import Iterable -from typing import Any from typing_extensions import Self class VaultError(Exception): - errors: Iterable[Any] | str | None + errors: Iterable[str] | str | None method: str | None url: str | None text: str | None @@ -11,7 +10,7 @@ class VaultError(Exception): def __init__( self, message: str | None = None, - errors: Iterable[Any] | str | None = None, + errors: Iterable[str] | str | None = None, method: str | None = None, url: str | None = None, text: str | None = None, @@ -22,7 +21,7 @@ class VaultError(Exception): cls, status_code: int, message: str | None = ..., - errors: Iterable[Any] | str | None = ..., + errors: Iterable[str] | str | None = ..., method: str | None = ..., url: str | None = ..., text: str | None = ..., diff --git a/stubs/hvac/hvac/utils.pyi b/stubs/hvac/hvac/utils.pyi index 75e86eb614c2..5eaf5803bc3e 100644 --- a/stubs/hvac/hvac/utils.pyi +++ b/stubs/hvac/hvac/utils.pyi @@ -16,7 +16,7 @@ def raise_for_error( url: str, status_code: int, message: str | None = None, - errors: Iterable[Any] | str | None = None, + errors: Iterable[str] | str | None = None, text: str | None = None, json: object | None = None, ) -> NoReturn: ... @@ -42,4 +42,6 @@ def comma_delimited_to_list(list_param: Iterable[_T]) -> Iterable[_T]: ... # the docstring states that this function returns a bool, but the code does not return anything def validate_pem_format(param_name: str, param_argument: str) -> None: ... def remove_nones(params: Mapping[_K, _V | None]) -> Mapping[_K, _V]: ... -def format_url(format_str: str, *args: Any, **kwargs: Any) -> str: ... +def format_url( + format_str: str, *args: object, **kwargs: object +) -> str: ... 
# values are passed to builtins.str, which takes an object type From 317922bf2b53ff5391cd2dac164e563671cfb2ee Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 28 Apr 2025 16:27:34 +0400 Subject: [PATCH 269/388] Bump reportlab to 4.4.* (#13857) --- stubs/reportlab/@tests/stubtest_allowlist.txt | 1 - stubs/reportlab/METADATA.toml | 2 +- .../reportlab/reportlab/graphics/renderPM.pyi | 18 ++- stubs/reportlab/reportlab/lib/rl_accel.pyi | 16 +-- stubs/reportlab/reportlab/lib/utils.pyi | 103 ++++++++++-------- .../reportlab/pdfbase/pdfmetrics.pyi | 30 ++--- stubs/reportlab/reportlab/pdfbase/ttfonts.pyi | 58 +++++----- stubs/reportlab/reportlab/pdfgen/canvas.pyi | 5 +- .../reportlab/reportlab/pdfgen/textobject.pyi | 24 +++- .../reportlab/platypus/flowables.pyi | 28 ++++- stubs/reportlab/reportlab/platypus/tables.pyi | 4 +- stubs/reportlab/reportlab/rl_config.pyi | 18 +-- stubs/reportlab/reportlab/rl_settings.pyi | 20 ++-- 13 files changed, 200 insertions(+), 127 deletions(-) diff --git a/stubs/reportlab/@tests/stubtest_allowlist.txt b/stubs/reportlab/@tests/stubtest_allowlist.txt index b11ffbf4b64b..c96ce50c36d6 100644 --- a/stubs/reportlab/@tests/stubtest_allowlist.txt +++ b/stubs/reportlab/@tests/stubtest_allowlist.txt @@ -20,7 +20,6 @@ reportlab.graphics.shapes.Drawing.wrap reportlab\.platypus\.(doctemplate\.|flowables\.|tableofcontents\.)?[A-Za-z_]+\.split reportlab\.platypus\.(doctemplate\.|flowables\.|tableofcontents\.)?[A-Za-z_]+\.wrap reportlab.platypus.multicol.MultiCol.split -reportlab.platypus.multicol.MultiCol.wrap reportlab.platypus.para.FastPara.split reportlab.platypus.para.FastPara.wrap reportlab.platypus.para.Para.split diff --git a/stubs/reportlab/METADATA.toml b/stubs/reportlab/METADATA.toml index 6159d1b17008..d1a58be6ea2c 100644 --- a/stubs/reportlab/METADATA.toml +++ b/stubs/reportlab/METADATA.toml @@ -1,4 +1,4 @@ -version = "4.3.1" +version = "4.4.*" # GitHub mirror of https://hg.reportlab.com/hg-public/reportlab/file upstream_repository = "https://github.com/MrBitBucket/reportlab-mirror" diff --git a/stubs/reportlab/reportlab/graphics/renderPM.pyi b/stubs/reportlab/reportlab/graphics/renderPM.pyi index 41974b755976..75d6b2ea26fc 100644 --- a/stubs/reportlab/reportlab/graphics/renderPM.pyi +++ b/stubs/reportlab/reportlab/graphics/renderPM.pyi @@ -50,9 +50,20 @@ class PMCanvas: def fillstrokepath(self, stroke: int = 1, fill: int = 1) -> None: ... def bezierArcCCW(self, cx, cy, rx, ry, theta0, theta1): ... def addEllipsoidalArc(self, cx, cy, rx, ry, ang1, ang2) -> None: ... - def drawCentredString(self, x, y, text, text_anchor: str = "middle") -> None: ... - def drawRightString(self, text, x, y) -> None: ... - def drawString(self, x, y, text, _fontInfo: Incomplete | None = None, text_anchor: str = "left") -> None: ... + def drawCentredString( + self, x: float, y: float, text: str, text_anchor: str = "middle", direction: str | None = None, shaping: bool = False + ) -> None: ... + def drawRightString(self, text: str, x: float, y: float, direction: str | None = None) -> None: ... + def drawString( + self, + x: float, + y: float, + text: str, + _fontInfo: Incomplete | None = None, + text_anchor: str = "left", + direction: str | None = None, + shaping: bool = False, + ) -> None: ... def line(self, x1, y1, x2, y2) -> None: ... def rect(self, x, y, width, height, stroke: int = 1, fill: int = 1) -> None: ... def roundRect(self, x, y, width, height, rx, ry) -> None: ... @@ -73,7 +84,6 @@ class PMCanvas: strokeWidth: Incomplete def setLineWidth(self, width) -> None: ... 
def stringWidth(self, text, fontName: Incomplete | None = None, fontSize: Incomplete | None = None): ... - def shapedText(self, text) -> tuple[Incomplete, Incomplete]: ... def drawToPMCanvas( d: Drawing, diff --git a/stubs/reportlab/reportlab/lib/rl_accel.pyi b/stubs/reportlab/reportlab/lib/rl_accel.pyi index b0a9336227b7..48a6d66e926e 100644 --- a/stubs/reportlab/reportlab/lib/rl_accel.pyi +++ b/stubs/reportlab/reportlab/lib/rl_accel.pyi @@ -1,14 +1,16 @@ +from typing import Literal + def fp_str(*a): ... def unicode2T1(utext, fonts): ... -def instanceStringWidthT1(self, text, size, encoding: str = "utf8"): ... -def instanceStringWidthTTF(self, text, size, encoding: str = "utf8"): ... -def hex32(i): ... -def add32(x, y): ... -def calcChecksum(data): ... +def instanceStringWidthT1(self, text: str, size: float, encoding: str = "utf8") -> float: ... +def instanceStringWidthTTF(self, text: str, size: float, encoding: str = "utf8") -> float: ... +def hex32(i) -> str: ... +def add32(x: int, y: int) -> int: ... +def calcChecksum(data: str | bytes) -> int: ... def escapePDF(s): ... -def asciiBase85Encode(input): ... +def asciiBase85Encode(input: str) -> str: ... def asciiBase85Decode(input): ... -def sameFrag(f, g): ... +def sameFrag(f, g) -> bool | Literal[0]: ... __all__ = [ "fp_str", diff --git a/stubs/reportlab/reportlab/lib/utils.pyi b/stubs/reportlab/reportlab/lib/utils.pyi index 0eb199be83bc..08485eb2df59 100644 --- a/stubs/reportlab/reportlab/lib/utils.pyi +++ b/stubs/reportlab/reportlab/lib/utils.pyi @@ -1,7 +1,10 @@ import datetime +import zipimport from _typeshed import Incomplete, SupportsItems -from collections.abc import Generator -from typing import Final, Literal, TypeVar +from collections.abc import Generator, Iterable +from os import PathLike +from types import TracebackType +from typing import AnyStr, Final, Literal, TypeVar, overload from reportlab.lib.rltempfile import get_rl_tempdir as get_rl_tempdir, get_rl_tempfile as get_rl_tempfile @@ -24,37 +27,37 @@ __UNSET__: Final[_UNSET_] isPyPy: bool -def isFunction(v): ... -def isMethod(v, mt=...): ... -def isModule(v): ... -def isSeq(v, _st=...): ... -def isNative(v): ... - -strTypes: Incomplete - -def asBytes(v, enc: str = "utf8"): ... -def asUnicode(v, enc: str = "utf8"): ... -def asUnicodeEx(v, enc: str = "utf8"): ... -def asNative(v, enc: str = "utf8"): ... -def int2Byte(i): ... -def isStr(v): ... -def isBytes(v): ... -def isUnicode(v): ... -def isClass(v): ... -def isNonPrimitiveInstance(x): ... -def instantiated(v): ... -def bytestr(x, enc: str = "utf8"): ... -def encode_label(args): ... -def decode_label(label): ... -def rawUnicode(s): ... -def rawBytes(s): ... - -rl_exec: Incomplete - -def char2int(s): ... -def rl_reraise(t, v, b: Incomplete | None = None) -> None: ... +def isFunction(v: object) -> bool: ... +def isMethod(v: object, mt=...) -> bool: ... +def isModule(v: object) -> bool: ... +def isSeq(v: object, _st=...) -> bool: ... +def isNative(v: object) -> bool: ... + +strTypes: tuple[type[str], type[bytes]] + +def asBytes(v: str | bytes, enc: str = "utf8") -> bytes: ... +def asUnicode(v: str | bytes, enc: str = "utf8") -> str: ... +def asUnicodeEx(v: str | bytes, enc: str = "utf8") -> str: ... +def asNative(v: str | bytes, enc: str = "utf8") -> str: ... +def int2Byte(i: int) -> bytes: ... +def isStr(v: object) -> bool: ... +def isBytes(v: object) -> bool: ... +def isUnicode(v: object) -> bool: ... +def isClass(v: object) -> bool: ... +def isNonPrimitiveInstance(x: object) -> bool: ... 
+def instantiated(v: object) -> bool: ... +def bytestr(x: object, enc: str = "utf8") -> bytes: ... +def encode_label(args) -> str: ... +def decode_label(label: str): ... +def rawUnicode(s: str | bytes) -> str: ... +def rawBytes(s: str | bytes) -> bytes: ... + +rl_exec = exec + +def char2int(s: int | str | bytes) -> int: ... +def rl_reraise(t, v: BaseException, b: TracebackType | None = None) -> None: ... def rl_add_builtins(**kwd) -> None: ... -def zipImported(ldr: Incomplete | None = None): ... +def zipImported(ldr: zipimport.zipimporter | None = None) -> zipimport.zipimporter | None: ... class CIDict(dict[_KT, _VT]): def __init__(self, *args, **kwds) -> None: ... @@ -64,14 +67,14 @@ def markfilename(filename, creatorcode: Incomplete | None = None, filetype: Inco __rl_loader__: Incomplete -def rl_glob(pattern, glob=...): ... -def isFileSystemDistro(): ... -def isCompactDistro(): ... -def isSourceDistro(): ... -def normalize_path(p): ... +def rl_glob(pattern: AnyStr, glob=...) -> list[AnyStr]: ... +def isFileSystemDistro() -> bool: ... +def isCompactDistro() -> bool: ... +def isSourceDistro() -> bool: ... +def normalize_path(p: PathLike[AnyStr]) -> PathLike[AnyStr]: ... def recursiveImport(modulename, baseDir: Incomplete | None = None, noCWD: int = 0, debug: int = 0): ... -haveImages: Incomplete +haveImages: Final[bool] class ArgvDictValue: value: Incomplete @@ -147,15 +150,18 @@ class _FmtSelfDict: class FmtSelfDict: ... -def simpleSplit(text, fontName, fontSize, maxWidth): ... -def escapeTextOnce(text): ... +def simpleSplit(text: str | bytes, fontName: str | None, fontSize: float, maxWidth: float | None): ... +@overload +def escapeTextOnce(text: None) -> None: ... +@overload +def escapeTextOnce(text: str | bytes) -> str: ... def fileName2FSEnc(fn): ... def prev_this_next(items): ... -def commasplit(s): ... -def commajoin(l): ... +def commasplit(s: str | bytes) -> list[str]: ... +def commajoin(l: Iterable[str | bytes]) -> str: ... def findInPaths(fn, paths, isfile: bool = True, fail: bool = False): ... -def annotateException(msg, enc: str = "utf8", postMsg: str = "", sep: str = " ") -> None: ... -def escapeOnce(data): ... +def annotateException(msg: str, enc: str = "utf8", postMsg: str = "", sep: str = " ") -> None: ... +def escapeOnce(data: str) -> str: ... class IdentStr(str): def __new__(cls, value): ... @@ -188,3 +194,12 @@ def recursiveGetAttr(obj, name, g: Incomplete | None = None): ... def recursiveSetAttr(obj, name, value) -> None: ... def recursiveDelAttr(obj, name) -> None: ... def yieldNoneSplits(L) -> Generator[Incomplete, None, None]: ... + +class KlassStore: + lim: int + store: dict[str, type] + def __init__(self, lim: int = 127) -> None: ... + def add(self, k: str, v: type) -> None: ... + def __contains__(self, k) -> bool: ... + def __getitem__(self, k: str) -> type: ... + def get(self, k, default=None): ... diff --git a/stubs/reportlab/reportlab/pdfbase/pdfmetrics.pyi b/stubs/reportlab/reportlab/pdfbase/pdfmetrics.pyi index dfe023a0936b..3eac7bcd4829 100644 --- a/stubs/reportlab/reportlab/pdfbase/pdfmetrics.pyi +++ b/stubs/reportlab/reportlab/pdfbase/pdfmetrics.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, StrOrBytesPath from typing import Final from reportlab.lib.rl_accel import unicode2T1 as unicode2T1 @@ -10,7 +10,7 @@ standardEncodings: Incomplete class FontError(Exception): ... class FontNotFoundError(Exception): ... -def parseAFMFile(afmFileName): ... 
+def parseAFMFile(afmFileName: StrOrBytesPath) -> tuple[dict[Incomplete, Incomplete], list[Incomplete]]: ... class TypeFace: name: Incomplete @@ -51,15 +51,15 @@ class Font: encoding: Incomplete encName: Incomplete substitutionFonts: Incomplete - isShaped: bool + shapable: bool def __init__(self, name, faceName, encName, substitutionFonts: Incomplete | None = None) -> None: ... - def stringWidth(self, text, size, encoding: str = "utf8"): ... + def stringWidth(self, text: str | bytes, size: float, encoding: str = "utf8") -> float: ... def addObjects(self, doc) -> None: ... -PFB_MARKER: Incomplete -PFB_ASCII: Incomplete -PFB_BINARY: Incomplete -PFB_EOF: Incomplete +PFB_MARKER: Final[str] +PFB_ASCII: Final[str] +PFB_BINARY: Final[str] +PFB_EOF: Final[str] class EmbeddedType1Face(TypeFace): afmFileName: Incomplete @@ -81,13 +81,13 @@ def registerFontFamily( def registerFont(font) -> None: ... def getTypeFace(faceName): ... def getEncoding(encName): ... -def findFontAndRegister(fontName): ... -def getFont(fontName): ... -def getAscentDescent(fontName, fontSize: Incomplete | None = None): ... -def getAscent(fontName, fontSize: Incomplete | None = None): ... -def getDescent(fontName, fontSize: Incomplete | None = None): ... -def getRegisteredFontNames(): ... -def stringWidth(text, fontName, fontSize, encoding: str = "utf8"): ... +def findFontAndRegister(fontName: str) -> Font: ... +def getFont(fontName: str) -> Font: ... +def getAscentDescent(fontName: str, fontSize: float | None = None): ... +def getAscent(fontName: str, fontSize: float | None = None): ... +def getDescent(fontName: str, fontSize: float | None = None): ... +def getRegisteredFontNames() -> list[Incomplete]: ... +def stringWidth(text: str | bytes, fontName: str, fontSize: float, encoding: str = "utf8") -> float: ... def dumpFontData() -> None: ... def test3widths(texts) -> None: ... def testStringWidthAlgorithms() -> None: ... diff --git a/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi b/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi index 2317449089b6..7a77835e46d6 100644 --- a/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi +++ b/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi @@ -1,6 +1,7 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, ReadableBuffer, StrOrBytesPath from typing import Final, NamedTuple from typing_extensions import Self +from weakref import WeakKeyDictionary from reportlab.pdfbase import pdfdoc, pdfmetrics @@ -8,25 +9,25 @@ __version__: Final[str] class TTFError(pdfdoc.PDFError): ... -def SUBSETN(n, table=...): ... -def makeToUnicodeCMap(fontname, subset): ... +def SUBSETN(n, table: ReadableBuffer | None = ...) -> bytes: ... +def makeToUnicodeCMap(fontname: str, subset) -> str: ... def splice(stream, offset, value): ... -GF_ARG_1_AND_2_ARE_WORDS: Incomplete -GF_ARGS_ARE_XY_VALUES: Incomplete -GF_ROUND_XY_TO_GRID: Incomplete -GF_WE_HAVE_A_SCALE: Incomplete -GF_RESERVED: Incomplete -GF_MORE_COMPONENTS: Incomplete -GF_WE_HAVE_AN_X_AND_Y_SCALE: Incomplete -GF_WE_HAVE_A_TWO_BY_TWO: Incomplete -GF_WE_HAVE_INSTRUCTIONS: Incomplete -GF_USE_MY_METRICS: Incomplete -GF_OVERLAP_COMPOUND: Incomplete -GF_SCALED_COMPONENT_OFFSET: Incomplete -GF_UNSCALED_COMPONENT_OFFSET: Incomplete - -def TTFOpenFile(fn): ... 
+GF_ARG_1_AND_2_ARE_WORDS: Final = 1 +GF_ARGS_ARE_XY_VALUES: Final = 2 +GF_ROUND_XY_TO_GRID: Final = 4 +GF_WE_HAVE_A_SCALE: Final = 8 +GF_RESERVED: Final = 16 +GF_MORE_COMPONENTS: Final = 32 +GF_WE_HAVE_AN_X_AND_Y_SCALE: Final = 64 +GF_WE_HAVE_A_TWO_BY_TWO: Final = 128 +GF_WE_HAVE_INSTRUCTIONS: Final = 256 +GF_USE_MY_METRICS: Final = 512 +GF_OVERLAP_COMPOUND: Final = 1024 +GF_SCALED_COMPONENT_OFFSET: Final = 2048 +GF_UNSCALED_COMPONENT_OFFSET: Final = 4096 + +def TTFOpenFile(fn: StrOrBytesPath) -> tuple[StrOrBytesPath,]: ... class TTFontParser: ttfVersions: Incomplete @@ -138,18 +139,18 @@ class TTFont: subsets: Incomplete def __init__(self, asciiReadable: Incomplete | None = None, ttf: Incomplete | None = None) -> None: ... - fontName: Incomplete - face: Incomplete - encoding: Incomplete - state: Incomplete + fontName: str + face: TTFontFace + encoding: TTEncoding + state: WeakKeyDictionary[Incomplete, State] def __init__( self, - name, + name: str, filename, validate: int = 0, subfontIndex: int = 0, - asciiReadable: Incomplete | None = None, - shaped: Incomplete | None = None, + asciiReadable: int | None = None, + shapable: bool = True, ) -> None: ... def stringWidth(self, text, size, encoding: str = "utf8"): ... def splitString(self, text, doc, encoding: str = "utf-8"): ... @@ -159,9 +160,9 @@ class TTFont: def hbFace(self) -> Incomplete | None: ... def hbFont(self, fontSize: float = 10): ... @property - def isShaped(self) -> bool: ... - @isShaped.setter - def isShaped(self, v) -> None: ... + def shapable(self) -> bool: ... + @shapable.setter + def shapable(self, v) -> None: ... def pdfScale(self, v): ... def unregister(self) -> None: ... @@ -179,6 +180,7 @@ class ShapedStr(str): def __new__(cls, s, shapeData: ShapeData | None = None) -> Self: ... def __radd__(self, other) -> Self: ... +def shapeStr(s: str, fontName: str, fontSize: float): ... def freshTTFont(ttfn, ttfpath, **kwds) -> TTFont: ... def makeShapedFragWord(w, K: list[Incomplete] = [], V: list[Incomplete] = []) -> type[ShapedFragWord]: ... def shapeFragWord(w, features: Incomplete | None = None): ... diff --git a/stubs/reportlab/reportlab/pdfgen/canvas.pyi b/stubs/reportlab/reportlab/pdfgen/canvas.pyi index 96351502e016..7240c215996c 100644 --- a/stubs/reportlab/reportlab/pdfgen/canvas.pyi +++ b/stubs/reportlab/reportlab/pdfgen/canvas.pyi @@ -239,6 +239,7 @@ class Canvas(_PDFColorSetter): charSpace: float = 0, direction: Literal["LTR", "RTL"] | None = None, wordSpace: float | None = None, + shaping: bool = False, ) -> None: ... def drawRightString( self, @@ -249,6 +250,7 @@ class Canvas(_PDFColorSetter): charSpace: float = 0, direction: Literal["LTR", "RTL"] | None = None, wordSpace: float | None = None, + shaping: bool = False, ) -> None: ... def drawCentredString( self, @@ -259,6 +261,7 @@ class Canvas(_PDFColorSetter): charSpace: float = 0, direction: Literal["LTR", "RTL"] | None = None, wordSpace: float | None = None, + shaping: bool = False, ) -> None: ... def drawAlignedString( self, @@ -270,6 +273,7 @@ class Canvas(_PDFColorSetter): charSpace: float = 0, direction: Literal["LTR", "RTL"] | None = None, wordSpace: float | None = None, + shaping: bool = False, ) -> None: ... def getAvailableFonts(self): ... def listLoadedFonts0(self): ... @@ -304,6 +308,5 @@ class Canvas(_PDFColorSetter): @property def acroForm(self): ... def drawBoundary(self, sb, x1: float, y1: float, width: float, height: float) -> None: ... - def shapedText(self, text) -> tuple[Incomplete, Incomplete]: ... 
__all__ = ["Canvas", "ShowBoundaryValue"] diff --git a/stubs/reportlab/reportlab/pdfgen/textobject.pyi b/stubs/reportlab/reportlab/pdfgen/textobject.pyi index 457832b6b594..e0162746b4d8 100644 --- a/stubs/reportlab/reportlab/pdfgen/textobject.pyi +++ b/stubs/reportlab/reportlab/pdfgen/textobject.pyi @@ -1,16 +1,36 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable from typing import Final, Literal from typing_extensions import TypeAlias from reportlab.lib.colors import Color +from reportlab.pdfbase.ttfonts import ShapedStr from reportlab.pdfgen.canvas import Canvas # NOTE: This is slightly different from what toColor accepts and interprets _Color: TypeAlias = Color | tuple[float, float, float, float] | tuple[float, float, float] | list[float] | str __version__: Final[str] -log2vis: None +log2vis: Callable[..., str | None] +BidiStr: type[str] +BidiList: type[list[Incomplete]] +BidiIndex: Incomplete -def fribidiText(text: str, direction: str) -> str: ... +def bidiText(text: str, direction: str | None) -> str: ... +def bidiShapedText( + text: str, direction: str = "RTL", clean: bool = True, fontName: str = "Helvetica", fontSize: int = 10, shaping: bool = False +) -> tuple[ShapedStr | str, float]: ... +def isBidiStr(_: Unused) -> bool: ... +def isBidiList(_: Unused) -> bool: ... +def innerBidiStrWrap(s: str, bidiV: int = -1, bidiL: int = -1) -> str: ... +def bidiStrWrap(s: str, orig: str) -> str: ... +def bidiListWrap(L, orig) -> list[Incomplete]: ... +def bidiFragWord(w: str, direction: str | None = None, bidiV: int = -1, bidiL: int = -1, clean: bool = True): ... +def bidiWordList( + words: list[str] | tuple[str], direction: str = "RTL", clean: bool = True, wx: bool = False +) -> list[Incomplete]: ... + +rtlSupport: bool class _PDFColorSetter: def setFillColorCMYK(self, c: float, m: float, y: float, k: float, alpha: float | None = None) -> None: ... diff --git a/stubs/reportlab/reportlab/platypus/flowables.pyi b/stubs/reportlab/reportlab/platypus/flowables.pyi index ee384d99ca16..328a145ca7d2 100644 --- a/stubs/reportlab/reportlab/platypus/flowables.pyi +++ b/stubs/reportlab/reportlab/platypus/flowables.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, SupportsRead +from _typeshed import Incomplete, SupportsRead, Unused from collections.abc import Callable, Iterable, Sequence from typing import Any, Literal, NoReturn, Protocol from typing_extensions import Self, TypeAlias @@ -52,6 +52,7 @@ __all__ = [ "XBox", "splitLine", "splitLines", + "PlacedStory", ] _HAlignment: TypeAlias = Literal["LEFT", "CENTER", "CENTRE", "RIGHT", 0, 1, 2] @@ -98,7 +99,7 @@ class Flowable: # make everyone happy here, sigh... def drawOn(self, canvas: Canvas, x: float, y: float) -> None: ... def wrapOn(self, canv: Canvas, aW: float, aH: float) -> tuple[float, float]: ... - def wrap(self, aW: float, aH: float, /) -> tuple[float, float]: ... + def wrap(self, aW: float, aH: float) -> tuple[float, float]: ... def minWidth(self) -> float: ... def splitOn(self, canv: Canvas, aW: float, aH: float) -> list[Flowable]: ... def split(self, aW: float, aH: float, /) -> list[Flowable]: ... @@ -217,7 +218,7 @@ class ParagraphAndImage(Flowable): def draw(self) -> None: ... class FailOnWrap(NullDraw): - def wrap(self, aW: float, aH: float, /) -> NoReturn: ... + def wrap(self, aW: float, aH: float) -> NoReturn: ... class FailOnDraw(Flowable): def draw(self) -> NoReturn: ... @@ -271,6 +272,27 @@ class KeepInFrame(_Container, Flowable): fakeWidth: bool | None = None, ) -> None: ... 
+class PlacedStory(Flowable): + def __init__( + self, + x, + y, + maxWidth: float, + maxHeight: float, + content: list[Flowable] = [], + mergeSpace: Incomplete | None = 1, + mode: Literal["error", "continue", "shrink", "truncate"] = "shrink", + name: str = "", + anchor: str = "sw", + fakeWidth: bool | None = None, + hAlign: str = "LEFT", + vAlign: str = "BOTTOM", + showBoundary=None, + origin="page", + ) -> None: ... + def wrap(self, _aW: Unused, _aH: Unused) -> tuple[Literal[0], Literal[0]]: ... + def drawOn(self, canv: Canvas, lx: float, ly: float, _sW=0) -> None: ... + class _FindSplitterMixin: ... class ImageAndFlowables(_Container, _FindSplitterMixin, Flowable): diff --git a/stubs/reportlab/reportlab/platypus/tables.pyi b/stubs/reportlab/reportlab/platypus/tables.pyi index 88893f074fb1..3bcaa2f87018 100644 --- a/stubs/reportlab/reportlab/platypus/tables.pyi +++ b/stubs/reportlab/reportlab/platypus/tables.pyi @@ -40,7 +40,9 @@ class CellStyle(PropertySet): background: _Color valign: Literal["TOP", "MIDDLE", "BOTTOM"] href: str | None - destination: Incomplete + direction: str | None + shaping: Incomplete | None + destination: Incomplete | None def __init__(self, name: str, parent: CellStyle | None = None) -> None: ... def copy(self, result: CellStyle | None = None) -> CellStyle: ... diff --git a/stubs/reportlab/reportlab/rl_config.pyi b/stubs/reportlab/reportlab/rl_config.pyi index 965f6d0d90b3..7d530a71f3b2 100644 --- a/stubs/reportlab/reportlab/rl_config.pyi +++ b/stubs/reportlab/reportlab/rl_config.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete from collections.abc import Callable +from typing import Literal def register_reset(func: Callable[[], Callable[[], object] | None]) -> None: ... def _reset() -> None: ... @@ -26,7 +27,7 @@ longTableOptimize: int autoConvertEncoding: int _FUZZ: float wrapA85: int -fsEncodings: Incomplete +fsEncodings: tuple[Literal["utf8"], Literal["cp1252"], Literal["cp430"]] odbc_driver: str platypus_link_underline: int canvas_basefontname: str @@ -39,7 +40,6 @@ ttfAsciiReadable: int pdfMultiLine: int pdfComments: int debug: int -rtlSupport: int listWrapOnFakeWidth: int underlineWidth: str underlineOffset: str @@ -50,7 +50,7 @@ strikeGap: str decimalSymbol: str errorOnDuplicatePageLabelPage: int autoGenerateMissingTTFName: int -allowTTFSubsetting: Incomplete +allowTTFSubsetting: list[str] spaceShrinkage: float hyphenationLang: str uriWasteReduce: int @@ -59,14 +59,14 @@ hyphenationMinWordLength: int reserveTTFNotdef: int documentLang: Incomplete encryptionStrength: int -trustedHosts: Incomplete -trustedSchemes: Incomplete +trustedHosts: list[str] | None +trustedSchemes: list[str] renderPMBackend: str xmlParser: str textPaths: str toColorCanUse: str defCWRF: float -T1SearchPath: Incomplete -TTFSearchPath: Incomplete -CMapSearchPath: Incomplete -shapedFontGlob: list[Incomplete] +unShapedFontGlob: list[str] | None +T1SearchPath: list[str] +TTFSearchPath: list[str] +CMapSearchPath: list[str] diff --git a/stubs/reportlab/reportlab/rl_settings.pyi b/stubs/reportlab/reportlab/rl_settings.pyi index 2c220169136a..8e36a529bb22 100644 --- a/stubs/reportlab/reportlab/rl_settings.pyi +++ b/stubs/reportlab/reportlab/rl_settings.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from typing import Final +from typing import Final, Literal __version__: Final[str] @@ -27,7 +27,7 @@ longTableOptimize: Final[int] autoConvertEncoding: Final[int] _FUZZ: Final[float] wrapA85: Final[int] -fsEncodings: Final[Incomplete] +fsEncodings: Final[tuple[Literal["utf8"], 
Literal["cp1252"], Literal["cp430"]]] odbc_driver: Final[str] platypus_link_underline: Final[int] canvas_basefontname: Final[str] @@ -40,7 +40,6 @@ ttfAsciiReadable: Final[int] pdfMultiLine: Final[int] pdfComments: Final[int] debug: Final[int] -rtlSupport: Final[int] listWrapOnFakeWidth: Final[int] underlineWidth: Final[str] underlineOffset: Final[str] @@ -51,7 +50,7 @@ strikeGap: Final[str] decimalSymbol: Final[str] errorOnDuplicatePageLabelPage: Final[int] autoGenerateMissingTTFName: Final[int] -allowTTFSubsetting: Final[Incomplete] +allowTTFSubsetting: Final[list[str]] spaceShrinkage: Final[float] hyphenationLang: Final[str] uriWasteReduce: Final[int] @@ -61,16 +60,16 @@ reserveTTFNotdef: Final[int] documentLang: Final[Incomplete] encryptionStrength: Final[int] trustedHosts: Final[Incomplete] -trustedSchemes: Final[Incomplete] +trustedSchemes: Final[list[str]] renderPMBackend: Final[str] xmlParser: Final[str] textPaths: Final[str] toColorCanUse: Final[str] defCWRF: Final[float] -T1SearchPath: Final[Incomplete] -TTFSearchPath: Final[Incomplete] -CMapSearchPath: Final[Incomplete] -shapedFontGlob: Final[Incomplete | None] +unShapedFontGlob: list[str] | None +T1SearchPath: Final[tuple[str, ...]] +TTFSearchPath: Final[tuple[str, ...]] +CMapSearchPath: Final[tuple[str, ...]] __all__ = ( "allowTableBoundsErrors", @@ -108,7 +107,6 @@ __all__ = ( "pdfMultiLine", "pdfComments", "debug", - "rtlSupport", "listWrapOnFakeWidth", "T1SearchPath", "TTFSearchPath", @@ -138,5 +136,5 @@ __all__ = ( "textPaths", "toColorCanUse", "defCWRF", - "shapedFontGlob", + "unShapedFontGlob", ) From 8f6c00df896e6260f25f9c0464906289006bf5c4 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 28 Apr 2025 16:42:26 +0400 Subject: [PATCH 270/388] Complete `pyflakes` (#13848) --- pyrightconfig.stricter.json | 1 - stubs/pyflakes/@tests/stubtest_allowlist.txt | 4 + stubs/pyflakes/METADATA.toml | 4 - stubs/pyflakes/pyflakes/__init__.pyi | 4 +- stubs/pyflakes/pyflakes/__main__.pyi | 1 + stubs/pyflakes/pyflakes/api.pyi | 15 +-- stubs/pyflakes/pyflakes/checker.pyi | 73 +++++++++------ stubs/pyflakes/pyflakes/messages.pyi | 97 ++++++++++---------- stubs/pyflakes/pyflakes/reporter.pyi | 12 ++- 9 files changed, 119 insertions(+), 92 deletions(-) create mode 100644 stubs/pyflakes/pyflakes/__main__.pyi diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 1cba7f4512b1..4242a2ba3280 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -74,7 +74,6 @@ "stubs/psycopg2", "stubs/pyasn1", "stubs/pycurl", - "stubs/pyflakes", "stubs/Pygments", "stubs/PyMySQL", "stubs/python-crontab", diff --git a/stubs/pyflakes/@tests/stubtest_allowlist.txt b/stubs/pyflakes/@tests/stubtest_allowlist.txt index c803fc98bd1b..109ee305c802 100644 --- a/stubs/pyflakes/@tests/stubtest_allowlist.txt +++ b/stubs/pyflakes/@tests/stubtest_allowlist.txt @@ -3,6 +3,7 @@ pyflakes.messages.DuplicateArgument.message_args pyflakes.messages.ForwardAnnotationSyntaxError.message_args pyflakes.messages.FutureFeatureNotDefined.message_args pyflakes.messages.ImportShadowedByLoopVar.message_args +pyflakes.messages.ImportStarNotPermitted.message_args pyflakes.messages.ImportStarUsage.message_args pyflakes.messages.ImportStarUsed.message_args pyflakes.messages.MultiValueRepeatedKeyLiteral.message_args @@ -24,3 +25,6 @@ pyflakes.messages.UnusedAnnotation.message_args pyflakes.messages.UnusedImport.message_args pyflakes.messages.UnusedIndirectAssignment.message_args pyflakes.messages.UnusedVariable.message_args + +# Tests are not 
included: +pyflakes.test.* diff --git a/stubs/pyflakes/METADATA.toml b/stubs/pyflakes/METADATA.toml index 615ddbc4e61b..2308bd579d7b 100644 --- a/stubs/pyflakes/METADATA.toml +++ b/stubs/pyflakes/METADATA.toml @@ -1,6 +1,2 @@ version = "~=3.3.2" upstream_repository = "https://github.com/PyCQA/pyflakes" -partial_stub = true - -[tool.stubtest] -ignore_missing_stub = true diff --git a/stubs/pyflakes/pyflakes/__init__.pyi b/stubs/pyflakes/pyflakes/__init__.pyi index bda5b5a7f4cc..c5dd95466063 100644 --- a/stubs/pyflakes/pyflakes/__init__.pyi +++ b/stubs/pyflakes/pyflakes/__init__.pyi @@ -1 +1,3 @@ -__version__: str +from typing import Final + +__version__: Final[str] diff --git a/stubs/pyflakes/pyflakes/__main__.pyi b/stubs/pyflakes/pyflakes/__main__.pyi new file mode 100644 index 000000000000..e70d627ae91d --- /dev/null +++ b/stubs/pyflakes/pyflakes/__main__.pyi @@ -0,0 +1 @@ +from pyflakes.api import main as main diff --git a/stubs/pyflakes/pyflakes/api.pyi b/stubs/pyflakes/pyflakes/api.pyi index 59dc4ca8affe..b2f910227c5a 100644 --- a/stubs/pyflakes/pyflakes/api.pyi +++ b/stubs/pyflakes/pyflakes/api.pyi @@ -1,16 +1,17 @@ -from _typeshed import Incomplete +from _typeshed import GenericPath from collections.abc import Iterable, Iterator, Sequence from re import Pattern +from typing import Final, NoReturn from pyflakes.reporter import Reporter __all__ = ["check", "checkPath", "checkRecursive", "iterSourceCode", "main"] -PYTHON_SHEBANG_REGEX: Pattern[bytes] +PYTHON_SHEBANG_REGEX: Final[Pattern[bytes]] def check(codeString: str, filename: str, reporter: Reporter | None = None) -> int: ... -def checkPath(filename, reporter: Reporter | None = None) -> int: ... -def isPythonFile(filename) -> bool: ... -def iterSourceCode(paths: Iterable[Incomplete]) -> Iterator[Incomplete]: ... -def checkRecursive(paths: Iterable[Incomplete], reporter: Reporter) -> int: ... -def main(prog: str | None = None, args: Sequence[Incomplete] | None = None) -> None: ... +def checkPath(filename: str, reporter: Reporter | None = None) -> int: ... +def isPythonFile(filename: str) -> bool: ... +def iterSourceCode(paths: Iterable[GenericPath[str]]) -> Iterator[GenericPath[str]]: ... +def checkRecursive(paths: Iterable[GenericPath[str]], reporter: Reporter) -> int: ... +def main(prog: str | None = None, args: Sequence[str] | None = None) -> NoReturn: ... diff --git a/stubs/pyflakes/pyflakes/checker.pyi b/stubs/pyflakes/pyflakes/checker.pyi index 1736ae44cff1..3f6dbc726b3f 100644 --- a/stubs/pyflakes/pyflakes/checker.pyi +++ b/stubs/pyflakes/pyflakes/checker.pyi @@ -1,10 +1,10 @@ import ast import sys -from _typeshed import Incomplete -from collections.abc import Callable, Generator, Iterable, Iterator +from _typeshed import StrOrLiteralStr, Unused +from collections.abc import Callable, Generator, Iterable, Iterator, Sequence from contextlib import contextmanager from re import Pattern -from typing import Any, ClassVar, Literal, TypeVar, overload +from typing import Any, ClassVar, Final, Literal, TypeVar, overload from typing_extensions import Never, ParamSpec, TypeAlias from pyflakes.messages import Message @@ -13,16 +13,25 @@ _AnyFunction: TypeAlias = Callable[..., Any] _F = TypeVar("_F", bound=_AnyFunction) _P = ParamSpec("_P") -PYPY: bool +PYPY: Final[bool] +builtin_vars: Final[list[str]] -def getAlternatives(n: ast.If | ast.Try) -> list[ast.AST]: ... +def parse_format_string( + format_string: StrOrLiteralStr, +) -> Iterable[tuple[StrOrLiteralStr, StrOrLiteralStr | None, StrOrLiteralStr | None, StrOrLiteralStr | None]]: ... 
-FOR_TYPES: tuple[type[ast.For], type[ast.AsyncFor]] -MAPPING_KEY_RE: Pattern[str] -CONVERSION_FLAG_RE: Pattern[str] -WIDTH_RE: Pattern[str] -PRECISION_RE: Pattern[str] -LENGTH_RE: Pattern[str] +if sys.version_info >= (3, 10): + def getAlternatives(n: ast.If | ast.Try | ast.Match) -> list[ast.AST]: ... + +else: + def getAlternatives(n: ast.If | ast.Try) -> list[ast.AST]: ... + +FOR_TYPES: Final[tuple[type[ast.For], type[ast.AsyncFor]]] +MAPPING_KEY_RE: Final[Pattern[str]] +CONVERSION_FLAG_RE: Final[Pattern[str]] +WIDTH_RE: Final[Pattern[str]] +PRECISION_RE: Final[Pattern[str]] +LENGTH_RE: Final[Pattern[str]] VALID_CONVERSIONS: frozenset[str] _FormatType: TypeAlias = tuple[str | None, str | None, str | None, str | None, str] @@ -44,12 +53,12 @@ def convert_to_value(item: ast.Tuple) -> tuple[Any, ...]: ... # type: ignore[ov def convert_to_value(item: ast.Name) -> VariableKey: ... # type: ignore[overload-overlap] @overload def convert_to_value(item: ast.AST) -> UnhandledKeyType: ... -def is_notimplemented_name_node(node: object) -> bool: ... +def is_notimplemented_name_node(node: ast.AST) -> bool: ... class Binding: name: str source: ast.AST | None - used: Literal[False] | tuple[Incomplete, ast.AST] + used: Literal[False] | tuple[Scope, ast.AST] def __init__(self, name: str, source: ast.AST | None) -> None: ... def redefines(self, other: Binding) -> bool: ... @@ -68,7 +77,7 @@ class VariableKey: class Importation(Definition): fullName: str - redefined: list[Incomplete] + redefined: list[ast.AST] def __init__(self, name: str, source: ast.AST | None, full_name: str | None = None) -> None: ... @property def source_statement(self) -> str: ... @@ -85,11 +94,12 @@ class StarImportation(Importation): def __init__(self, name: str, source: ast.AST) -> None: ... class FutureImportation(ImportationFrom): - used: tuple[Incomplete, ast.AST] - def __init__(self, name: str, source: ast.AST, scope) -> None: ... + used: tuple[Scope, ast.AST] + def __init__(self, name: str, source: ast.AST, scope: Scope) -> None: ... class Argument(Binding): ... class Assignment(Binding): ... +class NamedExprAssignment(Assignment): ... class Annotation(Binding): def redefines(self, other: Binding) -> Literal[False]: ... @@ -111,7 +121,7 @@ class FunctionScope(Scope): usesLocals: bool alwaysUsed: ClassVar[set[str]] globals: set[str] - returnValue: Incomplete + returnValue: ast.expr | None isGenerator: bool def __init__(self) -> None: ... def unused_assignments(self) -> Iterator[tuple[str, Binding]]: ... @@ -129,7 +139,7 @@ def getNodeName(node: ast.AST) -> str: ... TYPING_MODULES: frozenset[Literal["typing", "typing_extensions"]] -def is_typing_overload(value: Binding, scope_stack) -> bool: ... +def is_typing_overload(value: Binding, scope_stack: Sequence[Scope]) -> bool: ... class AnnotationState: NONE: ClassVar[Literal[0]] @@ -165,22 +175,26 @@ else: if sys.version_info >= (3, 12): _TypeVar: TypeAlias = ast.TypeVar + _ParamSpec: TypeAlias = ast.ParamSpec + _TypeVarTuple: TypeAlias = ast.TypeVarTuple _TypeAlias: TypeAlias = ast.TypeAlias else: # The methods using these should never be called on Python < 3.12. 
_TypeVar: TypeAlias = Never + _ParamSpec: TypeAlias = Never + _TypeVarTuple: TypeAlias = Never _TypeAlias: TypeAlias = Never class Checker: nodeDepth: int offset: tuple[int, int] | None builtIns: set[str] - deadScopes: list[Incomplete] - messages: list[Incomplete] + deadScopes: list[Scope] + messages: list[Message] filename: str withDoctest: bool scopeStack: list[Scope] - exceptHandlers: list[Incomplete] + exceptHandlers: list[tuple[()] | str] root: ast.AST def __init__( self, @@ -188,7 +202,7 @@ class Checker: filename: str = "(none)", builtins: Iterable[str] | None = None, withDoctest: bool = False, - file_tokens: tuple[Incomplete, ...] = (), + file_tokens: Unused = (), ) -> None: ... def deferFunction(self, callable: _AnyFunction) -> None: ... @property @@ -211,15 +225,15 @@ class Checker: def getScopeNode(self, node: ast.AST) -> ast.AST | None: ... def differentForks(self, lnode: ast.AST, rnode: ast.AST) -> bool: ... def addBinding(self, node: ast.AST, value: Binding) -> None: ... - def getNodeHandler(self, node_class: type[ast.AST]): ... - def handleNodeLoad(self, node: ast.AST, parent: ast.AST) -> None: ... + def getNodeHandler(self, node_class: type[ast.AST]) -> Callable[[ast.AST], None]: ... + def handleNodeLoad(self, node: ast.AST, parent: ast.AST | None) -> None: ... def handleNodeStore(self, node: ast.AST) -> None: ... def handleNodeDelete(self, node: ast.AST) -> None: ... def handleChildren(self, tree: ast.AST, omit: _OmitType = None) -> None: ... def isLiteralTupleUnpacking(self, node: ast.AST) -> bool | None: ... def isDocstring(self, node: ast.AST) -> bool: ... def getDocstring(self, node: ast.AST) -> tuple[str, int] | tuple[None, None]: ... - def handleNode(self, node: ast.AST | None, parent) -> None: ... + def handleNode(self, node: ast.AST | None, parent: ast.AST | None) -> None: ... def handleDoctests(self, node: ast.AST) -> None: ... def handleStringAnnotation(self, s: str, node: ast.AST, ref_lineno: int, ref_col_offset: int, err: type[Message]) -> None: ... def handle_annotation_always_deferred(self, annotation: ast.AST, parent: ast.AST) -> None: ... @@ -311,13 +325,18 @@ class Checker: def LAMBDA(self, node: ast.Lambda) -> None: ... def ARGUMENTS(self, node: ast.arguments) -> None: ... def ARG(self, node: ast.arg) -> None: ... - def CLASSDEF(self, node: ast.ClassDef): ... + def CLASSDEF(self, node: ast.ClassDef) -> None: ... def AUGASSIGN(self, node: ast.AugAssign) -> None: ... def TUPLE(self, node: ast.Tuple) -> None: ... def LIST(self, node: ast.List) -> None: ... def IMPORT(self, node: ast.Import) -> None: ... def IMPORTFROM(self, node: ast.ImportFrom) -> None: ... def TRY(self, node: ast.Try) -> None: ... + if sys.version_info >= (3, 11): + def TRYSTAR(self, node: ast.TryStar) -> None: ... + else: + def TRYSTAR(self, node: ast.Try) -> None: ... + def EXCEPTHANDLER(self, node: ast.ExceptHandler) -> None: ... def ANNASSIGN(self, node: ast.AnnAssign) -> None: ... def COMPARE(self, node: ast.Compare) -> None: ... @@ -332,4 +351,6 @@ class Checker: def MATCHMAPPING(self, node: _MatchMapping) -> None: ... def MATCHSTAR(self, node: _MatchStar) -> None: ... def TYPEVAR(self, node: _TypeVar) -> None: ... + def PARAMSPEC(self, node: _ParamSpec) -> None: ... + def TYPEVARTUPLE(self, node: _TypeVarTuple) -> None: ... def TYPEALIAS(self, node: _TypeAlias) -> None: ... 
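As an aside, a minimal sketch of how the `Checker` constructor annotated above is usually driven (illustrative only, not part of the patch; it assumes the standard pyflakes behaviour that checking runs at construction time and results are collected in `Checker.messages`):

```python
# Illustrative sketch: drive pyflakes.checker.Checker directly over a parsed
# module and read the collected Message objects, mirroring pyflakes.api.check.
import ast
from pyflakes.checker import Checker

source = "import os\n"
tree = ast.parse(source, filename="example.py")
w = Checker(tree, filename="example.py")
for message in sorted(w.messages, key=lambda m: m.lineno):
    print(message)  # e.g. an UnusedImport message for the unused "os" import
```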
diff --git a/stubs/pyflakes/pyflakes/messages.pyi b/stubs/pyflakes/pyflakes/messages.pyi index 1338e037b354..cb7c3833a8d1 100644 --- a/stubs/pyflakes/pyflakes/messages.pyi +++ b/stubs/pyflakes/pyflakes/messages.pyi @@ -1,5 +1,4 @@ import ast -from _typeshed import Incomplete from typing import Any, ClassVar class Message: @@ -11,74 +10,74 @@ class Message: def __init__(self, filename: str, loc: ast.AST) -> None: ... class UnusedImport(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, name) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, name: str) -> None: ... class RedefinedWhileUnused(Message): - message_args: tuple[Incomplete, int] - def __init__(self, filename: str, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + message_args: tuple[str, int] + def __init__(self, filename: str, loc: ast.AST, name: str, orig_loc: ast.AST) -> None: ... class ImportShadowedByLoopVar(Message): - message_args: tuple[Incomplete, int] - def __init__(self, filename: str, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + message_args: tuple[str, int] + def __init__(self, filename: str, loc: ast.AST, name: str, orig_loc: ast.AST) -> None: ... class ImportStarNotPermitted(Message): - message_args: Incomplete - def __init__(self, filename: str, loc, modname) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, modname: str) -> None: ... class ImportStarUsed(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, modname) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, modname: str) -> None: ... class ImportStarUsage(Message): - message_args: tuple[Incomplete, Incomplete] - def __init__(self, filename: str, loc: ast.AST, name, from_list) -> None: ... + message_args: tuple[str, str] + def __init__(self, filename: str, loc: ast.AST, name: str, from_list: str) -> None: ... class UndefinedName(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, name) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, name: str) -> None: ... class DoctestSyntaxError(Message): message_args: tuple[()] def __init__(self, filename: str, loc: ast.AST, position: tuple[int, int] | None = None) -> None: ... class UndefinedExport(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, name) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, name: str) -> None: ... class UndefinedLocal(Message): default: ClassVar[str] builtin: ClassVar[str] - message_args: tuple[Incomplete, int] - def __init__(self, filename: str, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + message_args: tuple[str, int] + def __init__(self, filename: str, loc: ast.AST, name: str, orig_loc: ast.AST) -> None: ... class DuplicateArgument(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, name) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, name: str) -> None: ... class MultiValueRepeatedKeyLiteral(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, key) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, key: str) -> None: ... 
class MultiValueRepeatedKeyVariable(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, key) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, key: str) -> None: ... class LateFutureImport(Message): message_args: tuple[()] def __init__(self, filename: str, loc: ast.AST) -> None: ... class FutureFeatureNotDefined(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, name) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, name: str) -> None: ... class UnusedVariable(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, names) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, names: str) -> None: ... class UnusedAnnotation(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, names) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, names: str) -> None: ... class UnusedIndirectAssignment(Message): message_args: tuple[str, str] @@ -95,8 +94,8 @@ class IfTuple(Message): ... class AssertTuple(Message): ... class ForwardAnnotationSyntaxError(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, annotation) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, annotation: str) -> None: ... class RaiseNotImplemented(Message): ... class InvalidPrintSyntax(Message): ... @@ -104,44 +103,44 @@ class IsLiteral(Message): ... class FStringMissingPlaceholders(Message): ... class StringDotFormatExtraPositionalArguments(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, extra_positions) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, extra_positions: str) -> None: ... class StringDotFormatExtraNamedArguments(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, extra_keywords) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, extra_keywords: str) -> None: ... class StringDotFormatMissingArgument(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, missing_arguments) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, missing_arguments: str) -> None: ... class StringDotFormatMixingAutomatic(Message): ... class StringDotFormatInvalidFormat(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, error) -> None: ... + message_args: tuple[str] | tuple[Exception] + def __init__(self, filename: str, loc: ast.AST, error: str | Exception) -> None: ... class PercentFormatInvalidFormat(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, error) -> None: ... + message_args: tuple[str] | tuple[Exception] + def __init__(self, filename: str, loc: ast.AST, error: str | Exception) -> None: ... class PercentFormatMixedPositionalAndNamed(Message): ... class PercentFormatUnsupportedFormatCharacter(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, c) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, c: str) -> None: ... 
class PercentFormatPositionalCountMismatch(Message): message_args: tuple[int, int] def __init__(self, filename: str, loc: ast.AST, n_placeholders: int, n_substitutions: int) -> None: ... class PercentFormatExtraNamedArguments(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, extra_keywords) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, extra_keywords: str) -> None: ... class PercentFormatMissingArgument(Message): - message_args: tuple[Incomplete] - def __init__(self, filename: str, loc: ast.AST, missing_arguments) -> None: ... + message_args: tuple[str] + def __init__(self, filename: str, loc: ast.AST, missing_arguments: str) -> None: ... class PercentFormatExpectedMapping(Message): ... class PercentFormatExpectedSequence(Message): ... diff --git a/stubs/pyflakes/pyflakes/reporter.pyi b/stubs/pyflakes/pyflakes/reporter.pyi index 5b15ee962578..f6470e8c298e 100644 --- a/stubs/pyflakes/pyflakes/reporter.pyi +++ b/stubs/pyflakes/pyflakes/reporter.pyi @@ -1,5 +1,9 @@ +from _typeshed import SupportsWrite + +from .messages import Message + class Reporter: - def __init__(self, warningStream, errorStream) -> None: ... - def unexpectedError(self, filename, msg) -> None: ... - def syntaxError(self, filename, msg, lineno, offset, text) -> None: ... - def flake(self, message) -> None: ... + def __init__(self, warningStream: SupportsWrite[str], errorStream: SupportsWrite[str]) -> None: ... + def unexpectedError(self, filename: str, msg: str) -> None: ... + def syntaxError(self, filename: str, msg: str, lineno: int, offset: int | None, text: str | None) -> None: ... + def flake(self, message: Message) -> None: ... From 3346d2a0073496521806ae71eaca50087513bbfa Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 29 Apr 2025 01:04:45 +0200 Subject: [PATCH 271/388] Rename "Preparing Changes" to "Making Changes" (#13901) And integrate the "Code Style" chapter there. This makes the actual chapter structure follow the overview structure. --- CONTRIBUTING.md | 48 +++++++++++++++++++++++------------------------- 1 file changed, 23 insertions(+), 25 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5ebfae6b49df..e6645ede68bc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,7 +9,7 @@ are important to the project's success. 1. [Prepare your environment](#preparing-the-environment). 2. Find out [where to make your changes](#where-to-make-changes). -3. [Prepare your changes](#preparing-changes): +3. [Making your changes](#making-changes): * Small fixes and additions can be submitted directly as pull requests, but [contact us](README.md#discussion) before starting significant work. * Create your stubs, considering [what to include](#what-to-include) and @@ -112,29 +112,6 @@ as it's currently excluded from the requirements file: -## Code formatting - -The code is formatted using [`Black`](https://github.com/psf/black). -Various other autofixes and lint rules are -also performed by [`Ruff`](https://github.com/astral-sh/ruff) and -[`Flake8`](https://github.com/pycqa/flake8), -with plugin [`flake8-pyi`](https://github.com/pycqa/flake8-pyi). - -The repository is equipped with a [pre-commit.ci](https://pre-commit.ci/) -configuration file. This means that you don't *need* to do anything yourself to -run the code formatters or linters. When you push a commit, a bot will run -those for you right away and add any autofixes to your PR. 
Anything -that can't be autofixed will show up as a CI failure, hopefully with an error -message that will make it clear what's gone wrong. - -That being said, if you *want* to run the formatters and linters locally -when you commit, you're free to do so. To use the same configuration as we use -in CI, we recommend doing this via pre-commit: - -```bash -(.venv)$ pre-commit run --all-files -``` - ## Where to make changes ### Standard library stubs @@ -260,7 +237,7 @@ The format of all `METADATA.toml` files can be checked by running `python3 ./tests/check_typeshed_structure.py`. -## Preparing Changes +## Making Changes ### Before you begin @@ -275,6 +252,27 @@ Each Python module is represented by a .pyi "stub file". This is a syntactically Typeshed follows the standard type system guidelines for [stub content](https://typing.readthedocs.io/en/latest/guides/writing_stubs.html#stub-content) and [coding style](https://typing.readthedocs.io/en/latest/guides/writing_stubs.html#style-guide). +The code is formatted using [`Black`](https://github.com/psf/black). +Various other autofixes and lint rules are +also performed by [`Ruff`](https://github.com/astral-sh/ruff) and +[`Flake8`](https://github.com/pycqa/flake8), +with plugin [`flake8-pyi`](https://github.com/pycqa/flake8-pyi). + +The repository is equipped with a [pre-commit.ci](https://pre-commit.ci/) +configuration file. This means that you don't *need* to do anything yourself to +run the code formatters or linters. When you push a commit, a bot will run +those for you right away and add any autofixes to your PR. Anything +that can't be autofixed will show up as a CI failure, hopefully with an error +message that will make it clear what's gone wrong. + +That being said, if you *want* to run the formatters and linters locally +when you commit, you're free to do so. To use the same configuration as we use +in CI, we recommend doing this via pre-commit: + +```bash +(.venv)$ pre-commit run --all-files +``` + ### What to include Stubs should include the complete interface (classes, functions, From c65424dc9c79c255894d023e8ea2043c110a5fbb Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 29 Apr 2025 22:15:16 +0400 Subject: [PATCH 272/388] Add argument to `TarInfo.tarfile` setter (#13904) --- stdlib/tarfile.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stdlib/tarfile.pyi b/stdlib/tarfile.pyi index 50546be8e4bb..31094f87872d 100644 --- a/stdlib/tarfile.pyi +++ b/stdlib/tarfile.pyi @@ -637,7 +637,7 @@ class TarInfo: def tarfile(self) -> TarFile | None: ... @tarfile.setter @deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.16") - def tarfile(self) -> None: ... + def tarfile(self, tarfile: TarFile | None) -> None: ... 
else: tarfile: TarFile | None From 1af802530652d0fb5278fc06b6fecd8741f19271 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 30 Apr 2025 15:01:15 +0200 Subject: [PATCH 273/388] [stubsabot] Bump peewee to 3.18.0 (#13907) --- stubs/peewee/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/peewee/METADATA.toml b/stubs/peewee/METADATA.toml index 16c45e60207e..4ed22e0ed02e 100644 --- a/stubs/peewee/METADATA.toml +++ b/stubs/peewee/METADATA.toml @@ -1,4 +1,4 @@ -version = "3.17.9" +version = "3.18.0" upstream_repository = "https://github.com/coleifer/peewee" # We're not providing stubs for all playhouse modules right now # https://github.com/python/typeshed/pull/11731#issuecomment-2065729058 From 85eb49c6fe9c4372b26db37844837ae4543fcb6f Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Wed, 30 Apr 2025 18:22:13 +0400 Subject: [PATCH 274/388] Bump pep8-naming to 0.15.* (#13908) --- stubs/pep8-naming/METADATA.toml | 2 +- stubs/pep8-naming/pep8ext_naming.pyi | 109 +++++++++++++++------------ 2 files changed, 63 insertions(+), 48 deletions(-) diff --git a/stubs/pep8-naming/METADATA.toml b/stubs/pep8-naming/METADATA.toml index a922d35ede9e..7f9d2dc3ec32 100644 --- a/stubs/pep8-naming/METADATA.toml +++ b/stubs/pep8-naming/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.14.*" +version = "0.15.*" upstream_repository = "https://github.com/PyCQA/pep8-naming" diff --git a/stubs/pep8-naming/pep8ext_naming.pyi b/stubs/pep8-naming/pep8ext_naming.pyi index 3977e3aa815a..758bd4fce074 100644 --- a/stubs/pep8-naming/pep8ext_naming.pyi +++ b/stubs/pep8-naming/pep8ext_naming.pyi @@ -1,9 +1,10 @@ import argparse import ast +import enum import optparse from collections import deque -from collections.abc import Callable, Generator, Iterable, Sequence -from typing import Final, Literal +from collections.abc import Callable, Generator, Iterable, Iterator, Sequence +from typing import Any, Final, Literal from typing_extensions import Self __version__: Final[str] @@ -13,22 +14,30 @@ METACLASS_BASES: Final[frozenset[Literal["type", "ABCMeta"]]] METHOD_CONTAINER_NODES: Final[set[ast.AST]] FUNC_NODES: Final[tuple[type[ast.FunctionDef], type[ast.AsyncFunctionDef]]] -class _ASTCheckMeta(type): - codes: tuple[str, ...] +class BaseASTCheck: all: list[BaseASTCheck] - def __init__(cls, class_name: str, bases: tuple[object, ...], namespace: Iterable[str]) -> None: ... - -class BaseASTCheck(metaclass=_ASTCheckMeta): codes: tuple[str, ...] - all: list[BaseASTCheck] + # Per convention, unknown kwargs are passed to the super-class. See there for the types. + def __init_subclass__(cls, **kwargs: Any) -> None: ... def err(self, node: ast.AST, code: str, **kwargs: str) -> tuple[int, int, str, Self]: ... +class NameSet(frozenset[str]): + def __new__(cls, iterable: Iterable[str]) -> Self: ... + def __contains__(self, item: object, /) -> bool: ... + +@enum.unique +class FunctionType(enum.Enum): + CLASSMETHOD = "classmethod" + STATICMETHOD = "staticmethod" + FUNCTION = "function" + METHOD = "method" + class NamingChecker: name: str version: str visitors: Sequence[BaseASTCheck] - decorator_to_type: dict[str, Literal["classmethod", "staticmethod"]] - ignore_names: frozenset[str] + decorator_to_type: dict[str, FunctionType] + ignored: NameSet def __init__(self, tree: ast.AST, filename: str) -> None: ... @classmethod def add_options(cls, parser: optparse.OptionParser) -> None: ... 
@@ -38,7 +47,9 @@ class NamingChecker: def visit_tree(self, node: ast.AST, parents: deque[ast.AST]) -> Generator[tuple[int, int, str, Self]]: ... def visit_node(self, node: ast.AST, parents: Sequence[ast.AST]) -> Generator[tuple[int, int, str, Self]]: ... def tag_class_functions(self, cls_node: ast.ClassDef) -> None: ... - def set_function_nodes_types(self, nodes: Iterable[ast.AST], ismetaclass: bool, late_decoration: dict[str, str]) -> None: ... + def set_function_nodes_types( + self, nodes: Iterator[ast.AST], ismetaclass: bool, late_decoration: dict[str, FunctionType] + ) -> None: ... @classmethod def find_decorator_name(cls, d: ast.Expr) -> str: ... @staticmethod @@ -46,97 +57,95 @@ class NamingChecker: class ClassNameCheck(BaseASTCheck): codes: tuple[Literal["N801"], Literal["N818"]] - N801: str - N818: str + N801: Final[str] + N818: Final[str] @classmethod def get_classdef(cls, name: str, parents: Sequence[ast.AST]) -> ast.ClassDef | None: ... @classmethod def superclass_names(cls, name: str, parents: Sequence[ast.AST], _names: set[str] | None = None) -> set[str]: ... def visit_classdef( - self, node: ast.ClassDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + self, node: ast.ClassDef, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... class FunctionNameCheck(BaseASTCheck): codes: tuple[Literal["N802"], Literal["N807"]] - N802: str - N807: str + N802: Final[str] + N807: Final[str] @staticmethod def has_override_decorator(node: ast.FunctionDef | ast.AsyncFunctionDef) -> bool: ... def visit_functiondef( - self, node: ast.FunctionDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + self, node: ast.FunctionDef, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_asyncfunctiondef( - self, node: ast.AsyncFunctionDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + self, node: ast.AsyncFunctionDef, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... class FunctionArgNamesCheck(BaseASTCheck): codes: tuple[Literal["N803"], Literal["N804"], Literal["N805"]] - N803: str - N804: str - N805: str + N803: Final[str] + N804: Final[str] + N805: Final[str] def visit_functiondef( - self, node: ast.FunctionDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + self, node: ast.FunctionDef, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_asyncfunctiondef( - self, node: ast.AsyncFunctionDef, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + self, node: ast.AsyncFunctionDef, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... class ImportAsCheck(BaseASTCheck): codes: tuple[Literal["N811"], Literal["N812"], Literal["N813"], Literal["N814"], Literal["N817"]] - N811: str - N812: str - N813: str - N814: str - N817: str + N811: Final[str] + N812: Final[str] + N813: Final[str] + N814: Final[str] + N817: Final[str] def visit_importfrom( - self, node: ast.ImportFrom, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + self, node: ast.ImportFrom, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_import( - self, node: ast.Import, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + self, node: ast.Import, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... 
class VariablesCheck(BaseASTCheck): codes: tuple[Literal["N806"], Literal["N815"], Literal["N816"]] - N806: str - N815: str - N816: str + N806: Final[str] + N815: Final[str] + N816: Final[str] @staticmethod def is_namedtupe(node_value: ast.AST) -> bool: ... def visit_assign( - self, node: ast.Assign, parents: Sequence[ast.AST], ignore: Iterable[str] | None = None + self, node: ast.Assign, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_namedexpr( - self, node: ast.NamedExpr, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.NamedExpr, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_annassign( - self, node: ast.AnnAssign, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.AnnAssign, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_with( - self, node: ast.With, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.With, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_asyncwith( - self, node: ast.AsyncWith, parents: Sequence[ast.AST], ignore: Iterable[str] - ) -> Generator[tuple[int, int, str, Self]]: ... - def visit_for( - self, node: ast.For, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.AsyncWith, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... + def visit_for(self, node: ast.For, parents: Sequence[ast.AST], ignored: NameSet) -> Generator[tuple[int, int, str, Self]]: ... def visit_asyncfor( - self, node: ast.AsyncFor, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.AsyncFor, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_excepthandler( - self, node: ast.ExceptHandler, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.ExceptHandler, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_generatorexp( - self, node: ast.GeneratorExp, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.GeneratorExp, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_listcomp( - self, node: ast.ListComp, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.ListComp, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_dictcomp( - self, node: ast.DictComp, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.DictComp, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... def visit_setcomp( - self, node: ast.SetComp, parents: Sequence[ast.AST], ignore: Iterable[str] + self, node: ast.SetComp, parents: Sequence[ast.AST], ignored: NameSet ) -> Generator[tuple[int, int, str, Self]]: ... @staticmethod def global_variable_check(name: str) -> Literal["N816"] | None: ... @@ -145,4 +154,10 @@ class VariablesCheck(BaseASTCheck): @staticmethod def function_variable_check(func: Callable[..., object], var_name: str) -> Literal["N806"] | None: ... +class TypeVarNameCheck(BaseASTCheck): + N808: Final[str] + def visit_module( + self, node: ast.Module, parents: Sequence[ast.AST], ignored: NameSet + ) -> Generator[tuple[int, int, str, Self]]: ... + def is_mixed_case(name: str) -> bool: ... 
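The new `TypeVarNameCheck` above registers error code N808 and visits the module node, which points at a check on `TypeVar` naming. The exact rule text is not part of this diff, so the snippet below only illustrates the PEP 8 spelling such a check would plausibly enforce; treat the comments as an assumption rather than a statement about pep8-naming itself.

```python
from typing import TypeVar

# CapWords, short names: the spelling PEP 8 recommends for type variables.
T = TypeVar("T")
AnyStr = TypeVar("AnyStr", bytes, str)

# Variance suffixes keep the CapWords base name.
VT_co = TypeVar("VT_co", covariant=True)
KT_contra = TypeVar("KT_contra", contravariant=True)

# A lowercase_with_underscores spelling such as "my_type_var" is the kind of
# name a TypeVar naming check would be expected to flag.
my_type_var = TypeVar("my_type_var")
```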
From d0f1f7ad093c11a5faaf49f1e35973476df957a9 Mon Sep 17 00:00:00 2001 From: Tatsh Date: Wed, 30 Apr 2025 18:50:53 -0400 Subject: [PATCH 275/388] Add ratelimit stubs (#13909) --- stubs/ratelimit/@tests/stubtest_allowlist.txt | 3 +++ stubs/ratelimit/METADATA.toml | 2 ++ stubs/ratelimit/ratelimit/__init__.pyi | 7 +++++++ stubs/ratelimit/ratelimit/decorators.pyi | 14 ++++++++++++++ stubs/ratelimit/ratelimit/exception.pyi | 3 +++ 5 files changed, 29 insertions(+) create mode 100644 stubs/ratelimit/@tests/stubtest_allowlist.txt create mode 100644 stubs/ratelimit/METADATA.toml create mode 100644 stubs/ratelimit/ratelimit/__init__.pyi create mode 100644 stubs/ratelimit/ratelimit/decorators.pyi create mode 100644 stubs/ratelimit/ratelimit/exception.pyi diff --git a/stubs/ratelimit/@tests/stubtest_allowlist.txt b/stubs/ratelimit/@tests/stubtest_allowlist.txt new file mode 100644 index 000000000000..5b0935ce9175 --- /dev/null +++ b/stubs/ratelimit/@tests/stubtest_allowlist.txt @@ -0,0 +1,3 @@ +# This file lacks __all__ and "now" is only used to set the default value of +# RateLimitDecorator.__init__()'s clock parameter +ratelimit.decorators.now diff --git a/stubs/ratelimit/METADATA.toml b/stubs/ratelimit/METADATA.toml new file mode 100644 index 000000000000..d7e6677bef62 --- /dev/null +++ b/stubs/ratelimit/METADATA.toml @@ -0,0 +1,2 @@ +version = "2.2.*" +upstream_repository = "https://github.com/tomasbasham/ratelimit" diff --git a/stubs/ratelimit/ratelimit/__init__.pyi b/stubs/ratelimit/ratelimit/__init__.pyi new file mode 100644 index 000000000000..374329aaa37e --- /dev/null +++ b/stubs/ratelimit/ratelimit/__init__.pyi @@ -0,0 +1,7 @@ +from ratelimit.decorators import RateLimitDecorator, sleep_and_retry +from ratelimit.exception import RateLimitException + +limits = RateLimitDecorator +rate_limited = RateLimitDecorator + +__all__ = ["RateLimitException", "limits", "rate_limited", "sleep_and_retry"] diff --git a/stubs/ratelimit/ratelimit/decorators.pyi b/stubs/ratelimit/ratelimit/decorators.pyi new file mode 100644 index 000000000000..297828817b83 --- /dev/null +++ b/stubs/ratelimit/ratelimit/decorators.pyi @@ -0,0 +1,14 @@ +from collections.abc import Callable +from typing import TypeVar +from typing_extensions import ParamSpec + +_P = ParamSpec("_P") +_T = TypeVar("_T") + +class RateLimitDecorator: + def __init__( + self, calls: int = 15, period: float = 900, clock: Callable[[], float] = ..., raise_on_limit: bool = True + ) -> None: ... + def __call__(self, func: Callable[_P, _T]) -> Callable[_P, _T]: ... + +def sleep_and_retry(func: Callable[_P, _T]) -> Callable[_P, _T]: ... diff --git a/stubs/ratelimit/ratelimit/exception.pyi b/stubs/ratelimit/ratelimit/exception.pyi new file mode 100644 index 000000000000..2271781458b0 --- /dev/null +++ b/stubs/ratelimit/ratelimit/exception.pyi @@ -0,0 +1,3 @@ +class RateLimitException(Exception): + period_remaining: float + def __init__(self, message: str, period_remaining: float) -> None: ... From 539eb3c2c8a3a92e0cf7af932543b38e5d3ce0f3 Mon Sep 17 00:00:00 2001 From: Allan Lewis Date: Thu, 1 May 2025 15:47:53 +0100 Subject: [PATCH 276/388] Correct type of `kwargs` in `assertpy.exception.ExceptionMixin.when_called_with` (#13903) The keys of `kwargs` dicts are always strings, the type hint is for the values, which in this case could be anything. 
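The reasoning in that commit message is easiest to see with a type checker. The sketch below is a standalone illustration of the two spellings, not code taken from the stub.

```python
from typing import Any

def old_signature(**kwargs: dict[str, Any]) -> None:
    """Annotation applies to the values: every keyword value must be a dict."""

def new_signature(**kwargs: Any) -> None:
    """Keyword values may be anything; the keys are str either way."""

old_signature(timeout=5)   # rejected by a type checker: 5 is not a dict[str, Any]
new_signature(timeout=5)   # accepted
new_signature(options={})  # also accepted
```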
--- stubs/assertpy/assertpy/exception.pyi | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/stubs/assertpy/assertpy/exception.pyi b/stubs/assertpy/assertpy/exception.pyi index 2af5e46bae03..b1eb47da6915 100644 --- a/stubs/assertpy/assertpy/exception.pyi +++ b/stubs/assertpy/assertpy/exception.pyi @@ -5,4 +5,5 @@ __tracebackhide__: bool class ExceptionMixin: def raises(self, ex: type[BaseException] | BaseException) -> Self: ... - def when_called_with(self, *some_args: Any, **some_kwargs: dict[str, Any]) -> Self: ... + # The types of some_args and some_kwargs must equal the types of the called function. + def when_called_with(self, *some_args: Any, **some_kwargs: Any) -> Self: ... From 72a1f2160b05df6b9092f1f29d17e4d0d00f1906 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sat, 3 May 2025 03:08:31 +0400 Subject: [PATCH 277/388] Improve `docker.types` (#13809) Co-authored-by: Avasam --- stubs/docker/docker/api/container.pyi | 6 +- stubs/docker/docker/api/network.pyi | 3 +- stubs/docker/docker/models/containers.pyi | 19 +- stubs/docker/docker/types/containers.pyi | 66 +++---- stubs/docker/docker/types/networks.pyi | 30 ++-- stubs/docker/docker/types/services.pyi | 200 ++++++++++++---------- stubs/docker/docker/types/swarm.pyi | 42 +++-- stubs/docker/docker/utils/utils.pyi | 4 +- 8 files changed, 204 insertions(+), 166 deletions(-) diff --git a/stubs/docker/docker/api/container.pyi b/stubs/docker/docker/api/container.pyi index 06a6167c8920..d2a58261853f 100644 --- a/stubs/docker/docker/api/container.pyi +++ b/stubs/docker/docker/api/container.pyi @@ -1,6 +1,6 @@ import datetime from _typeshed import Incomplete -from typing import Literal, TypedDict, overload, type_check_only +from typing import Any, Literal, TypedDict, overload, type_check_only from typing_extensions import TypeAlias from docker._types import WaitContainerResponse @@ -61,7 +61,9 @@ class ContainerApiMixin: detach: bool = False, stdin_open: bool = False, tty: bool = False, - ports: list[int] | None = None, + # list is invariant, enumerating all possible union combination would be too complex for: + # list[str | int | tuple[int | str, str] | tuple[int | str, ...]] + ports: dict[str, dict[Incomplete, Incomplete]] | list[Any] | None = None, environment: dict[str, str] | list[str] | None = None, volumes: str | list[str] | None = None, network_disabled: bool = False, diff --git a/stubs/docker/docker/api/network.pyi b/stubs/docker/docker/api/network.pyi index fff9fdfdb8f4..7f20d8707256 100644 --- a/stubs/docker/docker/api/network.pyi +++ b/stubs/docker/docker/api/network.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete +from collections.abc import Iterable from typing import Any, Literal, TypedDict, type_check_only from typing_extensions import TypeAlias @@ -43,7 +44,7 @@ class NetworkApiMixin: ipv4_address: Incomplete | None = None, ipv6_address: Incomplete | None = None, aliases: Incomplete | None = None, - links: Incomplete | None = None, + links: dict[str, str] | dict[str, None] | dict[str, str | None] | Iterable[tuple[str, str | None]] | None = None, link_local_ips: Incomplete | None = None, driver_opt: Incomplete | None = None, mac_address: Incomplete | None = None, diff --git a/stubs/docker/docker/models/containers.pyi b/stubs/docker/docker/models/containers.pyi index c3f787b76891..3224f5844cfd 100644 --- a/stubs/docker/docker/models/containers.pyi +++ b/stubs/docker/docker/models/containers.pyi @@ -1,5 +1,6 @@ import datetime from _typeshed import Incomplete +from collections.abc import Iterable, 
Mapping from typing import Literal, NamedTuple, TypedDict, overload from typing_extensions import NotRequired @@ -152,7 +153,7 @@ class ContainerCollection(Collection[Container]): entrypoint: str | list[str] | None = None, environment: dict[str, str] | list[str] | None = None, extra_hosts: dict[str, str] | None = None, - group_add: list[str | int] | None = None, + group_add: Iterable[str | int] | None = None, healthcheck: dict[Incomplete, Incomplete] | None = None, hostname: str | None = None, init: bool | None = None, @@ -161,7 +162,7 @@ class ContainerCollection(Collection[Container]): isolation: str | None = None, kernel_memory: str | int | None = None, labels: dict[str, str] | list[str] | None = None, - links: dict[str, str | None] | None = None, + links: dict[str, str] | dict[str, None] | dict[str, str | None] | Iterable[tuple[str, str | None]] | None = None, log_config: LogConfig | None = None, lxc_conf: dict[Incomplete, Incomplete] | None = None, mac_address: str | None = None, @@ -181,7 +182,7 @@ class ContainerCollection(Collection[Container]): pid_mode: str | None = None, pids_limit: int | None = None, platform: str | None = None, - ports: dict[str, int | list[int] | tuple[str, int] | None] | None = None, + ports: Mapping[str, int | list[int] | tuple[str, int] | None] | None = None, privileged: bool = False, publish_all_ports: bool = False, read_only: bool | None = None, @@ -247,7 +248,7 @@ class ContainerCollection(Collection[Container]): entrypoint: str | list[str] | None = None, environment: dict[str, str] | list[str] | None = None, extra_hosts: dict[str, str] | None = None, - group_add: list[str | int] | None = None, + group_add: Iterable[str | int] | None = None, healthcheck: dict[Incomplete, Incomplete] | None = None, hostname: str | None = None, init: bool | None = None, @@ -256,7 +257,7 @@ class ContainerCollection(Collection[Container]): isolation: str | None = None, kernel_memory: str | int | None = None, labels: dict[str, str] | list[str] | None = None, - links: dict[str, str | None] | None = None, + links: dict[str, str] | dict[str, None] | dict[str, str | None] | Iterable[tuple[str, str | None]] | None = None, log_config: LogConfig | None = None, lxc_conf: dict[Incomplete, Incomplete] | None = None, mac_address: str | None = None, @@ -276,7 +277,7 @@ class ContainerCollection(Collection[Container]): pid_mode: str | None = None, pids_limit: int | None = None, platform: str | None = None, - ports: dict[str, int | list[int] | tuple[str, int] | None] | None = None, + ports: Mapping[str, int | list[int] | tuple[str, int] | None] | None = None, privileged: bool = False, publish_all_ports: bool = False, read_only: bool | None = None, @@ -338,7 +339,7 @@ class ContainerCollection(Collection[Container]): entrypoint: str | list[str] | None = None, environment: dict[str, str] | list[str] | None = None, extra_hosts: dict[str, str] | None = None, - group_add: list[str | int] | None = None, + group_add: Iterable[str | int] | None = None, healthcheck: dict[Incomplete, Incomplete] | None = None, hostname: str | None = None, init: bool | None = None, @@ -347,7 +348,7 @@ class ContainerCollection(Collection[Container]): isolation: str | None = None, kernel_memory: str | int | None = None, labels: dict[str, str] | list[str] | None = None, - links: dict[str, str | None] | None = None, + links: dict[str, str] | dict[str, None] | dict[str, str | None] | Iterable[tuple[str, str | None]] | None = None, log_config: LogConfig | None = None, lxc_conf: dict[Incomplete, Incomplete] | None = 
None, mac_address: str | None = None, @@ -367,7 +368,7 @@ class ContainerCollection(Collection[Container]): pid_mode: str | None = None, pids_limit: int | None = None, platform: str | None = None, - ports: dict[str, int | list[int] | tuple[str, int] | None] | None = None, + ports: Mapping[str, int | list[int] | tuple[str, int] | None] | None = None, privileged: bool = False, publish_all_ports: bool = False, read_only: bool | None = None, diff --git a/stubs/docker/docker/types/containers.pyi b/stubs/docker/docker/types/containers.pyi index bd9fffa38d4b..a327c3479141 100644 --- a/stubs/docker/docker/types/containers.pyi +++ b/stubs/docker/docker/types/containers.pyi @@ -1,18 +1,22 @@ from _typeshed import Incomplete -from typing import Literal +from collections.abc import Iterable, Mapping +from typing import Any, Final, Literal from docker._types import ContainerWeightDevice +from .. import errors from .base import DictType +from .healthcheck import Healthcheck +from .networks import NetworkingConfig from .services import Mount class LogConfigTypesEnum: - JSON: Incomplete - SYSLOG: Incomplete - JOURNALD: Incomplete - GELF: Incomplete - FLUENTD: Incomplete - NONE: Incomplete + JSON: Final = "json-file" + SYSLOG: Final = "syslog" + JOURNALD: Final = "journald" + GELF: Final = "gelf" + FLUENTD: Final = "fluentd" + NONE: Final = "none" class LogConfig(DictType): types: type[LogConfigTypesEnum] @@ -68,21 +72,21 @@ class HostConfig(dict[str, Incomplete]): def __init__( self, version: str, - binds: Incomplete | None = None, - port_bindings: Incomplete | None = None, - lxc_conf: dict[Incomplete, Incomplete] | None = None, + binds: dict[str, Mapping[str, str]] | list[str] | None = None, + port_bindings: Mapping[int | str, Incomplete] | None = None, + lxc_conf: dict[str, Incomplete] | list[dict[str, Incomplete]] | None = None, publish_all_ports: bool = False, - links: dict[str, str | None] | None = None, + links: dict[str, str] | dict[str, None] | dict[str, str | None] | Iterable[tuple[str, str | None]] | None = None, privileged: bool = False, - dns: list[Incomplete] | None = None, - dns_search: list[Incomplete] | None = None, + dns: list[str] | None = None, + dns_search: list[str] | None = None, volumes_from: list[str] | None = None, network_mode: str | None = None, - restart_policy: dict[Incomplete, Incomplete] | None = None, + restart_policy: Mapping[str, str | int] | None = None, cap_add: list[str] | None = None, cap_drop: list[str] | None = None, devices: list[str] | None = None, - extra_hosts: dict[Incomplete, Incomplete] | None = None, + extra_hosts: dict[str, Incomplete] | list[Incomplete] | None = None, read_only: bool | None = None, pid_mode: str | None = None, ipc_mode: str | None = None, @@ -95,18 +99,18 @@ class HostConfig(dict[str, Incomplete]): kernel_memory: str | int | None = None, mem_swappiness: int | None = None, cgroup_parent: str | None = None, - group_add: list[str | int] | None = None, + group_add: Iterable[str | int] | None = None, cpu_quota: int | None = None, cpu_period: int | None = None, blkio_weight: int | None = None, blkio_weight_device: list[ContainerWeightDevice] | None = None, - device_read_bps: Incomplete | None = None, - device_write_bps: Incomplete | None = None, - device_read_iops: Incomplete | None = None, - device_write_iops: Incomplete | None = None, + device_read_bps: list[Mapping[str, str | int]] | None = None, + device_write_bps: list[Mapping[str, str | int]] | None = None, + device_read_iops: list[Mapping[str, str | int]] | None = None, + 
device_write_iops: list[Mapping[str, str | int]] | None = None, oom_kill_disable: bool = False, shm_size: str | int | None = None, - sysctls: dict[Incomplete, Incomplete] | None = None, + sysctls: dict[str, str] | None = None, tmpfs: dict[str, str] | None = None, oom_score_adj: int | None = None, dns_opt: list[Incomplete] | None = None, @@ -134,35 +138,37 @@ class HostConfig(dict[str, Incomplete]): cgroupns: Literal["private", "host"] | None = None, ) -> None: ... -def host_config_type_error(param, param_value, expected): ... -def host_config_version_error(param, version, less_than: bool = True): ... -def host_config_value_error(param, param_value): ... -def host_config_incompatible_error(param, param_value, incompatible_param): ... +def host_config_type_error(param: str, param_value: object, expected: str) -> TypeError: ... +def host_config_version_error(param: str, version: str, less_than: bool = True) -> errors.InvalidVersion: ... +def host_config_value_error(param: str, param_value: object) -> ValueError: ... +def host_config_incompatible_error(param: str, param_value: str, incompatible_param: str) -> errors.InvalidArgument: ... class ContainerConfig(dict[str, Incomplete]): def __init__( self, version: str, - image, + image: str, command: str | list[str], hostname: str | None = None, user: str | int | None = None, detach: bool = False, stdin_open: bool = False, tty: bool = False, - ports: dict[str, int | list[int] | tuple[str, int] | None] | None = None, + # list is invariant, enumerating all possible union combination would be too complex for: + # list[str | int | tuple[int | str, str] | tuple[int | str, ...]] + ports: dict[str, dict[Incomplete, Incomplete]] | list[Any] | None = None, environment: dict[str, str] | list[str] | None = None, volumes: str | list[str] | None = None, network_disabled: bool = False, entrypoint: str | list[str] | None = None, working_dir: str | None = None, domainname: str | None = None, - host_config: Incomplete | None = None, + host_config: HostConfig | None = None, mac_address: str | None = None, labels: dict[str, str] | list[str] | None = None, stop_signal: str | None = None, - networking_config: Incomplete | None = None, - healthcheck: Incomplete | None = None, + networking_config: NetworkingConfig | None = None, + healthcheck: Healthcheck | None = None, stop_timeout: int | None = None, runtime: str | None = None, ) -> None: ... diff --git a/stubs/docker/docker/types/networks.pyi b/stubs/docker/docker/types/networks.pyi index 14c08b01d1ac..fb64d7d9fe61 100644 --- a/stubs/docker/docker/types/networks.pyi +++ b/stubs/docker/docker/types/networks.pyi @@ -1,31 +1,35 @@ from _typeshed import Incomplete +from collections.abc import Iterable class EndpointConfig(dict[str, Incomplete]): def __init__( self, - version, - aliases: Incomplete | None = None, - links: Incomplete | None = None, - ipv4_address: Incomplete | None = None, - ipv6_address: Incomplete | None = None, - link_local_ips: Incomplete | None = None, + version: str, + aliases: list[Incomplete] | None = None, + links: dict[str, str] | dict[str, None] | dict[str, str | None] | Iterable[tuple[str, str | None]] | None = None, + ipv4_address: str | None = None, + ipv6_address: str | None = None, + link_local_ips: list[str] | None = None, driver_opt: Incomplete | None = None, - mac_address: Incomplete | None = None, + mac_address: str | None = None, ) -> None: ... class NetworkingConfig(dict[str, Incomplete]): - def __init__(self, endpoints_config: Incomplete | None = None) -> None: ... 
+ def __init__(self, endpoints_config: EndpointConfig | None = None) -> None: ... class IPAMConfig(dict[str, Incomplete]): def __init__( - self, driver: str = "default", pool_configs: Incomplete | None = None, options: Incomplete | None = None + self, + driver: str = "default", + pool_configs: list[IPAMPool] | None = None, + options: dict[Incomplete, Incomplete] | None = None, ) -> None: ... class IPAMPool(dict[str, Incomplete]): def __init__( self, - subnet: Incomplete | None = None, - iprange: Incomplete | None = None, - gateway: Incomplete | None = None, - aux_addresses: Incomplete | None = None, + subnet: str | None = None, + iprange: str | None = None, + gateway: str | None = None, + aux_addresses: dict[str, str] | None = None, ) -> None: ... diff --git a/stubs/docker/docker/types/services.pyi b/stubs/docker/docker/types/services.pyi index fda53fc07f68..001f1cf5b41c 100644 --- a/stubs/docker/docker/types/services.pyi +++ b/stubs/docker/docker/types/services.pyi @@ -1,170 +1,188 @@ from _typeshed import Incomplete +from collections.abc import Iterable, Mapping +from typing import Final, Literal, TypeVar, overload + +from .healthcheck import Healthcheck + +_T = TypeVar("_T") class TaskTemplate(dict[str, Incomplete]): def __init__( self, - container_spec, - resources: Incomplete | None = None, - restart_policy: Incomplete | None = None, - placement: Incomplete | None = None, - log_driver: Incomplete | None = None, - networks: Incomplete | None = None, - force_update: Incomplete | None = None, + container_spec: ContainerSpec, + resources: Resources | None = None, + restart_policy: RestartPolicy | None = None, + placement: Placement | list[str] | None = None, + log_driver: DriverConfig | None = None, + networks: Iterable[str | NetworkAttachmentConfig] | None = None, + force_update: int | None = None, ) -> None: ... @property - def container_spec(self): ... + def container_spec(self) -> ContainerSpec: ... @property - def resources(self): ... + def resources(self) -> Resources: ... @property - def restart_policy(self): ... + def restart_policy(self) -> RestartPolicy: ... @property - def placement(self): ... + def placement(self) -> Placement: ... 
class ContainerSpec(dict[str, Incomplete]): def __init__( self, - image, - command: Incomplete | None = None, - args: Incomplete | None = None, - hostname: Incomplete | None = None, - env: Incomplete | None = None, - workdir: Incomplete | None = None, - user: Incomplete | None = None, - labels: Incomplete | None = None, - mounts: Incomplete | None = None, - stop_grace_period: Incomplete | None = None, - secrets: Incomplete | None = None, - tty: Incomplete | None = None, - groups: Incomplete | None = None, - open_stdin: Incomplete | None = None, - read_only: Incomplete | None = None, - stop_signal: Incomplete | None = None, - healthcheck: Incomplete | None = None, - hosts: Incomplete | None = None, - dns_config: Incomplete | None = None, - configs: Incomplete | None = None, - privileges: Incomplete | None = None, - isolation: Incomplete | None = None, - init: Incomplete | None = None, - cap_add: Incomplete | None = None, - cap_drop: Incomplete | None = None, - sysctls: Incomplete | None = None, + image: str, + command: str | list[str] | None = None, + args: list[str] | None = None, + hostname: str | None = None, + env: dict[str, Incomplete] | list[str] | None = None, + workdir: str | None = None, + user: str | None = None, + labels: dict[Incomplete, Incomplete] | None = None, + mounts: Iterable[str | Mount] | None = None, + stop_grace_period: int | None = None, + secrets: list[SecretReference] | None = None, + tty: bool | None = None, + groups: list[Incomplete] | None = None, + open_stdin: bool | None = None, + read_only: bool | None = None, + stop_signal: str | None = None, + healthcheck: Healthcheck | None = None, + hosts: Mapping[str, str] | None = None, + dns_config: DNSConfig | None = None, + configs: list[ConfigReference] | None = None, + privileges: Privileges | None = None, + isolation: str | None = None, + init: bool | None = None, + cap_add: list[Incomplete] | None = None, + cap_drop: list[Incomplete] | None = None, + sysctls: dict[str, Incomplete] | None = None, ) -> None: ... class Mount(dict[str, Incomplete]): def __init__( self, - target, - source, - type: str = "volume", + target: str, + source: str, + type: Literal["bind", "volume", "tmpfs", "npipe"] = "volume", read_only: bool = False, - consistency: Incomplete | None = None, - propagation: Incomplete | None = None, + consistency: Literal["default", "consistent", "cached", "delegated"] | None = None, + propagation: str | None = None, no_copy: bool = False, - labels: Incomplete | None = None, - driver_config: Incomplete | None = None, - tmpfs_size: Incomplete | None = None, - tmpfs_mode: Incomplete | None = None, + labels: dict[Incomplete, Incomplete] | None = None, + driver_config: DriverConfig | None = None, + tmpfs_size: int | str | None = None, + tmpfs_mode: int | None = None, ) -> None: ... @classmethod - def parse_mount_string(cls, string): ... + def parse_mount_string(cls, string: str) -> Mount: ... class Resources(dict[str, Incomplete]): def __init__( self, - cpu_limit: Incomplete | None = None, - mem_limit: Incomplete | None = None, - cpu_reservation: Incomplete | None = None, - mem_reservation: Incomplete | None = None, - generic_resources: Incomplete | None = None, + cpu_limit: int | None = None, + mem_limit: int | None = None, + cpu_reservation: int | None = None, + mem_reservation: int | None = None, + generic_resources: dict[str, Incomplete] | list[str] | None = None, ) -> None: ... 
class UpdateConfig(dict[str, Incomplete]): def __init__( self, parallelism: int = 0, - delay: Incomplete | None = None, - failure_action: str = "continue", - monitor: Incomplete | None = None, - max_failure_ratio: Incomplete | None = None, - order: Incomplete | None = None, + delay: int | None = None, + failure_action: Literal["pause", "continue", "rollback"] = "continue", + monitor: int | None = None, + max_failure_ratio: float | None = None, + order: Literal["start-first", "stop-first"] | None = None, ) -> None: ... class RollbackConfig(UpdateConfig): ... class RestartConditionTypesEnum: - NONE: Incomplete - ON_FAILURE: Incomplete - ANY: Incomplete + NONE: Final = "none" + ON_FAILURE: Final = "on-failure" + ANY: Final = "any" class RestartPolicy(dict[str, Incomplete]): condition_types: type[RestartConditionTypesEnum] - def __init__(self, condition="none", delay: int = 0, max_attempts: int = 0, window: int = 0) -> None: ... + def __init__( + self, condition: Literal["none", "on-failure", "any"] = "none", delay: int = 0, max_attempts: int = 0, window: int = 0 + ) -> None: ... class DriverConfig(dict[str, Incomplete]): - def __init__(self, name, options: Incomplete | None = None) -> None: ... + def __init__(self, name: str, options: dict[Incomplete, Incomplete] | None = None) -> None: ... class EndpointSpec(dict[str, Incomplete]): - def __init__(self, mode: Incomplete | None = None, ports: Incomplete | None = None) -> None: ... + def __init__( + self, mode: str | None = None, ports: Mapping[str, str | tuple[str | None, ...]] | list[dict[str, str]] | None = None + ) -> None: ... -def convert_service_ports(ports): ... +@overload +def convert_service_ports(ports: list[_T]) -> list[_T]: ... +@overload +def convert_service_ports(ports: Mapping[str, str | tuple[str | None, ...]]) -> list[dict[str, str]]: ... class ServiceMode(dict[str, Incomplete]): - mode: Incomplete - def __init__(self, mode, replicas: Incomplete | None = None, concurrency: Incomplete | None = None) -> None: ... + mode: Literal["replicated", "global", "ReplicatedJob", "GlobalJob"] + def __init__( + self, + mode: Literal["replicated", "global", "replicated-job", "global-job"], + replicas: int | None = None, + concurrency: int | None = None, + ) -> None: ... @property - def replicas(self): ... + def replicas(self) -> int | None: ... class SecretReference(dict[str, Incomplete]): def __init__( self, - secret_id, - secret_name, - filename: Incomplete | None = None, - uid: Incomplete | None = None, - gid: Incomplete | None = None, + secret_id: str, + secret_name: str, + filename: str | None = None, + uid: str | None = None, + gid: str | None = None, mode: int = 292, ) -> None: ... class ConfigReference(dict[str, Incomplete]): def __init__( self, - config_id, - config_name, - filename: Incomplete | None = None, - uid: Incomplete | None = None, - gid: Incomplete | None = None, + config_id: str, + config_name: str, + filename: str | None = None, + uid: str | None = None, + gid: str | None = None, mode: int = 292, ) -> None: ... class Placement(dict[str, Incomplete]): def __init__( self, - constraints: Incomplete | None = None, - preferences: Incomplete | None = None, - platforms: Incomplete | None = None, - maxreplicas: Incomplete | None = None, + constraints: list[str] | None = None, + preferences: Iterable[tuple[str, str] | PlacementPreference] | None = None, + platforms: Iterable[tuple[str, str]] | None = None, + maxreplicas: int | None = None, ) -> None: ... 
class PlacementPreference(dict[str, Incomplete]): - def __init__(self, strategy, descriptor) -> None: ... + def __init__(self, strategy: Literal["spread"], descriptor: str) -> None: ... class DNSConfig(dict[str, Incomplete]): def __init__( - self, nameservers: Incomplete | None = None, search: Incomplete | None = None, options: Incomplete | None = None + self, nameservers: list[str] | None = None, search: list[str] | None = None, options: list[str] | None = None ) -> None: ... class Privileges(dict[str, Incomplete]): def __init__( self, - credentialspec_file: Incomplete | None = None, - credentialspec_registry: Incomplete | None = None, - selinux_disable: Incomplete | None = None, - selinux_user: Incomplete | None = None, - selinux_role: Incomplete | None = None, - selinux_type: Incomplete | None = None, - selinux_level: Incomplete | None = None, + credentialspec_file: str | None = None, + credentialspec_registry: str | None = None, + selinux_disable: bool | None = None, + selinux_user: str | None = None, + selinux_role: str | None = None, + selinux_type: str | None = None, + selinux_level: str | None = None, ) -> None: ... class NetworkAttachmentConfig(dict[str, Incomplete]): - def __init__(self, target, aliases: Incomplete | None = None, options: Incomplete | None = None) -> None: ... + def __init__(self, target: str, aliases: list[str] | None = None, options: dict[str, Incomplete] | None = None) -> None: ... diff --git a/stubs/docker/docker/types/swarm.pyi b/stubs/docker/docker/types/swarm.pyi index 5811686e9a03..2d99c4a0ae93 100644 --- a/stubs/docker/docker/types/swarm.pyi +++ b/stubs/docker/docker/types/swarm.pyi @@ -1,29 +1,35 @@ from _typeshed import Incomplete from typing import Any +from .services import DriverConfig + class SwarmSpec(dict[str, Any]): def __init__( self, - version, - task_history_retention_limit: Incomplete | None = None, - snapshot_interval: Incomplete | None = None, - keep_old_snapshots: Incomplete | None = None, - log_entries_for_slow_followers: Incomplete | None = None, - heartbeat_tick: Incomplete | None = None, - election_tick: Incomplete | None = None, - dispatcher_heartbeat_period: Incomplete | None = None, - node_cert_expiry: Incomplete | None = None, - external_cas: Incomplete | None = None, - name: Incomplete | None = None, - labels: Incomplete | None = None, - signing_ca_cert: Incomplete | None = None, - signing_ca_key: Incomplete | None = None, - ca_force_rotate: Incomplete | None = None, - autolock_managers: Incomplete | None = None, - log_driver: Incomplete | None = None, + version: str, + task_history_retention_limit: int | None = None, + snapshot_interval: int | None = None, + keep_old_snapshots: int | None = None, + log_entries_for_slow_followers: int | None = None, + heartbeat_tick: int | None = None, + election_tick: int | None = None, + dispatcher_heartbeat_period: int | None = None, + node_cert_expiry: int | None = None, + external_cas: list[SwarmExternalCA] | None = None, + name: str | None = None, + labels: dict[str, Incomplete] | None = None, + signing_ca_cert: str | None = None, + signing_ca_key: str | None = None, + ca_force_rotate: int | None = None, + autolock_managers: bool | None = None, + log_driver: DriverConfig | None = None, ) -> None: ... 
class SwarmExternalCA(dict[str, Any]): def __init__( - self, url, protocol: Incomplete | None = None, options: Incomplete | None = None, ca_cert: Incomplete | None = None + self, + url: str, + protocol: str | None = None, + options: dict[Incomplete, Incomplete] | None = None, + ca_cert: str | None = None, ) -> None: ... diff --git a/stubs/docker/docker/utils/utils.pyi b/stubs/docker/docker/utils/utils.pyi index 4ac647e39c15..8622c5e04f04 100644 --- a/stubs/docker/docker/utils/utils.pyi +++ b/stubs/docker/docker/utils/utils.pyi @@ -33,7 +33,7 @@ def compare_version(v1: str, v2: str) -> Literal[0, -1, 1]: ... def version_lt(v1: str, v2: str) -> bool: ... def version_gte(v1: str, v2: str) -> bool: ... def convert_port_bindings( - port_bindings: Mapping[object, Incomplete], # keys are converted using str() + port_bindings: Mapping[str, int | list[int] | tuple[str, int] | None], ) -> dict[str, list[dict[str, str]]]: ... @overload def convert_volume_binds(binds: list[_T]) -> list[_T]: ... @@ -61,7 +61,7 @@ def kwargs_from_env(environment: Mapping[str, Incomplete] | None = None) -> _Env def convert_filters(filters) -> str: ... def datetime_to_timestamp(dt: datetime.datetime) -> int: ... def parse_bytes(s: float | str) -> float: ... -def normalize_links(links): ... +def normalize_links(links: dict[str, str] | dict[str, None] | dict[str, str | None] | Iterable[tuple[str, str | None]]): ... def parse_env_file(env_file: FileDescriptorOrPath) -> dict[str, str]: ... def split_command(command: str | _ShlexInstream) -> list[str]: ... def format_environment(environment: Mapping[str, object | None]) -> list[str]: ... From f8bd9b12eea32da376d0c5752aa3f7f53afe8d91 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sat, 3 May 2025 03:19:19 +0400 Subject: [PATCH 278/388] Bump setuptools to 80.1.* (#13915) --- stubs/setuptools/METADATA.toml | 2 +- stubs/setuptools/setuptools/command/install.pyi | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index 07aa716d5608..f41550888041 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,4 +1,4 @@ -version = "80.0.*" +version = "80.1.*" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ diff --git a/stubs/setuptools/setuptools/command/install.pyi b/stubs/setuptools/setuptools/command/install.pyi index c449715c9dbf..2c4e07f318e2 100644 --- a/stubs/setuptools/setuptools/command/install.pyi +++ b/stubs/setuptools/setuptools/command/install.pyi @@ -1,6 +1,6 @@ from _typeshed import Incomplete from collections.abc import Callable -from typing import Any, ClassVar, NoReturn +from typing import Any, ClassVar from setuptools.dist import Distribution @@ -19,5 +19,3 @@ class install(orig.install): path_file: Incomplete extra_dirs: str def handle_extra_path(self): ... - def run(self): ... - def do_egg_install(self) -> NoReturn: ... 
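Before moving on: the widened annotations from the `docker.types` patch above (#13809) can be exercised as below. This is only a sketch; it assumes a reachable Docker daemon, and `nginx:alpine` is an arbitrary image chosen for illustration. `links` accepts the same widened shapes but is left out of the call because it needs an existing container to link to.

```python
import docker

client = docker.from_env()

# ports values may be an int, a list of ints, a (host_ip, host_port) tuple, or
# None (None lets Docker pick an ephemeral host port); group_add accepts any
# iterable mixing str and int.
container = client.containers.run(
    "nginx:alpine",
    detach=True,
    ports={
        "80/tcp": 8080,
        "443/tcp": ("127.0.0.1", 8443),
        "8080/tcp": None,
    },
    group_add=["video", 999],
)
print(container.short_id)
container.stop()
```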
From 92d3c7606e337f0fedfbeba277e599322e6f2b77 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 2 May 2025 20:29:31 -0400 Subject: [PATCH 279/388] Mention rerunning sync_protobuf scripts (#13924) --- stubs/protobuf/METADATA.toml | 1 + stubs/s2clientprotocol/METADATA.toml | 1 + stubs/tensorflow/METADATA.toml | 1 + 3 files changed, 3 insertions(+) diff --git a/stubs/protobuf/METADATA.toml b/stubs/protobuf/METADATA.toml index 04c674e5fbc1..377d93a1b1b0 100644 --- a/stubs/protobuf/METADATA.toml +++ b/stubs/protobuf/METADATA.toml @@ -1,4 +1,5 @@ # Using an exact number in the specifier for scripts/sync_protobuf/google_protobuf.py +# When updating, also re-run the script version = "~=5.29.1" upstream_repository = "https://github.com/protocolbuffers/protobuf" extra_description = "Partially generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 28.1 on [protobuf v29.1](https://github.com/protocolbuffers/protobuf/releases/tag/v29.1) (python `protobuf==5.29.1`)." diff --git a/stubs/s2clientprotocol/METADATA.toml b/stubs/s2clientprotocol/METADATA.toml index 869820a9e7b4..6631acf0ad29 100644 --- a/stubs/s2clientprotocol/METADATA.toml +++ b/stubs/s2clientprotocol/METADATA.toml @@ -1,5 +1,6 @@ # Whenever you update version here, PACKAGE_VERSION should be updated # in scripts/sync_protobuf/s2clientprotocol.py and vice-versa. +# When updating, also re-run the script version = "5.*" upstream_repository = "https://github.com/Blizzard/s2client-proto" requires = ["types-protobuf"] diff --git a/stubs/tensorflow/METADATA.toml b/stubs/tensorflow/METADATA.toml index 94d4b9f8a08b..a207ca05601a 100644 --- a/stubs/tensorflow/METADATA.toml +++ b/stubs/tensorflow/METADATA.toml @@ -1,4 +1,5 @@ # Using an exact number in the specifier for scripts/sync_protobuf/tensorflow.py +# When updating, also re-run the script version = "~=2.18.0" upstream_repository = "https://github.com/tensorflow/tensorflow" # requires a version of numpy with a `py.typed` file From c582c15aaebebf66cc782da393f7b3bac0e7cfd8 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sat, 3 May 2025 05:20:00 +0400 Subject: [PATCH 280/388] Bump protobuf to ~=6.30.2 (#13922) Co-authored-by: Avasam --- stubs/protobuf/@tests/stubtest_allowlist.txt | 4 - stubs/protobuf/METADATA.toml | 4 +- stubs/protobuf/google/protobuf/__init__.pyi | 4 +- .../google/protobuf/descriptor_pb2.pyi | 79 ++++++++++++++++--- .../google/protobuf/internal/containers.pyi | 2 + .../google/protobuf/internal/decoder.pyi | 2 - .../google/protobuf/message_factory.pyi | 2 - stubs/protobuf/google/protobuf/reflection.pyi | 3 - stubs/protobuf/google/protobuf/service.pyi | 39 --------- .../protobuf/google/protobuf/wrappers_pb2.pyi | 42 +++++++++- 10 files changed, 113 insertions(+), 68 deletions(-) delete mode 100644 stubs/protobuf/google/protobuf/service.pyi diff --git a/stubs/protobuf/@tests/stubtest_allowlist.txt b/stubs/protobuf/@tests/stubtest_allowlist.txt index 6913cb4129a7..1d7f7f7a98d6 100644 --- a/stubs/protobuf/@tests/stubtest_allowlist.txt +++ b/stubs/protobuf/@tests/stubtest_allowlist.txt @@ -16,10 +16,6 @@ google.protobuf.message.Message.Extensions # Has *args that would fail at runtime with any positional argument google.protobuf.internal.containers.BaseContainer.sort -# Stubbed as static method, but actually exists as a property that's -# a function. Typeshed's typing is more useful -google.protobuf.service.Service.GetDescriptor - # These are deliberately omitted in the stub. 
# The classes can't be constructed directly anyway, # so the signatures of their constructors are somewhat irrelevant. diff --git a/stubs/protobuf/METADATA.toml b/stubs/protobuf/METADATA.toml index 377d93a1b1b0..a7416507152f 100644 --- a/stubs/protobuf/METADATA.toml +++ b/stubs/protobuf/METADATA.toml @@ -1,8 +1,8 @@ # Using an exact number in the specifier for scripts/sync_protobuf/google_protobuf.py # When updating, also re-run the script -version = "~=5.29.1" +version = "~=6.30.2" upstream_repository = "https://github.com/protocolbuffers/protobuf" -extra_description = "Partially generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 28.1 on [protobuf v29.1](https://github.com/protocolbuffers/protobuf/releases/tag/v29.1) (python `protobuf==5.29.1`)." +extra_description = "Partially generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 29.0 on [protobuf v30.2](https://github.com/protocolbuffers/protobuf/releases/tag/v30.2) (python `protobuf==6.30.2`)." partial_stub = true [tool.stubtest] diff --git a/stubs/protobuf/google/protobuf/__init__.pyi b/stubs/protobuf/google/protobuf/__init__.pyi index bda5b5a7f4cc..c5dd95466063 100644 --- a/stubs/protobuf/google/protobuf/__init__.pyi +++ b/stubs/protobuf/google/protobuf/__init__.pyi @@ -1 +1,3 @@ -__version__: str +from typing import Final + +__version__: Final[str] diff --git a/stubs/protobuf/google/protobuf/descriptor_pb2.pyi b/stubs/protobuf/google/protobuf/descriptor_pb2.pyi index ddf523d56c8b..6ff2ce84e0b2 100644 --- a/stubs/protobuf/google/protobuf/descriptor_pb2.pyi +++ b/stubs/protobuf/google/protobuf/descriptor_pb2.pyi @@ -151,9 +151,16 @@ class FileDescriptorProto(google.protobuf.message.Message): The supported values are "proto2", "proto3", and "editions". If `edition` is present, this value must be "editions". + WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. """ edition: global___Edition.ValueType - """The edition of the proto file.""" + """The edition of the proto file. + WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. + """ @property def dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """Names of files imported by this file.""" @@ -982,7 +989,11 @@ class FileOptions(google.protobuf.message.Message): """ @property def features(self) -> global___FeatureSet: - """Any features defined in the specific edition.""" + """Any features defined in the specific edition. + WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. + """ @property def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: @@ -1099,7 +1110,11 @@ class MessageOptions(google.protobuf.message.Message): """ @property def features(self) -> global___FeatureSet: - """Any features defined in the specific edition.""" + """Any features defined in the specific edition. 
+ WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. + """ @property def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: @@ -1378,7 +1393,11 @@ class FieldOptions(google.protobuf.message.Message): def edition_defaults(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FieldOptions.EditionDefault]: ... @property def features(self) -> global___FeatureSet: - """Any features defined in the specific edition.""" + """Any features defined in the specific edition. + WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. + """ @property def feature_support(self) -> global___FieldOptions.FeatureSupport: ... @@ -1417,7 +1436,11 @@ class OneofOptions(google.protobuf.message.Message): UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int @property def features(self) -> global___FeatureSet: - """Any features defined in the specific edition.""" + """Any features defined in the specific edition. + WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. + """ @property def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: @@ -1463,7 +1486,11 @@ class EnumOptions(google.protobuf.message.Message): """ @property def features(self) -> global___FeatureSet: - """Any features defined in the specific edition.""" + """Any features defined in the specific edition. + WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. + """ @property def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: @@ -1505,7 +1532,11 @@ class EnumValueOptions(google.protobuf.message.Message): """ @property def features(self) -> global___FeatureSet: - """Any features defined in the specific edition.""" + """Any features defined in the specific edition. + WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. + """ @property def feature_support(self) -> global___FieldOptions.FeatureSupport: @@ -1549,7 +1580,11 @@ class ServiceOptions(google.protobuf.message.Message): """ @property def features(self) -> global___FeatureSet: - """Any features defined in the specific edition.""" + """Any features defined in the specific edition. + WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. 
+ """ @property def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: @@ -1613,7 +1648,11 @@ class MethodOptions(google.protobuf.message.Message): idempotency_level: global___MethodOptions.IdempotencyLevel.ValueType @property def features(self) -> global___FeatureSet: - """Any features defined in the specific edition.""" + """Any features defined in the specific edition. + WARNING: This field should only be used by protobuf plugins or special + cases like the proto compiler. Other uses are discouraged and + developers should rely on the protoreflect APIs for their client language. + """ @property def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: @@ -1809,18 +1848,35 @@ class FeatureSet(google.protobuf.message.Message): ALLOW: FeatureSet.JsonFormat.ValueType # 1 LEGACY_BEST_EFFORT: FeatureSet.JsonFormat.ValueType # 2 + class _EnforceNamingStyle: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _EnforceNamingStyleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._EnforceNamingStyle.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + ENFORCE_NAMING_STYLE_UNKNOWN: FeatureSet._EnforceNamingStyle.ValueType # 0 + STYLE2024: FeatureSet._EnforceNamingStyle.ValueType # 1 + STYLE_LEGACY: FeatureSet._EnforceNamingStyle.ValueType # 2 + + class EnforceNamingStyle(_EnforceNamingStyle, metaclass=_EnforceNamingStyleEnumTypeWrapper): ... + ENFORCE_NAMING_STYLE_UNKNOWN: FeatureSet.EnforceNamingStyle.ValueType # 0 + STYLE2024: FeatureSet.EnforceNamingStyle.ValueType # 1 + STYLE_LEGACY: FeatureSet.EnforceNamingStyle.ValueType # 2 + FIELD_PRESENCE_FIELD_NUMBER: builtins.int ENUM_TYPE_FIELD_NUMBER: builtins.int REPEATED_FIELD_ENCODING_FIELD_NUMBER: builtins.int UTF8_VALIDATION_FIELD_NUMBER: builtins.int MESSAGE_ENCODING_FIELD_NUMBER: builtins.int JSON_FORMAT_FIELD_NUMBER: builtins.int + ENFORCE_NAMING_STYLE_FIELD_NUMBER: builtins.int field_presence: global___FeatureSet.FieldPresence.ValueType enum_type: global___FeatureSet.EnumType.ValueType repeated_field_encoding: global___FeatureSet.RepeatedFieldEncoding.ValueType utf8_validation: global___FeatureSet.Utf8Validation.ValueType message_encoding: global___FeatureSet.MessageEncoding.ValueType json_format: global___FeatureSet.JsonFormat.ValueType + enforce_naming_style: global___FeatureSet.EnforceNamingStyle.ValueType def __init__( self, *, @@ -1830,9 +1886,10 @@ class FeatureSet(google.protobuf.message.Message): utf8_validation: global___FeatureSet.Utf8Validation.ValueType | None = ..., message_encoding: global___FeatureSet.MessageEncoding.ValueType | None = ..., json_format: global___FeatureSet.JsonFormat.ValueType | None = ..., + enforce_naming_style: global___FeatureSet.EnforceNamingStyle.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["enum_type", b"enum_type", "field_presence", b"field_presence", "json_format", b"json_format", "message_encoding", b"message_encoding", "repeated_field_encoding", b"repeated_field_encoding", "utf8_validation", b"utf8_validation"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["enum_type", b"enum_type", "field_presence", b"field_presence", "json_format", b"json_format", "message_encoding", b"message_encoding", "repeated_field_encoding", b"repeated_field_encoding", "utf8_validation", b"utf8_validation"]) -> None: ... + def HasField(self, field_name: typing.Literal["enforce_naming_style", b"enforce_naming_style", "enum_type", b"enum_type", "field_presence", b"field_presence", "json_format", b"json_format", "message_encoding", b"message_encoding", "repeated_field_encoding", b"repeated_field_encoding", "utf8_validation", b"utf8_validation"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["enforce_naming_style", b"enforce_naming_style", "enum_type", b"enum_type", "field_presence", b"field_presence", "json_format", b"json_format", "message_encoding", b"message_encoding", "repeated_field_encoding", b"repeated_field_encoding", "utf8_validation", b"utf8_validation"]) -> None: ... global___FeatureSet = FeatureSet diff --git a/stubs/protobuf/google/protobuf/internal/containers.pyi b/stubs/protobuf/google/protobuf/internal/containers.pyi index e660140ac0d4..75261371607e 100644 --- a/stubs/protobuf/google/protobuf/internal/containers.pyi +++ b/stubs/protobuf/google/protobuf/internal/containers.pyi @@ -73,6 +73,7 @@ class ScalarMap(MutableMapping[_K, _ScalarV]): def get(self, key: _K, default: None = None) -> _ScalarV | None: ... @overload def get(self, key: _K, default: _ScalarV | _T) -> _ScalarV | _T: ... + def setdefault(self, key: _K, value: _ScalarV | None = None) -> _ScalarV: ... def MergeFrom(self, other: Self): ... def InvalidateIterators(self) -> None: ... def GetEntryClass(self) -> GeneratedProtocolMessageType: ... @@ -96,6 +97,7 @@ class MessageMap(MutableMapping[_K, _MessageV]): @overload def get(self, key: _K, default: _MessageV | _T) -> _MessageV | _T: ... def get_or_create(self, key: _K) -> _MessageV: ... + def setdefault(self, key: _K, value: _MessageV | None = None) -> _MessageV: ... def MergeFrom(self, other: Self): ... def InvalidateIterators(self) -> None: ... def GetEntryClass(self) -> GeneratedProtocolMessageType: ... diff --git a/stubs/protobuf/google/protobuf/internal/decoder.pyi b/stubs/protobuf/google/protobuf/internal/decoder.pyi index ce74e9318c60..94ed7f615360 100644 --- a/stubs/protobuf/google/protobuf/internal/decoder.pyi +++ b/stubs/protobuf/google/protobuf/internal/decoder.pyi @@ -59,5 +59,3 @@ MESSAGE_SET_ITEM_TAG: bytes def MessageSetItemDecoder(descriptor: Descriptor) -> _Decoder: ... def MapDecoder(field_descriptor, new_default, is_message_map) -> _Decoder: ... - -SkipField: Any diff --git a/stubs/protobuf/google/protobuf/message_factory.pyi b/stubs/protobuf/google/protobuf/message_factory.pyi index 518e1251955d..6422284aaad3 100644 --- a/stubs/protobuf/google/protobuf/message_factory.pyi +++ b/stubs/protobuf/google/protobuf/message_factory.pyi @@ -9,8 +9,6 @@ from google.protobuf.message import Message class MessageFactory: pool: Any def __init__(self, pool: DescriptorPool | None = None) -> None: ... - def GetPrototype(self, descriptor: Descriptor) -> type[Message]: ... - def GetMessages(self, files: Iterable[str]) -> dict[str, type[Message]]: ... def GetMessageClass(descriptor: Descriptor) -> type[Message]: ... def GetMessageClassesForFiles(files: Iterable[str], pool: DescriptorPool) -> dict[str, type[Message]]: ... 
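With `MessageFactory.GetPrototype()` and `GetMessages()` dropped from the stub above, the module-level helpers that remain are the way to resolve a message class from a descriptor. A minimal sketch, using a descriptor that ships with protobuf itself:

```python
from google.protobuf import descriptor_pb2, message_factory

# Resolve the class for a message descriptor through the module-level helper
# kept in the stub, instead of the removed MessageFactory methods.
file_proto_cls = message_factory.GetMessageClass(
    descriptor_pb2.FileDescriptorProto.DESCRIPTOR
)

msg = file_proto_cls(name="example.proto", syntax="proto3")
print(type(msg).__name__, msg.name)
```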
diff --git a/stubs/protobuf/google/protobuf/reflection.pyi b/stubs/protobuf/google/protobuf/reflection.pyi index 5f7822363b1e..2836b3fcf11f 100644 --- a/stubs/protobuf/google/protobuf/reflection.pyi +++ b/stubs/protobuf/google/protobuf/reflection.pyi @@ -1,5 +1,2 @@ class GeneratedProtocolMessageType(type): def __new__(cls, name, bases, dictionary): ... - -def ParseMessage(descriptor, byte_str): ... -def MakeClass(descriptor): ... diff --git a/stubs/protobuf/google/protobuf/service.pyi b/stubs/protobuf/google/protobuf/service.pyi deleted file mode 100644 index 1123b6134ddc..000000000000 --- a/stubs/protobuf/google/protobuf/service.pyi +++ /dev/null @@ -1,39 +0,0 @@ -from collections.abc import Callable -from concurrent.futures import Future - -from google.protobuf.descriptor import MethodDescriptor, ServiceDescriptor -from google.protobuf.message import Message - -class RpcException(Exception): ... - -class Service: - @staticmethod - def GetDescriptor() -> ServiceDescriptor: ... - def CallMethod( - self, - method_descriptor: MethodDescriptor, - rpc_controller: RpcController, - request: Message, - done: Callable[[Message], None] | None, - ) -> Future[Message] | None: ... - def GetRequestClass(self, method_descriptor: MethodDescriptor) -> type[Message]: ... - def GetResponseClass(self, method_descriptor: MethodDescriptor) -> type[Message]: ... - -class RpcController: - def Reset(self) -> None: ... - def Failed(self) -> bool: ... - def ErrorText(self) -> str | None: ... - def StartCancel(self) -> None: ... - def SetFailed(self, reason: str) -> None: ... - def IsCanceled(self) -> bool: ... - def NotifyOnCancel(self, callback: Callable[[], None]) -> None: ... - -class RpcChannel: - def CallMethod( - self, - method_descriptor: MethodDescriptor, - rpc_controller: RpcController, - request: Message, - response_class: type[Message], - done: Callable[[Message], None] | None, - ) -> Future[Message] | None: ... diff --git a/stubs/protobuf/google/protobuf/wrappers_pb2.pyi b/stubs/protobuf/google/protobuf/wrappers_pb2.pyi index f3c923fea88b..24aae899790d 100644 --- a/stubs/protobuf/google/protobuf/wrappers_pb2.pyi +++ b/stubs/protobuf/google/protobuf/wrappers_pb2.pyi @@ -31,10 +31,17 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -Wrappers for primitive (non-message) types. These types are useful -for embedding primitives in the `google.protobuf.Any` type and for places -where we need to distinguish between the absence of a primitive -typed field and its default value. +Wrappers for primitive (non-message) types. These types were needed +for legacy reasons and are not recommended for use in new APIs. + +Historically these wrappers were useful to have presence on proto3 primitive +fields, but proto3 syntax has been updated to support the `optional` keyword. +Using that keyword is now the strongly preferred way to add presence to +proto3 primitive fields. + +A secondary usecase was to embed primitives in the `google.protobuf.Any` +type: it is now recommended that you embed your value in your own wrapper +message which can be specifically documented. These wrappers have no meaningful use within repeated fields as they lack the ability to detect presence on individual elements. @@ -55,6 +62,9 @@ class DoubleValue(google.protobuf.message.Message): """Wrapper message for `double`. The JSON representation for `DoubleValue` is JSON number. 
+ + Not recommended for use in new APIs, but still useful for legacy APIs and + has no plan to be removed. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -76,6 +86,9 @@ class FloatValue(google.protobuf.message.Message): """Wrapper message for `float`. The JSON representation for `FloatValue` is JSON number. + + Not recommended for use in new APIs, but still useful for legacy APIs and + has no plan to be removed. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -97,6 +110,9 @@ class Int64Value(google.protobuf.message.Message): """Wrapper message for `int64`. The JSON representation for `Int64Value` is JSON string. + + Not recommended for use in new APIs, but still useful for legacy APIs and + has no plan to be removed. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -118,6 +134,9 @@ class UInt64Value(google.protobuf.message.Message): """Wrapper message for `uint64`. The JSON representation for `UInt64Value` is JSON string. + + Not recommended for use in new APIs, but still useful for legacy APIs and + has no plan to be removed. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -139,6 +158,9 @@ class Int32Value(google.protobuf.message.Message): """Wrapper message for `int32`. The JSON representation for `Int32Value` is JSON number. + + Not recommended for use in new APIs, but still useful for legacy APIs and + has no plan to be removed. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -160,6 +182,9 @@ class UInt32Value(google.protobuf.message.Message): """Wrapper message for `uint32`. The JSON representation for `UInt32Value` is JSON number. + + Not recommended for use in new APIs, but still useful for legacy APIs and + has no plan to be removed. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -181,6 +206,9 @@ class BoolValue(google.protobuf.message.Message): """Wrapper message for `bool`. The JSON representation for `BoolValue` is JSON `true` and `false`. + + Not recommended for use in new APIs, but still useful for legacy APIs and + has no plan to be removed. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -202,6 +230,9 @@ class StringValue(google.protobuf.message.Message): """Wrapper message for `string`. The JSON representation for `StringValue` is JSON string. + + Not recommended for use in new APIs, but still useful for legacy APIs and + has no plan to be removed. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -223,6 +254,9 @@ class BytesValue(google.protobuf.message.Message): """Wrapper message for `bytes`. The JSON representation for `BytesValue` is JSON string. + + Not recommended for use in new APIs, but still useful for legacy APIs and + has no plan to be removed. 
""" DESCRIPTOR: google.protobuf.descriptor.Descriptor From 473c2f02dc20ecd42e6afc12b727d406e3a2fb92 Mon Sep 17 00:00:00 2001 From: Hunter Hogan Date: Sat, 3 May 2025 05:05:20 -0500 Subject: [PATCH 281/388] Re-export `ast.Match` and `ast.TypeAlias` from `_ast.pyi` (#13926) This matches what we do for all other AST nodes --- stdlib/_ast.pyi | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/stdlib/_ast.pyi b/stdlib/_ast.pyi index bc0ebd9d8a0f..00c6b357f7d8 100644 --- a/stdlib/_ast.pyi +++ b/stdlib/_ast.pyi @@ -111,13 +111,20 @@ from ast import ( from typing import Literal if sys.version_info >= (3, 12): - from ast import ParamSpec as ParamSpec, TypeVar as TypeVar, TypeVarTuple as TypeVarTuple, type_param as type_param + from ast import ( + ParamSpec as ParamSpec, + TypeAlias as TypeAlias, + TypeVar as TypeVar, + TypeVarTuple as TypeVarTuple, + type_param as type_param, + ) if sys.version_info >= (3, 11): from ast import TryStar as TryStar if sys.version_info >= (3, 10): from ast import ( + Match as Match, MatchAs as MatchAs, MatchClass as MatchClass, MatchMapping as MatchMapping, From cbc0f4e36911818b005890f27477d0296a400fb6 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 5 May 2025 04:51:31 +0400 Subject: [PATCH 282/388] Bump setuptools to 80.3.* (#13938) --- stubs/setuptools/METADATA.toml | 2 +- .../setuptools/command/easy_install.pyi | 152 +----------------- stubs/setuptools/setuptools/package_index.pyi | 99 ------------ 3 files changed, 7 insertions(+), 246 deletions(-) delete mode 100644 stubs/setuptools/setuptools/package_index.pyi diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index f41550888041..9e2fd9acc0a1 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,4 +1,4 @@ -version = "80.1.*" +version = "80.3.*" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ diff --git a/stubs/setuptools/setuptools/command/easy_install.pyi b/stubs/setuptools/setuptools/command/easy_install.pyi index 0f79115b0e3b..101bc1d69449 100644 --- a/stubs/setuptools/setuptools/command/easy_install.pyi +++ b/stubs/setuptools/setuptools/command/easy_install.pyi @@ -1,151 +1,11 @@ -from _typeshed import Incomplete -from collections.abc import Iterable, Iterator -from typing import Any, ClassVar, Literal, NoReturn, TypedDict -from typing_extensions import Self +from abc import abstractmethod -from pkg_resources import Distribution, Environment -from setuptools.package_index import PackageIndex - -from .. 
import Command, SetuptoolsDeprecationWarning - -__all__ = ["PthDistributions", "easy_install", "extract_wininst_cfg", "get_exe_prefixes"] +from setuptools import Command class easy_install(Command): - description: str - command_consumes_arguments: bool - user_options: ClassVar[list[tuple[str, str | None, str]]] - boolean_options: ClassVar[list[str]] - negative_opt: ClassVar[dict[str, str]] - create_index: ClassVar[type[PackageIndex]] - user: bool - zip_ok: Incomplete - install_dir: Incomplete - index_url: Incomplete - find_links: Incomplete - build_directory: Incomplete - args: Incomplete - optimize: Incomplete - upgrade: Incomplete - editable: Incomplete - root: Incomplete - version: Incomplete - install_purelib: Incomplete - install_platlib: Incomplete - install_headers: Incomplete - install_lib: Incomplete - install_scripts: Incomplete - install_data: Incomplete - install_base: Incomplete - install_platbase: Incomplete - install_userbase: str | None - install_usersite: str | None - no_find_links: Incomplete - package_index: Incomplete - pth_file: Incomplete - site_dirs: Incomplete - installed_projects: Incomplete - verbose: bool | Literal[0, 1] + @abstractmethod def initialize_options(self) -> None: ... - def delete_blockers(self, blockers) -> None: ... - config_vars: dict[str, Any] - script_dir: Incomplete - all_site_dirs: list[str] - shadow_path: list[str] - local_index: Environment - outputs: list[str] + @abstractmethod def finalize_options(self) -> None: ... - def expand_basedirs(self) -> None: ... - def expand_dirs(self) -> None: ... - def run(self, show_deprecation: bool = True) -> NoReturn: ... - def pseudo_tempname(self): ... - def warn_deprecated_options(self) -> None: ... - def check_site_dir(self) -> None: ... - def cant_write_to_target(self) -> NoReturn: ... - def check_pth_processing(self): ... - def install_egg_scripts(self, dist) -> None: ... - def add_output(self, path) -> None: ... - def not_editable(self, spec) -> None: ... - def check_editable(self, spec) -> None: ... - def easy_install(self, spec, deps: bool = False) -> Distribution | None: ... - def install_item(self, spec, download, tmpdir, deps, install_needed: bool = False) -> Distribution | None: ... - def select_scheme(self, name) -> None: ... - def process_distribution(self, requirement, dist, deps: bool = True, *info) -> None: ... - def should_unzip(self, dist) -> bool: ... - def maybe_move(self, spec, dist_filename, setup_base): ... - def install_wrapper_scripts(self, dist) -> None: ... - def install_script(self, dist, script_name, script_text, dev_path: Incomplete | None = None) -> None: ... - def write_script(self, script_name, contents, mode: str = "t", blockers=()) -> None: ... - def install_eggs(self, spec, dist_filename, tmpdir) -> list[Distribution]: ... - def egg_distribution(self, egg_path): ... - def install_egg(self, egg_path, tmpdir): ... - def install_exe(self, dist_filename, tmpdir): ... - def exe_to_egg(self, dist_filename, egg_tmp) -> None: ... - def install_wheel(self, wheel_path, tmpdir): ... - def installation_report(self, req, dist, what: str = "Installed") -> str: ... - def report_editable(self, spec, setup_script): ... - def run_setup(self, setup_script, setup_base, args) -> NoReturn: ... - def build_and_install(self, setup_script, setup_base): ... - def update_pth(self, dist) -> None: ... - def unpack_progress(self, src, dst): ... - def unpack_and_compile(self, egg_path, destination): ... - def byte_compile(self, to_compile) -> None: ... - def create_home_path(self) -> None: ... 
- INSTALL_SCHEMES: ClassVar[dict[str, dict[str, str]]] - DEFAULT_SCHEME: ClassVar[dict[str, str]] - -def extract_wininst_cfg(dist_filename): ... -def get_exe_prefixes(exe_filename): ... - -class PthDistributions(Environment): - dirty: bool - filename: Incomplete - sitedirs: list[str] - basedir: Incomplete - paths: list[str] - def __init__(self, filename, sitedirs=()) -> None: ... - def save(self) -> None: ... - def add(self, dist) -> None: ... - def remove(self, dist) -> None: ... - def make_relative(self, path): ... - -class RewritePthDistributions(PthDistributions): - prelude: str - postlude: str - -class _SplitArgs(TypedDict, total=False): - comments: bool - posix: bool - -class CommandSpec(list[str]): - options: list[str] - split_args: ClassVar[_SplitArgs] - @classmethod - def best(cls) -> type[CommandSpec]: ... - @classmethod - def from_param(cls, param: Self | str | Iterable[str] | None) -> Self: ... - @classmethod - def from_environment(cls) -> CommandSpec: ... - @classmethod - def from_string(cls, string: str) -> CommandSpec: ... - def install_options(self, script_text: str) -> None: ... - def as_header(self) -> str: ... - -class WindowsCommandSpec(CommandSpec): ... - -class ScriptWriter: - template: ClassVar[str] - command_spec_class: ClassVar[type[CommandSpec]] - @classmethod - def get_args(cls, dist, header: Incomplete | None = None) -> Iterator[tuple[str, str]]: ... - @classmethod - def best(cls) -> type[ScriptWriter]: ... - @classmethod - def get_header(cls, script_text: str = "", executable: str | CommandSpec | Iterable[str] | None = None) -> str: ... - -class WindowsScriptWriter(ScriptWriter): - command_spec_class: ClassVar[type[WindowsCommandSpec]] - @classmethod - def best(cls) -> type[WindowsScriptWriter]: ... - -class WindowsExecutableLauncherWriter(WindowsScriptWriter): ... -class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): ... + @abstractmethod + def run(self) -> None: ... diff --git a/stubs/setuptools/setuptools/package_index.pyi b/stubs/setuptools/setuptools/package_index.pyi deleted file mode 100644 index 37871ddfe342..000000000000 --- a/stubs/setuptools/setuptools/package_index.pyi +++ /dev/null @@ -1,99 +0,0 @@ -import configparser -import urllib.request -from _typeshed import Incomplete -from collections.abc import Generator -from hashlib import _Hash -from re import Pattern -from typing import ClassVar -from typing_extensions import NamedTuple - -from pkg_resources import Distribution, Environment - -__all__ = ["PackageIndex", "distros_for_url", "parse_bdist_wininst", "interpret_distro_name"] - -def parse_bdist_wininst(name): ... -def distros_for_url(url, metadata: Incomplete | None = None) -> Generator[Distribution]: ... -def distros_for_location( - location, basename, metadata: Incomplete | None = None -) -> list[Distribution] | Generator[Distribution]: ... -def interpret_distro_name( - location, basename, metadata, py_version: Incomplete | None = None, precedence=1, platform: Incomplete | None = None -) -> Generator[Distribution]: ... - -class ContentChecker: - def feed(self, block) -> None: ... - def is_valid(self): ... - def report(self, reporter, template) -> None: ... - -class HashChecker(ContentChecker): - pattern: ClassVar[Pattern[str]] - hash_name: Incomplete - hash: _Hash - expected: Incomplete - def __init__(self, hash_name, expected) -> None: ... - @classmethod - def from_url(cls, url): ... - def feed(self, block) -> None: ... - def is_valid(self): ... - def report(self, reporter, template): ... 
- -class PackageIndex(Environment): - index_url: str - scanned_urls: dict[Incomplete, Incomplete] - fetched_urls: dict[Incomplete, Incomplete] - package_pages: dict[Incomplete, Incomplete] - allows: Incomplete - to_scan: list[Incomplete] - opener = urllib.request.urlopen - def __init__( - self, - index_url: str = "https://pypi.org/simple/", - hosts=("*",), - ca_bundle: Incomplete | None = None, - verify_ssl: bool = True, - *args, - **kw, - ) -> None: ... - def process_url(self, url, retrieve: bool = False) -> None: ... - def process_filename(self, fn, nested: bool = False) -> None: ... - def url_ok(self, url, fatal: bool = False): ... - def scan_egg_links(self, search_path) -> None: ... - def scan_egg_link(self, path, entry) -> None: ... - def process_index(self, url, page): ... - def need_version_info(self, url) -> None: ... - def scan_all(self, msg: Incomplete | None = None, *args) -> None: ... - def find_packages(self, requirement) -> None: ... - def obtain(self, requirement, installer: Incomplete | None = None): ... - def check_hash(self, checker, filename, tfp) -> None: ... - def add_find_links(self, urls) -> None: ... - def prescan(self) -> None: ... - def not_found_in_index(self, requirement) -> None: ... - def download(self, spec, tmpdir): ... - def fetch_distribution( - self, - requirement, - tmpdir, - force_scan: bool = False, - source: bool = False, - develop_ok: bool = False, - local_index: Incomplete | None = None, - ): ... - def fetch(self, requirement, tmpdir, force_scan: bool = False, source: bool = False): ... - def gen_setup(self, filename, fragment, tmpdir): ... - dl_blocksize: int - def reporthook(self, url, filename, blocknum, blksize, size) -> None: ... - def open_url(self, url, warning: Incomplete | None = None): ... - def scan_url(self, url) -> None: ... - def debug(self, msg, *args) -> None: ... - def info(self, msg, *args) -> None: ... - def warn(self, msg, *args) -> None: ... - -class Credential(NamedTuple): - username: str - password: str - -class PyPIConfig(configparser.RawConfigParser): - def __init__(self) -> None: ... - @property - def creds_by_repository(self): ... - def find_credential(self, url): ... 
From f890e83a44fe1f614b5c2781d8314a616b7ea750 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 5 May 2025 09:07:30 -0400 Subject: [PATCH 283/388] `pytype_test`: support either slashes in path params (#13943) --- tests/pytype_test.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/pytype_test.py b/tests/pytype_test.py index 7e3eeb7354bb..71b313641517 100755 --- a/tests/pytype_test.py +++ b/tests/pytype_test.py @@ -19,6 +19,7 @@ if TYPE_CHECKING: assert sys.platform != "win32", "pytype isn't yet installed in CI, but wheels can be built on Windows" + from _typeshed import StrPath if sys.version_info >= (3, 13): print("pytype does not support Python 3.13+ yet.", file=sys.stderr) sys.exit(1) @@ -30,6 +31,7 @@ import os import traceback from collections.abc import Iterable, Sequence +from pathlib import Path # pytype is not py.typed https://github.com/google/pytype/issues/1325 from pytype import config as pytype_config, load_pytd # type: ignore[import] @@ -94,21 +96,19 @@ def run_pytype(*, filename: str, python_version: str, missing_modules: Iterable[ return stderr -def _get_relative(filename: str) -> str: - top = 0 +def _get_relative(filename: StrPath) -> Path: + filepath = Path(filename) for d in TYPESHED_SUBDIRS: try: - top = filename.index(d + os.path.sep) + return filepath.absolute().relative_to(Path(d).absolute().parent) except ValueError: continue - else: - break - return filename[top:] + raise ValueError(f"{filepath} not relative to {TYPESHED_SUBDIRS}") def _get_module_name(filename: str) -> str: """Convert a filename {subdir}/m.n/module/foo to module.foo.""" - parts = _get_relative(filename).split(os.path.sep) + parts = _get_relative(filename).parts if parts[0] == "stdlib": module_parts = parts[1:] else: @@ -134,7 +134,7 @@ def determine_files_to_test(*, paths: Sequence[str]) -> list[str]: stdlib_module_versions = parse_stdlib_versions_file() files = [] for f in sorted(filenames): - if _get_relative(f) in exclude_list: + if _get_relative(f).as_posix() in exclude_list: continue if not _is_supported_stdlib_version(stdlib_module_versions, f): continue @@ -154,7 +154,7 @@ def find_stubs_in_paths(paths: Sequence[str]) -> list[str]: def _is_supported_stdlib_version(module_versions: SupportedVersionsDict, filename: str) -> bool: - parts = _get_relative(filename).split(os.path.sep) + parts = _get_relative(filename).parts if parts[0] != "stdlib": return True module_name = _get_module_name(filename) @@ -227,17 +227,17 @@ def run_all_tests(*, files_to_test: Sequence[str], print_stderr: bool, dry_run: missing_modules = get_missing_modules(files_to_test) python_version = f"{sys.version_info.major}.{sys.version_info.minor}" print("Testing files with pytype...") - for i, f in enumerate(files_to_test): + for i, file_to_test in enumerate(files_to_test): if dry_run: stderr = None else: - stderr = run_pytype(filename=f, python_version=python_version, missing_modules=missing_modules) + stderr = run_pytype(filename=file_to_test, python_version=python_version, missing_modules=missing_modules) if stderr: if print_stderr: print(f"\n{stderr}") errors += 1 stacktrace_final_line = stderr.rstrip().rsplit("\n", 1)[-1] - bad.append((_get_relative(f), python_version, stacktrace_final_line)) + bad.append((_get_relative(file_to_test), python_version, stacktrace_final_line)) runs = i + 1 if runs % 25 == 0: From 06f5b18938e3f2b38e3043b609f6539b27fccc2a Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 5 May 2025 09:21:48 -0400 Subject: [PATCH 284/388] stubtest-complete 
grpcio plugins (#13932) --- pyrightconfig.stricter.json | 5 +- stubs/grpcio/@tests/stubtest_allowlist.txt | 5 - stubs/grpcio/grpc/__init__.pyi | 162 +++--- stubs/grpcio/grpc/aio/__init__.pyi | 99 ++-- stubs/grpcio/grpc_channelz/__init__.pyi | 3 - stubs/grpcio/grpc_channelz/v1/__init__.pyi | 3 - stubs/grpcio/grpc_channelz/v1/_async.pyi | 19 + stubs/grpcio/grpc_channelz/v1/channelz.pyi | 7 +- .../grpcio/grpc_channelz/v1/channelz_pb2.pyi | 485 +++++++++++++++++- .../grpc_channelz/v1/channelz_pb2_grpc.pyi | 120 ++++- stubs/grpcio/grpc_health/__init__.pyi | 3 - stubs/grpcio/grpc_health/v1/__init__.pyi | 3 - stubs/grpcio/grpc_health/v1/health.pyi | 43 +- stubs/grpcio/grpc_health/v1/health_pb2.pyi | 25 +- .../grpcio/grpc_health/v1/health_pb2_grpc.pyi | 43 +- stubs/grpcio/grpc_reflection/__init__.pyi | 3 - .../grpc_reflection/v1alpha/__init__.pyi | 3 - .../grpcio/grpc_reflection/v1alpha/_async.pyi | 11 + .../grpcio/grpc_reflection/v1alpha/_base.pyi | 8 +- .../proto_reflection_descriptor_database.pyi | 11 + .../grpc_reflection/v1alpha/reflection.pyi | 19 +- .../v1alpha/reflection_pb2.pyi | 89 +++- .../v1alpha/reflection_pb2_grpc.pyi | 31 ++ stubs/grpcio/grpc_status/__init__.pyi | 3 - stubs/grpcio/grpc_status/_async.pyi | 5 + stubs/grpcio/grpc_status/rpc_status.pyi | 4 + stubs/protobuf/google/_upb/_message.pyi | 310 +++++++++++ .../google/protobuf/descriptor_database.pyi | 16 + stubs/protobuf/google/protobuf/message.pyi | 4 +- 29 files changed, 1321 insertions(+), 221 deletions(-) create mode 100644 stubs/grpcio/grpc_channelz/v1/_async.pyi create mode 100644 stubs/grpcio/grpc_reflection/v1alpha/_async.pyi create mode 100644 stubs/grpcio/grpc_reflection/v1alpha/proto_reflection_descriptor_database.pyi create mode 100644 stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2_grpc.pyi create mode 100644 stubs/grpcio/grpc_status/_async.pyi create mode 100644 stubs/protobuf/google/_upb/_message.pyi create mode 100644 stubs/protobuf/google/protobuf/descriptor_database.pyi diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 4242a2ba3280..08ff5a63e091 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -45,7 +45,10 @@ "stubs/geopandas", "stubs/google-cloud-ndb", "stubs/grpcio/grpc/__init__.pyi", - "stubs/grpcio/grpc_status/rpc_status.pyi", + "stubs/grpcio/grpc_channelz/v1", + "stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi", + "stubs/grpcio/grpc_reflection/v1alpha", + "stubs/grpcio/grpc_status", "stubs/hdbcli/hdbcli/dbapi.pyi", "stubs/html5lib", "stubs/httplib2", diff --git a/stubs/grpcio/@tests/stubtest_allowlist.txt b/stubs/grpcio/@tests/stubtest_allowlist.txt index dad6fb3e8ec4..3409137cfdce 100644 --- a/stubs/grpcio/@tests/stubtest_allowlist.txt +++ b/stubs/grpcio/@tests/stubtest_allowlist.txt @@ -4,8 +4,3 @@ grpc.RpcError.code grpc.RpcError.details grpc.RpcError.trailing_metadata - -# Error: is inconsistent -# ============================= -# Stub class is incomplete. -grpc_reflection.v1alpha._base.BaseReflectionServicer.__init__ diff --git a/stubs/grpcio/grpc/__init__.pyi b/stubs/grpcio/grpc/__init__.pyi index 8f370e86a0e6..caf5e9884257 100644 --- a/stubs/grpcio/grpc/__init__.pyi +++ b/stubs/grpcio/grpc/__init__.pyi @@ -79,21 +79,21 @@ class Future(abc.ABC, Generic[_TFutureValue]): @abc.abstractmethod def done(self) -> bool: ... @abc.abstractmethod - def exception(self, timeout: float | None = ...) -> Exception | None: ... + def exception(self, timeout: float | None = None) -> Exception | None: ... 
@abc.abstractmethod - def result(self, timeout: float | None = ...) -> _TFutureValue: ... + def result(self, timeout: float | None = None) -> _TFutureValue: ... @abc.abstractmethod def running(self) -> bool: ... # FIXME: unsure of the exact return type here. Is it a traceback.StackSummary? @abc.abstractmethod - def traceback(self, timeout: float | None = ...) -> Any: ... + def traceback(self, timeout: float | None = None): ... # Create Client: -def insecure_channel(target: str, options: _Options | None = ..., compression: Compression | None = ...) -> Channel: ... +def insecure_channel(target: str, options: _Options | None = None, compression: Compression | None = None) -> Channel: ... def secure_channel( - target: str, credentials: ChannelCredentials, options: _Options | None = ..., compression: Compression | None = ... + target: str, credentials: ChannelCredentials, options: _Options | None = None, compression: Compression | None = None ) -> Channel: ... _Interceptor: TypeAlias = ( @@ -108,14 +108,14 @@ def intercept_channel(channel: Channel, *interceptors: _Interceptor[_TRequest, _ # Create Client Credentials: def ssl_channel_credentials( - root_certificates: bytes | None = ..., private_key: bytes | None = ..., certificate_chain: bytes | None = ... + root_certificates: bytes | None = None, private_key: bytes | None = None, certificate_chain: bytes | None = None ) -> ChannelCredentials: ... def local_channel_credentials(local_connect_type: LocalConnectionType = ...) -> ChannelCredentials: ... -def metadata_call_credentials(metadata_plugin: AuthMetadataPlugin, name: str | None = ...) -> CallCredentials: ... +def metadata_call_credentials(metadata_plugin: AuthMetadataPlugin, name: str | None = None) -> CallCredentials: ... def access_token_call_credentials(access_token: str) -> CallCredentials: ... -def alts_channel_credentials(service_accounts: Sequence[str] | None = ...) -> ChannelCredentials: ... +def alts_channel_credentials(service_accounts: Sequence[str] | None = None) -> ChannelCredentials: ... def compute_engine_channel_credentials(call_credentials: CallCredentials) -> ChannelCredentials: ... -def xds_channel_credentials(fallback_credentials: ChannelCredentials | None = ...) -> ChannelCredentials: ... +def xds_channel_credentials(fallback_credentials: ChannelCredentials | None = None) -> ChannelCredentials: ... # GRPC docs say there should be at least two: def composite_call_credentials(creds1: CallCredentials, creds2: CallCredentials, *rest: CallCredentials) -> CallCredentials: ... @@ -129,12 +129,12 @@ def composite_channel_credentials( def server( thread_pool: futures.ThreadPoolExecutor, - handlers: list[GenericRpcHandler[Any, Any]] | None = ..., - interceptors: list[ServerInterceptor[Any, Any]] | None = ..., - options: _Options | None = ..., - maximum_concurrent_rpcs: int | None = ..., - compression: Compression | None = ..., - xds: bool = ..., + handlers: list[GenericRpcHandler[Any, Any]] | None = None, + interceptors: list[ServerInterceptor[Any, Any]] | None = None, + options: _Options | None = None, + maximum_concurrent_rpcs: int | None = None, + compression: Compression | None = None, + xds: bool = False, ) -> Server: ... 
# Create Server Credentials: @@ -143,17 +143,17 @@ _CertificateChainPair: TypeAlias = tuple[bytes, bytes] def ssl_server_credentials( private_key_certificate_chain_pairs: list[_CertificateChainPair], - root_certificates: bytes | None = ..., - require_client_auth: bool = ..., + root_certificates: bytes | None = None, + require_client_auth: bool = False, ) -> ServerCredentials: ... def local_server_credentials(local_connect_type: LocalConnectionType = ...) -> ServerCredentials: ... def ssl_server_certificate_configuration( - private_key_certificate_chain_pairs: list[_CertificateChainPair], root_certificates: bytes | None = ... + private_key_certificate_chain_pairs: list[_CertificateChainPair], root_certificates: bytes | None = None ) -> ServerCertificateConfiguration: ... def dynamic_ssl_server_credentials( initial_certificate_configuration: ServerCertificateConfiguration, certificate_configuration_fetcher: Callable[[], ServerCertificateConfiguration], - require_client_authentication: bool = ..., + require_client_authentication: bool = False, ) -> ServerCredentials: ... def alts_server_credentials() -> ServerCredentials: ... def insecure_server_credentials() -> ServerCredentials: ... @@ -174,23 +174,23 @@ class _Behaviour(Protocol): def unary_unary_rpc_method_handler( behavior: _Behaviour, - request_deserializer: _RequestDeserializer | None = ..., - response_serializer: _ResponseSerializer | None = ..., + request_deserializer: _RequestDeserializer | None = None, + response_serializer: _ResponseSerializer | None = None, ) -> RpcMethodHandler[Any, Any]: ... def unary_stream_rpc_method_handler( behavior: _Behaviour, - request_deserializer: _RequestDeserializer | None = ..., - response_serializer: _ResponseSerializer | None = ..., + request_deserializer: _RequestDeserializer | None = None, + response_serializer: _ResponseSerializer | None = None, ) -> RpcMethodHandler[Any, Any]: ... def stream_unary_rpc_method_handler( behavior: _Behaviour, - request_deserializer: _RequestDeserializer | None = ..., - response_serializer: _ResponseSerializer | None = ..., + request_deserializer: _RequestDeserializer | None = None, + response_serializer: _ResponseSerializer | None = None, ) -> RpcMethodHandler[Any, Any]: ... def stream_stream_rpc_method_handler( behavior: _Behaviour, - request_deserializer: _RequestDeserializer | None = ..., - response_serializer: _ResponseSerializer | None = ..., + request_deserializer: _RequestDeserializer | None = None, + response_serializer: _ResponseSerializer | None = None, ) -> RpcMethodHandler[Any, Any]: ... def method_handlers_generic_handler( service: str, method_handlers: dict[str, RpcMethodHandler[Any, Any]] @@ -248,31 +248,31 @@ class Channel(abc.ABC): def stream_stream( self, method: str, - request_serializer: _RequestSerializer | None = ..., - response_deserializer: _ResponseDeserializer | None = ..., + request_serializer: _RequestSerializer | None = None, + response_deserializer: _ResponseDeserializer | None = None, ) -> StreamStreamMultiCallable[Any, Any]: ... @abc.abstractmethod def stream_unary( self, method: str, - request_serializer: _RequestSerializer | None = ..., - response_deserializer: _ResponseDeserializer | None = ..., + request_serializer: _RequestSerializer | None = None, + response_deserializer: _ResponseDeserializer | None = None, ) -> StreamUnaryMultiCallable[Any, Any]: ... @abc.abstractmethod - def subscribe(self, callback: Callable[[ChannelConnectivity], None], try_to_connect: bool = ...) -> None: ... 
+ def subscribe(self, callback: Callable[[ChannelConnectivity], None], try_to_connect: bool = False) -> None: ... @abc.abstractmethod def unary_stream( self, method: str, - request_serializer: _RequestSerializer | None = ..., - response_deserializer: _ResponseDeserializer | None = ..., + request_serializer: _RequestSerializer | None = None, + response_deserializer: _ResponseDeserializer | None = None, ) -> UnaryStreamMultiCallable[Any, Any]: ... @abc.abstractmethod def unary_unary( self, method: str, - request_serializer: _RequestSerializer | None = ..., - response_deserializer: _ResponseDeserializer | None = ..., + request_serializer: _RequestSerializer | None = None, + response_deserializer: _ResponseDeserializer | None = None, ) -> UnaryUnaryMultiCallable[Any, Any]: ... @abc.abstractmethod def unsubscribe(self, callback: Callable[[ChannelConnectivity], None]) -> None: ... @@ -303,7 +303,7 @@ class Server(abc.ABC): # Block current thread until the server stops. Returns a bool # indicates if the operation times out. Timeout is in seconds. - def wait_for_termination(self, timeout: float | None = ...) -> bool: ... + def wait_for_termination(self, timeout: float | None = None) -> bool: ... # Authentication & Authorization Objects: @@ -538,34 +538,31 @@ class UnaryUnaryMultiCallable(abc.ABC, Generic[_TRequest, _TResponse]): def __call__( self, request: _TRequest, - timeout: float | None = ..., - metadata: _Metadata | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _Metadata | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> _TResponse: ... @abc.abstractmethod def future( self, request: _TRequest, - timeout: float | None = ..., - metadata: _Metadata | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _Metadata | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> _CallFuture[_TResponse]: ... @abc.abstractmethod def with_call( self, request: _TRequest, - timeout: float | None = ..., - metadata: _Metadata | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _Metadata | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, # FIXME: Return value is documented as "The response value for the RPC and a Call value for the RPC"; # this is slightly unclear so this return type is a best-effort guess. ) -> tuple[_TResponse, Call]: ... 
@@ -575,12 +572,11 @@ class UnaryStreamMultiCallable(abc.ABC, Generic[_TRequest, _TResponse]): def __call__( self, request: _TRequest, - timeout: float | None = ..., - metadata: _Metadata | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _Metadata | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> _CallIterator[_TResponse]: ... class StreamUnaryMultiCallable(abc.ABC, Generic[_TRequest, _TResponse]): @@ -588,34 +584,31 @@ class StreamUnaryMultiCallable(abc.ABC, Generic[_TRequest, _TResponse]): def __call__( self, request_iterator: Iterator[_TRequest], - timeout: float | None = ..., - metadata: _Metadata | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _Metadata | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> _TResponse: ... @abc.abstractmethod def future( self, request_iterator: Iterator[_TRequest], - timeout: float | None = ..., - metadata: _Metadata | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _Metadata | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> _CallFuture[_TResponse]: ... @abc.abstractmethod def with_call( self, request_iterator: Iterator[_TRequest], - timeout: float | None = ..., - metadata: _Metadata | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _Metadata | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, # FIXME: Return value is documented as "The response value for the RPC and a Call value for the RPC"; # this is slightly unclear so this return type is a best-effort guess. ) -> tuple[_TResponse, Call]: ... @@ -625,12 +618,11 @@ class StreamStreamMultiCallable(abc.ABC, Generic[_TRequest, _TResponse]): def __call__( self, request_iterator: Iterator[_TRequest], - timeout: float | None = ..., - metadata: _Metadata | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _Metadata | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> _CallIterator[_TResponse]: ... 
# Runtime Protobuf Parsing: diff --git a/stubs/grpcio/grpc/aio/__init__.pyi b/stubs/grpcio/grpc/aio/__init__.pyi index 8a9cb36bb9e6..1b0f592a9a1a 100644 --- a/stubs/grpcio/grpc/aio/__init__.pyi +++ b/stubs/grpcio/grpc/aio/__init__.pyi @@ -37,12 +37,9 @@ class AioRpcError(RpcError): code: StatusCode, initial_metadata: Metadata, trailing_metadata: Metadata, - details: str | None = ..., - debug_error_string: str | None = ..., + details: str | None = None, + debug_error_string: str | None = None, ) -> None: ... - - # FIXME: confirm if these are present in the parent type. The remaining - # methods already exist. def debug_error_string(self) -> str: ... def initial_metadata(self) -> Metadata: ... @@ -52,27 +49,27 @@ class ClientInterceptor(metaclass=abc.ABCMeta): ... def insecure_channel( target: str, - options: _Options | None = ..., - compression: Compression | None = ..., - interceptors: Sequence[ClientInterceptor] | None = ..., + options: _Options | None = None, + compression: Compression | None = None, + interceptors: Sequence[ClientInterceptor] | None = None, ) -> Channel: ... def secure_channel( target: str, credentials: ChannelCredentials, - options: _Options | None = ..., - compression: Compression | None = ..., - interceptors: Sequence[ClientInterceptor] | None = ..., + options: _Options | None = None, + compression: Compression | None = None, + interceptors: Sequence[ClientInterceptor] | None = None, ) -> Channel: ... # Create Server: def server( - migration_thread_pool: futures.Executor | None = ..., - handlers: Sequence[GenericRpcHandler[Any, Any]] | None = ..., - interceptors: Sequence[ServerInterceptor[Any, Any]] | None = ..., - options: _Options | None = ..., - maximum_concurrent_rpcs: int | None = ..., - compression: Compression | None = ..., + migration_thread_pool: futures.Executor | None = None, + handlers: Sequence[GenericRpcHandler[Any, Any]] | None = None, + interceptors: Sequence[ServerInterceptor[Any, Any]] | None = None, + options: _Options | None = None, + maximum_concurrent_rpcs: int | None = None, + compression: Compression | None = None, ) -> Server: ... # Channel Object: @@ -85,38 +82,38 @@ _ResponseDeserializer: TypeAlias = Callable[[bytes], Any] class Channel(abc.ABC): @abc.abstractmethod - async def close(self, grace: float | None = ...) -> None: ... + async def close(self, grace: float | None = None) -> None: ... @abc.abstractmethod - def get_state(self, try_to_connect: bool = ...) -> ChannelConnectivity: ... + def get_state(self, try_to_connect: bool = False) -> ChannelConnectivity: ... @abc.abstractmethod async def wait_for_state_change(self, last_observed_state: ChannelConnectivity) -> None: ... @abc.abstractmethod def stream_stream( self, method: str, - request_serializer: _RequestSerializer | None = ..., - response_deserializer: _ResponseDeserializer | None = ..., + request_serializer: _RequestSerializer | None = None, + response_deserializer: _ResponseDeserializer | None = None, ) -> StreamStreamMultiCallable[Any, Any]: ... @abc.abstractmethod def stream_unary( self, method: str, - request_serializer: _RequestSerializer | None = ..., - response_deserializer: _ResponseDeserializer | None = ..., + request_serializer: _RequestSerializer | None = None, + response_deserializer: _ResponseDeserializer | None = None, ) -> StreamUnaryMultiCallable[Any, Any]: ... 
@abc.abstractmethod def unary_stream( self, method: str, - request_serializer: _RequestSerializer | None = ..., - response_deserializer: _ResponseDeserializer | None = ..., + request_serializer: _RequestSerializer | None = None, + response_deserializer: _ResponseDeserializer | None = None, ) -> UnaryStreamMultiCallable[Any, Any]: ... @abc.abstractmethod def unary_unary( self, method: str, - request_serializer: _RequestSerializer | None = ..., - response_deserializer: _ResponseDeserializer | None = ..., + request_serializer: _RequestSerializer | None = None, + response_deserializer: _ResponseDeserializer | None = None, ) -> UnaryUnaryMultiCallable[Any, Any]: ... @abc.abstractmethod async def __aenter__(self) -> Self: ... @@ -149,7 +146,7 @@ class Server(metaclass=abc.ABCMeta): # Returns a bool indicates if the operation times out. Timeout is in seconds. @abc.abstractmethod - async def wait_for_termination(self, timeout: float | None = ...) -> bool: ... + async def wait_for_termination(self, timeout: float | None = None) -> bool: ... # Client-Side Context: @@ -216,7 +213,7 @@ class _DoneCallback(Generic[_TRequest, _TResponse]): class ServicerContext(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): @abc.abstractmethod - async def abort(self, code: StatusCode, details: str = ..., trailing_metadata: _MetadataType = ...) -> NoReturn: ... + async def abort(self, code: StatusCode, details: str = "", trailing_metadata: _MetadataType = ()) -> NoReturn: ... @abc.abstractmethod async def read(self) -> _TRequest: ... @abc.abstractmethod @@ -377,12 +374,11 @@ class UnaryUnaryMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABCM self, request: _TRequest, *, - timeout: float | None = ..., - metadata: _MetadataType | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _MetadataType | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> UnaryUnaryCall[_TRequest, _TResponse]: ... class UnaryStreamMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): @@ -391,12 +387,11 @@ class UnaryStreamMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABC self, request: _TRequest, *, - timeout: float | None = ..., - metadata: _MetadataType | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _MetadataType | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> UnaryStreamCall[_TRequest, _TResponse]: ... 
class StreamUnaryMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): @@ -404,12 +399,11 @@ class StreamUnaryMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABC def __call__( self, request_iterator: AsyncIterator[_TRequest] | Iterator[_TRequest] | None = None, - timeout: float | None = ..., - metadata: _MetadataType | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _MetadataType | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> StreamUnaryCall[_TRequest, _TResponse]: ... class StreamStreamMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.ABCMeta): @@ -417,12 +411,11 @@ class StreamStreamMultiCallable(Generic[_TRequest, _TResponse], metaclass=abc.AB def __call__( self, request_iterator: AsyncIterator[_TRequest] | Iterator[_TRequest] | None = None, - timeout: float | None = ..., - metadata: _MetadataType | None = ..., - credentials: CallCredentials | None = ..., - # FIXME: optional bool seems weird, but that's what the docs suggest - wait_for_ready: bool | None = ..., - compression: Compression | None = ..., + timeout: float | None = None, + metadata: _MetadataType | None = None, + credentials: CallCredentials | None = None, + wait_for_ready: bool | None = None, + compression: Compression | None = None, ) -> StreamStreamCall[_TRequest, _TResponse]: ... # Metadata: diff --git a/stubs/grpcio/grpc_channelz/__init__.pyi b/stubs/grpcio/grpc_channelz/__init__.pyi index 0f6820f054ea..e69de29bb2d1 100644 --- a/stubs/grpcio/grpc_channelz/__init__.pyi +++ b/stubs/grpcio/grpc_channelz/__init__.pyi @@ -1,3 +0,0 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_channelz/v1/__init__.pyi b/stubs/grpcio/grpc_channelz/v1/__init__.pyi index 0f6820f054ea..e69de29bb2d1 100644 --- a/stubs/grpcio/grpc_channelz/v1/__init__.pyi +++ b/stubs/grpcio/grpc_channelz/v1/__init__.pyi @@ -1,3 +0,0 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_channelz/v1/_async.pyi b/stubs/grpcio/grpc_channelz/v1/_async.pyi new file mode 100644 index 000000000000..87aa2bf125d1 --- /dev/null +++ b/stubs/grpcio/grpc_channelz/v1/_async.pyi @@ -0,0 +1,19 @@ +from grpc_channelz.v1 import channelz_pb2, channelz_pb2_grpc + +class ChannelzServicer(channelz_pb2_grpc.ChannelzServicer): + @staticmethod + async def GetTopChannels(request: channelz_pb2.GetTopChannelsRequest, context) -> channelz_pb2.GetTopChannelsResponse: ... + @staticmethod + async def GetServers(request: channelz_pb2.GetServersRequest, context) -> channelz_pb2.GetServersResponse: ... + @staticmethod + async def GetServer(request: channelz_pb2.GetServerRequest, context) -> channelz_pb2.GetServerResponse: ... + @staticmethod + async def GetServerSockets( + request: channelz_pb2.GetServerSocketsRequest, context + ) -> channelz_pb2.GetServerSocketsResponse: ... + @staticmethod + async def GetChannel(request: channelz_pb2.GetChannelRequest, context) -> channelz_pb2.GetChannelResponse: ... + @staticmethod + async def GetSubchannel(request: channelz_pb2.GetSubchannelRequest, context) -> channelz_pb2.GetSubchannelResponse: ... 
+ @staticmethod + async def GetSocket(request: channelz_pb2.GetSocketRequest, context) -> channelz_pb2.GetSocketResponse: ... diff --git a/stubs/grpcio/grpc_channelz/v1/channelz.pyi b/stubs/grpcio/grpc_channelz/v1/channelz.pyi index f5aa790e028d..2ed61fd4fa3a 100644 --- a/stubs/grpcio/grpc_channelz/v1/channelz.pyi +++ b/stubs/grpcio/grpc_channelz/v1/channelz.pyi @@ -1,3 +1,6 @@ -from grpc import Server +from grpc_channelz.v1 import _async as aio +from grpc_channelz.v1._servicer import ChannelzServicer -def add_channelz_servicer(server: Server) -> None: ... +def add_channelz_servicer(server) -> None: ... + +__all__ = ["aio", "add_channelz_servicer", "ChannelzServicer"] diff --git a/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi b/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi index fdc2ff0ee872..b867e0e8c0b8 100644 --- a/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi +++ b/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi @@ -1,16 +1,473 @@ from _typeshed import Incomplete +from collections.abc import Iterable, Mapping +from typing import ClassVar, final -GetTopChannelsRequest = Incomplete -GetTopChannelsResponse = Incomplete -GetServersRequest = Incomplete -GetServersResponse = Incomplete -GetServerRequest = Incomplete -GetServerResponse = Incomplete -GetServerSocketsRequest = Incomplete -GetServerSocketsResponse = Incomplete -GetChannelRequest = Incomplete -GetChannelResponse = Incomplete -GetSubchannelRequest = Incomplete -GetSubchannelResponse = Incomplete -GetSocketRequest = Incomplete -GetSocketResponse = Incomplete +from google._upb._message import Descriptor, FileDescriptor, MessageMeta +from google.protobuf import any_pb2, duration_pb2, message, timestamp_pb2, wrappers_pb2 +from google.protobuf.internal import containers + +DESCRIPTOR: FileDescriptor +@final +class Channel(message.Message, metaclass=MessageMeta): + REF_FIELD_NUMBER: ClassVar[int] + DATA_FIELD_NUMBER: ClassVar[int] + CHANNEL_REF_FIELD_NUMBER: ClassVar[int] + SUBCHANNEL_REF_FIELD_NUMBER: ClassVar[int] + SOCKET_REF_FIELD_NUMBER: ClassVar[int] + ref: ChannelRef + data: ChannelData + channel_ref: containers.RepeatedCompositeFieldContainer[ChannelRef] + subchannel_ref: containers.RepeatedCompositeFieldContainer[SubchannelRef] + socket_ref: containers.RepeatedCompositeFieldContainer[SocketRef] + def __init__(self, ref: ChannelRef | Mapping[Incomplete, Incomplete] | None = ..., data: ChannelData | Mapping[Incomplete, Incomplete] | None = ..., channel_ref: Iterable[ChannelRef | Mapping[Incomplete, Incomplete]] | None = ..., subchannel_ref: Iterable[SubchannelRef | Mapping[Incomplete, Incomplete]] | None = ..., socket_ref: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... 
+ DESCRIPTOR: Descriptor + +@final +class Subchannel(message.Message, metaclass=MessageMeta): + REF_FIELD_NUMBER: ClassVar[int] + DATA_FIELD_NUMBER: ClassVar[int] + CHANNEL_REF_FIELD_NUMBER: ClassVar[int] + SUBCHANNEL_REF_FIELD_NUMBER: ClassVar[int] + SOCKET_REF_FIELD_NUMBER: ClassVar[int] + ref: SubchannelRef + data: ChannelData + channel_ref: containers.RepeatedCompositeFieldContainer[ChannelRef] + subchannel_ref: containers.RepeatedCompositeFieldContainer[SubchannelRef] + socket_ref: containers.RepeatedCompositeFieldContainer[SocketRef] + def __init__(self, ref: SubchannelRef | Mapping[Incomplete, Incomplete] | None = ..., data: ChannelData | Mapping[Incomplete, Incomplete] | None = ..., channel_ref: Iterable[ChannelRef | Mapping[Incomplete, Incomplete]] | None = ..., subchannel_ref: Iterable[SubchannelRef | Mapping[Incomplete, Incomplete]] | None = ..., socket_ref: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ChannelConnectivityState(message.Message, metaclass=MessageMeta): + State: Incomplete + UNKNOWN: Incomplete + IDLE: Incomplete + CONNECTING: Incomplete + READY: Incomplete + TRANSIENT_FAILURE: Incomplete + SHUTDOWN: Incomplete + STATE_FIELD_NUMBER: ClassVar[int] + state: Incomplete + def __init__(self, state: Incomplete | str | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ChannelData(message.Message, metaclass=MessageMeta): + STATE_FIELD_NUMBER: ClassVar[int] + TARGET_FIELD_NUMBER: ClassVar[int] + TRACE_FIELD_NUMBER: ClassVar[int] + CALLS_STARTED_FIELD_NUMBER: ClassVar[int] + CALLS_SUCCEEDED_FIELD_NUMBER: ClassVar[int] + CALLS_FAILED_FIELD_NUMBER: ClassVar[int] + LAST_CALL_STARTED_TIMESTAMP_FIELD_NUMBER: ClassVar[int] + state: ChannelConnectivityState + target: str + trace: ChannelTrace + calls_started: int + calls_succeeded: int + calls_failed: int + last_call_started_timestamp: timestamp_pb2.Timestamp + def __init__(self, state: ChannelConnectivityState | Mapping[Incomplete, Incomplete] | None = ..., target: str | None = ..., trace: ChannelTrace | Mapping[Incomplete, Incomplete] | None = ..., calls_started: int | None = ..., calls_succeeded: int | None = ..., calls_failed: int | None = ..., last_call_started_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ChannelTraceEvent(message.Message, metaclass=MessageMeta): + Severity: Incomplete + CT_UNKNOWN: Incomplete + CT_INFO: Incomplete + CT_WARNING: Incomplete + CT_ERROR: Incomplete + DESCRIPTION_FIELD_NUMBER: ClassVar[int] + SEVERITY_FIELD_NUMBER: ClassVar[int] + TIMESTAMP_FIELD_NUMBER: ClassVar[int] + CHANNEL_REF_FIELD_NUMBER: ClassVar[int] + SUBCHANNEL_REF_FIELD_NUMBER: ClassVar[int] + description: str + severity: Incomplete + timestamp: timestamp_pb2.Timestamp + channel_ref: ChannelRef + subchannel_ref: SubchannelRef + def __init__(self, description: str | None = ..., severity: Incomplete | str | None = ..., timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., channel_ref: ChannelRef | Mapping[Incomplete, Incomplete] | None = ..., subchannel_ref: SubchannelRef | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... 
+ DESCRIPTOR: Descriptor + +@final +class ChannelTrace(message.Message, metaclass=MessageMeta): + NUM_EVENTS_LOGGED_FIELD_NUMBER: ClassVar[int] + CREATION_TIMESTAMP_FIELD_NUMBER: ClassVar[int] + EVENTS_FIELD_NUMBER: ClassVar[int] + num_events_logged: int + creation_timestamp: timestamp_pb2.Timestamp + events: containers.RepeatedCompositeFieldContainer[ChannelTraceEvent] + def __init__(self, num_events_logged: int | None = ..., creation_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., events: Iterable[ChannelTraceEvent | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ChannelRef(message.Message, metaclass=MessageMeta): + CHANNEL_ID_FIELD_NUMBER: ClassVar[int] + NAME_FIELD_NUMBER: ClassVar[int] + channel_id: int + name: str + def __init__(self, channel_id: int | None = ..., name: str | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class SubchannelRef(message.Message, metaclass=MessageMeta): + SUBCHANNEL_ID_FIELD_NUMBER: ClassVar[int] + NAME_FIELD_NUMBER: ClassVar[int] + subchannel_id: int + name: str + def __init__(self, subchannel_id: int | None = ..., name: str | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class SocketRef(message.Message, metaclass=MessageMeta): + SOCKET_ID_FIELD_NUMBER: ClassVar[int] + NAME_FIELD_NUMBER: ClassVar[int] + socket_id: int + name: str + def __init__(self, socket_id: int | None = ..., name: str | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ServerRef(message.Message, metaclass=MessageMeta): + SERVER_ID_FIELD_NUMBER: ClassVar[int] + NAME_FIELD_NUMBER: ClassVar[int] + server_id: int + name: str + def __init__(self, server_id: int | None = ..., name: str | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class Server(message.Message, metaclass=MessageMeta): + REF_FIELD_NUMBER: ClassVar[int] + DATA_FIELD_NUMBER: ClassVar[int] + LISTEN_SOCKET_FIELD_NUMBER: ClassVar[int] + ref: ServerRef + data: ServerData + listen_socket: containers.RepeatedCompositeFieldContainer[SocketRef] + def __init__(self, ref: ServerRef | Mapping[Incomplete, Incomplete] | None = ..., data: ServerData | Mapping[Incomplete, Incomplete] | None = ..., listen_socket: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ServerData(message.Message, metaclass=MessageMeta): + TRACE_FIELD_NUMBER: ClassVar[int] + CALLS_STARTED_FIELD_NUMBER: ClassVar[int] + CALLS_SUCCEEDED_FIELD_NUMBER: ClassVar[int] + CALLS_FAILED_FIELD_NUMBER: ClassVar[int] + LAST_CALL_STARTED_TIMESTAMP_FIELD_NUMBER: ClassVar[int] + trace: ChannelTrace + calls_started: int + calls_succeeded: int + calls_failed: int + last_call_started_timestamp: timestamp_pb2.Timestamp + def __init__(self, trace: ChannelTrace | Mapping[Incomplete, Incomplete] | None = ..., calls_started: int | None = ..., calls_succeeded: int | None = ..., calls_failed: int | None = ..., last_call_started_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... 
+ DESCRIPTOR: Descriptor + +@final +class Socket(message.Message, metaclass=MessageMeta): + REF_FIELD_NUMBER: ClassVar[int] + DATA_FIELD_NUMBER: ClassVar[int] + LOCAL_FIELD_NUMBER: ClassVar[int] + REMOTE_FIELD_NUMBER: ClassVar[int] + SECURITY_FIELD_NUMBER: ClassVar[int] + REMOTE_NAME_FIELD_NUMBER: ClassVar[int] + ref: SocketRef + data: SocketData + local: Address + remote: Address + security: Security + remote_name: str + def __init__(self, ref: SocketRef | Mapping[Incomplete, Incomplete] | None = ..., data: SocketData | Mapping[Incomplete, Incomplete] | None = ..., local: Address | Mapping[Incomplete, Incomplete] | None = ..., remote: Address | Mapping[Incomplete, Incomplete] | None = ..., security: Security | Mapping[Incomplete, Incomplete] | None = ..., remote_name: str | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class SocketData(message.Message, metaclass=MessageMeta): + STREAMS_STARTED_FIELD_NUMBER: ClassVar[int] + STREAMS_SUCCEEDED_FIELD_NUMBER: ClassVar[int] + STREAMS_FAILED_FIELD_NUMBER: ClassVar[int] + MESSAGES_SENT_FIELD_NUMBER: ClassVar[int] + MESSAGES_RECEIVED_FIELD_NUMBER: ClassVar[int] + KEEP_ALIVES_SENT_FIELD_NUMBER: ClassVar[int] + LAST_LOCAL_STREAM_CREATED_TIMESTAMP_FIELD_NUMBER: ClassVar[int] + LAST_REMOTE_STREAM_CREATED_TIMESTAMP_FIELD_NUMBER: ClassVar[int] + LAST_MESSAGE_SENT_TIMESTAMP_FIELD_NUMBER: ClassVar[int] + LAST_MESSAGE_RECEIVED_TIMESTAMP_FIELD_NUMBER: ClassVar[int] + LOCAL_FLOW_CONTROL_WINDOW_FIELD_NUMBER: ClassVar[int] + REMOTE_FLOW_CONTROL_WINDOW_FIELD_NUMBER: ClassVar[int] + OPTION_FIELD_NUMBER: ClassVar[int] + streams_started: int + streams_succeeded: int + streams_failed: int + messages_sent: int + messages_received: int + keep_alives_sent: int + last_local_stream_created_timestamp: timestamp_pb2.Timestamp + last_remote_stream_created_timestamp: timestamp_pb2.Timestamp + last_message_sent_timestamp: timestamp_pb2.Timestamp + last_message_received_timestamp: timestamp_pb2.Timestamp + local_flow_control_window: wrappers_pb2.Int64Value + remote_flow_control_window: wrappers_pb2.Int64Value + option: containers.RepeatedCompositeFieldContainer[SocketOption] + def __init__(self, streams_started: int | None = ..., streams_succeeded: int | None = ..., streams_failed: int | None = ..., messages_sent: int | None = ..., messages_received: int | None = ..., keep_alives_sent: int | None = ..., last_local_stream_created_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., last_remote_stream_created_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., last_message_sent_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., last_message_received_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., local_flow_control_window: wrappers_pb2.Int64Value | Mapping[Incomplete, Incomplete] | None = ..., remote_flow_control_window: wrappers_pb2.Int64Value | Mapping[Incomplete, Incomplete] | None = ..., option: Iterable[SocketOption | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class Address(message.Message, metaclass=MessageMeta): + @final + class TcpIpAddress(message.Message, metaclass=MessageMeta): + IP_ADDRESS_FIELD_NUMBER: ClassVar[int] + PORT_FIELD_NUMBER: ClassVar[int] + ip_address: bytes + port: int + def __init__(self, ip_address: bytes | None = ..., port: int | None = ...) -> None: ... 
+ @final + class UdsAddress(message.Message, metaclass=MessageMeta): + FILENAME_FIELD_NUMBER: ClassVar[int] + filename: str + def __init__(self, filename: str | None = ...) -> None: ... + @final + class OtherAddress(message.Message, metaclass=MessageMeta): + NAME_FIELD_NUMBER: ClassVar[int] + VALUE_FIELD_NUMBER: ClassVar[int] + name: str + value: any_pb2.Any + def __init__(self, name: str | None = ..., value: any_pb2.Any | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + TCPIP_ADDRESS_FIELD_NUMBER: ClassVar[int] + UDS_ADDRESS_FIELD_NUMBER: ClassVar[int] + OTHER_ADDRESS_FIELD_NUMBER: ClassVar[int] + tcpip_address: Address.TcpIpAddress + uds_address: Address.UdsAddress + other_address: Address.OtherAddress + def __init__(self, tcpip_address: Address.TcpIpAddress | Mapping[Incomplete, Incomplete] | None = ..., uds_address: Address.UdsAddress | Mapping[Incomplete, Incomplete] | None = ..., other_address: Address.OtherAddress | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class Security(message.Message, metaclass=MessageMeta): + @final + class Tls(message.Message, metaclass=MessageMeta): + STANDARD_NAME_FIELD_NUMBER: ClassVar[int] + OTHER_NAME_FIELD_NUMBER: ClassVar[int] + LOCAL_CERTIFICATE_FIELD_NUMBER: ClassVar[int] + REMOTE_CERTIFICATE_FIELD_NUMBER: ClassVar[int] + standard_name: str + other_name: str + local_certificate: bytes + remote_certificate: bytes + def __init__(self, standard_name: str | None = ..., other_name: str | None = ..., local_certificate: bytes | None = ..., remote_certificate: bytes | None = ...) -> None: ... + @final + class OtherSecurity(message.Message, metaclass=MessageMeta): + NAME_FIELD_NUMBER: ClassVar[int] + VALUE_FIELD_NUMBER: ClassVar[int] + name: str + value: any_pb2.Any + def __init__(self, name: str | None = ..., value: any_pb2.Any | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + TLS_FIELD_NUMBER: ClassVar[int] + OTHER_FIELD_NUMBER: ClassVar[int] + tls: Security.Tls + other: Security.OtherSecurity + def __init__(self, tls: Security.Tls | Mapping[Incomplete, Incomplete] | None = ..., other: Security.OtherSecurity | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class SocketOption(message.Message, metaclass=MessageMeta): + NAME_FIELD_NUMBER: ClassVar[int] + VALUE_FIELD_NUMBER: ClassVar[int] + ADDITIONAL_FIELD_NUMBER: ClassVar[int] + name: str + value: str + additional: any_pb2.Any + def __init__(self, name: str | None = ..., value: str | None = ..., additional: any_pb2.Any | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class SocketOptionTimeout(message.Message, metaclass=MessageMeta): + DURATION_FIELD_NUMBER: ClassVar[int] + duration: duration_pb2.Duration + def __init__(self, duration: duration_pb2.Duration | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class SocketOptionLinger(message.Message, metaclass=MessageMeta): + ACTIVE_FIELD_NUMBER: ClassVar[int] + DURATION_FIELD_NUMBER: ClassVar[int] + active: bool + duration: duration_pb2.Duration + def __init__(self, active: bool = ..., duration: duration_pb2.Duration | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... 
+ DESCRIPTOR: Descriptor + +@final +class SocketOptionTcpInfo(message.Message, metaclass=MessageMeta): + TCPI_STATE_FIELD_NUMBER: ClassVar[int] + TCPI_CA_STATE_FIELD_NUMBER: ClassVar[int] + TCPI_RETRANSMITS_FIELD_NUMBER: ClassVar[int] + TCPI_PROBES_FIELD_NUMBER: ClassVar[int] + TCPI_BACKOFF_FIELD_NUMBER: ClassVar[int] + TCPI_OPTIONS_FIELD_NUMBER: ClassVar[int] + TCPI_SND_WSCALE_FIELD_NUMBER: ClassVar[int] + TCPI_RCV_WSCALE_FIELD_NUMBER: ClassVar[int] + TCPI_RTO_FIELD_NUMBER: ClassVar[int] + TCPI_ATO_FIELD_NUMBER: ClassVar[int] + TCPI_SND_MSS_FIELD_NUMBER: ClassVar[int] + TCPI_RCV_MSS_FIELD_NUMBER: ClassVar[int] + TCPI_UNACKED_FIELD_NUMBER: ClassVar[int] + TCPI_SACKED_FIELD_NUMBER: ClassVar[int] + TCPI_LOST_FIELD_NUMBER: ClassVar[int] + TCPI_RETRANS_FIELD_NUMBER: ClassVar[int] + TCPI_FACKETS_FIELD_NUMBER: ClassVar[int] + TCPI_LAST_DATA_SENT_FIELD_NUMBER: ClassVar[int] + TCPI_LAST_ACK_SENT_FIELD_NUMBER: ClassVar[int] + TCPI_LAST_DATA_RECV_FIELD_NUMBER: ClassVar[int] + TCPI_LAST_ACK_RECV_FIELD_NUMBER: ClassVar[int] + TCPI_PMTU_FIELD_NUMBER: ClassVar[int] + TCPI_RCV_SSTHRESH_FIELD_NUMBER: ClassVar[int] + TCPI_RTT_FIELD_NUMBER: ClassVar[int] + TCPI_RTTVAR_FIELD_NUMBER: ClassVar[int] + TCPI_SND_SSTHRESH_FIELD_NUMBER: ClassVar[int] + TCPI_SND_CWND_FIELD_NUMBER: ClassVar[int] + TCPI_ADVMSS_FIELD_NUMBER: ClassVar[int] + TCPI_REORDERING_FIELD_NUMBER: ClassVar[int] + tcpi_state: int + tcpi_ca_state: int + tcpi_retransmits: int + tcpi_probes: int + tcpi_backoff: int + tcpi_options: int + tcpi_snd_wscale: int + tcpi_rcv_wscale: int + tcpi_rto: int + tcpi_ato: int + tcpi_snd_mss: int + tcpi_rcv_mss: int + tcpi_unacked: int + tcpi_sacked: int + tcpi_lost: int + tcpi_retrans: int + tcpi_fackets: int + tcpi_last_data_sent: int + tcpi_last_ack_sent: int + tcpi_last_data_recv: int + tcpi_last_ack_recv: int + tcpi_pmtu: int + tcpi_rcv_ssthresh: int + tcpi_rtt: int + tcpi_rttvar: int + tcpi_snd_ssthresh: int + tcpi_snd_cwnd: int + tcpi_advmss: int + tcpi_reordering: int + def __init__(self, tcpi_state: int | None = ..., tcpi_ca_state: int | None = ..., tcpi_retransmits: int | None = ..., tcpi_probes: int | None = ..., tcpi_backoff: int | None = ..., tcpi_options: int | None = ..., tcpi_snd_wscale: int | None = ..., tcpi_rcv_wscale: int | None = ..., tcpi_rto: int | None = ..., tcpi_ato: int | None = ..., tcpi_snd_mss: int | None = ..., tcpi_rcv_mss: int | None = ..., tcpi_unacked: int | None = ..., tcpi_sacked: int | None = ..., tcpi_lost: int | None = ..., tcpi_retrans: int | None = ..., tcpi_fackets: int | None = ..., tcpi_last_data_sent: int | None = ..., tcpi_last_ack_sent: int | None = ..., tcpi_last_data_recv: int | None = ..., tcpi_last_ack_recv: int | None = ..., tcpi_pmtu: int | None = ..., tcpi_rcv_ssthresh: int | None = ..., tcpi_rtt: int | None = ..., tcpi_rttvar: int | None = ..., tcpi_snd_ssthresh: int | None = ..., tcpi_snd_cwnd: int | None = ..., tcpi_advmss: int | None = ..., tcpi_reordering: int | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetTopChannelsRequest(message.Message, metaclass=MessageMeta): + START_CHANNEL_ID_FIELD_NUMBER: ClassVar[int] + MAX_RESULTS_FIELD_NUMBER: ClassVar[int] + start_channel_id: int + max_results: int + def __init__(self, start_channel_id: int | None = ..., max_results: int | None = ...) -> None: ... 
+ DESCRIPTOR: Descriptor + +@final +class GetTopChannelsResponse(message.Message, metaclass=MessageMeta): + CHANNEL_FIELD_NUMBER: ClassVar[int] + END_FIELD_NUMBER: ClassVar[int] + channel: containers.RepeatedCompositeFieldContainer[Channel] + end: bool + def __init__(self, channel: Iterable[Channel | Mapping[Incomplete, Incomplete]] | None = ..., end: bool = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetServersRequest(message.Message, metaclass=MessageMeta): + START_SERVER_ID_FIELD_NUMBER: ClassVar[int] + MAX_RESULTS_FIELD_NUMBER: ClassVar[int] + start_server_id: int + max_results: int + def __init__(self, start_server_id: int | None = ..., max_results: int | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetServersResponse(message.Message, metaclass=MessageMeta): + SERVER_FIELD_NUMBER: ClassVar[int] + END_FIELD_NUMBER: ClassVar[int] + server: containers.RepeatedCompositeFieldContainer[Server] + end: bool + def __init__(self, server: Iterable[Server | Mapping[Incomplete, Incomplete]] | None = ..., end: bool = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetServerRequest(message.Message, metaclass=MessageMeta): + SERVER_ID_FIELD_NUMBER: ClassVar[int] + server_id: int + def __init__(self, server_id: int | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetServerResponse(message.Message, metaclass=MessageMeta): + SERVER_FIELD_NUMBER: ClassVar[int] + server: Server + def __init__(self, server: Server | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetServerSocketsRequest(message.Message, metaclass=MessageMeta): + SERVER_ID_FIELD_NUMBER: ClassVar[int] + START_SOCKET_ID_FIELD_NUMBER: ClassVar[int] + MAX_RESULTS_FIELD_NUMBER: ClassVar[int] + server_id: int + start_socket_id: int + max_results: int + def __init__(self, server_id: int | None = ..., start_socket_id: int | None = ..., max_results: int | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetServerSocketsResponse(message.Message, metaclass=MessageMeta): + SOCKET_REF_FIELD_NUMBER: ClassVar[int] + END_FIELD_NUMBER: ClassVar[int] + socket_ref: containers.RepeatedCompositeFieldContainer[SocketRef] + end: bool + def __init__(self, socket_ref: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ..., end: bool = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetChannelRequest(message.Message, metaclass=MessageMeta): + CHANNEL_ID_FIELD_NUMBER: ClassVar[int] + channel_id: int + def __init__(self, channel_id: int | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetChannelResponse(message.Message, metaclass=MessageMeta): + CHANNEL_FIELD_NUMBER: ClassVar[int] + channel: Channel + def __init__(self, channel: Channel | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetSubchannelRequest(message.Message, metaclass=MessageMeta): + SUBCHANNEL_ID_FIELD_NUMBER: ClassVar[int] + subchannel_id: int + def __init__(self, subchannel_id: int | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetSubchannelResponse(message.Message, metaclass=MessageMeta): + SUBCHANNEL_FIELD_NUMBER: ClassVar[int] + subchannel: Subchannel + def __init__(self, subchannel: Subchannel | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... 
+ DESCRIPTOR: Descriptor + +@final +class GetSocketRequest(message.Message, metaclass=MessageMeta): + SOCKET_ID_FIELD_NUMBER: ClassVar[int] + SUMMARY_FIELD_NUMBER: ClassVar[int] + socket_id: int + summary: bool + def __init__(self, socket_id: int | None = ..., summary: bool = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class GetSocketResponse(message.Message, metaclass=MessageMeta): + SOCKET_FIELD_NUMBER: ClassVar[int] + socket: Socket + def __init__(self, socket: Socket | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + DESCRIPTOR: Descriptor diff --git a/stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi b/stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi index 0b9716abcf75..bd6f78fecf4d 100644 --- a/stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi +++ b/stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi @@ -1,3 +1,121 @@ from _typeshed import Incomplete +from typing import Final -ChannelzServicer = Incomplete +import grpc + +GRPC_GENERATED_VERSION: Final[str] +GRPC_VERSION: Final[str] + +class ChannelzStub: + GetTopChannels: Incomplete + GetServers: Incomplete + GetServer: Incomplete + GetServerSockets: Incomplete + GetChannel: Incomplete + GetSubchannel: Incomplete + GetSocket: Incomplete + def __init__(self, channel: grpc.Channel): ... + +class ChannelzServicer: + def GetTopChannels(self, request, context): ... + def GetServers(self, request, context): ... + def GetServer(self, request, context): ... + def GetServerSockets(self, request, context): ... + def GetChannel(self, request, context): ... + def GetSubchannel(self, request, context): ... + def GetSocket(self, request, context): ... + +def add_ChannelzServicer_to_server(servicer, server) -> None: ... + +class Channelz: + @staticmethod + def GetTopChannels( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... + @staticmethod + def GetServers( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... + @staticmethod + def GetServer( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... + @staticmethod + def GetServerSockets( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... + @staticmethod + def GetChannel( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... + @staticmethod + def GetSubchannel( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... + @staticmethod + def GetSocket( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... 
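As an illustrative sketch only (not part of the patch), the ChannelzStub typed above could be exercised roughly like this; the target address is hypothetical and the server is assumed to have the channelz service registered (for example via grpc_channelz.v1.channelz.add_channelz_servicer):

    import grpc
    from grpc_channelz.v1 import channelz_pb2, channelz_pb2_grpc

    # Hypothetical target; assumes channelz is enabled on the server.
    with grpc.insecure_channel("localhost:50051") as channel:
        stub = channelz_pb2_grpc.ChannelzStub(channel)
        request = channelz_pb2.GetTopChannelsRequest(start_channel_id=0)
        response = stub.GetTopChannels(request)
        for top_channel in response.channel:
            # ChannelRef carries the numeric id and optional name.
            print(top_channel.ref.channel_id, top_channel.ref.name)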
diff --git a/stubs/grpcio/grpc_health/__init__.pyi b/stubs/grpcio/grpc_health/__init__.pyi index 0f6820f054ea..e69de29bb2d1 100644 --- a/stubs/grpcio/grpc_health/__init__.pyi +++ b/stubs/grpcio/grpc_health/__init__.pyi @@ -1,3 +0,0 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_health/v1/__init__.pyi b/stubs/grpcio/grpc_health/v1/__init__.pyi index 0f6820f054ea..e69de29bb2d1 100644 --- a/stubs/grpcio/grpc_health/v1/__init__.pyi +++ b/stubs/grpcio/grpc_health/v1/__init__.pyi @@ -1,3 +0,0 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_health/v1/health.pyi b/stubs/grpcio/grpc_health/v1/health.pyi index 7ea7dc5d0b50..1353f3f84ef0 100644 --- a/stubs/grpcio/grpc_health/v1/health.pyi +++ b/stubs/grpcio/grpc_health/v1/health.pyi @@ -1,34 +1,35 @@ +from _typeshed import Incomplete +from collections.abc import Callable from concurrent import futures -from typing import Any, Protocol +from typing import Final, overload +from typing_extensions import Self from grpc import ServicerContext -from grpc_health.v1 import health_pb2 as _health_pb2, health_pb2_grpc as _health_pb2_grpc +from grpc_health.v1 import health_pb2, health_pb2_grpc -SERVICE_NAME: str -OVERALL_HEALTH: str +SERVICE_NAME: Final[str] +OVERALL_HEALTH: Final[str] class _Watcher: def __init__(self) -> None: ... - def __iter__(self) -> _Watcher: ... - def next(self) -> _health_pb2.HealthCheckResponse: ... - def __next__(self) -> _health_pb2.HealthCheckResponse: ... - def add(self, response: _health_pb2.HealthCheckResponse) -> None: ... + def __iter__(self) -> Self: ... + def next(self) -> health_pb2.HealthCheckResponse: ... + def __next__(self) -> health_pb2.HealthCheckResponse: ... + def add(self, response: health_pb2.HealthCheckResponse) -> None: ... def close(self) -> None: ... -# FIXME: This needs further investigation -class _SendResponseCallback(Protocol): - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - -class HealthServicer(_health_pb2_grpc.HealthServicer): +class HealthServicer(health_pb2_grpc.HealthServicer): def __init__( - self, experimental_non_blocking: bool = ..., experimental_thread_pool: futures.ThreadPoolExecutor | None = ... + self, experimental_non_blocking: bool = True, experimental_thread_pool: futures.ThreadPoolExecutor | None = None ) -> None: ... - def Check(self, request: _health_pb2.HealthCheckRequest, context: ServicerContext) -> _health_pb2.HealthCheckResponse: ... + def Check(self, request: health_pb2.HealthCheckRequest, context: ServicerContext) -> health_pb2.HealthCheckResponse: ... + @overload + def Watch( + self, request: health_pb2.HealthCheckRequest, context: ServicerContext, send_response_callback: None = None + ) -> _Watcher: ... + @overload def Watch( - self, - request: _health_pb2.HealthCheckRequest, - context: ServicerContext, - send_response_callback: _SendResponseCallback | None = ..., - ) -> _health_pb2.HealthCheckResponse: ... - def set(self, service: str, status: _health_pb2.HealthCheckResponse.ServingStatus) -> None: ... + self, request: health_pb2.HealthCheckRequest, context: ServicerContext, send_response_callback: Callable[..., Incomplete] + ) -> None: ... + def set(self, service: str, status: health_pb2.HealthCheckResponse.ServingStatus) -> None: ... def enter_graceful_shutdown(self) -> None: ... 
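A minimal server-side sketch (not part of the patch) showing how the HealthServicer signatures typed above are commonly wired up; the service name "demo.Echo" is made up:

    from concurrent import futures

    import grpc
    from grpc_health.v1 import health, health_pb2, health_pb2_grpc

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    servicer = health.HealthServicer(
        experimental_non_blocking=True,
        experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1),
    )
    health_pb2_grpc.add_HealthServicer_to_server(servicer, server)

    # set() takes the ServingStatus enum value typed on health_pb2.HealthCheckResponse;
    # OVERALL_HEALTH ("") reports the status of the server as a whole.
    servicer.set("demo.Echo", health_pb2.HealthCheckResponse.SERVING)
    servicer.set(health.OVERALL_HEALTH, health_pb2.HealthCheckResponse.SERVING)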
diff --git a/stubs/grpcio/grpc_health/v1/health_pb2.pyi b/stubs/grpcio/grpc_health/v1/health_pb2.pyi index 0f6820f054ea..82191f79b659 100644 --- a/stubs/grpcio/grpc_health/v1/health_pb2.pyi +++ b/stubs/grpcio/grpc_health/v1/health_pb2.pyi @@ -1,3 +1,26 @@ from _typeshed import Incomplete +from typing import ClassVar, final -def __getattr__(name: str) -> Incomplete: ... +from google._upb._message import Descriptor, FileDescriptor, MessageMeta +from google.protobuf import message + +DESCRIPTOR: FileDescriptor + +@final +class HealthCheckRequest(message.Message, metaclass=MessageMeta): + SERVICE_FIELD_NUMBER: ClassVar[int] + service: str + def __init__(self, service: str | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class HealthCheckResponse(message.Message, metaclass=MessageMeta): + ServingStatus: Incomplete + UNKNOWN: Incomplete + SERVING: Incomplete + NOT_SERVING: Incomplete + SERVICE_UNKNOWN: Incomplete + STATUS_FIELD_NUMBER: ClassVar[int] + status: Incomplete + def __init__(self, status: Incomplete | str | None = ...) -> None: ... + DESCRIPTOR: Descriptor diff --git a/stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi b/stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi index 8a29ae2fab5b..8170664f0cb1 100644 --- a/stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi +++ b/stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi @@ -1,6 +1,41 @@ -from _typeshed import Incomplete +from typing import Final -def __getattr__(name: str) -> Incomplete: ... +GRPC_GENERATED_VERSION: Final[str] +GRPC_VERSION: Final[str] -# FIXME: Incomplete -class HealthServicer: ... +class HealthStub: + def __init__(self, channel) -> None: ... + +class HealthServicer: + def Check(self, request, context): ... + def Watch(self, request, context): ... + +def add_HealthServicer_to_server(servicer, server) -> None: ... + +class Health: + @staticmethod + def Check( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... + @staticmethod + def Watch( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... diff --git a/stubs/grpcio/grpc_reflection/__init__.pyi b/stubs/grpcio/grpc_reflection/__init__.pyi index 0f6820f054ea..e69de29bb2d1 100644 --- a/stubs/grpcio/grpc_reflection/__init__.pyi +++ b/stubs/grpcio/grpc_reflection/__init__.pyi @@ -1,3 +0,0 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi b/stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi index 0f6820f054ea..e69de29bb2d1 100644 --- a/stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi +++ b/stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi @@ -1,3 +0,0 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... 
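For the client side, a hedged sketch (not part of the patch) of a health check against a hypothetical address; note that the RPC attributes such as Check are attached to generated stubs at runtime, so only __init__ is declared on HealthStub here:

    import grpc
    from grpc_health.v1 import health_pb2, health_pb2_grpc

    with grpc.insecure_channel("localhost:50051") as channel:
        stub = health_pb2_grpc.HealthStub(channel)
        # An empty service name queries the overall server health.
        response = stub.Check(health_pb2.HealthCheckRequest(service=""))
        print(health_pb2.HealthCheckResponse.ServingStatus.Name(response.status))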
diff --git a/stubs/grpcio/grpc_reflection/v1alpha/_async.pyi b/stubs/grpcio/grpc_reflection/v1alpha/_async.pyi new file mode 100644 index 000000000000..39fa94fe1d91 --- /dev/null +++ b/stubs/grpcio/grpc_reflection/v1alpha/_async.pyi @@ -0,0 +1,11 @@ +from collections.abc import AsyncIterable + +from grpc_reflection.v1alpha import reflection_pb2 +from grpc_reflection.v1alpha._base import BaseReflectionServicer + +class ReflectionServicer(BaseReflectionServicer): + async def ServerReflectionInfo( + self, request_iterator: AsyncIterable[reflection_pb2.ServerReflectionRequest], unused_context + ) -> AsyncIterable[reflection_pb2.ServerReflectionResponse]: ... + +__all__ = ["ReflectionServicer"] diff --git a/stubs/grpcio/grpc_reflection/v1alpha/_base.pyi b/stubs/grpcio/grpc_reflection/v1alpha/_base.pyi index 00704b5062ef..e808f4a9147a 100644 --- a/stubs/grpcio/grpc_reflection/v1alpha/_base.pyi +++ b/stubs/grpcio/grpc_reflection/v1alpha/_base.pyi @@ -1,6 +1,6 @@ -from _typeshed import Incomplete +from grpc_reflection.v1alpha import reflection_pb2_grpc -def __getattr__(name: str) -> Incomplete: ... +class BaseReflectionServicer(reflection_pb2_grpc.ServerReflectionServicer): + def __init__(self, service_names, pool=None) -> None: ... -# FIXME: Incomplete -class BaseReflectionServicer: ... +__all__ = ["BaseReflectionServicer"] diff --git a/stubs/grpcio/grpc_reflection/v1alpha/proto_reflection_descriptor_database.pyi b/stubs/grpcio/grpc_reflection/v1alpha/proto_reflection_descriptor_database.pyi new file mode 100644 index 000000000000..e41b1edb0bab --- /dev/null +++ b/stubs/grpcio/grpc_reflection/v1alpha/proto_reflection_descriptor_database.pyi @@ -0,0 +1,11 @@ +import grpc +from google.protobuf.descriptor_database import DescriptorDatabase +from google.protobuf.descriptor_pb2 import FileDescriptorProto + +class ProtoReflectionDescriptorDatabase(DescriptorDatabase): + def __init__(self, channel: grpc.Channel) -> None: ... + def get_services(self) -> list[str]: ... + def FindFileByName(self, name: str) -> FileDescriptorProto: ... + def FindFileContainingSymbol(self, symbol: str) -> FileDescriptorProto: ... + def FindAllExtensionNumbers(self, extendee_name: str) -> list[int]: ... + def FindFileContainingExtension(self, extendee_name: str, extension_number: int) -> FileDescriptorProto: ... diff --git a/stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi b/stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi index cf75c363f198..8b336b9bee3c 100644 --- a/stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi +++ b/stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi @@ -1,23 +1,28 @@ -import typing_extensions from _typeshed import Incomplete from collections.abc import Iterable +from typing import Final +from typing_extensions import TypeAlias import grpc +import grpc.aio from google.protobuf import descriptor_pool -from grpc import aio from grpc_reflection.v1alpha import reflection_pb2 as _reflection_pb2 from grpc_reflection.v1alpha._base import BaseReflectionServicer -SERVICE_NAME: str +from . 
import _async as aio -_AnyServer: typing_extensions.TypeAlias = grpc.Server | aio.Server -_AnyServicerContext: typing_extensions.TypeAlias = grpc.ServicerContext | aio.ServicerContext[Incomplete, Incomplete] +SERVICE_NAME: Final[str] + +_AnyServer: TypeAlias = grpc.Server | grpc.aio.Server +_AnyServicerContext: TypeAlias = grpc.ServicerContext | grpc.aio.ServicerContext[Incomplete, Incomplete] class ReflectionServicer(BaseReflectionServicer): def ServerReflectionInfo( self, request_iterator: Iterable[_reflection_pb2.ServerReflectionRequest], context: _AnyServicerContext - ) -> None: ... + ): ... def enable_server_reflection( - service_names: Iterable[str], server: _AnyServer, pool: descriptor_pool.DescriptorPool | None = ... + service_names: Iterable[str], server: _AnyServer, pool: descriptor_pool.DescriptorPool | None = None ) -> None: ... + +__all__ = ["SERVICE_NAME", "ReflectionServicer", "enable_server_reflection", "aio"] diff --git a/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi b/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi index 0f6820f054ea..35eff35cdb30 100644 --- a/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi +++ b/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi @@ -1,3 +1,90 @@ from _typeshed import Incomplete +from collections.abc import Iterable, Mapping +from typing import ClassVar, final -def __getattr__(name: str) -> Incomplete: ... +from google._upb._message import Descriptor, FileDescriptor, MessageMeta +from google.protobuf import message +from google.protobuf.internal import containers + +DESCRIPTOR: FileDescriptor + +@final +class ServerReflectionRequest(message.Message, metaclass=MessageMeta): + HOST_FIELD_NUMBER: ClassVar[int] + FILE_BY_FILENAME_FIELD_NUMBER: ClassVar[int] + FILE_CONTAINING_SYMBOL_FIELD_NUMBER: ClassVar[int] + FILE_CONTAINING_EXTENSION_FIELD_NUMBER: ClassVar[int] + ALL_EXTENSION_NUMBERS_OF_TYPE_FIELD_NUMBER: ClassVar[int] + LIST_SERVICES_FIELD_NUMBER: ClassVar[int] + host: str + file_by_filename: str + file_containing_symbol: str + file_containing_extension: ExtensionRequest + all_extension_numbers_of_type: str + list_services: str + def __init__(self, host: str | None = ..., file_by_filename: str | None = ..., file_containing_symbol: str | None = ..., file_containing_extension: ExtensionRequest | Mapping[Incomplete, Incomplete] | None = ..., all_extension_numbers_of_type: str | None = ..., list_services: str | None = ...) -> None: ... + DESCRIPTOR: Descriptor +@final +class ExtensionRequest(message.Message, metaclass=MessageMeta): + CONTAINING_TYPE_FIELD_NUMBER: ClassVar[int] + EXTENSION_NUMBER_FIELD_NUMBER: ClassVar[int] + containing_type: str + extension_number: int + def __init__(self, containing_type: str | None = ..., extension_number: int | None = ...) -> None: ... 
+ DESCRIPTOR: Descriptor + +@final +class ServerReflectionResponse(message.Message, metaclass=MessageMeta): + VALID_HOST_FIELD_NUMBER: ClassVar[int] + ORIGINAL_REQUEST_FIELD_NUMBER: ClassVar[int] + FILE_DESCRIPTOR_RESPONSE_FIELD_NUMBER: ClassVar[int] + ALL_EXTENSION_NUMBERS_RESPONSE_FIELD_NUMBER: ClassVar[int] + LIST_SERVICES_RESPONSE_FIELD_NUMBER: ClassVar[int] + ERROR_RESPONSE_FIELD_NUMBER: ClassVar[int] + valid_host: str + original_request: ServerReflectionRequest + file_descriptor_response: FileDescriptorResponse + all_extension_numbers_response: ExtensionNumberResponse + list_services_response: ListServiceResponse + error_response: ErrorResponse + def __init__(self, valid_host: str | None = ..., original_request: ServerReflectionRequest | Mapping[Incomplete, Incomplete] | None = ..., file_descriptor_response: FileDescriptorResponse | Mapping[Incomplete, Incomplete] | None = ..., all_extension_numbers_response: ExtensionNumberResponse | Mapping[Incomplete, Incomplete] | None = ..., list_services_response: ListServiceResponse | Mapping[Incomplete, Incomplete] | None = ..., error_response: ErrorResponse | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class FileDescriptorResponse(message.Message, metaclass=MessageMeta): + FILE_DESCRIPTOR_PROTO_FIELD_NUMBER: ClassVar[int] + file_descriptor_proto: containers.RepeatedScalarFieldContainer[bytes] + def __init__(self, file_descriptor_proto: Iterable[bytes] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ExtensionNumberResponse(message.Message, metaclass=MessageMeta): + BASE_TYPE_NAME_FIELD_NUMBER: ClassVar[int] + EXTENSION_NUMBER_FIELD_NUMBER: ClassVar[int] + base_type_name: str + extension_number: containers.RepeatedScalarFieldContainer[int] + def __init__(self, base_type_name: str | None = ..., extension_number: Iterable[int] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ListServiceResponse(message.Message, metaclass=MessageMeta): + SERVICE_FIELD_NUMBER: ClassVar[int] + service: containers.RepeatedCompositeFieldContainer[ServiceResponse] + def __init__(self, service: Iterable[ServiceResponse | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ServiceResponse(message.Message, metaclass=MessageMeta): + NAME_FIELD_NUMBER: ClassVar[int] + name: str + def __init__(self, name: str | None = ...) -> None: ... + DESCRIPTOR: Descriptor + +@final +class ErrorResponse(message.Message, metaclass=MessageMeta): + ERROR_CODE_FIELD_NUMBER: ClassVar[int] + ERROR_MESSAGE_FIELD_NUMBER: ClassVar[int] + error_code: int + error_message: str + def __init__(self, error_code: int | None = ..., error_message: str | None = ...) -> None: ... + DESCRIPTOR: Descriptor diff --git a/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2_grpc.pyi b/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2_grpc.pyi new file mode 100644 index 000000000000..31b983b4f82c --- /dev/null +++ b/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2_grpc.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete +from typing import Final + +import grpc + +GRPC_GENERATED_VERSION: Final[str] +GRPC_VERSION: Final[str] + +class ServerReflectionStub: + ServerReflectionInfo: Incomplete + def __init__(self, channel: grpc.Channel) -> None: ... + +class ServerReflectionServicer: + def ServerReflectionInfo(self, request_iterator, context): ... + +def add_ServerReflectionServicer_to_server(servicer, server): ...
+ +class ServerReflection: + @staticmethod + def ServerReflectionInfo( + request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): ... diff --git a/stubs/grpcio/grpc_status/__init__.pyi b/stubs/grpcio/grpc_status/__init__.pyi index 0f6820f054ea..e69de29bb2d1 100644 --- a/stubs/grpcio/grpc_status/__init__.pyi +++ b/stubs/grpcio/grpc_status/__init__.pyi @@ -1,3 +0,0 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/grpcio/grpc_status/_async.pyi b/stubs/grpcio/grpc_status/_async.pyi new file mode 100644 index 000000000000..a636b3f59f20 --- /dev/null +++ b/stubs/grpcio/grpc_status/_async.pyi @@ -0,0 +1,5 @@ +from _typeshed import Incomplete + +async def from_call(call) -> Incomplete | None: ... + +__all__ = ["from_call"] diff --git a/stubs/grpcio/grpc_status/rpc_status.pyi b/stubs/grpcio/grpc_status/rpc_status.pyi index 090716d5cfe9..e2e0204167f5 100644 --- a/stubs/grpcio/grpc_status/rpc_status.pyi +++ b/stubs/grpcio/grpc_status/rpc_status.pyi @@ -1,7 +1,11 @@ import grpc +from . import _async as aio + # Returns a google.rpc.status.Status message corresponding to a given grpc.Call. def from_call(call: grpc.Call): ... # Convert a google.rpc.status.Status message to grpc.Status. def to_status(status) -> grpc.Status: ... + +__all__ = ["from_call", "to_status", "aio"] diff --git a/stubs/protobuf/google/_upb/_message.pyi b/stubs/protobuf/google/_upb/_message.pyi new file mode 100644 index 000000000000..bb188bcbf4f1 --- /dev/null +++ b/stubs/protobuf/google/_upb/_message.pyi @@ -0,0 +1,310 @@ +from _typeshed import Incomplete +from typing import ClassVar, final + +default_pool: DescriptorPool + +@final +class Arena: ... + +@final +class Descriptor: + containing_type: Incomplete + enum_types: Incomplete + enum_types_by_name: Incomplete + enum_values_by_name: Incomplete + extension_ranges: Incomplete + extensions: Incomplete + extensions_by_name: Incomplete + fields: Incomplete + fields_by_camelcase_name: Incomplete + fields_by_name: Incomplete + fields_by_number: Incomplete + file: Incomplete + full_name: Incomplete + has_options: Incomplete + is_extendable: Incomplete + name: Incomplete + nested_types: Incomplete + nested_types_by_name: Incomplete + oneofs: Incomplete + oneofs_by_name: Incomplete + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def CopyToProto(self, object, /): ... + def EnumValueName(self, *args, **kwargs): ... # incomplete + def GetOptions(self): ... + +@final +class DescriptorPool: + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def Add(self, object, /): ... + def AddSerializedFile(self, object, /): ... + def FindAllExtensions(self, object, /): ... + def FindEnumTypeByName(self, object, /): ... + def FindExtensionByName(self, object, /): ... + def FindExtensionByNumber(self, *args, **kwargs): ... # incomplete + def FindFieldByName(self, object, /): ... + def FindFileByName(self, object, /): ... + def FindFileContainingSymbol(self, object, /): ... + def FindMessageTypeByName(self, object, /): ... + def FindMethodByName(self, object, /): ... + def FindOneofByName(self, object, /): ... + def FindServiceByName(self, object, /): ... + def SetFeatureSetDefaults(self, object, /): ... 
+ +@final +class EnumDescriptor: + containing_type: Incomplete + file: Incomplete + full_name: Incomplete + has_options: Incomplete + is_closed: Incomplete + name: Incomplete + values: Incomplete + values_by_name: Incomplete + values_by_number: Incomplete + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def CopyToProto(self, object, /): ... + def GetOptions(self): ... + +@final +class EnumValueDescriptor: + has_options: Incomplete + index: Incomplete + name: Incomplete + number: Incomplete + type: Incomplete + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def GetOptions(self): ... + +@final +class ExtensionDict: + def __contains__(self, other) -> bool: ... + def __delitem__(self, other) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __getitem__(self, index): ... + def __gt__(self, other: object) -> bool: ... + def __iter__(self): ... + def __le__(self, other: object) -> bool: ... + def __len__(self) -> int: ... + def __lt__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __setitem__(self, index, object) -> None: ... + +@final +class ExtensionIterator: + def __iter__(self): ... + def __next__(self): ... + +@final +class FieldDescriptor: + CPPTYPE_BOOL: ClassVar[int] = ... + CPPTYPE_BYTES: ClassVar[int] = ... + CPPTYPE_DOUBLE: ClassVar[int] = ... + CPPTYPE_ENUM: ClassVar[int] = ... + CPPTYPE_FLOAT: ClassVar[int] = ... + CPPTYPE_INT32: ClassVar[int] = ... + CPPTYPE_INT64: ClassVar[int] = ... + CPPTYPE_MESSAGE: ClassVar[int] = ... + CPPTYPE_STRING: ClassVar[int] = ... + CPPTYPE_UINT32: ClassVar[int] = ... + CPPTYPE_UINT64: ClassVar[int] = ... + LABEL_OPTIONAL: ClassVar[int] = ... + LABEL_REPEATED: ClassVar[int] = ... + LABEL_REQUIRED: ClassVar[int] = ... + TYPE_BOOL: ClassVar[int] = ... + TYPE_BYTES: ClassVar[int] = ... + TYPE_DOUBLE: ClassVar[int] = ... + TYPE_ENUM: ClassVar[int] = ... + TYPE_FIXED32: ClassVar[int] = ... + TYPE_FIXED64: ClassVar[int] = ... + TYPE_FLOAT: ClassVar[int] = ... + TYPE_GROUP: ClassVar[int] = ... + TYPE_INT32: ClassVar[int] = ... + TYPE_INT64: ClassVar[int] = ... + TYPE_MESSAGE: ClassVar[int] = ... + TYPE_SFIXED32: ClassVar[int] = ... + TYPE_SFIXED64: ClassVar[int] = ... + TYPE_SINT32: ClassVar[int] = ... + TYPE_SINT64: ClassVar[int] = ... + TYPE_STRING: ClassVar[int] = ... + TYPE_UINT32: ClassVar[int] = ... + TYPE_UINT64: ClassVar[int] = ... + camelcase_name: Incomplete + containing_oneof: Incomplete + containing_type: Incomplete + cpp_type: Incomplete + default_value: Incomplete + enum_type: Incomplete + extension_scope: Incomplete + file: Incomplete + full_name: Incomplete + has_default_value: Incomplete + has_options: Incomplete + has_presence: Incomplete + index: Incomplete + is_extension: Incomplete + is_packed: Incomplete + json_name: Incomplete + label: Incomplete + message_type: Incomplete + name: Incomplete + number: Incomplete + type: Incomplete + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def GetOptions(self): ... + +@final +class FileDescriptor: + dependencies: Incomplete + enum_types_by_name: Incomplete + extensions_by_name: Incomplete + has_options: Incomplete + message_types_by_name: Incomplete + name: Incomplete + package: Incomplete + pool: Incomplete + public_dependencies: Incomplete + serialized_pb: Incomplete + services_by_name: Incomplete + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def CopyToProto(self, object, /): ... + def GetOptions(self): ... 
+ +@final +class MapIterator: + def __iter__(self): ... + def __next__(self): ... + +@final +class Message: + Extensions: Incomplete + def __init__(self, *args, **kwargs) -> None: ... # incomplete # incomplete + def ByteSize(self): ... + def Clear(self): ... + def ClearExtension(self, object, /): ... + def ClearField(self, object, /): ... + def CopyFrom(self, object, /): ... + def DiscardUnknownFields(self): ... + def FindInitializationErrors(self): ... + @classmethod + def FromString(cls, object, /): ... + def HasExtension(self, object, /): ... + def HasField(self, object, /): ... + def IsInitialized(self, *args, **kwargs): ... # incomplete + def ListFields(self): ... + def MergeFrom(self, object, /): ... + def MergeFromString(self, object, /): ... + def ParseFromString(self, object, /): ... + def SerializePartialToString(self, *args, **kwargs): ... # incomplete + def SerializeToString(self, *args, **kwargs): ... # incomplete + def SetInParent(self): ... + def UnknownFields(self): ... + def WhichOneof(self, object, /): ... + def __contains__(self, other) -> bool: ... + def __deepcopy__(self, memo=None): ... + def __delattr__(self, name): ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __gt__(self, other: object) -> bool: ... + def __le__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __setattr__(self, name, value): ... + +@final +class MessageMeta(type): ... + +@final +class MethodDescriptor: + client_streaming: Incomplete + containing_service: Incomplete + full_name: Incomplete + index: Incomplete + input_type: Incomplete + name: Incomplete + output_type: Incomplete + server_streaming: Incomplete + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def CopyToProto(self, object, /): ... + def GetOptions(self): ... + +@final +class OneofDescriptor: + containing_type: Incomplete + fields: Incomplete + full_name: Incomplete + has_options: Incomplete + index: Incomplete + name: Incomplete + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def GetOptions(self): ... + +@final +class RepeatedCompositeContainer: + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def MergeFrom(self, object, /): ... + def add(self, *args, **kwargs): ... # incomplete + def append(self, object, /): ... + def extend(self, object, /): ... + def insert(self, *args, **kwargs): ... # incomplete + def pop(self, *args, **kwargs): ... # incomplete + def remove(self, object, /): ... + def reverse(self): ... + def sort(self, *args, **kwargs): ... # incomplete + def __deepcopy__(self, memo=None): ... + def __delitem__(self, other) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __getitem__(self, index): ... + def __gt__(self, other: object) -> bool: ... + def __le__(self, other: object) -> bool: ... + def __len__(self) -> int: ... + def __lt__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __setitem__(self, index, object) -> None: ... + +@final +class RepeatedScalarContainer: + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def MergeFrom(self, object, /): ... + def append(self, object, /): ... + def extend(self, object, /): ... + def insert(self, *args, **kwargs): ... # incomplete + def pop(self, *args, **kwargs): ... # incomplete + def remove(self, object, /): ... + def reverse(self): ... 
+ def sort(self, *args, **kwargs): ... # incomplete + def __deepcopy__(self, memo=None): ... + def __delitem__(self, other) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __getitem__(self, index): ... + def __gt__(self, other: object) -> bool: ... + def __le__(self, other: object) -> bool: ... + def __len__(self) -> int: ... + def __lt__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __reduce__(self): ... + def __setitem__(self, index, object) -> None: ... + +@final +class ServiceDescriptor: + file: Incomplete + full_name: Incomplete + index: Incomplete + methods: Incomplete + methods_by_name: Incomplete + name: Incomplete + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def CopyToProto(self, object, /): ... + def FindMethodByName(self, object, /): ... + def GetOptions(self): ... + +@final +class UnknownFieldSet: + def __init__(self, *args, **kwargs) -> None: ... # incomplete + def __getitem__(self, index): ... + def __len__(self) -> int: ... + +def SetAllowOversizeProtos(object, /): ... # incomplete diff --git a/stubs/protobuf/google/protobuf/descriptor_database.pyi b/stubs/protobuf/google/protobuf/descriptor_database.pyi new file mode 100644 index 000000000000..6568fc2159cc --- /dev/null +++ b/stubs/protobuf/google/protobuf/descriptor_database.pyi @@ -0,0 +1,16 @@ +from typing import Final + +from google.protobuf.descriptor_pb2 import FileDescriptorProto + +__author__: Final[str] + +class Error(Exception): ... +class DescriptorDatabaseConflictingDefinitionError(Error): ... + +class DescriptorDatabase: + def __init__(self) -> None: ... + def Add(self, file_desc_proto) -> None: ... + def FindFileByName(self, name): ... + def FindFileContainingSymbol(self, symbol): ... + def FindFileContainingExtension(self, extendee_name, extension_number) -> FileDescriptorProto | None: ... + def FindAllExtensionNumbers(self, extendee_name) -> list[int]: ... diff --git a/stubs/protobuf/google/protobuf/message.pyi b/stubs/protobuf/google/protobuf/message.pyi index a63e610ffca5..ea1d636ee261 100644 --- a/stubs/protobuf/google/protobuf/message.pyi +++ b/stubs/protobuf/google/protobuf/message.pyi @@ -2,6 +2,8 @@ from collections.abc import Sequence from typing import Any from typing_extensions import Self +from google._upb._message import Descriptor as _upb_Descriptor + from .descriptor import Descriptor, FieldDescriptor from .internal.extension_dict import _ExtensionDict, _ExtensionFieldDescriptor @@ -10,7 +12,7 @@ class DecodeError(Error): ... class EncodeError(Error): ... class Message: - DESCRIPTOR: Descriptor + DESCRIPTOR: Descriptor | _upb_Descriptor def __deepcopy__(self, memo: Any = None) -> Self: ... def __eq__(self, other_msg): ... def __ne__(self, other_msg): ... 
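As a hedged sketch (not part of the patch series), the newly typed ProtoReflectionDescriptorDatabase can back a descriptor pool for a reflection-enabled server; the address is hypothetical:

    import grpc
    from google.protobuf.descriptor_pool import DescriptorPool
    from grpc_reflection.v1alpha.proto_reflection_descriptor_database import ProtoReflectionDescriptorDatabase

    with grpc.insecure_channel("localhost:50051") as channel:
        reflection_db = ProtoReflectionDescriptorDatabase(channel)
        pool = DescriptorPool(reflection_db)
        for service_name in reflection_db.get_services():
            # The pool falls back to the reflection database to resolve unknown symbols.
            print(pool.FindServiceByName(service_name).full_name)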
From 00456d63c928358b69cfc7e63cd9af91a3f9fc34 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 5 May 2025 09:22:59 -0400 Subject: [PATCH 285/388] Run Black on *_pb2 files (#13930) --- pyproject.toml | 5 - stubs/protobuf/google/protobuf/any_pb2.pyi | 7 +- stubs/protobuf/google/protobuf/api_pb2.pyi | 47 +- .../google/protobuf/compiler/plugin_pb2.pyi | 102 +- .../google/protobuf/descriptor_pb2.pyi | 874 ++++++++++-- .../protobuf/google/protobuf/duration_pb2.pyi | 7 +- stubs/protobuf/google/protobuf/empty_pb2.pyi | 4 +- .../google/protobuf/field_mask_pb2.pyi | 6 +- .../google/protobuf/source_context_pb2.pyi | 6 +- stubs/protobuf/google/protobuf/struct_pb2.pyi | 63 +- .../google/protobuf/timestamp_pb2.pyi | 7 +- stubs/protobuf/google/protobuf/type_pb2.pyi | 75 +- .../protobuf/google/protobuf/wrappers_pb2.pyi | 54 +- .../s2clientprotocol/common_pb2.pyi | 61 +- .../s2clientprotocol/data_pb2.pyi | 263 +++- .../s2clientprotocol/debug_pb2.pyi | 147 +- .../s2clientprotocol/error_pb2.pyi | 4 +- .../s2clientprotocol/query_pb2.pyi | 88 +- .../s2clientprotocol/raw_pb2.pyi | 445 +++++- .../s2clientprotocol/sc2api_pb2.pyi | 1237 ++++++++++++++--- .../s2clientprotocol/score_pb2.pyi | 162 ++- .../s2clientprotocol/spatial_pb2.pyi | 284 +++- .../s2clientprotocol/ui_pb2.pyi | 291 +++- .../compiler/xla/service/hlo_pb2.pyi | 669 +++++++-- .../service/hlo_profile_printer_data_pb2.pyi | 60 +- .../compiler/xla/service/metrics_pb2.pyi | 107 +- .../test_compilation_environment_pb2.pyi | 18 +- .../xla/service/xla_compile_result_pb2.pyi | 50 +- .../xla/tsl/protobuf/bfc_memory_map_pb2.pyi | 79 +- .../xla/tsl/protobuf/test_log_pb2.pyi | 223 ++- .../tensorflow/compiler/xla/xla_data_pb2.pyi | 602 ++++++-- .../tensorflow/compiler/xla/xla_pb2.pyi | 769 ++++++++-- .../example_parser_configuration_pb2.pyi | 57 +- .../tensorflow/core/example/example_pb2.pyi | 6 +- .../tensorflow/core/example/feature_pb2.pyi | 68 +- .../framework/allocation_description_pb2.pyi | 18 +- .../tensorflow/core/framework/api_def_pb2.pyi | 66 +- .../core/framework/attr_value_pb2.pyi | 134 +- .../core/framework/cost_graph_pb2.pyi | 73 +- .../framework/cpp_shape_inference_pb2.pyi | 17 +- .../core/framework/dataset_metadata_pb2.pyi | 6 +- .../core/framework/dataset_options_pb2.pyi | 424 +++++- .../tensorflow/core/framework/dataset_pb2.pyi | 25 +- .../core/framework/device_attributes_pb2.pyi | 46 +- .../core/framework/full_type_pb2.pyi | 8 +- .../core/framework/function_pb2.pyi | 103 +- .../core/framework/graph_debug_info_pb2.pyi | 68 +- .../tensorflow/core/framework/graph_pb2.pyi | 17 +- .../framework/graph_transfer_info_pb2.pyi | 89 +- .../core/framework/kernel_def_pb2.pyi | 28 +- .../core/framework/log_memory_pb2.pyi | 59 +- .../tensorflow/core/framework/model_pb2.pyi | 105 +- .../core/framework/node_def_pb2.pyi | 43 +- .../tensorflow/core/framework/op_def_pb2.pyi | 103 +- .../optimized_function_graph_pb2.pyi | 60 +- .../core/framework/reader_base_pb2.pyi | 14 +- .../core/framework/resource_handle_pb2.pyi | 22 +- .../core/framework/step_stats_pb2.pyi | 118 +- .../tensorflow/core/framework/summary_pb2.pyi | 115 +- .../core/framework/tensor_description_pb2.pyi | 9 +- .../tensorflow/core/framework/tensor_pb2.pyi | 56 +- .../core/framework/tensor_shape_pb2.pyi | 7 +- .../core/framework/tensor_slice_pb2.pyi | 17 +- .../tensorflow/core/framework/types_pb2.pyi | 6 +- .../core/framework/variable_pb2.pyi | 39 +- .../core/framework/versions_pb2.pyi | 5 +- .../tensorflow/core/protobuf/cluster_pb2.pyi | 18 +- .../protobuf/composite_tensor_variant_pb2.pyi | 6 +- 
.../tensorflow/core/protobuf/config_pb2.pyi | 449 +++++- .../core/protobuf/control_flow_pb2.pyi | 76 +- .../protobuf/core_platform_payloads_pb2.pyi | 10 +- .../core/protobuf/data_service_pb2.pyi | 52 +- .../core/protobuf/debug_event_pb2.pyi | 192 ++- .../tensorflow/core/protobuf/debug_pb2.pyi | 45 +- .../core/protobuf/device_filters_pb2.pyi | 19 +- .../core/protobuf/device_properties_pb2.pyi | 46 +- .../core/protobuf/fingerprint_pb2.pyi | 18 +- .../core/protobuf/meta_graph_pb2.pyi | 238 ++-- .../core/protobuf/named_tensor_pb2.pyi | 5 +- .../core/protobuf/queue_runner_pb2.pyi | 24 +- .../protobuf/remote_tensor_handle_pb2.pyi | 22 +- .../core/protobuf/rewriter_config_pb2.pyi | 136 +- .../core/protobuf/saved_model_pb2.pyi | 11 +- .../core/protobuf/saved_object_graph_pb2.pyi | 249 +++- .../tensorflow/core/protobuf/saver_pb2.pyi | 24 +- .../core/protobuf/service_config_pb2.pyi | 62 +- .../tensorflow/core/protobuf/snapshot_pb2.pyi | 51 +- .../tensorflow/core/protobuf/struct_pb2.pyi | 174 ++- .../core/protobuf/tensor_bundle_pb2.pyi | 34 +- .../core/protobuf/tensorflow_server_pb2.pyi | 34 +- .../protobuf/tpu/compilation_result_pb2.pyi | 22 +- .../core/protobuf/tpu/dynamic_padding_pb2.pyi | 7 +- .../tpu/optimization_parameters_pb2.pyi | 396 ++++-- .../core/protobuf/tpu/topology_pb2.pyi | 27 +- .../tpu/tpu_embedding_configuration_pb2.pyi | 84 +- .../protobuf/trackable_object_graph_pb2.pyi | 83 +- .../core/protobuf/transport_options_pb2.pyi | 6 +- .../core/protobuf/verifier_config_pb2.pyi | 11 +- .../tensorflow/core/util/event_pb2.pyi | 126 +- .../core/util/memmapped_file_system_pb2.pyi | 14 +- .../core/util/saved_tensor_slice_pb2.pyi | 17 +- .../keras/protobuf/projector_config_pb2.pyi | 39 +- .../keras/protobuf/saved_metadata_pb2.pyi | 22 +- .../python/keras/protobuf/versions_pb2.pyi | 5 +- .../tsl/protobuf/coordination_config_pb2.pyi | 41 +- .../tsl/protobuf/coordination_service_pb2.pyi | 245 ++-- .../distributed_runtime_payloads_pb2.pyi | 21 +- .../tensorflow/tsl/protobuf/dnn_pb2.pyi | 150 +- .../tensorflow/tsl/protobuf/histogram_pb2.pyi | 20 +- .../tsl/protobuf/rpc_options_pb2.pyi | 18 +- .../tensorflow/tsl/protobuf/status_pb2.pyi | 5 +- 111 files changed, 9646 insertions(+), 2555 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b3e1a5821102..0c443d8d9639 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,11 +9,6 @@ requires-python = ">=3.9" # Minimum version to run tests, used by uv run line-length = 130 target-version = ["py310"] skip-magic-trailing-comma = true -# Exclude protobuf files because they have long line lengths -# that can't be autofixed. Like docstrings and import aliases. -# Ideally, we could configure Black to allow longer line lengths -# for just these files, but doesn't seem possible yet. -force-exclude = ".*_pb2.pyi" [tool.ruff] line-length = 130 diff --git a/stubs/protobuf/google/protobuf/any_pb2.pyi b/stubs/protobuf/google/protobuf/any_pb2.pyi index 2edfefbfcacd..c12c7e4a8133 100644 --- a/stubs/protobuf/google/protobuf/any_pb2.pyi +++ b/stubs/protobuf/google/protobuf/any_pb2.pyi @@ -166,12 +166,7 @@ class Any(google.protobuf.message.Message, google.protobuf.internal.well_known_t """ value: builtins.bytes """Must be a valid serialized protocol buffer of the above specified type.""" - def __init__( - self, - *, - type_url: builtins.str | None = ..., - value: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, type_url: builtins.str | None = ..., value: builtins.bytes | None = ...) -> None: ... 
def ClearField(self, field_name: typing.Literal["type_url", b"type_url", "value", b"value"]) -> None: ... global___Any = Any diff --git a/stubs/protobuf/google/protobuf/api_pb2.pyi b/stubs/protobuf/google/protobuf/api_pb2.pyi index f48ce3dd2de2..71e8ca47853f 100644 --- a/stubs/protobuf/google/protobuf/api_pb2.pyi +++ b/stubs/protobuf/google/protobuf/api_pb2.pyi @@ -123,7 +123,25 @@ class Api(google.protobuf.message.Message): syntax: google.protobuf.type_pb2.Syntax.ValueType | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["source_context", b"source_context"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["methods", b"methods", "mixins", b"mixins", "name", b"name", "options", b"options", "source_context", b"source_context", "syntax", b"syntax", "version", b"version"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "methods", + b"methods", + "mixins", + b"mixins", + "name", + b"name", + "options", + b"options", + "source_context", + b"source_context", + "syntax", + b"syntax", + "version", + b"version", + ], + ) -> None: ... global___Api = Api @@ -167,7 +185,25 @@ class Method(google.protobuf.message.Message): options: collections.abc.Iterable[google.protobuf.type_pb2.Option] | None = ..., syntax: google.protobuf.type_pb2.Syntax.ValueType | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["name", b"name", "options", b"options", "request_streaming", b"request_streaming", "request_type_url", b"request_type_url", "response_streaming", b"response_streaming", "response_type_url", b"response_type_url", "syntax", b"syntax"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "name", + b"name", + "options", + b"options", + "request_streaming", + b"request_streaming", + "request_type_url", + b"request_type_url", + "response_streaming", + b"response_streaming", + "response_type_url", + b"response_type_url", + "syntax", + b"syntax", + ], + ) -> None: ... global___Method = Method @@ -263,12 +299,7 @@ class Mixin(google.protobuf.message.Message): """If non-empty specifies a path under which inherited HTTP paths are rooted. """ - def __init__( - self, - *, - name: builtins.str | None = ..., - root: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, name: builtins.str | None = ..., root: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["name", b"name", "root", b"root"]) -> None: ... global___Mixin = Mixin diff --git a/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi b/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi index 4ba99eafb811..a6f744c26074 100644 --- a/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi +++ b/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi @@ -58,8 +58,12 @@ class Version(google.protobuf.message.Message): patch: builtins.int | None = ..., suffix: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["major", b"major", "minor", b"minor", "patch", b"patch", "suffix", b"suffix"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["major", b"major", "minor", b"minor", "patch", b"patch", "suffix", b"suffix"]) -> None: ... + def HasField( + self, field_name: typing.Literal["major", b"major", "minor", b"minor", "patch", b"patch", "suffix", b"suffix"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["major", b"major", "minor", b"minor", "patch", b"patch", "suffix", b"suffix"] + ) -> None: ... 
global___Version = Version @@ -84,7 +88,9 @@ class CodeGeneratorRequest(google.protobuf.message.Message): """ @property - def proto_file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.descriptor_pb2.FileDescriptorProto]: + def proto_file( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.descriptor_pb2.FileDescriptorProto]: """FileDescriptorProtos for all files in files_to_generate and everything they import. The files will appear in topological order, so each file appears before any file that imports it. @@ -107,7 +113,9 @@ class CodeGeneratorRequest(google.protobuf.message.Message): """ @property - def source_file_descriptors(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.descriptor_pb2.FileDescriptorProto]: + def source_file_descriptors( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.descriptor_pb2.FileDescriptorProto]: """File descriptors with all options, including source-retention options. These descriptors are only provided for the files listed in files_to_generate. @@ -126,8 +134,24 @@ class CodeGeneratorRequest(google.protobuf.message.Message): source_file_descriptors: collections.abc.Iterable[google.protobuf.descriptor_pb2.FileDescriptorProto] | None = ..., compiler_version: global___Version | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["compiler_version", b"compiler_version", "parameter", b"parameter"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["compiler_version", b"compiler_version", "file_to_generate", b"file_to_generate", "parameter", b"parameter", "proto_file", b"proto_file", "source_file_descriptors", b"source_file_descriptors"]) -> None: ... + def HasField( + self, field_name: typing.Literal["compiler_version", b"compiler_version", "parameter", b"parameter"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "compiler_version", + b"compiler_version", + "file_to_generate", + b"file_to_generate", + "parameter", + b"parameter", + "proto_file", + b"proto_file", + "source_file_descriptors", + b"source_file_descriptors", + ], + ) -> None: ... global___CodeGeneratorRequest = CodeGeneratorRequest @@ -141,7 +165,9 @@ class CodeGeneratorResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _FeatureEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CodeGeneratorResponse._Feature.ValueType], builtins.type): + class _FeatureEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CodeGeneratorResponse._Feature.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor FEATURE_NONE: CodeGeneratorResponse._Feature.ValueType # 0 FEATURE_PROTO3_OPTIONAL: CodeGeneratorResponse._Feature.ValueType # 1 @@ -233,8 +259,32 @@ class CodeGeneratorResponse(google.protobuf.message.Message): content: builtins.str | None = ..., generated_code_info: google.protobuf.descriptor_pb2.GeneratedCodeInfo | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["content", b"content", "generated_code_info", b"generated_code_info", "insertion_point", b"insertion_point", "name", b"name"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["content", b"content", "generated_code_info", b"generated_code_info", "insertion_point", b"insertion_point", "name", b"name"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "content", + b"content", + "generated_code_info", + b"generated_code_info", + "insertion_point", + b"insertion_point", + "name", + b"name", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "content", + b"content", + "generated_code_info", + b"generated_code_info", + "insertion_point", + b"insertion_point", + "name", + b"name", + ], + ) -> None: ... ERROR_FIELD_NUMBER: builtins.int SUPPORTED_FEATURES_FIELD_NUMBER: builtins.int @@ -268,7 +318,9 @@ class CodeGeneratorResponse(google.protobuf.message.Message): effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. """ @property - def file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CodeGeneratorResponse.File]: ... + def file( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CodeGeneratorResponse.File]: ... def __init__( self, *, @@ -278,7 +330,33 @@ class CodeGeneratorResponse(google.protobuf.message.Message): maximum_edition: builtins.int | None = ..., file: collections.abc.Iterable[global___CodeGeneratorResponse.File] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["error", b"error", "maximum_edition", b"maximum_edition", "minimum_edition", b"minimum_edition", "supported_features", b"supported_features"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["error", b"error", "file", b"file", "maximum_edition", b"maximum_edition", "minimum_edition", b"minimum_edition", "supported_features", b"supported_features"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "error", + b"error", + "maximum_edition", + b"maximum_edition", + "minimum_edition", + b"minimum_edition", + "supported_features", + b"supported_features", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "error", + b"error", + "file", + b"file", + "maximum_edition", + b"maximum_edition", + "minimum_edition", + b"minimum_edition", + "supported_features", + b"supported_features", + ], + ) -> None: ... global___CodeGeneratorResponse = CodeGeneratorResponse diff --git a/stubs/protobuf/google/protobuf/descriptor_pb2.pyi b/stubs/protobuf/google/protobuf/descriptor_pb2.pyi index 6ff2ce84e0b2..d3bc182e4be3 100644 --- a/stubs/protobuf/google/protobuf/descriptor_pb2.pyi +++ b/stubs/protobuf/google/protobuf/descriptor_pb2.pyi @@ -114,11 +114,7 @@ class FileDescriptorSet(google.protobuf.message.Message): FILE_FIELD_NUMBER: builtins.int @property def file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FileDescriptorProto]: ... - def __init__( - self, - *, - file: collections.abc.Iterable[global___FileDescriptorProto] | None = ..., - ) -> None: ... + def __init__(self, *, file: collections.abc.Iterable[global___FileDescriptorProto] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["file", b"file"]) -> None: ... global___FileDescriptorSet = FileDescriptorSet @@ -212,8 +208,54 @@ class FileDescriptorProto(google.protobuf.message.Message): syntax: builtins.str | None = ..., edition: global___Edition.ValueType | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["edition", b"edition", "name", b"name", "options", b"options", "package", b"package", "source_code_info", b"source_code_info", "syntax", b"syntax"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["dependency", b"dependency", "edition", b"edition", "enum_type", b"enum_type", "extension", b"extension", "message_type", b"message_type", "name", b"name", "options", b"options", "package", b"package", "public_dependency", b"public_dependency", "service", b"service", "source_code_info", b"source_code_info", "syntax", b"syntax", "weak_dependency", b"weak_dependency"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "edition", + b"edition", + "name", + b"name", + "options", + b"options", + "package", + b"package", + "source_code_info", + b"source_code_info", + "syntax", + b"syntax", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "dependency", + b"dependency", + "edition", + b"edition", + "enum_type", + b"enum_type", + "extension", + b"extension", + "message_type", + b"message_type", + "name", + b"name", + "options", + b"options", + "package", + b"package", + "public_dependency", + b"public_dependency", + "service", + b"service", + "source_code_info", + b"source_code_info", + "syntax", + b"syntax", + "weak_dependency", + b"weak_dependency", + ], + ) -> None: ... global___FileDescriptorProto = FileDescriptorProto @@ -243,7 +285,9 @@ class DescriptorProto(google.protobuf.message.Message): end: builtins.int | None = ..., options: global___ExtensionRangeOptions | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["end", b"end", "options", b"options", "start", b"start"]) -> builtins.bool: ... + def HasField( + self, field_name: typing.Literal["end", b"end", "options", b"options", "start", b"start"] + ) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["end", b"end", "options", b"options", "start", b"start"]) -> None: ... @typing.final @@ -261,12 +305,7 @@ class DescriptorProto(google.protobuf.message.Message): """Inclusive.""" end: builtins.int """Exclusive.""" - def __init__( - self, - *, - start: builtins.int | None = ..., - end: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, start: builtins.int | None = ..., end: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["end", b"end", "start", b"start"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["end", b"end", "start", b"start"]) -> None: ... @@ -290,13 +329,19 @@ class DescriptorProto(google.protobuf.message.Message): @property def enum_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumDescriptorProto]: ... @property - def extension_range(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto.ExtensionRange]: ... + def extension_range( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto.ExtensionRange]: ... @property - def oneof_decl(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OneofDescriptorProto]: ... + def oneof_decl( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OneofDescriptorProto]: ... @property def options(self) -> global___MessageOptions: ... 
@property - def reserved_range(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto.ReservedRange]: ... + def reserved_range( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto.ReservedRange]: ... @property def reserved_name(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """Reserved field names, which may not be used by fields in the same message. @@ -318,7 +363,31 @@ class DescriptorProto(google.protobuf.message.Message): reserved_name: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["name", b"name", "options", b"options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["enum_type", b"enum_type", "extension", b"extension", "extension_range", b"extension_range", "field", b"field", "name", b"name", "nested_type", b"nested_type", "oneof_decl", b"oneof_decl", "options", b"options", "reserved_name", b"reserved_name", "reserved_range", b"reserved_range"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "enum_type", + b"enum_type", + "extension", + b"extension", + "extension_range", + b"extension_range", + "field", + b"field", + "name", + b"name", + "nested_type", + b"nested_type", + "oneof_decl", + b"oneof_decl", + "options", + b"options", + "reserved_name", + b"reserved_name", + "reserved_range", + b"reserved_range", + ], + ) -> None: ... global___DescriptorProto = DescriptorProto @@ -330,7 +399,10 @@ class ExtensionRangeOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _VerificationStateEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ExtensionRangeOptions._VerificationState.ValueType], builtins.type): + class _VerificationStateEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ExtensionRangeOptions._VerificationState.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DECLARATION: ExtensionRangeOptions._VerificationState.ValueType # 0 """All the extensions of the range must be declared.""" @@ -381,8 +453,18 @@ class ExtensionRangeOptions(google.protobuf.message.Message): reserved: builtins.bool | None = ..., repeated: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["full_name", b"full_name", "number", b"number", "repeated", b"repeated", "reserved", b"reserved", "type", b"type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["full_name", b"full_name", "number", b"number", "repeated", b"repeated", "reserved", b"reserved", "type", b"type"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "full_name", b"full_name", "number", b"number", "repeated", b"repeated", "reserved", b"reserved", "type", b"type" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "full_name", b"full_name", "number", b"number", "repeated", b"repeated", "reserved", b"reserved", "type", b"type" + ], + ) -> None: ... UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int DECLARATION_FIELD_NUMBER: builtins.int @@ -394,11 +476,15 @@ class ExtensionRangeOptions(google.protobuf.message.Message): are marked as UNVERIFIED. 
""" @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + def uninterpreted_option( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: """The parser stores options it doesn't recognize here. See above.""" @property - def declaration(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ExtensionRangeOptions.Declaration]: + def declaration( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ExtensionRangeOptions.Declaration]: """For external users: DO NOT USE. We are in the process of open sourcing extension declaration and executing internal cleanups before it can be used externally. @@ -417,7 +503,19 @@ class ExtensionRangeOptions(google.protobuf.message.Message): verification: global___ExtensionRangeOptions.VerificationState.ValueType | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["features", b"features", "verification", b"verification"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["declaration", b"declaration", "features", b"features", "uninterpreted_option", b"uninterpreted_option", "verification", b"verification"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "declaration", + b"declaration", + "features", + b"features", + "uninterpreted_option", + b"uninterpreted_option", + "verification", + b"verification", + ], + ) -> None: ... global___ExtensionRangeOptions = ExtensionRangeOptions @@ -431,7 +529,9 @@ class FieldDescriptorProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldDescriptorProto._Type.ValueType], builtins.type): + class _TypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldDescriptorProto._Type.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor TYPE_DOUBLE: FieldDescriptorProto._Type.ValueType # 1 """0 is reserved for errors. @@ -514,7 +614,9 @@ class FieldDescriptorProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _LabelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldDescriptorProto._Label.ValueType], builtins.type): + class _LabelEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldDescriptorProto._Label.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor LABEL_OPTIONAL: FieldDescriptorProto._Label.ValueType # 1 """0 is reserved for errors""" @@ -620,8 +722,60 @@ class FieldDescriptorProto(google.protobuf.message.Message): options: global___FieldOptions | None = ..., proto3_optional: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["default_value", b"default_value", "extendee", b"extendee", "json_name", b"json_name", "label", b"label", "name", b"name", "number", b"number", "oneof_index", b"oneof_index", "options", b"options", "proto3_optional", b"proto3_optional", "type", b"type", "type_name", b"type_name"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["default_value", b"default_value", "extendee", b"extendee", "json_name", b"json_name", "label", b"label", "name", b"name", "number", b"number", "oneof_index", b"oneof_index", "options", b"options", "proto3_optional", b"proto3_optional", "type", b"type", "type_name", b"type_name"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "default_value", + b"default_value", + "extendee", + b"extendee", + "json_name", + b"json_name", + "label", + b"label", + "name", + b"name", + "number", + b"number", + "oneof_index", + b"oneof_index", + "options", + b"options", + "proto3_optional", + b"proto3_optional", + "type", + b"type", + "type_name", + b"type_name", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "default_value", + b"default_value", + "extendee", + b"extendee", + "json_name", + b"json_name", + "label", + b"label", + "name", + b"name", + "number", + b"number", + "oneof_index", + b"oneof_index", + "options", + b"options", + "proto3_optional", + b"proto3_optional", + "type", + b"type", + "type_name", + b"type_name", + ], + ) -> None: ... global___FieldDescriptorProto = FieldDescriptorProto @@ -636,12 +790,7 @@ class OneofDescriptorProto(google.protobuf.message.Message): name: builtins.str @property def options(self) -> global___OneofOptions: ... - def __init__( - self, - *, - name: builtins.str | None = ..., - options: global___OneofOptions | None = ..., - ) -> None: ... + def __init__(self, *, name: builtins.str | None = ..., options: global___OneofOptions | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["name", b"name", "options", b"options"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["name", b"name", "options", b"options"]) -> None: ... @@ -671,12 +820,7 @@ class EnumDescriptorProto(google.protobuf.message.Message): """Inclusive.""" end: builtins.int """Inclusive.""" - def __init__( - self, - *, - start: builtins.int | None = ..., - end: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, start: builtins.int | None = ..., end: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["end", b"end", "start", b"start"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["end", b"end", "start", b"start"]) -> None: ... @@ -691,7 +835,9 @@ class EnumDescriptorProto(google.protobuf.message.Message): @property def options(self) -> global___EnumOptions: ... @property - def reserved_range(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumDescriptorProto.EnumReservedRange]: + def reserved_range( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumDescriptorProto.EnumReservedRange]: """Range of reserved numeric values. Reserved numeric values may not be used by enum values in the same enum declaration. Reserved ranges may not overlap. @@ -713,7 +859,21 @@ class EnumDescriptorProto(google.protobuf.message.Message): reserved_name: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["name", b"name", "options", b"options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["name", b"name", "options", b"options", "reserved_name", b"reserved_name", "reserved_range", b"reserved_range", "value", b"value"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "name", + b"name", + "options", + b"options", + "reserved_name", + b"reserved_name", + "reserved_range", + b"reserved_range", + "value", + b"value", + ], + ) -> None: ... global___EnumDescriptorProto = EnumDescriptorProto @@ -737,7 +897,9 @@ class EnumValueDescriptorProto(google.protobuf.message.Message): number: builtins.int | None = ..., options: global___EnumValueOptions | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["name", b"name", "number", b"number", "options", b"options"]) -> builtins.bool: ... + def HasField( + self, field_name: typing.Literal["name", b"name", "number", b"number", "options", b"options"] + ) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["name", b"name", "number", b"number", "options", b"options"]) -> None: ... global___EnumValueDescriptorProto = EnumValueDescriptorProto @@ -802,8 +964,40 @@ class MethodDescriptorProto(google.protobuf.message.Message): client_streaming: builtins.bool | None = ..., server_streaming: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["client_streaming", b"client_streaming", "input_type", b"input_type", "name", b"name", "options", b"options", "output_type", b"output_type", "server_streaming", b"server_streaming"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["client_streaming", b"client_streaming", "input_type", b"input_type", "name", b"name", "options", b"options", "output_type", b"output_type", "server_streaming", b"server_streaming"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "client_streaming", + b"client_streaming", + "input_type", + b"input_type", + "name", + b"name", + "options", + b"options", + "output_type", + b"output_type", + "server_streaming", + b"server_streaming", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "client_streaming", + b"client_streaming", + "input_type", + b"input_type", + "name", + b"name", + "options", + b"options", + "output_type", + b"output_type", + "server_streaming", + b"server_streaming", + ], + ) -> None: ... global___MethodDescriptorProto = MethodDescriptorProto @@ -845,7 +1039,9 @@ class FileOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _OptimizeModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FileOptions._OptimizeMode.ValueType], builtins.type): + class _OptimizeModeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FileOptions._OptimizeMode.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor SPEED: FileOptions._OptimizeMode.ValueType # 1 """Generate complete code for parsing, serialization,""" @@ -996,7 +1192,9 @@ class FileOptions(google.protobuf.message.Message): """ @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + def uninterpreted_option( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: """The parser stores options it doesn't recognize here. See the documentation for the "Options" section above. 
""" @@ -1026,8 +1224,98 @@ class FileOptions(google.protobuf.message.Message): features: global___FeatureSet | None = ..., uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["cc_enable_arenas", b"cc_enable_arenas", "cc_generic_services", b"cc_generic_services", "csharp_namespace", b"csharp_namespace", "deprecated", b"deprecated", "features", b"features", "go_package", b"go_package", "java_generate_equals_and_hash", b"java_generate_equals_and_hash", "java_generic_services", b"java_generic_services", "java_multiple_files", b"java_multiple_files", "java_outer_classname", b"java_outer_classname", "java_package", b"java_package", "java_string_check_utf8", b"java_string_check_utf8", "objc_class_prefix", b"objc_class_prefix", "optimize_for", b"optimize_for", "php_class_prefix", b"php_class_prefix", "php_metadata_namespace", b"php_metadata_namespace", "php_namespace", b"php_namespace", "py_generic_services", b"py_generic_services", "ruby_package", b"ruby_package", "swift_prefix", b"swift_prefix"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["cc_enable_arenas", b"cc_enable_arenas", "cc_generic_services", b"cc_generic_services", "csharp_namespace", b"csharp_namespace", "deprecated", b"deprecated", "features", b"features", "go_package", b"go_package", "java_generate_equals_and_hash", b"java_generate_equals_and_hash", "java_generic_services", b"java_generic_services", "java_multiple_files", b"java_multiple_files", "java_outer_classname", b"java_outer_classname", "java_package", b"java_package", "java_string_check_utf8", b"java_string_check_utf8", "objc_class_prefix", b"objc_class_prefix", "optimize_for", b"optimize_for", "php_class_prefix", b"php_class_prefix", "php_metadata_namespace", b"php_metadata_namespace", "php_namespace", b"php_namespace", "py_generic_services", b"py_generic_services", "ruby_package", b"ruby_package", "swift_prefix", b"swift_prefix", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "cc_enable_arenas", + b"cc_enable_arenas", + "cc_generic_services", + b"cc_generic_services", + "csharp_namespace", + b"csharp_namespace", + "deprecated", + b"deprecated", + "features", + b"features", + "go_package", + b"go_package", + "java_generate_equals_and_hash", + b"java_generate_equals_and_hash", + "java_generic_services", + b"java_generic_services", + "java_multiple_files", + b"java_multiple_files", + "java_outer_classname", + b"java_outer_classname", + "java_package", + b"java_package", + "java_string_check_utf8", + b"java_string_check_utf8", + "objc_class_prefix", + b"objc_class_prefix", + "optimize_for", + b"optimize_for", + "php_class_prefix", + b"php_class_prefix", + "php_metadata_namespace", + b"php_metadata_namespace", + "php_namespace", + b"php_namespace", + "py_generic_services", + b"py_generic_services", + "ruby_package", + b"ruby_package", + "swift_prefix", + b"swift_prefix", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "cc_enable_arenas", + b"cc_enable_arenas", + "cc_generic_services", + b"cc_generic_services", + "csharp_namespace", + b"csharp_namespace", + "deprecated", + b"deprecated", + "features", + b"features", + "go_package", + b"go_package", + "java_generate_equals_and_hash", + b"java_generate_equals_and_hash", + "java_generic_services", + b"java_generic_services", + "java_multiple_files", + b"java_multiple_files", + "java_outer_classname", + b"java_outer_classname", + "java_package", + b"java_package", + "java_string_check_utf8", + b"java_string_check_utf8", + "objc_class_prefix", + b"objc_class_prefix", + "optimize_for", + b"optimize_for", + "php_class_prefix", + b"php_class_prefix", + "php_metadata_namespace", + b"php_metadata_namespace", + "php_namespace", + b"php_namespace", + "py_generic_services", + b"py_generic_services", + "ruby_package", + b"ruby_package", + "swift_prefix", + b"swift_prefix", + "uninterpreted_option", + b"uninterpreted_option", + ], + ) -> None: ... global___FileOptions = FileOptions @@ -1117,7 +1405,9 @@ class MessageOptions(google.protobuf.message.Message): """ @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + def uninterpreted_option( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: """The parser stores options it doesn't recognize here. See above.""" def __init__( @@ -1131,8 +1421,42 @@ class MessageOptions(google.protobuf.message.Message): features: global___FeatureSet | None = ..., uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["deprecated", b"deprecated", "deprecated_legacy_json_field_conflicts", b"deprecated_legacy_json_field_conflicts", "features", b"features", "map_entry", b"map_entry", "message_set_wire_format", b"message_set_wire_format", "no_standard_descriptor_accessor", b"no_standard_descriptor_accessor"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["deprecated", b"deprecated", "deprecated_legacy_json_field_conflicts", b"deprecated_legacy_json_field_conflicts", "features", b"features", "map_entry", b"map_entry", "message_set_wire_format", b"message_set_wire_format", "no_standard_descriptor_accessor", b"no_standard_descriptor_accessor", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "deprecated", + b"deprecated", + "deprecated_legacy_json_field_conflicts", + b"deprecated_legacy_json_field_conflicts", + "features", + b"features", + "map_entry", + b"map_entry", + "message_set_wire_format", + b"message_set_wire_format", + "no_standard_descriptor_accessor", + b"no_standard_descriptor_accessor", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "deprecated", + b"deprecated", + "deprecated_legacy_json_field_conflicts", + b"deprecated_legacy_json_field_conflicts", + "features", + b"features", + "map_entry", + b"map_entry", + "message_set_wire_format", + b"message_set_wire_format", + "no_standard_descriptor_accessor", + b"no_standard_descriptor_accessor", + "uninterpreted_option", + b"uninterpreted_option", + ], + ) -> None: ... 
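The HasField/ClearField overloads being reflowed here take typing.Literal unions of field names (in both str and bytes form); at runtime they are the ordinary protobuf presence API. A short sketch of the calls these overloads describe (descriptor.proto is a proto2 file, so its scalar fields track presence):

# Presence checks on descriptor_pb2 messages, matching the Literal overloads above.
from google.protobuf import descriptor_pb2

field = descriptor_pb2.FieldDescriptorProto(
    name="id", number=1, type=descriptor_pb2.FieldDescriptorProto.TYPE_INT64
)
assert field.HasField("name")
field.ClearField("name")
assert not field.HasField("name")

opts = descriptor_pb2.FileOptions(java_package="com.example.demo")
assert opts.HasField("java_package")
opts.ClearField("java_package")
# A name outside the Literal union is rejected by the type checker and raises
# ValueError at runtime, e.g. opts.HasField("no_such_field").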
global___MessageOptions = MessageOptions @@ -1144,7 +1468,9 @@ class FieldOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _CTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._CType.ValueType], builtins.type): + class _CTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._CType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor STRING: FieldOptions._CType.ValueType # 0 """Default mode.""" @@ -1175,7 +1501,9 @@ class FieldOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _JSTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._JSType.ValueType], builtins.type): + class _JSTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._JSType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor JS_NORMAL: FieldOptions._JSType.ValueType # 0 """Use the default type.""" @@ -1196,7 +1524,9 @@ class FieldOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _OptionRetentionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._OptionRetention.ValueType], builtins.type): + class _OptionRetentionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._OptionRetention.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor RETENTION_UNKNOWN: FieldOptions._OptionRetention.ValueType # 0 RETENTION_RUNTIME: FieldOptions._OptionRetention.ValueType # 1 @@ -1213,7 +1543,9 @@ class FieldOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _OptionTargetTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._OptionTargetType.ValueType], builtins.type): + class _OptionTargetTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._OptionTargetType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor TARGET_TYPE_UNKNOWN: FieldOptions._OptionTargetType.ValueType # 0 TARGET_TYPE_FILE: FieldOptions._OptionTargetType.ValueType # 1 @@ -1252,12 +1584,7 @@ class FieldOptions(google.protobuf.message.Message): edition: global___Edition.ValueType value: builtins.str """Textproto value.""" - def __init__( - self, - *, - edition: global___Edition.ValueType | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, edition: global___Edition.ValueType | None = ..., value: builtins.str | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["edition", b"edition", "value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["edition", b"edition", "value", b"value"]) -> None: ... @@ -1297,8 +1624,32 @@ class FieldOptions(google.protobuf.message.Message): deprecation_warning: builtins.str | None = ..., edition_removed: global___Edition.ValueType | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["deprecation_warning", b"deprecation_warning", "edition_deprecated", b"edition_deprecated", "edition_introduced", b"edition_introduced", "edition_removed", b"edition_removed"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["deprecation_warning", b"deprecation_warning", "edition_deprecated", b"edition_deprecated", "edition_introduced", b"edition_introduced", "edition_removed", b"edition_removed"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "deprecation_warning", + b"deprecation_warning", + "edition_deprecated", + b"edition_deprecated", + "edition_introduced", + b"edition_introduced", + "edition_removed", + b"edition_removed", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "deprecation_warning", + b"deprecation_warning", + "edition_deprecated", + b"edition_deprecated", + "edition_introduced", + b"edition_introduced", + "edition_removed", + b"edition_removed", + ], + ) -> None: ... CTYPE_FIELD_NUMBER: builtins.int PACKED_FIELD_NUMBER: builtins.int @@ -1388,9 +1739,13 @@ class FieldOptions(google.protobuf.message.Message): """ retention: global___FieldOptions.OptionRetention.ValueType @property - def targets(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___FieldOptions.OptionTargetType.ValueType]: ... + def targets( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___FieldOptions.OptionTargetType.ValueType]: ... @property - def edition_defaults(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FieldOptions.EditionDefault]: ... + def edition_defaults( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FieldOptions.EditionDefault]: ... @property def features(self) -> global___FeatureSet: """Any features defined in the specific edition. @@ -1402,7 +1757,9 @@ class FieldOptions(google.protobuf.message.Message): @property def feature_support(self) -> global___FieldOptions.FeatureSupport: ... @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + def uninterpreted_option( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: """The parser stores options it doesn't recognize here. See above.""" def __init__( @@ -1423,8 +1780,66 @@ class FieldOptions(google.protobuf.message.Message): feature_support: global___FieldOptions.FeatureSupport | None = ..., uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ctype", b"ctype", "debug_redact", b"debug_redact", "deprecated", b"deprecated", "feature_support", b"feature_support", "features", b"features", "jstype", b"jstype", "lazy", b"lazy", "packed", b"packed", "retention", b"retention", "unverified_lazy", b"unverified_lazy", "weak", b"weak"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ctype", b"ctype", "debug_redact", b"debug_redact", "deprecated", b"deprecated", "edition_defaults", b"edition_defaults", "feature_support", b"feature_support", "features", b"features", "jstype", b"jstype", "lazy", b"lazy", "packed", b"packed", "retention", b"retention", "targets", b"targets", "uninterpreted_option", b"uninterpreted_option", "unverified_lazy", b"unverified_lazy", "weak", b"weak"]) -> None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "ctype", + b"ctype", + "debug_redact", + b"debug_redact", + "deprecated", + b"deprecated", + "feature_support", + b"feature_support", + "features", + b"features", + "jstype", + b"jstype", + "lazy", + b"lazy", + "packed", + b"packed", + "retention", + b"retention", + "unverified_lazy", + b"unverified_lazy", + "weak", + b"weak", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ctype", + b"ctype", + "debug_redact", + b"debug_redact", + "deprecated", + b"deprecated", + "edition_defaults", + b"edition_defaults", + "feature_support", + b"feature_support", + "features", + b"features", + "jstype", + b"jstype", + "lazy", + b"lazy", + "packed", + b"packed", + "retention", + b"retention", + "targets", + b"targets", + "uninterpreted_option", + b"uninterpreted_option", + "unverified_lazy", + b"unverified_lazy", + "weak", + b"weak", + ], + ) -> None: ... global___FieldOptions = FieldOptions @@ -1443,7 +1858,9 @@ class OneofOptions(google.protobuf.message.Message): """ @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + def uninterpreted_option( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: """The parser stores options it doesn't recognize here. See above.""" def __init__( @@ -1453,7 +1870,9 @@ class OneofOptions(google.protobuf.message.Message): uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["features", b"features"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["features", b"features", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["features", b"features", "uninterpreted_option", b"uninterpreted_option"] + ) -> None: ... global___OneofOptions = OneofOptions @@ -1493,7 +1912,9 @@ class EnumOptions(google.protobuf.message.Message): """ @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + def uninterpreted_option( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: """The parser stores options it doesn't recognize here. See above.""" def __init__( @@ -1505,8 +1926,34 @@ class EnumOptions(google.protobuf.message.Message): features: global___FeatureSet | None = ..., uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["allow_alias", b"allow_alias", "deprecated", b"deprecated", "deprecated_legacy_json_field_conflicts", b"deprecated_legacy_json_field_conflicts", "features", b"features"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["allow_alias", b"allow_alias", "deprecated", b"deprecated", "deprecated_legacy_json_field_conflicts", b"deprecated_legacy_json_field_conflicts", "features", b"features", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "allow_alias", + b"allow_alias", + "deprecated", + b"deprecated", + "deprecated_legacy_json_field_conflicts", + b"deprecated_legacy_json_field_conflicts", + "features", + b"features", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "allow_alias", + b"allow_alias", + "deprecated", + b"deprecated", + "deprecated_legacy_json_field_conflicts", + b"deprecated_legacy_json_field_conflicts", + "features", + b"features", + "uninterpreted_option", + b"uninterpreted_option", + ], + ) -> None: ... global___EnumOptions = EnumOptions @@ -1543,7 +1990,9 @@ class EnumValueOptions(google.protobuf.message.Message): """Information about the support window of a feature value.""" @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + def uninterpreted_option( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: """The parser stores options it doesn't recognize here. See above.""" def __init__( @@ -1555,8 +2004,34 @@ class EnumValueOptions(google.protobuf.message.Message): feature_support: global___FieldOptions.FeatureSupport | None = ..., uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["debug_redact", b"debug_redact", "deprecated", b"deprecated", "feature_support", b"feature_support", "features", b"features"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["debug_redact", b"debug_redact", "deprecated", b"deprecated", "feature_support", b"feature_support", "features", b"features", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "debug_redact", + b"debug_redact", + "deprecated", + b"deprecated", + "feature_support", + b"feature_support", + "features", + b"features", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "debug_redact", + b"debug_redact", + "deprecated", + b"deprecated", + "feature_support", + b"feature_support", + "features", + b"features", + "uninterpreted_option", + b"uninterpreted_option", + ], + ) -> None: ... global___EnumValueOptions = EnumValueOptions @@ -1587,7 +2062,9 @@ class ServiceOptions(google.protobuf.message.Message): """ @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + def uninterpreted_option( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: """The parser stores options it doesn't recognize here. See above.""" def __init__( @@ -1598,7 +2075,12 @@ class ServiceOptions(google.protobuf.message.Message): uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["deprecated", b"deprecated", "features", b"features"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["deprecated", b"deprecated", "features", b"features", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "deprecated", b"deprecated", "features", b"features", "uninterpreted_option", b"uninterpreted_option" + ], + ) -> None: ... 
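Several of the properties reflowed above return RepeatedCompositeFieldContainer; elements are created in place with .add() rather than assigned. A small sketch using the uninterpreted_option field that every *Options message carries (the option name used below is a made-up placeholder):

from google.protobuf import descriptor_pb2

opts = descriptor_pb2.EnumValueOptions(deprecated=True)
uo = opts.uninterpreted_option.add()                           # returns a mutable UninterpretedOption
uo.name.add(name_part="my_custom_option", is_extension=True)   # placeholder option name
uo.identifier_value = "SOME_VALUE"
assert len(opts.uninterpreted_option) == 1
assert opts.HasField("deprecated")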
global___ServiceOptions = ServiceOptions @@ -1610,7 +2092,9 @@ class MethodOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _IdempotencyLevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[MethodOptions._IdempotencyLevel.ValueType], builtins.type): + class _IdempotencyLevelEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[MethodOptions._IdempotencyLevel.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor IDEMPOTENCY_UNKNOWN: MethodOptions._IdempotencyLevel.ValueType # 0 NO_SIDE_EFFECTS: MethodOptions._IdempotencyLevel.ValueType # 1 @@ -1655,7 +2139,9 @@ class MethodOptions(google.protobuf.message.Message): """ @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + def uninterpreted_option( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: """The parser stores options it doesn't recognize here. See above.""" def __init__( @@ -1666,8 +2152,25 @@ class MethodOptions(google.protobuf.message.Message): features: global___FeatureSet | None = ..., uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["deprecated", b"deprecated", "features", b"features", "idempotency_level", b"idempotency_level"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["deprecated", b"deprecated", "features", b"features", "idempotency_level", b"idempotency_level", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "deprecated", b"deprecated", "features", b"features", "idempotency_level", b"idempotency_level" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "deprecated", + b"deprecated", + "features", + b"features", + "idempotency_level", + b"idempotency_level", + "uninterpreted_option", + b"uninterpreted_option", + ], + ) -> None: ... global___MethodOptions = MethodOptions @@ -1698,13 +2201,10 @@ class UninterpretedOption(google.protobuf.message.Message): IS_EXTENSION_FIELD_NUMBER: builtins.int name_part: builtins.str is_extension: builtins.bool - def __init__( - self, - *, - name_part: builtins.str | None = ..., - is_extension: builtins.bool | None = ..., - ) -> None: ... - def HasField(self, field_name: typing.Literal["is_extension", b"is_extension", "name_part", b"name_part"]) -> builtins.bool: ... + def __init__(self, *, name_part: builtins.str | None = ..., is_extension: builtins.bool | None = ...) -> None: ... + def HasField( + self, field_name: typing.Literal["is_extension", b"is_extension", "name_part", b"name_part"] + ) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["is_extension", b"is_extension", "name_part", b"name_part"]) -> None: ... NAME_FIELD_NUMBER: builtins.int @@ -1724,7 +2224,9 @@ class UninterpretedOption(google.protobuf.message.Message): string_value: builtins.bytes aggregate_value: builtins.str @property - def name(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption.NamePart]: ... + def name( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption.NamePart]: ... 
def __init__( self, *, @@ -1736,8 +2238,42 @@ class UninterpretedOption(google.protobuf.message.Message): string_value: builtins.bytes | None = ..., aggregate_value: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["aggregate_value", b"aggregate_value", "double_value", b"double_value", "identifier_value", b"identifier_value", "negative_int_value", b"negative_int_value", "positive_int_value", b"positive_int_value", "string_value", b"string_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["aggregate_value", b"aggregate_value", "double_value", b"double_value", "identifier_value", b"identifier_value", "name", b"name", "negative_int_value", b"negative_int_value", "positive_int_value", b"positive_int_value", "string_value", b"string_value"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "aggregate_value", + b"aggregate_value", + "double_value", + b"double_value", + "identifier_value", + b"identifier_value", + "negative_int_value", + b"negative_int_value", + "positive_int_value", + b"positive_int_value", + "string_value", + b"string_value", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "aggregate_value", + b"aggregate_value", + "double_value", + b"double_value", + "identifier_value", + b"identifier_value", + "name", + b"name", + "negative_int_value", + b"negative_int_value", + "positive_int_value", + b"positive_int_value", + "string_value", + b"string_value", + ], + ) -> None: ... global___UninterpretedOption = UninterpretedOption @@ -1760,7 +2296,9 @@ class FeatureSet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _FieldPresenceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._FieldPresence.ValueType], builtins.type): + class _FieldPresenceEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._FieldPresence.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor FIELD_PRESENCE_UNKNOWN: FeatureSet._FieldPresence.ValueType # 0 EXPLICIT: FeatureSet._FieldPresence.ValueType # 1 @@ -1777,7 +2315,9 @@ class FeatureSet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _EnumTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._EnumType.ValueType], builtins.type): + class _EnumTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._EnumType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor ENUM_TYPE_UNKNOWN: FeatureSet._EnumType.ValueType # 0 OPEN: FeatureSet._EnumType.ValueType # 1 @@ -1792,7 +2332,9 @@ class FeatureSet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _RepeatedFieldEncodingEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._RepeatedFieldEncoding.ValueType], builtins.type): + class _RepeatedFieldEncodingEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._RepeatedFieldEncoding.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor REPEATED_FIELD_ENCODING_UNKNOWN: FeatureSet._RepeatedFieldEncoding.ValueType # 0 PACKED: FeatureSet._RepeatedFieldEncoding.ValueType # 1 @@ 
-1807,7 +2349,9 @@ class FeatureSet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _Utf8ValidationEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._Utf8Validation.ValueType], builtins.type): + class _Utf8ValidationEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._Utf8Validation.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UTF8_VALIDATION_UNKNOWN: FeatureSet._Utf8Validation.ValueType # 0 VERIFY: FeatureSet._Utf8Validation.ValueType # 2 @@ -1822,7 +2366,9 @@ class FeatureSet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _MessageEncodingEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._MessageEncoding.ValueType], builtins.type): + class _MessageEncodingEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._MessageEncoding.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor MESSAGE_ENCODING_UNKNOWN: FeatureSet._MessageEncoding.ValueType # 0 LENGTH_PREFIXED: FeatureSet._MessageEncoding.ValueType # 1 @@ -1837,7 +2383,9 @@ class FeatureSet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _JsonFormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._JsonFormat.ValueType], builtins.type): + class _JsonFormatEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._JsonFormat.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor JSON_FORMAT_UNKNOWN: FeatureSet._JsonFormat.ValueType # 0 ALLOW: FeatureSet._JsonFormat.ValueType # 1 @@ -1852,7 +2400,9 @@ class FeatureSet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _EnforceNamingStyleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._EnforceNamingStyle.ValueType], builtins.type): + class _EnforceNamingStyleEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FeatureSet._EnforceNamingStyle.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor ENFORCE_NAMING_STYLE_UNKNOWN: FeatureSet._EnforceNamingStyle.ValueType # 0 STYLE2024: FeatureSet._EnforceNamingStyle.ValueType # 1 @@ -1888,8 +2438,44 @@ class FeatureSet(google.protobuf.message.Message): json_format: global___FeatureSet.JsonFormat.ValueType | None = ..., enforce_naming_style: global___FeatureSet.EnforceNamingStyle.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["enforce_naming_style", b"enforce_naming_style", "enum_type", b"enum_type", "field_presence", b"field_presence", "json_format", b"json_format", "message_encoding", b"message_encoding", "repeated_field_encoding", b"repeated_field_encoding", "utf8_validation", b"utf8_validation"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["enforce_naming_style", b"enforce_naming_style", "enum_type", b"enum_type", "field_presence", b"field_presence", "json_format", b"json_format", "message_encoding", b"message_encoding", "repeated_field_encoding", b"repeated_field_encoding", "utf8_validation", b"utf8_validation"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "enforce_naming_style", + b"enforce_naming_style", + "enum_type", + b"enum_type", + "field_presence", + b"field_presence", + "json_format", + b"json_format", + "message_encoding", + b"message_encoding", + "repeated_field_encoding", + b"repeated_field_encoding", + "utf8_validation", + b"utf8_validation", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "enforce_naming_style", + b"enforce_naming_style", + "enum_type", + b"enum_type", + "field_presence", + b"field_presence", + "json_format", + b"json_format", + "message_encoding", + b"message_encoding", + "repeated_field_encoding", + b"repeated_field_encoding", + "utf8_validation", + b"utf8_validation", + ], + ) -> None: ... global___FeatureSet = FeatureSet @@ -1932,8 +2518,18 @@ class FeatureSetDefaults(google.protobuf.message.Message): overridable_features: global___FeatureSet | None = ..., fixed_features: global___FeatureSet | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["edition", b"edition", "fixed_features", b"fixed_features", "overridable_features", b"overridable_features"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["edition", b"edition", "fixed_features", b"fixed_features", "overridable_features", b"overridable_features"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "edition", b"edition", "fixed_features", b"fixed_features", "overridable_features", b"overridable_features" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "edition", b"edition", "fixed_features", b"fixed_features", "overridable_features", b"overridable_features" + ], + ) -> None: ... DEFAULTS_FIELD_NUMBER: builtins.int MINIMUM_EDITION_FIELD_NUMBER: builtins.int @@ -1947,7 +2543,11 @@ class FeatureSetDefaults(google.protobuf.message.Message): after this will not have reliable defaults. """ @property - def defaults(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FeatureSetDefaults.FeatureSetEditionDefault]: ... + def defaults( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___FeatureSetDefaults.FeatureSetEditionDefault + ]: ... def __init__( self, *, @@ -1955,8 +2555,15 @@ class FeatureSetDefaults(google.protobuf.message.Message): minimum_edition: global___Edition.ValueType | None = ..., maximum_edition: global___Edition.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["maximum_edition", b"maximum_edition", "minimum_edition", b"minimum_edition"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["defaults", b"defaults", "maximum_edition", b"maximum_edition", "minimum_edition", b"minimum_edition"]) -> None: ... + def HasField( + self, field_name: typing.Literal["maximum_edition", b"maximum_edition", "minimum_edition", b"minimum_edition"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "defaults", b"defaults", "maximum_edition", b"maximum_edition", "minimum_edition", b"minimum_edition" + ], + ) -> None: ... 
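FeatureSet is the editions feature bundle. Assuming a protobuf runtime recent enough to ship it, and the usual convention that nested enum values are exposed on the containing message class, it behaves like any other message with explicit field presence:

from google.protobuf import descriptor_pb2

features = descriptor_pb2.FeatureSet(
    field_presence=descriptor_pb2.FeatureSet.EXPLICIT,
    enum_type=descriptor_pb2.FeatureSet.OPEN,
)
assert features.HasField("field_presence")
features.ClearField("enum_type")
assert not features.HasField("enum_type")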
global___FeatureSetDefaults = FeatureSetDefaults @@ -2077,8 +2684,24 @@ class SourceCodeInfo(google.protobuf.message.Message): trailing_comments: builtins.str | None = ..., leading_detached_comments: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["leading_comments", b"leading_comments", "trailing_comments", b"trailing_comments"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["leading_comments", b"leading_comments", "leading_detached_comments", b"leading_detached_comments", "path", b"path", "span", b"span", "trailing_comments", b"trailing_comments"]) -> None: ... + def HasField( + self, field_name: typing.Literal["leading_comments", b"leading_comments", "trailing_comments", b"trailing_comments"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "leading_comments", + b"leading_comments", + "leading_detached_comments", + b"leading_detached_comments", + "path", + b"path", + "span", + b"span", + "trailing_comments", + b"trailing_comments", + ], + ) -> None: ... LOCATION_FIELD_NUMBER: builtins.int @property @@ -2128,11 +2751,7 @@ class SourceCodeInfo(google.protobuf.message.Message): be recorded in the future. """ - def __init__( - self, - *, - location: collections.abc.Iterable[global___SourceCodeInfo.Location] | None = ..., - ) -> None: ... + def __init__(self, *, location: collections.abc.Iterable[global___SourceCodeInfo.Location] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["location", b"location"]) -> None: ... global___SourceCodeInfo = SourceCodeInfo @@ -2154,7 +2773,10 @@ class GeneratedCodeInfo(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _SemanticEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GeneratedCodeInfo.Annotation._Semantic.ValueType], builtins.type): + class _SemanticEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GeneratedCodeInfo.Annotation._Semantic.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor NONE: GeneratedCodeInfo.Annotation._Semantic.ValueType # 0 """There is no effect or the effect is indescribable.""" @@ -2207,21 +2829,27 @@ class GeneratedCodeInfo(google.protobuf.message.Message): end: builtins.int | None = ..., semantic: global___GeneratedCodeInfo.Annotation.Semantic.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["begin", b"begin", "end", b"end", "semantic", b"semantic", "source_file", b"source_file"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["begin", b"begin", "end", b"end", "path", b"path", "semantic", b"semantic", "source_file", b"source_file"]) -> None: ... + def HasField( + self, + field_name: typing.Literal["begin", b"begin", "end", b"end", "semantic", b"semantic", "source_file", b"source_file"], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "begin", b"begin", "end", b"end", "path", b"path", "semantic", b"semantic", "source_file", b"source_file" + ], + ) -> None: ... 
ANNOTATION_FIELD_NUMBER: builtins.int @property - def annotation(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GeneratedCodeInfo.Annotation]: + def annotation( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GeneratedCodeInfo.Annotation]: """An Annotation connects some span of text in generated code to an element of its generating .proto file. """ - def __init__( - self, - *, - annotation: collections.abc.Iterable[global___GeneratedCodeInfo.Annotation] | None = ..., - ) -> None: ... + def __init__(self, *, annotation: collections.abc.Iterable[global___GeneratedCodeInfo.Annotation] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["annotation", b"annotation"]) -> None: ... global___GeneratedCodeInfo = GeneratedCodeInfo diff --git a/stubs/protobuf/google/protobuf/duration_pb2.pyi b/stubs/protobuf/google/protobuf/duration_pb2.pyi index 4beb927266cb..2cb846889f4d 100644 --- a/stubs/protobuf/google/protobuf/duration_pb2.pyi +++ b/stubs/protobuf/google/protobuf/duration_pb2.pyi @@ -120,12 +120,7 @@ class Duration(google.protobuf.message.Message, google.protobuf.internal.well_kn of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive. """ - def __init__( - self, - *, - seconds: builtins.int | None = ..., - nanos: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, seconds: builtins.int | None = ..., nanos: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["nanos", b"nanos", "seconds", b"seconds"]) -> None: ... global___Duration = Duration diff --git a/stubs/protobuf/google/protobuf/empty_pb2.pyi b/stubs/protobuf/google/protobuf/empty_pb2.pyi index 593917f63474..317979279540 100644 --- a/stubs/protobuf/google/protobuf/empty_pb2.pyi +++ b/stubs/protobuf/google/protobuf/empty_pb2.pyi @@ -52,8 +52,6 @@ class Empty(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___Empty = Empty diff --git a/stubs/protobuf/google/protobuf/field_mask_pb2.pyi b/stubs/protobuf/google/protobuf/field_mask_pb2.pyi index 1cc4929f00b5..82dea7a11b56 100644 --- a/stubs/protobuf/google/protobuf/field_mask_pb2.pyi +++ b/stubs/protobuf/google/protobuf/field_mask_pb2.pyi @@ -253,11 +253,7 @@ class FieldMask(google.protobuf.message.Message, google.protobuf.internal.well_k def paths(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """The set of field mask paths.""" - def __init__( - self, - *, - paths: collections.abc.Iterable[builtins.str] | None = ..., - ) -> None: ... + def __init__(self, *, paths: collections.abc.Iterable[builtins.str] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["paths", b"paths"]) -> None: ... global___FieldMask = FieldMask diff --git a/stubs/protobuf/google/protobuf/source_context_pb2.pyi b/stubs/protobuf/google/protobuf/source_context_pb2.pyi index a2d35937211d..9d74fb3d5926 100644 --- a/stubs/protobuf/google/protobuf/source_context_pb2.pyi +++ b/stubs/protobuf/google/protobuf/source_context_pb2.pyi @@ -53,11 +53,7 @@ class SourceContext(google.protobuf.message.Message): """The path-qualified name of the .proto file that contained the associated protobuf element. For example: `"google/protobuf/source_context.proto"`. """ - def __init__( - self, - *, - file_name: builtins.str | None = ..., - ) -> None: ... 
+ def __init__(self, *, file_name: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["file_name", b"file_name"]) -> None: ... global___SourceContext = SourceContext diff --git a/stubs/protobuf/google/protobuf/struct_pb2.pyi b/stubs/protobuf/google/protobuf/struct_pb2.pyi index 684f3c9d4e08..8e63dabfc2f1 100644 --- a/stubs/protobuf/google/protobuf/struct_pb2.pyi +++ b/stubs/protobuf/google/protobuf/struct_pb2.pyi @@ -93,12 +93,7 @@ class Struct(google.protobuf.message.Message, google.protobuf.internal.well_know key: builtins.str @property def value(self) -> global___Value: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___Value | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___Value | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -107,11 +102,7 @@ class Struct(google.protobuf.message.Message, google.protobuf.internal.well_know def fields(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Value]: """Unordered map of dynamically typed values.""" - def __init__( - self, - *, - fields: collections.abc.Mapping[builtins.str, global___Value] | None = ..., - ) -> None: ... + def __init__(self, *, fields: collections.abc.Mapping[builtins.str, global___Value] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["fields", b"fields"]) -> None: ... global___Struct = Struct @@ -160,9 +151,47 @@ class Value(google.protobuf.message.Message): struct_value: global___Struct | None = ..., list_value: global___ListValue | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["bool_value", b"bool_value", "kind", b"kind", "list_value", b"list_value", "null_value", b"null_value", "number_value", b"number_value", "string_value", b"string_value", "struct_value", b"struct_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bool_value", b"bool_value", "kind", b"kind", "list_value", b"list_value", "null_value", b"null_value", "number_value", b"number_value", "string_value", b"string_value", "struct_value", b"struct_value"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["kind", b"kind"]) -> typing.Literal["null_value", "number_value", "string_value", "bool_value", "struct_value", "list_value"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "bool_value", + b"bool_value", + "kind", + b"kind", + "list_value", + b"list_value", + "null_value", + b"null_value", + "number_value", + b"number_value", + "string_value", + b"string_value", + "struct_value", + b"struct_value", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "bool_value", + b"bool_value", + "kind", + b"kind", + "list_value", + b"list_value", + "null_value", + b"null_value", + "number_value", + b"number_value", + "string_value", + b"string_value", + "struct_value", + b"struct_value", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["kind", b"kind"] + ) -> typing.Literal["null_value", "number_value", "string_value", "bool_value", "struct_value", "list_value"] | None: ... 
global___Value = Value @@ -180,11 +209,7 @@ class ListValue(google.protobuf.message.Message, google.protobuf.internal.well_k def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Value]: """Repeated field of dynamically typed values.""" - def __init__( - self, - *, - values: collections.abc.Iterable[global___Value] | None = ..., - ) -> None: ... + def __init__(self, *, values: collections.abc.Iterable[global___Value] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["values", b"values"]) -> None: ... global___ListValue = ListValue diff --git a/stubs/protobuf/google/protobuf/timestamp_pb2.pyi b/stubs/protobuf/google/protobuf/timestamp_pb2.pyi index 017a881de8b9..52daff363ddf 100644 --- a/stubs/protobuf/google/protobuf/timestamp_pb2.pyi +++ b/stubs/protobuf/google/protobuf/timestamp_pb2.pyi @@ -149,12 +149,7 @@ class Timestamp(google.protobuf.message.Message, google.protobuf.internal.well_k that count forward in time. Must be from 0 to 999,999,999 inclusive. """ - def __init__( - self, - *, - seconds: builtins.int | None = ..., - nanos: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, seconds: builtins.int | None = ..., nanos: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["nanos", b"nanos", "seconds", b"seconds"]) -> None: ... global___Timestamp = Timestamp diff --git a/stubs/protobuf/google/protobuf/type_pb2.pyi b/stubs/protobuf/google/protobuf/type_pb2.pyi index 78e6b5b355a2..4d3fc1cb6577 100644 --- a/stubs/protobuf/google/protobuf/type_pb2.pyi +++ b/stubs/protobuf/google/protobuf/type_pb2.pyi @@ -122,7 +122,25 @@ class Type(google.protobuf.message.Message): edition: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["source_context", b"source_context"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["edition", b"edition", "fields", b"fields", "name", b"name", "oneofs", b"oneofs", "options", b"options", "source_context", b"source_context", "syntax", b"syntax"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "edition", + b"edition", + "fields", + b"fields", + "name", + b"name", + "oneofs", + b"oneofs", + "options", + b"options", + "source_context", + b"source_context", + "syntax", + b"syntax", + ], + ) -> None: ... global___Type = Type @@ -223,7 +241,9 @@ class Field(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _CardinalityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Field._Cardinality.ValueType], builtins.type): + class _CardinalityEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Field._Cardinality.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor CARDINALITY_UNKNOWN: Field._Cardinality.ValueType # 0 """For fields with unknown cardinality.""" @@ -296,7 +316,31 @@ class Field(google.protobuf.message.Message): json_name: builtins.str | None = ..., default_value: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["cardinality", b"cardinality", "default_value", b"default_value", "json_name", b"json_name", "kind", b"kind", "name", b"name", "number", b"number", "oneof_index", b"oneof_index", "options", b"options", "packed", b"packed", "type_url", b"type_url"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "cardinality", + b"cardinality", + "default_value", + b"default_value", + "json_name", + b"json_name", + "kind", + b"kind", + "name", + b"name", + "number", + b"number", + "oneof_index", + b"oneof_index", + "options", + b"options", + "packed", + b"packed", + "type_url", + b"type_url", + ], + ) -> None: ... global___Field = Field @@ -341,7 +385,23 @@ class Enum(google.protobuf.message.Message): edition: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["source_context", b"source_context"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["edition", b"edition", "enumvalue", b"enumvalue", "name", b"name", "options", b"options", "source_context", b"source_context", "syntax", b"syntax"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "edition", + b"edition", + "enumvalue", + b"enumvalue", + "name", + b"name", + "options", + b"options", + "source_context", + b"source_context", + "syntax", + b"syntax", + ], + ) -> None: ... global___Enum = Enum @@ -397,12 +457,7 @@ class Option(google.protobuf.message.Message): value using the google.protobuf.Int32Value type. """ - def __init__( - self, - *, - name: builtins.str | None = ..., - value: google.protobuf.any_pb2.Any | None = ..., - ) -> None: ... + def __init__(self, *, name: builtins.str | None = ..., value: google.protobuf.any_pb2.Any | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["name", b"name", "value", b"value"]) -> None: ... diff --git a/stubs/protobuf/google/protobuf/wrappers_pb2.pyi b/stubs/protobuf/google/protobuf/wrappers_pb2.pyi index 24aae899790d..b3d875bccf67 100644 --- a/stubs/protobuf/google/protobuf/wrappers_pb2.pyi +++ b/stubs/protobuf/google/protobuf/wrappers_pb2.pyi @@ -72,11 +72,7 @@ class DoubleValue(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int value: builtins.float """The double value.""" - def __init__( - self, - *, - value: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, value: builtins.float | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___DoubleValue = DoubleValue @@ -96,11 +92,7 @@ class FloatValue(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int value: builtins.float """The float value.""" - def __init__( - self, - *, - value: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, value: builtins.float | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___FloatValue = FloatValue @@ -120,11 +112,7 @@ class Int64Value(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int value: builtins.int """The int64 value.""" - def __init__( - self, - *, - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, value: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___Int64Value = Int64Value @@ -144,11 +132,7 @@ class UInt64Value(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int value: builtins.int """The uint64 value.""" - def __init__( - self, - *, - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, value: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... 
global___UInt64Value = UInt64Value @@ -168,11 +152,7 @@ class Int32Value(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int value: builtins.int """The int32 value.""" - def __init__( - self, - *, - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, value: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___Int32Value = Int32Value @@ -192,11 +172,7 @@ class UInt32Value(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int value: builtins.int """The uint32 value.""" - def __init__( - self, - *, - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, value: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___UInt32Value = UInt32Value @@ -216,11 +192,7 @@ class BoolValue(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int value: builtins.bool """The bool value.""" - def __init__( - self, - *, - value: builtins.bool | None = ..., - ) -> None: ... + def __init__(self, *, value: builtins.bool | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___BoolValue = BoolValue @@ -240,11 +212,7 @@ class StringValue(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int value: builtins.str """The string value.""" - def __init__( - self, - *, - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___StringValue = StringValue @@ -264,11 +232,7 @@ class BytesValue(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int value: builtins.bytes """The bytes value.""" - def __init__( - self, - *, - value: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, value: builtins.bytes | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___BytesValue = BytesValue diff --git a/stubs/s2clientprotocol/s2clientprotocol/common_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/common_pb2.pyi index 298ef9bf1c76..cfa0bec86506 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/common_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/common_pb2.pyi @@ -47,14 +47,13 @@ class AvailableAbility(google.protobuf.message.Message): REQUIRES_POINT_FIELD_NUMBER: builtins.int ability_id: builtins.int requires_point: builtins.bool - def __init__( - self, - *, - ability_id: builtins.int | None = ..., - requires_point: builtins.bool | None = ..., + def __init__(self, *, ability_id: builtins.int | None = ..., requires_point: builtins.bool | None = ...) -> None: ... + def HasField( + self, field_name: typing.Literal["ability_id", b"ability_id", "requires_point", b"requires_point"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["ability_id", b"ability_id", "requires_point", b"requires_point"] ) -> None: ... - def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "requires_point", b"requires_point"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "requires_point", b"requires_point"]) -> None: ... 
global___AvailableAbility = AvailableAbility @@ -74,14 +73,14 @@ class ImageData(google.protobuf.message.Message): """Dimension in pixels.""" def __init__( - self, - *, - bits_per_pixel: builtins.int | None = ..., - size: global___Size2DI | None = ..., - data: builtins.bytes | None = ..., + self, *, bits_per_pixel: builtins.int | None = ..., size: global___Size2DI | None = ..., data: builtins.bytes | None = ... + ) -> None: ... + def HasField( + self, field_name: typing.Literal["bits_per_pixel", b"bits_per_pixel", "data", b"data", "size", b"size"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["bits_per_pixel", b"bits_per_pixel", "data", b"data", "size", b"size"] ) -> None: ... - def HasField(self, field_name: typing.Literal["bits_per_pixel", b"bits_per_pixel", "data", b"data", "size", b"size"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bits_per_pixel", b"bits_per_pixel", "data", b"data", "size", b"size"]) -> None: ... global___ImageData = ImageData @@ -97,12 +96,7 @@ class PointI(google.protobuf.message.Message): Y_FIELD_NUMBER: builtins.int x: builtins.int y: builtins.int - def __init__( - self, - *, - x: builtins.int | None = ..., - y: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, x: builtins.int | None = ..., y: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["x", b"x", "y", b"y"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["x", b"x", "y", b"y"]) -> None: ... @@ -120,12 +114,7 @@ class RectangleI(google.protobuf.message.Message): def p0(self) -> global___PointI: ... @property def p1(self) -> global___PointI: ... - def __init__( - self, - *, - p0: global___PointI | None = ..., - p1: global___PointI | None = ..., - ) -> None: ... + def __init__(self, *, p0: global___PointI | None = ..., p1: global___PointI | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["p0", b"p0", "p1", b"p1"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["p0", b"p0", "p1", b"p1"]) -> None: ... @@ -143,12 +132,7 @@ class Point2D(google.protobuf.message.Message): Y_FIELD_NUMBER: builtins.int x: builtins.float y: builtins.float - def __init__( - self, - *, - x: builtins.float | None = ..., - y: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, x: builtins.float | None = ..., y: builtins.float | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["x", b"x", "y", b"y"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["x", b"x", "y", b"y"]) -> None: ... @@ -169,11 +153,7 @@ class Point(google.protobuf.message.Message): y: builtins.float z: builtins.float def __init__( - self, - *, - x: builtins.float | None = ..., - y: builtins.float | None = ..., - z: builtins.float | None = ..., + self, *, x: builtins.float | None = ..., y: builtins.float | None = ..., z: builtins.float | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["x", b"x", "y", b"y", "z", b"z"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["x", b"x", "y", b"y", "z", b"z"]) -> None: ... @@ -190,12 +170,7 @@ class Size2DI(google.protobuf.message.Message): Y_FIELD_NUMBER: builtins.int x: builtins.int y: builtins.int - def __init__( - self, - *, - x: builtins.int | None = ..., - y: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, x: builtins.int | None = ..., y: builtins.int | None = ...) -> None: ... 
def HasField(self, field_name: typing.Literal["x", b"x", "y", b"y"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["x", b"x", "y", b"y"]) -> None: ... diff --git a/stubs/s2clientprotocol/s2clientprotocol/data_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/data_pb2.pyi index 6c33927e724e..cd68bb65df02 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/data_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/data_pb2.pyi @@ -68,7 +68,9 @@ class AbilityData(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TargetEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AbilityData._Target.ValueType], builtins.type): + class _TargetEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AbilityData._Target.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Point: AbilityData._Target.ValueType # 2 """Requires a target position.""" @@ -153,8 +155,76 @@ class AbilityData(google.protobuf.message.Message): is_instant_placement: builtins.bool | None = ..., cast_range: builtins.float | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "allow_autocast", b"allow_autocast", "allow_minimap", b"allow_minimap", "available", b"available", "button_name", b"button_name", "cast_range", b"cast_range", "footprint_radius", b"footprint_radius", "friendly_name", b"friendly_name", "hotkey", b"hotkey", "is_building", b"is_building", "is_instant_placement", b"is_instant_placement", "link_index", b"link_index", "link_name", b"link_name", "remaps_to_ability_id", b"remaps_to_ability_id", "target", b"target"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "allow_autocast", b"allow_autocast", "allow_minimap", b"allow_minimap", "available", b"available", "button_name", b"button_name", "cast_range", b"cast_range", "footprint_radius", b"footprint_radius", "friendly_name", b"friendly_name", "hotkey", b"hotkey", "is_building", b"is_building", "is_instant_placement", b"is_instant_placement", "link_index", b"link_index", "link_name", b"link_name", "remaps_to_ability_id", b"remaps_to_ability_id", "target", b"target"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "allow_autocast", + b"allow_autocast", + "allow_minimap", + b"allow_minimap", + "available", + b"available", + "button_name", + b"button_name", + "cast_range", + b"cast_range", + "footprint_radius", + b"footprint_radius", + "friendly_name", + b"friendly_name", + "hotkey", + b"hotkey", + "is_building", + b"is_building", + "is_instant_placement", + b"is_instant_placement", + "link_index", + b"link_index", + "link_name", + b"link_name", + "remaps_to_ability_id", + b"remaps_to_ability_id", + "target", + b"target", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "allow_autocast", + b"allow_autocast", + "allow_minimap", + b"allow_minimap", + "available", + b"available", + "button_name", + b"button_name", + "cast_range", + b"cast_range", + "footprint_radius", + b"footprint_radius", + "friendly_name", + b"friendly_name", + "hotkey", + b"hotkey", + "is_building", + b"is_building", + "is_instant_placement", + b"is_instant_placement", + "link_index", + b"link_index", + "link_name", + b"link_name", + "remaps_to_ability_id", + b"remaps_to_ability_id", + "target", + b"target", + ], + ) -> None: ... global___AbilityData = AbilityData @@ -166,12 +236,7 @@ class DamageBonus(google.protobuf.message.Message): BONUS_FIELD_NUMBER: builtins.int attribute: global___Attribute.ValueType bonus: builtins.float - def __init__( - self, - *, - attribute: global___Attribute.ValueType | None = ..., - bonus: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, attribute: global___Attribute.ValueType | None = ..., bonus: builtins.float | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["attribute", b"attribute", "bonus", b"bonus"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["attribute", b"attribute", "bonus", b"bonus"]) -> None: ... @@ -185,7 +250,9 @@ class Weapon(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TargetTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Weapon._TargetType.ValueType], builtins.type): + class _TargetTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Weapon._TargetType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Ground: Weapon._TargetType.ValueType # 1 Air: Weapon._TargetType.ValueType # 2 @@ -221,8 +288,29 @@ class Weapon(google.protobuf.message.Message): range: builtins.float | None = ..., speed: builtins.float | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["attacks", b"attacks", "damage", b"damage", "range", b"range", "speed", b"speed", "type", b"type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["attacks", b"attacks", "damage", b"damage", "damage_bonus", b"damage_bonus", "range", b"range", "speed", b"speed", "type", b"type"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "attacks", b"attacks", "damage", b"damage", "range", b"range", "speed", b"speed", "type", b"type" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "attacks", + b"attacks", + "damage", + b"damage", + "damage_bonus", + b"damage_bonus", + "range", + b"range", + "speed", + b"speed", + "type", + b"type", + ], + ) -> None: ... global___Weapon = Weapon @@ -316,8 +404,98 @@ class UnitTypeData(google.protobuf.message.Message): armor: builtins.float | None = ..., weapons: collections.abc.Iterable[global___Weapon] | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "armor", b"armor", "available", b"available", "build_time", b"build_time", "cargo_size", b"cargo_size", "food_provided", b"food_provided", "food_required", b"food_required", "has_minerals", b"has_minerals", "has_vespene", b"has_vespene", "mineral_cost", b"mineral_cost", "movement_speed", b"movement_speed", "name", b"name", "race", b"race", "require_attached", b"require_attached", "sight_range", b"sight_range", "tech_requirement", b"tech_requirement", "unit_alias", b"unit_alias", "unit_id", b"unit_id", "vespene_cost", b"vespene_cost"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "armor", b"armor", "attributes", b"attributes", "available", b"available", "build_time", b"build_time", "cargo_size", b"cargo_size", "food_provided", b"food_provided", "food_required", b"food_required", "has_minerals", b"has_minerals", "has_vespene", b"has_vespene", "mineral_cost", b"mineral_cost", "movement_speed", b"movement_speed", "name", b"name", "race", b"race", "require_attached", b"require_attached", "sight_range", b"sight_range", "tech_alias", b"tech_alias", "tech_requirement", b"tech_requirement", "unit_alias", b"unit_alias", "unit_id", b"unit_id", "vespene_cost", b"vespene_cost", "weapons", b"weapons"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "armor", + b"armor", + "available", + b"available", + "build_time", + b"build_time", + "cargo_size", + b"cargo_size", + "food_provided", + b"food_provided", + "food_required", + b"food_required", + "has_minerals", + b"has_minerals", + "has_vespene", + b"has_vespene", + "mineral_cost", + b"mineral_cost", + "movement_speed", + b"movement_speed", + "name", + b"name", + "race", + b"race", + "require_attached", + b"require_attached", + "sight_range", + b"sight_range", + "tech_requirement", + b"tech_requirement", + "unit_alias", + b"unit_alias", + "unit_id", + b"unit_id", + "vespene_cost", + b"vespene_cost", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "armor", + b"armor", + "attributes", + b"attributes", + "available", + b"available", + "build_time", + b"build_time", + "cargo_size", + b"cargo_size", + "food_provided", + b"food_provided", + "food_required", + b"food_required", + "has_minerals", + b"has_minerals", + "has_vespene", + b"has_vespene", + "mineral_cost", + b"mineral_cost", + "movement_speed", + b"movement_speed", + "name", + b"name", + "race", + b"race", + "require_attached", + b"require_attached", + "sight_range", + b"sight_range", + "tech_alias", + b"tech_alias", + "tech_requirement", + b"tech_requirement", + "unit_alias", + b"unit_alias", + "unit_id", + b"unit_id", + "vespene_cost", + b"vespene_cost", + "weapons", + b"weapons", + ], + ) -> None: ... global___UnitTypeData = UnitTypeData @@ -348,8 +526,40 @@ class UpgradeData(google.protobuf.message.Message): research_time: builtins.float | None = ..., ability_id: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "mineral_cost", b"mineral_cost", "name", b"name", "research_time", b"research_time", "upgrade_id", b"upgrade_id", "vespene_cost", b"vespene_cost"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "mineral_cost", b"mineral_cost", "name", b"name", "research_time", b"research_time", "upgrade_id", b"upgrade_id", "vespene_cost", b"vespene_cost"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "mineral_cost", + b"mineral_cost", + "name", + b"name", + "research_time", + b"research_time", + "upgrade_id", + b"upgrade_id", + "vespene_cost", + b"vespene_cost", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "mineral_cost", + b"mineral_cost", + "name", + b"name", + "research_time", + b"research_time", + "upgrade_id", + b"upgrade_id", + "vespene_cost", + b"vespene_cost", + ], + ) -> None: ... global___UpgradeData = UpgradeData @@ -362,12 +572,7 @@ class BuffData(google.protobuf.message.Message): buff_id: builtins.int """Stable ID.""" name: builtins.str - def __init__( - self, - *, - buff_id: builtins.int | None = ..., - name: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, buff_id: builtins.int | None = ..., name: builtins.str | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["buff_id", b"buff_id", "name", b"name"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["buff_id", b"buff_id", "name", b"name"]) -> None: ... @@ -394,7 +599,17 @@ class EffectData(google.protobuf.message.Message): friendly_name: builtins.str | None = ..., radius: builtins.float | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["effect_id", b"effect_id", "friendly_name", b"friendly_name", "name", b"name", "radius", b"radius"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["effect_id", b"effect_id", "friendly_name", b"friendly_name", "name", b"name", "radius", b"radius"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "effect_id", b"effect_id", "friendly_name", b"friendly_name", "name", b"name", "radius", b"radius" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "effect_id", b"effect_id", "friendly_name", b"friendly_name", "name", b"name", "radius", b"radius" + ], + ) -> None: ... global___EffectData = EffectData diff --git a/stubs/s2clientprotocol/s2clientprotocol/debug_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/debug_pb2.pyi index 658d5bd20590..eba2ae972064 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/debug_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/debug_pb2.pyi @@ -25,7 +25,9 @@ class _DebugGameState: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _DebugGameStateEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DebugGameState.ValueType], builtins.type): +class _DebugGameStateEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DebugGameState.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor show_map: _DebugGameState.ValueType # 1 control_enemy: _DebugGameState.ValueType # 2 @@ -99,9 +101,57 @@ class DebugCommand(google.protobuf.message.Message): end_game: global___DebugEndGame | None = ..., unit_value: global___DebugSetUnitValue | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["command", b"command", "create_unit", b"create_unit", "draw", b"draw", "end_game", b"end_game", "game_state", b"game_state", "kill_unit", b"kill_unit", "score", b"score", "test_process", b"test_process", "unit_value", b"unit_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["command", b"command", "create_unit", b"create_unit", "draw", b"draw", "end_game", b"end_game", "game_state", b"game_state", "kill_unit", b"kill_unit", "score", b"score", "test_process", b"test_process", "unit_value", b"unit_value"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["command", b"command"]) -> typing.Literal["draw", "game_state", "create_unit", "kill_unit", "test_process", "score", "end_game", "unit_value"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "command", + b"command", + "create_unit", + b"create_unit", + "draw", + b"draw", + "end_game", + b"end_game", + "game_state", + b"game_state", + "kill_unit", + b"kill_unit", + "score", + b"score", + "test_process", + b"test_process", + "unit_value", + b"unit_value", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "command", + b"command", + "create_unit", + b"create_unit", + "draw", + b"draw", + "end_game", + b"end_game", + "game_state", + b"game_state", + "kill_unit", + b"kill_unit", + "score", + b"score", + "test_process", + b"test_process", + "unit_value", + b"unit_value", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["command", b"command"] + ) -> ( + typing.Literal["draw", "game_state", "create_unit", "kill_unit", "test_process", "score", "end_game", "unit_value"] | None + ): ... global___DebugCommand = DebugCommand @@ -129,7 +179,9 @@ class DebugDraw(google.protobuf.message.Message): boxes: collections.abc.Iterable[global___DebugBox] | None = ..., spheres: collections.abc.Iterable[global___DebugSphere] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["boxes", b"boxes", "lines", b"lines", "spheres", b"spheres", "text", b"text"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["boxes", b"boxes", "lines", b"lines", "spheres", b"spheres", "text", b"text"] + ) -> None: ... global___DebugDraw = DebugDraw @@ -144,10 +196,7 @@ class Line(google.protobuf.message.Message): @property def p1(self) -> s2clientprotocol.common_pb2.Point: ... def __init__( - self, - *, - p0: s2clientprotocol.common_pb2.Point | None = ..., - p1: s2clientprotocol.common_pb2.Point | None = ..., + self, *, p0: s2clientprotocol.common_pb2.Point | None = ..., p1: s2clientprotocol.common_pb2.Point | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["p0", b"p0", "p1", b"p1"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["p0", b"p0", "p1", b"p1"]) -> None: ... @@ -164,13 +213,7 @@ class Color(google.protobuf.message.Message): r: builtins.int g: builtins.int b: builtins.int - def __init__( - self, - *, - r: builtins.int | None = ..., - g: builtins.int | None = ..., - b: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, r: builtins.int | None = ..., g: builtins.int | None = ..., b: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["b", b"b", "g", b"g", "r", b"r"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["b", b"b", "g", b"g", "r", b"r"]) -> None: ... 
@@ -210,8 +253,18 @@ class DebugText(google.protobuf.message.Message): world_pos: s2clientprotocol.common_pb2.Point | None = ..., size: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["color", b"color", "size", b"size", "text", b"text", "virtual_pos", b"virtual_pos", "world_pos", b"world_pos"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["color", b"color", "size", b"size", "text", b"text", "virtual_pos", b"virtual_pos", "world_pos", b"world_pos"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "color", b"color", "size", b"size", "text", b"text", "virtual_pos", b"virtual_pos", "world_pos", b"world_pos" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "color", b"color", "size", b"size", "text", b"text", "virtual_pos", b"virtual_pos", "world_pos", b"world_pos" + ], + ) -> None: ... global___DebugText = DebugText @@ -229,12 +282,7 @@ class DebugLine(google.protobuf.message.Message): def line(self) -> global___Line: """World space line.""" - def __init__( - self, - *, - color: global___Color | None = ..., - line: global___Line | None = ..., - ) -> None: ... + def __init__(self, *, color: global___Color | None = ..., line: global___Line | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["color", b"color", "line", b"line"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["color", b"color", "line", b"line"]) -> None: ... @@ -314,8 +362,12 @@ class DebugCreateUnit(google.protobuf.message.Message): pos: s2clientprotocol.common_pb2.Point2D | None = ..., quantity: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["owner", b"owner", "pos", b"pos", "quantity", b"quantity", "unit_type", b"unit_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["owner", b"owner", "pos", b"pos", "quantity", b"quantity", "unit_type", b"unit_type"]) -> None: ... + def HasField( + self, field_name: typing.Literal["owner", b"owner", "pos", b"pos", "quantity", b"quantity", "unit_type", b"unit_type"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["owner", b"owner", "pos", b"pos", "quantity", b"quantity", "unit_type", b"unit_type"] + ) -> None: ... global___DebugCreateUnit = DebugCreateUnit @@ -326,11 +378,7 @@ class DebugKillUnit(google.protobuf.message.Message): TAG_FIELD_NUMBER: builtins.int @property def tag(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - def __init__( - self, - *, - tag: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... + def __init__(self, *, tag: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["tag", b"tag"]) -> None: ... 
global___DebugKillUnit = DebugKillUnit @@ -343,7 +391,9 @@ class DebugTestProcess(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TestEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugTestProcess._Test.ValueType], builtins.type): + class _TestEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugTestProcess._Test.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor hang: DebugTestProcess._Test.ValueType # 1 crash: DebugTestProcess._Test.ValueType # 2 @@ -359,10 +409,7 @@ class DebugTestProcess(google.protobuf.message.Message): test: global___DebugTestProcess.Test.ValueType delay_ms: builtins.int def __init__( - self, - *, - test: global___DebugTestProcess.Test.ValueType | None = ..., - delay_ms: builtins.int | None = ..., + self, *, test: global___DebugTestProcess.Test.ValueType | None = ..., delay_ms: builtins.int | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["delay_ms", b"delay_ms", "test", b"test"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["delay_ms", b"delay_ms", "test", b"test"]) -> None: ... @@ -375,11 +422,7 @@ class DebugSetScore(google.protobuf.message.Message): SCORE_FIELD_NUMBER: builtins.int score: builtins.float - def __init__( - self, - *, - score: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, score: builtins.float | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["score", b"score"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["score", b"score"]) -> None: ... @@ -393,7 +436,9 @@ class DebugEndGame(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _EndResultEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugEndGame._EndResult.ValueType], builtins.type): + class _EndResultEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugEndGame._EndResult.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Surrender: DebugEndGame._EndResult.ValueType # 1 """Default if nothing is set. The current player admits defeat.""" @@ -406,11 +451,7 @@ class DebugEndGame(google.protobuf.message.Message): END_RESULT_FIELD_NUMBER: builtins.int end_result: global___DebugEndGame.EndResult.ValueType - def __init__( - self, - *, - end_result: global___DebugEndGame.EndResult.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, end_result: global___DebugEndGame.EndResult.ValueType | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["end_result", b"end_result"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["end_result", b"end_result"]) -> None: ... 
@@ -424,7 +465,9 @@ class DebugSetUnitValue(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _UnitValueEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugSetUnitValue._UnitValue.ValueType], builtins.type): + class _UnitValueEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugSetUnitValue._UnitValue.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Energy: DebugSetUnitValue._UnitValue.ValueType # 1 Life: DebugSetUnitValue._UnitValue.ValueType # 2 @@ -448,7 +491,11 @@ class DebugSetUnitValue(google.protobuf.message.Message): value: builtins.float | None = ..., unit_tag: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["unit_tag", b"unit_tag", "unit_value", b"unit_value", "value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["unit_tag", b"unit_tag", "unit_value", b"unit_value", "value", b"value"]) -> None: ... + def HasField( + self, field_name: typing.Literal["unit_tag", b"unit_tag", "unit_value", b"unit_value", "value", b"value"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["unit_tag", b"unit_tag", "unit_value", b"unit_value", "value", b"value"] + ) -> None: ... global___DebugSetUnitValue = DebugSetUnitValue diff --git a/stubs/s2clientprotocol/s2clientprotocol/error_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/error_pb2.pyi index c21b03cbc1b8..5c14aaba748c 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/error_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/error_pb2.pyi @@ -21,7 +21,9 @@ class _ActionResult: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _ActionResultEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ActionResult.ValueType], builtins.type): +class _ActionResultEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ActionResult.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Success: _ActionResult.ValueType # 1 NotSupported: _ActionResult.ValueType # 2 diff --git a/stubs/s2clientprotocol/s2clientprotocol/query_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/query_pb2.pyi index ea772e2f6242..2df098c7eb4a 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/query_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/query_pb2.pyi @@ -28,9 +28,13 @@ class RequestQuery(google.protobuf.message.Message): @property def pathing(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RequestQueryPathing]: ... @property - def abilities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RequestQueryAvailableAbilities]: ... + def abilities( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RequestQueryAvailableAbilities]: ... @property - def placements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RequestQueryBuildingPlacement]: ... + def placements( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RequestQueryBuildingPlacement]: ... 
def __init__( self, *, @@ -39,8 +43,22 @@ class RequestQuery(google.protobuf.message.Message): placements: collections.abc.Iterable[global___RequestQueryBuildingPlacement] | None = ..., ignore_resource_requirements: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ignore_resource_requirements", b"ignore_resource_requirements"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["abilities", b"abilities", "ignore_resource_requirements", b"ignore_resource_requirements", "pathing", b"pathing", "placements", b"placements"]) -> None: ... + def HasField( + self, field_name: typing.Literal["ignore_resource_requirements", b"ignore_resource_requirements"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "abilities", + b"abilities", + "ignore_resource_requirements", + b"ignore_resource_requirements", + "pathing", + b"pathing", + "placements", + b"placements", + ], + ) -> None: ... global___RequestQuery = RequestQuery @@ -54,9 +72,13 @@ class ResponseQuery(google.protobuf.message.Message): @property def pathing(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResponseQueryPathing]: ... @property - def abilities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResponseQueryAvailableAbilities]: ... + def abilities( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResponseQueryAvailableAbilities]: ... @property - def placements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResponseQueryBuildingPlacement]: ... + def placements( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResponseQueryBuildingPlacement]: ... def __init__( self, *, @@ -64,7 +86,9 @@ class ResponseQuery(google.protobuf.message.Message): abilities: collections.abc.Iterable[global___ResponseQueryAvailableAbilities] | None = ..., placements: collections.abc.Iterable[global___ResponseQueryBuildingPlacement] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["abilities", b"abilities", "pathing", b"pathing", "placements", b"placements"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["abilities", b"abilities", "pathing", b"pathing", "placements", b"placements"] + ) -> None: ... global___ResponseQuery = ResponseQuery @@ -89,8 +113,14 @@ class RequestQueryPathing(google.protobuf.message.Message): unit_tag: builtins.int | None = ..., end_pos: s2clientprotocol.common_pb2.Point2D | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["end_pos", b"end_pos", "start", b"start", "start_pos", b"start_pos", "unit_tag", b"unit_tag"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["end_pos", b"end_pos", "start", b"start", "start_pos", b"start_pos", "unit_tag", b"unit_tag"]) -> None: ... + def HasField( + self, + field_name: typing.Literal["end_pos", b"end_pos", "start", b"start", "start_pos", b"start_pos", "unit_tag", b"unit_tag"], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal["end_pos", b"end_pos", "start", b"start", "start_pos", b"start_pos", "unit_tag", b"unit_tag"], + ) -> None: ... def WhichOneof(self, oneof_group: typing.Literal["start", b"start"]) -> typing.Literal["start_pos", "unit_tag"] | None: ... 
global___RequestQueryPathing = RequestQueryPathing @@ -102,11 +132,7 @@ class ResponseQueryPathing(google.protobuf.message.Message): DISTANCE_FIELD_NUMBER: builtins.int distance: builtins.float """0 if no path exists""" - def __init__( - self, - *, - distance: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, distance: builtins.float | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["distance", b"distance"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["distance", b"distance"]) -> None: ... @@ -120,11 +146,7 @@ class RequestQueryAvailableAbilities(google.protobuf.message.Message): UNIT_TAG_FIELD_NUMBER: builtins.int unit_tag: builtins.int - def __init__( - self, - *, - unit_tag: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, unit_tag: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["unit_tag", b"unit_tag"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["unit_tag", b"unit_tag"]) -> None: ... @@ -140,7 +162,9 @@ class ResponseQueryAvailableAbilities(google.protobuf.message.Message): unit_tag: builtins.int unit_type_id: builtins.int @property - def abilities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.common_pb2.AvailableAbility]: ... + def abilities( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.common_pb2.AvailableAbility]: ... def __init__( self, *, @@ -149,7 +173,9 @@ class ResponseQueryAvailableAbilities(google.protobuf.message.Message): unit_type_id: builtins.int | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["unit_tag", b"unit_tag", "unit_type_id", b"unit_type_id"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["abilities", b"abilities", "unit_tag", b"unit_tag", "unit_type_id", b"unit_type_id"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["abilities", b"abilities", "unit_tag", b"unit_tag", "unit_type_id", b"unit_type_id"] + ) -> None: ... global___ResponseQueryAvailableAbilities = ResponseQueryAvailableAbilities @@ -174,8 +200,18 @@ class RequestQueryBuildingPlacement(google.protobuf.message.Message): target_pos: s2clientprotocol.common_pb2.Point2D | None = ..., placing_unit_tag: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "placing_unit_tag", b"placing_unit_tag", "target_pos", b"target_pos"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "placing_unit_tag", b"placing_unit_tag", "target_pos", b"target_pos"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "ability_id", b"ability_id", "placing_unit_tag", b"placing_unit_tag", "target_pos", b"target_pos" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ability_id", b"ability_id", "placing_unit_tag", b"placing_unit_tag", "target_pos", b"target_pos" + ], + ) -> None: ... global___RequestQueryBuildingPlacement = RequestQueryBuildingPlacement @@ -185,11 +221,7 @@ class ResponseQueryBuildingPlacement(google.protobuf.message.Message): RESULT_FIELD_NUMBER: builtins.int result: s2clientprotocol.error_pb2.ActionResult.ValueType - def __init__( - self, - *, - result: s2clientprotocol.error_pb2.ActionResult.ValueType | None = ..., - ) -> None: ... 
+ def __init__(self, *, result: s2clientprotocol.error_pb2.ActionResult.ValueType | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["result", b"result"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["result", b"result"]) -> None: ... diff --git a/stubs/s2clientprotocol/s2clientprotocol/raw_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/raw_pb2.pyi index 35a08c2337ea..a50167f1e087 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/raw_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/raw_pb2.pyi @@ -25,7 +25,9 @@ class _DisplayType: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _DisplayTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DisplayType.ValueType], builtins.type): +class _DisplayTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DisplayType.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Visible: _DisplayType.ValueType # 1 """Fully visible""" @@ -71,7 +73,9 @@ class _CloakState: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _CloakStateEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CloakState.ValueType], builtins.type): +class _CloakStateEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CloakState.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor CloakedUnknown: _CloakState.ValueType # 0 """Under the fog, so unknown whether it's cloaked or not.""" @@ -125,7 +129,9 @@ class StartRaw(google.protobuf.message.Message): """The playable cells.""" @property - def start_locations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.common_pb2.Point2D]: + def start_locations( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.common_pb2.Point2D]: """Possible start locations for players.""" def __init__( @@ -138,8 +144,38 @@ class StartRaw(google.protobuf.message.Message): playable_area: s2clientprotocol.common_pb2.RectangleI | None = ..., start_locations: collections.abc.Iterable[s2clientprotocol.common_pb2.Point2D] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["map_size", b"map_size", "pathing_grid", b"pathing_grid", "placement_grid", b"placement_grid", "playable_area", b"playable_area", "terrain_height", b"terrain_height"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["map_size", b"map_size", "pathing_grid", b"pathing_grid", "placement_grid", b"placement_grid", "playable_area", b"playable_area", "start_locations", b"start_locations", "terrain_height", b"terrain_height"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "map_size", + b"map_size", + "pathing_grid", + b"pathing_grid", + "placement_grid", + b"placement_grid", + "playable_area", + b"playable_area", + "terrain_height", + b"terrain_height", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "map_size", + b"map_size", + "pathing_grid", + b"pathing_grid", + "placement_grid", + b"placement_grid", + "playable_area", + b"playable_area", + "start_locations", + b"start_locations", + "terrain_height", + b"terrain_height", + ], + ) -> None: ... 
global___StartRaw = StartRaw @@ -181,8 +217,26 @@ class ObservationRaw(google.protobuf.message.Message): effects: collections.abc.Iterable[global___Effect] | None = ..., radar: collections.abc.Iterable[global___RadarRing] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["event", b"event", "map_state", b"map_state", "player", b"player"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["effects", b"effects", "event", b"event", "map_state", b"map_state", "player", b"player", "radar", b"radar", "units", b"units"]) -> None: ... + def HasField( + self, field_name: typing.Literal["event", b"event", "map_state", b"map_state", "player", b"player"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "effects", + b"effects", + "event", + b"event", + "map_state", + b"map_state", + "player", + b"player", + "radar", + b"radar", + "units", + b"units", + ], + ) -> None: ... global___ObservationRaw = ObservationRaw @@ -195,12 +249,7 @@ class RadarRing(google.protobuf.message.Message): radius: builtins.float @property def pos(self) -> s2clientprotocol.common_pb2.Point: ... - def __init__( - self, - *, - pos: s2clientprotocol.common_pb2.Point | None = ..., - radius: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, pos: s2clientprotocol.common_pb2.Point | None = ..., radius: builtins.float | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["pos", b"pos", "radius", b"radius"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["pos", b"pos", "radius", b"radius"]) -> None: ... @@ -252,7 +301,9 @@ class PlayerRaw(google.protobuf.message.Message): upgrade_ids: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["camera", b"camera"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["camera", b"camera", "power_sources", b"power_sources", "upgrade_ids", b"upgrade_ids"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["camera", b"camera", "power_sources", b"power_sources", "upgrade_ids", b"upgrade_ids"] + ) -> None: ... global___PlayerRaw = PlayerRaw @@ -278,9 +329,39 @@ class UnitOrder(google.protobuf.message.Message): target_unit_tag: builtins.int | None = ..., progress: builtins.float | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "progress", b"progress", "target", b"target", "target_unit_tag", b"target_unit_tag", "target_world_space_pos", b"target_world_space_pos"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "progress", b"progress", "target", b"target", "target_unit_tag", b"target_unit_tag", "target_world_space_pos", b"target_world_space_pos"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["target", b"target"]) -> typing.Literal["target_world_space_pos", "target_unit_tag"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "progress", + b"progress", + "target", + b"target", + "target_unit_tag", + b"target_unit_tag", + "target_world_space_pos", + b"target_world_space_pos", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "progress", + b"progress", + "target", + b"target", + "target_unit_tag", + b"target_unit_tag", + "target_world_space_pos", + b"target_world_space_pos", + ], + ) -> None: ... 
+ def WhichOneof( + self, oneof_group: typing.Literal["target", b"target"] + ) -> typing.Literal["target_world_space_pos", "target_unit_tag"] | None: ... global___UnitOrder = UnitOrder @@ -316,8 +397,48 @@ class PassengerUnit(google.protobuf.message.Message): energy_max: builtins.float | None = ..., unit_type: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["energy", b"energy", "energy_max", b"energy_max", "health", b"health", "health_max", b"health_max", "shield", b"shield", "shield_max", b"shield_max", "tag", b"tag", "unit_type", b"unit_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["energy", b"energy", "energy_max", b"energy_max", "health", b"health", "health_max", b"health_max", "shield", b"shield", "shield_max", b"shield_max", "tag", b"tag", "unit_type", b"unit_type"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "energy", + b"energy", + "energy_max", + b"energy_max", + "health", + b"health", + "health_max", + b"health_max", + "shield", + b"shield", + "shield_max", + b"shield_max", + "tag", + b"tag", + "unit_type", + b"unit_type", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "energy", + b"energy", + "energy_max", + b"energy_max", + "health", + b"health", + "health_max", + b"health_max", + "shield", + b"shield", + "shield_max", + b"shield_max", + "tag", + b"tag", + "unit_type", + b"unit_type", + ], + ) -> None: ... global___PassengerUnit = PassengerUnit @@ -333,12 +454,7 @@ class RallyTarget(google.protobuf.message.Message): def point(self) -> s2clientprotocol.common_pb2.Point: """Will always be filled.""" - def __init__( - self, - *, - point: s2clientprotocol.common_pb2.Point | None = ..., - tag: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, point: s2clientprotocol.common_pb2.Point | None = ..., tag: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["point", b"point", "tag", b"tag"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["point", b"point", "tag", b"tag"]) -> None: ... @@ -501,8 +617,184 @@ class Unit(google.protobuf.message.Message): buff_duration_max: builtins.int | None = ..., rally_targets: collections.abc.Iterable[global___RallyTarget] | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["add_on_tag", b"add_on_tag", "alliance", b"alliance", "armor_upgrade_level", b"armor_upgrade_level", "assigned_harvesters", b"assigned_harvesters", "attack_upgrade_level", b"attack_upgrade_level", "buff_duration_max", b"buff_duration_max", "buff_duration_remain", b"buff_duration_remain", "build_progress", b"build_progress", "cargo_space_max", b"cargo_space_max", "cargo_space_taken", b"cargo_space_taken", "cloak", b"cloak", "detect_range", b"detect_range", "display_type", b"display_type", "energy", b"energy", "energy_max", b"energy_max", "engaged_target_tag", b"engaged_target_tag", "facing", b"facing", "health", b"health", "health_max", b"health_max", "ideal_harvesters", b"ideal_harvesters", "is_active", b"is_active", "is_blip", b"is_blip", "is_burrowed", b"is_burrowed", "is_flying", b"is_flying", "is_hallucination", b"is_hallucination", "is_on_screen", b"is_on_screen", "is_powered", b"is_powered", "is_selected", b"is_selected", "mineral_contents", b"mineral_contents", "owner", b"owner", "pos", b"pos", "radar_range", b"radar_range", "radius", b"radius", "shield", b"shield", "shield_max", b"shield_max", "shield_upgrade_level", b"shield_upgrade_level", "tag", b"tag", "unit_type", b"unit_type", "vespene_contents", b"vespene_contents", "weapon_cooldown", b"weapon_cooldown"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["add_on_tag", b"add_on_tag", "alliance", b"alliance", "armor_upgrade_level", b"armor_upgrade_level", "assigned_harvesters", b"assigned_harvesters", "attack_upgrade_level", b"attack_upgrade_level", "buff_duration_max", b"buff_duration_max", "buff_duration_remain", b"buff_duration_remain", "buff_ids", b"buff_ids", "build_progress", b"build_progress", "cargo_space_max", b"cargo_space_max", "cargo_space_taken", b"cargo_space_taken", "cloak", b"cloak", "detect_range", b"detect_range", "display_type", b"display_type", "energy", b"energy", "energy_max", b"energy_max", "engaged_target_tag", b"engaged_target_tag", "facing", b"facing", "health", b"health", "health_max", b"health_max", "ideal_harvesters", b"ideal_harvesters", "is_active", b"is_active", "is_blip", b"is_blip", "is_burrowed", b"is_burrowed", "is_flying", b"is_flying", "is_hallucination", b"is_hallucination", "is_on_screen", b"is_on_screen", "is_powered", b"is_powered", "is_selected", b"is_selected", "mineral_contents", b"mineral_contents", "orders", b"orders", "owner", b"owner", "passengers", b"passengers", "pos", b"pos", "radar_range", b"radar_range", "radius", b"radius", "rally_targets", b"rally_targets", "shield", b"shield", "shield_max", b"shield_max", "shield_upgrade_level", b"shield_upgrade_level", "tag", b"tag", "unit_type", b"unit_type", "vespene_contents", b"vespene_contents", "weapon_cooldown", b"weapon_cooldown"]) -> None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "add_on_tag", + b"add_on_tag", + "alliance", + b"alliance", + "armor_upgrade_level", + b"armor_upgrade_level", + "assigned_harvesters", + b"assigned_harvesters", + "attack_upgrade_level", + b"attack_upgrade_level", + "buff_duration_max", + b"buff_duration_max", + "buff_duration_remain", + b"buff_duration_remain", + "build_progress", + b"build_progress", + "cargo_space_max", + b"cargo_space_max", + "cargo_space_taken", + b"cargo_space_taken", + "cloak", + b"cloak", + "detect_range", + b"detect_range", + "display_type", + b"display_type", + "energy", + b"energy", + "energy_max", + b"energy_max", + "engaged_target_tag", + b"engaged_target_tag", + "facing", + b"facing", + "health", + b"health", + "health_max", + b"health_max", + "ideal_harvesters", + b"ideal_harvesters", + "is_active", + b"is_active", + "is_blip", + b"is_blip", + "is_burrowed", + b"is_burrowed", + "is_flying", + b"is_flying", + "is_hallucination", + b"is_hallucination", + "is_on_screen", + b"is_on_screen", + "is_powered", + b"is_powered", + "is_selected", + b"is_selected", + "mineral_contents", + b"mineral_contents", + "owner", + b"owner", + "pos", + b"pos", + "radar_range", + b"radar_range", + "radius", + b"radius", + "shield", + b"shield", + "shield_max", + b"shield_max", + "shield_upgrade_level", + b"shield_upgrade_level", + "tag", + b"tag", + "unit_type", + b"unit_type", + "vespene_contents", + b"vespene_contents", + "weapon_cooldown", + b"weapon_cooldown", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "add_on_tag", + b"add_on_tag", + "alliance", + b"alliance", + "armor_upgrade_level", + b"armor_upgrade_level", + "assigned_harvesters", + b"assigned_harvesters", + "attack_upgrade_level", + b"attack_upgrade_level", + "buff_duration_max", + b"buff_duration_max", + "buff_duration_remain", + b"buff_duration_remain", + "buff_ids", + b"buff_ids", + "build_progress", + b"build_progress", + "cargo_space_max", + b"cargo_space_max", + "cargo_space_taken", + b"cargo_space_taken", + "cloak", + b"cloak", + "detect_range", + b"detect_range", + "display_type", + b"display_type", + "energy", + b"energy", + "energy_max", + b"energy_max", + "engaged_target_tag", + b"engaged_target_tag", + "facing", + b"facing", + "health", + b"health", + "health_max", + b"health_max", + "ideal_harvesters", + b"ideal_harvesters", + "is_active", + b"is_active", + "is_blip", + b"is_blip", + "is_burrowed", + b"is_burrowed", + "is_flying", + b"is_flying", + "is_hallucination", + b"is_hallucination", + "is_on_screen", + b"is_on_screen", + "is_powered", + b"is_powered", + "is_selected", + b"is_selected", + "mineral_contents", + b"mineral_contents", + "orders", + b"orders", + "owner", + b"owner", + "passengers", + b"passengers", + "pos", + b"pos", + "radar_range", + b"radar_range", + "radius", + b"radius", + "rally_targets", + b"rally_targets", + "shield", + b"shield", + "shield_max", + b"shield_max", + "shield_upgrade_level", + b"shield_upgrade_level", + "tag", + b"tag", + "unit_type", + b"unit_type", + "vespene_contents", + b"vespene_contents", + "weapon_cooldown", + b"weapon_cooldown", + ], + ) -> None: ... global___Unit = Unit @@ -538,11 +830,7 @@ class Event(google.protobuf.message.Message): DEAD_UNITS_FIELD_NUMBER: builtins.int @property def dead_units(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - def __init__( - self, - *, - dead_units: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... 
+ def __init__(self, *, dead_units: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["dead_units", b"dead_units"]) -> None: ... global___Event = Event @@ -573,8 +861,16 @@ class Effect(google.protobuf.message.Message): owner: builtins.int | None = ..., radius: builtins.float | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["alliance", b"alliance", "effect_id", b"effect_id", "owner", b"owner", "radius", b"radius"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["alliance", b"alliance", "effect_id", b"effect_id", "owner", b"owner", "pos", b"pos", "radius", b"radius"]) -> None: ... + def HasField( + self, + field_name: typing.Literal["alliance", b"alliance", "effect_id", b"effect_id", "owner", b"owner", "radius", b"radius"], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "alliance", b"alliance", "effect_id", b"effect_id", "owner", b"owner", "pos", b"pos", "radius", b"radius" + ], + ) -> None: ... global___Effect = Effect @@ -602,9 +898,35 @@ class ActionRaw(google.protobuf.message.Message): camera_move: global___ActionRawCameraMove | None = ..., toggle_autocast: global___ActionRawToggleAutocast | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["action", b"action", "camera_move", b"camera_move", "toggle_autocast", b"toggle_autocast", "unit_command", b"unit_command"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["action", b"action", "camera_move", b"camera_move", "toggle_autocast", b"toggle_autocast", "unit_command", b"unit_command"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["action", b"action"]) -> typing.Literal["unit_command", "camera_move", "toggle_autocast"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "action", + b"action", + "camera_move", + b"camera_move", + "toggle_autocast", + b"toggle_autocast", + "unit_command", + b"unit_command", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "action", + b"action", + "camera_move", + b"camera_move", + "toggle_autocast", + b"toggle_autocast", + "unit_command", + b"unit_command", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["action", b"action"] + ) -> typing.Literal["unit_command", "camera_move", "toggle_autocast"] | None: ... global___ActionRaw = ActionRaw @@ -633,9 +955,41 @@ class ActionRawUnitCommand(google.protobuf.message.Message): unit_tags: collections.abc.Iterable[builtins.int] | None = ..., queue_command: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "queue_command", b"queue_command", "target", b"target", "target_unit_tag", b"target_unit_tag", "target_world_space_pos", b"target_world_space_pos"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "queue_command", b"queue_command", "target", b"target", "target_unit_tag", b"target_unit_tag", "target_world_space_pos", b"target_world_space_pos", "unit_tags", b"unit_tags"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["target", b"target"]) -> typing.Literal["target_world_space_pos", "target_unit_tag"] | None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "queue_command", + b"queue_command", + "target", + b"target", + "target_unit_tag", + b"target_unit_tag", + "target_world_space_pos", + b"target_world_space_pos", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "queue_command", + b"queue_command", + "target", + b"target", + "target_unit_tag", + b"target_unit_tag", + "target_world_space_pos", + b"target_world_space_pos", + "unit_tags", + b"unit_tags", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["target", b"target"] + ) -> typing.Literal["target_world_space_pos", "target_unit_tag"] | None: ... global___ActionRawUnitCommand = ActionRawUnitCommand @@ -646,11 +1000,7 @@ class ActionRawCameraMove(google.protobuf.message.Message): CENTER_WORLD_SPACE_FIELD_NUMBER: builtins.int @property def center_world_space(self) -> s2clientprotocol.common_pb2.Point: ... - def __init__( - self, - *, - center_world_space: s2clientprotocol.common_pb2.Point | None = ..., - ) -> None: ... + def __init__(self, *, center_world_space: s2clientprotocol.common_pb2.Point | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["center_world_space", b"center_world_space"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["center_world_space", b"center_world_space"]) -> None: ... @@ -666,10 +1016,7 @@ class ActionRawToggleAutocast(google.protobuf.message.Message): @property def unit_tags(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... def __init__( - self, - *, - ability_id: builtins.int | None = ..., - unit_tags: collections.abc.Iterable[builtins.int] | None = ..., + self, *, ability_id: builtins.int | None = ..., unit_tags: collections.abc.Iterable[builtins.int] | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["ability_id", b"ability_id"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "unit_tags", b"unit_tags"]) -> None: ... 
diff --git a/stubs/s2clientprotocol/s2clientprotocol/sc2api_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/sc2api_pb2.pyi index b979697237f5..fbf53b67a613 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/sc2api_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/sc2api_pb2.pyi @@ -72,7 +72,9 @@ class _Difficulty: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _DifficultyEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Difficulty.ValueType], builtins.type): +class _DifficultyEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Difficulty.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor VeryEasy: _Difficulty.ValueType # 1 Easy: _Difficulty.ValueType # 2 @@ -106,7 +108,9 @@ class _PlayerType: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _PlayerTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_PlayerType.ValueType], builtins.type): +class _PlayerTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_PlayerType.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Participant: _PlayerType.ValueType # 1 Computer: _PlayerType.ValueType # 2 @@ -371,9 +375,141 @@ class Request(google.protobuf.message.Message): debug: global___RequestDebug | None = ..., id: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["action", b"action", "available_maps", b"available_maps", "create_game", b"create_game", "data", b"data", "debug", b"debug", "game_info", b"game_info", "id", b"id", "join_game", b"join_game", "leave_game", b"leave_game", "map_command", b"map_command", "obs_action", b"obs_action", "observation", b"observation", "ping", b"ping", "query", b"query", "quick_load", b"quick_load", "quick_save", b"quick_save", "quit", b"quit", "replay_info", b"replay_info", "request", b"request", "restart_game", b"restart_game", "save_map", b"save_map", "save_replay", b"save_replay", "start_replay", b"start_replay", "step", b"step"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["action", b"action", "available_maps", b"available_maps", "create_game", b"create_game", "data", b"data", "debug", b"debug", "game_info", b"game_info", "id", b"id", "join_game", b"join_game", "leave_game", b"leave_game", "map_command", b"map_command", "obs_action", b"obs_action", "observation", b"observation", "ping", b"ping", "query", b"query", "quick_load", b"quick_load", "quick_save", b"quick_save", "quit", b"quit", "replay_info", b"replay_info", "request", b"request", "restart_game", b"restart_game", "save_map", b"save_map", "save_replay", b"save_replay", "start_replay", b"start_replay", "step", b"step"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["request", b"request"]) -> typing.Literal["create_game", "join_game", "restart_game", "start_replay", "leave_game", "quick_save", "quick_load", "quit", "game_info", "observation", "action", "obs_action", "step", "data", "query", "save_replay", "map_command", "replay_info", "available_maps", "save_map", "ping", "debug"] | None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "action", + b"action", + "available_maps", + b"available_maps", + "create_game", + b"create_game", + "data", + b"data", + "debug", + b"debug", + "game_info", + b"game_info", + "id", + b"id", + "join_game", + b"join_game", + "leave_game", + b"leave_game", + "map_command", + b"map_command", + "obs_action", + b"obs_action", + "observation", + b"observation", + "ping", + b"ping", + "query", + b"query", + "quick_load", + b"quick_load", + "quick_save", + b"quick_save", + "quit", + b"quit", + "replay_info", + b"replay_info", + "request", + b"request", + "restart_game", + b"restart_game", + "save_map", + b"save_map", + "save_replay", + b"save_replay", + "start_replay", + b"start_replay", + "step", + b"step", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "action", + b"action", + "available_maps", + b"available_maps", + "create_game", + b"create_game", + "data", + b"data", + "debug", + b"debug", + "game_info", + b"game_info", + "id", + b"id", + "join_game", + b"join_game", + "leave_game", + b"leave_game", + "map_command", + b"map_command", + "obs_action", + b"obs_action", + "observation", + b"observation", + "ping", + b"ping", + "query", + b"query", + "quick_load", + b"quick_load", + "quick_save", + b"quick_save", + "quit", + b"quit", + "replay_info", + b"replay_info", + "request", + b"request", + "restart_game", + b"restart_game", + "save_map", + b"save_map", + "save_replay", + b"save_replay", + "start_replay", + b"start_replay", + "step", + b"step", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["request", b"request"] + ) -> ( + typing.Literal[ + "create_game", + "join_game", + "restart_game", + "start_replay", + "leave_game", + "quick_save", + "quick_load", + "quit", + "game_info", + "observation", + "action", + "obs_action", + "step", + "data", + "query", + "save_replay", + "map_command", + "replay_info", + "available_maps", + "save_map", + "ping", + "debug", + ] + | None + ): ... global___Request = Request @@ -488,9 +624,147 @@ class Response(google.protobuf.message.Message): error: collections.abc.Iterable[builtins.str] | None = ..., status: global___Status.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["action", b"action", "available_maps", b"available_maps", "create_game", b"create_game", "data", b"data", "debug", b"debug", "game_info", b"game_info", "id", b"id", "join_game", b"join_game", "leave_game", b"leave_game", "map_command", b"map_command", "obs_action", b"obs_action", "observation", b"observation", "ping", b"ping", "query", b"query", "quick_load", b"quick_load", "quick_save", b"quick_save", "quit", b"quit", "replay_info", b"replay_info", "response", b"response", "restart_game", b"restart_game", "save_map", b"save_map", "save_replay", b"save_replay", "start_replay", b"start_replay", "status", b"status", "step", b"step"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["action", b"action", "available_maps", b"available_maps", "create_game", b"create_game", "data", b"data", "debug", b"debug", "error", b"error", "game_info", b"game_info", "id", b"id", "join_game", b"join_game", "leave_game", b"leave_game", "map_command", b"map_command", "obs_action", b"obs_action", "observation", b"observation", "ping", b"ping", "query", b"query", "quick_load", b"quick_load", "quick_save", b"quick_save", "quit", b"quit", "replay_info", b"replay_info", "response", b"response", "restart_game", b"restart_game", "save_map", b"save_map", "save_replay", b"save_replay", "start_replay", b"start_replay", "status", b"status", "step", b"step"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["response", b"response"]) -> typing.Literal["create_game", "join_game", "restart_game", "start_replay", "leave_game", "quick_save", "quick_load", "quit", "game_info", "observation", "action", "obs_action", "step", "data", "query", "save_replay", "replay_info", "available_maps", "save_map", "map_command", "ping", "debug"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "action", + b"action", + "available_maps", + b"available_maps", + "create_game", + b"create_game", + "data", + b"data", + "debug", + b"debug", + "game_info", + b"game_info", + "id", + b"id", + "join_game", + b"join_game", + "leave_game", + b"leave_game", + "map_command", + b"map_command", + "obs_action", + b"obs_action", + "observation", + b"observation", + "ping", + b"ping", + "query", + b"query", + "quick_load", + b"quick_load", + "quick_save", + b"quick_save", + "quit", + b"quit", + "replay_info", + b"replay_info", + "response", + b"response", + "restart_game", + b"restart_game", + "save_map", + b"save_map", + "save_replay", + b"save_replay", + "start_replay", + b"start_replay", + "status", + b"status", + "step", + b"step", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "action", + b"action", + "available_maps", + b"available_maps", + "create_game", + b"create_game", + "data", + b"data", + "debug", + b"debug", + "error", + b"error", + "game_info", + b"game_info", + "id", + b"id", + "join_game", + b"join_game", + "leave_game", + b"leave_game", + "map_command", + b"map_command", + "obs_action", + b"obs_action", + "observation", + b"observation", + "ping", + b"ping", + "query", + b"query", + "quick_load", + b"quick_load", + "quick_save", + b"quick_save", + "quit", + b"quit", + "replay_info", + b"replay_info", + "response", + b"response", + "restart_game", + b"restart_game", + "save_map", + b"save_map", + "save_replay", + b"save_replay", + "start_replay", + b"start_replay", + "status", + b"status", + "step", + b"step", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["response", b"response"] + ) -> ( + typing.Literal[ + "create_game", + "join_game", + "restart_game", + "start_replay", + "leave_game", + "quick_save", + "quick_load", + "quit", + "game_info", + "observation", + "action", + "obs_action", + "step", + "data", + "query", + "save_replay", + "replay_info", + "available_maps", + "save_map", + "map_command", + "ping", + "debug", + ] + | None + ): ... global___Response = Response @@ -532,9 +806,45 @@ class RequestCreateGame(google.protobuf.message.Message): random_seed: builtins.int | None = ..., realtime: builtins.bool | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["Map", b"Map", "battlenet_map_name", b"battlenet_map_name", "disable_fog", b"disable_fog", "local_map", b"local_map", "random_seed", b"random_seed", "realtime", b"realtime"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["Map", b"Map", "battlenet_map_name", b"battlenet_map_name", "disable_fog", b"disable_fog", "local_map", b"local_map", "player_setup", b"player_setup", "random_seed", b"random_seed", "realtime", b"realtime"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["Map", b"Map"]) -> typing.Literal["local_map", "battlenet_map_name"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "Map", + b"Map", + "battlenet_map_name", + b"battlenet_map_name", + "disable_fog", + b"disable_fog", + "local_map", + b"local_map", + "random_seed", + b"random_seed", + "realtime", + b"realtime", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "Map", + b"Map", + "battlenet_map_name", + b"battlenet_map_name", + "disable_fog", + b"disable_fog", + "local_map", + b"local_map", + "player_setup", + b"player_setup", + "random_seed", + b"random_seed", + "realtime", + b"realtime", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["Map", b"Map"] + ) -> typing.Literal["local_map", "battlenet_map_name"] | None: ... global___RequestCreateGame = RequestCreateGame @@ -550,12 +860,7 @@ class LocalMap(google.protobuf.message.Message): into the replay. (260 character max) """ map_data: builtins.bytes - def __init__( - self, - *, - map_path: builtins.str | None = ..., - map_data: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, map_path: builtins.str | None = ..., map_data: builtins.bytes | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["map_data", b"map_data", "map_path", b"map_path"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["map_data", b"map_data", "map_path", b"map_path"]) -> None: ... @@ -569,7 +874,9 @@ class ResponseCreateGame(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseCreateGame._Error.ValueType], builtins.type): + class _ErrorEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseCreateGame._Error.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor MissingMap: ResponseCreateGame._Error.ValueType # 1 InvalidMapPath: ResponseCreateGame._Error.ValueType # 2 @@ -597,10 +904,7 @@ class ResponseCreateGame(google.protobuf.message.Message): error: global___ResponseCreateGame.Error.ValueType error_details: builtins.str def __init__( - self, - *, - error: global___ResponseCreateGame.Error.ValueType | None = ..., - error_details: builtins.str | None = ..., + self, *, error: global___ResponseCreateGame.Error.ValueType | None = ..., error_details: builtins.str | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details"]) -> None: ... @@ -660,9 +964,53 @@ class RequestJoinGame(google.protobuf.message.Message): player_name: builtins.str | None = ..., host_ip: builtins.str | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["host_ip", b"host_ip", "observed_player_id", b"observed_player_id", "options", b"options", "participation", b"participation", "player_name", b"player_name", "race", b"race", "server_ports", b"server_ports", "shared_port", b"shared_port"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["client_ports", b"client_ports", "host_ip", b"host_ip", "observed_player_id", b"observed_player_id", "options", b"options", "participation", b"participation", "player_name", b"player_name", "race", b"race", "server_ports", b"server_ports", "shared_port", b"shared_port"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["participation", b"participation"]) -> typing.Literal["race", "observed_player_id"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "host_ip", + b"host_ip", + "observed_player_id", + b"observed_player_id", + "options", + b"options", + "participation", + b"participation", + "player_name", + b"player_name", + "race", + b"race", + "server_ports", + b"server_ports", + "shared_port", + b"shared_port", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "client_ports", + b"client_ports", + "host_ip", + b"host_ip", + "observed_player_id", + b"observed_player_id", + "options", + b"options", + "participation", + b"participation", + "player_name", + b"player_name", + "race", + b"race", + "server_ports", + b"server_ports", + "shared_port", + b"shared_port", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["participation", b"participation"] + ) -> typing.Literal["race", "observed_player_id"] | None: ... global___RequestJoinGame = RequestJoinGame @@ -675,12 +1023,7 @@ class PortSet(google.protobuf.message.Message): game_port: builtins.int """Game right now needs two internal ports to establish a multiplay game on the local host.""" base_port: builtins.int - def __init__( - self, - *, - game_port: builtins.int | None = ..., - base_port: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, game_port: builtins.int | None = ..., base_port: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["base_port", b"base_port", "game_port", b"game_port"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["base_port", b"base_port", "game_port", b"game_port"]) -> None: ... @@ -694,7 +1037,9 @@ class ResponseJoinGame(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseJoinGame._Error.ValueType], builtins.type): + class _ErrorEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseJoinGame._Error.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor MissingParticipation: ResponseJoinGame._Error.ValueType # 1 InvalidObservedPlayerId: ResponseJoinGame._Error.ValueType # 2 @@ -744,8 +1089,12 @@ class ResponseJoinGame(google.protobuf.message.Message): error: global___ResponseJoinGame.Error.ValueType | None = ..., error_details: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details", "player_id", b"player_id"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details", "player_id", b"player_id"]) -> None: ... + def HasField( + self, field_name: typing.Literal["error", b"error", "error_details", b"error_details", "player_id", b"player_id"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["error", b"error", "error_details", b"error_details", "player_id", b"player_id"] + ) -> None: ... global___ResponseJoinGame = ResponseJoinGame @@ -755,9 +1104,7 @@ class RequestRestartGame(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___RequestRestartGame = RequestRestartGame @@ -771,7 +1118,9 @@ class ResponseRestartGame(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseRestartGame._Error.ValueType], builtins.type): + class _ErrorEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseRestartGame._Error.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor LaunchError: ResponseRestartGame._Error.ValueType # 1 @@ -792,8 +1141,14 @@ class ResponseRestartGame(google.protobuf.message.Message): error_details: builtins.str | None = ..., need_hard_reset: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details", "need_hard_reset", b"need_hard_reset"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details", "need_hard_reset", b"need_hard_reset"]) -> None: ... + def HasField( + self, + field_name: typing.Literal["error", b"error", "error_details", b"error_details", "need_hard_reset", b"need_hard_reset"], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal["error", b"error", "error_details", b"error_details", "need_hard_reset", b"need_hard_reset"], + ) -> None: ... global___ResponseRestartGame = ResponseRestartGame @@ -834,9 +1189,55 @@ class RequestStartReplay(google.protobuf.message.Message): realtime: builtins.bool | None = ..., record_replay: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["disable_fog", b"disable_fog", "map_data", b"map_data", "observed_player_id", b"observed_player_id", "options", b"options", "realtime", b"realtime", "record_replay", b"record_replay", "replay", b"replay", "replay_data", b"replay_data", "replay_path", b"replay_path"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["disable_fog", b"disable_fog", "map_data", b"map_data", "observed_player_id", b"observed_player_id", "options", b"options", "realtime", b"realtime", "record_replay", b"record_replay", "replay", b"replay", "replay_data", b"replay_data", "replay_path", b"replay_path"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["replay", b"replay"]) -> typing.Literal["replay_path", "replay_data"] | None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "disable_fog", + b"disable_fog", + "map_data", + b"map_data", + "observed_player_id", + b"observed_player_id", + "options", + b"options", + "realtime", + b"realtime", + "record_replay", + b"record_replay", + "replay", + b"replay", + "replay_data", + b"replay_data", + "replay_path", + b"replay_path", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "disable_fog", + b"disable_fog", + "map_data", + b"map_data", + "observed_player_id", + b"observed_player_id", + "options", + b"options", + "realtime", + b"realtime", + "record_replay", + b"record_replay", + "replay", + b"replay", + "replay_data", + b"replay_data", + "replay_path", + b"replay_path", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["replay", b"replay"] + ) -> typing.Literal["replay_path", "replay_data"] | None: ... global___RequestStartReplay = RequestStartReplay @@ -848,7 +1249,9 @@ class ResponseStartReplay(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseStartReplay._Error.ValueType], builtins.type): + class _ErrorEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseStartReplay._Error.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor MissingReplay: ResponseStartReplay._Error.ValueType # 1 InvalidReplayPath: ResponseStartReplay._Error.ValueType # 2 @@ -872,10 +1275,7 @@ class ResponseStartReplay(google.protobuf.message.Message): error: global___ResponseStartReplay.Error.ValueType error_details: builtins.str def __init__( - self, - *, - error: global___ResponseStartReplay.Error.ValueType | None = ..., - error_details: builtins.str | None = ..., + self, *, error: global___ResponseStartReplay.Error.ValueType | None = ..., error_details: builtins.str | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details"]) -> None: ... @@ -890,11 +1290,7 @@ class RequestMapCommand(google.protobuf.message.Message): TRIGGER_CMD_FIELD_NUMBER: builtins.int trigger_cmd: builtins.str - def __init__( - self, - *, - trigger_cmd: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, trigger_cmd: builtins.str | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["trigger_cmd", b"trigger_cmd"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["trigger_cmd", b"trigger_cmd"]) -> None: ... 
@@ -908,7 +1304,9 @@ class ResponseMapCommand(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseMapCommand._Error.ValueType], builtins.type): + class _ErrorEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseMapCommand._Error.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor NoTriggerError: ResponseMapCommand._Error.ValueType # 1 @@ -920,10 +1318,7 @@ class ResponseMapCommand(google.protobuf.message.Message): error: global___ResponseMapCommand.Error.ValueType error_details: builtins.str def __init__( - self, - *, - error: global___ResponseMapCommand.Error.ValueType | None = ..., - error_details: builtins.str | None = ..., + self, *, error: global___ResponseMapCommand.Error.ValueType | None = ..., error_details: builtins.str | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["error", b"error", "error_details", b"error_details"]) -> None: ... @@ -936,9 +1331,7 @@ class RequestLeaveGame(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___RequestLeaveGame = RequestLeaveGame @@ -946,9 +1339,7 @@ global___RequestLeaveGame = RequestLeaveGame class ResponseLeaveGame(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ResponseLeaveGame = ResponseLeaveGame @@ -958,9 +1349,7 @@ class RequestQuickSave(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___RequestQuickSave = RequestQuickSave @@ -970,9 +1359,7 @@ class ResponseQuickSave(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ResponseQuickSave = ResponseQuickSave @@ -982,9 +1369,7 @@ class RequestQuickLoad(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___RequestQuickLoad = RequestQuickLoad @@ -994,9 +1379,7 @@ class ResponseQuickLoad(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ResponseQuickLoad = ResponseQuickLoad @@ -1006,9 +1389,7 @@ class RequestQuit(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___RequestQuit = RequestQuit @@ -1016,9 +1397,7 @@ global___RequestQuit = RequestQuit class ResponseQuit(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ResponseQuit = ResponseQuit @@ -1028,9 +1407,7 @@ class RequestGameInfo(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... 
global___RequestGameInfo = RequestGameInfo @@ -1066,8 +1443,29 @@ class ResponseGameInfo(google.protobuf.message.Message): start_raw: s2clientprotocol.raw_pb2.StartRaw | None = ..., options: global___InterfaceOptions | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["local_map_path", b"local_map_path", "map_name", b"map_name", "options", b"options", "start_raw", b"start_raw"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["local_map_path", b"local_map_path", "map_name", b"map_name", "mod_names", b"mod_names", "options", b"options", "player_info", b"player_info", "start_raw", b"start_raw"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "local_map_path", b"local_map_path", "map_name", b"map_name", "options", b"options", "start_raw", b"start_raw" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "local_map_path", + b"local_map_path", + "map_name", + b"map_name", + "mod_names", + b"mod_names", + "options", + b"options", + "player_info", + b"player_info", + "start_raw", + b"start_raw", + ], + ) -> None: ... global___ResponseGameInfo = ResponseGameInfo @@ -1082,12 +1480,7 @@ class RequestObservation(google.protobuf.message.Message): disable_fog: builtins.bool game_loop: builtins.int """In realtime the request will only return once the simulation game loop has reached this value. When not realtime this value is ignored.""" - def __init__( - self, - *, - disable_fog: builtins.bool | None = ..., - game_loop: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, disable_fog: builtins.bool | None = ..., game_loop: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["disable_fog", b"disable_fog", "game_loop", b"game_loop"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["disable_fog", b"disable_fog", "game_loop", b"game_loop"]) -> None: ... @@ -1128,7 +1521,21 @@ class ResponseObservation(google.protobuf.message.Message): chat: collections.abc.Iterable[global___ChatReceived] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["observation", b"observation"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["action_errors", b"action_errors", "actions", b"actions", "chat", b"chat", "observation", b"observation", "player_result", b"player_result"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "action_errors", + b"action_errors", + "actions", + b"actions", + "chat", + b"chat", + "observation", + b"observation", + "player_result", + b"player_result", + ], + ) -> None: ... global___ResponseObservation = ResponseObservation @@ -1140,12 +1547,7 @@ class ChatReceived(google.protobuf.message.Message): MESSAGE_FIELD_NUMBER: builtins.int player_id: builtins.int message: builtins.str - def __init__( - self, - *, - player_id: builtins.int | None = ..., - message: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, player_id: builtins.int | None = ..., message: builtins.str | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["message", b"message", "player_id", b"player_id"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["message", b"message", "player_id", b"player_id"]) -> None: ... 
@@ -1160,11 +1562,7 @@ class RequestAction(google.protobuf.message.Message): ACTIONS_FIELD_NUMBER: builtins.int @property def actions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Action]: ... - def __init__( - self, - *, - actions: collections.abc.Iterable[global___Action] | None = ..., - ) -> None: ... + def __init__(self, *, actions: collections.abc.Iterable[global___Action] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["actions", b"actions"]) -> None: ... global___RequestAction = RequestAction @@ -1175,11 +1573,11 @@ class ResponseAction(google.protobuf.message.Message): RESULT_FIELD_NUMBER: builtins.int @property - def result(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[s2clientprotocol.error_pb2.ActionResult.ValueType]: ... - def __init__( + def result( self, - *, - result: collections.abc.Iterable[s2clientprotocol.error_pb2.ActionResult.ValueType] | None = ..., + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[s2clientprotocol.error_pb2.ActionResult.ValueType]: ... + def __init__( + self, *, result: collections.abc.Iterable[s2clientprotocol.error_pb2.ActionResult.ValueType] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["result", b"result"]) -> None: ... @@ -1194,11 +1592,7 @@ class RequestObserverAction(google.protobuf.message.Message): ACTIONS_FIELD_NUMBER: builtins.int @property def actions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ObserverAction]: ... - def __init__( - self, - *, - actions: collections.abc.Iterable[global___ObserverAction] | None = ..., - ) -> None: ... + def __init__(self, *, actions: collections.abc.Iterable[global___ObserverAction] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["actions", b"actions"]) -> None: ... global___RequestObserverAction = RequestObserverAction @@ -1207,9 +1601,7 @@ global___RequestObserverAction = RequestObserverAction class ResponseObserverAction(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ResponseObserverAction = ResponseObserverAction @@ -1222,11 +1614,7 @@ class RequestStep(google.protobuf.message.Message): COUNT_FIELD_NUMBER: builtins.int count: builtins.int """Number of game loops to simulate for the next frame.""" - def __init__( - self, - *, - count: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, count: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["count", b"count"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["count", b"count"]) -> None: ... @@ -1243,11 +1631,7 @@ class ResponseStep(google.protobuf.message.Message): representable as a positive fixed point number. When we reach the "end of time", permanently pause the game and end the game for all. """ - def __init__( - self, - *, - simulation_loop: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, simulation_loop: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["simulation_loop", b"simulation_loop"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["simulation_loop", b"simulation_loop"]) -> None: ... @@ -1278,8 +1662,36 @@ class RequestData(google.protobuf.message.Message): buff_id: builtins.bool | None = ..., effect_id: builtins.bool | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "buff_id", b"buff_id", "effect_id", b"effect_id", "unit_type_id", b"unit_type_id", "upgrade_id", b"upgrade_id"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "buff_id", b"buff_id", "effect_id", b"effect_id", "unit_type_id", b"unit_type_id", "upgrade_id", b"upgrade_id"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "buff_id", + b"buff_id", + "effect_id", + b"effect_id", + "unit_type_id", + b"unit_type_id", + "upgrade_id", + b"upgrade_id", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "buff_id", + b"buff_id", + "effect_id", + b"effect_id", + "unit_type_id", + b"unit_type_id", + "upgrade_id", + b"upgrade_id", + ], + ) -> None: ... global___RequestData = RequestData @@ -1293,15 +1705,25 @@ class ResponseData(google.protobuf.message.Message): BUFFS_FIELD_NUMBER: builtins.int EFFECTS_FIELD_NUMBER: builtins.int @property - def abilities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.AbilityData]: ... + def abilities( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.AbilityData]: ... @property - def units(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.UnitTypeData]: ... + def units( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.UnitTypeData]: ... @property - def upgrades(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.UpgradeData]: ... + def upgrades( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.UpgradeData]: ... @property - def buffs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.BuffData]: ... + def buffs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.BuffData]: ... @property - def effects(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.EffectData]: ... + def effects( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.data_pb2.EffectData]: ... def __init__( self, *, @@ -1311,7 +1733,12 @@ class ResponseData(google.protobuf.message.Message): buffs: collections.abc.Iterable[s2clientprotocol.data_pb2.BuffData] | None = ..., effects: collections.abc.Iterable[s2clientprotocol.data_pb2.EffectData] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["abilities", b"abilities", "buffs", b"buffs", "effects", b"effects", "units", b"units", "upgrades", b"upgrades"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "abilities", b"abilities", "buffs", b"buffs", "effects", b"effects", "units", b"units", "upgrades", b"upgrades" + ], + ) -> None: ... global___ResponseData = ResponseData @@ -1321,9 +1748,7 @@ class RequestSaveReplay(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... 
global___RequestSaveReplay = RequestSaveReplay @@ -1333,11 +1758,7 @@ class ResponseSaveReplay(google.protobuf.message.Message): DATA_FIELD_NUMBER: builtins.int data: builtins.bytes - def __init__( - self, - *, - data: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, data: builtins.bytes | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["data", b"data"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... @@ -1364,9 +1785,21 @@ class RequestReplayInfo(google.protobuf.message.Message): replay_data: builtins.bytes | None = ..., download_data: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["download_data", b"download_data", "replay", b"replay", "replay_data", b"replay_data", "replay_path", b"replay_path"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["download_data", b"download_data", "replay", b"replay", "replay_data", b"replay_data", "replay_path", b"replay_path"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["replay", b"replay"]) -> typing.Literal["replay_path", "replay_data"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "download_data", b"download_data", "replay", b"replay", "replay_data", b"replay_data", "replay_path", b"replay_path" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "download_data", b"download_data", "replay", b"replay", "replay_data", b"replay_data", "replay_path", b"replay_path" + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["replay", b"replay"] + ) -> typing.Literal["replay_path", "replay_data"] | None: ... global___RequestReplayInfo = RequestReplayInfo @@ -1392,8 +1825,32 @@ class PlayerInfoExtra(google.protobuf.message.Message): player_mmr: builtins.int | None = ..., player_apm: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["player_apm", b"player_apm", "player_info", b"player_info", "player_mmr", b"player_mmr", "player_result", b"player_result"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["player_apm", b"player_apm", "player_info", b"player_info", "player_mmr", b"player_mmr", "player_result", b"player_result"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "player_apm", + b"player_apm", + "player_info", + b"player_info", + "player_mmr", + b"player_mmr", + "player_result", + b"player_result", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "player_apm", + b"player_apm", + "player_info", + b"player_info", + "player_mmr", + b"player_mmr", + "player_result", + b"player_result", + ], + ) -> None: ... 
global___PlayerInfoExtra = PlayerInfoExtra @@ -1405,7 +1862,9 @@ class ResponseReplayInfo(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseReplayInfo._Error.ValueType], builtins.type): + class _ErrorEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseReplayInfo._Error.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor MissingReplay: ResponseReplayInfo._Error.ValueType # 1 InvalidReplayPath: ResponseReplayInfo._Error.ValueType # 2 @@ -1458,8 +1917,58 @@ class ResponseReplayInfo(google.protobuf.message.Message): error: global___ResponseReplayInfo.Error.ValueType | None = ..., error_details: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["base_build", b"base_build", "data_build", b"data_build", "data_version", b"data_version", "error", b"error", "error_details", b"error_details", "game_duration_loops", b"game_duration_loops", "game_duration_seconds", b"game_duration_seconds", "game_version", b"game_version", "local_map_path", b"local_map_path", "map_name", b"map_name"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["base_build", b"base_build", "data_build", b"data_build", "data_version", b"data_version", "error", b"error", "error_details", b"error_details", "game_duration_loops", b"game_duration_loops", "game_duration_seconds", b"game_duration_seconds", "game_version", b"game_version", "local_map_path", b"local_map_path", "map_name", b"map_name", "player_info", b"player_info"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "base_build", + b"base_build", + "data_build", + b"data_build", + "data_version", + b"data_version", + "error", + b"error", + "error_details", + b"error_details", + "game_duration_loops", + b"game_duration_loops", + "game_duration_seconds", + b"game_duration_seconds", + "game_version", + b"game_version", + "local_map_path", + b"local_map_path", + "map_name", + b"map_name", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "base_build", + b"base_build", + "data_build", + b"data_build", + "data_version", + b"data_version", + "error", + b"error", + "error_details", + b"error_details", + "game_duration_loops", + b"game_duration_loops", + "game_duration_seconds", + b"game_duration_seconds", + "game_version", + b"game_version", + "local_map_path", + b"local_map_path", + "map_name", + b"map_name", + "player_info", + b"player_info", + ], + ) -> None: ... global___ResponseReplayInfo = ResponseReplayInfo @@ -1469,9 +1978,7 @@ class RequestAvailableMaps(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___RequestAvailableMaps = RequestAvailableMaps @@ -1500,7 +2007,9 @@ class ResponseAvailableMaps(google.protobuf.message.Message): local_map_paths: collections.abc.Iterable[builtins.str] | None = ..., battlenet_map_names: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["battlenet_map_names", b"battlenet_map_names", "local_map_paths", b"local_map_paths"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["battlenet_map_names", b"battlenet_map_names", "local_map_paths", b"local_map_paths"] + ) -> None: ... 
global___ResponseAvailableMaps = ResponseAvailableMaps @@ -1518,12 +2027,7 @@ class RequestSaveMap(google.protobuf.message.Message): """Path the game process will write to, relative to the temp directory. (260 character max)""" map_data: builtins.bytes """Binary map data of a .SC2Map.""" - def __init__( - self, - *, - map_path: builtins.str | None = ..., - map_data: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, map_path: builtins.str | None = ..., map_data: builtins.bytes | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["map_data", b"map_data", "map_path", b"map_path"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["map_data", b"map_data", "map_path", b"map_path"]) -> None: ... @@ -1537,7 +2041,9 @@ class ResponseSaveMap(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseSaveMap._Error.ValueType], builtins.type): + class _ErrorEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ResponseSaveMap._Error.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor InvalidMapData: ResponseSaveMap._Error.ValueType # 1 @@ -1546,11 +2052,7 @@ class ResponseSaveMap(google.protobuf.message.Message): ERROR_FIELD_NUMBER: builtins.int error: global___ResponseSaveMap.Error.ValueType - def __init__( - self, - *, - error: global___ResponseSaveMap.Error.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, error: global___ResponseSaveMap.Error.ValueType | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["error", b"error"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["error", b"error"]) -> None: ... @@ -1562,9 +2064,7 @@ class RequestPing(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___RequestPing = RequestPing @@ -1588,8 +2088,32 @@ class ResponsePing(google.protobuf.message.Message): data_build: builtins.int | None = ..., base_build: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["base_build", b"base_build", "data_build", b"data_build", "data_version", b"data_version", "game_version", b"game_version"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["base_build", b"base_build", "data_build", b"data_build", "data_version", b"data_version", "game_version", b"game_version"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "base_build", + b"base_build", + "data_build", + b"data_build", + "data_version", + b"data_version", + "game_version", + b"game_version", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "base_build", + b"base_build", + "data_build", + b"data_build", + "data_version", + b"data_version", + "game_version", + b"game_version", + ], + ) -> None: ... global___ResponsePing = ResponsePing @@ -1601,12 +2125,10 @@ class RequestDebug(google.protobuf.message.Message): DEBUG_FIELD_NUMBER: builtins.int @property - def debug(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.debug_pb2.DebugCommand]: ... - def __init__( + def debug( self, - *, - debug: collections.abc.Iterable[s2clientprotocol.debug_pb2.DebugCommand] | None = ..., - ) -> None: ... 
+ ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.debug_pb2.DebugCommand]: ... + def __init__(self, *, debug: collections.abc.Iterable[s2clientprotocol.debug_pb2.DebugCommand] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["debug", b"debug"]) -> None: ... global___RequestDebug = RequestDebug @@ -1615,9 +2137,7 @@ global___RequestDebug = RequestDebug class ResponseDebug(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ResponseDebug = ResponseDebug @@ -1645,8 +2165,18 @@ class PlayerSetup(google.protobuf.message.Message): player_name: builtins.str | None = ..., ai_build: global___AIBuild.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ai_build", b"ai_build", "difficulty", b"difficulty", "player_name", b"player_name", "race", b"race", "type", b"type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ai_build", b"ai_build", "difficulty", b"difficulty", "player_name", b"player_name", "race", b"race", "type", b"type"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "ai_build", b"ai_build", "difficulty", b"difficulty", "player_name", b"player_name", "race", b"race", "type", b"type" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ai_build", b"ai_build", "difficulty", b"difficulty", "player_name", b"player_name", "race", b"race", "type", b"type" + ], + ) -> None: ... global___PlayerSetup = PlayerSetup @@ -1680,8 +2210,36 @@ class SpatialCameraSetup(google.protobuf.message.Message): crop_to_playable_area: builtins.bool | None = ..., allow_cheating_layers: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["allow_cheating_layers", b"allow_cheating_layers", "crop_to_playable_area", b"crop_to_playable_area", "minimap_resolution", b"minimap_resolution", "resolution", b"resolution", "width", b"width"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["allow_cheating_layers", b"allow_cheating_layers", "crop_to_playable_area", b"crop_to_playable_area", "minimap_resolution", b"minimap_resolution", "resolution", b"resolution", "width", b"width"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "allow_cheating_layers", + b"allow_cheating_layers", + "crop_to_playable_area", + b"crop_to_playable_area", + "minimap_resolution", + b"minimap_resolution", + "resolution", + b"resolution", + "width", + b"width", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "allow_cheating_layers", + b"allow_cheating_layers", + "crop_to_playable_area", + b"crop_to_playable_area", + "minimap_resolution", + b"minimap_resolution", + "resolution", + b"resolution", + "width", + b"width", + ], + ) -> None: ... global___SpatialCameraSetup = SpatialCameraSetup @@ -1739,8 +2297,52 @@ class InterfaceOptions(google.protobuf.message.Message): raw_affects_selection: builtins.bool | None = ..., raw_crop_to_playable_area: builtins.bool | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["feature_layer", b"feature_layer", "raw", b"raw", "raw_affects_selection", b"raw_affects_selection", "raw_crop_to_playable_area", b"raw_crop_to_playable_area", "render", b"render", "score", b"score", "show_burrowed_shadows", b"show_burrowed_shadows", "show_cloaked", b"show_cloaked", "show_placeholders", b"show_placeholders"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["feature_layer", b"feature_layer", "raw", b"raw", "raw_affects_selection", b"raw_affects_selection", "raw_crop_to_playable_area", b"raw_crop_to_playable_area", "render", b"render", "score", b"score", "show_burrowed_shadows", b"show_burrowed_shadows", "show_cloaked", b"show_cloaked", "show_placeholders", b"show_placeholders"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "feature_layer", + b"feature_layer", + "raw", + b"raw", + "raw_affects_selection", + b"raw_affects_selection", + "raw_crop_to_playable_area", + b"raw_crop_to_playable_area", + "render", + b"render", + "score", + b"score", + "show_burrowed_shadows", + b"show_burrowed_shadows", + "show_cloaked", + b"show_cloaked", + "show_placeholders", + b"show_placeholders", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "feature_layer", + b"feature_layer", + "raw", + b"raw", + "raw_affects_selection", + b"raw_affects_selection", + "raw_crop_to_playable_area", + b"raw_crop_to_playable_area", + "render", + b"render", + "score", + b"score", + "show_burrowed_shadows", + b"show_burrowed_shadows", + "show_cloaked", + b"show_cloaked", + "show_placeholders", + b"show_placeholders", + ], + ) -> None: ... global___InterfaceOptions = InterfaceOptions @@ -1777,8 +2379,44 @@ class PlayerInfo(google.protobuf.message.Message): ai_build: global___AIBuild.ValueType | None = ..., player_name: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ai_build", b"ai_build", "difficulty", b"difficulty", "player_id", b"player_id", "player_name", b"player_name", "race_actual", b"race_actual", "race_requested", b"race_requested", "type", b"type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ai_build", b"ai_build", "difficulty", b"difficulty", "player_id", b"player_id", "player_name", b"player_name", "race_actual", b"race_actual", "race_requested", b"race_requested", "type", b"type"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "ai_build", + b"ai_build", + "difficulty", + b"difficulty", + "player_id", + b"player_id", + "player_name", + b"player_name", + "race_actual", + b"race_actual", + "race_requested", + b"race_requested", + "type", + b"type", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ai_build", + b"ai_build", + "difficulty", + b"difficulty", + "player_id", + b"player_id", + "player_name", + b"player_name", + "race_actual", + b"race_actual", + "race_requested", + b"race_requested", + "type", + b"type", + ], + ) -> None: ... global___PlayerInfo = PlayerInfo @@ -1827,8 +2465,60 @@ class PlayerCommon(google.protobuf.message.Message): warp_gate_count: builtins.int | None = ..., larva_count: builtins.int | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["army_count", b"army_count", "food_army", b"food_army", "food_cap", b"food_cap", "food_used", b"food_used", "food_workers", b"food_workers", "idle_worker_count", b"idle_worker_count", "larva_count", b"larva_count", "minerals", b"minerals", "player_id", b"player_id", "vespene", b"vespene", "warp_gate_count", b"warp_gate_count"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["army_count", b"army_count", "food_army", b"food_army", "food_cap", b"food_cap", "food_used", b"food_used", "food_workers", b"food_workers", "idle_worker_count", b"idle_worker_count", "larva_count", b"larva_count", "minerals", b"minerals", "player_id", b"player_id", "vespene", b"vespene", "warp_gate_count", b"warp_gate_count"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "army_count", + b"army_count", + "food_army", + b"food_army", + "food_cap", + b"food_cap", + "food_used", + b"food_used", + "food_workers", + b"food_workers", + "idle_worker_count", + b"idle_worker_count", + "larva_count", + b"larva_count", + "minerals", + b"minerals", + "player_id", + b"player_id", + "vespene", + b"vespene", + "warp_gate_count", + b"warp_gate_count", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "army_count", + b"army_count", + "food_army", + b"food_army", + "food_cap", + b"food_cap", + "food_used", + b"food_used", + "food_workers", + b"food_workers", + "idle_worker_count", + b"idle_worker_count", + "larva_count", + b"larva_count", + "minerals", + b"minerals", + "player_id", + b"player_id", + "vespene", + b"vespene", + "warp_gate_count", + b"warp_gate_count", + ], + ) -> None: ... global___PlayerCommon = PlayerCommon @@ -1851,7 +2541,9 @@ class Observation(google.protobuf.message.Message): @property def alerts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___Alert.ValueType]: ... @property - def abilities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.common_pb2.AvailableAbility]: + def abilities( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.common_pb2.AvailableAbility]: """Abilities available in the selection. Enabled if in this list, disabled otherwise.""" @property @@ -1885,8 +2577,48 @@ class Observation(google.protobuf.message.Message): render_data: s2clientprotocol.spatial_pb2.ObservationRender | None = ..., ui_data: s2clientprotocol.ui_pb2.ObservationUI | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["feature_layer_data", b"feature_layer_data", "game_loop", b"game_loop", "player_common", b"player_common", "raw_data", b"raw_data", "render_data", b"render_data", "score", b"score", "ui_data", b"ui_data"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["abilities", b"abilities", "alerts", b"alerts", "feature_layer_data", b"feature_layer_data", "game_loop", b"game_loop", "player_common", b"player_common", "raw_data", b"raw_data", "render_data", b"render_data", "score", b"score", "ui_data", b"ui_data"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "feature_layer_data", + b"feature_layer_data", + "game_loop", + b"game_loop", + "player_common", + b"player_common", + "raw_data", + b"raw_data", + "render_data", + b"render_data", + "score", + b"score", + "ui_data", + b"ui_data", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "abilities", + b"abilities", + "alerts", + b"alerts", + "feature_layer_data", + b"feature_layer_data", + "game_loop", + b"game_loop", + "player_common", + b"player_common", + "raw_data", + b"raw_data", + "render_data", + b"render_data", + "score", + b"score", + "ui_data", + b"ui_data", + ], + ) -> None: ... global___Observation = Observation @@ -1932,8 +2664,40 @@ class Action(google.protobuf.message.Message): action_chat: global___ActionChat | None = ..., game_loop: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["action_chat", b"action_chat", "action_feature_layer", b"action_feature_layer", "action_raw", b"action_raw", "action_render", b"action_render", "action_ui", b"action_ui", "game_loop", b"game_loop"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["action_chat", b"action_chat", "action_feature_layer", b"action_feature_layer", "action_raw", b"action_raw", "action_render", b"action_render", "action_ui", b"action_ui", "game_loop", b"game_loop"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "action_chat", + b"action_chat", + "action_feature_layer", + b"action_feature_layer", + "action_raw", + b"action_raw", + "action_render", + b"action_render", + "action_ui", + b"action_ui", + "game_loop", + b"game_loop", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "action_chat", + b"action_chat", + "action_feature_layer", + b"action_feature_layer", + "action_raw", + b"action_raw", + "action_render", + b"action_render", + "action_ui", + b"action_ui", + "game_loop", + b"game_loop", + ], + ) -> None: ... global___Action = Action @@ -1945,7 +2709,9 @@ class ActionChat(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ChannelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionChat._Channel.ValueType], builtins.type): + class _ChannelEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionChat._Channel.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Broadcast: ActionChat._Channel.ValueType # 1 Team: ActionChat._Channel.ValueType # 2 @@ -1959,10 +2725,7 @@ class ActionChat(google.protobuf.message.Message): channel: global___ActionChat.Channel.ValueType message: builtins.str def __init__( - self, - *, - channel: global___ActionChat.Channel.ValueType | None = ..., - message: builtins.str | None = ..., + self, *, channel: global___ActionChat.Channel.ValueType | None = ..., message: builtins.str | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["channel", b"channel", "message", b"message"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["channel", b"channel", "message", b"message"]) -> None: ... @@ -1987,8 +2750,12 @@ class ActionError(google.protobuf.message.Message): ability_id: builtins.int | None = ..., result: s2clientprotocol.error_pb2.ActionResult.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "result", b"result", "unit_tag", b"unit_tag"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "result", b"result", "unit_tag", b"unit_tag"]) -> None: ... 
+ def HasField( + self, field_name: typing.Literal["ability_id", b"ability_id", "result", b"result", "unit_tag", b"unit_tag"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["ability_id", b"ability_id", "result", b"result", "unit_tag", b"unit_tag"] + ) -> None: ... global___ActionError = ActionError @@ -2020,9 +2787,39 @@ class ObserverAction(google.protobuf.message.Message): camera_follow_player: global___ActionObserverCameraFollowPlayer | None = ..., camera_follow_units: global___ActionObserverCameraFollowUnits | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["action", b"action", "camera_follow_player", b"camera_follow_player", "camera_follow_units", b"camera_follow_units", "camera_move", b"camera_move", "player_perspective", b"player_perspective"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["action", b"action", "camera_follow_player", b"camera_follow_player", "camera_follow_units", b"camera_follow_units", "camera_move", b"camera_move", "player_perspective", b"player_perspective"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["action", b"action"]) -> typing.Literal["player_perspective", "camera_move", "camera_follow_player", "camera_follow_units"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "action", + b"action", + "camera_follow_player", + b"camera_follow_player", + "camera_follow_units", + b"camera_follow_units", + "camera_move", + b"camera_move", + "player_perspective", + b"player_perspective", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "action", + b"action", + "camera_follow_player", + b"camera_follow_player", + "camera_follow_units", + b"camera_follow_units", + "camera_move", + b"camera_move", + "player_perspective", + b"player_perspective", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["action", b"action"] + ) -> typing.Literal["player_perspective", "camera_move", "camera_follow_player", "camera_follow_units"] | None: ... global___ObserverAction = ObserverAction @@ -2033,11 +2830,7 @@ class ActionObserverPlayerPerspective(google.protobuf.message.Message): PLAYER_ID_FIELD_NUMBER: builtins.int player_id: builtins.int """0 to observe "Everyone" """ - def __init__( - self, - *, - player_id: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, player_id: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["player_id", b"player_id"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["player_id", b"player_id"]) -> None: ... @@ -2056,10 +2849,7 @@ class ActionObserverCameraMove(google.protobuf.message.Message): @property def world_pos(self) -> s2clientprotocol.common_pb2.Point2D: ... def __init__( - self, - *, - world_pos: s2clientprotocol.common_pb2.Point2D | None = ..., - distance: builtins.float | None = ..., + self, *, world_pos: s2clientprotocol.common_pb2.Point2D | None = ..., distance: builtins.float | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["distance", b"distance", "world_pos", b"world_pos"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["distance", b"distance", "world_pos", b"world_pos"]) -> None: ... @@ -2073,11 +2863,7 @@ class ActionObserverCameraFollowPlayer(google.protobuf.message.Message): PLAYER_ID_FIELD_NUMBER: builtins.int player_id: builtins.int """Not implemented. 
Value must be [1, 15]""" - def __init__( - self, - *, - player_id: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, player_id: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["player_id", b"player_id"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["player_id", b"player_id"]) -> None: ... @@ -2090,11 +2876,7 @@ class ActionObserverCameraFollowUnits(google.protobuf.message.Message): UNIT_TAGS_FIELD_NUMBER: builtins.int @property def unit_tags(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - def __init__( - self, - *, - unit_tags: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... + def __init__(self, *, unit_tags: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["unit_tags", b"unit_tags"]) -> None: ... global___ActionObserverCameraFollowUnits = ActionObserverCameraFollowUnits @@ -2107,12 +2889,7 @@ class PlayerResult(google.protobuf.message.Message): RESULT_FIELD_NUMBER: builtins.int player_id: builtins.int result: global___Result.ValueType - def __init__( - self, - *, - player_id: builtins.int | None = ..., - result: global___Result.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, player_id: builtins.int | None = ..., result: global___Result.ValueType | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["player_id", b"player_id", "result", b"result"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["player_id", b"player_id", "result", b"result"]) -> None: ... diff --git a/stubs/s2clientprotocol/s2clientprotocol/score_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/score_pb2.pyi index 95ee7dd7fde5..611578a5c889 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/score_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/score_pb2.pyi @@ -26,7 +26,9 @@ class Score(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ScoreTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Score._ScoreType.ValueType], builtins.type): + class _ScoreTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Score._ScoreType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Curriculum: Score._ScoreType.ValueType # 1 """map generated score (from curriculum maps with special scoring)""" @@ -54,8 +56,12 @@ class Score(google.protobuf.message.Message): score: builtins.int | None = ..., score_details: global___ScoreDetails | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["score", b"score", "score_details", b"score_details", "score_type", b"score_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["score", b"score", "score_details", b"score_details", "score_type", b"score_type"]) -> None: ... + def HasField( + self, field_name: typing.Literal["score", b"score", "score_details", b"score_details", "score_type", b"score_type"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["score", b"score", "score_details", b"score_details", "score_type", b"score_type"] + ) -> None: ... global___Score = Score @@ -83,8 +89,18 @@ class CategoryScoreDetails(google.protobuf.message.Message): technology: builtins.float | None = ..., upgrade: builtins.float | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["army", b"army", "economy", b"economy", "none", b"none", "technology", b"technology", "upgrade", b"upgrade"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["army", b"army", "economy", b"economy", "none", b"none", "technology", b"technology", "upgrade", b"upgrade"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "army", b"army", "economy", b"economy", "none", b"none", "technology", b"technology", "upgrade", b"upgrade" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "army", b"army", "economy", b"economy", "none", b"none", "technology", b"technology", "upgrade", b"upgrade" + ], + ) -> None: ... global___CategoryScoreDetails = CategoryScoreDetails @@ -99,13 +115,11 @@ class VitalScoreDetails(google.protobuf.message.Message): shields: builtins.float energy: builtins.float def __init__( - self, - *, - life: builtins.float | None = ..., - shields: builtins.float | None = ..., - energy: builtins.float | None = ..., + self, *, life: builtins.float | None = ..., shields: builtins.float | None = ..., energy: builtins.float | None = ... ) -> None: ... - def HasField(self, field_name: typing.Literal["energy", b"energy", "life", b"life", "shields", b"shields"]) -> builtins.bool: ... + def HasField( + self, field_name: typing.Literal["energy", b"energy", "life", b"life", "shields", b"shields"] + ) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["energy", b"energy", "life", b"life", "shields", b"shields"]) -> None: ... global___VitalScoreDetails = VitalScoreDetails @@ -192,7 +206,7 @@ class ScoreDetails(google.protobuf.message.Message): @property def lost_minerals(self) -> global___CategoryScoreDetails: - """ Sum of lost minerals for the player in each category.""" + """Sum of lost minerals for the player in each category.""" @property def lost_vespene(self) -> global___CategoryScoreDetails: @@ -266,7 +280,127 @@ class ScoreDetails(google.protobuf.message.Message): current_apm: builtins.float | None = ..., current_effective_apm: builtins.float | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["collected_minerals", b"collected_minerals", "collected_vespene", b"collected_vespene", "collection_rate_minerals", b"collection_rate_minerals", "collection_rate_vespene", b"collection_rate_vespene", "current_apm", b"current_apm", "current_effective_apm", b"current_effective_apm", "food_used", b"food_used", "friendly_fire_minerals", b"friendly_fire_minerals", "friendly_fire_vespene", b"friendly_fire_vespene", "idle_production_time", b"idle_production_time", "idle_worker_time", b"idle_worker_time", "killed_minerals", b"killed_minerals", "killed_value_structures", b"killed_value_structures", "killed_value_units", b"killed_value_units", "killed_vespene", b"killed_vespene", "lost_minerals", b"lost_minerals", "lost_vespene", b"lost_vespene", "spent_minerals", b"spent_minerals", "spent_vespene", b"spent_vespene", "total_damage_dealt", b"total_damage_dealt", "total_damage_taken", b"total_damage_taken", "total_healed", b"total_healed", "total_used_minerals", b"total_used_minerals", "total_used_vespene", b"total_used_vespene", "total_value_structures", b"total_value_structures", "total_value_units", b"total_value_units", "used_minerals", b"used_minerals", "used_vespene", b"used_vespene"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["collected_minerals", b"collected_minerals", "collected_vespene", b"collected_vespene", "collection_rate_minerals", b"collection_rate_minerals", "collection_rate_vespene", b"collection_rate_vespene", "current_apm", b"current_apm", "current_effective_apm", b"current_effective_apm", "food_used", b"food_used", "friendly_fire_minerals", b"friendly_fire_minerals", "friendly_fire_vespene", b"friendly_fire_vespene", "idle_production_time", b"idle_production_time", "idle_worker_time", b"idle_worker_time", "killed_minerals", b"killed_minerals", "killed_value_structures", b"killed_value_structures", "killed_value_units", b"killed_value_units", "killed_vespene", b"killed_vespene", "lost_minerals", b"lost_minerals", "lost_vespene", b"lost_vespene", "spent_minerals", b"spent_minerals", "spent_vespene", b"spent_vespene", "total_damage_dealt", b"total_damage_dealt", "total_damage_taken", b"total_damage_taken", "total_healed", b"total_healed", "total_used_minerals", b"total_used_minerals", "total_used_vespene", b"total_used_vespene", "total_value_structures", b"total_value_structures", "total_value_units", b"total_value_units", "used_minerals", b"used_minerals", "used_vespene", b"used_vespene"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "collected_minerals", + b"collected_minerals", + "collected_vespene", + b"collected_vespene", + "collection_rate_minerals", + b"collection_rate_minerals", + "collection_rate_vespene", + b"collection_rate_vespene", + "current_apm", + b"current_apm", + "current_effective_apm", + b"current_effective_apm", + "food_used", + b"food_used", + "friendly_fire_minerals", + b"friendly_fire_minerals", + "friendly_fire_vespene", + b"friendly_fire_vespene", + "idle_production_time", + b"idle_production_time", + "idle_worker_time", + b"idle_worker_time", + "killed_minerals", + b"killed_minerals", + "killed_value_structures", + b"killed_value_structures", + "killed_value_units", + b"killed_value_units", + "killed_vespene", + b"killed_vespene", + "lost_minerals", + b"lost_minerals", + "lost_vespene", + b"lost_vespene", + "spent_minerals", + b"spent_minerals", + "spent_vespene", + b"spent_vespene", + "total_damage_dealt", + b"total_damage_dealt", + "total_damage_taken", + b"total_damage_taken", + "total_healed", + b"total_healed", + "total_used_minerals", + b"total_used_minerals", + "total_used_vespene", + b"total_used_vespene", + "total_value_structures", + b"total_value_structures", + "total_value_units", + b"total_value_units", + "used_minerals", + b"used_minerals", + "used_vespene", + b"used_vespene", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "collected_minerals", + b"collected_minerals", + "collected_vespene", + b"collected_vespene", + "collection_rate_minerals", + b"collection_rate_minerals", + "collection_rate_vespene", + b"collection_rate_vespene", + "current_apm", + b"current_apm", + "current_effective_apm", + b"current_effective_apm", + "food_used", + b"food_used", + "friendly_fire_minerals", + b"friendly_fire_minerals", + "friendly_fire_vespene", + b"friendly_fire_vespene", + "idle_production_time", + b"idle_production_time", + "idle_worker_time", + b"idle_worker_time", + "killed_minerals", + b"killed_minerals", + "killed_value_structures", + b"killed_value_structures", + "killed_value_units", + b"killed_value_units", + "killed_vespene", + b"killed_vespene", + "lost_minerals", + b"lost_minerals", + "lost_vespene", + b"lost_vespene", + "spent_minerals", + b"spent_minerals", + "spent_vespene", + b"spent_vespene", + "total_damage_dealt", + b"total_damage_dealt", + "total_damage_taken", + b"total_damage_taken", + "total_healed", + b"total_healed", + "total_used_minerals", + b"total_used_minerals", + "total_used_vespene", + b"total_used_vespene", + "total_value_structures", + b"total_value_structures", + "total_value_units", + b"total_value_units", + "used_minerals", + b"used_minerals", + "used_vespene", + b"used_vespene", + ], + ) -> None: ... global___ScoreDetails = ScoreDetails diff --git a/stubs/s2clientprotocol/s2clientprotocol/spatial_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/spatial_pb2.pyi index f2e686f08242..5fd2301ac84b 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/spatial_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/spatial_pb2.pyi @@ -36,12 +36,11 @@ class ObservationFeatureLayer(google.protobuf.message.Message): @property def minimap_renders(self) -> global___FeatureLayersMinimap: ... def __init__( - self, - *, - renders: global___FeatureLayers | None = ..., - minimap_renders: global___FeatureLayersMinimap | None = ..., + self, *, renders: global___FeatureLayers | None = ..., minimap_renders: global___FeatureLayersMinimap | None = ... ) -> None: ... - def HasField(self, field_name: typing.Literal["minimap_renders", b"minimap_renders", "renders", b"renders"]) -> builtins.bool: ... + def HasField( + self, field_name: typing.Literal["minimap_renders", b"minimap_renders", "renders", b"renders"] + ) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["minimap_renders", b"minimap_renders", "renders", b"renders"]) -> None: ... global___ObservationFeatureLayer = ObservationFeatureLayer @@ -216,8 +215,124 @@ class FeatureLayers(google.protobuf.message.Message): pathable: s2clientprotocol.common_pb2.ImageData | None = ..., placeholder: s2clientprotocol.common_pb2.ImageData | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["active", b"active", "blip", b"blip", "buff_duration", b"buff_duration", "buffs", b"buffs", "build_progress", b"build_progress", "buildable", b"buildable", "cloaked", b"cloaked", "creep", b"creep", "effects", b"effects", "hallucinations", b"hallucinations", "height_map", b"height_map", "pathable", b"pathable", "placeholder", b"placeholder", "player_id", b"player_id", "player_relative", b"player_relative", "power", b"power", "selected", b"selected", "unit_density", b"unit_density", "unit_density_aa", b"unit_density_aa", "unit_energy", b"unit_energy", "unit_energy_ratio", b"unit_energy_ratio", "unit_hit_points", b"unit_hit_points", "unit_hit_points_ratio", b"unit_hit_points_ratio", "unit_shields", b"unit_shields", "unit_shields_ratio", b"unit_shields_ratio", "unit_type", b"unit_type", "visibility_map", b"visibility_map"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["active", b"active", "blip", b"blip", "buff_duration", b"buff_duration", "buffs", b"buffs", "build_progress", b"build_progress", "buildable", b"buildable", "cloaked", b"cloaked", "creep", b"creep", "effects", b"effects", "hallucinations", b"hallucinations", "height_map", b"height_map", "pathable", b"pathable", "placeholder", b"placeholder", "player_id", b"player_id", "player_relative", b"player_relative", "power", b"power", "selected", b"selected", "unit_density", b"unit_density", "unit_density_aa", b"unit_density_aa", "unit_energy", b"unit_energy", "unit_energy_ratio", b"unit_energy_ratio", "unit_hit_points", b"unit_hit_points", "unit_hit_points_ratio", b"unit_hit_points_ratio", "unit_shields", b"unit_shields", "unit_shields_ratio", b"unit_shields_ratio", "unit_type", b"unit_type", "visibility_map", b"visibility_map"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "active", + b"active", + "blip", + b"blip", + "buff_duration", + b"buff_duration", + "buffs", + b"buffs", + "build_progress", + b"build_progress", + "buildable", + b"buildable", + "cloaked", + b"cloaked", + "creep", + b"creep", + "effects", + b"effects", + "hallucinations", + b"hallucinations", + "height_map", + b"height_map", + "pathable", + b"pathable", + "placeholder", + b"placeholder", + "player_id", + b"player_id", + "player_relative", + b"player_relative", + "power", + b"power", + "selected", + b"selected", + "unit_density", + b"unit_density", + "unit_density_aa", + b"unit_density_aa", + "unit_energy", + b"unit_energy", + "unit_energy_ratio", + b"unit_energy_ratio", + "unit_hit_points", + b"unit_hit_points", + "unit_hit_points_ratio", + b"unit_hit_points_ratio", + "unit_shields", + b"unit_shields", + "unit_shields_ratio", + b"unit_shields_ratio", + "unit_type", + b"unit_type", + "visibility_map", + b"visibility_map", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "active", + b"active", + "blip", + b"blip", + "buff_duration", + b"buff_duration", + "buffs", + b"buffs", + "build_progress", + b"build_progress", + "buildable", + b"buildable", + "cloaked", + b"cloaked", + "creep", + b"creep", + "effects", + b"effects", + "hallucinations", + b"hallucinations", + "height_map", + b"height_map", + "pathable", + b"pathable", + "placeholder", + b"placeholder", + "player_id", + b"player_id", + "player_relative", + b"player_relative", + "power", + b"power", + "selected", + b"selected", + "unit_density", + b"unit_density", + "unit_density_aa", + b"unit_density_aa", + "unit_energy", + b"unit_energy", + "unit_energy_ratio", + b"unit_energy_ratio", + "unit_hit_points", + b"unit_hit_points", + "unit_hit_points_ratio", + b"unit_hit_points_ratio", + "unit_shields", + b"unit_shields", + "unit_shields_ratio", + b"unit_shields_ratio", + "unit_type", + b"unit_type", + "visibility_map", + b"visibility_map", + ], + ) -> None: ... global___FeatureLayers = FeatureLayers @@ -297,8 +412,60 @@ class FeatureLayersMinimap(google.protobuf.message.Message): pathable: s2clientprotocol.common_pb2.ImageData | None = ..., unit_type: s2clientprotocol.common_pb2.ImageData | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["alerts", b"alerts", "buildable", b"buildable", "camera", b"camera", "creep", b"creep", "height_map", b"height_map", "pathable", b"pathable", "player_id", b"player_id", "player_relative", b"player_relative", "selected", b"selected", "unit_type", b"unit_type", "visibility_map", b"visibility_map"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["alerts", b"alerts", "buildable", b"buildable", "camera", b"camera", "creep", b"creep", "height_map", b"height_map", "pathable", b"pathable", "player_id", b"player_id", "player_relative", b"player_relative", "selected", b"selected", "unit_type", b"unit_type", "visibility_map", b"visibility_map"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "alerts", + b"alerts", + "buildable", + b"buildable", + "camera", + b"camera", + "creep", + b"creep", + "height_map", + b"height_map", + "pathable", + b"pathable", + "player_id", + b"player_id", + "player_relative", + b"player_relative", + "selected", + b"selected", + "unit_type", + b"unit_type", + "visibility_map", + b"visibility_map", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "alerts", + b"alerts", + "buildable", + b"buildable", + "camera", + b"camera", + "creep", + b"creep", + "height_map", + b"height_map", + "pathable", + b"pathable", + "player_id", + b"player_id", + "player_relative", + b"player_relative", + "selected", + b"selected", + "unit_type", + b"unit_type", + "visibility_map", + b"visibility_map", + ], + ) -> None: ... global___FeatureLayersMinimap = FeatureLayersMinimap @@ -355,9 +522,39 @@ class ActionSpatial(google.protobuf.message.Message): unit_selection_point: global___ActionSpatialUnitSelectionPoint | None = ..., unit_selection_rect: global___ActionSpatialUnitSelectionRect | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["action", b"action", "camera_move", b"camera_move", "unit_command", b"unit_command", "unit_selection_point", b"unit_selection_point", "unit_selection_rect", b"unit_selection_rect"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["action", b"action", "camera_move", b"camera_move", "unit_command", b"unit_command", "unit_selection_point", b"unit_selection_point", "unit_selection_rect", b"unit_selection_rect"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["action", b"action"]) -> typing.Literal["unit_command", "camera_move", "unit_selection_point", "unit_selection_rect"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "action", + b"action", + "camera_move", + b"camera_move", + "unit_command", + b"unit_command", + "unit_selection_point", + b"unit_selection_point", + "unit_selection_rect", + b"unit_selection_rect", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "action", + b"action", + "camera_move", + b"camera_move", + "unit_command", + b"unit_command", + "unit_selection_point", + b"unit_selection_point", + "unit_selection_rect", + b"unit_selection_rect", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["action", b"action"] + ) -> typing.Literal["unit_command", "camera_move", "unit_selection_point", "unit_selection_rect"] | None: ... global___ActionSpatial = ActionSpatial @@ -384,9 +581,39 @@ class ActionSpatialUnitCommand(google.protobuf.message.Message): target_minimap_coord: s2clientprotocol.common_pb2.PointI | None = ..., queue_command: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "queue_command", b"queue_command", "target", b"target", "target_minimap_coord", b"target_minimap_coord", "target_screen_coord", b"target_screen_coord"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "queue_command", b"queue_command", "target", b"target", "target_minimap_coord", b"target_minimap_coord", "target_screen_coord", b"target_screen_coord"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["target", b"target"]) -> typing.Literal["target_screen_coord", "target_minimap_coord"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "queue_command", + b"queue_command", + "target", + b"target", + "target_minimap_coord", + b"target_minimap_coord", + "target_screen_coord", + b"target_screen_coord", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "ability_id", + b"ability_id", + "queue_command", + b"queue_command", + "target", + b"target", + "target_minimap_coord", + b"target_minimap_coord", + "target_screen_coord", + b"target_screen_coord", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["target", b"target"] + ) -> typing.Literal["target_screen_coord", "target_minimap_coord"] | None: ... global___ActionSpatialUnitCommand = ActionSpatialUnitCommand @@ -399,11 +626,7 @@ class ActionSpatialCameraMove(google.protobuf.message.Message): def center_minimap(self) -> s2clientprotocol.common_pb2.PointI: """Simulates a click on the minimap to move the camera.""" - def __init__( - self, - *, - center_minimap: s2clientprotocol.common_pb2.PointI | None = ..., - ) -> None: ... + def __init__(self, *, center_minimap: s2clientprotocol.common_pb2.PointI | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["center_minimap", b"center_minimap"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["center_minimap", b"center_minimap"]) -> None: ... 
@@ -417,7 +640,10 @@ class ActionSpatialUnitSelectionPoint(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionSpatialUnitSelectionPoint._Type.ValueType], builtins.type): + class _TypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionSpatialUnitSelectionPoint._Type.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Select: ActionSpatialUnitSelectionPoint._Type.ValueType # 1 """Equivalent to normal click. Changes selection to unit.""" @@ -449,8 +675,12 @@ class ActionSpatialUnitSelectionPoint(google.protobuf.message.Message): selection_screen_coord: s2clientprotocol.common_pb2.PointI | None = ..., type: global___ActionSpatialUnitSelectionPoint.Type.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["selection_screen_coord", b"selection_screen_coord", "type", b"type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["selection_screen_coord", b"selection_screen_coord", "type", b"type"]) -> None: ... + def HasField( + self, field_name: typing.Literal["selection_screen_coord", b"selection_screen_coord", "type", b"type"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["selection_screen_coord", b"selection_screen_coord", "type", b"type"] + ) -> None: ... global___ActionSpatialUnitSelectionPoint = ActionSpatialUnitSelectionPoint @@ -463,7 +693,9 @@ class ActionSpatialUnitSelectionRect(google.protobuf.message.Message): selection_add: builtins.bool """Equivalent to shift+drag. Adds units to selection.""" @property - def selection_screen_coord(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.common_pb2.RectangleI]: + def selection_screen_coord( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[s2clientprotocol.common_pb2.RectangleI]: """Eventually this should not be an array, but a single field (multiple would be cheating).""" def __init__( @@ -473,6 +705,8 @@ class ActionSpatialUnitSelectionRect(google.protobuf.message.Message): selection_add: builtins.bool | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["selection_add", b"selection_add"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["selection_add", b"selection_add", "selection_screen_coord", b"selection_screen_coord"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["selection_add", b"selection_add", "selection_screen_coord", b"selection_screen_coord"] + ) -> None: ... global___ActionSpatialUnitSelectionRect = ActionSpatialUnitSelectionRect diff --git a/stubs/s2clientprotocol/s2clientprotocol/ui_pb2.pyi b/stubs/s2clientprotocol/s2clientprotocol/ui_pb2.pyi index 65a0e165a43d..2608a8aa142c 100644 --- a/stubs/s2clientprotocol/s2clientprotocol/ui_pb2.pyi +++ b/stubs/s2clientprotocol/s2clientprotocol/ui_pb2.pyi @@ -52,9 +52,32 @@ class ObservationUI(google.protobuf.message.Message): cargo: global___CargoPanel | None = ..., production: global___ProductionPanel | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["cargo", b"cargo", "multi", b"multi", "panel", b"panel", "production", b"production", "single", b"single"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["cargo", b"cargo", "groups", b"groups", "multi", b"multi", "panel", b"panel", "production", b"production", "single", b"single"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["panel", b"panel"]) -> typing.Literal["single", "multi", "cargo", "production"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "cargo", b"cargo", "multi", b"multi", "panel", b"panel", "production", b"production", "single", b"single" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "cargo", + b"cargo", + "groups", + b"groups", + "multi", + b"multi", + "panel", + b"panel", + "production", + b"production", + "single", + b"single", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["panel", b"panel"] + ) -> typing.Literal["single", "multi", "cargo", "production"] | None: ... global___ObservationUI = ObservationUI @@ -75,8 +98,18 @@ class ControlGroup(google.protobuf.message.Message): leader_unit_type: builtins.int | None = ..., count: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["control_group_index", b"control_group_index", "count", b"count", "leader_unit_type", b"leader_unit_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["control_group_index", b"control_group_index", "count", b"count", "leader_unit_type", b"leader_unit_type"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "control_group_index", b"control_group_index", "count", b"count", "leader_unit_type", b"leader_unit_type" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "control_group_index", b"control_group_index", "count", b"count", "leader_unit_type", b"leader_unit_type" + ], + ) -> None: ... global___ControlGroup = ControlGroup @@ -123,8 +156,60 @@ class UnitInfo(google.protobuf.message.Message): max_shields: builtins.int | None = ..., max_energy: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["add_on", b"add_on", "build_progress", b"build_progress", "energy", b"energy", "health", b"health", "max_energy", b"max_energy", "max_health", b"max_health", "max_shields", b"max_shields", "player_relative", b"player_relative", "shields", b"shields", "transport_slots_taken", b"transport_slots_taken", "unit_type", b"unit_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["add_on", b"add_on", "build_progress", b"build_progress", "energy", b"energy", "health", b"health", "max_energy", b"max_energy", "max_health", b"max_health", "max_shields", b"max_shields", "player_relative", b"player_relative", "shields", b"shields", "transport_slots_taken", b"transport_slots_taken", "unit_type", b"unit_type"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "add_on", + b"add_on", + "build_progress", + b"build_progress", + "energy", + b"energy", + "health", + b"health", + "max_energy", + b"max_energy", + "max_health", + b"max_health", + "max_shields", + b"max_shields", + "player_relative", + b"player_relative", + "shields", + b"shields", + "transport_slots_taken", + b"transport_slots_taken", + "unit_type", + b"unit_type", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "add_on", + b"add_on", + "build_progress", + b"build_progress", + "energy", + b"energy", + "health", + b"health", + "max_energy", + b"max_energy", + "max_health", + b"max_health", + "max_shields", + b"max_shields", + "player_relative", + b"player_relative", + "shields", + b"shields", + "transport_slots_taken", + b"transport_slots_taken", + "unit_type", + b"unit_type", + ], + ) -> None: ... global___UnitInfo = UnitInfo @@ -153,8 +238,34 @@ class SinglePanel(google.protobuf.message.Message): shield_upgrade_level: builtins.int | None = ..., buffs: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["armor_upgrade_level", b"armor_upgrade_level", "attack_upgrade_level", b"attack_upgrade_level", "shield_upgrade_level", b"shield_upgrade_level", "unit", b"unit"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["armor_upgrade_level", b"armor_upgrade_level", "attack_upgrade_level", b"attack_upgrade_level", "buffs", b"buffs", "shield_upgrade_level", b"shield_upgrade_level", "unit", b"unit"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "armor_upgrade_level", + b"armor_upgrade_level", + "attack_upgrade_level", + b"attack_upgrade_level", + "shield_upgrade_level", + b"shield_upgrade_level", + "unit", + b"unit", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "armor_upgrade_level", + b"armor_upgrade_level", + "attack_upgrade_level", + b"attack_upgrade_level", + "buffs", + b"buffs", + "shield_upgrade_level", + b"shield_upgrade_level", + "unit", + b"unit", + ], + ) -> None: ... global___SinglePanel = SinglePanel @@ -165,11 +276,7 @@ class MultiPanel(google.protobuf.message.Message): UNITS_FIELD_NUMBER: builtins.int @property def units(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UnitInfo]: ... - def __init__( - self, - *, - units: collections.abc.Iterable[global___UnitInfo] | None = ..., - ) -> None: ... + def __init__(self, *, units: collections.abc.Iterable[global___UnitInfo] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["units", b"units"]) -> None: ... global___MultiPanel = MultiPanel @@ -195,7 +302,9 @@ class CargoPanel(google.protobuf.message.Message): slots_available: builtins.int | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["slots_available", b"slots_available", "unit", b"unit"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["passengers", b"passengers", "slots_available", b"slots_available", "unit", b"unit"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["passengers", b"passengers", "slots_available", b"slots_available", "unit", b"unit"] + ) -> None: ... global___CargoPanel = CargoPanel @@ -208,14 +317,13 @@ class BuildItem(google.protobuf.message.Message): ability_id: builtins.int build_progress: builtins.float """Range: [0.0, 1.0]""" - def __init__( - self, - *, - ability_id: builtins.int | None = ..., - build_progress: builtins.float | None = ..., + def __init__(self, *, ability_id: builtins.int | None = ..., build_progress: builtins.float | None = ...) -> None: ... + def HasField( + self, field_name: typing.Literal["ability_id", b"ability_id", "build_progress", b"build_progress"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["ability_id", b"ability_id", "build_progress", b"build_progress"] ) -> None: ... 
- def HasField(self, field_name: typing.Literal["ability_id", b"ability_id", "build_progress", b"build_progress"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id", "build_progress", b"build_progress"]) -> None: ... global___BuildItem = BuildItem @@ -244,7 +352,9 @@ class ProductionPanel(google.protobuf.message.Message): production_queue: collections.abc.Iterable[global___BuildItem] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["unit", b"unit"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["build_queue", b"build_queue", "production_queue", b"production_queue", "unit", b"unit"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["build_queue", b"build_queue", "production_queue", b"production_queue", "unit", b"unit"] + ) -> None: ... global___ProductionPanel = ProductionPanel @@ -296,9 +406,72 @@ class ActionUI(google.protobuf.message.Message): production_panel: global___ActionProductionPanelRemoveFromQueue | None = ..., toggle_autocast: global___ActionToggleAutocast | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["action", b"action", "cargo_panel", b"cargo_panel", "control_group", b"control_group", "multi_panel", b"multi_panel", "production_panel", b"production_panel", "select_army", b"select_army", "select_idle_worker", b"select_idle_worker", "select_larva", b"select_larva", "select_warp_gates", b"select_warp_gates", "toggle_autocast", b"toggle_autocast"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["action", b"action", "cargo_panel", b"cargo_panel", "control_group", b"control_group", "multi_panel", b"multi_panel", "production_panel", b"production_panel", "select_army", b"select_army", "select_idle_worker", b"select_idle_worker", "select_larva", b"select_larva", "select_warp_gates", b"select_warp_gates", "toggle_autocast", b"toggle_autocast"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["action", b"action"]) -> typing.Literal["control_group", "select_army", "select_warp_gates", "select_larva", "select_idle_worker", "multi_panel", "cargo_panel", "production_panel", "toggle_autocast"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "action", + b"action", + "cargo_panel", + b"cargo_panel", + "control_group", + b"control_group", + "multi_panel", + b"multi_panel", + "production_panel", + b"production_panel", + "select_army", + b"select_army", + "select_idle_worker", + b"select_idle_worker", + "select_larva", + b"select_larva", + "select_warp_gates", + b"select_warp_gates", + "toggle_autocast", + b"toggle_autocast", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "action", + b"action", + "cargo_panel", + b"cargo_panel", + "control_group", + b"control_group", + "multi_panel", + b"multi_panel", + "production_panel", + b"production_panel", + "select_army", + b"select_army", + "select_idle_worker", + b"select_idle_worker", + "select_larva", + b"select_larva", + "select_warp_gates", + b"select_warp_gates", + "toggle_autocast", + b"toggle_autocast", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["action", b"action"] + ) -> ( + typing.Literal[ + "control_group", + "select_army", + "select_warp_gates", + "select_larva", + "select_idle_worker", + "multi_panel", + "cargo_panel", + "production_panel", + "toggle_autocast", + ] + | None + ): ... 
global___ActionUI = ActionUI @@ -310,7 +483,10 @@ class ActionControlGroup(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ControlGroupActionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionControlGroup._ControlGroupAction.ValueType], builtins.type): + class _ControlGroupActionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionControlGroup._ControlGroupAction.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Recall: ActionControlGroup._ControlGroupAction.ValueType # 1 """Equivalent to number hotkey. Replaces current selection with control group.""" @@ -345,8 +521,12 @@ class ActionControlGroup(google.protobuf.message.Message): action: global___ActionControlGroup.ControlGroupAction.ValueType | None = ..., control_group_index: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["action", b"action", "control_group_index", b"control_group_index"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["action", b"action", "control_group_index", b"control_group_index"]) -> None: ... + def HasField( + self, field_name: typing.Literal["action", b"action", "control_group_index", b"control_group_index"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["action", b"action", "control_group_index", b"control_group_index"] + ) -> None: ... global___ActionControlGroup = ActionControlGroup @@ -356,11 +536,7 @@ class ActionSelectArmy(google.protobuf.message.Message): SELECTION_ADD_FIELD_NUMBER: builtins.int selection_add: builtins.bool - def __init__( - self, - *, - selection_add: builtins.bool | None = ..., - ) -> None: ... + def __init__(self, *, selection_add: builtins.bool | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["selection_add", b"selection_add"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["selection_add", b"selection_add"]) -> None: ... @@ -372,11 +548,7 @@ class ActionSelectWarpGates(google.protobuf.message.Message): SELECTION_ADD_FIELD_NUMBER: builtins.int selection_add: builtins.bool - def __init__( - self, - *, - selection_add: builtins.bool | None = ..., - ) -> None: ... + def __init__(self, *, selection_add: builtins.bool | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["selection_add", b"selection_add"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["selection_add", b"selection_add"]) -> None: ... @@ -386,9 +558,7 @@ global___ActionSelectWarpGates = ActionSelectWarpGates class ActionSelectLarva(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ActionSelectLarva = ActionSelectLarva @@ -400,7 +570,9 @@ class ActionSelectIdleWorker(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionSelectIdleWorker._Type.ValueType], builtins.type): + class _TypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionSelectIdleWorker._Type.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor Set: ActionSelectIdleWorker._Type.ValueType # 1 """Equivalent to click with no modifiers. 
Replaces selection with single idle worker.""" @@ -423,11 +595,7 @@ class ActionSelectIdleWorker(google.protobuf.message.Message): TYPE_FIELD_NUMBER: builtins.int type: global___ActionSelectIdleWorker.Type.ValueType - def __init__( - self, - *, - type: global___ActionSelectIdleWorker.Type.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, type: global___ActionSelectIdleWorker.Type.ValueType | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["type", b"type"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["type", b"type"]) -> None: ... @@ -441,7 +609,9 @@ class ActionMultiPanel(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionMultiPanel._Type.ValueType], builtins.type): + class _TypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ActionMultiPanel._Type.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor SingleSelect: ActionMultiPanel._Type.ValueType # 1 """Click on icon""" @@ -467,10 +637,7 @@ class ActionMultiPanel(google.protobuf.message.Message): type: global___ActionMultiPanel.Type.ValueType unit_index: builtins.int def __init__( - self, - *, - type: global___ActionMultiPanel.Type.ValueType | None = ..., - unit_index: builtins.int | None = ..., + self, *, type: global___ActionMultiPanel.Type.ValueType | None = ..., unit_index: builtins.int | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["type", b"type", "unit_index", b"unit_index"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["type", b"type", "unit_index", b"unit_index"]) -> None: ... @@ -483,11 +650,7 @@ class ActionCargoPanelUnload(google.protobuf.message.Message): UNIT_INDEX_FIELD_NUMBER: builtins.int unit_index: builtins.int - def __init__( - self, - *, - unit_index: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, unit_index: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["unit_index", b"unit_index"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["unit_index", b"unit_index"]) -> None: ... @@ -499,11 +662,7 @@ class ActionProductionPanelRemoveFromQueue(google.protobuf.message.Message): UNIT_INDEX_FIELD_NUMBER: builtins.int unit_index: builtins.int - def __init__( - self, - *, - unit_index: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, unit_index: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["unit_index", b"unit_index"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["unit_index", b"unit_index"]) -> None: ... @@ -515,11 +674,7 @@ class ActionToggleAutocast(google.protobuf.message.Message): ABILITY_ID_FIELD_NUMBER: builtins.int ability_id: builtins.int - def __init__( - self, - *, - ability_id: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, ability_id: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["ability_id", b"ability_id"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["ability_id", b"ability_id"]) -> None: ... 
diff --git a/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_pb2.pyi b/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_pb2.pyi index 25dd0678d051..9057959cc005 100644 --- a/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_pb2.pyi +++ b/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_pb2.pyi @@ -38,7 +38,9 @@ class _CustomCallSchedule: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _CustomCallScheduleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CustomCallSchedule.ValueType], builtins.type): +class _CustomCallScheduleEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CustomCallSchedule.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor SCHEDULE_NONE: _CustomCallSchedule.ValueType # 0 SCHEDULE_LATEST: _CustomCallSchedule.ValueType # 1 @@ -55,7 +57,9 @@ class _CustomCallApiVersion: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _CustomCallApiVersionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CustomCallApiVersion.ValueType], builtins.type): +class _CustomCallApiVersionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CustomCallApiVersion.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor API_VERSION_UNSPECIFIED: _CustomCallApiVersion.ValueType # 0 API_VERSION_ORIGINAL: _CustomCallApiVersion.ValueType # 1 @@ -237,11 +241,7 @@ class HloInstructionProto(google.protobuf.message.Message): limit: builtins.int stride: builtins.int def __init__( - self, - *, - start: builtins.int | None = ..., - limit: builtins.int | None = ..., - stride: builtins.int | None = ..., + self, *, start: builtins.int | None = ..., limit: builtins.int | None = ..., stride: builtins.int | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["limit", b"limit", "start", b"start", "stride", b"stride"]) -> None: ... @@ -461,7 +461,9 @@ class HloInstructionProto(google.protobuf.message.Message): """Describes the dimension numbers used for a convolution.""" @property - def slice_dimensions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloInstructionProto.SliceDimensions]: ... + def slice_dimensions( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloInstructionProto.SliceDimensions]: ... @property def dynamic_slice_sizes(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: """Describes the [start, start + size) range size for a dynamic slice @@ -501,7 +503,9 @@ class HloInstructionProto(google.protobuf.message.Message): @property def sharding(self) -> tensorflow.compiler.xla.xla_data_pb2.OpSharding: ... @property - def replica_groups(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.ReplicaGroup]: + def replica_groups( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.ReplicaGroup]: """Deprecated, but keeping for backward compatibility. Use collective_device_list. Cross replica op fields. """ @@ -513,7 +517,9 @@ class HloInstructionProto(google.protobuf.message.Message): """Precision configuration for the instruction. 
Has backend-specific meaning.""" @property - def source_target_pairs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.SourceTarget]: + def source_target_pairs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.SourceTarget]: """Collective permute field.""" @property @@ -523,7 +529,9 @@ class HloInstructionProto(google.protobuf.message.Message): @property def domain_exit_sharding(self) -> tensorflow.compiler.xla.xla_data_pb2.OpSharding: ... @property - def operand_shapes_with_layout(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.ShapeProto]: ... + def operand_shapes_with_layout( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.ShapeProto]: ... @property def triangular_solve_options(self) -> tensorflow.compiler.xla.xla_data_pb2.TriangularSolveOptions: """Options for TriangularSolve""" @@ -537,7 +545,11 @@ class HloInstructionProto(google.protobuf.message.Message): """Describes how parameters behave with regards to replicas.""" @property - def output_operand_aliasing(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.OutputOperandAliasing]: + def output_operand_aliasing( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.compiler.xla.xla_data_pb2.OutputOperandAliasing + ]: """A list of OutputOperandAliasing pairs that specifies aliasing buffers between output and operands for kCustomCall and kFusion. """ @@ -553,7 +565,11 @@ class HloInstructionProto(google.protobuf.message.Message): """ @property - def dot_sparsity(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.SparsityDescriptor]: + def dot_sparsity( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.compiler.xla.xla_data_pb2.SparsityDescriptor + ]: """Sparsity descriptor for dot operation.""" @property @@ -621,7 +637,9 @@ class HloInstructionProto(google.protobuf.message.Message): cholesky_options: tensorflow.compiler.xla.xla_data_pb2.CholeskyOptions | None = ..., parameter_replication: tensorflow.compiler.xla.xla_data_pb2.ParameterReplication | None = ..., custom_call_has_side_effect: builtins.bool | None = ..., - output_operand_aliasing: collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.OutputOperandAliasing] | None = ..., + output_operand_aliasing: ( + collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.OutputOperandAliasing] | None + ) = ..., custom_call_schedule: global___CustomCallSchedule.ValueType | None = ..., delta: builtins.int | None = ..., indices_are_sorted: builtins.bool | None = ..., @@ -642,9 +660,215 @@ class HloInstructionProto(google.protobuf.message.Message): original_value: tensorflow.compiler.xla.xla_data_pb2.OriginalValueProto | None = ..., is_composite: builtins.bool | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["cholesky_options", b"cholesky_options", "collective_device_list", b"collective_device_list", "convolution_dimension_numbers", b"convolution_dimension_numbers", "cross_program_prefetch_index", b"cross_program_prefetch_index", "domain_entry_sharding", b"domain_entry_sharding", "domain_exit_sharding", b"domain_exit_sharding", "dot_dimension_numbers", b"dot_dimension_numbers", "frontend_attributes", b"frontend_attributes", "gather_dimension_numbers", b"gather_dimension_numbers", "literal", b"literal", "metadata", b"metadata", "optional_cross_program_prefetch_index", b"optional_cross_program_prefetch_index", "original_value", b"original_value", "outfeed_shape", b"outfeed_shape", "padding_config", b"padding_config", "parameter_replication", b"parameter_replication", "precision_config", b"precision_config", "scatter_dimension_numbers", b"scatter_dimension_numbers", "shape", b"shape", "sharding", b"sharding", "statistics_viz", b"statistics_viz", "triangular_solve_options", b"triangular_solve_options", "window", b"window"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["all_reduce_id", b"all_reduce_id", "async_execution_thread", b"async_execution_thread", "backend_config", b"backend_config", "batch_group_count", b"batch_group_count", "called_computation_ids", b"called_computation_ids", "channel_id", b"channel_id", "cholesky_options", b"cholesky_options", "collective_device_list", b"collective_device_list", "comparison_direction", b"comparison_direction", "comparison_type", b"comparison_type", "constrain_layout", b"constrain_layout", "control_predecessor_ids", b"control_predecessor_ids", "convolution_dimension_numbers", b"convolution_dimension_numbers", "cross_program_prefetch_index", b"cross_program_prefetch_index", "custom_call_api_version", b"custom_call_api_version", "custom_call_has_side_effect", b"custom_call_has_side_effect", "custom_call_schedule", b"custom_call_schedule", "custom_call_target", b"custom_call_target", "delta", b"delta", "dimensions", b"dimensions", "distribution", b"distribution", "domain_entry_sharding", b"domain_entry_sharding", "domain_exit_sharding", b"domain_exit_sharding", "dot_dimension_numbers", b"dot_dimension_numbers", "dot_sparsity", b"dot_sparsity", "dynamic_slice_sizes", b"dynamic_slice_sizes", "epsilon", b"epsilon", "exponent_bits", b"exponent_bits", "feature_group_count", b"feature_group_count", "feature_index", b"feature_index", "fft_length", b"fft_length", "fft_type", b"fft_type", "frontend_attributes", b"frontend_attributes", "fusion_kind", b"fusion_kind", "gather_dimension_numbers", b"gather_dimension_numbers", "gather_slice_sizes", b"gather_slice_sizes", "id", b"id", "indices_are_sorted", b"indices_are_sorted", "infeed_config", b"infeed_config", "is_composite", b"is_composite", "is_cross_program_prefetch", b"is_cross_program_prefetch", "is_host_transfer", b"is_host_transfer", "is_stable", b"is_stable", "k", b"k", "largest", b"largest", "literal", b"literal", "mantissa_bits", b"mantissa_bits", "metadata", b"metadata", "name", b"name", "opcode", b"opcode", "operand_ids", b"operand_ids", "operand_shapes_with_layout", b"operand_shapes_with_layout", "optional_cross_program_prefetch_index", b"optional_cross_program_prefetch_index", "original_value", b"original_value", "outfeed_config", b"outfeed_config", "outfeed_shape", b"outfeed_shape", "output_operand_aliasing", b"output_operand_aliasing", "padding_config", b"padding_config", "padding_type", b"padding_type", "parameter_number", 
b"parameter_number", "parameter_replication", b"parameter_replication", "precision_config", b"precision_config", "replica_groups", b"replica_groups", "rng_algorithm", b"rng_algorithm", "scatter_dimension_numbers", b"scatter_dimension_numbers", "shape", b"shape", "sharding", b"sharding", "slice_dimensions", b"slice_dimensions", "source_target_pairs", b"source_target_pairs", "statistics_viz", b"statistics_viz", "triangular_solve_options", b"triangular_solve_options", "tuple_index", b"tuple_index", "unique_indices", b"unique_indices", "use_global_device_ids", b"use_global_device_ids", "window", b"window"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["optional_cross_program_prefetch_index", b"optional_cross_program_prefetch_index"]) -> typing.Literal["cross_program_prefetch_index"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "cholesky_options", + b"cholesky_options", + "collective_device_list", + b"collective_device_list", + "convolution_dimension_numbers", + b"convolution_dimension_numbers", + "cross_program_prefetch_index", + b"cross_program_prefetch_index", + "domain_entry_sharding", + b"domain_entry_sharding", + "domain_exit_sharding", + b"domain_exit_sharding", + "dot_dimension_numbers", + b"dot_dimension_numbers", + "frontend_attributes", + b"frontend_attributes", + "gather_dimension_numbers", + b"gather_dimension_numbers", + "literal", + b"literal", + "metadata", + b"metadata", + "optional_cross_program_prefetch_index", + b"optional_cross_program_prefetch_index", + "original_value", + b"original_value", + "outfeed_shape", + b"outfeed_shape", + "padding_config", + b"padding_config", + "parameter_replication", + b"parameter_replication", + "precision_config", + b"precision_config", + "scatter_dimension_numbers", + b"scatter_dimension_numbers", + "shape", + b"shape", + "sharding", + b"sharding", + "statistics_viz", + b"statistics_viz", + "triangular_solve_options", + b"triangular_solve_options", + "window", + b"window", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "all_reduce_id", + b"all_reduce_id", + "async_execution_thread", + b"async_execution_thread", + "backend_config", + b"backend_config", + "batch_group_count", + b"batch_group_count", + "called_computation_ids", + b"called_computation_ids", + "channel_id", + b"channel_id", + "cholesky_options", + b"cholesky_options", + "collective_device_list", + b"collective_device_list", + "comparison_direction", + b"comparison_direction", + "comparison_type", + b"comparison_type", + "constrain_layout", + b"constrain_layout", + "control_predecessor_ids", + b"control_predecessor_ids", + "convolution_dimension_numbers", + b"convolution_dimension_numbers", + "cross_program_prefetch_index", + b"cross_program_prefetch_index", + "custom_call_api_version", + b"custom_call_api_version", + "custom_call_has_side_effect", + b"custom_call_has_side_effect", + "custom_call_schedule", + b"custom_call_schedule", + "custom_call_target", + b"custom_call_target", + "delta", + b"delta", + "dimensions", + b"dimensions", + "distribution", + b"distribution", + "domain_entry_sharding", + b"domain_entry_sharding", + "domain_exit_sharding", + b"domain_exit_sharding", + "dot_dimension_numbers", + b"dot_dimension_numbers", + "dot_sparsity", + b"dot_sparsity", + "dynamic_slice_sizes", + b"dynamic_slice_sizes", + "epsilon", + b"epsilon", + "exponent_bits", + b"exponent_bits", + "feature_group_count", + b"feature_group_count", + "feature_index", + b"feature_index", + "fft_length", + b"fft_length", + "fft_type", + b"fft_type", + "frontend_attributes", + b"frontend_attributes", + "fusion_kind", + b"fusion_kind", + "gather_dimension_numbers", + b"gather_dimension_numbers", + "gather_slice_sizes", + b"gather_slice_sizes", + "id", + b"id", + "indices_are_sorted", + b"indices_are_sorted", + "infeed_config", + b"infeed_config", + "is_composite", + b"is_composite", + "is_cross_program_prefetch", + b"is_cross_program_prefetch", + "is_host_transfer", + b"is_host_transfer", + "is_stable", + b"is_stable", + "k", + b"k", + "largest", + b"largest", + "literal", + b"literal", + "mantissa_bits", + b"mantissa_bits", + "metadata", + b"metadata", + "name", + b"name", + "opcode", + b"opcode", + "operand_ids", + b"operand_ids", + "operand_shapes_with_layout", + b"operand_shapes_with_layout", + "optional_cross_program_prefetch_index", + b"optional_cross_program_prefetch_index", + "original_value", + b"original_value", + "outfeed_config", + b"outfeed_config", + "outfeed_shape", + b"outfeed_shape", + "output_operand_aliasing", + b"output_operand_aliasing", + "padding_config", + b"padding_config", + "padding_type", + b"padding_type", + "parameter_number", + b"parameter_number", + "parameter_replication", + b"parameter_replication", + "precision_config", + b"precision_config", + "replica_groups", + b"replica_groups", + "rng_algorithm", + b"rng_algorithm", + "scatter_dimension_numbers", + b"scatter_dimension_numbers", + "shape", + b"shape", + "sharding", + b"sharding", + "slice_dimensions", + b"slice_dimensions", + "source_target_pairs", + b"source_target_pairs", + "statistics_viz", + b"statistics_viz", + "triangular_solve_options", + b"triangular_solve_options", + "tuple_index", + b"tuple_index", + "unique_indices", + b"unique_indices", + "use_global_device_ids", + b"use_global_device_ids", + "window", + b"window", + ], + ) -> None: ... 
+ def WhichOneof( + self, oneof_group: typing.Literal["optional_cross_program_prefetch_index", b"optional_cross_program_prefetch_index"] + ) -> typing.Literal["cross_program_prefetch_index"] | None: ... global___HloInstructionProto = HloInstructionProto @@ -695,7 +919,25 @@ class HloComputationProto(google.protobuf.message.Message): execution_thread: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["program_shape", b"program_shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["execution_thread", b"execution_thread", "id", b"id", "instructions", b"instructions", "is_fusion_computation", b"is_fusion_computation", "name", b"name", "program_shape", b"program_shape", "root_id", b"root_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "execution_thread", + b"execution_thread", + "id", + b"id", + "instructions", + b"instructions", + "is_fusion_computation", + b"is_fusion_computation", + "name", + b"name", + "program_shape", + b"program_shape", + "root_id", + b"root_id", + ], + ) -> None: ... global___HloComputationProto = HloComputationProto @@ -714,11 +956,7 @@ class HloScheduleProto(google.protobuf.message.Message): INSTRUCTION_IDS_FIELD_NUMBER: builtins.int @property def instruction_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - def __init__( - self, - *, - instruction_ids: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... + def __init__(self, *, instruction_ids: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["instruction_ids", b"instruction_ids"]) -> None: ... @typing.final @@ -731,23 +969,20 @@ class HloScheduleProto(google.protobuf.message.Message): @property def value(self) -> global___HloScheduleProto.InstructionSequence: ... def __init__( - self, - *, - key: builtins.int | None = ..., - value: global___HloScheduleProto.InstructionSequence | None = ..., + self, *, key: builtins.int | None = ..., value: global___HloScheduleProto.InstructionSequence | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... SEQUENCES_FIELD_NUMBER: builtins.int @property - def sequences(self) -> google.protobuf.internal.containers.MessageMap[builtins.int, global___HloScheduleProto.InstructionSequence]: + def sequences( + self, + ) -> google.protobuf.internal.containers.MessageMap[builtins.int, global___HloScheduleProto.InstructionSequence]: """Map from computation id to sequence.""" def __init__( - self, - *, - sequences: collections.abc.Mapping[builtins.int, global___HloScheduleProto.InstructionSequence] | None = ..., + self, *, sequences: collections.abc.Mapping[builtins.int, global___HloScheduleProto.InstructionSequence] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["sequences", b"sequences"]) -> None: ... @@ -800,15 +1035,29 @@ class HloInputOutputAliasProto(google.protobuf.message.Message): parameter_shape_index: collections.abc.Iterable[builtins.int] | None = ..., kind: global___Kind.ValueType | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["kind", b"kind", "output_shape_index", b"output_shape_index", "parameter_number", b"parameter_number", "parameter_shape_index", b"parameter_shape_index"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "kind", + b"kind", + "output_shape_index", + b"output_shape_index", + "parameter_number", + b"parameter_number", + "parameter_shape_index", + b"parameter_shape_index", + ], + ) -> None: ... ENTRIES_FIELD_NUMBER: builtins.int @property - def entries(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloInputOutputAliasProto.AliasEntryProto]: ... - def __init__( + def entries( self, - *, - entries: collections.abc.Iterable[global___HloInputOutputAliasProto.AliasEntryProto] | None = ..., + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___HloInputOutputAliasProto.AliasEntryProto + ]: ... + def __init__( + self, *, entries: collections.abc.Iterable[global___HloInputOutputAliasProto.AliasEntryProto] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["entries", b"entries"]) -> None: ... @@ -850,15 +1099,22 @@ class HloBufferDonorProto(google.protobuf.message.Message): parameter_number: builtins.int | None = ..., parameter_shape_index: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["parameter_number", b"parameter_number", "parameter_shape_index", b"parameter_shape_index"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "parameter_number", b"parameter_number", "parameter_shape_index", b"parameter_shape_index" + ], + ) -> None: ... ENTRIES_FIELD_NUMBER: builtins.int @property - def entries(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloBufferDonorProto.BufferDonorEntryProto]: ... - def __init__( + def entries( self, - *, - entries: collections.abc.Iterable[global___HloBufferDonorProto.BufferDonorEntryProto] | None = ..., + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___HloBufferDonorProto.BufferDonorEntryProto + ]: ... + def __init__( + self, *, entries: collections.abc.Iterable[global___HloBufferDonorProto.BufferDonorEntryProto] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["entries", b"entries"]) -> None: ... @@ -882,7 +1138,9 @@ class CrossProgramPrefetch(google.protobuf.message.Message): index: collections.abc.Iterable[builtins.int] | None = ..., offset: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["index", b"index", "offset", b"offset", "parameter", b"parameter"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["index", b"index", "offset", b"offset", "parameter", b"parameter"] + ) -> None: ... global___CrossProgramPrefetch = CrossProgramPrefetch @@ -927,7 +1185,12 @@ class StackFrameIndexProto(google.protobuf.message.Message): line: builtins.int | None = ..., column: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["column", b"column", "file_name_id", b"file_name_id", "function_name_id", b"function_name_id", "line", b"line"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "column", b"column", "file_name_id", b"file_name_id", "function_name_id", b"function_name_id", "line", b"line" + ], + ) -> None: ... 
@typing.final class StackFrame(google.protobuf.message.Message): @@ -942,12 +1205,11 @@ class StackFrameIndexProto(google.protobuf.message.Message): parent_frame_id: builtins.int """1-based position of the parent frame.""" def __init__( - self, - *, - file_location_id: builtins.int | None = ..., - parent_frame_id: builtins.int | None = ..., + self, *, file_location_id: builtins.int | None = ..., parent_frame_id: builtins.int | None = ... + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["file_location_id", b"file_location_id", "parent_frame_id", b"parent_frame_id"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["file_location_id", b"file_location_id", "parent_frame_id", b"parent_frame_id"]) -> None: ... FILE_NAMES_FIELD_NUMBER: builtins.int FUNCTION_NAMES_FIELD_NUMBER: builtins.int @@ -962,11 +1224,15 @@ class StackFrameIndexProto(google.protobuf.message.Message): """Flat index array of function names.""" @property - def file_locations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StackFrameIndexProto.FileLocation]: + def file_locations( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StackFrameIndexProto.FileLocation]: """Flat index array of file locations.""" @property - def stack_frames(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StackFrameIndexProto.StackFrame]: + def stack_frames( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StackFrameIndexProto.StackFrame]: """Flat index array of frames.""" def __init__( @@ -977,7 +1243,19 @@ class StackFrameIndexProto(google.protobuf.message.Message): file_locations: collections.abc.Iterable[global___StackFrameIndexProto.FileLocation] | None = ..., stack_frames: collections.abc.Iterable[global___StackFrameIndexProto.StackFrame] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["file_locations", b"file_locations", "file_names", b"file_names", "function_names", b"function_names", "stack_frames", b"stack_frames"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "file_locations", + b"file_locations", + "file_names", + b"file_names", + "function_names", + b"function_names", + "stack_frames", + b"stack_frames", + ], + ) -> None: ... global___StackFrameIndexProto = StackFrameIndexProto @@ -991,7 +1269,9 @@ class HloModuleProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ProfileTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HloModuleProto._ProfileType.ValueType], builtins.type): + class _ProfileTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HloModuleProto._ProfileType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor INVALID: HloModuleProto._ProfileType.ValueType # 0 FLAG: HloModuleProto._ProfileType.ValueType # 1 @@ -1040,7 +1320,21 @@ class HloModuleProto(google.protobuf.message.Message): compilation_event: tensorflow.compiler.xla.xla_data_pb2.CompilationEvent.ValueType | None = ..., fingerprint: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["compilation_event", b"compilation_event", "fingerprint", b"fingerprint", "profile_source", b"profile_source", "profile_type", b"profile_type", "relative_speedup", b"relative_speedup"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "compilation_event", + b"compilation_event", + "fingerprint", + b"fingerprint", + "profile_source", + b"profile_source", + "profile_type", + b"profile_type", + "relative_speedup", + b"relative_speedup", + ], + ) -> None: ... NAME_FIELD_NUMBER: builtins.int ENTRY_COMPUTATION_NAME_FIELD_NUMBER: builtins.int @@ -1092,13 +1386,19 @@ class HloModuleProto(google.protobuf.message.Message): """Describes the information of input buffer donors.""" @property - def cross_program_prefetches(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CrossProgramPrefetch]: ... + def cross_program_prefetches( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CrossProgramPrefetch]: ... @property def spmd_output_sharding(self) -> tensorflow.compiler.xla.xla_data_pb2.OpSharding: ... @property - def spmd_parameters_shardings(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.OpSharding]: ... + def spmd_parameters_shardings( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.OpSharding]: ... @property - def profile_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleProto.ProfileInfo]: + def profile_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleProto.ProfileInfo]: """Profile information for the HLO module.""" @property @@ -1135,8 +1435,68 @@ class HloModuleProto(google.protobuf.message.Message): stack_frame_index: global___StackFrameIndexProto | None = ..., frontend_attributes: tensorflow.compiler.xla.xla_data_pb2.FrontendAttributes | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["buffer_donor", b"buffer_donor", "device_assignment", b"device_assignment", "frontend_attributes", b"frontend_attributes", "host_program_shape", b"host_program_shape", "input_output_alias", b"input_output_alias", "schedule", b"schedule", "spmd_output_sharding", b"spmd_output_sharding", "stack_frame_index", b"stack_frame_index"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["buffer_donor", b"buffer_donor", "computations", b"computations", "cross_program_prefetches", b"cross_program_prefetches", "device_assignment", b"device_assignment", "entry_computation_id", b"entry_computation_id", "entry_computation_name", b"entry_computation_name", "frontend_attributes", b"frontend_attributes", "host_program_shape", b"host_program_shape", "id", b"id", "input_output_alias", b"input_output_alias", "is_dynamic", b"is_dynamic", "name", b"name", "profile_info", b"profile_info", "schedule", b"schedule", "spmd_output_sharding", b"spmd_output_sharding", "spmd_parameters_shardings", b"spmd_parameters_shardings", "stack_frame_index", b"stack_frame_index", "use_auto_spmd_partitioning", b"use_auto_spmd_partitioning"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "buffer_donor", + b"buffer_donor", + "device_assignment", + b"device_assignment", + "frontend_attributes", + b"frontend_attributes", + "host_program_shape", + b"host_program_shape", + "input_output_alias", + b"input_output_alias", + "schedule", + b"schedule", + "spmd_output_sharding", + b"spmd_output_sharding", + "stack_frame_index", + b"stack_frame_index", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "buffer_donor", + b"buffer_donor", + "computations", + b"computations", + "cross_program_prefetches", + b"cross_program_prefetches", + "device_assignment", + b"device_assignment", + "entry_computation_id", + b"entry_computation_id", + "entry_computation_name", + b"entry_computation_name", + "frontend_attributes", + b"frontend_attributes", + "host_program_shape", + b"host_program_shape", + "id", + b"id", + "input_output_alias", + b"input_output_alias", + "is_dynamic", + b"is_dynamic", + "name", + b"name", + "profile_info", + b"profile_info", + "schedule", + b"schedule", + "spmd_output_sharding", + b"spmd_output_sharding", + "spmd_parameters_shardings", + b"spmd_parameters_shardings", + "stack_frame_index", + b"stack_frame_index", + "use_auto_spmd_partitioning", + b"use_auto_spmd_partitioning", + ], + ) -> None: ... global___HloModuleProto = HloModuleProto @@ -1169,7 +1529,12 @@ class LogicalBufferProto(google.protobuf.message.Message): instruction_id: builtins.int | None = ..., shape_index: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["instruction_id", b"instruction_id", "instruction_name", b"instruction_name", "shape_index", b"shape_index"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "instruction_id", b"instruction_id", "instruction_name", b"instruction_name", "shape_index", b"shape_index" + ], + ) -> None: ... ID_FIELD_NUMBER: builtins.int SIZE_FIELD_NUMBER: builtins.int @@ -1191,7 +1556,9 @@ class LogicalBufferProto(google.protobuf.message.Message): color: builtins.int | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["defined_at", b"defined_at"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["color", b"color", "defined_at", b"defined_at", "id", b"id", "size", b"size"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["color", b"color", "defined_at", b"defined_at", "id", b"id", "size", b"size"] + ) -> None: ... global___LogicalBufferProto = LogicalBufferProto @@ -1222,7 +1589,9 @@ class BufferAllocationProto(google.protobuf.message.Message): offset: builtins.int | None = ..., size: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["logical_buffer_id", b"logical_buffer_id", "offset", b"offset", "size", b"size"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["logical_buffer_id", b"logical_buffer_id", "offset", b"offset", "size", b"size"] + ) -> None: ... INDEX_FIELD_NUMBER: builtins.int SIZE_FIELD_NUMBER: builtins.int @@ -1247,7 +1616,9 @@ class BufferAllocationProto(google.protobuf.message.Message): @property def parameter_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... @property - def assigned(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BufferAllocationProto.Assigned]: ... + def assigned( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BufferAllocationProto.Assigned]: ... def __init__( self, *, @@ -1263,7 +1634,33 @@ class BufferAllocationProto(google.protobuf.message.Message): color: builtins.int | None = ..., assigned: collections.abc.Iterable[global___BufferAllocationProto.Assigned] | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["assigned", b"assigned", "color", b"color", "index", b"index", "is_constant", b"is_constant", "is_entry_computation_parameter", b"is_entry_computation_parameter", "is_thread_local", b"is_thread_local", "is_tuple", b"is_tuple", "maybe_live_out", b"maybe_live_out", "parameter_number", b"parameter_number", "parameter_shape_index", b"parameter_shape_index", "size", b"size"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "assigned", + b"assigned", + "color", + b"color", + "index", + b"index", + "is_constant", + b"is_constant", + "is_entry_computation_parameter", + b"is_entry_computation_parameter", + "is_thread_local", + b"is_thread_local", + "is_tuple", + b"is_tuple", + "maybe_live_out", + b"maybe_live_out", + "parameter_number", + b"parameter_number", + "parameter_shape_index", + b"parameter_shape_index", + "size", + b"size", + ], + ) -> None: ... global___BufferAllocationProto = BufferAllocationProto @@ -1285,7 +1682,9 @@ class HeapSimulatorTrace(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _KindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HeapSimulatorTrace.Event._Kind.ValueType], builtins.type): + class _KindEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HeapSimulatorTrace.Event._Kind.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor ALLOC: HeapSimulatorTrace.Event._Kind.ValueType # 0 """A memory region was allocated for the buffer.""" @@ -1339,7 +1738,21 @@ class HeapSimulatorTrace(google.protobuf.message.Message): instruction_name: builtins.str | None = ..., share_with_canonical_id: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["buffer_id", b"buffer_id", "computation_name", b"computation_name", "instruction_name", b"instruction_name", "kind", b"kind", "share_with_canonical_id", b"share_with_canonical_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "buffer_id", + b"buffer_id", + "computation_name", + b"computation_name", + "instruction_name", + b"instruction_name", + "kind", + b"kind", + "share_with_canonical_id", + b"share_with_canonical_id", + ], + ) -> None: ... EVENTS_FIELD_NUMBER: builtins.int WHOLE_MODULE_SIMULATION_FIELD_NUMBER: builtins.int @@ -1347,7 +1760,9 @@ class HeapSimulatorTrace(google.protobuf.message.Message): whole_module_simulation: builtins.bool buffer_allocation_index: builtins.int @property - def events(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HeapSimulatorTrace.Event]: ... + def events( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HeapSimulatorTrace.Event]: ... def __init__( self, *, @@ -1355,7 +1770,17 @@ class HeapSimulatorTrace(google.protobuf.message.Message): whole_module_simulation: builtins.bool | None = ..., buffer_allocation_index: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["buffer_allocation_index", b"buffer_allocation_index", "events", b"events", "whole_module_simulation", b"whole_module_simulation"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "buffer_allocation_index", + b"buffer_allocation_index", + "events", + b"events", + "whole_module_simulation", + b"whole_module_simulation", + ], + ) -> None: ... 
global___HeapSimulatorTrace = HeapSimulatorTrace @@ -1373,10 +1798,7 @@ class HloModuleGroupProto(google.protobuf.message.Message): @property def hlo_modules(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleProto]: ... def __init__( - self, - *, - name: builtins.str | None = ..., - hlo_modules: collections.abc.Iterable[global___HloModuleProto] | None = ..., + self, *, name: builtins.str | None = ..., hlo_modules: collections.abc.Iterable[global___HloModuleProto] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["hlo_modules", b"hlo_modules", "name", b"name"]) -> None: ... @@ -1402,26 +1824,33 @@ class BufferAssignmentProto(google.protobuf.message.Message): @property def location(self) -> global___LogicalBufferProto.Location: ... def __init__( - self, - *, - source_buffer_id: builtins.int | None = ..., - location: global___LogicalBufferProto.Location | None = ..., + self, *, source_buffer_id: builtins.int | None = ..., location: global___LogicalBufferProto.Location | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["location", b"location"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["location", b"location", "source_buffer_id", b"source_buffer_id"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["location", b"location", "source_buffer_id", b"source_buffer_id"] + ) -> None: ... LOGICAL_BUFFERS_FIELD_NUMBER: builtins.int BUFFER_ALIASES_FIELD_NUMBER: builtins.int BUFFER_ALLOCATIONS_FIELD_NUMBER: builtins.int HEAP_SIMULATOR_TRACES_FIELD_NUMBER: builtins.int @property - def logical_buffers(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___LogicalBufferProto]: ... + def logical_buffers( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___LogicalBufferProto]: ... @property - def buffer_aliases(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BufferAssignmentProto.BufferAlias]: ... + def buffer_aliases( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BufferAssignmentProto.BufferAlias]: ... @property - def buffer_allocations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BufferAllocationProto]: ... + def buffer_allocations( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BufferAllocationProto]: ... @property - def heap_simulator_traces(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HeapSimulatorTrace]: ... + def heap_simulator_traces( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HeapSimulatorTrace]: ... def __init__( self, *, @@ -1430,7 +1859,19 @@ class BufferAssignmentProto(google.protobuf.message.Message): buffer_allocations: collections.abc.Iterable[global___BufferAllocationProto] | None = ..., heap_simulator_traces: collections.abc.Iterable[global___HeapSimulatorTrace] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["buffer_aliases", b"buffer_aliases", "buffer_allocations", b"buffer_allocations", "heap_simulator_traces", b"heap_simulator_traces", "logical_buffers", b"logical_buffers"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "buffer_aliases", + b"buffer_aliases", + "buffer_allocations", + b"buffer_allocations", + "heap_simulator_traces", + b"heap_simulator_traces", + "logical_buffers", + b"logical_buffers", + ], + ) -> None: ... global___BufferAssignmentProto = BufferAssignmentProto @@ -1447,13 +1888,14 @@ class HloProto(google.protobuf.message.Message): @property def buffer_assignment(self) -> global___BufferAssignmentProto: ... def __init__( - self, - *, - hlo_module: global___HloModuleProto | None = ..., - buffer_assignment: global___BufferAssignmentProto | None = ..., + self, *, hlo_module: global___HloModuleProto | None = ..., buffer_assignment: global___BufferAssignmentProto | None = ... + ) -> None: ... + def HasField( + self, field_name: typing.Literal["buffer_assignment", b"buffer_assignment", "hlo_module", b"hlo_module"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["buffer_assignment", b"buffer_assignment", "hlo_module", b"hlo_module"] ) -> None: ... - def HasField(self, field_name: typing.Literal["buffer_assignment", b"buffer_assignment", "hlo_module", b"hlo_module"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["buffer_assignment", b"buffer_assignment", "hlo_module", b"hlo_module"]) -> None: ... global___HloProto = HloProto @@ -1477,7 +1919,9 @@ class HloSnapshot(google.protobuf.message.Message): """The hlo graph.""" @property - def arguments(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.LiteralProto]: + def arguments( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.LiteralProto]: """The arguments passed to the graph.""" @property @@ -1493,7 +1937,12 @@ class HloSnapshot(google.protobuf.message.Message): execution_platform: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["hlo", b"hlo", "result", b"result"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["arguments", b"arguments", "execution_platform", b"execution_platform", "hlo", b"hlo", "result", b"result"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "arguments", b"arguments", "execution_platform", b"execution_platform", "hlo", b"hlo", "result", b"result" + ], + ) -> None: ... global___HloSnapshot = HloSnapshot @@ -1542,7 +1991,21 @@ class HloModuleMetadataProto(google.protobuf.message.Message): partitioned_module_ids: collections.abc.Iterable[builtins.int] | None = ..., pass_metadata: collections.abc.Iterable[global___HloPassMetadata] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["canonical_module_id", b"canonical_module_id", "module_group_name", b"module_group_name", "original_module_id", b"original_module_id", "partitioned_module_ids", b"partitioned_module_ids", "pass_metadata", b"pass_metadata"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "canonical_module_id", + b"canonical_module_id", + "module_group_name", + b"module_group_name", + "original_module_id", + b"original_module_id", + "partitioned_module_ids", + b"partitioned_module_ids", + "pass_metadata", + b"pass_metadata", + ], + ) -> None: ... global___HloModuleMetadataProto = HloModuleMetadataProto @@ -1621,6 +2084,30 @@ class HloPassMetadata(google.protobuf.message.Message): custom_metadata: google.protobuf.any_pb2.Any | None = ..., ) -> None: ... 
def HasField(self, field_name: typing.Literal["custom_metadata", b"custom_metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["custom_metadata", b"custom_metadata", "dump_filenames", b"dump_filenames", "end_timestamp_usec", b"end_timestamp_usec", "module_changed", b"module_changed", "module_group_module_ids", b"module_group_module_ids", "module_id", b"module_id", "pass_id", b"pass_id", "pass_name", b"pass_name", "pipeline_name", b"pipeline_name", "start_timestamp_usec", b"start_timestamp_usec"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "custom_metadata", + b"custom_metadata", + "dump_filenames", + b"dump_filenames", + "end_timestamp_usec", + b"end_timestamp_usec", + "module_changed", + b"module_changed", + "module_group_module_ids", + b"module_group_module_ids", + "module_id", + b"module_id", + "pass_id", + b"pass_id", + "pass_name", + b"pass_name", + "pipeline_name", + b"pipeline_name", + "start_timestamp_usec", + b"start_timestamp_usec", + ], + ) -> None: ... global___HloPassMetadata = HloPassMetadata diff --git a/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_profile_printer_data_pb2.pyi b/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_profile_printer_data_pb2.pyi index d1871110d55b..5699e8cc13fc 100644 --- a/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_profile_printer_data_pb2.pyi +++ b/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_profile_printer_data_pb2.pyi @@ -73,7 +73,27 @@ class HloProfilePrinterData(google.protobuf.message.Message): optimal_seconds: builtins.float | None = ..., profile_index: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bytes_accessed", b"bytes_accessed", "category", b"category", "flop_count", b"flop_count", "long_name", b"long_name", "optimal_seconds", b"optimal_seconds", "profile_index", b"profile_index", "short_name", b"short_name", "transcendental_count", b"transcendental_count"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bytes_accessed", + b"bytes_accessed", + "category", + b"category", + "flop_count", + b"flop_count", + "long_name", + b"long_name", + "optimal_seconds", + b"optimal_seconds", + "profile_index", + b"profile_index", + "short_name", + b"short_name", + "transcendental_count", + b"transcendental_count", + ], + ) -> None: ... @typing.final class HloComputationInfo(google.protobuf.message.Message): @@ -90,7 +110,11 @@ class HloProfilePrinterData(google.protobuf.message.Message): corresponding to this HloComputationInfo. """ @property - def instruction_infos(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloProfilePrinterData.HloInstructionInfo]: + def instruction_infos( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___HloProfilePrinterData.HloInstructionInfo + ]: """HloInstructionInfos for every HloInstruction in the HloComputation for corresponding to this HloComputattionInfo. """ @@ -102,7 +126,12 @@ class HloProfilePrinterData(google.protobuf.message.Message): profile_index: builtins.int | None = ..., instruction_infos: collections.abc.Iterable[global___HloProfilePrinterData.HloInstructionInfo] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["instruction_infos", b"instruction_infos", "name", b"name", "profile_index", b"profile_index"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "instruction_infos", b"instruction_infos", "name", b"name", "profile_index", b"profile_index" + ], + ) -> None: ... @typing.final class ExtraMetricsEntry(google.protobuf.message.Message): @@ -112,12 +141,7 @@ class HloProfilePrinterData(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.int - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... COMPUTATION_INFOS_FIELD_NUMBER: builtins.int @@ -129,7 +153,9 @@ class HloProfilePrinterData(google.protobuf.message.Message): entry_computation: builtins.str """Name of the entry computation.""" @property - def computation_infos(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloProfilePrinterData.HloComputationInfo]: + def computation_infos( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloProfilePrinterData.HloComputationInfo]: """HloComputationInfos for every HloComputation in the HloModule.""" @property @@ -144,6 +170,18 @@ class HloProfilePrinterData(google.protobuf.message.Message): extra_metrics: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., entry_computation: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["computation_infos", b"computation_infos", "entry_computation", b"entry_computation", "extra_metrics", b"extra_metrics", "profile_counters_size", b"profile_counters_size"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "computation_infos", + b"computation_infos", + "entry_computation", + b"entry_computation", + "extra_metrics", + b"extra_metrics", + "profile_counters_size", + b"profile_counters_size", + ], + ) -> None: ... global___HloProfilePrinterData = HloProfilePrinterData diff --git a/stubs/tensorflow/tensorflow/compiler/xla/service/metrics_pb2.pyi b/stubs/tensorflow/tensorflow/compiler/xla/service/metrics_pb2.pyi index 44592002fdd9..05eaf7379849 100644 --- a/stubs/tensorflow/tensorflow/compiler/xla/service/metrics_pb2.pyi +++ b/stubs/tensorflow/tensorflow/compiler/xla/service/metrics_pb2.pyi @@ -56,8 +56,22 @@ class PassMetrics(google.protobuf.message.Message): pass_duration: google.protobuf.duration_pb2.Duration | None = ..., custom_metrics: google.protobuf.any_pb2.Any | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["custom_metrics", b"custom_metrics", "pass_duration", b"pass_duration"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["custom_metrics", b"custom_metrics", "module_id", b"module_id", "pass_duration", b"pass_duration", "pass_name", b"pass_name"]) -> None: ... + def HasField( + self, field_name: typing.Literal["custom_metrics", b"custom_metrics", "pass_duration", b"pass_duration"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "custom_metrics", + b"custom_metrics", + "module_id", + b"module_id", + "pass_duration", + b"pass_duration", + "pass_name", + b"pass_name", + ], + ) -> None: ... global___PassMetrics = PassMetrics @@ -95,8 +109,64 @@ class JobInfo(google.protobuf.message.Message): task_id: builtins.int | None = ..., task_uid: builtins.int | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["_cell", b"_cell", "_name", b"_name", "_task_id", b"_task_id", "_task_uid", b"_task_uid", "_uid", b"_uid", "_user", b"_user", "cell", b"cell", "name", b"name", "task_id", b"task_id", "task_uid", b"task_uid", "uid", b"uid", "user", b"user"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_cell", b"_cell", "_name", b"_name", "_task_id", b"_task_id", "_task_uid", b"_task_uid", "_uid", b"_uid", "_user", b"_user", "cell", b"cell", "name", b"name", "task_id", b"task_id", "task_uid", b"task_uid", "uid", b"uid", "user", b"user"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "_cell", + b"_cell", + "_name", + b"_name", + "_task_id", + b"_task_id", + "_task_uid", + b"_task_uid", + "_uid", + b"_uid", + "_user", + b"_user", + "cell", + b"cell", + "name", + b"name", + "task_id", + b"task_id", + "task_uid", + b"task_uid", + "uid", + b"uid", + "user", + b"user", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "_cell", + b"_cell", + "_name", + b"_name", + "_task_id", + b"_task_id", + "_task_uid", + b"_task_uid", + "_uid", + b"_uid", + "_user", + b"_user", + "cell", + b"cell", + "name", + b"name", + "task_id", + b"task_id", + "task_uid", + b"task_uid", + "uid", + b"uid", + "user", + b"user", + ], + ) -> None: ... @typing.overload def WhichOneof(self, oneof_group: typing.Literal["_cell", b"_cell"]) -> typing.Literal["cell"] | None: ... @typing.overload @@ -122,7 +192,10 @@ class CompilationLogEntry(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _CompilationStageEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CompilationLogEntry._CompilationStage.ValueType], builtins.type): + class _CompilationStageEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CompilationLogEntry._CompilationStage.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNSPECIFIED: CompilationLogEntry._CompilationStage.ValueType # 0 END_TO_END: CompilationLogEntry._CompilationStage.ValueType # 1 @@ -184,7 +257,27 @@ class CompilationLogEntry(google.protobuf.message.Message): module_ids: collections.abc.Iterable[builtins.int] | None = ..., job_info: global___JobInfo | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["duration", b"duration", "job_info", b"job_info", "timestamp", b"timestamp"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["duration", b"duration", "job_info", b"job_info", "module_ids", b"module_ids", "pass_metrics", b"pass_metrics", "stage", b"stage", "task_index", b"task_index", "timestamp", b"timestamp"]) -> None: ... + def HasField( + self, field_name: typing.Literal["duration", b"duration", "job_info", b"job_info", "timestamp", b"timestamp"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "duration", + b"duration", + "job_info", + b"job_info", + "module_ids", + b"module_ids", + "pass_metrics", + b"pass_metrics", + "stage", + b"stage", + "task_index", + b"task_index", + "timestamp", + b"timestamp", + ], + ) -> None: ... 
global___CompilationLogEntry = CompilationLogEntry diff --git a/stubs/tensorflow/tensorflow/compiler/xla/service/test_compilation_environment_pb2.pyi b/stubs/tensorflow/tensorflow/compiler/xla/service/test_compilation_environment_pb2.pyi index c7a849570d8d..6b3b31b94f3d 100644 --- a/stubs/tensorflow/tensorflow/compiler/xla/service/test_compilation_environment_pb2.pyi +++ b/stubs/tensorflow/tensorflow/compiler/xla/service/test_compilation_environment_pb2.pyi @@ -31,11 +31,7 @@ class TestCompilationEnvironment1(google.protobuf.message.Message): SOME_FLAG_FIELD_NUMBER: builtins.int some_flag: builtins.int - def __init__( - self, - *, - some_flag: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, some_flag: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["some_flag", b"some_flag"]) -> None: ... global___TestCompilationEnvironment1 = TestCompilationEnvironment1 @@ -46,11 +42,7 @@ class TestCompilationEnvironment2(google.protobuf.message.Message): SOME_OTHER_FLAG_FIELD_NUMBER: builtins.int some_other_flag: builtins.int - def __init__( - self, - *, - some_other_flag: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, some_other_flag: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["some_other_flag", b"some_other_flag"]) -> None: ... global___TestCompilationEnvironment2 = TestCompilationEnvironment2 @@ -61,11 +53,7 @@ class TestCompilationEnvironment3(google.protobuf.message.Message): A_THIRD_FLAG_FIELD_NUMBER: builtins.int a_third_flag: builtins.int - def __init__( - self, - *, - a_third_flag: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, a_third_flag: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["a_third_flag", b"a_third_flag"]) -> None: ... global___TestCompilationEnvironment3 = TestCompilationEnvironment3 diff --git a/stubs/tensorflow/tensorflow/compiler/xla/service/xla_compile_result_pb2.pyi b/stubs/tensorflow/tensorflow/compiler/xla/service/xla_compile_result_pb2.pyi index 3c5dde2bac5c..e17f9559b648 100644 --- a/stubs/tensorflow/tensorflow/compiler/xla/service/xla_compile_result_pb2.pyi +++ b/stubs/tensorflow/tensorflow/compiler/xla/service/xla_compile_result_pb2.pyi @@ -73,8 +73,36 @@ class CompilerPerfStats(google.protobuf.message.Message): compilation_duration: google.protobuf.duration_pb2.Duration | None = ..., total_duration: google.protobuf.duration_pb2.Duration | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["compilation_duration", b"compilation_duration", "compilation_prologue_duration", b"compilation_prologue_duration", "hlo_verification_duration", b"hlo_verification_duration", "init_duration", b"init_duration", "total_duration", b"total_duration"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["compilation_duration", b"compilation_duration", "compilation_prologue_duration", b"compilation_prologue_duration", "hlo_verification_duration", b"hlo_verification_duration", "init_duration", b"init_duration", "total_duration", b"total_duration"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "compilation_duration", + b"compilation_duration", + "compilation_prologue_duration", + b"compilation_prologue_duration", + "hlo_verification_duration", + b"hlo_verification_duration", + "init_duration", + b"init_duration", + "total_duration", + b"total_duration", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "compilation_duration", + b"compilation_duration", + "compilation_prologue_duration", + b"compilation_prologue_duration", + "hlo_verification_duration", + b"hlo_verification_duration", + "init_duration", + b"init_duration", + "total_duration", + b"total_duration", + ], + ) -> None: ... global___CompilerPerfStats = CompilerPerfStats @@ -90,12 +118,7 @@ class CompilationResult(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.int - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -131,7 +154,14 @@ class CompilationResult(google.protobuf.message.Message): status: tensorflow.tsl.protobuf.status_pb2.StatusProto | None = ..., counters: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["hlo_module", b"hlo_module", "perf_stats", b"perf_stats", "status", b"status"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["counters", b"counters", "hlo_module", b"hlo_module", "perf_stats", b"perf_stats", "status", b"status"]) -> None: ... + def HasField( + self, field_name: typing.Literal["hlo_module", b"hlo_module", "perf_stats", b"perf_stats", "status", b"status"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "counters", b"counters", "hlo_module", b"hlo_module", "perf_stats", b"perf_stats", "status", b"status" + ], + ) -> None: ... global___CompilationResult = CompilationResult diff --git a/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/bfc_memory_map_pb2.pyi b/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/bfc_memory_map_pb2.pyi index 9e1871be16a8..0fe33b725b8e 100644 --- a/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/bfc_memory_map_pb2.pyi +++ b/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/bfc_memory_map_pb2.pyi @@ -38,7 +38,21 @@ class MemAllocatorStats(google.protobuf.message.Message): largest_alloc_size: builtins.int | None = ..., fragmentation_metric: builtins.float | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bytes_in_use", b"bytes_in_use", "fragmentation_metric", b"fragmentation_metric", "largest_alloc_size", b"largest_alloc_size", "num_allocs", b"num_allocs", "peak_bytes_in_use", b"peak_bytes_in_use"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bytes_in_use", + b"bytes_in_use", + "fragmentation_metric", + b"fragmentation_metric", + "largest_alloc_size", + b"largest_alloc_size", + "num_allocs", + b"num_allocs", + "peak_bytes_in_use", + b"peak_bytes_in_use", + ], + ) -> None: ... global___MemAllocatorStats = MemAllocatorStats @@ -77,7 +91,29 @@ class MemChunk(google.protobuf.message.Message): in_use: builtins.bool | None = ..., step_id: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["action_count", b"action_count", "address", b"address", "bin", b"bin", "freed_at_count", b"freed_at_count", "in_use", b"in_use", "op_name", b"op_name", "requested_size", b"requested_size", "size", b"size", "step_id", b"step_id"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "action_count", + b"action_count", + "address", + b"address", + "bin", + b"bin", + "freed_at_count", + b"freed_at_count", + "in_use", + b"in_use", + "op_name", + b"op_name", + "requested_size", + b"requested_size", + "size", + b"size", + "step_id", + b"step_id", + ], + ) -> None: ... global___MemChunk = MemChunk @@ -104,7 +140,21 @@ class BinSummary(google.protobuf.message.Message): total_chunks_in_use: builtins.int | None = ..., total_chunks_in_bin: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bin", b"bin", "total_bytes_in_bin", b"total_bytes_in_bin", "total_bytes_in_use", b"total_bytes_in_use", "total_chunks_in_bin", b"total_chunks_in_bin", "total_chunks_in_use", b"total_chunks_in_use"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bin", + b"bin", + "total_bytes_in_bin", + b"total_bytes_in_bin", + "total_bytes_in_use", + b"total_bytes_in_use", + "total_chunks_in_bin", + b"total_chunks_in_bin", + "total_chunks_in_use", + b"total_chunks_in_use", + ], + ) -> None: ... global___BinSummary = BinSummary @@ -116,12 +166,7 @@ class SnapShot(google.protobuf.message.Message): SIZE_FIELD_NUMBER: builtins.int action_count: builtins.int size: builtins.int - def __init__( - self, - *, - action_count: builtins.int | None = ..., - size: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, action_count: builtins.int | None = ..., size: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["action_count", b"action_count", "size", b"size"]) -> None: ... global___SnapShot = SnapShot @@ -154,6 +199,20 @@ class MemoryDump(google.protobuf.message.Message): stats: global___MemAllocatorStats | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["stats", b"stats"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["allocator_name", b"allocator_name", "bin_summary", b"bin_summary", "chunk", b"chunk", "snap_shot", b"snap_shot", "stats", b"stats"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "allocator_name", + b"allocator_name", + "bin_summary", + b"bin_summary", + "chunk", + b"chunk", + "snap_shot", + b"snap_shot", + "stats", + b"stats", + ], + ) -> None: ... global___MemoryDump = MemoryDump diff --git a/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/test_log_pb2.pyi b/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/test_log_pb2.pyi index 0e06ea85faa7..a0cd6ef6a44b 100644 --- a/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/test_log_pb2.pyi +++ b/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/test_log_pb2.pyi @@ -30,15 +30,16 @@ class EntryValue(google.protobuf.message.Message): STRING_VALUE_FIELD_NUMBER: builtins.int double_value: builtins.float string_value: builtins.str - def __init__( - self, - *, - double_value: builtins.float | None = ..., - string_value: builtins.str | None = ..., + def __init__(self, *, double_value: builtins.float | None = ..., string_value: builtins.str | None = ...) -> None: ... + def HasField( + self, field_name: typing.Literal["double_value", b"double_value", "kind", b"kind", "string_value", b"string_value"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["double_value", b"double_value", "kind", b"kind", "string_value", b"string_value"] ) -> None: ... 
- def HasField(self, field_name: typing.Literal["double_value", b"double_value", "kind", b"kind", "string_value", b"string_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["double_value", b"double_value", "kind", b"kind", "string_value", b"string_value"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["kind", b"kind"]) -> typing.Literal["double_value", "string_value"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["kind", b"kind"] + ) -> typing.Literal["double_value", "string_value"] | None: ... global___EntryValue = EntryValue @@ -71,7 +72,9 @@ class MetricEntry(google.protobuf.message.Message): max_value: google.protobuf.wrappers_pb2.DoubleValue | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["max_value", b"max_value", "min_value", b"min_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["max_value", b"max_value", "min_value", b"min_value", "name", b"name", "value", b"value"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["max_value", b"max_value", "min_value", b"min_value", "name", b"name", "value", b"value"] + ) -> None: ... global___MetricEntry = MetricEntry @@ -97,12 +100,7 @@ class BenchmarkEntry(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___EntryValue: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___EntryValue | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___EntryValue | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -146,7 +144,25 @@ class BenchmarkEntry(google.protobuf.message.Message): extras: collections.abc.Mapping[builtins.str, global___EntryValue] | None = ..., metrics: collections.abc.Iterable[global___MetricEntry] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["cpu_time", b"cpu_time", "extras", b"extras", "iters", b"iters", "metrics", b"metrics", "name", b"name", "throughput", b"throughput", "wall_time", b"wall_time"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "cpu_time", + b"cpu_time", + "extras", + b"extras", + "iters", + b"iters", + "metrics", + b"metrics", + "name", + b"name", + "throughput", + b"throughput", + "wall_time", + b"wall_time", + ], + ) -> None: ... global___BenchmarkEntry = BenchmarkEntry @@ -157,11 +173,7 @@ class BenchmarkEntries(google.protobuf.message.Message): ENTRY_FIELD_NUMBER: builtins.int @property def entry(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BenchmarkEntry]: ... - def __init__( - self, - *, - entry: collections.abc.Iterable[global___BenchmarkEntry] | None = ..., - ) -> None: ... + def __init__(self, *, entry: collections.abc.Iterable[global___BenchmarkEntry] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["entry", b"entry"]) -> None: ... global___BenchmarkEntries = BenchmarkEntries @@ -219,8 +231,24 @@ class CommitId(google.protobuf.message.Message): snapshot: builtins.str | None = ..., pending_changelist: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["changelist", b"changelist", "hash", b"hash", "kind", b"kind"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["changelist", b"changelist", "hash", b"hash", "kind", b"kind", "pending_changelist", b"pending_changelist", "snapshot", b"snapshot"]) -> None: ... + def HasField( + self, field_name: typing.Literal["changelist", b"changelist", "hash", b"hash", "kind", b"kind"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "changelist", + b"changelist", + "hash", + b"hash", + "kind", + b"kind", + "pending_changelist", + b"pending_changelist", + "snapshot", + b"snapshot", + ], + ) -> None: ... def WhichOneof(self, oneof_group: typing.Literal["kind", b"kind"]) -> typing.Literal["changelist", "hash"] | None: ... global___CommitId = CommitId @@ -237,12 +265,7 @@ class CPUInfo(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.int - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... NUM_CORES_FIELD_NUMBER: builtins.int @@ -277,7 +300,23 @@ class CPUInfo(google.protobuf.message.Message): cpu_governor: builtins.str | None = ..., cache_size: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["cache_size", b"cache_size", "cpu_governor", b"cpu_governor", "cpu_info", b"cpu_info", "mhz_per_cpu", b"mhz_per_cpu", "num_cores", b"num_cores", "num_cores_allowed", b"num_cores_allowed"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "cache_size", + b"cache_size", + "cpu_governor", + b"cpu_governor", + "cpu_info", + b"cpu_info", + "mhz_per_cpu", + b"mhz_per_cpu", + "num_cores", + b"num_cores", + "num_cores_allowed", + b"num_cores_allowed", + ], + ) -> None: ... global___CPUInfo = CPUInfo @@ -291,12 +330,7 @@ class MemoryInfo(google.protobuf.message.Message): """Total virtual memory in bytes""" available: builtins.int """Immediately available memory in bytes""" - def __init__( - self, - *, - total: builtins.int | None = ..., - available: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, total: builtins.int | None = ..., available: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["available", b"available", "total", b"total"]) -> None: ... global___MemoryInfo = MemoryInfo @@ -315,11 +349,7 @@ class GPUInfo(google.protobuf.message.Message): bus_id: builtins.str """e.g. "0000:04:00.0" """ def __init__( - self, - *, - model: builtins.str | None = ..., - uuid: builtins.str | None = ..., - bus_id: builtins.str | None = ..., + self, *, model: builtins.str | None = ..., uuid: builtins.str | None = ..., bus_id: builtins.str | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["bus_id", b"bus_id", "model", b"model", "uuid", b"uuid"]) -> None: ... @@ -357,7 +387,23 @@ class PlatformInfo(google.protobuf.message.Message): system: builtins.str | None = ..., version: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bits", b"bits", "linkage", b"linkage", "machine", b"machine", "release", b"release", "system", b"system", "version", b"version"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "bits", + b"bits", + "linkage", + b"linkage", + "machine", + b"machine", + "release", + b"release", + "system", + b"system", + "version", + b"version", + ], + ) -> None: ... global___PlatformInfo = PlatformInfo @@ -387,7 +433,12 @@ class AvailableDeviceInfo(google.protobuf.message.Message): memory_limit: builtins.int | None = ..., physical_description: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["memory_limit", b"memory_limit", "name", b"name", "physical_description", b"physical_description", "type", b"type"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "memory_limit", b"memory_limit", "name", b"name", "physical_description", b"physical_description", "type", b"type" + ], + ) -> None: ... global___AvailableDeviceInfo = AvailableDeviceInfo @@ -419,7 +470,9 @@ class MachineConfiguration(google.protobuf.message.Message): """Other devices that are attached and relevant (e.g. GPUInfo).""" @property - def available_device_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AvailableDeviceInfo]: + def available_device_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AvailableDeviceInfo]: """Devices accessible to the test (e.g. as given by list_local_devices).""" @property @@ -435,8 +488,29 @@ class MachineConfiguration(google.protobuf.message.Message): available_device_info: collections.abc.Iterable[global___AvailableDeviceInfo] | None = ..., memory_info: global___MemoryInfo | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["cpu_info", b"cpu_info", "memory_info", b"memory_info", "platform_info", b"platform_info"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["available_device_info", b"available_device_info", "cpu_info", b"cpu_info", "device_info", b"device_info", "hostname", b"hostname", "memory_info", b"memory_info", "platform_info", b"platform_info", "serial_identifier", b"serial_identifier"]) -> None: ... + def HasField( + self, + field_name: typing.Literal["cpu_info", b"cpu_info", "memory_info", b"memory_info", "platform_info", b"platform_info"], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "available_device_info", + b"available_device_info", + "cpu_info", + b"cpu_info", + "device_info", + b"device_info", + "hostname", + b"hostname", + "memory_info", + b"memory_info", + "platform_info", + b"platform_info", + "serial_identifier", + b"serial_identifier", + ], + ) -> None: ... global___MachineConfiguration = MachineConfiguration @@ -454,12 +528,7 @@ class RunConfiguration(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
ARGUMENT_FIELD_NUMBER: builtins.int @@ -497,7 +566,9 @@ class TestResults(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _BenchmarkTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TestResults._BenchmarkType.ValueType], builtins.type): + class _BenchmarkTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TestResults._BenchmarkType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNKNOWN: TestResults._BenchmarkType.ValueType # 0 """Fallback for protos written before Type was introduced.""" @@ -588,7 +659,49 @@ class TestResults(google.protobuf.message.Message): run_mode: builtins.str | None = ..., tf_version: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["build_configuration", b"build_configuration", "commit_id", b"commit_id", "entries", b"entries", "machine_configuration", b"machine_configuration", "run_configuration", b"run_configuration"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["benchmark_type", b"benchmark_type", "build_configuration", b"build_configuration", "commit_id", b"commit_id", "entries", b"entries", "machine_configuration", b"machine_configuration", "name", b"name", "run_configuration", b"run_configuration", "run_mode", b"run_mode", "run_time", b"run_time", "start_time", b"start_time", "target", b"target", "tf_version", b"tf_version"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "build_configuration", + b"build_configuration", + "commit_id", + b"commit_id", + "entries", + b"entries", + "machine_configuration", + b"machine_configuration", + "run_configuration", + b"run_configuration", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "benchmark_type", + b"benchmark_type", + "build_configuration", + b"build_configuration", + "commit_id", + b"commit_id", + "entries", + b"entries", + "machine_configuration", + b"machine_configuration", + "name", + b"name", + "run_configuration", + b"run_configuration", + "run_mode", + b"run_mode", + "run_time", + b"run_time", + "start_time", + b"start_time", + "target", + b"target", + "tf_version", + b"tf_version", + ], + ) -> None: ... 
global___TestResults = TestResults diff --git a/stubs/tensorflow/tensorflow/compiler/xla/xla_data_pb2.pyi b/stubs/tensorflow/tensorflow/compiler/xla/xla_data_pb2.pyi index 572077b07eaa..de86f1c8f255 100644 --- a/stubs/tensorflow/tensorflow/compiler/xla/xla_data_pb2.pyi +++ b/stubs/tensorflow/tensorflow/compiler/xla/xla_data_pb2.pyi @@ -38,7 +38,9 @@ class _PrimitiveType: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _PrimitiveTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_PrimitiveType.ValueType], builtins.type): +class _PrimitiveTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_PrimitiveType.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PRIMITIVE_TYPE_INVALID: _PrimitiveType.ValueType # 0 """Invalid primitive type to serve as default.""" @@ -251,7 +253,9 @@ class _DimLevelType: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _DimLevelTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DimLevelType.ValueType], builtins.type): +class _DimLevelTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DimLevelType.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DIM_DENSE: _DimLevelType.ValueType # 0 """The corresponding dimension is Dense, every entry is stored.""" @@ -294,7 +298,9 @@ class _ProfileType: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _ProfileTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ProfileType.ValueType], builtins.type): +class _ProfileTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ProfileType.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor INVALID: _ProfileType.ValueType # 0 WINDOW: _ProfileType.ValueType # 1 @@ -314,7 +320,9 @@ class _ProfileSource: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _ProfileSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ProfileSource.ValueType], builtins.type): +class _ProfileSourceEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ProfileSource.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PROFILE_SOURCE_UNKNOWN_SOURCE: _ProfileSource.ValueType # 0 PROFILE_SOURCE_EMBEDDED: _ProfileSource.ValueType # 1 @@ -332,7 +340,9 @@ class _CompilationEvent: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _CompilationEventEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CompilationEvent.ValueType], builtins.type): +class _CompilationEventEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CompilationEvent.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor COMPILATION_EVENT_UNKNOWN_EVENT: _CompilationEvent.ValueType # 0 COMPILATION_EVENT_FIRST_COMPILATION: _CompilationEvent.ValueType # 1 @@ -350,7 +360,9 @@ class _PaddingType: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _PaddingTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_PaddingType.ValueType], builtins.type): +class 
_PaddingTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_PaddingType.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PADDING_INVALID: _PaddingType.ValueType # 0 PADDING_VALID: _PaddingType.ValueType # 1 @@ -398,7 +410,9 @@ class _SparsityType: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _SparsityTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SparsityType.ValueType], builtins.type): +class _SparsityTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SparsityType.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor SPARSITY_INVALID: _SparsityType.ValueType # 0 SPARSITY_STRUCTURED_N_M: _SparsityType.ValueType # 1 @@ -415,7 +429,9 @@ class _RandomDistribution: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _RandomDistributionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_RandomDistribution.ValueType], builtins.type): +class _RandomDistributionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_RandomDistribution.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor RNG_INVALID: _RandomDistribution.ValueType # 0 RNG_UNIFORM: _RandomDistribution.ValueType # 1 @@ -444,7 +460,9 @@ class _RandomAlgorithm: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _RandomAlgorithmEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_RandomAlgorithm.ValueType], builtins.type): +class _RandomAlgorithmEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_RandomAlgorithm.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor RNG_DEFAULT: _RandomAlgorithm.ValueType # 0 """Backend dependent default algorithm.""" @@ -493,17 +511,27 @@ class PaddingConfig(google.protobuf.message.Message): edge_padding_high: builtins.int | None = ..., interior_padding: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["edge_padding_high", b"edge_padding_high", "edge_padding_low", b"edge_padding_low", "interior_padding", b"interior_padding"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "edge_padding_high", + b"edge_padding_high", + "edge_padding_low", + b"edge_padding_low", + "interior_padding", + b"interior_padding", + ], + ) -> None: ... DIMENSIONS_FIELD_NUMBER: builtins.int @property - def dimensions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PaddingConfig.PaddingConfigDimension]: + def dimensions( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PaddingConfig.PaddingConfigDimension]: """The padding configuration for all dimensions.""" def __init__( - self, - *, - dimensions: collections.abc.Iterable[global___PaddingConfig.PaddingConfigDimension] | None = ..., + self, *, dimensions: collections.abc.Iterable[global___PaddingConfig.PaddingConfigDimension] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["dimensions", b"dimensions"]) -> None: ... @@ -527,11 +555,7 @@ class TileProto(google.protobuf.message.Message): tiled. """ - def __init__( - self, - *, - dimensions: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... 
+ def __init__(self, *, dimensions: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["dimensions", b"dimensions"]) -> None: ... global___TileProto = TileProto @@ -554,10 +578,7 @@ class SplitConfigProto(google.protobuf.message.Message): """ def __init__( - self, - *, - dimension: builtins.int | None = ..., - split_indices: collections.abc.Iterable[builtins.int] | None = ..., + self, *, dimension: builtins.int | None = ..., split_indices: collections.abc.Iterable[builtins.int] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["dimension", b"dimension", "split_indices", b"split_indices"]) -> None: ... @@ -624,7 +645,9 @@ class LayoutProto(google.protobuf.message.Message): dynamic shape, e.g. a result of SliceToDynamic. """ @property - def dim_level_types(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___DimLevelType.ValueType]: + def dim_level_types( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___DimLevelType.ValueType]: """The dimension level type list for this array, specifying the way in which each array dimension is represented in memory. If this list is empty, the array is assumed to be dense. @@ -688,7 +711,37 @@ class LayoutProto(google.protobuf.message.Message): split_configs: collections.abc.Iterable[global___SplitConfigProto] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["physical_shape", b"physical_shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["dim_level_types", b"dim_level_types", "dim_ordered", b"dim_ordered", "dim_unique", b"dim_unique", "dynamic_shape_metadata_prefix_bytes", b"dynamic_shape_metadata_prefix_bytes", "element_size_in_bits", b"element_size_in_bits", "index_primitive_type", b"index_primitive_type", "memory_space", b"memory_space", "minor_to_major", b"minor_to_major", "physical_shape", b"physical_shape", "pointer_primitive_type", b"pointer_primitive_type", "split_configs", b"split_configs", "tail_padding_alignment_in_elements", b"tail_padding_alignment_in_elements", "tiles", b"tiles"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "dim_level_types", + b"dim_level_types", + "dim_ordered", + b"dim_ordered", + "dim_unique", + b"dim_unique", + "dynamic_shape_metadata_prefix_bytes", + b"dynamic_shape_metadata_prefix_bytes", + "element_size_in_bits", + b"element_size_in_bits", + "index_primitive_type", + b"index_primitive_type", + "memory_space", + b"memory_space", + "minor_to_major", + b"minor_to_major", + "physical_shape", + b"physical_shape", + "pointer_primitive_type", + b"pointer_primitive_type", + "split_configs", + b"split_configs", + "tail_padding_alignment_in_elements", + b"tail_padding_alignment_in_elements", + "tiles", + b"tiles", + ], + ) -> None: ... global___LayoutProto = LayoutProto @@ -752,7 +805,21 @@ class ShapeProto(google.protobuf.message.Message): is_dynamic_dimension: collections.abc.Iterable[builtins.bool] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["layout", b"layout"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["dimensions", b"dimensions", "element_type", b"element_type", "is_dynamic_dimension", b"is_dynamic_dimension", "layout", b"layout", "tuple_shapes", b"tuple_shapes"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "dimensions", + b"dimensions", + "element_type", + b"element_type", + "is_dynamic_dimension", + b"is_dynamic_dimension", + "layout", + b"layout", + "tuple_shapes", + b"tuple_shapes", + ], + ) -> None: ... global___ShapeProto = ShapeProto @@ -781,7 +848,9 @@ class ProgramShapeProto(google.protobuf.message.Message): parameter_names: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["result", b"result"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["parameter_names", b"parameter_names", "parameters", b"parameters", "result", b"result"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["parameter_names", b"parameter_names", "parameters", b"parameters", "result", b"result"] + ) -> None: ... global___ProgramShapeProto = ProgramShapeProto @@ -797,13 +866,10 @@ class ComputationStats(google.protobuf.message.Message): """The number of floating point operations in the computation.""" transcendental_count: builtins.float """The number of transcendental operations (e.g., exp) in the computation.""" - def __init__( - self, - *, - flop_count: builtins.float | None = ..., - transcendental_count: builtins.float | None = ..., + def __init__(self, *, flop_count: builtins.float | None = ..., transcendental_count: builtins.float | None = ...) -> None: ... + def ClearField( + self, field_name: typing.Literal["flop_count", b"flop_count", "transcendental_count", b"transcendental_count"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["flop_count", b"flop_count", "transcendental_count", b"transcendental_count"]) -> None: ... global___ComputationStats = ComputationStats @@ -836,7 +902,9 @@ class OpMetadata(google.protobuf.message.Message): compilation_event: global___CompilationEvent.ValueType """The compilation event that triggered the use of the profiles.""" @property - def profile_type(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___ProfileType.ValueType]: + def profile_type( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___ProfileType.ValueType]: """The type of optimization profiles that this operation contains.""" def __init__( @@ -847,7 +915,19 @@ class OpMetadata(google.protobuf.message.Message): profile_source: global___ProfileSource.ValueType | None = ..., compilation_event: global___CompilationEvent.ValueType | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["compilation_event", b"compilation_event", "profile_source", b"profile_source", "profile_type", b"profile_type", "relative_speedup", b"relative_speedup"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "compilation_event", + b"compilation_event", + "profile_source", + b"profile_source", + "profile_type", + b"profile_type", + "relative_speedup", + b"relative_speedup", + ], + ) -> None: ... OP_TYPE_FIELD_NUMBER: builtins.int OP_NAME_FIELD_NUMBER: builtins.int @@ -929,7 +1009,35 @@ class OpMetadata(google.protobuf.message.Message): scheduling_name: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["profile_info", b"profile_info"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["deduplicated_name", b"deduplicated_name", "op_name", b"op_name", "op_type", b"op_type", "preserve_layout", b"preserve_layout", "profile_info", b"profile_info", "profile_type", b"profile_type", "scheduling_name", b"scheduling_name", "size_of_generated_code_in_bytes", b"size_of_generated_code_in_bytes", "size_of_memory_working_set_in_bytes", b"size_of_memory_working_set_in_bytes", "source_file", b"source_file", "source_line", b"source_line", "stack_frame_id", b"stack_frame_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "deduplicated_name", + b"deduplicated_name", + "op_name", + b"op_name", + "op_type", + b"op_type", + "preserve_layout", + b"preserve_layout", + "profile_info", + b"profile_info", + "profile_type", + b"profile_type", + "scheduling_name", + b"scheduling_name", + "size_of_generated_code_in_bytes", + b"size_of_generated_code_in_bytes", + "size_of_memory_working_set_in_bytes", + b"size_of_memory_working_set_in_bytes", + "source_file", + b"source_file", + "source_line", + b"source_line", + "stack_frame_id", + b"stack_frame_id", + ], + ) -> None: ... global___OpMetadata = OpMetadata @@ -989,7 +1097,27 @@ class ExecutionProfile(google.protobuf.message.Message): profile_cache_hit: builtins.bool | None = ..., warmup_run_executed: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["compilation_cache_hit", b"compilation_cache_hit", "compile_time_ms", b"compile_time_ms", "compute_and_transfer_time_ns", b"compute_and_transfer_time_ns", "compute_cycle_count", b"compute_cycle_count", "compute_time_ns", b"compute_time_ns", "executable_size_in_bytes", b"executable_size_in_bytes", "profile_cache_hit", b"profile_cache_hit", "warmup_run_executed", b"warmup_run_executed"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "compilation_cache_hit", + b"compilation_cache_hit", + "compile_time_ms", + b"compile_time_ms", + "compute_and_transfer_time_ns", + b"compute_and_transfer_time_ns", + "compute_cycle_count", + b"compute_cycle_count", + "compute_time_ns", + b"compute_time_ns", + "executable_size_in_bytes", + b"executable_size_in_bytes", + "profile_cache_hit", + b"profile_cache_hit", + "warmup_run_executed", + b"warmup_run_executed", + ], + ) -> None: ... global___ExecutionProfile = ExecutionProfile @@ -1003,11 +1131,7 @@ class ExecutionHandle(google.protobuf.message.Message): HANDLE_FIELD_NUMBER: builtins.int handle: builtins.int - def __init__( - self, - *, - handle: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, handle: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["handle", b"handle"]) -> None: ... global___ExecutionHandle = ExecutionHandle @@ -1023,11 +1147,7 @@ class GlobalDataHandle(google.protobuf.message.Message): HANDLE_FIELD_NUMBER: builtins.int handle: builtins.int - def __init__( - self, - *, - handle: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, handle: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["handle", b"handle"]) -> None: ... global___GlobalDataHandle = GlobalDataHandle @@ -1048,12 +1168,7 @@ class DeviceHandle(google.protobuf.message.Message): """The number of model-parallel virtual devices that communicate via XLA Send/Recv instructions. """ - def __init__( - self, - *, - handle: builtins.int | None = ..., - device_count: builtins.int | None = ..., - ) -> None: ... 
+ def __init__(self, *, handle: builtins.int | None = ..., device_count: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["device_count", b"device_count", "handle", b"handle"]) -> None: ... global___DeviceHandle = DeviceHandle @@ -1071,7 +1186,9 @@ class ChannelHandle(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ChannelTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ChannelHandle._ChannelType.ValueType], builtins.type): + class _ChannelTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ChannelHandle._ChannelType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor CHANNEL_TYPE_INVALID: ChannelHandle._ChannelType.ValueType # 0 """Invalid primitive type to serve as default.""" @@ -1105,10 +1222,7 @@ class ChannelHandle(google.protobuf.message.Message): handle: builtins.int type: global___ChannelHandle.ChannelType.ValueType def __init__( - self, - *, - handle: builtins.int | None = ..., - type: global___ChannelHandle.ChannelType.ValueType | None = ..., + self, *, handle: builtins.int | None = ..., type: global___ChannelHandle.ChannelType.ValueType | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["handle", b"handle", "type", b"type"]) -> None: ... @@ -1134,11 +1248,7 @@ class DeviceAssignmentProto(google.protobuf.message.Message): REPLICA_DEVICE_IDS_FIELD_NUMBER: builtins.int @property def replica_device_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - def __init__( - self, - *, - replica_device_ids: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... + def __init__(self, *, replica_device_ids: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["replica_device_ids", b"replica_device_ids"]) -> None: ... REPLICA_COUNT_FIELD_NUMBER: builtins.int @@ -1147,7 +1257,11 @@ class DeviceAssignmentProto(google.protobuf.message.Message): replica_count: builtins.int computation_count: builtins.int @property - def computation_devices(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DeviceAssignmentProto.ComputationDevice]: ... + def computation_devices( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___DeviceAssignmentProto.ComputationDevice + ]: ... def __init__( self, *, @@ -1155,7 +1269,17 @@ class DeviceAssignmentProto(google.protobuf.message.Message): computation_count: builtins.int | None = ..., computation_devices: collections.abc.Iterable[global___DeviceAssignmentProto.ComputationDevice] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["computation_count", b"computation_count", "computation_devices", b"computation_devices", "replica_count", b"replica_count"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "computation_count", + b"computation_count", + "computation_devices", + b"computation_devices", + "replica_count", + b"replica_count", + ], + ) -> None: ... global___DeviceAssignmentProto = DeviceAssignmentProto @@ -1276,7 +1400,65 @@ class LiteralProto(google.protobuf.message.Message): sparse_indices: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["shape", b"shape"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["bf16s", b"bf16s", "c128s", b"c128s", "c64s", b"c64s", "f16s", b"f16s", "f32s", b"f32s", "f64s", b"f64s", "f8e4m3b11fnuzs", b"f8e4m3b11fnuzs", "f8e4m3fns", b"f8e4m3fns", "f8e4m3fnuzs", b"f8e4m3fnuzs", "f8e5m2fnuzs", b"f8e5m2fnuzs", "f8e5m2s", b"f8e5m2s", "preds", b"preds", "s16s", b"s16s", "s2s", b"s2s", "s32s", b"s32s", "s4s", b"s4s", "s64s", b"s64s", "s8s", b"s8s", "shape", b"shape", "sparse_indices", b"sparse_indices", "tuple_literals", b"tuple_literals", "u16s", b"u16s", "u2s", b"u2s", "u32s", b"u32s", "u4s", b"u4s", "u64s", b"u64s", "u8s", b"u8s"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bf16s", + b"bf16s", + "c128s", + b"c128s", + "c64s", + b"c64s", + "f16s", + b"f16s", + "f32s", + b"f32s", + "f64s", + b"f64s", + "f8e4m3b11fnuzs", + b"f8e4m3b11fnuzs", + "f8e4m3fns", + b"f8e4m3fns", + "f8e4m3fnuzs", + b"f8e4m3fnuzs", + "f8e5m2fnuzs", + b"f8e5m2fnuzs", + "f8e5m2s", + b"f8e5m2s", + "preds", + b"preds", + "s16s", + b"s16s", + "s2s", + b"s2s", + "s32s", + b"s32s", + "s4s", + b"s4s", + "s64s", + b"s64s", + "s8s", + b"s8s", + "shape", + b"shape", + "sparse_indices", + b"sparse_indices", + "tuple_literals", + b"tuple_literals", + "u16s", + b"u16s", + "u2s", + b"u2s", + "u32s", + b"u32s", + "u4s", + b"u4s", + "u64s", + b"u64s", + "u8s", + b"u8s", + ], + ) -> None: ... global___LiteralProto = LiteralProto @@ -1342,7 +1524,25 @@ class WindowDimension(google.protobuf.message.Message): base_dilation: builtins.int | None = ..., window_reversal: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["base_dilation", b"base_dilation", "padding_high", b"padding_high", "padding_low", b"padding_low", "size", b"size", "stride", b"stride", "window_dilation", b"window_dilation", "window_reversal", b"window_reversal"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "base_dilation", + b"base_dilation", + "padding_high", + b"padding_high", + "padding_low", + b"padding_low", + "size", + b"size", + "stride", + b"stride", + "window_dilation", + b"window_dilation", + "window_reversal", + b"window_reversal", + ], + ) -> None: ... global___WindowDimension = WindowDimension @@ -1360,11 +1560,7 @@ class Window(google.protobuf.message.Message): DIMENSIONS_FIELD_NUMBER: builtins.int @property def dimensions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___WindowDimension]: ... - def __init__( - self, - *, - dimensions: collections.abc.Iterable[global___WindowDimension] | None = ..., - ) -> None: ... + def __init__(self, *, dimensions: collections.abc.Iterable[global___WindowDimension] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["dimensions", b"dimensions"]) -> None: ... global___Window = Window @@ -1391,7 +1587,7 @@ class GatherDimensionNumbers(google.protobuf.message.Message): """ @property def offset_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: - """"Window indices" is a term for a set of indices that index into the + """ "Window indices" is a term for a set of indices that index into the interior of a dynamic-slice from the input tensor, the starting indices for which were computed from output_gather_dims (see the operation semantic for how this is defined) and the start_indices tensor. 
@@ -1435,7 +1631,23 @@ class GatherDimensionNumbers(google.protobuf.message.Message): operand_batching_dims: collections.abc.Iterable[builtins.int] | None = ..., start_indices_batching_dims: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["collapsed_slice_dims", b"collapsed_slice_dims", "index_vector_dim", b"index_vector_dim", "offset_dims", b"offset_dims", "operand_batching_dims", b"operand_batching_dims", "start_index_map", b"start_index_map", "start_indices_batching_dims", b"start_indices_batching_dims"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "collapsed_slice_dims", + b"collapsed_slice_dims", + "index_vector_dim", + b"index_vector_dim", + "offset_dims", + b"offset_dims", + "operand_batching_dims", + b"operand_batching_dims", + "start_index_map", + b"start_index_map", + "start_indices_batching_dims", + b"start_indices_batching_dims", + ], + ) -> None: ... global___GatherDimensionNumbers = GatherDimensionNumbers @@ -1484,7 +1696,23 @@ class ScatterDimensionNumbers(google.protobuf.message.Message): input_batching_dims: collections.abc.Iterable[builtins.int] | None = ..., scatter_indices_batching_dims: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["index_vector_dim", b"index_vector_dim", "input_batching_dims", b"input_batching_dims", "inserted_window_dims", b"inserted_window_dims", "scatter_dims_to_operand_dims", b"scatter_dims_to_operand_dims", "scatter_indices_batching_dims", b"scatter_indices_batching_dims", "update_window_dims", b"update_window_dims"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "index_vector_dim", + b"index_vector_dim", + "input_batching_dims", + b"input_batching_dims", + "inserted_window_dims", + b"inserted_window_dims", + "scatter_dims_to_operand_dims", + b"scatter_dims_to_operand_dims", + "scatter_indices_batching_dims", + b"scatter_indices_batching_dims", + "update_window_dims", + b"update_window_dims", + ], + ) -> None: ... global___ScatterDimensionNumbers = ScatterDimensionNumbers @@ -1549,7 +1777,29 @@ class ConvolutionDimensionNumbers(google.protobuf.message.Message): output_feature_dimension: builtins.int | None = ..., output_spatial_dimensions: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["input_batch_dimension", b"input_batch_dimension", "input_feature_dimension", b"input_feature_dimension", "input_spatial_dimensions", b"input_spatial_dimensions", "kernel_input_feature_dimension", b"kernel_input_feature_dimension", "kernel_output_feature_dimension", b"kernel_output_feature_dimension", "kernel_spatial_dimensions", b"kernel_spatial_dimensions", "output_batch_dimension", b"output_batch_dimension", "output_feature_dimension", b"output_feature_dimension", "output_spatial_dimensions", b"output_spatial_dimensions"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "input_batch_dimension", + b"input_batch_dimension", + "input_feature_dimension", + b"input_feature_dimension", + "input_spatial_dimensions", + b"input_spatial_dimensions", + "kernel_input_feature_dimension", + b"kernel_input_feature_dimension", + "kernel_output_feature_dimension", + b"kernel_output_feature_dimension", + "kernel_spatial_dimensions", + b"kernel_spatial_dimensions", + "output_batch_dimension", + b"output_batch_dimension", + "output_feature_dimension", + b"output_feature_dimension", + "output_spatial_dimensions", + b"output_spatial_dimensions", + ], + ) -> None: ... global___ConvolutionDimensionNumbers = ConvolutionDimensionNumbers @@ -1585,7 +1835,19 @@ class DotDimensionNumbers(google.protobuf.message.Message): lhs_batch_dimensions: collections.abc.Iterable[builtins.int] | None = ..., rhs_batch_dimensions: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["lhs_batch_dimensions", b"lhs_batch_dimensions", "lhs_contracting_dimensions", b"lhs_contracting_dimensions", "rhs_batch_dimensions", b"rhs_batch_dimensions", "rhs_contracting_dimensions", b"rhs_contracting_dimensions"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "lhs_batch_dimensions", + b"lhs_batch_dimensions", + "lhs_contracting_dimensions", + b"lhs_contracting_dimensions", + "rhs_batch_dimensions", + b"rhs_batch_dimensions", + "rhs_contracting_dimensions", + b"rhs_contracting_dimensions", + ], + ) -> None: ... global___DotDimensionNumbers = DotDimensionNumbers @@ -1623,7 +1885,9 @@ class SparsityDescriptor(google.protobuf.message.Message): n: builtins.int | None = ..., m: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["dimension", b"dimension", "index", b"index", "m", b"m", "n", b"n", "type", b"type"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["dimension", b"dimension", "index", b"index", "m", b"m", "n", b"n", "type", b"type"] + ) -> None: ... global___SparsityDescriptor = SparsityDescriptor @@ -1635,7 +1899,9 @@ class TriangularSolveOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TransposeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TriangularSolveOptions._Transpose.ValueType], builtins.type): + class _TransposeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TriangularSolveOptions._Transpose.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor TRANSPOSE_INVALID: TriangularSolveOptions._Transpose.ValueType # 0 NO_TRANSPOSE: TriangularSolveOptions._Transpose.ValueType # 1 @@ -1675,7 +1941,12 @@ class TriangularSolveOptions(google.protobuf.message.Message): unit_diagonal: builtins.bool | None = ..., transpose_a: global___TriangularSolveOptions.Transpose.ValueType | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["left_side", b"left_side", "lower", b"lower", "transpose_a", b"transpose_a", "unit_diagonal", b"unit_diagonal"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "left_side", b"left_side", "lower", b"lower", "transpose_a", b"transpose_a", "unit_diagonal", b"unit_diagonal" + ], + ) -> None: ... 
global___TriangularSolveOptions = TriangularSolveOptions @@ -1688,11 +1959,7 @@ class CholeskyOptions(google.protobuf.message.Message): """If true, uses the lower triangle of `a`. If false, uses the upper triangle of `a`. """ - def __init__( - self, - *, - lower: builtins.bool | None = ..., - ) -> None: ... + def __init__(self, *, lower: builtins.bool | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["lower", b"lower"]) -> None: ... global___CholeskyOptions = CholeskyOptions @@ -1705,11 +1972,7 @@ class SortOptions(google.protobuf.message.Message): DESCENDING_FIELD_NUMBER: builtins.int descending: builtins.bool - def __init__( - self, - *, - descending: builtins.bool | None = ..., - ) -> None: ... + def __init__(self, *, descending: builtins.bool | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["descending", b"descending"]) -> None: ... global___SortOptions = SortOptions @@ -1730,22 +1993,13 @@ class FrontendAttributes(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... MAP_FIELD_NUMBER: builtins.int @property def map(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... - def __init__( - self, - *, - map: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., - ) -> None: ... + def __init__(self, *, map: collections.abc.Mapping[builtins.str, builtins.str] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["map", b"map"]) -> None: ... global___FrontendAttributes = FrontendAttributes @@ -1764,12 +2018,7 @@ class Statistic(google.protobuf.message.Message): """Must be within a range of [0, 100], in order for the graph dumper to properly render the statistic onto the graph. """ - def __init__( - self, - *, - stat_name: builtins.str | None = ..., - stat_val: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, stat_name: builtins.str | None = ..., stat_val: builtins.float | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["stat_name", b"stat_name", "stat_val", b"stat_val"]) -> None: ... global___Statistic = Statistic @@ -1794,7 +2043,9 @@ class StatisticsViz(google.protobuf.message.Message): stat_index_to_visualize: builtins.int | None = ..., statistics: collections.abc.Iterable[global___Statistic] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["stat_index_to_visualize", b"stat_index_to_visualize", "statistics", b"statistics"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["stat_index_to_visualize", b"stat_index_to_visualize", "statistics", b"statistics"] + ) -> None: ... 
global___StatisticsViz = StatisticsViz @@ -1808,7 +2059,9 @@ class OpSharding(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpSharding._Type.ValueType], builtins.type): + class _TypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpSharding._Type.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor REPLICATED: OpSharding._Type.ValueType # 0 """This sharding is replicated across all devices (implies maximal, @@ -1853,7 +2106,9 @@ class OpSharding(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ShardGroupTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpSharding._ShardGroupType.ValueType], builtins.type): + class _ShardGroupTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpSharding._ShardGroupType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor AS: OpSharding._ShardGroupType.ValueType # 0 """This op will be sharded exactly the same as the other op. (hard @@ -1944,7 +2199,9 @@ class OpSharding(google.protobuf.message.Message): """ @property - def last_tile_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___OpSharding.Type.ValueType]: + def last_tile_dims( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___OpSharding.Type.ValueType]: """This field is used to represented the sharding type of each subgroup. For example, sharding={devices=[2,2,2,2]0,1,2,...,15 last_tile_dims={ replicate, manual, unreduced}} means that each of the last 3 dimensions @@ -1983,7 +2240,37 @@ class OpSharding(google.protobuf.message.Message): shard_group_type: global___OpSharding.ShardGroupType.ValueType | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["tile_shape", b"tile_shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["iota_reshape_dims", b"iota_reshape_dims", "iota_transpose_perm", b"iota_transpose_perm", "is_shard_group", b"is_shard_group", "last_tile_dims", b"last_tile_dims", "metadata", b"metadata", "replicate_on_last_tile_dim", b"replicate_on_last_tile_dim", "shard_group_id", b"shard_group_id", "shard_group_type", b"shard_group_type", "tile_assignment_devices", b"tile_assignment_devices", "tile_assignment_dimensions", b"tile_assignment_dimensions", "tile_shape", b"tile_shape", "tuple_shardings", b"tuple_shardings", "type", b"type"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "iota_reshape_dims", + b"iota_reshape_dims", + "iota_transpose_perm", + b"iota_transpose_perm", + "is_shard_group", + b"is_shard_group", + "last_tile_dims", + b"last_tile_dims", + "metadata", + b"metadata", + "replicate_on_last_tile_dim", + b"replicate_on_last_tile_dim", + "shard_group_id", + b"shard_group_id", + "shard_group_type", + b"shard_group_type", + "tile_assignment_devices", + b"tile_assignment_devices", + "tile_assignment_dimensions", + b"tile_assignment_dimensions", + "tile_shape", + b"tile_shape", + "tuple_shardings", + b"tuple_shardings", + "type", + b"type", + ], + ) -> None: ... global___OpSharding = OpSharding @@ -2002,11 +2289,7 @@ class ReplicaGroup(google.protobuf.message.Message): ids matters in some ops (e.g., all-to-all). 
""" - def __init__( - self, - *, - replica_ids: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... + def __init__(self, *, replica_ids: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["replica_ids", b"replica_ids"]) -> None: ... global___ReplicaGroup = ReplicaGroup @@ -2047,7 +2330,19 @@ class IotaReplicaGroupListProto(google.protobuf.message.Message): iota_reshape_dims: collections.abc.Iterable[builtins.int] | None = ..., iota_transpose_perm: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["iota_reshape_dims", b"iota_reshape_dims", "iota_transpose_perm", b"iota_transpose_perm", "num_devices_per_group", b"num_devices_per_group", "num_replica_groups", b"num_replica_groups"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "iota_reshape_dims", + b"iota_reshape_dims", + "iota_transpose_perm", + b"iota_transpose_perm", + "num_devices_per_group", + b"num_devices_per_group", + "num_replica_groups", + b"num_replica_groups", + ], + ) -> None: ... global___IotaReplicaGroupListProto = IotaReplicaGroupListProto @@ -2081,7 +2376,10 @@ class CollectiveDeviceListProto(google.protobuf.message.Message): iota_replica_group_list: global___IotaReplicaGroupListProto | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["iota_replica_group_list", b"iota_replica_group_list"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["iota_replica_group_list", b"iota_replica_group_list", "replica_groups", b"replica_groups"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal["iota_replica_group_list", b"iota_replica_group_list", "replica_groups", b"replica_groups"], + ) -> None: ... global___CollectiveDeviceListProto = CollectiveDeviceListProto @@ -2095,12 +2393,7 @@ class SourceTarget(google.protobuf.message.Message): TARGET_FIELD_NUMBER: builtins.int source: builtins.int target: builtins.int - def __init__( - self, - *, - source: builtins.int | None = ..., - target: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, source: builtins.int | None = ..., target: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["source", b"source", "target", b"target"]) -> None: ... 
global___SourceTarget = SourceTarget @@ -2117,7 +2410,9 @@ class PrecisionConfig(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _PrecisionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PrecisionConfig._Precision.ValueType], builtins.type): + class _PrecisionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PrecisionConfig._Precision.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT: PrecisionConfig._Precision.ValueType # 0 HIGH: PrecisionConfig._Precision.ValueType # 1 @@ -2136,7 +2431,9 @@ class PrecisionConfig(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _AlgorithmEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PrecisionConfig._Algorithm.ValueType], builtins.type): + class _AlgorithmEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PrecisionConfig._Algorithm.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor ALG_UNSET: PrecisionConfig._Algorithm.ValueType # 0 """If the algorithm is `ALG_UNSET`, we will decide the algorithm based on @@ -2231,14 +2528,18 @@ class PrecisionConfig(google.protobuf.message.Message): will be ignored. """ @property - def operand_precision(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___PrecisionConfig.Precision.ValueType]: ... + def operand_precision( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___PrecisionConfig.Precision.ValueType]: ... def __init__( self, *, operand_precision: collections.abc.Iterable[global___PrecisionConfig.Precision.ValueType] | None = ..., algorithm: global___PrecisionConfig.Algorithm.ValueType | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["algorithm", b"algorithm", "operand_precision", b"operand_precision"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["algorithm", b"algorithm", "operand_precision", b"operand_precision"] + ) -> None: ... global___PrecisionConfig = PrecisionConfig @@ -2261,11 +2562,7 @@ class ParameterReplication(google.protobuf.message.Message): the HLO instruction's shape. """ - def __init__( - self, - *, - replicated_at_leaf_buffers: collections.abc.Iterable[builtins.bool] | None = ..., - ) -> None: ... + def __init__(self, *, replicated_at_leaf_buffers: collections.abc.Iterable[builtins.bool] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["replicated_at_leaf_buffers", b"replicated_at_leaf_buffers"]) -> None: ... global___ParameterReplication = ParameterReplication @@ -2290,11 +2587,7 @@ class WhileLoopBackendConfig(google.protobuf.message.Message): N_FIELD_NUMBER: builtins.int n: builtins.int - def __init__( - self, - *, - n: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, n: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["n", b"n"]) -> None: ... KNOWN_TRIP_COUNT_FIELD_NUMBER: builtins.int @@ -2304,11 +2597,7 @@ class WhileLoopBackendConfig(google.protobuf.message.Message): unknown-trip-count. """ - def __init__( - self, - *, - known_trip_count: global___WhileLoopBackendConfig.KnownTripCount | None = ..., - ) -> None: ... + def __init__(self, *, known_trip_count: global___WhileLoopBackendConfig.KnownTripCount | None = ...) 
-> None: ... def HasField(self, field_name: typing.Literal["known_trip_count", b"known_trip_count"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["known_trip_count", b"known_trip_count"]) -> None: ... @@ -2337,7 +2626,17 @@ class OutputOperandAliasing(google.protobuf.message.Message): operand_index: builtins.int | None = ..., operand_shape_index: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["operand_index", b"operand_index", "operand_shape_index", b"operand_shape_index", "output_shape_index", b"output_shape_index"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "operand_index", + b"operand_index", + "operand_shape_index", + b"operand_shape_index", + "output_shape_index", + b"output_shape_index", + ], + ) -> None: ... global___OutputOperandAliasing = OutputOperandAliasing @@ -2360,7 +2659,12 @@ class OriginalArrayProto(google.protobuf.message.Message): instruction_name: builtins.str | None = ..., shape_index: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["instruction_name", b"instruction_name", "leaf_shape_index", b"leaf_shape_index", "shape_index", b"shape_index"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "instruction_name", b"instruction_name", "leaf_shape_index", b"leaf_shape_index", "shape_index", b"shape_index" + ], + ) -> None: ... global___OriginalArrayProto = OriginalArrayProto @@ -2371,11 +2675,7 @@ class OriginalValueProto(google.protobuf.message.Message): LEAVES_FIELD_NUMBER: builtins.int @property def leaves(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OriginalArrayProto]: ... - def __init__( - self, - *, - leaves: collections.abc.Iterable[global___OriginalArrayProto] | None = ..., - ) -> None: ... + def __init__(self, *, leaves: collections.abc.Iterable[global___OriginalArrayProto] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["leaves", b"leaves"]) -> None: ... global___OriginalValueProto = OriginalValueProto diff --git a/stubs/tensorflow/tensorflow/compiler/xla/xla_pb2.pyi b/stubs/tensorflow/tensorflow/compiler/xla/xla_pb2.pyi index 086a41fc6528..17820b9e12c0 100644 --- a/stubs/tensorflow/tensorflow/compiler/xla/xla_pb2.pyi +++ b/stubs/tensorflow/tensorflow/compiler/xla/xla_pb2.pyi @@ -45,12 +45,10 @@ class CompilationEnvironmentsProto(google.protobuf.message.Message): ENVIRONMENTS_FIELD_NUMBER: builtins.int @property - def environments(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.any_pb2.Any]: ... - def __init__( + def environments( self, - *, - environments: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ..., - ) -> None: ... + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.any_pb2.Any]: ... + def __init__(self, *, environments: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["environments", b"environments"]) -> None: ... 
global___CompilationEnvironmentsProto = CompilationEnvironmentsProto @@ -79,7 +77,9 @@ class DebugOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ShapeChecksEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._ShapeChecks.ValueType], builtins.type): + class _ShapeChecksEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._ShapeChecks.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor IGNORE: DebugOptions._ShapeChecks.ValueType # 0 """Do not insert any shape checks for dynamically shaped operations; output @@ -112,7 +112,9 @@ class DebugOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StepMarkerLocationEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._StepMarkerLocation.ValueType], builtins.type): + class _StepMarkerLocationEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._StepMarkerLocation.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor STEP_MARK_AT_ENTRY: DebugOptions._StepMarkerLocation.ValueType # 0 """Generate a step marker at the program entry. This handles the case where @@ -153,7 +155,9 @@ class DebugOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _CollectiveOpTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._CollectiveOpType.ValueType], builtins.type): + class _CollectiveOpTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._CollectiveOpType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor NOOP: DebugOptions._CollectiveOpType.ValueType # 0 ALLREDUCE: DebugOptions._CollectiveOpType.ValueType # 1 @@ -180,7 +184,9 @@ class DebugOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _CommandBufferCmdTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._CommandBufferCmdType.ValueType], builtins.type): + class _CommandBufferCmdTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._CommandBufferCmdType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor INVALID: DebugOptions._CommandBufferCmdType.ValueType # 0 FUSION: DebugOptions._CommandBufferCmdType.ValueType # 1 @@ -211,7 +217,9 @@ class DebugOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _PartitioningAlgorithmEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._PartitioningAlgorithm.ValueType], builtins.type): + class _PartitioningAlgorithmEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._PartitioningAlgorithm.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PARTITIONING_ALGORITHM_NOOP: DebugOptions._PartitioningAlgorithm.ValueType # 0 PARTITIONING_ALGORITHM_EXP0: DebugOptions._PartitioningAlgorithm.ValueType # 1 @@ -228,7 +236,9 @@ class 
DebugOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _WhileLoopUnrollingEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._WhileLoopUnrolling.ValueType], builtins.type): + class _WhileLoopUnrollingEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._WhileLoopUnrolling.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor WHILE_LOOP_UNROLLING_NO_UNROLL: DebugOptions._WhileLoopUnrolling.ValueType # 0 WHILE_LOOP_UNROLLING_DOUBLE_BUFFER: DebugOptions._WhileLoopUnrolling.ValueType # 1 @@ -251,7 +261,9 @@ class DebugOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _AutotuneCacheModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._AutotuneCacheMode.ValueType], builtins.type): + class _AutotuneCacheModeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._AutotuneCacheMode.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor AUTOTUNE_CACHE_MODE_UNSPECIFIED: DebugOptions._AutotuneCacheMode.ValueType # 0 AUTOTUNE_CACHE_MODE_UPDATE: DebugOptions._AutotuneCacheMode.ValueType # 1 @@ -282,12 +294,7 @@ class DebugOptions(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... XLA_CPU_ENABLE_CONCURRENCY_OPTIMIZED_SCHEDULER_FIELD_NUMBER: builtins.int @@ -1170,13 +1177,19 @@ class DebugOptions(google.protobuf.message.Message): """Paths to files with LLVM code.""" @property - def xla_gpu_disable_async_collectives(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___DebugOptions.CollectiveOpType.ValueType]: ... + def xla_gpu_disable_async_collectives( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___DebugOptions.CollectiveOpType.ValueType]: ... @property - def xla_gpu_enable_command_buffer(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___DebugOptions.CommandBufferCmdType.ValueType]: + def xla_gpu_enable_command_buffer( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___DebugOptions.CommandBufferCmdType.ValueType]: """Determine the types of commands that are recorded into command buffers.""" @property - def legacy_command_buffer_custom_call_targets(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + def legacy_command_buffer_custom_call_targets( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """Custom call targets with legacy registry API (non FFI API), that support recording to command buffer custom command, i.e., custom call target supports cuda-graph capturing for CUDA devices. 
@@ -1271,7 +1284,9 @@ class DebugOptions(google.protobuf.message.Message): xla_gpu_enable_llvm_module_compilation_parallelism: builtins.bool | None = ..., xla_gpu_deterministic_ops: builtins.bool | None = ..., xla_gpu_llvm_ir_file: collections.abc.Iterable[builtins.str] | None = ..., - xla_gpu_disable_async_collectives: collections.abc.Iterable[global___DebugOptions.CollectiveOpType.ValueType] | None = ..., + xla_gpu_disable_async_collectives: ( + collections.abc.Iterable[global___DebugOptions.CollectiveOpType.ValueType] | None + ) = ..., xla_gpu_all_reduce_combine_threshold_bytes: builtins.int | None = ..., xla_gpu_all_gather_combine_threshold_bytes: builtins.int | None = ..., xla_gpu_reduce_scatter_combine_threshold_bytes: builtins.int | None = ..., @@ -1295,7 +1310,9 @@ class DebugOptions(google.protobuf.message.Message): xla_gpu_nccl_termination_timeout_seconds: builtins.int | None = ..., xla_gpu_enable_shared_constants: builtins.bool | None = ..., xla_gpu_enable_cublaslt: builtins.bool | None = ..., - xla_gpu_enable_command_buffer: collections.abc.Iterable[global___DebugOptions.CommandBufferCmdType.ValueType] | None = ..., + xla_gpu_enable_command_buffer: ( + collections.abc.Iterable[global___DebugOptions.CommandBufferCmdType.ValueType] | None + ) = ..., xla_gpu_graph_min_graph_size: builtins.int | None = ..., xla_gpu_graph_enable_concurrent_region: builtins.bool | None = ..., xla_gpu_redzone_scratch_max_megabytes: builtins.int | None = ..., @@ -1399,7 +1416,417 @@ class DebugOptions(google.protobuf.message.Message): xla_experimental_ignore_channel_id: builtins.bool | None = ..., xla_backend_extra_options: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["legacy_command_buffer_custom_call_targets", b"legacy_command_buffer_custom_call_targets", "xla_allow_excess_precision", b"xla_allow_excess_precision", "xla_allow_scalar_index_dynamic_ops", b"xla_allow_scalar_index_dynamic_ops", "xla_backend_extra_options", b"xla_backend_extra_options", "xla_backend_optimization_level", b"xla_backend_optimization_level", "xla_cmd_buffer_trace_cache_size", b"xla_cmd_buffer_trace_cache_size", "xla_cpu_enable_concurrency_optimized_scheduler", b"xla_cpu_enable_concurrency_optimized_scheduler", "xla_cpu_enable_custom_matmul_tiling", b"xla_cpu_enable_custom_matmul_tiling", "xla_cpu_enable_experimental_deallocation", b"xla_cpu_enable_experimental_deallocation", "xla_cpu_enable_fast_math", b"xla_cpu_enable_fast_math", "xla_cpu_enable_fast_min_max", b"xla_cpu_enable_fast_min_max", "xla_cpu_enable_mlir_fusion_outlining", b"xla_cpu_enable_mlir_fusion_outlining", "xla_cpu_enable_mlir_tiling_and_fusion", b"xla_cpu_enable_mlir_tiling_and_fusion", "xla_cpu_enable_xprof_traceme", b"xla_cpu_enable_xprof_traceme", "xla_cpu_fast_math_honor_division", b"xla_cpu_fast_math_honor_division", "xla_cpu_fast_math_honor_functions", b"xla_cpu_fast_math_honor_functions", "xla_cpu_fast_math_honor_infs", b"xla_cpu_fast_math_honor_infs", "xla_cpu_fast_math_honor_nans", b"xla_cpu_fast_math_honor_nans", "xla_cpu_matmul_tiling_k_dim", b"xla_cpu_matmul_tiling_k_dim", "xla_cpu_matmul_tiling_m_dim", b"xla_cpu_matmul_tiling_m_dim", "xla_cpu_matmul_tiling_n_dim", b"xla_cpu_matmul_tiling_n_dim", "xla_cpu_multi_thread_eigen", b"xla_cpu_multi_thread_eigen", "xla_cpu_parallel_codegen_split_count", b"xla_cpu_parallel_codegen_split_count", "xla_cpu_prefer_vector_width", b"xla_cpu_prefer_vector_width", "xla_cpu_strict_dot_conv_math", b"xla_cpu_strict_dot_conv_math", 
"xla_cpu_use_acl", b"xla_cpu_use_acl", "xla_cpu_use_mkl_dnn", b"xla_cpu_use_mkl_dnn", "xla_cpu_use_thunk_runtime", b"xla_cpu_use_thunk_runtime", "xla_debug_buffer_assignment_show_max", b"xla_debug_buffer_assignment_show_max", "xla_detailed_logging", b"xla_detailed_logging", "xla_disable_all_hlo_passes", b"xla_disable_all_hlo_passes", "xla_disable_hlo_passes", b"xla_disable_hlo_passes", "xla_dump_compress_protos", b"xla_dump_compress_protos", "xla_dump_disable_metadata", b"xla_dump_disable_metadata", "xla_dump_enable_mlir_pretty_form", b"xla_dump_enable_mlir_pretty_form", "xla_dump_fusion_visualization", b"xla_dump_fusion_visualization", "xla_dump_hlo_as_dot", b"xla_dump_hlo_as_dot", "xla_dump_hlo_as_html", b"xla_dump_hlo_as_html", "xla_dump_hlo_as_long_text", b"xla_dump_hlo_as_long_text", "xla_dump_hlo_as_proto", b"xla_dump_hlo_as_proto", "xla_dump_hlo_as_text", b"xla_dump_hlo_as_text", "xla_dump_hlo_as_url", b"xla_dump_hlo_as_url", "xla_dump_hlo_module_re", b"xla_dump_hlo_module_re", "xla_dump_hlo_pass_re", b"xla_dump_hlo_pass_re", "xla_dump_hlo_pipeline_re", b"xla_dump_hlo_pipeline_re", "xla_dump_hlo_snapshots", b"xla_dump_hlo_snapshots", "xla_dump_include_timestamp", b"xla_dump_include_timestamp", "xla_dump_large_constants", b"xla_dump_large_constants", "xla_dump_latency_hiding_schedule", b"xla_dump_latency_hiding_schedule", "xla_dump_max_hlo_modules", b"xla_dump_max_hlo_modules", "xla_dump_module_metadata", b"xla_dump_module_metadata", "xla_dump_to", b"xla_dump_to", "xla_eliminate_hlo_implicit_broadcast", b"xla_eliminate_hlo_implicit_broadcast", "xla_embed_ir_in_executable", b"xla_embed_ir_in_executable", "xla_enable_command_buffers_during_profiling", b"xla_enable_command_buffers_during_profiling", "xla_enable_dumping", b"xla_enable_dumping", "xla_enable_hlo_passes_only", b"xla_enable_hlo_passes_only", "xla_experimental_ignore_channel_id", b"xla_experimental_ignore_channel_id", "xla_force_host_platform_device_count", b"xla_force_host_platform_device_count", "xla_gpu_algorithm_denylist_path", b"xla_gpu_algorithm_denylist_path", "xla_gpu_all_gather_combine_threshold_bytes", b"xla_gpu_all_gather_combine_threshold_bytes", "xla_gpu_all_reduce_blueconnect_num_devices_per_host", b"xla_gpu_all_reduce_blueconnect_num_devices_per_host", "xla_gpu_all_reduce_combine_threshold_bytes", b"xla_gpu_all_reduce_combine_threshold_bytes", "xla_gpu_asm_extra_flags", b"xla_gpu_asm_extra_flags", "xla_gpu_async_dot", b"xla_gpu_async_dot", "xla_gpu_auto_spmd_partitioning_memory_budget_gb", b"xla_gpu_auto_spmd_partitioning_memory_budget_gb", "xla_gpu_auto_spmd_partitioning_memory_budget_ratio", b"xla_gpu_auto_spmd_partitioning_memory_budget_ratio", "xla_gpu_autotune_gemm_rtol", b"xla_gpu_autotune_gemm_rtol", "xla_gpu_autotune_level", b"xla_gpu_autotune_level", "xla_gpu_autotune_max_solutions", b"xla_gpu_autotune_max_solutions", "xla_gpu_collect_cost_model_stats", b"xla_gpu_collect_cost_model_stats", "xla_gpu_collective_inflation_factor", b"xla_gpu_collective_inflation_factor", "xla_gpu_collective_permute_decomposer_threshold", b"xla_gpu_collective_permute_decomposer_threshold", "xla_gpu_copy_insertion_use_region_analysis", b"xla_gpu_copy_insertion_use_region_analysis", "xla_gpu_crash_on_verification_failures", b"xla_gpu_crash_on_verification_failures", "xla_gpu_cublas_fallback", b"xla_gpu_cublas_fallback", "xla_gpu_cuda_data_dir", b"xla_gpu_cuda_data_dir", "xla_gpu_cudnn_gemm_fusion_level", b"xla_gpu_cudnn_gemm_fusion_level", "xla_gpu_cudnn_gemm_max_plans", b"xla_gpu_cudnn_gemm_max_plans", 
"xla_gpu_deterministic_ops", b"xla_gpu_deterministic_ops", "xla_gpu_disable_async_collectives", b"xla_gpu_disable_async_collectives", "xla_gpu_disable_gpuasm_optimizations", b"xla_gpu_disable_gpuasm_optimizations", "xla_gpu_dump_autotune_logs_to", b"xla_gpu_dump_autotune_logs_to", "xla_gpu_dump_autotune_results_to", b"xla_gpu_dump_autotune_results_to", "xla_gpu_dump_autotuned_gemm_fusions", b"xla_gpu_dump_autotuned_gemm_fusions", "xla_gpu_dump_llvmir", b"xla_gpu_dump_llvmir", "xla_gpu_enable_all_gather_combine_by_dim", b"xla_gpu_enable_all_gather_combine_by_dim", "xla_gpu_enable_analytical_latency_estimator", b"xla_gpu_enable_analytical_latency_estimator", "xla_gpu_enable_approx_costly_collectives", b"xla_gpu_enable_approx_costly_collectives", "xla_gpu_enable_bf16_3way_gemm", b"xla_gpu_enable_bf16_3way_gemm", "xla_gpu_enable_bf16_6way_gemm", b"xla_gpu_enable_bf16_6way_gemm", "xla_gpu_enable_command_buffer", b"xla_gpu_enable_command_buffer", "xla_gpu_enable_cub_radix_sort", b"xla_gpu_enable_cub_radix_sort", "xla_gpu_enable_cublaslt", b"xla_gpu_enable_cublaslt", "xla_gpu_enable_cudnn_fmha", b"xla_gpu_enable_cudnn_fmha", "xla_gpu_enable_cudnn_frontend", b"xla_gpu_enable_cudnn_frontend", "xla_gpu_enable_cudnn_int8x32_convolution_reordering", b"xla_gpu_enable_cudnn_int8x32_convolution_reordering", "xla_gpu_enable_cudnn_layer_norm", b"xla_gpu_enable_cudnn_layer_norm", "xla_gpu_enable_custom_fusions", b"xla_gpu_enable_custom_fusions", "xla_gpu_enable_custom_fusions_re", b"xla_gpu_enable_custom_fusions_re", "xla_gpu_enable_dot_strength_reduction", b"xla_gpu_enable_dot_strength_reduction", "xla_gpu_enable_dynamic_slice_fusion", b"xla_gpu_enable_dynamic_slice_fusion", "xla_gpu_enable_fast_min_max", b"xla_gpu_enable_fast_min_max", "xla_gpu_enable_highest_priority_async_stream", b"xla_gpu_enable_highest_priority_async_stream", "xla_gpu_enable_host_memory_offloading", b"xla_gpu_enable_host_memory_offloading", "xla_gpu_enable_latency_hiding_scheduler", b"xla_gpu_enable_latency_hiding_scheduler", "xla_gpu_enable_libnvjitlink", b"xla_gpu_enable_libnvjitlink", "xla_gpu_enable_libnvptxcompiler", b"xla_gpu_enable_libnvptxcompiler", "xla_gpu_enable_llvm_module_compilation_parallelism", b"xla_gpu_enable_llvm_module_compilation_parallelism", "xla_gpu_enable_nccl_clique_optimization", b"xla_gpu_enable_nccl_clique_optimization", "xla_gpu_enable_nccl_comm_splitting", b"xla_gpu_enable_nccl_comm_splitting", "xla_gpu_enable_nccl_per_stream_comms", b"xla_gpu_enable_nccl_per_stream_comms", "xla_gpu_enable_nccl_user_buffers", b"xla_gpu_enable_nccl_user_buffers", "xla_gpu_enable_pgle_accuracy_checker", b"xla_gpu_enable_pgle_accuracy_checker", "xla_gpu_enable_pipelined_all_gather", b"xla_gpu_enable_pipelined_all_gather", "xla_gpu_enable_pipelined_all_reduce", b"xla_gpu_enable_pipelined_all_reduce", "xla_gpu_enable_pipelined_collectives", b"xla_gpu_enable_pipelined_collectives", "xla_gpu_enable_pipelined_p2p", b"xla_gpu_enable_pipelined_p2p", "xla_gpu_enable_pipelined_reduce_scatter", b"xla_gpu_enable_pipelined_reduce_scatter", "xla_gpu_enable_priority_fusion", b"xla_gpu_enable_priority_fusion", "xla_gpu_enable_reassociation_for_converted_ar", b"xla_gpu_enable_reassociation_for_converted_ar", "xla_gpu_enable_reduce_scatter_combine_by_dim", b"xla_gpu_enable_reduce_scatter_combine_by_dim", "xla_gpu_enable_reduction_epilogue_fusion", b"xla_gpu_enable_reduction_epilogue_fusion", "xla_gpu_enable_shared_constants", b"xla_gpu_enable_shared_constants", "xla_gpu_enable_split_k_autotuning", b"xla_gpu_enable_split_k_autotuning", 
"xla_gpu_enable_triton_gemm", b"xla_gpu_enable_triton_gemm", "xla_gpu_enable_triton_gemm_int4", b"xla_gpu_enable_triton_gemm_int4", "xla_gpu_enable_triton_hopper", b"xla_gpu_enable_triton_hopper", "xla_gpu_enable_while_loop_double_buffering", b"xla_gpu_enable_while_loop_double_buffering", "xla_gpu_enable_while_loop_reduce_scatter_code_motion", b"xla_gpu_enable_while_loop_reduce_scatter_code_motion", "xla_gpu_enable_while_loop_unrolling", b"xla_gpu_enable_while_loop_unrolling", "xla_gpu_ensure_minor_dot_contraction_dims", b"xla_gpu_ensure_minor_dot_contraction_dims", "xla_gpu_exclude_nondeterministic_ops", b"xla_gpu_exclude_nondeterministic_ops", "xla_gpu_executable_terminate_timeout_seconds", b"xla_gpu_executable_terminate_timeout_seconds", "xla_gpu_executable_warn_stuck_timeout_seconds", b"xla_gpu_executable_warn_stuck_timeout_seconds", "xla_gpu_exhaustive_tiling_search", b"xla_gpu_exhaustive_tiling_search", "xla_gpu_experimental_autotune_cache_mode", b"xla_gpu_experimental_autotune_cache_mode", "xla_gpu_experimental_disable_binary_libraries", b"xla_gpu_experimental_disable_binary_libraries", "xla_gpu_experimental_enable_triton_softmax_priority_fusion", b"xla_gpu_experimental_enable_triton_softmax_priority_fusion", "xla_gpu_filter_kernels_spilling_registers_on_autotuning", b"xla_gpu_filter_kernels_spilling_registers_on_autotuning", "xla_gpu_force_compilation_parallelism", b"xla_gpu_force_compilation_parallelism", "xla_gpu_force_conv_nchw", b"xla_gpu_force_conv_nchw", "xla_gpu_force_conv_nhwc", b"xla_gpu_force_conv_nhwc", "xla_gpu_ftz", b"xla_gpu_ftz", "xla_gpu_fused_attention_use_cudnn_rng", b"xla_gpu_fused_attention_use_cudnn_rng", "xla_gpu_gemm_rewrite_size_threshold", b"xla_gpu_gemm_rewrite_size_threshold", "xla_gpu_graph_enable_concurrent_region", b"xla_gpu_graph_enable_concurrent_region", "xla_gpu_graph_min_graph_size", b"xla_gpu_graph_min_graph_size", "xla_gpu_kernel_cache_file", b"xla_gpu_kernel_cache_file", "xla_gpu_lhs_enable_gpu_async_tracker", b"xla_gpu_lhs_enable_gpu_async_tracker", "xla_gpu_llvm_ir_file", b"xla_gpu_llvm_ir_file", "xla_gpu_llvm_verification_level", b"xla_gpu_llvm_verification_level", "xla_gpu_load_autotune_results_from", b"xla_gpu_load_autotune_results_from", "xla_gpu_memory_limit_slop_factor", b"xla_gpu_memory_limit_slop_factor", "xla_gpu_mlir_emitter_level", b"xla_gpu_mlir_emitter_level", "xla_gpu_mock_custom_calls", b"xla_gpu_mock_custom_calls", "xla_gpu_multi_streamed_windowed_einsum", b"xla_gpu_multi_streamed_windowed_einsum", "xla_gpu_nccl_collective_max_nchannels", b"xla_gpu_nccl_collective_max_nchannels", "xla_gpu_nccl_p2p_max_nchannels", b"xla_gpu_nccl_p2p_max_nchannels", "xla_gpu_nccl_terminate_on_error", b"xla_gpu_nccl_terminate_on_error", "xla_gpu_nccl_termination_timeout_seconds", b"xla_gpu_nccl_termination_timeout_seconds", "xla_gpu_override_gemm_autotuner", b"xla_gpu_override_gemm_autotuner", "xla_gpu_per_fusion_autotune_cache_dir", b"xla_gpu_per_fusion_autotune_cache_dir", "xla_gpu_pgle_profile_file_or_directory_path", b"xla_gpu_pgle_profile_file_or_directory_path", "xla_gpu_ptx_file", b"xla_gpu_ptx_file", "xla_gpu_reduce_scatter_combine_threshold_bytes", b"xla_gpu_reduce_scatter_combine_threshold_bytes", "xla_gpu_redzone_padding_bytes", b"xla_gpu_redzone_padding_bytes", "xla_gpu_redzone_scratch_max_megabytes", b"xla_gpu_redzone_scratch_max_megabytes", "xla_gpu_require_complete_aot_autotune_results", b"xla_gpu_require_complete_aot_autotune_results", "xla_gpu_run_post_layout_collective_pipeliner", b"xla_gpu_run_post_layout_collective_pipeliner", 
"xla_gpu_shape_checks", b"xla_gpu_shape_checks", "xla_gpu_shard_autotuning", b"xla_gpu_shard_autotuning", "xla_gpu_strict_conv_algorithm_picker", b"xla_gpu_strict_conv_algorithm_picker", "xla_gpu_target_config_filename", b"xla_gpu_target_config_filename", "xla_gpu_temp_buffer_use_separate_color", b"xla_gpu_temp_buffer_use_separate_color", "xla_gpu_threshold_for_windowed_einsum_mib", b"xla_gpu_threshold_for_windowed_einsum_mib", "xla_gpu_triton_fusion_level", b"xla_gpu_triton_fusion_level", "xla_gpu_triton_gemm_any", b"xla_gpu_triton_gemm_any", "xla_gpu_triton_gemm_disable_reduced_precision_reduction", b"xla_gpu_triton_gemm_disable_reduced_precision_reduction", "xla_gpu_unsafe_fallback_to_driver_on_ptxas_not_found", b"xla_gpu_unsafe_fallback_to_driver_on_ptxas_not_found", "xla_gpu_unsafe_pipelined_loop_annotator", b"xla_gpu_unsafe_pipelined_loop_annotator", "xla_gpu_unsupported_enable_triton_gemm", b"xla_gpu_unsupported_enable_triton_gemm", "xla_gpu_use_memcpy_local_p2p", b"xla_gpu_use_memcpy_local_p2p", "xla_gpu_use_runtime_fusion", b"xla_gpu_use_runtime_fusion", "xla_gpu_verify_triton_fusion_numerics", b"xla_gpu_verify_triton_fusion_numerics", "xla_hlo_evaluator_use_fast_path", b"xla_hlo_evaluator_use_fast_path", "xla_hlo_graph_addresses", b"xla_hlo_graph_addresses", "xla_hlo_graph_sharding_color", b"xla_hlo_graph_sharding_color", "xla_hlo_profile", b"xla_hlo_profile", "xla_llvm_disable_expensive_passes", b"xla_llvm_disable_expensive_passes", "xla_llvm_enable_alias_scope_metadata", b"xla_llvm_enable_alias_scope_metadata", "xla_llvm_enable_invariant_load_metadata", b"xla_llvm_enable_invariant_load_metadata", "xla_llvm_enable_noalias_metadata", b"xla_llvm_enable_noalias_metadata", "xla_llvm_force_inline_before_split", b"xla_llvm_force_inline_before_split", "xla_multiheap_size_constraint_per_heap", b"xla_multiheap_size_constraint_per_heap", "xla_partitioning_algorithm", b"xla_partitioning_algorithm", "xla_reduce_window_rewrite_base_length", b"xla_reduce_window_rewrite_base_length", "xla_step_marker_location", b"xla_step_marker_location", "xla_syntax_sugar_async_ops", b"xla_syntax_sugar_async_ops", "xla_test_all_input_layouts", b"xla_test_all_input_layouts", "xla_test_all_output_layouts", b"xla_test_all_output_layouts", "xla_tpu_detect_inf", b"xla_tpu_detect_inf", "xla_tpu_detect_nan", b"xla_tpu_detect_nan"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "legacy_command_buffer_custom_call_targets", + b"legacy_command_buffer_custom_call_targets", + "xla_allow_excess_precision", + b"xla_allow_excess_precision", + "xla_allow_scalar_index_dynamic_ops", + b"xla_allow_scalar_index_dynamic_ops", + "xla_backend_extra_options", + b"xla_backend_extra_options", + "xla_backend_optimization_level", + b"xla_backend_optimization_level", + "xla_cmd_buffer_trace_cache_size", + b"xla_cmd_buffer_trace_cache_size", + "xla_cpu_enable_concurrency_optimized_scheduler", + b"xla_cpu_enable_concurrency_optimized_scheduler", + "xla_cpu_enable_custom_matmul_tiling", + b"xla_cpu_enable_custom_matmul_tiling", + "xla_cpu_enable_experimental_deallocation", + b"xla_cpu_enable_experimental_deallocation", + "xla_cpu_enable_fast_math", + b"xla_cpu_enable_fast_math", + "xla_cpu_enable_fast_min_max", + b"xla_cpu_enable_fast_min_max", + "xla_cpu_enable_mlir_fusion_outlining", + b"xla_cpu_enable_mlir_fusion_outlining", + "xla_cpu_enable_mlir_tiling_and_fusion", + b"xla_cpu_enable_mlir_tiling_and_fusion", + "xla_cpu_enable_xprof_traceme", + b"xla_cpu_enable_xprof_traceme", + "xla_cpu_fast_math_honor_division", + b"xla_cpu_fast_math_honor_division", + "xla_cpu_fast_math_honor_functions", + b"xla_cpu_fast_math_honor_functions", + "xla_cpu_fast_math_honor_infs", + b"xla_cpu_fast_math_honor_infs", + "xla_cpu_fast_math_honor_nans", + b"xla_cpu_fast_math_honor_nans", + "xla_cpu_matmul_tiling_k_dim", + b"xla_cpu_matmul_tiling_k_dim", + "xla_cpu_matmul_tiling_m_dim", + b"xla_cpu_matmul_tiling_m_dim", + "xla_cpu_matmul_tiling_n_dim", + b"xla_cpu_matmul_tiling_n_dim", + "xla_cpu_multi_thread_eigen", + b"xla_cpu_multi_thread_eigen", + "xla_cpu_parallel_codegen_split_count", + b"xla_cpu_parallel_codegen_split_count", + "xla_cpu_prefer_vector_width", + b"xla_cpu_prefer_vector_width", + "xla_cpu_strict_dot_conv_math", + b"xla_cpu_strict_dot_conv_math", + "xla_cpu_use_acl", + b"xla_cpu_use_acl", + "xla_cpu_use_mkl_dnn", + b"xla_cpu_use_mkl_dnn", + "xla_cpu_use_thunk_runtime", + b"xla_cpu_use_thunk_runtime", + "xla_debug_buffer_assignment_show_max", + b"xla_debug_buffer_assignment_show_max", + "xla_detailed_logging", + b"xla_detailed_logging", + "xla_disable_all_hlo_passes", + b"xla_disable_all_hlo_passes", + "xla_disable_hlo_passes", + b"xla_disable_hlo_passes", + "xla_dump_compress_protos", + b"xla_dump_compress_protos", + "xla_dump_disable_metadata", + b"xla_dump_disable_metadata", + "xla_dump_enable_mlir_pretty_form", + b"xla_dump_enable_mlir_pretty_form", + "xla_dump_fusion_visualization", + b"xla_dump_fusion_visualization", + "xla_dump_hlo_as_dot", + b"xla_dump_hlo_as_dot", + "xla_dump_hlo_as_html", + b"xla_dump_hlo_as_html", + "xla_dump_hlo_as_long_text", + b"xla_dump_hlo_as_long_text", + "xla_dump_hlo_as_proto", + b"xla_dump_hlo_as_proto", + "xla_dump_hlo_as_text", + b"xla_dump_hlo_as_text", + "xla_dump_hlo_as_url", + b"xla_dump_hlo_as_url", + "xla_dump_hlo_module_re", + b"xla_dump_hlo_module_re", + "xla_dump_hlo_pass_re", + b"xla_dump_hlo_pass_re", + "xla_dump_hlo_pipeline_re", + b"xla_dump_hlo_pipeline_re", + "xla_dump_hlo_snapshots", + b"xla_dump_hlo_snapshots", + "xla_dump_include_timestamp", + b"xla_dump_include_timestamp", + "xla_dump_large_constants", + b"xla_dump_large_constants", + "xla_dump_latency_hiding_schedule", + b"xla_dump_latency_hiding_schedule", + "xla_dump_max_hlo_modules", + b"xla_dump_max_hlo_modules", + "xla_dump_module_metadata", + b"xla_dump_module_metadata", + "xla_dump_to", + b"xla_dump_to", + 
"xla_eliminate_hlo_implicit_broadcast", + b"xla_eliminate_hlo_implicit_broadcast", + "xla_embed_ir_in_executable", + b"xla_embed_ir_in_executable", + "xla_enable_command_buffers_during_profiling", + b"xla_enable_command_buffers_during_profiling", + "xla_enable_dumping", + b"xla_enable_dumping", + "xla_enable_hlo_passes_only", + b"xla_enable_hlo_passes_only", + "xla_experimental_ignore_channel_id", + b"xla_experimental_ignore_channel_id", + "xla_force_host_platform_device_count", + b"xla_force_host_platform_device_count", + "xla_gpu_algorithm_denylist_path", + b"xla_gpu_algorithm_denylist_path", + "xla_gpu_all_gather_combine_threshold_bytes", + b"xla_gpu_all_gather_combine_threshold_bytes", + "xla_gpu_all_reduce_blueconnect_num_devices_per_host", + b"xla_gpu_all_reduce_blueconnect_num_devices_per_host", + "xla_gpu_all_reduce_combine_threshold_bytes", + b"xla_gpu_all_reduce_combine_threshold_bytes", + "xla_gpu_asm_extra_flags", + b"xla_gpu_asm_extra_flags", + "xla_gpu_async_dot", + b"xla_gpu_async_dot", + "xla_gpu_auto_spmd_partitioning_memory_budget_gb", + b"xla_gpu_auto_spmd_partitioning_memory_budget_gb", + "xla_gpu_auto_spmd_partitioning_memory_budget_ratio", + b"xla_gpu_auto_spmd_partitioning_memory_budget_ratio", + "xla_gpu_autotune_gemm_rtol", + b"xla_gpu_autotune_gemm_rtol", + "xla_gpu_autotune_level", + b"xla_gpu_autotune_level", + "xla_gpu_autotune_max_solutions", + b"xla_gpu_autotune_max_solutions", + "xla_gpu_collect_cost_model_stats", + b"xla_gpu_collect_cost_model_stats", + "xla_gpu_collective_inflation_factor", + b"xla_gpu_collective_inflation_factor", + "xla_gpu_collective_permute_decomposer_threshold", + b"xla_gpu_collective_permute_decomposer_threshold", + "xla_gpu_copy_insertion_use_region_analysis", + b"xla_gpu_copy_insertion_use_region_analysis", + "xla_gpu_crash_on_verification_failures", + b"xla_gpu_crash_on_verification_failures", + "xla_gpu_cublas_fallback", + b"xla_gpu_cublas_fallback", + "xla_gpu_cuda_data_dir", + b"xla_gpu_cuda_data_dir", + "xla_gpu_cudnn_gemm_fusion_level", + b"xla_gpu_cudnn_gemm_fusion_level", + "xla_gpu_cudnn_gemm_max_plans", + b"xla_gpu_cudnn_gemm_max_plans", + "xla_gpu_deterministic_ops", + b"xla_gpu_deterministic_ops", + "xla_gpu_disable_async_collectives", + b"xla_gpu_disable_async_collectives", + "xla_gpu_disable_gpuasm_optimizations", + b"xla_gpu_disable_gpuasm_optimizations", + "xla_gpu_dump_autotune_logs_to", + b"xla_gpu_dump_autotune_logs_to", + "xla_gpu_dump_autotune_results_to", + b"xla_gpu_dump_autotune_results_to", + "xla_gpu_dump_autotuned_gemm_fusions", + b"xla_gpu_dump_autotuned_gemm_fusions", + "xla_gpu_dump_llvmir", + b"xla_gpu_dump_llvmir", + "xla_gpu_enable_all_gather_combine_by_dim", + b"xla_gpu_enable_all_gather_combine_by_dim", + "xla_gpu_enable_analytical_latency_estimator", + b"xla_gpu_enable_analytical_latency_estimator", + "xla_gpu_enable_approx_costly_collectives", + b"xla_gpu_enable_approx_costly_collectives", + "xla_gpu_enable_bf16_3way_gemm", + b"xla_gpu_enable_bf16_3way_gemm", + "xla_gpu_enable_bf16_6way_gemm", + b"xla_gpu_enable_bf16_6way_gemm", + "xla_gpu_enable_command_buffer", + b"xla_gpu_enable_command_buffer", + "xla_gpu_enable_cub_radix_sort", + b"xla_gpu_enable_cub_radix_sort", + "xla_gpu_enable_cublaslt", + b"xla_gpu_enable_cublaslt", + "xla_gpu_enable_cudnn_fmha", + b"xla_gpu_enable_cudnn_fmha", + "xla_gpu_enable_cudnn_frontend", + b"xla_gpu_enable_cudnn_frontend", + "xla_gpu_enable_cudnn_int8x32_convolution_reordering", + b"xla_gpu_enable_cudnn_int8x32_convolution_reordering", + 
"xla_gpu_enable_cudnn_layer_norm", + b"xla_gpu_enable_cudnn_layer_norm", + "xla_gpu_enable_custom_fusions", + b"xla_gpu_enable_custom_fusions", + "xla_gpu_enable_custom_fusions_re", + b"xla_gpu_enable_custom_fusions_re", + "xla_gpu_enable_dot_strength_reduction", + b"xla_gpu_enable_dot_strength_reduction", + "xla_gpu_enable_dynamic_slice_fusion", + b"xla_gpu_enable_dynamic_slice_fusion", + "xla_gpu_enable_fast_min_max", + b"xla_gpu_enable_fast_min_max", + "xla_gpu_enable_highest_priority_async_stream", + b"xla_gpu_enable_highest_priority_async_stream", + "xla_gpu_enable_host_memory_offloading", + b"xla_gpu_enable_host_memory_offloading", + "xla_gpu_enable_latency_hiding_scheduler", + b"xla_gpu_enable_latency_hiding_scheduler", + "xla_gpu_enable_libnvjitlink", + b"xla_gpu_enable_libnvjitlink", + "xla_gpu_enable_libnvptxcompiler", + b"xla_gpu_enable_libnvptxcompiler", + "xla_gpu_enable_llvm_module_compilation_parallelism", + b"xla_gpu_enable_llvm_module_compilation_parallelism", + "xla_gpu_enable_nccl_clique_optimization", + b"xla_gpu_enable_nccl_clique_optimization", + "xla_gpu_enable_nccl_comm_splitting", + b"xla_gpu_enable_nccl_comm_splitting", + "xla_gpu_enable_nccl_per_stream_comms", + b"xla_gpu_enable_nccl_per_stream_comms", + "xla_gpu_enable_nccl_user_buffers", + b"xla_gpu_enable_nccl_user_buffers", + "xla_gpu_enable_pgle_accuracy_checker", + b"xla_gpu_enable_pgle_accuracy_checker", + "xla_gpu_enable_pipelined_all_gather", + b"xla_gpu_enable_pipelined_all_gather", + "xla_gpu_enable_pipelined_all_reduce", + b"xla_gpu_enable_pipelined_all_reduce", + "xla_gpu_enable_pipelined_collectives", + b"xla_gpu_enable_pipelined_collectives", + "xla_gpu_enable_pipelined_p2p", + b"xla_gpu_enable_pipelined_p2p", + "xla_gpu_enable_pipelined_reduce_scatter", + b"xla_gpu_enable_pipelined_reduce_scatter", + "xla_gpu_enable_priority_fusion", + b"xla_gpu_enable_priority_fusion", + "xla_gpu_enable_reassociation_for_converted_ar", + b"xla_gpu_enable_reassociation_for_converted_ar", + "xla_gpu_enable_reduce_scatter_combine_by_dim", + b"xla_gpu_enable_reduce_scatter_combine_by_dim", + "xla_gpu_enable_reduction_epilogue_fusion", + b"xla_gpu_enable_reduction_epilogue_fusion", + "xla_gpu_enable_shared_constants", + b"xla_gpu_enable_shared_constants", + "xla_gpu_enable_split_k_autotuning", + b"xla_gpu_enable_split_k_autotuning", + "xla_gpu_enable_triton_gemm", + b"xla_gpu_enable_triton_gemm", + "xla_gpu_enable_triton_gemm_int4", + b"xla_gpu_enable_triton_gemm_int4", + "xla_gpu_enable_triton_hopper", + b"xla_gpu_enable_triton_hopper", + "xla_gpu_enable_while_loop_double_buffering", + b"xla_gpu_enable_while_loop_double_buffering", + "xla_gpu_enable_while_loop_reduce_scatter_code_motion", + b"xla_gpu_enable_while_loop_reduce_scatter_code_motion", + "xla_gpu_enable_while_loop_unrolling", + b"xla_gpu_enable_while_loop_unrolling", + "xla_gpu_ensure_minor_dot_contraction_dims", + b"xla_gpu_ensure_minor_dot_contraction_dims", + "xla_gpu_exclude_nondeterministic_ops", + b"xla_gpu_exclude_nondeterministic_ops", + "xla_gpu_executable_terminate_timeout_seconds", + b"xla_gpu_executable_terminate_timeout_seconds", + "xla_gpu_executable_warn_stuck_timeout_seconds", + b"xla_gpu_executable_warn_stuck_timeout_seconds", + "xla_gpu_exhaustive_tiling_search", + b"xla_gpu_exhaustive_tiling_search", + "xla_gpu_experimental_autotune_cache_mode", + b"xla_gpu_experimental_autotune_cache_mode", + "xla_gpu_experimental_disable_binary_libraries", + b"xla_gpu_experimental_disable_binary_libraries", + 
"xla_gpu_experimental_enable_triton_softmax_priority_fusion", + b"xla_gpu_experimental_enable_triton_softmax_priority_fusion", + "xla_gpu_filter_kernels_spilling_registers_on_autotuning", + b"xla_gpu_filter_kernels_spilling_registers_on_autotuning", + "xla_gpu_force_compilation_parallelism", + b"xla_gpu_force_compilation_parallelism", + "xla_gpu_force_conv_nchw", + b"xla_gpu_force_conv_nchw", + "xla_gpu_force_conv_nhwc", + b"xla_gpu_force_conv_nhwc", + "xla_gpu_ftz", + b"xla_gpu_ftz", + "xla_gpu_fused_attention_use_cudnn_rng", + b"xla_gpu_fused_attention_use_cudnn_rng", + "xla_gpu_gemm_rewrite_size_threshold", + b"xla_gpu_gemm_rewrite_size_threshold", + "xla_gpu_graph_enable_concurrent_region", + b"xla_gpu_graph_enable_concurrent_region", + "xla_gpu_graph_min_graph_size", + b"xla_gpu_graph_min_graph_size", + "xla_gpu_kernel_cache_file", + b"xla_gpu_kernel_cache_file", + "xla_gpu_lhs_enable_gpu_async_tracker", + b"xla_gpu_lhs_enable_gpu_async_tracker", + "xla_gpu_llvm_ir_file", + b"xla_gpu_llvm_ir_file", + "xla_gpu_llvm_verification_level", + b"xla_gpu_llvm_verification_level", + "xla_gpu_load_autotune_results_from", + b"xla_gpu_load_autotune_results_from", + "xla_gpu_memory_limit_slop_factor", + b"xla_gpu_memory_limit_slop_factor", + "xla_gpu_mlir_emitter_level", + b"xla_gpu_mlir_emitter_level", + "xla_gpu_mock_custom_calls", + b"xla_gpu_mock_custom_calls", + "xla_gpu_multi_streamed_windowed_einsum", + b"xla_gpu_multi_streamed_windowed_einsum", + "xla_gpu_nccl_collective_max_nchannels", + b"xla_gpu_nccl_collective_max_nchannels", + "xla_gpu_nccl_p2p_max_nchannels", + b"xla_gpu_nccl_p2p_max_nchannels", + "xla_gpu_nccl_terminate_on_error", + b"xla_gpu_nccl_terminate_on_error", + "xla_gpu_nccl_termination_timeout_seconds", + b"xla_gpu_nccl_termination_timeout_seconds", + "xla_gpu_override_gemm_autotuner", + b"xla_gpu_override_gemm_autotuner", + "xla_gpu_per_fusion_autotune_cache_dir", + b"xla_gpu_per_fusion_autotune_cache_dir", + "xla_gpu_pgle_profile_file_or_directory_path", + b"xla_gpu_pgle_profile_file_or_directory_path", + "xla_gpu_ptx_file", + b"xla_gpu_ptx_file", + "xla_gpu_reduce_scatter_combine_threshold_bytes", + b"xla_gpu_reduce_scatter_combine_threshold_bytes", + "xla_gpu_redzone_padding_bytes", + b"xla_gpu_redzone_padding_bytes", + "xla_gpu_redzone_scratch_max_megabytes", + b"xla_gpu_redzone_scratch_max_megabytes", + "xla_gpu_require_complete_aot_autotune_results", + b"xla_gpu_require_complete_aot_autotune_results", + "xla_gpu_run_post_layout_collective_pipeliner", + b"xla_gpu_run_post_layout_collective_pipeliner", + "xla_gpu_shape_checks", + b"xla_gpu_shape_checks", + "xla_gpu_shard_autotuning", + b"xla_gpu_shard_autotuning", + "xla_gpu_strict_conv_algorithm_picker", + b"xla_gpu_strict_conv_algorithm_picker", + "xla_gpu_target_config_filename", + b"xla_gpu_target_config_filename", + "xla_gpu_temp_buffer_use_separate_color", + b"xla_gpu_temp_buffer_use_separate_color", + "xla_gpu_threshold_for_windowed_einsum_mib", + b"xla_gpu_threshold_for_windowed_einsum_mib", + "xla_gpu_triton_fusion_level", + b"xla_gpu_triton_fusion_level", + "xla_gpu_triton_gemm_any", + b"xla_gpu_triton_gemm_any", + "xla_gpu_triton_gemm_disable_reduced_precision_reduction", + b"xla_gpu_triton_gemm_disable_reduced_precision_reduction", + "xla_gpu_unsafe_fallback_to_driver_on_ptxas_not_found", + b"xla_gpu_unsafe_fallback_to_driver_on_ptxas_not_found", + "xla_gpu_unsafe_pipelined_loop_annotator", + b"xla_gpu_unsafe_pipelined_loop_annotator", + "xla_gpu_unsupported_enable_triton_gemm", + 
b"xla_gpu_unsupported_enable_triton_gemm", + "xla_gpu_use_memcpy_local_p2p", + b"xla_gpu_use_memcpy_local_p2p", + "xla_gpu_use_runtime_fusion", + b"xla_gpu_use_runtime_fusion", + "xla_gpu_verify_triton_fusion_numerics", + b"xla_gpu_verify_triton_fusion_numerics", + "xla_hlo_evaluator_use_fast_path", + b"xla_hlo_evaluator_use_fast_path", + "xla_hlo_graph_addresses", + b"xla_hlo_graph_addresses", + "xla_hlo_graph_sharding_color", + b"xla_hlo_graph_sharding_color", + "xla_hlo_profile", + b"xla_hlo_profile", + "xla_llvm_disable_expensive_passes", + b"xla_llvm_disable_expensive_passes", + "xla_llvm_enable_alias_scope_metadata", + b"xla_llvm_enable_alias_scope_metadata", + "xla_llvm_enable_invariant_load_metadata", + b"xla_llvm_enable_invariant_load_metadata", + "xla_llvm_enable_noalias_metadata", + b"xla_llvm_enable_noalias_metadata", + "xla_llvm_force_inline_before_split", + b"xla_llvm_force_inline_before_split", + "xla_multiheap_size_constraint_per_heap", + b"xla_multiheap_size_constraint_per_heap", + "xla_partitioning_algorithm", + b"xla_partitioning_algorithm", + "xla_reduce_window_rewrite_base_length", + b"xla_reduce_window_rewrite_base_length", + "xla_step_marker_location", + b"xla_step_marker_location", + "xla_syntax_sugar_async_ops", + b"xla_syntax_sugar_async_ops", + "xla_test_all_input_layouts", + b"xla_test_all_input_layouts", + "xla_test_all_output_layouts", + b"xla_test_all_output_layouts", + "xla_tpu_detect_inf", + b"xla_tpu_detect_inf", + "xla_tpu_detect_nan", + b"xla_tpu_detect_nan", + ], + ) -> None: ... global___DebugOptions = DebugOptions @@ -1417,11 +1844,7 @@ class GpuCompilationEnvironment(google.protobuf.message.Message): """Temporary dummy flag is added to test the flow. To be removed when we add flags here. """ - def __init__( - self, - *, - dummy_flag: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, dummy_flag: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["dummy_flag", b"dummy_flag"]) -> None: ... global___GpuCompilationEnvironment = GpuCompilationEnvironment @@ -1445,7 +1868,17 @@ class ShardableValueUpdatePairProto(google.protobuf.message.Message): parameter_shape_index: collections.abc.Iterable[builtins.int] | None = ..., output_shape_index: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["input_parameter_number", b"input_parameter_number", "output_shape_index", b"output_shape_index", "parameter_shape_index", b"parameter_shape_index"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "input_parameter_number", + b"input_parameter_number", + "output_shape_index", + b"output_shape_index", + "parameter_shape_index", + b"parameter_shape_index", + ], + ) -> None: ... global___ShardableValueUpdatePairProto = ShardableValueUpdatePairProto @@ -1541,7 +1974,9 @@ class ExecutionOptions(google.protobuf.message.Message): @property def debug_options(self) -> global___DebugOptions: ... @property - def device_handles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.DeviceHandle]: + def device_handles( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.DeviceHandle]: """This optional field specifies a particular set of devices to run the computation on. The computation will be partitioned across these devices. If not provided, the default device will be chosen. 
@@ -1566,7 +2001,9 @@ class ExecutionOptions(google.protobuf.message.Message): """ @property - def allow_spmd_sharding_propagation_to_parameters(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + def allow_spmd_sharding_propagation_to_parameters( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: """Allows sharding propagation to propagate to the parameters. This changes the input shape of the computation (which is undesirable), but it can be used to allow to run partial compilation to determine what would be the @@ -1580,7 +2017,9 @@ class ExecutionOptions(google.protobuf.message.Message): """ @property - def allow_spmd_sharding_propagation_to_output(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + def allow_spmd_sharding_propagation_to_output( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: """Allows sharding propagation to propagate to the outputs. This changes the output shape of the computation (which is undesirable), but it can be used to allow to run partial compilation to determine what would be the output @@ -1597,11 +2036,15 @@ class ExecutionOptions(google.protobuf.message.Message): """ @property - def param_requires_broadcast_via_collectives(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + def param_requires_broadcast_via_collectives( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: """Whether to broadcast args across all replicas. One entry per arg.""" @property - def shardable_value_update_pairs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShardableValueUpdatePairProto]: + def shardable_value_update_pairs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShardableValueUpdatePairProto]: """The list of input/output pairs in the main program that could be sharded.""" def __init__( @@ -1630,8 +2073,66 @@ class ExecutionOptions(google.protobuf.message.Message): device_memory_size: builtins.int | None = ..., use_shardy_partitioner: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["debug_options", b"debug_options", "device_assignment", b"device_assignment", "shape_with_output_layout", b"shape_with_output_layout"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["alias_passthrough_params", b"alias_passthrough_params", "allow_separate_sharding_programs", b"allow_separate_sharding_programs", "allow_spmd_sharding_propagation_to_output", b"allow_spmd_sharding_propagation_to_output", "allow_spmd_sharding_propagation_to_parameters", b"allow_spmd_sharding_propagation_to_parameters", "auto_spmd_partitioning_mesh_ids", b"auto_spmd_partitioning_mesh_ids", "auto_spmd_partitioning_mesh_shape", b"auto_spmd_partitioning_mesh_shape", "debug_options", b"debug_options", "deduplicate_hlo", b"deduplicate_hlo", "device_assignment", b"device_assignment", "device_handles", b"device_handles", "device_memory_size", b"device_memory_size", "fdo_profile", b"fdo_profile", "launch_id", b"launch_id", "num_partitions", b"num_partitions", "num_replicas", b"num_replicas", "param_requires_broadcast_via_collectives", b"param_requires_broadcast_via_collectives", "seed", b"seed", "shape_with_output_layout", b"shape_with_output_layout", "shardable_value_update_pairs", b"shardable_value_update_pairs", "use_auto_spmd_partitioning", b"use_auto_spmd_partitioning", "use_shardy_partitioner", b"use_shardy_partitioner", "use_spmd_partitioning", b"use_spmd_partitioning"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "debug_options", + b"debug_options", + "device_assignment", + b"device_assignment", + "shape_with_output_layout", + b"shape_with_output_layout", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "alias_passthrough_params", + b"alias_passthrough_params", + "allow_separate_sharding_programs", + b"allow_separate_sharding_programs", + "allow_spmd_sharding_propagation_to_output", + b"allow_spmd_sharding_propagation_to_output", + "allow_spmd_sharding_propagation_to_parameters", + b"allow_spmd_sharding_propagation_to_parameters", + "auto_spmd_partitioning_mesh_ids", + b"auto_spmd_partitioning_mesh_ids", + "auto_spmd_partitioning_mesh_shape", + b"auto_spmd_partitioning_mesh_shape", + "debug_options", + b"debug_options", + "deduplicate_hlo", + b"deduplicate_hlo", + "device_assignment", + b"device_assignment", + "device_handles", + b"device_handles", + "device_memory_size", + b"device_memory_size", + "fdo_profile", + b"fdo_profile", + "launch_id", + b"launch_id", + "num_partitions", + b"num_partitions", + "num_replicas", + b"num_replicas", + "param_requires_broadcast_via_collectives", + b"param_requires_broadcast_via_collectives", + "seed", + b"seed", + "shape_with_output_layout", + b"shape_with_output_layout", + "shardable_value_update_pairs", + b"shardable_value_update_pairs", + "use_auto_spmd_partitioning", + b"use_auto_spmd_partitioning", + "use_shardy_partitioner", + b"use_shardy_partitioner", + "use_spmd_partitioning", + b"use_spmd_partitioning", + ], + ) -> None: ... 
global___ExecutionOptions = ExecutionOptions @@ -1649,7 +2150,10 @@ class HloModuleConfigProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _FusionConfigCollectionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HloModuleConfigProto._FusionConfigCollection.ValueType], builtins.type): + class _FusionConfigCollectionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HloModuleConfigProto._FusionConfigCollection.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OFF: HloModuleConfigProto._FusionConfigCollection.ValueType # 0 """Do not collect configuration.""" @@ -1673,11 +2177,7 @@ class HloModuleConfigProto(google.protobuf.message.Message): VALS_FIELD_NUMBER: builtins.int @property def vals(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... - def __init__( - self, - *, - vals: collections.abc.Iterable[builtins.bool] | None = ..., - ) -> None: ... + def __init__(self, *, vals: collections.abc.Iterable[builtins.bool] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["vals", b"vals"]) -> None: ... @typing.final @@ -1687,11 +2187,7 @@ class HloModuleConfigProto(google.protobuf.message.Message): VALS_FIELD_NUMBER: builtins.int @property def vals(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - def __init__( - self, - *, - vals: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... + def __init__(self, *, vals: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["vals", b"vals"]) -> None: ... @typing.final @@ -1700,12 +2196,10 @@ class HloModuleConfigProto(google.protobuf.message.Message): LISTS_FIELD_NUMBER: builtins.int @property - def lists(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.Int64List]: ... - def __init__( + def lists( self, - *, - lists: collections.abc.Iterable[global___HloModuleConfigProto.Int64List] | None = ..., - ) -> None: ... + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.Int64List]: ... + def __init__(self, *, lists: collections.abc.Iterable[global___HloModuleConfigProto.Int64List] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["lists", b"lists"]) -> None: ... @typing.final @@ -1718,10 +2212,7 @@ class HloModuleConfigProto(google.protobuf.message.Message): @property def value(self) -> global___HloModuleConfigProto.Int64List: ... def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___HloModuleConfigProto.Int64List | None = ..., + self, *, key: builtins.str | None = ..., value: global___HloModuleConfigProto.Int64List | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -1734,12 +2225,7 @@ class HloModuleConfigProto(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.int - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... 
def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... ENTRY_COMPUTATION_LAYOUT_FIELD_NUMBER: builtins.int @@ -1797,11 +2283,17 @@ class HloModuleConfigProto(google.protobuf.message.Message): @property def entry_computation_layout(self) -> tensorflow.compiler.xla.xla_data_pb2.ProgramShapeProto: ... @property - def param_requires_broadcast_via_collectives(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + def param_requires_broadcast_via_collectives( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... @property - def auto_spmd_partitioning_mesh_shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def auto_spmd_partitioning_mesh_shape( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... @property - def auto_spmd_partitioning_mesh_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def auto_spmd_partitioning_mesh_ids( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... @property def debug_options(self) -> global___DebugOptions: ... @property @@ -1814,21 +2306,37 @@ class HloModuleConfigProto(google.protobuf.message.Message): """ @property - def shardable_value_update_pairs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShardableValueUpdatePairProto]: ... + def shardable_value_update_pairs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShardableValueUpdatePairProto]: ... @property - def fusion_config(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.BoolList]: ... + def fusion_config( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.BoolList]: ... @property - def dot_config(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___HloModuleConfigProto.Int64List]: ... + def dot_config( + self, + ) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___HloModuleConfigProto.Int64List]: ... @property - def layout_config(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.Int64ListList]: ... + def layout_config( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.Int64ListList]: ... @property - def memory_space_assignment_config(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def memory_space_assignment_config( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... @property - def phase_ordering_config(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.BoolList]: ... + def phase_ordering_config( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.BoolList]: ... @property - def allow_spmd_sharding_propagation_to_parameters(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + def allow_spmd_sharding_propagation_to_parameters( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... 
@property - def allow_spmd_sharding_propagation_to_output(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + def allow_spmd_sharding_propagation_to_output( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... @property def analysis_allowance_map(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.int]: ... def __init__( @@ -1869,8 +2377,92 @@ class HloModuleConfigProto(google.protobuf.message.Message): device_memory_size: builtins.int | None = ..., use_shardy_partitioner: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["debug_options", b"debug_options", "entry_computation_layout", b"entry_computation_layout", "pre_simulation_device_assignment", b"pre_simulation_device_assignment", "static_device_assignment", b"static_device_assignment"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["alias_passthrough_params", b"alias_passthrough_params", "allow_separate_sharding_programs", b"allow_separate_sharding_programs", "allow_spmd_sharding_propagation_to_output", b"allow_spmd_sharding_propagation_to_output", "allow_spmd_sharding_propagation_to_parameters", b"allow_spmd_sharding_propagation_to_parameters", "analysis_allowance_map", b"analysis_allowance_map", "auto_spmd_partitioning_mesh_ids", b"auto_spmd_partitioning_mesh_ids", "auto_spmd_partitioning_mesh_shape", b"auto_spmd_partitioning_mesh_shape", "content_aware_computation_sorting", b"content_aware_computation_sorting", "debug_options", b"debug_options", "deduplicate_hlo", b"deduplicate_hlo", "device_memory_size", b"device_memory_size", "device_type", b"device_type", "dot_config", b"dot_config", "entry_computation_layout", b"entry_computation_layout", "fdo_profile", b"fdo_profile", "fusion_config", b"fusion_config", "fusion_config_collection", b"fusion_config_collection", "intra_op_parallelism_threads", b"intra_op_parallelism_threads", "launch_id", b"launch_id", "layout_config", b"layout_config", "matrix_unit_operand_precision", b"matrix_unit_operand_precision", "memory_space_assignment_config", b"memory_space_assignment_config", "num_partitions", b"num_partitions", "param_requires_broadcast_via_collectives", b"param_requires_broadcast_via_collectives", "phase_index", b"phase_index", "phase_ordering_config", b"phase_ordering_config", "pre_simulation_device_assignment", b"pre_simulation_device_assignment", "replica_count", b"replica_count", "seed", b"seed", "shardable_value_update_pairs", b"shardable_value_update_pairs", "static_device_assignment", b"static_device_assignment", "use_auto_spmd_partitioning", b"use_auto_spmd_partitioning", "use_shardy_partitioner", b"use_shardy_partitioner", "use_spmd_partitioning", b"use_spmd_partitioning"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "debug_options", + b"debug_options", + "entry_computation_layout", + b"entry_computation_layout", + "pre_simulation_device_assignment", + b"pre_simulation_device_assignment", + "static_device_assignment", + b"static_device_assignment", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "alias_passthrough_params", + b"alias_passthrough_params", + "allow_separate_sharding_programs", + b"allow_separate_sharding_programs", + "allow_spmd_sharding_propagation_to_output", + b"allow_spmd_sharding_propagation_to_output", + "allow_spmd_sharding_propagation_to_parameters", + b"allow_spmd_sharding_propagation_to_parameters", + "analysis_allowance_map", + b"analysis_allowance_map", + "auto_spmd_partitioning_mesh_ids", + b"auto_spmd_partitioning_mesh_ids", + "auto_spmd_partitioning_mesh_shape", + b"auto_spmd_partitioning_mesh_shape", + "content_aware_computation_sorting", + b"content_aware_computation_sorting", + "debug_options", + b"debug_options", + "deduplicate_hlo", + b"deduplicate_hlo", + "device_memory_size", + b"device_memory_size", + "device_type", + b"device_type", + "dot_config", + b"dot_config", + "entry_computation_layout", + b"entry_computation_layout", + "fdo_profile", + b"fdo_profile", + "fusion_config", + b"fusion_config", + "fusion_config_collection", + b"fusion_config_collection", + "intra_op_parallelism_threads", + b"intra_op_parallelism_threads", + "launch_id", + b"launch_id", + "layout_config", + b"layout_config", + "matrix_unit_operand_precision", + b"matrix_unit_operand_precision", + "memory_space_assignment_config", + b"memory_space_assignment_config", + "num_partitions", + b"num_partitions", + "param_requires_broadcast_via_collectives", + b"param_requires_broadcast_via_collectives", + "phase_index", + b"phase_index", + "phase_ordering_config", + b"phase_ordering_config", + "pre_simulation_device_assignment", + b"pre_simulation_device_assignment", + "replica_count", + b"replica_count", + "seed", + b"seed", + "shardable_value_update_pairs", + b"shardable_value_update_pairs", + "static_device_assignment", + b"static_device_assignment", + "use_auto_spmd_partitioning", + b"use_auto_spmd_partitioning", + "use_shardy_partitioner", + b"use_shardy_partitioner", + "use_spmd_partitioning", + b"use_spmd_partitioning", + ], + ) -> None: ... global___HloModuleConfigProto = HloModuleConfigProto @@ -1920,7 +2512,12 @@ class ScheduleProto(google.protobuf.message.Message): start_timestamp_cycles: builtins.float | None = ..., end_timestamp_cycles: builtins.float | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["end_timestamp_cycles", b"end_timestamp_cycles", "id", b"id", "start_timestamp_cycles", b"start_timestamp_cycles"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "end_timestamp_cycles", b"end_timestamp_cycles", "id", b"id", "start_timestamp_cycles", b"start_timestamp_cycles" + ], + ) -> None: ... INSTRUCTIONS_FIELD_NUMBER: builtins.int COMPUTATION_ID_FIELD_NUMBER: builtins.int @@ -1930,7 +2527,9 @@ class ScheduleProto(google.protobuf.message.Message): """Computation id (matches the id in HloComputationProto).""" cycles_per_microsecond: builtins.int @property - def instructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScheduleProto.Instruction]: ... + def instructions( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScheduleProto.Instruction]: ... @property def hlo_module(self) -> tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto: ... def __init__( @@ -1942,6 +2541,18 @@ class ScheduleProto(google.protobuf.message.Message): cycles_per_microsecond: builtins.int | None = ..., ) -> None: ... 
def HasField(self, field_name: typing.Literal["hlo_module", b"hlo_module"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["computation_id", b"computation_id", "cycles_per_microsecond", b"cycles_per_microsecond", "hlo_module", b"hlo_module", "instructions", b"instructions"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "computation_id", + b"computation_id", + "cycles_per_microsecond", + b"cycles_per_microsecond", + "hlo_module", + b"hlo_module", + "instructions", + b"instructions", + ], + ) -> None: ... global___ScheduleProto = ScheduleProto diff --git a/stubs/tensorflow/tensorflow/core/example/example_parser_configuration_pb2.pyi b/stubs/tensorflow/tensorflow/core/example/example_parser_configuration_pb2.pyi index 6b5c255da3b4..9ffaef3d17a0 100644 --- a/stubs/tensorflow/tensorflow/core/example/example_parser_configuration_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/example/example_parser_configuration_pb2.pyi @@ -36,7 +36,19 @@ class VarLenFeatureProto(google.protobuf.message.Message): indices_output_tensor_name: builtins.str | None = ..., shapes_output_tensor_name: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["dtype", b"dtype", "indices_output_tensor_name", b"indices_output_tensor_name", "shapes_output_tensor_name", b"shapes_output_tensor_name", "values_output_tensor_name", b"values_output_tensor_name"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "dtype", + b"dtype", + "indices_output_tensor_name", + b"indices_output_tensor_name", + "shapes_output_tensor_name", + b"shapes_output_tensor_name", + "values_output_tensor_name", + b"values_output_tensor_name", + ], + ) -> None: ... global___VarLenFeatureProto = VarLenFeatureProto @@ -63,7 +75,19 @@ class FixedLenFeatureProto(google.protobuf.message.Message): values_output_tensor_name: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["default_value", b"default_value", "shape", b"shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["default_value", b"default_value", "dtype", b"dtype", "shape", b"shape", "values_output_tensor_name", b"values_output_tensor_name"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "default_value", + b"default_value", + "dtype", + b"dtype", + "shape", + b"shape", + "values_output_tensor_name", + b"values_output_tensor_name", + ], + ) -> None: ... global___FixedLenFeatureProto = FixedLenFeatureProto @@ -83,9 +107,21 @@ class FeatureConfiguration(google.protobuf.message.Message): fixed_len_feature: global___FixedLenFeatureProto | None = ..., var_len_feature: global___VarLenFeatureProto | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["config", b"config", "fixed_len_feature", b"fixed_len_feature", "var_len_feature", b"var_len_feature"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["config", b"config", "fixed_len_feature", b"fixed_len_feature", "var_len_feature", b"var_len_feature"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["config", b"config"]) -> typing.Literal["fixed_len_feature", "var_len_feature"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "config", b"config", "fixed_len_feature", b"fixed_len_feature", "var_len_feature", b"var_len_feature" + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "config", b"config", "fixed_len_feature", b"fixed_len_feature", "var_len_feature", b"var_len_feature" + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["config", b"config"] + ) -> typing.Literal["fixed_len_feature", "var_len_feature"] | None: ... global___FeatureConfiguration = FeatureConfiguration @@ -102,12 +138,7 @@ class ExampleParserConfiguration(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___FeatureConfiguration: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___FeatureConfiguration | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___FeatureConfiguration | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -115,9 +146,7 @@ class ExampleParserConfiguration(google.protobuf.message.Message): @property def feature_map(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FeatureConfiguration]: ... def __init__( - self, - *, - feature_map: collections.abc.Mapping[builtins.str, global___FeatureConfiguration] | None = ..., + self, *, feature_map: collections.abc.Mapping[builtins.str, global___FeatureConfiguration] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["feature_map", b"feature_map"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/core/example/example_pb2.pyi b/stubs/tensorflow/tensorflow/core/example/example_pb2.pyi index 562a6c44cf55..63cf2c14f17a 100644 --- a/stubs/tensorflow/tensorflow/core/example/example_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/example/example_pb2.pyi @@ -95,11 +95,7 @@ class Example(google.protobuf.message.Message): FEATURES_FIELD_NUMBER: builtins.int @property def features(self) -> tensorflow.core.example.feature_pb2.Features: ... - def __init__( - self, - *, - features: tensorflow.core.example.feature_pb2.Features | None = ..., - ) -> None: ... + def __init__(self, *, features: tensorflow.core.example.feature_pb2.Features | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["features", b"features"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["features", b"features"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/core/example/feature_pb2.pyi b/stubs/tensorflow/tensorflow/core/example/feature_pb2.pyi index 3fbc881cbaa2..9bec4dd165e0 100644 --- a/stubs/tensorflow/tensorflow/core/example/feature_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/example/feature_pb2.pyi @@ -77,11 +77,7 @@ class BytesList(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int @property def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ... - def __init__( - self, - *, - value: collections.abc.Iterable[builtins.bytes] | None = ..., - ) -> None: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.bytes] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___BytesList = BytesList @@ -93,11 +89,7 @@ class FloatList(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int @property def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... 
- def __init__( - self, - *, - value: collections.abc.Iterable[builtins.float] | None = ..., - ) -> None: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.float] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___FloatList = FloatList @@ -109,11 +101,7 @@ class Int64List(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int @property def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - def __init__( - self, - *, - value: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___Int64List = Int64List @@ -140,9 +128,21 @@ class Feature(google.protobuf.message.Message): float_list: global___FloatList | None = ..., int64_list: global___Int64List | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["kind", b"kind"]) -> typing.Literal["bytes_list", "float_list", "int64_list"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind" + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["kind", b"kind"] + ) -> typing.Literal["bytes_list", "float_list", "int64_list"] | None: ... global___Feature = Feature @@ -159,12 +159,7 @@ class Features(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___Feature: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___Feature | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___Feature | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -173,11 +168,7 @@ class Features(google.protobuf.message.Message): def feature(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Feature]: """Map from feature name to feature.""" - def __init__( - self, - *, - feature: collections.abc.Mapping[builtins.str, global___Feature] | None = ..., - ) -> None: ... + def __init__(self, *, feature: collections.abc.Mapping[builtins.str, global___Feature] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["feature", b"feature"]) -> None: ... global___Features = Features @@ -198,11 +189,7 @@ class FeatureList(google.protobuf.message.Message): FEATURE_FIELD_NUMBER: builtins.int @property def feature(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Feature]: ... - def __init__( - self, - *, - feature: collections.abc.Iterable[global___Feature] | None = ..., - ) -> None: ... 
+ def __init__(self, *, feature: collections.abc.Iterable[global___Feature] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["feature", b"feature"]) -> None: ... global___FeatureList = FeatureList @@ -220,12 +207,7 @@ class FeatureLists(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___FeatureList: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___FeatureList | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___FeatureList | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -234,11 +216,7 @@ class FeatureLists(google.protobuf.message.Message): def feature_list(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FeatureList]: """Map from feature name to feature list.""" - def __init__( - self, - *, - feature_list: collections.abc.Mapping[builtins.str, global___FeatureList] | None = ..., - ) -> None: ... + def __init__(self, *, feature_list: collections.abc.Mapping[builtins.str, global___FeatureList] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["feature_list", b"feature_list"]) -> None: ... global___FeatureLists = FeatureLists diff --git a/stubs/tensorflow/tensorflow/core/framework/allocation_description_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/allocation_description_pb2.pyi index 6e0f0cb22534..9f4e541f2299 100644 --- a/stubs/tensorflow/tensorflow/core/framework/allocation_description_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/allocation_description_pb2.pyi @@ -43,6 +43,22 @@ class AllocationDescription(google.protobuf.message.Message): has_single_reference: builtins.bool | None = ..., ptr: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["allocated_bytes", b"allocated_bytes", "allocation_id", b"allocation_id", "allocator_name", b"allocator_name", "has_single_reference", b"has_single_reference", "ptr", b"ptr", "requested_bytes", b"requested_bytes"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "allocated_bytes", + b"allocated_bytes", + "allocation_id", + b"allocation_id", + "allocator_name", + b"allocator_name", + "has_single_reference", + b"has_single_reference", + "ptr", + b"ptr", + "requested_bytes", + b"requested_bytes", + ], + ) -> None: ... 
global___AllocationDescription = AllocationDescription diff --git a/stubs/tensorflow/tensorflow/core/framework/api_def_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/api_def_pb2.pyi index 30f81032a88d..b3d8c424234f 100644 --- a/stubs/tensorflow/tensorflow/core/framework/api_def_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/api_def_pb2.pyi @@ -49,7 +49,9 @@ class ApiDef(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _VisibilityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ApiDef._Visibility.ValueType], builtins.type): + class _VisibilityEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ApiDef._Visibility.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT_VISIBILITY: ApiDef._Visibility.ValueType # 0 """Normally this is "VISIBLE" unless you are inheriting a @@ -117,7 +119,12 @@ class ApiDef(google.protobuf.message.Message): deprecated: builtins.bool | None = ..., deprecation_version: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["deprecated", b"deprecated", "deprecation_version", b"deprecation_version", "name", b"name"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "deprecated", b"deprecated", "deprecation_version", b"deprecation_version", "name", b"name" + ], + ) -> None: ... @typing.final class Arg(google.protobuf.message.Message): @@ -138,13 +145,11 @@ class ApiDef(google.protobuf.message.Message): them entirely) as can be done with op descriptions. """ def __init__( - self, - *, - name: builtins.str | None = ..., - rename_to: builtins.str | None = ..., - description: builtins.str | None = ..., + self, *, name: builtins.str | None = ..., rename_to: builtins.str | None = ..., description: builtins.str | None = ... + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["description", b"description", "name", b"name", "rename_to", b"rename_to"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["description", b"description", "name", b"name", "rename_to", b"rename_to"]) -> None: ... @typing.final class Attr(google.protobuf.message.Message): @@ -186,7 +191,12 @@ class ApiDef(google.protobuf.message.Message): description: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["default_value", b"default_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["default_value", b"default_value", "description", b"description", "name", b"name", "rename_to", b"rename_to"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "default_value", b"default_value", "description", b"description", "name", b"name", "rename_to", b"rename_to" + ], + ) -> None: ... GRAPH_OP_NAME_FIELD_NUMBER: builtins.int DEPRECATION_MESSAGE_FIELD_NUMBER: builtins.int @@ -255,7 +265,37 @@ class ApiDef(google.protobuf.message.Message): description_prefix: builtins.str | None = ..., description_suffix: builtins.str | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["arg_order", b"arg_order", "attr", b"attr", "deprecation_message", b"deprecation_message", "deprecation_version", b"deprecation_version", "description", b"description", "description_prefix", b"description_prefix", "description_suffix", b"description_suffix", "endpoint", b"endpoint", "graph_op_name", b"graph_op_name", "in_arg", b"in_arg", "out_arg", b"out_arg", "summary", b"summary", "visibility", b"visibility"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "arg_order", + b"arg_order", + "attr", + b"attr", + "deprecation_message", + b"deprecation_message", + "deprecation_version", + b"deprecation_version", + "description", + b"description", + "description_prefix", + b"description_prefix", + "description_suffix", + b"description_suffix", + "endpoint", + b"endpoint", + "graph_op_name", + b"graph_op_name", + "in_arg", + b"in_arg", + "out_arg", + b"out_arg", + "summary", + b"summary", + "visibility", + b"visibility", + ], + ) -> None: ... global___ApiDef = ApiDef @@ -266,11 +306,7 @@ class ApiDefs(google.protobuf.message.Message): OP_FIELD_NUMBER: builtins.int @property def op(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef]: ... - def __init__( - self, - *, - op: collections.abc.Iterable[global___ApiDef] | None = ..., - ) -> None: ... + def __init__(self, *, op: collections.abc.Iterable[global___ApiDef] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["op", b"op"]) -> None: ... global___ApiDefs = ApiDefs diff --git a/stubs/tensorflow/tensorflow/core/framework/attr_value_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/attr_value_pb2.pyi index 80bdde459e0f..bc36030c511d 100644 --- a/stubs/tensorflow/tensorflow/core/framework/attr_value_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/attr_value_pb2.pyi @@ -41,35 +41,47 @@ class AttrValue(google.protobuf.message.Message): FUNC_FIELD_NUMBER: builtins.int @property def s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: - """"list(string)" """ + """ "list(string)" """ @property def i(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: - """"list(int)" """ + """ "list(int)" """ @property def f(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: - """"list(float)" """ + """ "list(float)" """ @property def b(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: - """"list(bool)" """ + """ "list(bool)" """ @property - def type(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.core.framework.types_pb2.DataType.ValueType]: - """"list(type)" """ + def type( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ + tensorflow.core.framework.types_pb2.DataType.ValueType + ]: + """ "list(type)" """ @property - def shape(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto]: - """"list(shape)" """ + def shape( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto + ]: + """ "list(shape)" """ @property - def tensor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_pb2.TensorProto]: - """"list(tensor)" """ + def tensor( + self, + ) -> 
google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.tensor_pb2.TensorProto + ]: + """ "list(tensor)" """ @property def func(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NameAttrList]: - """"list(attr)" """ + """ "list(attr)" """ def __init__( self, @@ -83,7 +95,27 @@ class AttrValue(google.protobuf.message.Message): tensor: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ..., func: collections.abc.Iterable[global___NameAttrList] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["b", b"b", "f", b"f", "func", b"func", "i", b"i", "s", b"s", "shape", b"shape", "tensor", b"tensor", "type", b"type"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "b", + b"b", + "f", + b"f", + "func", + b"func", + "i", + b"i", + "s", + b"s", + "shape", + b"shape", + "tensor", + b"tensor", + "type", + b"type", + ], + ) -> None: ... S_FIELD_NUMBER: builtins.int I_FIELD_NUMBER: builtins.int @@ -116,11 +148,11 @@ class AttrValue(google.protobuf.message.Message): """ @property def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: - """"shape" """ + """ "shape" """ @property def tensor(self) -> tensorflow.core.framework.tensor_pb2.TensorProto: - """"tensor" """ + """ "tensor" """ @property def list(self) -> global___AttrValue.ListValue: @@ -128,7 +160,7 @@ class AttrValue(google.protobuf.message.Message): @property def func(self) -> global___NameAttrList: - """"func" represents a function. func.name is a function's name or + """ "func" represents a function. func.name is a function's name or a primitive op's name. func.attr.first is the name of an attr defined for that function. func.attr.second is the value for that attr in the instantiation. @@ -148,9 +180,63 @@ class AttrValue(google.protobuf.message.Message): func: global___NameAttrList | None = ..., placeholder: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["b", b"b", "f", b"f", "func", b"func", "i", b"i", "list", b"list", "placeholder", b"placeholder", "s", b"s", "shape", b"shape", "tensor", b"tensor", "type", b"type", "value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["b", b"b", "f", b"f", "func", b"func", "i", b"i", "list", b"list", "placeholder", b"placeholder", "s", b"s", "shape", b"shape", "tensor", b"tensor", "type", b"type", "value", b"value"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["value", b"value"]) -> typing.Literal["s", "i", "f", "b", "type", "shape", "tensor", "list", "func", "placeholder"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "b", + b"b", + "f", + b"f", + "func", + b"func", + "i", + b"i", + "list", + b"list", + "placeholder", + b"placeholder", + "s", + b"s", + "shape", + b"shape", + "tensor", + b"tensor", + "type", + b"type", + "value", + b"value", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "b", + b"b", + "f", + b"f", + "func", + b"func", + "i", + b"i", + "list", + b"list", + "placeholder", + b"placeholder", + "s", + b"s", + "shape", + b"shape", + "tensor", + b"tensor", + "type", + b"type", + "value", + b"value", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["value", b"value"] + ) -> typing.Literal["s", "i", "f", "b", "type", "shape", "tensor", "list", "func", "placeholder"] | None: ... 
global___AttrValue = AttrValue @@ -171,12 +257,7 @@ class NameAttrList(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___AttrValue: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___AttrValue | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___AttrValue | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -186,10 +267,7 @@ class NameAttrList(google.protobuf.message.Message): @property def attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___AttrValue]: ... def __init__( - self, - *, - name: builtins.str | None = ..., - attr: collections.abc.Mapping[builtins.str, global___AttrValue] | None = ..., + self, *, name: builtins.str | None = ..., attr: collections.abc.Mapping[builtins.str, global___AttrValue] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["attr", b"attr", "name", b"name"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/core/framework/cost_graph_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/cost_graph_pb2.pyi index d54c0196320a..3ba7d01628bf 100644 --- a/stubs/tensorflow/tensorflow/core/framework/cost_graph_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/cost_graph_pb2.pyi @@ -37,12 +37,11 @@ class CostGraphDef(google.protobuf.message.Message): preceding_node: builtins.int preceding_port: builtins.int def __init__( - self, - *, - preceding_node: builtins.int | None = ..., - preceding_port: builtins.int | None = ..., + self, *, preceding_node: builtins.int | None = ..., preceding_port: builtins.int | None = ... + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["preceding_node", b"preceding_node", "preceding_port", b"preceding_port"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["preceding_node", b"preceding_node", "preceding_port", b"preceding_port"]) -> None: ... @typing.final class OutputInfo(google.protobuf.message.Message): @@ -72,7 +71,12 @@ class CostGraphDef(google.protobuf.message.Message): dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["shape", b"shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["alias_input_port", b"alias_input_port", "dtype", b"dtype", "shape", b"shape", "size", b"size"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "alias_input_port", b"alias_input_port", "dtype", b"dtype", "shape", b"shape", "size", b"size" + ], + ) -> None: ... NAME_FIELD_NUMBER: builtins.int DEVICE_FIELD_NUMBER: builtins.int @@ -122,9 +126,13 @@ class CostGraphDef(google.protobuf.message.Message): inaccurate: builtins.bool """Are the costs inaccurate?""" @property - def input_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node.InputInfo]: ... + def input_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node.InputInfo]: ... @property - def output_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node.OutputInfo]: ... + def output_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node.OutputInfo]: ... 
@property def control_input(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: """Ids of the control inputs for this node.""" @@ -149,7 +157,43 @@ class CostGraphDef(google.protobuf.message.Message): control_input: collections.abc.Iterable[builtins.int] | None = ..., inaccurate: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["compute_cost", b"compute_cost", "compute_time", b"compute_time", "control_input", b"control_input", "device", b"device", "device_persistent_memory_size", b"device_persistent_memory_size", "device_temp_memory_size", b"device_temp_memory_size", "host_temp_memory_size", b"host_temp_memory_size", "id", b"id", "inaccurate", b"inaccurate", "input_info", b"input_info", "is_final", b"is_final", "memory_time", b"memory_time", "name", b"name", "output_info", b"output_info", "persistent_memory_size", b"persistent_memory_size", "temporary_memory_size", b"temporary_memory_size"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "compute_cost", + b"compute_cost", + "compute_time", + b"compute_time", + "control_input", + b"control_input", + "device", + b"device", + "device_persistent_memory_size", + b"device_persistent_memory_size", + "device_temp_memory_size", + b"device_temp_memory_size", + "host_temp_memory_size", + b"host_temp_memory_size", + "id", + b"id", + "inaccurate", + b"inaccurate", + "input_info", + b"input_info", + "is_final", + b"is_final", + "memory_time", + b"memory_time", + "name", + b"name", + "output_info", + b"output_info", + "persistent_memory_size", + b"persistent_memory_size", + "temporary_memory_size", + b"temporary_memory_size", + ], + ) -> None: ... @typing.final class AggregatedCost(google.protobuf.message.Message): @@ -163,12 +207,7 @@ class CostGraphDef(google.protobuf.message.Message): """Aggregated cost value.""" dimension: builtins.str """Aggregated cost dimension (e.g. 'memory', 'compute', 'network').""" - def __init__( - self, - *, - cost: builtins.float | None = ..., - dimension: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, cost: builtins.float | None = ..., dimension: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["cost", b"cost", "dimension", b"dimension"]) -> None: ... NODE_FIELD_NUMBER: builtins.int @@ -176,7 +215,9 @@ class CostGraphDef(google.protobuf.message.Message): @property def node(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node]: ... @property - def cost(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.AggregatedCost]: ... + def cost( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.AggregatedCost]: ... 
def __init__( self, *, diff --git a/stubs/tensorflow/tensorflow/core/framework/cpp_shape_inference_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/cpp_shape_inference_pb2.pyi index 070c7c3cdb63..1bf1c41641cc 100644 --- a/stubs/tensorflow/tensorflow/core/framework/cpp_shape_inference_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/cpp_shape_inference_pb2.pyi @@ -50,7 +50,11 @@ class CppShapeInferenceResult(google.protobuf.message.Message): SHAPE_AND_TYPE_FIELD_NUMBER: builtins.int is_set: builtins.bool @property - def shape_and_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CppShapeInferenceResult.HandleShapeAndType]: + def shape_and_type( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___CppShapeInferenceResult.HandleShapeAndType + ]: """Only valid if .""" def __init__( @@ -87,13 +91,20 @@ class CppShapeInferenceInputsNeeded(google.protobuf.message.Message): @property def input_tensors_needed(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... @property - def input_tensors_as_shapes_needed(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def input_tensors_as_shapes_needed( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... def __init__( self, *, input_tensors_needed: collections.abc.Iterable[builtins.int] | None = ..., input_tensors_as_shapes_needed: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["input_tensors_as_shapes_needed", b"input_tensors_as_shapes_needed", "input_tensors_needed", b"input_tensors_needed"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "input_tensors_as_shapes_needed", b"input_tensors_as_shapes_needed", "input_tensors_needed", b"input_tensors_needed" + ], + ) -> None: ... global___CppShapeInferenceInputsNeeded = CppShapeInferenceInputsNeeded diff --git a/stubs/tensorflow/tensorflow/core/framework/dataset_metadata_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/dataset_metadata_pb2.pyi index a1c7b19143c5..afde78f967f6 100644 --- a/stubs/tensorflow/tensorflow/core/framework/dataset_metadata_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/dataset_metadata_pb2.pyi @@ -19,11 +19,7 @@ class Metadata(google.protobuf.message.Message): NAME_FIELD_NUMBER: builtins.int name: builtins.bytes - def __init__( - self, - *, - name: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, name: builtins.bytes | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... 
global___Metadata = Metadata diff --git a/stubs/tensorflow/tensorflow/core/framework/dataset_options_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/dataset_options_pb2.pyi index 5837446a1e77..724249f3c540 100644 --- a/stubs/tensorflow/tensorflow/core/framework/dataset_options_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/dataset_options_pb2.pyi @@ -25,7 +25,9 @@ class _AutoShardPolicy: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _AutoShardPolicyEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AutoShardPolicy.ValueType], builtins.type): +class _AutoShardPolicyEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AutoShardPolicy.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor AUTO: _AutoShardPolicy.ValueType # 0 """AUTO: Attempts FILE-based sharding, falling back to DATA-based sharding.""" @@ -77,7 +79,9 @@ class _ExternalStatePolicy: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _ExternalStatePolicyEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ExternalStatePolicy.ValueType], builtins.type): +class _ExternalStatePolicyEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ExternalStatePolicy.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor POLICY_WARN: _ExternalStatePolicy.ValueType # 0 POLICY_IGNORE: _ExternalStatePolicy.ValueType # 1 @@ -116,18 +120,76 @@ class AutotuneOptions(google.protobuf.message.Message): autotune_algorithm: tensorflow.core.framework.model_pb2.AutotuneAlgorithm.ValueType | None = ..., initial_parallelism: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["autotune_algorithm", b"autotune_algorithm", "cpu_budget", b"cpu_budget", "enabled", b"enabled", "initial_parallelism", b"initial_parallelism", "optional_autotune_algorithm", b"optional_autotune_algorithm", "optional_cpu_budget", b"optional_cpu_budget", "optional_enabled", b"optional_enabled", "optional_initial_parallelism", b"optional_initial_parallelism", "optional_ram_budget", b"optional_ram_budget", "ram_budget", b"ram_budget"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["autotune_algorithm", b"autotune_algorithm", "cpu_budget", b"cpu_budget", "enabled", b"enabled", "initial_parallelism", b"initial_parallelism", "optional_autotune_algorithm", b"optional_autotune_algorithm", "optional_cpu_budget", b"optional_cpu_budget", "optional_enabled", b"optional_enabled", "optional_initial_parallelism", b"optional_initial_parallelism", "optional_ram_budget", b"optional_ram_budget", "ram_budget", b"ram_budget"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "autotune_algorithm", + b"autotune_algorithm", + "cpu_budget", + b"cpu_budget", + "enabled", + b"enabled", + "initial_parallelism", + b"initial_parallelism", + "optional_autotune_algorithm", + b"optional_autotune_algorithm", + "optional_cpu_budget", + b"optional_cpu_budget", + "optional_enabled", + b"optional_enabled", + "optional_initial_parallelism", + b"optional_initial_parallelism", + "optional_ram_budget", + b"optional_ram_budget", + "ram_budget", + b"ram_budget", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "autotune_algorithm", + b"autotune_algorithm", + "cpu_budget", + b"cpu_budget", + "enabled", + b"enabled", + "initial_parallelism", + b"initial_parallelism", + "optional_autotune_algorithm", + b"optional_autotune_algorithm", + "optional_cpu_budget", + b"optional_cpu_budget", + "optional_enabled", + b"optional_enabled", + "optional_initial_parallelism", + b"optional_initial_parallelism", + "optional_ram_budget", + b"optional_ram_budget", + "ram_budget", + b"ram_budget", + ], + ) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_autotune_algorithm", b"optional_autotune_algorithm"]) -> typing.Literal["autotune_algorithm"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_autotune_algorithm", b"optional_autotune_algorithm"] + ) -> typing.Literal["autotune_algorithm"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_cpu_budget", b"optional_cpu_budget"]) -> typing.Literal["cpu_budget"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_cpu_budget", b"optional_cpu_budget"] + ) -> typing.Literal["cpu_budget"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_enabled", b"optional_enabled"]) -> typing.Literal["enabled"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_enabled", b"optional_enabled"] + ) -> typing.Literal["enabled"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_initial_parallelism", b"optional_initial_parallelism"]) -> typing.Literal["initial_parallelism"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_initial_parallelism", b"optional_initial_parallelism"] + ) -> typing.Literal["initial_parallelism"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_ram_budget", b"optional_ram_budget"]) -> typing.Literal["ram_budget"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_ram_budget", b"optional_ram_budget"] + ) -> typing.Literal["ram_budget"] | None: ... global___AutotuneOptions = AutotuneOptions @@ -141,7 +203,9 @@ class CardinalityOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ComputeLevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CardinalityOptions._ComputeLevel.ValueType], builtins.type): + class _ComputeLevelEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CardinalityOptions._ComputeLevel.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor CARDINALITY_COMPUTE_UNSPECIFIED: CardinalityOptions._ComputeLevel.ValueType # 0 CARDINALITY_COMPUTE_LOW: CardinalityOptions._ComputeLevel.ValueType # 1 @@ -172,11 +236,7 @@ class CardinalityOptions(google.protobuf.message.Message): COMPUTE_LEVEL_FIELD_NUMBER: builtins.int compute_level: global___CardinalityOptions.ComputeLevel.ValueType - def __init__( - self, - *, - compute_level: global___CardinalityOptions.ComputeLevel.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, compute_level: global___CardinalityOptions.ComputeLevel.ValueType | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["compute_level", b"compute_level"]) -> None: ... 
global___CardinalityOptions = CardinalityOptions @@ -192,14 +252,25 @@ class DistributeOptions(google.protobuf.message.Message): auto_shard_policy: global___AutoShardPolicy.ValueType num_devices: builtins.int def __init__( + self, *, auto_shard_policy: global___AutoShardPolicy.ValueType | None = ..., num_devices: builtins.int | None = ... + ) -> None: ... + def HasField( + self, field_name: typing.Literal["num_devices", b"num_devices", "optional_num_devices", b"optional_num_devices"] + ) -> builtins.bool: ... + def ClearField( self, - *, - auto_shard_policy: global___AutoShardPolicy.ValueType | None = ..., - num_devices: builtins.int | None = ..., + field_name: typing.Literal[ + "auto_shard_policy", + b"auto_shard_policy", + "num_devices", + b"num_devices", + "optional_num_devices", + b"optional_num_devices", + ], ) -> None: ... - def HasField(self, field_name: typing.Literal["num_devices", b"num_devices", "optional_num_devices", b"optional_num_devices"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["auto_shard_policy", b"auto_shard_policy", "num_devices", b"num_devices", "optional_num_devices", b"optional_num_devices"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["optional_num_devices", b"optional_num_devices"]) -> typing.Literal["num_devices"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_num_devices", b"optional_num_devices"] + ) -> typing.Literal["num_devices"] | None: ... global___DistributeOptions = DistributeOptions @@ -249,32 +320,160 @@ class OptimizationOptions(google.protobuf.message.Message): inject_prefetch: builtins.bool | None = ..., seq_interleave_prefetch: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["apply_default_optimizations", b"apply_default_optimizations", "filter_fusion", b"filter_fusion", "filter_parallelization", b"filter_parallelization", "inject_prefetch", b"inject_prefetch", "map_and_batch_fusion", b"map_and_batch_fusion", "map_and_filter_fusion", b"map_and_filter_fusion", "map_fusion", b"map_fusion", "map_parallelization", b"map_parallelization", "noop_elimination", b"noop_elimination", "optional_apply_default_optimizations", b"optional_apply_default_optimizations", "optional_filter_fusion", b"optional_filter_fusion", "optional_filter_parallelization", b"optional_filter_parallelization", "optional_inject_prefetch", b"optional_inject_prefetch", "optional_map_and_batch_fusion", b"optional_map_and_batch_fusion", "optional_map_and_filter_fusion", b"optional_map_and_filter_fusion", "optional_map_fusion", b"optional_map_fusion", "optional_map_parallelization", b"optional_map_parallelization", "optional_noop_elimination", b"optional_noop_elimination", "optional_parallel_batch", b"optional_parallel_batch", "optional_seq_interleave_prefetch", b"optional_seq_interleave_prefetch", "optional_shuffle_and_repeat_fusion", b"optional_shuffle_and_repeat_fusion", "parallel_batch", b"parallel_batch", "seq_interleave_prefetch", b"seq_interleave_prefetch", "shuffle_and_repeat_fusion", b"shuffle_and_repeat_fusion"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["apply_default_optimizations", b"apply_default_optimizations", "filter_fusion", b"filter_fusion", "filter_parallelization", b"filter_parallelization", "inject_prefetch", b"inject_prefetch", "map_and_batch_fusion", b"map_and_batch_fusion", "map_and_filter_fusion", b"map_and_filter_fusion", "map_fusion", b"map_fusion", "map_parallelization", b"map_parallelization", "noop_elimination", b"noop_elimination", "optional_apply_default_optimizations", b"optional_apply_default_optimizations", "optional_filter_fusion", b"optional_filter_fusion", "optional_filter_parallelization", b"optional_filter_parallelization", "optional_inject_prefetch", b"optional_inject_prefetch", "optional_map_and_batch_fusion", b"optional_map_and_batch_fusion", "optional_map_and_filter_fusion", b"optional_map_and_filter_fusion", "optional_map_fusion", b"optional_map_fusion", "optional_map_parallelization", b"optional_map_parallelization", "optional_noop_elimination", b"optional_noop_elimination", "optional_parallel_batch", b"optional_parallel_batch", "optional_seq_interleave_prefetch", b"optional_seq_interleave_prefetch", "optional_shuffle_and_repeat_fusion", b"optional_shuffle_and_repeat_fusion", "parallel_batch", b"parallel_batch", "seq_interleave_prefetch", b"seq_interleave_prefetch", "shuffle_and_repeat_fusion", b"shuffle_and_repeat_fusion"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "apply_default_optimizations", + b"apply_default_optimizations", + "filter_fusion", + b"filter_fusion", + "filter_parallelization", + b"filter_parallelization", + "inject_prefetch", + b"inject_prefetch", + "map_and_batch_fusion", + b"map_and_batch_fusion", + "map_and_filter_fusion", + b"map_and_filter_fusion", + "map_fusion", + b"map_fusion", + "map_parallelization", + b"map_parallelization", + "noop_elimination", + b"noop_elimination", + "optional_apply_default_optimizations", + b"optional_apply_default_optimizations", + "optional_filter_fusion", + b"optional_filter_fusion", + "optional_filter_parallelization", + b"optional_filter_parallelization", + "optional_inject_prefetch", + b"optional_inject_prefetch", + "optional_map_and_batch_fusion", + b"optional_map_and_batch_fusion", + "optional_map_and_filter_fusion", + b"optional_map_and_filter_fusion", + "optional_map_fusion", + b"optional_map_fusion", + "optional_map_parallelization", + b"optional_map_parallelization", + "optional_noop_elimination", + b"optional_noop_elimination", + "optional_parallel_batch", + b"optional_parallel_batch", + "optional_seq_interleave_prefetch", + b"optional_seq_interleave_prefetch", + "optional_shuffle_and_repeat_fusion", + b"optional_shuffle_and_repeat_fusion", + "parallel_batch", + b"parallel_batch", + "seq_interleave_prefetch", + b"seq_interleave_prefetch", + "shuffle_and_repeat_fusion", + b"shuffle_and_repeat_fusion", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "apply_default_optimizations", + b"apply_default_optimizations", + "filter_fusion", + b"filter_fusion", + "filter_parallelization", + b"filter_parallelization", + "inject_prefetch", + b"inject_prefetch", + "map_and_batch_fusion", + b"map_and_batch_fusion", + "map_and_filter_fusion", + b"map_and_filter_fusion", + "map_fusion", + b"map_fusion", + "map_parallelization", + b"map_parallelization", + "noop_elimination", + b"noop_elimination", + "optional_apply_default_optimizations", + b"optional_apply_default_optimizations", + "optional_filter_fusion", + b"optional_filter_fusion", + "optional_filter_parallelization", + b"optional_filter_parallelization", + "optional_inject_prefetch", + b"optional_inject_prefetch", + "optional_map_and_batch_fusion", + b"optional_map_and_batch_fusion", + "optional_map_and_filter_fusion", + b"optional_map_and_filter_fusion", + "optional_map_fusion", + b"optional_map_fusion", + "optional_map_parallelization", + b"optional_map_parallelization", + "optional_noop_elimination", + b"optional_noop_elimination", + "optional_parallel_batch", + b"optional_parallel_batch", + "optional_seq_interleave_prefetch", + b"optional_seq_interleave_prefetch", + "optional_shuffle_and_repeat_fusion", + b"optional_shuffle_and_repeat_fusion", + "parallel_batch", + b"parallel_batch", + "seq_interleave_prefetch", + b"seq_interleave_prefetch", + "shuffle_and_repeat_fusion", + b"shuffle_and_repeat_fusion", + ], + ) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_apply_default_optimizations", b"optional_apply_default_optimizations"]) -> typing.Literal["apply_default_optimizations"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_apply_default_optimizations", b"optional_apply_default_optimizations"] + ) -> typing.Literal["apply_default_optimizations"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_filter_fusion", b"optional_filter_fusion"]) -> typing.Literal["filter_fusion"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_filter_fusion", b"optional_filter_fusion"] + ) -> typing.Literal["filter_fusion"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_filter_parallelization", b"optional_filter_parallelization"]) -> typing.Literal["filter_parallelization"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_filter_parallelization", b"optional_filter_parallelization"] + ) -> typing.Literal["filter_parallelization"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_inject_prefetch", b"optional_inject_prefetch"]) -> typing.Literal["inject_prefetch"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_inject_prefetch", b"optional_inject_prefetch"] + ) -> typing.Literal["inject_prefetch"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_map_and_batch_fusion", b"optional_map_and_batch_fusion"]) -> typing.Literal["map_and_batch_fusion"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_map_and_batch_fusion", b"optional_map_and_batch_fusion"] + ) -> typing.Literal["map_and_batch_fusion"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_map_and_filter_fusion", b"optional_map_and_filter_fusion"]) -> typing.Literal["map_and_filter_fusion"] | None: ... 
+ def WhichOneof( + self, oneof_group: typing.Literal["optional_map_and_filter_fusion", b"optional_map_and_filter_fusion"] + ) -> typing.Literal["map_and_filter_fusion"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_map_fusion", b"optional_map_fusion"]) -> typing.Literal["map_fusion"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_map_fusion", b"optional_map_fusion"] + ) -> typing.Literal["map_fusion"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_map_parallelization", b"optional_map_parallelization"]) -> typing.Literal["map_parallelization"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_map_parallelization", b"optional_map_parallelization"] + ) -> typing.Literal["map_parallelization"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_noop_elimination", b"optional_noop_elimination"]) -> typing.Literal["noop_elimination"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_noop_elimination", b"optional_noop_elimination"] + ) -> typing.Literal["noop_elimination"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_parallel_batch", b"optional_parallel_batch"]) -> typing.Literal["parallel_batch"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_parallel_batch", b"optional_parallel_batch"] + ) -> typing.Literal["parallel_batch"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_seq_interleave_prefetch", b"optional_seq_interleave_prefetch"]) -> typing.Literal["seq_interleave_prefetch"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_seq_interleave_prefetch", b"optional_seq_interleave_prefetch"] + ) -> typing.Literal["seq_interleave_prefetch"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_shuffle_and_repeat_fusion", b"optional_shuffle_and_repeat_fusion"]) -> typing.Literal["shuffle_and_repeat_fusion"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_shuffle_and_repeat_fusion", b"optional_shuffle_and_repeat_fusion"] + ) -> typing.Literal["shuffle_and_repeat_fusion"] | None: ... global___OptimizationOptions = OptimizationOptions @@ -286,14 +485,14 @@ class ServiceOptions(google.protobuf.message.Message): PINNED_FIELD_NUMBER: builtins.int pinned: builtins.bool - def __init__( - self, - *, - pinned: builtins.bool | None = ..., - ) -> None: ... - def HasField(self, field_name: typing.Literal["optional_pinned", b"optional_pinned", "pinned", b"pinned"]) -> builtins.bool: ... + def __init__(self, *, pinned: builtins.bool | None = ...) -> None: ... + def HasField( + self, field_name: typing.Literal["optional_pinned", b"optional_pinned", "pinned", b"pinned"] + ) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["optional_pinned", b"optional_pinned", "pinned", b"pinned"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["optional_pinned", b"optional_pinned"]) -> typing.Literal["pinned"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_pinned", b"optional_pinned"] + ) -> typing.Literal["pinned"] | None: ... 
global___ServiceOptions = ServiceOptions @@ -308,17 +507,42 @@ class ThreadingOptions(google.protobuf.message.Message): max_intra_op_parallelism: builtins.int private_threadpool_size: builtins.int def __init__( + self, *, max_intra_op_parallelism: builtins.int | None = ..., private_threadpool_size: builtins.int | None = ... + ) -> None: ... + def HasField( self, - *, - max_intra_op_parallelism: builtins.int | None = ..., - private_threadpool_size: builtins.int | None = ..., + field_name: typing.Literal[ + "max_intra_op_parallelism", + b"max_intra_op_parallelism", + "optional_max_intra_op_parallelism", + b"optional_max_intra_op_parallelism", + "optional_private_threadpool_size", + b"optional_private_threadpool_size", + "private_threadpool_size", + b"private_threadpool_size", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "max_intra_op_parallelism", + b"max_intra_op_parallelism", + "optional_max_intra_op_parallelism", + b"optional_max_intra_op_parallelism", + "optional_private_threadpool_size", + b"optional_private_threadpool_size", + "private_threadpool_size", + b"private_threadpool_size", + ], ) -> None: ... - def HasField(self, field_name: typing.Literal["max_intra_op_parallelism", b"max_intra_op_parallelism", "optional_max_intra_op_parallelism", b"optional_max_intra_op_parallelism", "optional_private_threadpool_size", b"optional_private_threadpool_size", "private_threadpool_size", b"private_threadpool_size"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["max_intra_op_parallelism", b"max_intra_op_parallelism", "optional_max_intra_op_parallelism", b"optional_max_intra_op_parallelism", "optional_private_threadpool_size", b"optional_private_threadpool_size", "private_threadpool_size", b"private_threadpool_size"]) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_max_intra_op_parallelism", b"optional_max_intra_op_parallelism"]) -> typing.Literal["max_intra_op_parallelism"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_max_intra_op_parallelism", b"optional_max_intra_op_parallelism"] + ) -> typing.Literal["max_intra_op_parallelism"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_private_threadpool_size", b"optional_private_threadpool_size"]) -> typing.Literal["private_threadpool_size"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_private_threadpool_size", b"optional_private_threadpool_size"] + ) -> typing.Literal["private_threadpool_size"] | None: ... global___ThreadingOptions = ThreadingOptions @@ -390,19 +614,107 @@ class Options(google.protobuf.message.Message): symbolic_checkpoint: builtins.bool | None = ..., warm_start: builtins.bool | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["autotune_options", b"autotune_options", "dataset_name", b"dataset_name", "deterministic", b"deterministic", "distribute_options", b"distribute_options", "external_state_policy", b"external_state_policy", "optimization_options", b"optimization_options", "optional_dataset_name", b"optional_dataset_name", "optional_deterministic", b"optional_deterministic", "optional_external_state_policy", b"optional_external_state_policy", "optional_slack", b"optional_slack", "optional_symbolic_checkpoint", b"optional_symbolic_checkpoint", "optional_warm_start", b"optional_warm_start", "service_options", b"service_options", "slack", b"slack", "symbolic_checkpoint", b"symbolic_checkpoint", "threading_options", b"threading_options", "warm_start", b"warm_start"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["autotune_options", b"autotune_options", "dataset_name", b"dataset_name", "deterministic", b"deterministic", "distribute_options", b"distribute_options", "external_state_policy", b"external_state_policy", "framework_type", b"framework_type", "optimization_options", b"optimization_options", "optional_dataset_name", b"optional_dataset_name", "optional_deterministic", b"optional_deterministic", "optional_external_state_policy", b"optional_external_state_policy", "optional_slack", b"optional_slack", "optional_symbolic_checkpoint", b"optional_symbolic_checkpoint", "optional_warm_start", b"optional_warm_start", "service_options", b"service_options", "slack", b"slack", "symbolic_checkpoint", b"symbolic_checkpoint", "threading_options", b"threading_options", "warm_start", b"warm_start"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "autotune_options", + b"autotune_options", + "dataset_name", + b"dataset_name", + "deterministic", + b"deterministic", + "distribute_options", + b"distribute_options", + "external_state_policy", + b"external_state_policy", + "optimization_options", + b"optimization_options", + "optional_dataset_name", + b"optional_dataset_name", + "optional_deterministic", + b"optional_deterministic", + "optional_external_state_policy", + b"optional_external_state_policy", + "optional_slack", + b"optional_slack", + "optional_symbolic_checkpoint", + b"optional_symbolic_checkpoint", + "optional_warm_start", + b"optional_warm_start", + "service_options", + b"service_options", + "slack", + b"slack", + "symbolic_checkpoint", + b"symbolic_checkpoint", + "threading_options", + b"threading_options", + "warm_start", + b"warm_start", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "autotune_options", + b"autotune_options", + "dataset_name", + b"dataset_name", + "deterministic", + b"deterministic", + "distribute_options", + b"distribute_options", + "external_state_policy", + b"external_state_policy", + "framework_type", + b"framework_type", + "optimization_options", + b"optimization_options", + "optional_dataset_name", + b"optional_dataset_name", + "optional_deterministic", + b"optional_deterministic", + "optional_external_state_policy", + b"optional_external_state_policy", + "optional_slack", + b"optional_slack", + "optional_symbolic_checkpoint", + b"optional_symbolic_checkpoint", + "optional_warm_start", + b"optional_warm_start", + "service_options", + b"service_options", + "slack", + b"slack", + "symbolic_checkpoint", + b"symbolic_checkpoint", + "threading_options", + b"threading_options", + "warm_start", + b"warm_start", + ], + ) -> None: ... 
@typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_dataset_name", b"optional_dataset_name"]) -> typing.Literal["dataset_name"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_dataset_name", b"optional_dataset_name"] + ) -> typing.Literal["dataset_name"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_deterministic", b"optional_deterministic"]) -> typing.Literal["deterministic"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_deterministic", b"optional_deterministic"] + ) -> typing.Literal["deterministic"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_external_state_policy", b"optional_external_state_policy"]) -> typing.Literal["external_state_policy"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_external_state_policy", b"optional_external_state_policy"] + ) -> typing.Literal["external_state_policy"] | None: ... @typing.overload def WhichOneof(self, oneof_group: typing.Literal["optional_slack", b"optional_slack"]) -> typing.Literal["slack"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_symbolic_checkpoint", b"optional_symbolic_checkpoint"]) -> typing.Literal["symbolic_checkpoint"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_symbolic_checkpoint", b"optional_symbolic_checkpoint"] + ) -> typing.Literal["symbolic_checkpoint"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_warm_start", b"optional_warm_start"]) -> typing.Literal["warm_start"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_warm_start", b"optional_warm_start"] + ) -> typing.Literal["warm_start"] | None: ... global___Options = Options diff --git a/stubs/tensorflow/tensorflow/core/framework/dataset_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/dataset_pb2.pyi index 72392b2b7698..0cfb9bb85928 100644 --- a/stubs/tensorflow/tensorflow/core/framework/dataset_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/dataset_pb2.pyi @@ -51,7 +51,12 @@ class CompressedComponentMetadata(google.protobuf.message.Message): uncompressed_bytes: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["tensor_shape", b"tensor_shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["dtype", b"dtype", "tensor_shape", b"tensor_shape", "uncompressed_bytes", b"uncompressed_bytes"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "dtype", b"dtype", "tensor_shape", b"tensor_shape", "uncompressed_bytes", b"uncompressed_bytes" + ], + ) -> None: ... global___CompressedComponentMetadata = CompressedComponentMetadata @@ -72,7 +77,9 @@ class CompressedElement(google.protobuf.message.Message): tensorflow/core/data/compression_utils.cc. 
""" @property - def component_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CompressedComponentMetadata]: + def component_metadata( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CompressedComponentMetadata]: """Metadata for the components of the element.""" def __init__( @@ -82,7 +89,9 @@ class CompressedElement(google.protobuf.message.Message): component_metadata: collections.abc.Iterable[global___CompressedComponentMetadata] | None = ..., version: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["component_metadata", b"component_metadata", "data", b"data", "version", b"version"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["component_metadata", b"component_metadata", "data", b"data", "version", b"version"] + ) -> None: ... global___CompressedElement = CompressedElement @@ -94,11 +103,13 @@ class UncompressedElement(google.protobuf.message.Message): COMPONENTS_FIELD_NUMBER: builtins.int @property - def components(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_pb2.TensorProto]: ... - def __init__( + def components( self, - *, - components: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ..., + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.tensor_pb2.TensorProto + ]: ... + def __init__( + self, *, components: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["components", b"components"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/core/framework/device_attributes_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/device_attributes_pb2.pyi index aafdc2c8a51e..41abb3a43b72 100644 --- a/stubs/tensorflow/tensorflow/core/framework/device_attributes_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/device_attributes_pb2.pyi @@ -24,13 +24,11 @@ class InterconnectLink(google.protobuf.message.Message): type: builtins.str strength: builtins.int def __init__( - self, - *, - device_id: builtins.int | None = ..., - type: builtins.str | None = ..., - strength: builtins.int | None = ..., + self, *, device_id: builtins.int | None = ..., type: builtins.str | None = ..., strength: builtins.int | None = ... + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["device_id", b"device_id", "strength", b"strength", "type", b"type"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["device_id", b"device_id", "strength", b"strength", "type", b"type"]) -> None: ... global___InterconnectLink = InterconnectLink @@ -41,11 +39,7 @@ class LocalLinks(google.protobuf.message.Message): LINK_FIELD_NUMBER: builtins.int @property def link(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InterconnectLink]: ... - def __init__( - self, - *, - link: collections.abc.Iterable[global___InterconnectLink] | None = ..., - ) -> None: ... + def __init__(self, *, link: collections.abc.Iterable[global___InterconnectLink] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["link", b"link"]) -> None: ... 
global___LocalLinks = LocalLinks @@ -68,14 +62,12 @@ class DeviceLocality(google.protobuf.message.Message): """Optional local interconnect links to other devices.""" def __init__( - self, - *, - bus_id: builtins.int | None = ..., - numa_node: builtins.int | None = ..., - links: global___LocalLinks | None = ..., + self, *, bus_id: builtins.int | None = ..., numa_node: builtins.int | None = ..., links: global___LocalLinks | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["links", b"links"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bus_id", b"bus_id", "links", b"links", "numa_node", b"numa_node"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["bus_id", b"bus_id", "links", b"links", "numa_node", b"numa_node"] + ) -> None: ... global___DeviceLocality = DeviceLocality @@ -125,6 +117,24 @@ class DeviceAttributes(google.protobuf.message.Message): xla_global_id: builtins.int | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["locality", b"locality"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["device_type", b"device_type", "incarnation", b"incarnation", "locality", b"locality", "memory_limit", b"memory_limit", "name", b"name", "physical_device_desc", b"physical_device_desc", "xla_global_id", b"xla_global_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "device_type", + b"device_type", + "incarnation", + b"incarnation", + "locality", + b"locality", + "memory_limit", + b"memory_limit", + "name", + b"name", + "physical_device_desc", + b"physical_device_desc", + "xla_global_id", + b"xla_global_id", + ], + ) -> None: ... global___DeviceAttributes = DeviceAttributes diff --git a/stubs/tensorflow/tensorflow/core/framework/full_type_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/full_type_pb2.pyi index e37ac0ff0495..8025593122d5 100644 --- a/stubs/tensorflow/tensorflow/core/framework/full_type_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/full_type_pb2.pyi @@ -24,7 +24,9 @@ class _FullTypeId: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _FullTypeIdEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FullTypeId.ValueType], builtins.type): +class _FullTypeIdEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FullTypeId.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor TFT_UNSET: _FullTypeId.ValueType # 0 """The default represents an uninitialized values.""" @@ -607,7 +609,9 @@ class FullTypeDef(google.protobuf.message.Message): i: builtins.int | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["attr", b"attr", "i", b"i", "s", b"s"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["args", b"args", "attr", b"attr", "i", b"i", "s", b"s", "type_id", b"type_id"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["args", b"args", "attr", b"attr", "i", b"i", "s", b"s", "type_id", b"type_id"] + ) -> None: ... def WhichOneof(self, oneof_group: typing.Literal["attr", b"attr"]) -> typing.Literal["s", "i"] | None: ... 
global___FullTypeDef = FullTypeDef diff --git a/stubs/tensorflow/tensorflow/core/framework/function_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/function_pb2.pyi index c0908173a958..8506f933b2c0 100644 --- a/stubs/tensorflow/tensorflow/core/framework/function_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/function_pb2.pyi @@ -30,7 +30,9 @@ class FunctionDefLibrary(google.protobuf.message.Message): @property def gradient(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GradientDef]: ... @property - def registered_gradients(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RegisteredGradient]: ... + def registered_gradients( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RegisteredGradient]: ... def __init__( self, *, @@ -38,7 +40,12 @@ class FunctionDefLibrary(google.protobuf.message.Message): gradient: collections.abc.Iterable[global___GradientDef] | None = ..., registered_gradients: collections.abc.Iterable[global___RegisteredGradient] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["function", b"function", "gradient", b"gradient", "registered_gradients", b"registered_gradients"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "function", b"function", "gradient", b"gradient", "registered_gradients", b"registered_gradients" + ], + ) -> None: ... global___FunctionDefLibrary = FunctionDefLibrary @@ -64,10 +71,7 @@ class FunctionDef(google.protobuf.message.Message): @property def value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue: ... def __init__( - self, - *, - key: builtins.str | None = ..., - value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ..., + self, *, key: builtins.str | None = ..., value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -90,21 +94,18 @@ class FunctionDef(google.protobuf.message.Message): @property def value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue: ... def __init__( - self, - *, - key: builtins.str | None = ..., - value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ..., + self, *, key: builtins.str | None = ..., value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... ATTR_FIELD_NUMBER: builtins.int @property - def attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: ... - def __init__( + def attr( self, - *, - attr: collections.abc.Mapping[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue] | None = ..., + ) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: ... + def __init__( + self, *, attr: collections.abc.Mapping[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["attr", b"attr"]) -> None: ... 
@@ -117,12 +118,7 @@ class FunctionDef(google.protobuf.message.Message): key: builtins.int @property def value(self) -> global___FunctionDef.ArgAttrs: ... - def __init__( - self, - *, - key: builtins.int | None = ..., - value: global___FunctionDef.ArgAttrs | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.int | None = ..., value: global___FunctionDef.ArgAttrs | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -134,12 +130,7 @@ class FunctionDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.int value: builtins.int - def __init__( - self, - *, - key: builtins.int | None = ..., - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.int | None = ..., value: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @typing.final @@ -150,12 +141,7 @@ class FunctionDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @typing.final @@ -166,12 +152,7 @@ class FunctionDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... SIGNATURE_FIELD_NUMBER: builtins.int @@ -188,7 +169,9 @@ class FunctionDef(google.protobuf.message.Message): """ @property - def attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: + def attr( + self, + ) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: """Attributes specific to this function definition.""" @property @@ -207,7 +190,9 @@ class FunctionDef(google.protobuf.message.Message): """ @property - def node_def(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.node_def_pb2.NodeDef]: + def node_def( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.node_def_pb2.NodeDef]: """The body of the function. Unlike the NodeDefs in a GraphDef, attrs may have values of type `placeholder` and the `input` field uses the "output" format above. @@ -241,7 +226,25 @@ class FunctionDef(google.protobuf.message.Message): control_ret: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["signature", b"signature"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["arg_attr", b"arg_attr", "attr", b"attr", "control_ret", b"control_ret", "node_def", b"node_def", "resource_arg_unique_id", b"resource_arg_unique_id", "ret", b"ret", "signature", b"signature"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "arg_attr", + b"arg_attr", + "attr", + b"attr", + "control_ret", + b"control_ret", + "node_def", + b"node_def", + "resource_arg_unique_id", + b"resource_arg_unique_id", + "ret", + b"ret", + "signature", + b"signature", + ], + ) -> None: ... global___FunctionDef = FunctionDef @@ -275,13 +278,10 @@ class GradientDef(google.protobuf.message.Message): """The function name.""" gradient_func: builtins.str """The gradient function's name.""" - def __init__( - self, - *, - function_name: builtins.str | None = ..., - gradient_func: builtins.str | None = ..., + def __init__(self, *, function_name: builtins.str | None = ..., gradient_func: builtins.str | None = ...) -> None: ... + def ClearField( + self, field_name: typing.Literal["function_name", b"function_name", "gradient_func", b"gradient_func"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["function_name", b"function_name", "gradient_func", b"gradient_func"]) -> None: ... global___GradientDef = GradientDef @@ -301,12 +301,9 @@ class RegisteredGradient(google.protobuf.message.Message): """The gradient function's name.""" registered_op_type: builtins.str """The gradient function's registered op type.""" - def __init__( - self, - *, - gradient_func: builtins.str | None = ..., - registered_op_type: builtins.str | None = ..., + def __init__(self, *, gradient_func: builtins.str | None = ..., registered_op_type: builtins.str | None = ...) -> None: ... + def ClearField( + self, field_name: typing.Literal["gradient_func", b"gradient_func", "registered_op_type", b"registered_op_type"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["gradient_func", b"gradient_func", "registered_op_type", b"registered_op_type"]) -> None: ... global___RegisteredGradient = RegisteredGradient diff --git a/stubs/tensorflow/tensorflow/core/framework/graph_debug_info_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/graph_debug_info_pb2.pyi index 739c2b5bd300..2c3f6761f1ef 100644 --- a/stubs/tensorflow/tensorflow/core/framework/graph_debug_info_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/graph_debug_info_pb2.pyi @@ -49,8 +49,18 @@ class GraphDebugInfo(google.protobuf.message.Message): func: builtins.str | None = ..., code: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["code", b"code", "col", b"col", "file_index", b"file_index", "func", b"func", "line", b"line"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["code", b"code", "col", b"col", "file_index", b"file_index", "func", b"func", "line", b"line"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "code", b"code", "col", b"col", "file_index", b"file_index", "func", b"func", "line", b"line" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "code", b"code", "col", b"col", "file_index", b"file_index", "func", b"func", "line", b"line" + ], + ) -> None: ... 
@typing.final class StackTrace(google.protobuf.message.Message): @@ -61,7 +71,9 @@ class GraphDebugInfo(google.protobuf.message.Message): FILE_LINE_COLS_FIELD_NUMBER: builtins.int FRAME_ID_FIELD_NUMBER: builtins.int @property - def file_line_cols(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphDebugInfo.FileLineCol]: + def file_line_cols( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphDebugInfo.FileLineCol]: """Deprecated.""" @property @@ -72,7 +84,9 @@ class GraphDebugInfo(google.protobuf.message.Message): file_line_cols: collections.abc.Iterable[global___GraphDebugInfo.FileLineCol] | None = ..., frame_id: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["file_line_cols", b"file_line_cols", "frame_id", b"frame_id"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["file_line_cols", b"file_line_cols", "frame_id", b"frame_id"] + ) -> None: ... @typing.final class FramesByIdEntry(google.protobuf.message.Message): @@ -84,10 +98,7 @@ class GraphDebugInfo(google.protobuf.message.Message): @property def value(self) -> global___GraphDebugInfo.FileLineCol: ... def __init__( - self, - *, - key: builtins.int | None = ..., - value: global___GraphDebugInfo.FileLineCol | None = ..., + self, *, key: builtins.int | None = ..., value: global___GraphDebugInfo.FileLineCol | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -101,12 +112,7 @@ class GraphDebugInfo(google.protobuf.message.Message): key: builtins.int @property def value(self) -> global___GraphDebugInfo.StackTrace: ... - def __init__( - self, - *, - key: builtins.int | None = ..., - value: global___GraphDebugInfo.StackTrace | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.int | None = ..., value: global___GraphDebugInfo.StackTrace | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -119,12 +125,7 @@ class GraphDebugInfo(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___GraphDebugInfo.StackTrace: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___GraphDebugInfo.StackTrace | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___GraphDebugInfo.StackTrace | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -136,12 +137,7 @@ class GraphDebugInfo(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.int - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
@@ -163,7 +159,9 @@ class GraphDebugInfo(google.protobuf.message.Message): """ @property - def traces_by_id(self) -> google.protobuf.internal.containers.MessageMap[builtins.int, global___GraphDebugInfo.StackTrace]: ... + def traces_by_id( + self, + ) -> google.protobuf.internal.containers.MessageMap[builtins.int, global___GraphDebugInfo.StackTrace]: ... @property def traces(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___GraphDebugInfo.StackTrace]: """Deprecated.""" @@ -193,6 +191,20 @@ class GraphDebugInfo(google.protobuf.message.Message): traces: collections.abc.Mapping[builtins.str, global___GraphDebugInfo.StackTrace] | None = ..., name_to_trace_id: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["files", b"files", "frames_by_id", b"frames_by_id", "name_to_trace_id", b"name_to_trace_id", "traces", b"traces", "traces_by_id", b"traces_by_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "files", + b"files", + "frames_by_id", + b"frames_by_id", + "name_to_trace_id", + b"name_to_trace_id", + "traces", + b"traces", + "traces_by_id", + b"traces_by_id", + ], + ) -> None: ... global___GraphDebugInfo = GraphDebugInfo diff --git a/stubs/tensorflow/tensorflow/core/framework/graph_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/graph_pb2.pyi index 1158f045aba3..3a14bc413e52 100644 --- a/stubs/tensorflow/tensorflow/core/framework/graph_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/graph_pb2.pyi @@ -34,7 +34,9 @@ class GraphDef(google.protobuf.message.Message): compatible, this field is entirely ignored. """ @property - def node(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.node_def_pb2.NodeDef]: ... + def node( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.node_def_pb2.NodeDef]: ... @property def versions(self) -> tensorflow.core.framework.versions_pb2.VersionDef: """Compatibility versions of the graph. See core/public/version.h for version @@ -44,7 +46,7 @@ class GraphDef(google.protobuf.message.Message): @property def library(self) -> tensorflow.core.framework.function_pb2.FunctionDefLibrary: - """"library" provides user-defined functions. + """ "library" provides user-defined functions. Naming: * library.function.name are in a flat namespace. @@ -85,7 +87,14 @@ class GraphDef(google.protobuf.message.Message): library: tensorflow.core.framework.function_pb2.FunctionDefLibrary | None = ..., debug_info: tensorflow.core.framework.graph_debug_info_pb2.GraphDebugInfo | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["debug_info", b"debug_info", "library", b"library", "versions", b"versions"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["debug_info", b"debug_info", "library", b"library", "node", b"node", "version", b"version", "versions", b"versions"]) -> None: ... + def HasField( + self, field_name: typing.Literal["debug_info", b"debug_info", "library", b"library", "versions", b"versions"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "debug_info", b"debug_info", "library", b"library", "node", b"node", "version", b"version", "versions", b"versions" + ], + ) -> None: ... 
global___GraphDef = GraphDef diff --git a/stubs/tensorflow/tensorflow/core/framework/graph_transfer_info_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/graph_transfer_info_pb2.pyi index ab4dd51228cd..bba887ced518 100644 --- a/stubs/tensorflow/tensorflow/core/framework/graph_transfer_info_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/graph_transfer_info_pb2.pyi @@ -29,12 +29,7 @@ class GraphTransferNodeInput(google.protobuf.message.Message): OUTPUT_PORT_FIELD_NUMBER: builtins.int node_id: builtins.int output_port: builtins.int - def __init__( - self, - *, - node_id: builtins.int | None = ..., - output_port: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, node_id: builtins.int | None = ..., output_port: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["node_id", b"node_id", "output_port", b"output_port"]) -> None: ... global___GraphTransferNodeInput = GraphTransferNodeInput @@ -68,7 +63,25 @@ class GraphTransferNodeInfo(google.protobuf.message.Message): input_count: builtins.int | None = ..., output_count: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["input_count", b"input_count", "name", b"name", "node_id", b"node_id", "output_count", b"output_count", "padding_id", b"padding_id", "soc_op_id", b"soc_op_id", "type_name", b"type_name"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "input_count", + b"input_count", + "name", + b"name", + "node_id", + b"node_id", + "output_count", + b"output_count", + "padding_id", + b"padding_id", + "soc_op_id", + b"soc_op_id", + "type_name", + b"type_name", + ], + ) -> None: ... global___GraphTransferNodeInfo = GraphTransferNodeInfo @@ -96,7 +109,10 @@ class GraphTransferConstNodeInfo(google.protobuf.message.Message): data: builtins.bytes | None = ..., dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "dtype", b"dtype", "name", b"name", "node_id", b"node_id", "shape", b"shape"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal["data", b"data", "dtype", b"dtype", "name", b"name", "node_id", b"node_id", "shape", b"shape"], + ) -> None: ... global___GraphTransferConstNodeInfo = GraphTransferConstNodeInfo @@ -108,7 +124,9 @@ class GraphTransferNodeInputInfo(google.protobuf.message.Message): NODE_INPUT_FIELD_NUMBER: builtins.int node_id: builtins.int @property - def node_input(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeInput]: ... + def node_input( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeInput]: ... def __init__( self, *, @@ -129,10 +147,7 @@ class GraphTransferNodeOutputInfo(google.protobuf.message.Message): @property def max_byte_size(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... def __init__( - self, - *, - node_id: builtins.int | None = ..., - max_byte_size: collections.abc.Iterable[builtins.int] | None = ..., + self, *, node_id: builtins.int | None = ..., max_byte_size: collections.abc.Iterable[builtins.int] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["max_byte_size", b"max_byte_size", "node_id", b"node_id"]) -> None: ... 
@@ -195,7 +210,9 @@ class GraphTransferInfo(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _DestinationEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GraphTransferInfo._Destination.ValueType], builtins.type): + class _DestinationEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GraphTransferInfo._Destination.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor NOP: GraphTransferInfo._Destination.ValueType # 0 HEXAGON: GraphTransferInfo._Destination.ValueType # 1 @@ -214,19 +231,31 @@ class GraphTransferInfo(google.protobuf.message.Message): destination: global___GraphTransferInfo.Destination.ValueType """Destination of graph transfer""" @property - def node_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeInfo]: ... + def node_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeInfo]: ... @property - def const_node_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferConstNodeInfo]: ... + def const_node_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferConstNodeInfo]: ... @property - def node_input_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeInputInfo]: ... + def node_input_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeInputInfo]: ... @property - def node_output_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeOutputInfo]: ... + def node_output_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeOutputInfo]: ... @property - def graph_input_node_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferGraphInputNodeInfo]: + def graph_input_node_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferGraphInputNodeInfo]: """Input Node parameters of transferred graph""" @property - def graph_output_node_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferGraphOutputNodeInfo]: ... + def graph_output_node_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferGraphOutputNodeInfo]: ... def __init__( self, *, @@ -238,6 +267,24 @@ class GraphTransferInfo(google.protobuf.message.Message): graph_output_node_info: collections.abc.Iterable[global___GraphTransferGraphOutputNodeInfo] | None = ..., destination: global___GraphTransferInfo.Destination.ValueType | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["const_node_info", b"const_node_info", "destination", b"destination", "graph_input_node_info", b"graph_input_node_info", "graph_output_node_info", b"graph_output_node_info", "node_info", b"node_info", "node_input_info", b"node_input_info", "node_output_info", b"node_output_info"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "const_node_info", + b"const_node_info", + "destination", + b"destination", + "graph_input_node_info", + b"graph_input_node_info", + "graph_output_node_info", + b"graph_output_node_info", + "node_info", + b"node_info", + "node_input_info", + b"node_input_info", + "node_output_info", + b"node_output_info", + ], + ) -> None: ... global___GraphTransferInfo = GraphTransferInfo diff --git a/stubs/tensorflow/tensorflow/core/framework/kernel_def_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/kernel_def_pb2.pyi index ea1844b41fbc..c0f0f52e5cbf 100644 --- a/stubs/tensorflow/tensorflow/core/framework/kernel_def_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/kernel_def_pb2.pyi @@ -62,7 +62,9 @@ class KernelDef(google.protobuf.message.Message): this is not set), we prefer GPU kernels over CPU. """ @property - def constraint(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KernelDef.AttrConstraint]: ... + def constraint( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KernelDef.AttrConstraint]: ... @property def host_memory_arg(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """Names of the Op's input_/output_args that reside in host memory @@ -79,7 +81,23 @@ class KernelDef(google.protobuf.message.Message): label: builtins.str | None = ..., priority: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["constraint", b"constraint", "device_type", b"device_type", "host_memory_arg", b"host_memory_arg", "label", b"label", "op", b"op", "priority", b"priority"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "constraint", + b"constraint", + "device_type", + b"device_type", + "host_memory_arg", + b"host_memory_arg", + "label", + b"label", + "op", + b"op", + "priority", + b"priority", + ], + ) -> None: ... global___KernelDef = KernelDef @@ -92,11 +110,7 @@ class KernelList(google.protobuf.message.Message): KERNEL_FIELD_NUMBER: builtins.int @property def kernel(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KernelDef]: ... - def __init__( - self, - *, - kernel: collections.abc.Iterable[global___KernelDef] | None = ..., - ) -> None: ... + def __init__(self, *, kernel: collections.abc.Iterable[global___KernelDef] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["kernel", b"kernel"]) -> None: ... global___KernelList = KernelList diff --git a/stubs/tensorflow/tensorflow/core/framework/log_memory_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/log_memory_pb2.pyi index ba0e1700fc9f..7b8d91953e77 100644 --- a/stubs/tensorflow/tensorflow/core/framework/log_memory_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/log_memory_pb2.pyi @@ -22,12 +22,7 @@ class MemoryLogStep(google.protobuf.message.Message): """Process-unique step id.""" handle: builtins.str """Handle describing the feeds and fetches of the step.""" - def __init__( - self, - *, - step_id: builtins.int | None = ..., - handle: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, step_id: builtins.int | None = ..., handle: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["handle", b"handle", "step_id", b"step_id"]) -> None: ... 
global___MemoryLogStep = MemoryLogStep @@ -57,7 +52,9 @@ class MemoryLogTensorAllocation(google.protobuf.message.Message): tensor: tensorflow.core.framework.tensor_description_pb2.TensorDescription | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["tensor", b"tensor"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kernel_name", b"kernel_name", "step_id", b"step_id", "tensor", b"tensor"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["kernel_name", b"kernel_name", "step_id", b"step_id", "tensor", b"tensor"] + ) -> None: ... global___MemoryLogTensorAllocation = MemoryLogTensorAllocation @@ -73,13 +70,10 @@ class MemoryLogTensorDeallocation(google.protobuf.message.Message): """ allocator_name: builtins.str """Name of the allocator used.""" - def __init__( - self, - *, - allocation_id: builtins.int | None = ..., - allocator_name: builtins.str | None = ..., + def __init__(self, *, allocation_id: builtins.int | None = ..., allocator_name: builtins.str | None = ...) -> None: ... + def ClearField( + self, field_name: typing.Literal["allocation_id", b"allocation_id", "allocator_name", b"allocator_name"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["allocation_id", b"allocation_id", "allocator_name", b"allocator_name"]) -> None: ... global___MemoryLogTensorDeallocation = MemoryLogTensorDeallocation @@ -112,7 +106,10 @@ class MemoryLogTensorOutput(google.protobuf.message.Message): tensor: tensorflow.core.framework.tensor_description_pb2.TensorDescription | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["tensor", b"tensor"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["index", b"index", "kernel_name", b"kernel_name", "step_id", b"step_id", "tensor", b"tensor"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal["index", b"index", "kernel_name", b"kernel_name", "step_id", b"step_id", "tensor", b"tensor"], + ) -> None: ... global___MemoryLogTensorOutput = MemoryLogTensorOutput @@ -150,7 +147,23 @@ class MemoryLogRawAllocation(google.protobuf.message.Message): allocation_id: builtins.int | None = ..., allocator_name: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["allocation_id", b"allocation_id", "allocator_name", b"allocator_name", "num_bytes", b"num_bytes", "operation", b"operation", "ptr", b"ptr", "step_id", b"step_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "allocation_id", + b"allocation_id", + "allocator_name", + b"allocator_name", + "num_bytes", + b"num_bytes", + "operation", + b"operation", + "ptr", + b"ptr", + "step_id", + b"step_id", + ], + ) -> None: ... global___MemoryLogRawAllocation = MemoryLogRawAllocation @@ -186,6 +199,20 @@ class MemoryLogRawDeallocation(google.protobuf.message.Message): allocator_name: builtins.str | None = ..., deferred: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["allocation_id", b"allocation_id", "allocator_name", b"allocator_name", "deferred", b"deferred", "operation", b"operation", "step_id", b"step_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "allocation_id", + b"allocation_id", + "allocator_name", + b"allocator_name", + "deferred", + b"deferred", + "operation", + b"operation", + "step_id", + b"step_id", + ], + ) -> None: ... 
global___MemoryLogRawDeallocation = MemoryLogRawDeallocation diff --git a/stubs/tensorflow/tensorflow/core/framework/model_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/model_pb2.pyi index 1fc78e2d7b8b..5f80b951a386 100644 --- a/stubs/tensorflow/tensorflow/core/framework/model_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/model_pb2.pyi @@ -50,7 +50,9 @@ class _AutotuneAlgorithm: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _AutotuneAlgorithmEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AutotuneAlgorithm.ValueType], builtins.type): +class _AutotuneAlgorithmEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AutotuneAlgorithm.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT: _AutotuneAlgorithm.ValueType # 0 HILL_CLIMB: _AutotuneAlgorithm.ValueType # 1 @@ -118,7 +120,23 @@ class ModelProto(google.protobuf.message.Message): max: builtins.float | None = ..., tunable: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["max", b"max", "min", b"min", "name", b"name", "state_value", b"state_value", "tunable", b"tunable", "value", b"value"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "max", + b"max", + "min", + b"min", + "name", + b"name", + "state_value", + b"state_value", + "tunable", + b"tunable", + "value", + b"value", + ], + ) -> None: ... ID_FIELD_NUMBER: builtins.int NAME_FIELD_NUMBER: builtins.int @@ -173,7 +191,9 @@ class ModelProto(google.protobuf.message.Message): buffered element. This is only used by ASYNC_KNOWN_RATIO nodes. """ @property - def parameters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModelProto.Node.Parameter]: + def parameters( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModelProto.Node.Parameter]: """Parameters of this node.""" @property @@ -201,7 +221,45 @@ class ModelProto(google.protobuf.message.Message): ratio: builtins.float | None = ..., memory_ratio: builtins.float | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["autotune", b"autotune", "buffered_bytes", b"buffered_bytes", "buffered_elements", b"buffered_elements", "bytes_consumed", b"bytes_consumed", "bytes_produced", b"bytes_produced", "id", b"id", "input_processing_time_count", b"input_processing_time_count", "input_processing_time_sum", b"input_processing_time_sum", "inputs", b"inputs", "memory_ratio", b"memory_ratio", "name", b"name", "node_class", b"node_class", "num_elements", b"num_elements", "parameters", b"parameters", "processing_time", b"processing_time", "ratio", b"ratio", "record_metrics", b"record_metrics"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "autotune", + b"autotune", + "buffered_bytes", + b"buffered_bytes", + "buffered_elements", + b"buffered_elements", + "bytes_consumed", + b"bytes_consumed", + "bytes_produced", + b"bytes_produced", + "id", + b"id", + "input_processing_time_count", + b"input_processing_time_count", + "input_processing_time_sum", + b"input_processing_time_sum", + "inputs", + b"inputs", + "memory_ratio", + b"memory_ratio", + "name", + b"name", + "node_class", + b"node_class", + "num_elements", + b"num_elements", + "parameters", + b"parameters", + "processing_time", + b"processing_time", + "ratio", + b"ratio", + "record_metrics", + b"record_metrics", + ], + ) -> None: ... 
@typing.final class NodesEntry(google.protobuf.message.Message): @@ -212,12 +270,7 @@ class ModelProto(google.protobuf.message.Message): key: builtins.int @property def value(self) -> global___ModelProto.Node: ... - def __init__( - self, - *, - key: builtins.int | None = ..., - value: global___ModelProto.Node | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.int | None = ..., value: global___ModelProto.Node | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -249,7 +302,19 @@ class ModelProto(google.protobuf.message.Message): ram_budget: builtins.int | None = ..., model_input_time: builtins.float | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["algorithm", b"algorithm", "cpu_budget", b"cpu_budget", "model_input_time", b"model_input_time", "ram_budget", b"ram_budget"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "algorithm", + b"algorithm", + "cpu_budget", + b"cpu_budget", + "model_input_time", + b"model_input_time", + "ram_budget", + b"ram_budget", + ], + ) -> None: ... DATASET_NAME_FIELD_NUMBER: builtins.int NODES_FIELD_NUMBER: builtins.int @@ -282,6 +347,22 @@ class ModelProto(google.protobuf.message.Message): gap_times: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["optimization_params", b"optimization_params"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["dataset_name", b"dataset_name", "gap_times", b"gap_times", "id_counter", b"id_counter", "nodes", b"nodes", "optimization_params", b"optimization_params", "output", b"output"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "dataset_name", + b"dataset_name", + "gap_times", + b"gap_times", + "id_counter", + b"id_counter", + "nodes", + b"nodes", + "optimization_params", + b"optimization_params", + "output", + b"output", + ], + ) -> None: ... global___ModelProto = ModelProto diff --git a/stubs/tensorflow/tensorflow/core/framework/node_def_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/node_def_pb2.pyi index 663e4d216ec9..510af1b62e6b 100644 --- a/stubs/tensorflow/tensorflow/core/framework/node_def_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/node_def_pb2.pyi @@ -29,10 +29,7 @@ class NodeDef(google.protobuf.message.Message): @property def value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue: ... def __init__( - self, - *, - key: builtins.str | None = ..., - value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ..., + self, *, key: builtins.str | None = ..., value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -71,7 +68,12 @@ class NodeDef(google.protobuf.message.Message): original_node_names: collections.abc.Iterable[builtins.str] | None = ..., original_func_names: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["original_func_names", b"original_func_names", "original_node_names", b"original_node_names"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "original_func_names", b"original_func_names", "original_node_names", b"original_node_names" + ], + ) -> None: ... NAME_FIELD_NUMBER: builtins.int OP_FIELD_NUMBER: builtins.int @@ -121,7 +123,9 @@ class NodeDef(google.protobuf.message.Message): """ @property - def attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: + def attr( + self, + ) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: """Operation-specific graph-construction-time configuration. Note that this should include all attrs defined in the corresponding OpDef, including those with a value matching @@ -159,7 +163,30 @@ class NodeDef(google.protobuf.message.Message): experimental_debug_info: global___NodeDef.ExperimentalDebugInfo | None = ..., experimental_type: tensorflow.core.framework.full_type_pb2.FullTypeDef | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["experimental_debug_info", b"experimental_debug_info", "experimental_type", b"experimental_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["attr", b"attr", "device", b"device", "experimental_debug_info", b"experimental_debug_info", "experimental_type", b"experimental_type", "input", b"input", "name", b"name", "op", b"op"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "experimental_debug_info", b"experimental_debug_info", "experimental_type", b"experimental_type" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "attr", + b"attr", + "device", + b"device", + "experimental_debug_info", + b"experimental_debug_info", + "experimental_type", + b"experimental_type", + "input", + b"input", + "name", + b"name", + "op", + b"op", + ], + ) -> None: ... global___NodeDef = NodeDef diff --git a/stubs/tensorflow/tensorflow/core/framework/op_def_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/op_def_pb2.pyi index 6656ddcadf3f..ee978cd9fac6 100644 --- a/stubs/tensorflow/tensorflow/core/framework/op_def_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/op_def_pb2.pyi @@ -71,7 +71,11 @@ class OpDef(google.protobuf.message.Message): For outputs: if true, outputs are refs, otherwise they are not. """ @property - def handle_data(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto.DtypeAndShape]: + def handle_data( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto.DtypeAndShape + ]: """The handle data for resource inputs.""" @property @@ -96,12 +100,36 @@ class OpDef(google.protobuf.message.Message): type_attr: builtins.str | None = ..., number_attr: builtins.str | None = ..., type_list_attr: builtins.str | None = ..., - handle_data: collections.abc.Iterable[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto.DtypeAndShape] | None = ..., + handle_data: ( + collections.abc.Iterable[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto.DtypeAndShape] | None + ) = ..., is_ref: builtins.bool | None = ..., experimental_full_type: tensorflow.core.framework.full_type_pb2.FullTypeDef | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["experimental_full_type", b"experimental_full_type"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["description", b"description", "experimental_full_type", b"experimental_full_type", "handle_data", b"handle_data", "is_ref", b"is_ref", "name", b"name", "number_attr", b"number_attr", "type", b"type", "type_attr", b"type_attr", "type_list_attr", b"type_list_attr"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "description", + b"description", + "experimental_full_type", + b"experimental_full_type", + "handle_data", + b"handle_data", + "is_ref", + b"is_ref", + "name", + b"name", + "number_attr", + b"number_attr", + "type", + b"type", + "type_attr", + b"type_attr", + "type_list_attr", + b"type_list_attr", + ], + ) -> None: ... @typing.final class AttrDef(google.protobuf.message.Message): @@ -166,8 +194,28 @@ class OpDef(google.protobuf.message.Message): minimum: builtins.int | None = ..., allowed_values: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["allowed_values", b"allowed_values", "default_value", b"default_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["allowed_values", b"allowed_values", "default_value", b"default_value", "description", b"description", "has_minimum", b"has_minimum", "minimum", b"minimum", "name", b"name", "type", b"type"]) -> None: ... + def HasField( + self, field_name: typing.Literal["allowed_values", b"allowed_values", "default_value", b"default_value"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "allowed_values", + b"allowed_values", + "default_value", + b"default_value", + "description", + b"description", + "has_minimum", + b"has_minimum", + "minimum", + b"minimum", + "name", + b"name", + "type", + b"type", + ], + ) -> None: ... NAME_FIELD_NUMBER: builtins.int INPUT_ARG_FIELD_NUMBER: builtins.int @@ -277,7 +325,37 @@ class OpDef(google.protobuf.message.Message): is_distributed_communication: builtins.bool | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["deprecation", b"deprecation"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["allows_uninitialized_input", b"allows_uninitialized_input", "attr", b"attr", "control_output", b"control_output", "deprecation", b"deprecation", "description", b"description", "input_arg", b"input_arg", "is_aggregate", b"is_aggregate", "is_commutative", b"is_commutative", "is_distributed_communication", b"is_distributed_communication", "is_stateful", b"is_stateful", "name", b"name", "output_arg", b"output_arg", "summary", b"summary"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "allows_uninitialized_input", + b"allows_uninitialized_input", + "attr", + b"attr", + "control_output", + b"control_output", + "deprecation", + b"deprecation", + "description", + b"description", + "input_arg", + b"input_arg", + "is_aggregate", + b"is_aggregate", + "is_commutative", + b"is_commutative", + "is_distributed_communication", + b"is_distributed_communication", + "is_stateful", + b"is_stateful", + "name", + b"name", + "output_arg", + b"output_arg", + "summary", + b"summary", + ], + ) -> None: ... 
global___OpDef = OpDef @@ -293,12 +371,7 @@ class OpDeprecation(google.protobuf.message.Message): """First GraphDef version at which the op is disallowed.""" explanation: builtins.str """Explanation of why it was deprecated and what to use instead.""" - def __init__( - self, - *, - version: builtins.int | None = ..., - explanation: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, version: builtins.int | None = ..., explanation: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["explanation", b"explanation", "version", b"version"]) -> None: ... global___OpDeprecation = OpDeprecation @@ -312,11 +385,7 @@ class OpList(google.protobuf.message.Message): OP_FIELD_NUMBER: builtins.int @property def op(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OpDef]: ... - def __init__( - self, - *, - op: collections.abc.Iterable[global___OpDef] | None = ..., - ) -> None: ... + def __init__(self, *, op: collections.abc.Iterable[global___OpDef] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["op", b"op"]) -> None: ... global___OpList = OpList diff --git a/stubs/tensorflow/tensorflow/core/framework/optimized_function_graph_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/optimized_function_graph_pb2.pyi index 7f48d2d4b519..152ce34664ac 100644 --- a/stubs/tensorflow/tensorflow/core/framework/optimized_function_graph_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/optimized_function_graph_pb2.pyi @@ -36,7 +36,10 @@ class OptimizedFunctionGraph(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _OptimizationSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OptimizedFunctionGraph._OptimizationSource.ValueType], builtins.type): + class _OptimizationSourceEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OptimizedFunctionGraph._OptimizationSource.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor SOURCE_UNSPECIFIED: OptimizedFunctionGraph._OptimizationSource.ValueType # 0 AOT: OptimizedFunctionGraph._OptimizationSource.ValueType # 1 @@ -64,12 +67,7 @@ class OptimizedFunctionGraph(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... NAME_FIELD_NUMBER: builtins.int @@ -102,7 +100,9 @@ class OptimizedFunctionGraph(google.protobuf.message.Message): """ @property - def ret_types(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.core.framework.types_pb2.DataType.ValueType]: + def ret_types( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.core.framework.types_pb2.DataType.ValueType]: """Return node types of the function. This is an output of graph preprocessing. """ @@ -118,10 +118,48 @@ class OptimizedFunctionGraph(google.protobuf.message.Message): source: global___OptimizedFunctionGraph.OptimizationSource.ValueType | None = ..., optimization_time_usecs: builtins.int | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["_optimization_time_usecs", b"_optimization_time_usecs", "_source", b"_source", "function_graph", b"function_graph", "optimization_time_usecs", b"optimization_time_usecs", "source", b"source"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_optimization_time_usecs", b"_optimization_time_usecs", "_source", b"_source", "function_graph", b"function_graph", "name", b"name", "node_name_to_control_ret", b"node_name_to_control_ret", "num_return_nodes", b"num_return_nodes", "optimization_time_usecs", b"optimization_time_usecs", "ret_types", b"ret_types", "source", b"source"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "_optimization_time_usecs", + b"_optimization_time_usecs", + "_source", + b"_source", + "function_graph", + b"function_graph", + "optimization_time_usecs", + b"optimization_time_usecs", + "source", + b"source", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "_optimization_time_usecs", + b"_optimization_time_usecs", + "_source", + b"_source", + "function_graph", + b"function_graph", + "name", + b"name", + "node_name_to_control_ret", + b"node_name_to_control_ret", + "num_return_nodes", + b"num_return_nodes", + "optimization_time_usecs", + b"optimization_time_usecs", + "ret_types", + b"ret_types", + "source", + b"source", + ], + ) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_optimization_time_usecs", b"_optimization_time_usecs"]) -> typing.Literal["optimization_time_usecs"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["_optimization_time_usecs", b"_optimization_time_usecs"] + ) -> typing.Literal["optimization_time_usecs"] | None: ... @typing.overload def WhichOneof(self, oneof_group: typing.Literal["_source", b"_source"]) -> typing.Literal["source"] | None: ... diff --git a/stubs/tensorflow/tensorflow/core/framework/reader_base_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/reader_base_pb2.pyi index 7e6e6ea31932..0ae5cb8b33fe 100644 --- a/stubs/tensorflow/tensorflow/core/framework/reader_base_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/reader_base_pb2.pyi @@ -35,6 +35,18 @@ class ReaderBaseState(google.protobuf.message.Message): num_records_produced: builtins.int | None = ..., current_work: builtins.bytes | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["current_work", b"current_work", "num_records_produced", b"num_records_produced", "work_finished", b"work_finished", "work_started", b"work_started"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "current_work", + b"current_work", + "num_records_produced", + b"num_records_produced", + "work_finished", + b"work_finished", + "work_started", + b"work_started", + ], + ) -> None: ... global___ReaderBaseState = ReaderBaseState diff --git a/stubs/tensorflow/tensorflow/core/framework/resource_handle_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/resource_handle_pb2.pyi index 8767e1768bce..aeb126eae7c6 100644 --- a/stubs/tensorflow/tensorflow/core/framework/resource_handle_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/resource_handle_pb2.pyi @@ -68,7 +68,9 @@ class ResourceHandleProto(google.protobuf.message.Message): available. 
""" @property - def dtypes_and_shapes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceHandleProto.DtypeAndShape]: + def dtypes_and_shapes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceHandleProto.DtypeAndShape]: """Data types and shapes for the underlying resource.""" def __init__( @@ -81,6 +83,22 @@ class ResourceHandleProto(google.protobuf.message.Message): maybe_type_name: builtins.str | None = ..., dtypes_and_shapes: collections.abc.Iterable[global___ResourceHandleProto.DtypeAndShape] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["container", b"container", "device", b"device", "dtypes_and_shapes", b"dtypes_and_shapes", "hash_code", b"hash_code", "maybe_type_name", b"maybe_type_name", "name", b"name"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "container", + b"container", + "device", + b"device", + "dtypes_and_shapes", + b"dtypes_and_shapes", + "hash_code", + b"hash_code", + "maybe_type_name", + b"maybe_type_name", + "name", + b"name", + ], + ) -> None: ... global___ResourceHandleProto = ResourceHandleProto diff --git a/stubs/tensorflow/tensorflow/core/framework/step_stats_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/step_stats_pb2.pyi index 947cbdb41683..1bcf58a8e510 100644 --- a/stubs/tensorflow/tensorflow/core/framework/step_stats_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/step_stats_pb2.pyi @@ -27,12 +27,7 @@ class AllocationRecord(google.protobuf.message.Message): """The timestamp of the operation.""" alloc_bytes: builtins.int """Number of bytes allocated, or de-allocated if negative.""" - def __init__( - self, - *, - alloc_micros: builtins.int | None = ..., - alloc_bytes: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, alloc_micros: builtins.int | None = ..., alloc_bytes: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["alloc_bytes", b"alloc_bytes", "alloc_micros", b"alloc_micros"]) -> None: ... global___AllocationRecord = AllocationRecord @@ -58,7 +53,9 @@ class AllocatorMemoryUsed(google.protobuf.message.Message): The number of live bytes currently allocated by the allocator. """ @property - def allocation_records(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AllocationRecord]: + def allocation_records( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AllocationRecord]: """The allocation and deallocation timeline.""" def __init__( @@ -71,7 +68,23 @@ class AllocatorMemoryUsed(google.protobuf.message.Message): allocation_records: collections.abc.Iterable[global___AllocationRecord] | None = ..., allocator_bytes_in_use: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["allocation_records", b"allocation_records", "allocator_bytes_in_use", b"allocator_bytes_in_use", "allocator_name", b"allocator_name", "live_bytes", b"live_bytes", "peak_bytes", b"peak_bytes", "total_bytes", b"total_bytes"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "allocation_records", + b"allocation_records", + "allocator_bytes_in_use", + b"allocator_bytes_in_use", + "allocator_name", + b"allocator_name", + "live_bytes", + b"live_bytes", + "peak_bytes", + b"peak_bytes", + "total_bytes", + b"total_bytes", + ], + ) -> None: ... 
global___AllocatorMemoryUsed = AllocatorMemoryUsed @@ -116,7 +129,9 @@ class MemoryStats(google.protobuf.message.Message): @property def persistent_tensor_alloc_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... @property - def device_persistent_tensor_alloc_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def device_persistent_tensor_alloc_ids( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... def __init__( self, *, @@ -127,7 +142,23 @@ class MemoryStats(google.protobuf.message.Message): device_persistent_memory_size: builtins.int | None = ..., device_persistent_tensor_alloc_ids: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["device_persistent_memory_size", b"device_persistent_memory_size", "device_persistent_tensor_alloc_ids", b"device_persistent_tensor_alloc_ids", "device_temp_memory_size", b"device_temp_memory_size", "persistent_memory_size", b"persistent_memory_size", "persistent_tensor_alloc_ids", b"persistent_tensor_alloc_ids", "temp_memory_size", b"temp_memory_size"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "device_persistent_memory_size", + b"device_persistent_memory_size", + "device_persistent_tensor_alloc_ids", + b"device_persistent_tensor_alloc_ids", + "device_temp_memory_size", + b"device_temp_memory_size", + "persistent_memory_size", + b"persistent_memory_size", + "persistent_tensor_alloc_ids", + b"persistent_tensor_alloc_ids", + "temp_memory_size", + b"temp_memory_size", + ], + ) -> None: ... global___MemoryStats = MemoryStats @@ -177,7 +208,11 @@ class NodeExecStats(google.protobuf.message.Message): @property def output(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NodeOutput]: ... @property - def referenced_tensor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.allocation_description_pb2.AllocationDescription]: ... + def referenced_tensor( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.allocation_description_pb2.AllocationDescription + ]: ... @property def memory_stats(self) -> global___MemoryStats: ... def __init__( @@ -193,7 +228,9 @@ class NodeExecStats(google.protobuf.message.Message): timeline_label: builtins.str | None = ..., scheduled_micros: builtins.int | None = ..., thread_id: builtins.int | None = ..., - referenced_tensor: collections.abc.Iterable[tensorflow.core.framework.allocation_description_pb2.AllocationDescription] | None = ..., + referenced_tensor: ( + collections.abc.Iterable[tensorflow.core.framework.allocation_description_pb2.AllocationDescription] | None + ) = ..., memory_stats: global___MemoryStats | None = ..., all_start_nanos: builtins.int | None = ..., op_start_rel_nanos: builtins.int | None = ..., @@ -202,7 +239,45 @@ class NodeExecStats(google.protobuf.message.Message): scheduled_nanos: builtins.int | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["memory_stats", b"memory_stats"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["all_end_rel_micros", b"all_end_rel_micros", "all_end_rel_nanos", b"all_end_rel_nanos", "all_start_micros", b"all_start_micros", "all_start_nanos", b"all_start_nanos", "memory", b"memory", "memory_stats", b"memory_stats", "node_name", b"node_name", "op_end_rel_micros", b"op_end_rel_micros", "op_end_rel_nanos", b"op_end_rel_nanos", "op_start_rel_micros", b"op_start_rel_micros", "op_start_rel_nanos", b"op_start_rel_nanos", "output", b"output", "referenced_tensor", b"referenced_tensor", "scheduled_micros", b"scheduled_micros", "scheduled_nanos", b"scheduled_nanos", "thread_id", b"thread_id", "timeline_label", b"timeline_label"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "all_end_rel_micros", + b"all_end_rel_micros", + "all_end_rel_nanos", + b"all_end_rel_nanos", + "all_start_micros", + b"all_start_micros", + "all_start_nanos", + b"all_start_nanos", + "memory", + b"memory", + "memory_stats", + b"memory_stats", + "node_name", + b"node_name", + "op_end_rel_micros", + b"op_end_rel_micros", + "op_end_rel_nanos", + b"op_end_rel_nanos", + "op_start_rel_micros", + b"op_start_rel_micros", + "op_start_rel_nanos", + b"op_start_rel_nanos", + "output", + b"output", + "referenced_tensor", + b"referenced_tensor", + "scheduled_micros", + b"scheduled_micros", + "scheduled_nanos", + b"scheduled_nanos", + "thread_id", + b"thread_id", + "timeline_label", + b"timeline_label", + ], + ) -> None: ... global___NodeExecStats = NodeExecStats @@ -218,12 +293,7 @@ class DeviceStepStats(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.int value: builtins.str - def __init__( - self, - *, - key: builtins.int | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.int | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... DEVICE_FIELD_NUMBER: builtins.int @@ -243,7 +313,9 @@ class DeviceStepStats(google.protobuf.message.Message): node_stats: collections.abc.Iterable[global___NodeExecStats] | None = ..., thread_names: collections.abc.Mapping[builtins.int, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["device", b"device", "node_stats", b"node_stats", "thread_names", b"thread_names"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["device", b"device", "node_stats", b"node_stats", "thread_names", b"thread_names"] + ) -> None: ... global___DeviceStepStats = DeviceStepStats @@ -254,11 +326,7 @@ class StepStats(google.protobuf.message.Message): DEV_STATS_FIELD_NUMBER: builtins.int @property def dev_stats(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DeviceStepStats]: ... - def __init__( - self, - *, - dev_stats: collections.abc.Iterable[global___DeviceStepStats] | None = ..., - ) -> None: ... + def __init__(self, *, dev_stats: collections.abc.Iterable[global___DeviceStepStats] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["dev_stats", b"dev_stats"]) -> None: ... 
global___StepStats = StepStats diff --git a/stubs/tensorflow/tensorflow/core/framework/summary_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/summary_pb2.pyi index 5df66b7e7978..848d802496f8 100644 --- a/stubs/tensorflow/tensorflow/core/framework/summary_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/summary_pb2.pyi @@ -81,11 +81,7 @@ class SummaryDescription(google.protobuf.message.Message): """Hint on how plugins should process the data in this series. Supported values include "scalar", "histogram", "image", "audio" """ - def __init__( - self, - *, - type_hint: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, type_hint: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["type_hint", b"type_hint"]) -> None: ... global___SummaryDescription = SummaryDescription @@ -110,12 +106,7 @@ class SummaryMetadata(google.protobuf.message.Message): """The content to store for the plugin. The best practice is for this to be a binary serialized protocol buffer. """ - def __init__( - self, - *, - plugin_name: builtins.str | None = ..., - content: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, plugin_name: builtins.str | None = ..., content: builtins.bytes | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["content", b"content", "plugin_name", b"plugin_name"]) -> None: ... PLUGIN_DATA_FIELD_NUMBER: builtins.int @@ -145,7 +136,19 @@ class SummaryMetadata(google.protobuf.message.Message): data_class: global___DataClass.ValueType | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["plugin_data", b"plugin_data"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["data_class", b"data_class", "display_name", b"display_name", "plugin_data", b"plugin_data", "summary_description", b"summary_description"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "data_class", + b"data_class", + "display_name", + b"display_name", + "plugin_data", + b"plugin_data", + "summary_description", + b"summary_description", + ], + ) -> None: ... global___SummaryMetadata = SummaryMetadata @@ -193,7 +196,19 @@ class Summary(google.protobuf.message.Message): colorspace: builtins.int | None = ..., encoded_image_string: builtins.bytes | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["colorspace", b"colorspace", "encoded_image_string", b"encoded_image_string", "height", b"height", "width", b"width"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "colorspace", + b"colorspace", + "encoded_image_string", + b"encoded_image_string", + "height", + b"height", + "width", + b"width", + ], + ) -> None: ... @typing.final class Audio(google.protobuf.message.Message): @@ -224,7 +239,21 @@ class Summary(google.protobuf.message.Message): encoded_audio_string: builtins.bytes | None = ..., content_type: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["content_type", b"content_type", "encoded_audio_string", b"encoded_audio_string", "length_frames", b"length_frames", "num_channels", b"num_channels", "sample_rate", b"sample_rate"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "content_type", + b"content_type", + "encoded_audio_string", + b"encoded_audio_string", + "length_frames", + b"length_frames", + "num_channels", + b"num_channels", + "sample_rate", + b"sample_rate", + ], + ) -> None: ... 
@typing.final class Value(google.protobuf.message.Message): @@ -278,20 +307,62 @@ class Summary(google.protobuf.message.Message): audio: global___Summary.Audio | None = ..., tensor: tensorflow.core.framework.tensor_pb2.TensorProto | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["audio", b"audio", "histo", b"histo", "image", b"image", "metadata", b"metadata", "obsolete_old_style_histogram", b"obsolete_old_style_histogram", "simple_value", b"simple_value", "tensor", b"tensor", "value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["audio", b"audio", "histo", b"histo", "image", b"image", "metadata", b"metadata", "node_name", b"node_name", "obsolete_old_style_histogram", b"obsolete_old_style_histogram", "simple_value", b"simple_value", "tag", b"tag", "tensor", b"tensor", "value", b"value"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["value", b"value"]) -> typing.Literal["simple_value", "obsolete_old_style_histogram", "image", "histo", "audio", "tensor"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "audio", + b"audio", + "histo", + b"histo", + "image", + b"image", + "metadata", + b"metadata", + "obsolete_old_style_histogram", + b"obsolete_old_style_histogram", + "simple_value", + b"simple_value", + "tensor", + b"tensor", + "value", + b"value", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "audio", + b"audio", + "histo", + b"histo", + "image", + b"image", + "metadata", + b"metadata", + "node_name", + b"node_name", + "obsolete_old_style_histogram", + b"obsolete_old_style_histogram", + "simple_value", + b"simple_value", + "tag", + b"tag", + "tensor", + b"tensor", + "value", + b"value", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["value", b"value"] + ) -> typing.Literal["simple_value", "obsolete_old_style_histogram", "image", "histo", "audio", "tensor"] | None: ... VALUE_FIELD_NUMBER: builtins.int @property def value(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Summary.Value]: """Set of values for the summary.""" - def __init__( - self, - *, - value: collections.abc.Iterable[global___Summary.Value] | None = ..., - ) -> None: ... + def __init__(self, *, value: collections.abc.Iterable[global___Summary.Value] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... global___Summary = Summary diff --git a/stubs/tensorflow/tensorflow/core/framework/tensor_description_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/tensor_description_pb2.pyi index 71a5df5340a2..3062402cc3c5 100644 --- a/stubs/tensorflow/tensorflow/core/framework/tensor_description_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/tensor_description_pb2.pyi @@ -38,7 +38,12 @@ class TensorDescription(google.protobuf.message.Message): shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ..., allocation_description: tensorflow.core.framework.allocation_description_pb2.AllocationDescription | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["allocation_description", b"allocation_description", "shape", b"shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["allocation_description", b"allocation_description", "dtype", b"dtype", "shape", b"shape"]) -> None: ... 
+ def HasField( + self, field_name: typing.Literal["allocation_description", b"allocation_description", "shape", b"shape"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal["allocation_description", b"allocation_description", "dtype", b"dtype", "shape", b"shape"], + ) -> None: ... global___TensorDescription = TensorDescription diff --git a/stubs/tensorflow/tensorflow/core/framework/tensor_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/tensor_pb2.pyi index 8e1a764d7021..40edf767d049 100644 --- a/stubs/tensorflow/tensorflow/core/framework/tensor_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/tensor_pb2.pyi @@ -116,7 +116,11 @@ class TensorProto(google.protobuf.message.Message): """ @property - def resource_handle_val(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto]: + def resource_handle_val( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto + ]: """DT_RESOURCE""" @property @@ -147,14 +151,56 @@ class TensorProto(google.protobuf.message.Message): int64_val: collections.abc.Iterable[builtins.int] | None = ..., bool_val: collections.abc.Iterable[builtins.bool] | None = ..., dcomplex_val: collections.abc.Iterable[builtins.float] | None = ..., - resource_handle_val: collections.abc.Iterable[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto] | None = ..., + resource_handle_val: ( + collections.abc.Iterable[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto] | None + ) = ..., variant_val: collections.abc.Iterable[global___VariantTensorDataProto] | None = ..., uint32_val: collections.abc.Iterable[builtins.int] | None = ..., uint64_val: collections.abc.Iterable[builtins.int] | None = ..., float8_val: builtins.bytes | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["tensor_shape", b"tensor_shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bool_val", b"bool_val", "dcomplex_val", b"dcomplex_val", "double_val", b"double_val", "dtype", b"dtype", "float8_val", b"float8_val", "float_val", b"float_val", "half_val", b"half_val", "int64_val", b"int64_val", "int_val", b"int_val", "resource_handle_val", b"resource_handle_val", "scomplex_val", b"scomplex_val", "string_val", b"string_val", "tensor_content", b"tensor_content", "tensor_shape", b"tensor_shape", "uint32_val", b"uint32_val", "uint64_val", b"uint64_val", "variant_val", b"variant_val", "version_number", b"version_number"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bool_val", + b"bool_val", + "dcomplex_val", + b"dcomplex_val", + "double_val", + b"double_val", + "dtype", + b"dtype", + "float8_val", + b"float8_val", + "float_val", + b"float_val", + "half_val", + b"half_val", + "int64_val", + b"int64_val", + "int_val", + b"int_val", + "resource_handle_val", + b"resource_handle_val", + "scomplex_val", + b"scomplex_val", + "string_val", + b"string_val", + "tensor_content", + b"tensor_content", + "tensor_shape", + b"tensor_shape", + "uint32_val", + b"uint32_val", + "uint64_val", + b"uint64_val", + "variant_val", + b"variant_val", + "version_number", + b"version_number", + ], + ) -> None: ... 
global___TensorProto = TensorProto @@ -182,6 +228,8 @@ class VariantTensorDataProto(google.protobuf.message.Message): metadata: builtins.bytes | None = ..., tensors: collections.abc.Iterable[global___TensorProto] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["metadata", b"metadata", "tensors", b"tensors", "type_name", b"type_name"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["metadata", b"metadata", "tensors", b"tensors", "type_name", b"type_name"] + ) -> None: ... global___VariantTensorDataProto = VariantTensorDataProto diff --git a/stubs/tensorflow/tensorflow/core/framework/tensor_shape_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/tensor_shape_pb2.pyi index 2d852a920271..908b1159b8e2 100644 --- a/stubs/tensorflow/tensorflow/core/framework/tensor_shape_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/tensor_shape_pb2.pyi @@ -36,12 +36,7 @@ class TensorShapeProto(google.protobuf.message.Message): """ name: builtins.str """Optional name of the tensor dimension.""" - def __init__( - self, - *, - size: builtins.int | None = ..., - name: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, size: builtins.int | None = ..., name: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["name", b"name", "size", b"size"]) -> None: ... DIM_FIELD_NUMBER: builtins.int diff --git a/stubs/tensorflow/tensorflow/core/framework/tensor_slice_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/tensor_slice_pb2.pyi index 949fd0f1d6a3..1ad03f3c5859 100644 --- a/stubs/tensorflow/tensorflow/core/framework/tensor_slice_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/tensor_slice_pb2.pyi @@ -33,14 +33,11 @@ class TensorSliceProto(google.protobuf.message.Message): start: builtins.int """Start index of the slice, starting at 0.""" length: builtins.int - def __init__( - self, - *, - start: builtins.int | None = ..., - length: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, start: builtins.int | None = ..., length: builtins.int | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["has_length", b"has_length", "length", b"length"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["has_length", b"has_length", "length", b"length", "start", b"start"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["has_length", b"has_length", "length", b"length", "start", b"start"] + ) -> None: ... def WhichOneof(self, oneof_group: typing.Literal["has_length", b"has_length"]) -> typing.Literal["length"] | None: ... EXTENT_FIELD_NUMBER: builtins.int @@ -53,11 +50,7 @@ class TensorSliceProto(google.protobuf.message.Message): dimensions in the TensorShape. """ - def __init__( - self, - *, - extent: collections.abc.Iterable[global___TensorSliceProto.Extent] | None = ..., - ) -> None: ... + def __init__(self, *, extent: collections.abc.Iterable[global___TensorSliceProto.Extent] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["extent", b"extent"]) -> None: ... 
global___TensorSliceProto = TensorSliceProto diff --git a/stubs/tensorflow/tensorflow/core/framework/types_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/types_pb2.pyi index a07d25c40e0e..18c74011593b 100644 --- a/stubs/tensorflow/tensorflow/core/framework/types_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/types_pb2.pyi @@ -214,11 +214,7 @@ class SerializedDType(google.protobuf.message.Message): DATATYPE_FIELD_NUMBER: builtins.int datatype: global___DataType.ValueType - def __init__( - self, - *, - datatype: global___DataType.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, datatype: global___DataType.ValueType | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["datatype", b"datatype"]) -> None: ... global___SerializedDType = SerializedDType diff --git a/stubs/tensorflow/tensorflow/core/framework/variable_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/variable_pb2.pyi index 2d2320096e66..1341eee711e5 100644 --- a/stubs/tensorflow/tensorflow/core/framework/variable_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/variable_pb2.pyi @@ -24,7 +24,9 @@ class _VariableSynchronization: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _VariableSynchronizationEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_VariableSynchronization.ValueType], builtins.type): +class _VariableSynchronizationEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_VariableSynchronization.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor VARIABLE_SYNCHRONIZATION_AUTO: _VariableSynchronization.ValueType # 0 """`AUTO`: Indicates that the synchronization will be determined by the @@ -72,7 +74,9 @@ class _VariableAggregation: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _VariableAggregationEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_VariableAggregation.ValueType], builtins.type): +class _VariableAggregationEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_VariableAggregation.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor VARIABLE_AGGREGATION_NONE: _VariableAggregation.ValueType # 0 """`NONE`: This is the default, giving an error if you use a @@ -159,7 +163,29 @@ class VariableDef(google.protobuf.message.Message): aggregation: global___VariableAggregation.ValueType | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["save_slice_info_def", b"save_slice_info_def"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["aggregation", b"aggregation", "initial_value_name", b"initial_value_name", "initializer_name", b"initializer_name", "is_resource", b"is_resource", "save_slice_info_def", b"save_slice_info_def", "snapshot_name", b"snapshot_name", "synchronization", b"synchronization", "trainable", b"trainable", "variable_name", b"variable_name"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "aggregation", + b"aggregation", + "initial_value_name", + b"initial_value_name", + "initializer_name", + b"initializer_name", + "is_resource", + b"is_resource", + "save_slice_info_def", + b"save_slice_info_def", + "snapshot_name", + b"snapshot_name", + "synchronization", + b"synchronization", + "trainable", + b"trainable", + "variable_name", + b"variable_name", + ], + ) -> None: ... 
global___VariableDef = VariableDef @@ -193,6 +219,11 @@ class SaveSliceInfoDef(google.protobuf.message.Message): var_offset: collections.abc.Iterable[builtins.int] | None = ..., var_shape: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["full_name", b"full_name", "full_shape", b"full_shape", "var_offset", b"var_offset", "var_shape", b"var_shape"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "full_name", b"full_name", "full_shape", b"full_shape", "var_offset", b"var_offset", "var_shape", b"var_shape" + ], + ) -> None: ... global___SaveSliceInfoDef = SaveSliceInfoDef diff --git a/stubs/tensorflow/tensorflow/core/framework/versions_pb2.pyi b/stubs/tensorflow/tensorflow/core/framework/versions_pb2.pyi index 7ca17c36bdfd..040b4932af13 100644 --- a/stubs/tensorflow/tensorflow/core/framework/versions_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/framework/versions_pb2.pyi @@ -49,6 +49,9 @@ class VersionDef(google.protobuf.message.Message): min_consumer: builtins.int | None = ..., bad_consumers: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bad_consumers", b"bad_consumers", "min_consumer", b"min_consumer", "producer", b"producer"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal["bad_consumers", b"bad_consumers", "min_consumer", b"min_consumer", "producer", b"producer"], + ) -> None: ... global___VersionDef = VersionDef diff --git a/stubs/tensorflow/tensorflow/core/protobuf/cluster_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/cluster_pb2.pyi index ae19698d703d..0b4f68cbf42b 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/cluster_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/cluster_pb2.pyi @@ -83,12 +83,7 @@ class JobDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.int value: builtins.str - def __init__( - self, - *, - key: builtins.int | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.int | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... NAME_FIELD_NUMBER: builtins.int @@ -108,10 +103,7 @@ class JobDef(google.protobuf.message.Message): """ def __init__( - self, - *, - name: builtins.str | None = ..., - tasks: collections.abc.Mapping[builtins.int, builtins.str] | None = ..., + self, *, name: builtins.str | None = ..., tasks: collections.abc.Mapping[builtins.int, builtins.str] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["name", b"name", "tasks", b"tasks"]) -> None: ... @@ -128,11 +120,7 @@ class ClusterDef(google.protobuf.message.Message): def job(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___JobDef]: """The jobs that comprise the cluster.""" - def __init__( - self, - *, - job: collections.abc.Iterable[global___JobDef] | None = ..., - ) -> None: ... + def __init__(self, *, job: collections.abc.Iterable[global___JobDef] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["job", b"job"]) -> None: ... 
global___ClusterDef = ClusterDef diff --git a/stubs/tensorflow/tensorflow/core/protobuf/composite_tensor_variant_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/composite_tensor_variant_pb2.pyi index 1dbe6e5573b5..e17278f1946a 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/composite_tensor_variant_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/composite_tensor_variant_pb2.pyi @@ -26,11 +26,7 @@ class CompositeTensorVariantMetadata(google.protobuf.message.Message): TYPE_SPEC_PROTO_FIELD_NUMBER: builtins.int @property def type_spec_proto(self) -> tensorflow.core.protobuf.struct_pb2.TypeSpecProto: ... - def __init__( - self, - *, - type_spec_proto: tensorflow.core.protobuf.struct_pb2.TypeSpecProto | None = ..., - ) -> None: ... + def __init__(self, *, type_spec_proto: tensorflow.core.protobuf.struct_pb2.TypeSpecProto | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["type_spec_proto", b"type_spec_proto"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["type_spec_proto", b"type_spec_proto"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/core/protobuf/config_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/config_pb2.pyi index a655ef204b81..1930db2b79dd 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/config_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/config_pb2.pyi @@ -88,7 +88,12 @@ class GPUOptions(google.protobuf.message.Message): priority: collections.abc.Iterable[builtins.int] | None = ..., device_ordinal: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["device_ordinal", b"device_ordinal", "memory_limit_mb", b"memory_limit_mb", "priority", b"priority"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "device_ordinal", b"device_ordinal", "memory_limit_mb", b"memory_limit_mb", "priority", b"priority" + ], + ) -> None: ... @typing.final class StreamMergeOptions(google.protobuf.message.Message): @@ -131,7 +136,17 @@ class GPUOptions(google.protobuf.message.Message): merge_device_to_host_stream: builtins.bool | None = ..., merge_device_to_device_stream: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["merge_device_to_device_stream", b"merge_device_to_device_stream", "merge_device_to_host_stream", b"merge_device_to_host_stream", "merge_host_to_device_stream", b"merge_host_to_device_stream"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "merge_device_to_device_stream", + b"merge_device_to_device_stream", + "merge_device_to_host_stream", + b"merge_device_to_host_stream", + "merge_host_to_device_stream", + b"merge_host_to_device_stream", + ], + ) -> None: ... VIRTUAL_DEVICES_FIELD_NUMBER: builtins.int NUM_VIRTUAL_DEVICES_PER_GPU_FIELD_NUMBER: builtins.int @@ -255,7 +270,9 @@ class GPUOptions(google.protobuf.message.Message): which enumerates jobs*tasks from a ServerDef. """ @property - def virtual_devices(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GPUOptions.Experimental.VirtualDevices]: + def virtual_devices( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GPUOptions.Experimental.VirtualDevices]: """The multi virtual device settings. If empty (not set), it will create single virtual device on each visible GPU, according to the settings in "visible_device_list" above. 
Otherwise, the number of elements in the @@ -321,7 +338,47 @@ class GPUOptions(google.protobuf.message.Message): stream_merge_options: global___GPUOptions.Experimental.StreamMergeOptions | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["stream_merge_options", b"stream_merge_options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["collective_ring_order", b"collective_ring_order", "disallow_retry_on_allocation_failure", b"disallow_retry_on_allocation_failure", "gpu_host_mem_disallow_growth", b"gpu_host_mem_disallow_growth", "gpu_host_mem_limit_in_mb", b"gpu_host_mem_limit_in_mb", "gpu_system_memory_size_in_mb", b"gpu_system_memory_size_in_mb", "internal_fragmentation_fraction", b"internal_fragmentation_fraction", "kernel_tracker_max_bytes", b"kernel_tracker_max_bytes", "kernel_tracker_max_interval", b"kernel_tracker_max_interval", "kernel_tracker_max_pending", b"kernel_tracker_max_pending", "node_id", b"node_id", "num_dev_to_dev_copy_streams", b"num_dev_to_dev_copy_streams", "num_virtual_devices_per_gpu", b"num_virtual_devices_per_gpu", "populate_pjrt_gpu_client_creation_info", b"populate_pjrt_gpu_client_creation_info", "stream_merge_options", b"stream_merge_options", "timestamped_allocator", b"timestamped_allocator", "use_cuda_malloc_async", b"use_cuda_malloc_async", "use_unified_memory", b"use_unified_memory", "virtual_devices", b"virtual_devices"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "collective_ring_order", + b"collective_ring_order", + "disallow_retry_on_allocation_failure", + b"disallow_retry_on_allocation_failure", + "gpu_host_mem_disallow_growth", + b"gpu_host_mem_disallow_growth", + "gpu_host_mem_limit_in_mb", + b"gpu_host_mem_limit_in_mb", + "gpu_system_memory_size_in_mb", + b"gpu_system_memory_size_in_mb", + "internal_fragmentation_fraction", + b"internal_fragmentation_fraction", + "kernel_tracker_max_bytes", + b"kernel_tracker_max_bytes", + "kernel_tracker_max_interval", + b"kernel_tracker_max_interval", + "kernel_tracker_max_pending", + b"kernel_tracker_max_pending", + "node_id", + b"node_id", + "num_dev_to_dev_copy_streams", + b"num_dev_to_dev_copy_streams", + "num_virtual_devices_per_gpu", + b"num_virtual_devices_per_gpu", + "populate_pjrt_gpu_client_creation_info", + b"populate_pjrt_gpu_client_creation_info", + "stream_merge_options", + b"stream_merge_options", + "timestamped_allocator", + b"timestamped_allocator", + "use_cuda_malloc_async", + b"use_cuda_malloc_async", + "use_unified_memory", + b"use_unified_memory", + "virtual_devices", + b"virtual_devices", + ], + ) -> None: ... PER_PROCESS_GPU_MEMORY_FRACTION_FIELD_NUMBER: builtins.int ALLOW_GROWTH_FIELD_NUMBER: builtins.int @@ -433,7 +490,29 @@ class GPUOptions(google.protobuf.message.Message): experimental: global___GPUOptions.Experimental | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["experimental", b"experimental"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["allocator_type", b"allocator_type", "allow_growth", b"allow_growth", "deferred_deletion_bytes", b"deferred_deletion_bytes", "experimental", b"experimental", "force_gpu_compatible", b"force_gpu_compatible", "per_process_gpu_memory_fraction", b"per_process_gpu_memory_fraction", "polling_active_delay_usecs", b"polling_active_delay_usecs", "polling_inactive_delay_msecs", b"polling_inactive_delay_msecs", "visible_device_list", b"visible_device_list"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "allocator_type", + b"allocator_type", + "allow_growth", + b"allow_growth", + "deferred_deletion_bytes", + b"deferred_deletion_bytes", + "experimental", + b"experimental", + "force_gpu_compatible", + b"force_gpu_compatible", + "per_process_gpu_memory_fraction", + b"per_process_gpu_memory_fraction", + "polling_active_delay_usecs", + b"polling_active_delay_usecs", + "polling_inactive_delay_msecs", + b"polling_inactive_delay_msecs", + "visible_device_list", + b"visible_device_list", + ], + ) -> None: ... global___GPUOptions = GPUOptions @@ -447,7 +526,9 @@ class OptimizerOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _LevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OptimizerOptions._Level.ValueType], builtins.type): + class _LevelEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OptimizerOptions._Level.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor L1: OptimizerOptions._Level.ValueType # 0 """L1 is the default level. @@ -474,7 +555,9 @@ class OptimizerOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _GlobalJitLevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OptimizerOptions._GlobalJitLevel.ValueType], builtins.type): + class _GlobalJitLevelEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OptimizerOptions._GlobalJitLevel.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT: OptimizerOptions._GlobalJitLevel.ValueType # 0 """Default setting ("off" now, but later expected to be "on")""" @@ -549,7 +632,25 @@ class OptimizerOptions(google.protobuf.message.Message): global_jit_level: global___OptimizerOptions.GlobalJitLevel.ValueType | None = ..., cpu_global_jit: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["cpu_global_jit", b"cpu_global_jit", "do_common_subexpression_elimination", b"do_common_subexpression_elimination", "do_constant_folding", b"do_constant_folding", "do_function_inlining", b"do_function_inlining", "global_jit_level", b"global_jit_level", "max_folded_constant_in_bytes", b"max_folded_constant_in_bytes", "opt_level", b"opt_level"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "cpu_global_jit", + b"cpu_global_jit", + "do_common_subexpression_elimination", + b"do_common_subexpression_elimination", + "do_constant_folding", + b"do_constant_folding", + "do_function_inlining", + b"do_function_inlining", + "global_jit_level", + b"global_jit_level", + "max_folded_constant_in_bytes", + b"max_folded_constant_in_bytes", + "opt_level", + b"opt_level", + ], + ) -> None: ... global___OptimizerOptions = OptimizerOptions @@ -622,8 +723,32 @@ class GraphOptions(google.protobuf.message.Message): timeline_step: builtins.int | None = ..., rewrite_options: tensorflow.core.protobuf.rewriter_config_pb2.RewriterConfig | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["optimizer_options", b"optimizer_options", "rewrite_options", b"rewrite_options"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["build_cost_model", b"build_cost_model", "build_cost_model_after", b"build_cost_model_after", "enable_bfloat16_sendrecv", b"enable_bfloat16_sendrecv", "enable_recv_scheduling", b"enable_recv_scheduling", "infer_shapes", b"infer_shapes", "optimizer_options", b"optimizer_options", "place_pruned_graph", b"place_pruned_graph", "rewrite_options", b"rewrite_options", "timeline_step", b"timeline_step"]) -> None: ... + def HasField( + self, field_name: typing.Literal["optimizer_options", b"optimizer_options", "rewrite_options", b"rewrite_options"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "build_cost_model", + b"build_cost_model", + "build_cost_model_after", + b"build_cost_model_after", + "enable_bfloat16_sendrecv", + b"enable_bfloat16_sendrecv", + "enable_recv_scheduling", + b"enable_recv_scheduling", + "infer_shapes", + b"infer_shapes", + "optimizer_options", + b"optimizer_options", + "place_pruned_graph", + b"place_pruned_graph", + "rewrite_options", + b"rewrite_options", + "timeline_step", + b"timeline_step", + ], + ) -> None: ... global___GraphOptions = GraphOptions @@ -656,12 +781,7 @@ class ThreadPoolOptionProto(google.protobuf.message.Message): value as is specified on this call. - threadpools created this way are never garbage collected. """ - def __init__( - self, - *, - num_threads: builtins.int | None = ..., - global_name: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, num_threads: builtins.int | None = ..., global_name: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["global_name", b"global_name", "num_threads", b"num_threads"]) -> None: ... global___ThreadPoolOptionProto = ThreadPoolOptionProto @@ -685,12 +805,7 @@ class SessionMetadata(google.protobuf.message.Message): name: builtins.str version: builtins.int """The version is optional. If set, needs to be >= 0.""" - def __init__( - self, - *, - name: builtins.str | None = ..., - version: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, name: builtins.str | None = ..., version: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["name", b"name", "version", b"version"]) -> None: ... global___SessionMetadata = SessionMetadata @@ -711,12 +826,7 @@ class ConfigProto(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.int - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
@typing.final @@ -732,7 +842,10 @@ class ConfigProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _MlirBridgeRolloutEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ConfigProto.Experimental._MlirBridgeRollout.ValueType], builtins.type): + class _MlirBridgeRolloutEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ConfigProto.Experimental._MlirBridgeRollout.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor MLIR_BRIDGE_ROLLOUT_UNSPECIFIED: ConfigProto.Experimental._MlirBridgeRollout.ValueType # 0 """If this field is left unspecified, the MLIR bridge may be selectively @@ -978,8 +1091,71 @@ class ConfigProto(google.protobuf.message.Message): disable_optimize_for_static_graph: builtins.bool | None = ..., disable_eager_executor_streaming_enqueue: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["coordination_config", b"coordination_config", "session_metadata", b"session_metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["backend_server_port", b"backend_server_port", "collective_deterministic_sequential_execution", b"collective_deterministic_sequential_execution", "collective_group_leader", b"collective_group_leader", "collective_nccl", b"collective_nccl", "coordination_config", b"coordination_config", "disable_eager_executor_streaming_enqueue", b"disable_eager_executor_streaming_enqueue", "disable_functional_ops_lowering", b"disable_functional_ops_lowering", "disable_optimize_for_static_graph", b"disable_optimize_for_static_graph", "disable_output_partition_graphs", b"disable_output_partition_graphs", "disable_thread_spinning", b"disable_thread_spinning", "enable_mlir_bridge", b"enable_mlir_bridge", "enable_mlir_graph_optimization", b"enable_mlir_graph_optimization", "enable_multi_host", b"enable_multi_host", "executor_type", b"executor_type", "mlir_bridge_rollout", b"mlir_bridge_rollout", "optimize_for_static_graph", b"optimize_for_static_graph", "recv_buf_max_chunk", b"recv_buf_max_chunk", "session_metadata", b"session_metadata", "share_cluster_devices_in_session", b"share_cluster_devices_in_session", "share_session_state_in_clusterspec_propagation", b"share_session_state_in_clusterspec_propagation", "stream_merge_threshold", b"stream_merge_threshold", "target_gpu", b"target_gpu", "target_tpu", b"target_tpu", "tfrt_use_ifrt", b"tfrt_use_ifrt", "use_numa_affinity", b"use_numa_affinity", "use_tfrt", b"use_tfrt", "xla_fusion_autotuner_thresh", b"xla_fusion_autotuner_thresh", "xla_prefer_single_graph_cluster", b"xla_prefer_single_graph_cluster"]) -> None: ... + def HasField( + self, + field_name: typing.Literal["coordination_config", b"coordination_config", "session_metadata", b"session_metadata"], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "backend_server_port", + b"backend_server_port", + "collective_deterministic_sequential_execution", + b"collective_deterministic_sequential_execution", + "collective_group_leader", + b"collective_group_leader", + "collective_nccl", + b"collective_nccl", + "coordination_config", + b"coordination_config", + "disable_eager_executor_streaming_enqueue", + b"disable_eager_executor_streaming_enqueue", + "disable_functional_ops_lowering", + b"disable_functional_ops_lowering", + "disable_optimize_for_static_graph", + b"disable_optimize_for_static_graph", + "disable_output_partition_graphs", + b"disable_output_partition_graphs", + "disable_thread_spinning", + b"disable_thread_spinning", + "enable_mlir_bridge", + b"enable_mlir_bridge", + "enable_mlir_graph_optimization", + b"enable_mlir_graph_optimization", + "enable_multi_host", + b"enable_multi_host", + "executor_type", + b"executor_type", + "mlir_bridge_rollout", + b"mlir_bridge_rollout", + "optimize_for_static_graph", + b"optimize_for_static_graph", + "recv_buf_max_chunk", + b"recv_buf_max_chunk", + "session_metadata", + b"session_metadata", + "share_cluster_devices_in_session", + b"share_cluster_devices_in_session", + "share_session_state_in_clusterspec_propagation", + b"share_session_state_in_clusterspec_propagation", + "stream_merge_threshold", + b"stream_merge_threshold", + "target_gpu", + b"target_gpu", + "target_tpu", + b"target_tpu", + "tfrt_use_ifrt", + b"tfrt_use_ifrt", + "use_numa_affinity", + b"use_numa_affinity", + "use_tfrt", + b"use_tfrt", + "xla_fusion_autotuner_thresh", + b"xla_fusion_autotuner_thresh", + "xla_prefer_single_graph_cluster", + b"xla_prefer_single_graph_cluster", + ], + ) -> None: ... DEVICE_COUNT_FIELD_NUMBER: builtins.int INTRA_OP_PARALLELISM_THREADS_FIELD_NUMBER: builtins.int @@ -1078,7 +1254,9 @@ class ConfigProto(google.protobuf.message.Message): """ @property - def session_inter_op_thread_pool(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ThreadPoolOptionProto]: + def session_inter_op_thread_pool( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ThreadPoolOptionProto]: """This option is experimental - it may be replaced with a different mechanism in the future. @@ -1151,8 +1329,64 @@ class ConfigProto(google.protobuf.message.Message): share_cluster_devices_in_session: builtins.bool | None = ..., experimental: global___ConfigProto.Experimental | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["cluster_def", b"cluster_def", "experimental", b"experimental", "gpu_options", b"gpu_options", "graph_options", b"graph_options", "pluggable_device_options", b"pluggable_device_options", "rpc_options", b"rpc_options"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["allow_soft_placement", b"allow_soft_placement", "cluster_def", b"cluster_def", "device_count", b"device_count", "device_filters", b"device_filters", "experimental", b"experimental", "gpu_options", b"gpu_options", "graph_options", b"graph_options", "inter_op_parallelism_threads", b"inter_op_parallelism_threads", "intra_op_parallelism_threads", b"intra_op_parallelism_threads", "isolate_session_state", b"isolate_session_state", "log_device_placement", b"log_device_placement", "operation_timeout_in_ms", b"operation_timeout_in_ms", "placement_period", b"placement_period", "pluggable_device_options", b"pluggable_device_options", "rpc_options", b"rpc_options", "session_inter_op_thread_pool", b"session_inter_op_thread_pool", "share_cluster_devices_in_session", b"share_cluster_devices_in_session", "use_per_session_threads", b"use_per_session_threads"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "cluster_def", + b"cluster_def", + "experimental", + b"experimental", + "gpu_options", + b"gpu_options", + "graph_options", + b"graph_options", + "pluggable_device_options", + b"pluggable_device_options", + "rpc_options", + b"rpc_options", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "allow_soft_placement", + b"allow_soft_placement", + "cluster_def", + b"cluster_def", + "device_count", + b"device_count", + "device_filters", + b"device_filters", + "experimental", + b"experimental", + "gpu_options", + b"gpu_options", + "graph_options", + b"graph_options", + "inter_op_parallelism_threads", + b"inter_op_parallelism_threads", + "intra_op_parallelism_threads", + b"intra_op_parallelism_threads", + "isolate_session_state", + b"isolate_session_state", + "log_device_placement", + b"log_device_placement", + "operation_timeout_in_ms", + b"operation_timeout_in_ms", + "placement_period", + b"placement_period", + "pluggable_device_options", + b"pluggable_device_options", + "rpc_options", + b"rpc_options", + "session_inter_op_thread_pool", + b"session_inter_op_thread_pool", + "share_cluster_devices_in_session", + b"share_cluster_devices_in_session", + "use_per_session_threads", + b"use_per_session_threads", + ], + ) -> None: ... global___ConfigProto = ConfigProto @@ -1166,7 +1400,9 @@ class RunOptions(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TraceLevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RunOptions._TraceLevel.ValueType], builtins.type): + class _TraceLevelEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RunOptions._TraceLevel.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor NO_TRACE: RunOptions._TraceLevel.ValueType # 0 SOFTWARE_TRACE: RunOptions._TraceLevel.ValueType # 1 @@ -1203,11 +1439,7 @@ class RunOptions(google.protobuf.message.Message): """Priority of the request. The run handler thread pool will schedule ops based on the priority number. The larger number means higher priority. """ - def __init__( - self, - *, - priority: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, priority: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["priority", b"priority"]) -> None: ... 
COLLECTIVE_GRAPH_KEY_FIELD_NUMBER: builtins.int @@ -1234,8 +1466,20 @@ class RunOptions(google.protobuf.message.Message): use_run_handler_pool: builtins.bool | None = ..., run_handler_pool_options: global___RunOptions.Experimental.RunHandlerPoolOptions | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["run_handler_pool_options", b"run_handler_pool_options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["collective_graph_key", b"collective_graph_key", "run_handler_pool_options", b"run_handler_pool_options", "use_run_handler_pool", b"use_run_handler_pool"]) -> None: ... + def HasField( + self, field_name: typing.Literal["run_handler_pool_options", b"run_handler_pool_options"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "collective_graph_key", + b"collective_graph_key", + "run_handler_pool_options", + b"run_handler_pool_options", + "use_run_handler_pool", + b"use_run_handler_pool", + ], + ) -> None: ... TRACE_LEVEL_FIELD_NUMBER: builtins.int TIMEOUT_IN_MS_FIELD_NUMBER: builtins.int @@ -1283,8 +1527,28 @@ class RunOptions(google.protobuf.message.Message): report_tensor_allocations_upon_oom: builtins.bool | None = ..., experimental: global___RunOptions.Experimental | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["debug_options", b"debug_options", "experimental", b"experimental"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["debug_options", b"debug_options", "experimental", b"experimental", "inter_op_thread_pool", b"inter_op_thread_pool", "output_partition_graphs", b"output_partition_graphs", "report_tensor_allocations_upon_oom", b"report_tensor_allocations_upon_oom", "timeout_in_ms", b"timeout_in_ms", "trace_level", b"trace_level"]) -> None: ... + def HasField( + self, field_name: typing.Literal["debug_options", b"debug_options", "experimental", b"experimental"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "debug_options", + b"debug_options", + "experimental", + b"experimental", + "inter_op_thread_pool", + b"inter_op_thread_pool", + "output_partition_graphs", + b"output_partition_graphs", + "report_tensor_allocations_upon_oom", + b"report_tensor_allocations_upon_oom", + "timeout_in_ms", + b"timeout_in_ms", + "trace_level", + b"trace_level", + ], + ) -> None: ... global___RunOptions = RunOptions @@ -1302,7 +1566,9 @@ class RunMetadata(google.protobuf.message.Message): PRE_OPTIMIZATION_GRAPH_FIELD_NUMBER: builtins.int POST_OPTIMIZATION_GRAPH_FIELD_NUMBER: builtins.int @property - def partition_graphs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.graph_pb2.GraphDef]: + def partition_graphs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.graph_pb2.GraphDef]: """TODO(nareshmodi): Include some sort of function/cache-key identifier?""" @property @@ -1316,8 +1582,23 @@ class RunMetadata(google.protobuf.message.Message): pre_optimization_graph: tensorflow.core.framework.graph_pb2.GraphDef | None = ..., post_optimization_graph: tensorflow.core.framework.graph_pb2.GraphDef | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["post_optimization_graph", b"post_optimization_graph", "pre_optimization_graph", b"pre_optimization_graph"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["partition_graphs", b"partition_graphs", "post_optimization_graph", b"post_optimization_graph", "pre_optimization_graph", b"pre_optimization_graph"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "post_optimization_graph", b"post_optimization_graph", "pre_optimization_graph", b"pre_optimization_graph" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "partition_graphs", + b"partition_graphs", + "post_optimization_graph", + b"post_optimization_graph", + "pre_optimization_graph", + b"pre_optimization_graph", + ], + ) -> None: ... STEP_STATS_FIELD_NUMBER: builtins.int COST_GRAPH_FIELD_NUMBER: builtins.int @@ -1336,11 +1617,15 @@ class RunMetadata(google.protobuf.message.Message): """The cost graph for the computation defined by the run call.""" @property - def partition_graphs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.graph_pb2.GraphDef]: + def partition_graphs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.graph_pb2.GraphDef]: """Graphs of the partitions executed by executors.""" @property - def function_graphs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RunMetadata.FunctionGraphs]: + def function_graphs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RunMetadata.FunctionGraphs]: """This is only populated for graphs that are run as functions in TensorFlow V2. There will be an entry below for each function that is traced. The main use cases of the post_optimization_graph and the partition_graphs @@ -1366,8 +1651,27 @@ class RunMetadata(google.protobuf.message.Message): function_graphs: collections.abc.Iterable[global___RunMetadata.FunctionGraphs] | None = ..., session_metadata: global___SessionMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["cost_graph", b"cost_graph", "session_metadata", b"session_metadata", "step_stats", b"step_stats"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["cost_graph", b"cost_graph", "function_graphs", b"function_graphs", "partition_graphs", b"partition_graphs", "session_metadata", b"session_metadata", "step_stats", b"step_stats"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "cost_graph", b"cost_graph", "session_metadata", b"session_metadata", "step_stats", b"step_stats" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "cost_graph", + b"cost_graph", + "function_graphs", + b"function_graphs", + "partition_graphs", + b"partition_graphs", + "session_metadata", + b"session_metadata", + "step_stats", + b"step_stats", + ], + ) -> None: ... global___RunMetadata = RunMetadata @@ -1387,12 +1691,7 @@ class TensorConnection(google.protobuf.message.Message): """A tensor name. The value of this tensor will be bound to the value of the tensor named in `from_tensor`. """ - def __init__( - self, - *, - from_tensor: builtins.str | None = ..., - to_tensor: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, from_tensor: builtins.str | None = ..., to_tensor: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["from_tensor", b"from_tensor", "to_tensor", b"to_tensor"]) -> None: ... 
global___TensorConnection = TensorConnection @@ -1415,12 +1714,7 @@ class CallableOptions(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @typing.final @@ -1431,12 +1725,7 @@ class CallableOptions(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... FEED_FIELD_NUMBER: builtins.int @@ -1553,6 +1842,26 @@ class CallableOptions(google.protobuf.message.Message): fetch_skip_sync: builtins.bool | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["run_options", b"run_options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["feed", b"feed", "feed_devices", b"feed_devices", "fetch", b"fetch", "fetch_devices", b"fetch_devices", "fetch_skip_sync", b"fetch_skip_sync", "run_options", b"run_options", "target", b"target", "tensor_connection", b"tensor_connection"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "feed", + b"feed", + "feed_devices", + b"feed_devices", + "fetch", + b"fetch", + "fetch_devices", + b"fetch_devices", + "fetch_skip_sync", + b"fetch_skip_sync", + "run_options", + b"run_options", + "target", + b"target", + "tensor_connection", + b"tensor_connection", + ], + ) -> None: ... global___CallableOptions = CallableOptions diff --git a/stubs/tensorflow/tensorflow/core/protobuf/control_flow_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/control_flow_pb2.pyi index 713544819ed5..2c044a8c2220 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/control_flow_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/control_flow_pb2.pyi @@ -30,12 +30,7 @@ class ValuesDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... VALUES_FIELD_NUMBER: builtins.int @@ -73,13 +68,14 @@ class ControlFlowContextDef(google.protobuf.message.Message): @property def while_ctxt(self) -> global___WhileContextDef: ... def __init__( - self, - *, - cond_ctxt: global___CondContextDef | None = ..., - while_ctxt: global___WhileContextDef | None = ..., + self, *, cond_ctxt: global___CondContextDef | None = ..., while_ctxt: global___WhileContextDef | None = ... + ) -> None: ... + def HasField( + self, field_name: typing.Literal["cond_ctxt", b"cond_ctxt", "ctxt", b"ctxt", "while_ctxt", b"while_ctxt"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["cond_ctxt", b"cond_ctxt", "ctxt", b"ctxt", "while_ctxt", b"while_ctxt"] ) -> None: ... 
- def HasField(self, field_name: typing.Literal["cond_ctxt", b"cond_ctxt", "ctxt", b"ctxt", "while_ctxt", b"while_ctxt"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["cond_ctxt", b"cond_ctxt", "ctxt", b"ctxt", "while_ctxt", b"while_ctxt"]) -> None: ... def WhichOneof(self, oneof_group: typing.Literal["ctxt", b"ctxt"]) -> typing.Literal["cond_ctxt", "while_ctxt"] | None: ... global___ControlFlowContextDef = ControlFlowContextDef @@ -109,7 +105,9 @@ class CondContextDef(google.protobuf.message.Message): """Values and external values in control flow context.""" @property - def nested_contexts(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlFlowContextDef]: + def nested_contexts( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlFlowContextDef]: """Contexts contained inside this context (e.g. nested conds).""" def __init__( @@ -123,7 +121,23 @@ class CondContextDef(google.protobuf.message.Message): nested_contexts: collections.abc.Iterable[global___ControlFlowContextDef] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["values_def", b"values_def"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["branch", b"branch", "context_name", b"context_name", "nested_contexts", b"nested_contexts", "pivot_name", b"pivot_name", "pred_name", b"pred_name", "values_def", b"values_def"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "branch", + b"branch", + "context_name", + b"context_name", + "nested_contexts", + b"nested_contexts", + "pivot_name", + b"pivot_name", + "pred_name", + b"pred_name", + "values_def", + b"values_def", + ], + ) -> None: ... global___CondContextDef = CondContextDef @@ -174,7 +188,9 @@ class WhileContextDef(google.protobuf.message.Message): """Values and external values in control flow context.""" @property - def nested_contexts(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlFlowContextDef]: + def nested_contexts( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlFlowContextDef]: """Contexts contained inside this context (e.g. nested whiles).""" def __init__( @@ -194,6 +210,34 @@ class WhileContextDef(google.protobuf.message.Message): nested_contexts: collections.abc.Iterable[global___ControlFlowContextDef] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["values_def", b"values_def"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["back_prop", b"back_prop", "context_name", b"context_name", "loop_enter_names", b"loop_enter_names", "loop_exit_names", b"loop_exit_names", "maximum_iterations_name", b"maximum_iterations_name", "nested_contexts", b"nested_contexts", "parallel_iterations", b"parallel_iterations", "pivot_for_body_name", b"pivot_for_body_name", "pivot_for_pred_name", b"pivot_for_pred_name", "pivot_name", b"pivot_name", "swap_memory", b"swap_memory", "values_def", b"values_def"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "back_prop", + b"back_prop", + "context_name", + b"context_name", + "loop_enter_names", + b"loop_enter_names", + "loop_exit_names", + b"loop_exit_names", + "maximum_iterations_name", + b"maximum_iterations_name", + "nested_contexts", + b"nested_contexts", + "parallel_iterations", + b"parallel_iterations", + "pivot_for_body_name", + b"pivot_for_body_name", + "pivot_for_pred_name", + b"pivot_for_pred_name", + "pivot_name", + b"pivot_name", + "swap_memory", + b"swap_memory", + "values_def", + b"values_def", + ], + ) -> None: ... global___WhileContextDef = WhileContextDef diff --git a/stubs/tensorflow/tensorflow/core/protobuf/core_platform_payloads_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/core_platform_payloads_pb2.pyi index 44eea2b74e65..39763f66f4e6 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/core_platform_payloads_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/core_platform_payloads_pb2.pyi @@ -31,7 +31,9 @@ class ErrorSourceProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ErrorSourceProto._ErrorSource.ValueType], builtins.type): + class _ErrorSourceEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ErrorSourceProto._ErrorSource.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNKNOWN: ErrorSourceProto._ErrorSource.ValueType # 0 TPU_COMPILE_OP: ErrorSourceProto._ErrorSource.ValueType # 1 @@ -58,11 +60,7 @@ class ErrorSourceProto(google.protobuf.message.Message): ERROR_SOURCE_FIELD_NUMBER: builtins.int error_source: global___ErrorSourceProto.ErrorSource.ValueType - def __init__( - self, - *, - error_source: global___ErrorSourceProto.ErrorSource.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, error_source: global___ErrorSourceProto.ErrorSource.ValueType | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["error_source", b"error_source"]) -> None: ... 
global___ErrorSourceProto = ErrorSourceProto diff --git a/stubs/tensorflow/tensorflow/core/protobuf/data_service_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/data_service_pb2.pyi index 3c149e5ee5ac..cdafebecf7eb 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/data_service_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/data_service_pb2.pyi @@ -22,7 +22,9 @@ class _DeploymentMode: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _DeploymentModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DeploymentMode.ValueType], builtins.type): +class _DeploymentModeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DeploymentMode.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEPLOYMENT_MODE_UNSPECIFIED: _DeploymentMode.ValueType # 0 DEPLOYMENT_MODE_COLOCATED: _DeploymentMode.ValueType # 1 @@ -58,7 +60,9 @@ class ProcessingModeDef(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ShardingPolicyEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ProcessingModeDef._ShardingPolicy.ValueType], builtins.type): + class _ShardingPolicyEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ProcessingModeDef._ShardingPolicy.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OFF: ProcessingModeDef._ShardingPolicy.ValueType # 0 """No sharding will be performed. Each worker produces the entire dataset @@ -141,11 +145,7 @@ class ProcessingModeDef(google.protobuf.message.Message): SHARDING_POLICY_FIELD_NUMBER: builtins.int sharding_policy: global___ProcessingModeDef.ShardingPolicy.ValueType - def __init__( - self, - *, - sharding_policy: global___ProcessingModeDef.ShardingPolicy.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, sharding_policy: global___ProcessingModeDef.ShardingPolicy.ValueType | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["sharding_policy", b"sharding_policy"]) -> None: ... global___ProcessingModeDef = ProcessingModeDef @@ -162,7 +162,9 @@ class DataServiceMetadata(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _CompressionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DataServiceMetadata._Compression.ValueType], builtins.type): + class _CompressionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DataServiceMetadata._Compression.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor COMPRESSION_UNSPECIFIED: DataServiceMetadata._Compression.ValueType # 0 COMPRESSION_OFF: DataServiceMetadata._Compression.ValueType # 1 @@ -192,9 +194,25 @@ class DataServiceMetadata(google.protobuf.message.Message): compression: global___DataServiceMetadata.Compression.ValueType | None = ..., cardinality: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["element_spec", b"element_spec", "optional_element_spec", b"optional_element_spec"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["cardinality", b"cardinality", "compression", b"compression", "element_spec", b"element_spec", "optional_element_spec", b"optional_element_spec"]) -> None: ... 
- def WhichOneof(self, oneof_group: typing.Literal["optional_element_spec", b"optional_element_spec"]) -> typing.Literal["element_spec"] | None: ... + def HasField( + self, field_name: typing.Literal["element_spec", b"element_spec", "optional_element_spec", b"optional_element_spec"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "cardinality", + b"cardinality", + "compression", + b"compression", + "element_spec", + b"element_spec", + "optional_element_spec", + b"optional_element_spec", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_element_spec", b"optional_element_spec"] + ) -> typing.Literal["element_spec"] | None: ... global___DataServiceMetadata = DataServiceMetadata @@ -204,11 +222,7 @@ class CrossTrainerCacheOptions(google.protobuf.message.Message): TRAINER_ID_FIELD_NUMBER: builtins.int trainer_id: builtins.str - def __init__( - self, - *, - trainer_id: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, trainer_id: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["trainer_id", b"trainer_id"]) -> None: ... global___CrossTrainerCacheOptions = CrossTrainerCacheOptions @@ -223,11 +237,7 @@ class DataServiceConfig(google.protobuf.message.Message): DEPLOYMENT_MODE_FIELD_NUMBER: builtins.int deployment_mode: global___DeploymentMode.ValueType - def __init__( - self, - *, - deployment_mode: global___DeploymentMode.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, deployment_mode: global___DeploymentMode.ValueType | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["deployment_mode", b"deployment_mode"]) -> None: ... global___DataServiceConfig = DataServiceConfig diff --git a/stubs/tensorflow/tensorflow/core/protobuf/debug_event_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/debug_event_pb2.pyi index 8ee6e76d65da..2906b660d309 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/debug_event_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/debug_event_pb2.pyi @@ -26,7 +26,9 @@ class _TensorDebugMode: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _TensorDebugModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_TensorDebugMode.ValueType], builtins.type): +class _TensorDebugModeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_TensorDebugMode.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNSPECIFIED: _TensorDebugMode.ValueType # 0 NO_TENSOR: _TensorDebugMode.ValueType # 1 @@ -209,9 +211,76 @@ class DebugEvent(google.protobuf.message.Message): graph_id: builtins.str | None = ..., debugged_device: global___DebuggedDevice | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["debug_metadata", b"debug_metadata", "debugged_device", b"debugged_device", "debugged_graph", b"debugged_graph", "execution", b"execution", "graph_execution_trace", b"graph_execution_trace", "graph_id", b"graph_id", "graph_op_creation", b"graph_op_creation", "source_file", b"source_file", "stack_frame_with_id", b"stack_frame_with_id", "what", b"what"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["debug_metadata", b"debug_metadata", "debugged_device", b"debugged_device", "debugged_graph", b"debugged_graph", "execution", b"execution", "graph_execution_trace", b"graph_execution_trace", "graph_id", b"graph_id", "graph_op_creation", b"graph_op_creation", "source_file", b"source_file", "stack_frame_with_id", b"stack_frame_with_id", "step", b"step", "wall_time", b"wall_time", "what", b"what"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["what", b"what"]) -> typing.Literal["debug_metadata", "source_file", "stack_frame_with_id", "graph_op_creation", "debugged_graph", "execution", "graph_execution_trace", "graph_id", "debugged_device"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "debug_metadata", + b"debug_metadata", + "debugged_device", + b"debugged_device", + "debugged_graph", + b"debugged_graph", + "execution", + b"execution", + "graph_execution_trace", + b"graph_execution_trace", + "graph_id", + b"graph_id", + "graph_op_creation", + b"graph_op_creation", + "source_file", + b"source_file", + "stack_frame_with_id", + b"stack_frame_with_id", + "what", + b"what", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "debug_metadata", + b"debug_metadata", + "debugged_device", + b"debugged_device", + "debugged_graph", + b"debugged_graph", + "execution", + b"execution", + "graph_execution_trace", + b"graph_execution_trace", + "graph_id", + b"graph_id", + "graph_op_creation", + b"graph_op_creation", + "source_file", + b"source_file", + "stack_frame_with_id", + b"stack_frame_with_id", + "step", + b"step", + "wall_time", + b"wall_time", + "what", + b"what", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["what", b"what"] + ) -> ( + typing.Literal[ + "debug_metadata", + "source_file", + "stack_frame_with_id", + "graph_op_creation", + "debugged_graph", + "execution", + "graph_execution_trace", + "graph_id", + "debugged_device", + ] + | None + ): ... global___DebugEvent = DebugEvent @@ -243,7 +312,12 @@ class DebugMetadata(google.protobuf.message.Message): file_version: builtins.str | None = ..., tfdbg_run_id: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["file_version", b"file_version", "tensorflow_version", b"tensorflow_version", "tfdbg_run_id", b"tfdbg_run_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "file_version", b"file_version", "tensorflow_version", b"tensorflow_version", "tfdbg_run_id", b"tfdbg_run_id" + ], + ) -> None: ... global___DebugMetadata = DebugMetadata @@ -273,7 +347,9 @@ class SourceFile(google.protobuf.message.Message): host_name: builtins.str | None = ..., lines: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["file_path", b"file_path", "host_name", b"host_name", "lines", b"lines"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["file_path", b"file_path", "host_name", b"host_name", "lines", b"lines"] + ) -> None: ... global___SourceFile = SourceFile @@ -326,12 +402,11 @@ class CodeLocation(google.protobuf.message.Message): """ def __init__( - self, - *, - host_name: builtins.str | None = ..., - stack_frame_ids: collections.abc.Iterable[builtins.str] | None = ..., + self, *, host_name: builtins.str | None = ..., stack_frame_ids: collections.abc.Iterable[builtins.str] | None = ... + ) -> None: ... 
+ def ClearField( + self, field_name: typing.Literal["host_name", b"host_name", "stack_frame_ids", b"stack_frame_ids"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["host_name", b"host_name", "stack_frame_ids", b"stack_frame_ids"]) -> None: ... global___CodeLocation = CodeLocation @@ -390,7 +465,29 @@ class GraphOpCreation(google.protobuf.message.Message): output_tensor_ids: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["code_location", b"code_location"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["code_location", b"code_location", "device_name", b"device_name", "graph_id", b"graph_id", "graph_name", b"graph_name", "input_names", b"input_names", "num_outputs", b"num_outputs", "op_name", b"op_name", "op_type", b"op_type", "output_tensor_ids", b"output_tensor_ids"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "code_location", + b"code_location", + "device_name", + b"device_name", + "graph_id", + b"graph_id", + "graph_name", + b"graph_name", + "input_names", + b"input_names", + "num_outputs", + b"num_outputs", + "op_name", + b"op_name", + "op_type", + b"op_type", + "output_tensor_ids", + b"output_tensor_ids", + ], + ) -> None: ... global___GraphOpCreation = GraphOpCreation @@ -436,7 +533,23 @@ class DebuggedGraph(google.protobuf.message.Message): instrumented_graph_def: builtins.bytes | None = ..., outer_context_id: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["graph_id", b"graph_id", "graph_name", b"graph_name", "instrumented_graph_def", b"instrumented_graph_def", "instrumented_ops", b"instrumented_ops", "original_graph_def", b"original_graph_def", "outer_context_id", b"outer_context_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "graph_id", + b"graph_id", + "graph_name", + b"graph_name", + "instrumented_graph_def", + b"instrumented_graph_def", + "instrumented_ops", + b"instrumented_ops", + "original_graph_def", + b"original_graph_def", + "outer_context_id", + b"outer_context_id", + ], + ) -> None: ... global___DebuggedGraph = DebuggedGraph @@ -456,12 +569,7 @@ class DebuggedDevice(google.protobuf.message.Message): multi-host settings. TODO(cais): Test the uniqueness guarantee in multi-host settings. """ - def __init__( - self, - *, - device_name: builtins.str | None = ..., - device_id: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, device_name: builtins.str | None = ..., device_id: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["device_id", b"device_id", "device_name", b"device_name"]) -> None: ... global___DebuggedDevice = DebuggedDevice @@ -507,7 +615,9 @@ class Execution(google.protobuf.message.Message): """ @property - def tensor_protos(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_pb2.TensorProto]: + def tensor_protos( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_pb2.TensorProto]: """Output Tensor values in the type described by `tensor_value_type`. The length of this should match `num_outputs`. """ @@ -537,7 +647,29 @@ class Execution(google.protobuf.message.Message): output_tensor_device_ids: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["code_location", b"code_location"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["code_location", b"code_location", "graph_id", b"graph_id", "input_tensor_ids", b"input_tensor_ids", "num_outputs", b"num_outputs", "op_type", b"op_type", "output_tensor_device_ids", b"output_tensor_device_ids", "output_tensor_ids", b"output_tensor_ids", "tensor_debug_mode", b"tensor_debug_mode", "tensor_protos", b"tensor_protos"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "code_location", + b"code_location", + "graph_id", + b"graph_id", + "input_tensor_ids", + b"input_tensor_ids", + "num_outputs", + b"num_outputs", + "op_type", + b"op_type", + "output_tensor_device_ids", + b"output_tensor_device_ids", + "output_tensor_ids", + b"output_tensor_ids", + "tensor_debug_mode", + b"tensor_debug_mode", + "tensor_protos", + b"tensor_protos", + ], + ) -> None: ... global___Execution = Execution @@ -593,6 +725,22 @@ class GraphExecutionTrace(google.protobuf.message.Message): device_name: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["tensor_proto", b"tensor_proto"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["device_name", b"device_name", "op_name", b"op_name", "output_slot", b"output_slot", "tensor_debug_mode", b"tensor_debug_mode", "tensor_proto", b"tensor_proto", "tfdbg_context_id", b"tfdbg_context_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "device_name", + b"device_name", + "op_name", + b"op_name", + "output_slot", + b"output_slot", + "tensor_debug_mode", + b"tensor_debug_mode", + "tensor_proto", + b"tensor_proto", + "tfdbg_context_id", + b"tfdbg_context_id", + ], + ) -> None: ... global___GraphExecutionTrace = GraphExecutionTrace diff --git a/stubs/tensorflow/tensorflow/core/protobuf/debug_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/debug_pb2.pyi index 5ee6b532d850..28a2c99bc415 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/debug_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/debug_pb2.pyi @@ -80,7 +80,21 @@ class DebugTensorWatch(google.protobuf.message.Message): debug_urls: collections.abc.Iterable[builtins.str] | None = ..., tolerate_debug_op_creation_failures: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["debug_ops", b"debug_ops", "debug_urls", b"debug_urls", "node_name", b"node_name", "output_slot", b"output_slot", "tolerate_debug_op_creation_failures", b"tolerate_debug_op_creation_failures"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "debug_ops", + b"debug_ops", + "debug_urls", + b"debug_urls", + "node_name", + b"node_name", + "output_slot", + b"output_slot", + "tolerate_debug_op_creation_failures", + b"tolerate_debug_op_creation_failures", + ], + ) -> None: ... global___DebugTensorWatch = DebugTensorWatch @@ -105,7 +119,9 @@ class DebugOptions(google.protobuf.message.Message): are cleaned up from the disk after each Session.run. """ @property - def debug_tensor_watch_opts(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DebugTensorWatch]: + def debug_tensor_watch_opts( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DebugTensorWatch]: """Debugging options""" def __init__( @@ -115,7 +131,17 @@ class DebugOptions(google.protobuf.message.Message): global_step: builtins.int | None = ..., reset_disk_byte_usage: builtins.bool | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["debug_tensor_watch_opts", b"debug_tensor_watch_opts", "global_step", b"global_step", "reset_disk_byte_usage", b"reset_disk_byte_usage"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "debug_tensor_watch_opts", + b"debug_tensor_watch_opts", + "global_step", + b"global_step", + "reset_disk_byte_usage", + b"reset_disk_byte_usage", + ], + ) -> None: ... global___DebugOptions = DebugOptions @@ -149,7 +175,12 @@ class DebuggedSourceFile(google.protobuf.message.Message): bytes: builtins.int | None = ..., lines: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bytes", b"bytes", "file_path", b"file_path", "host", b"host", "last_modified", b"last_modified", "lines", b"lines"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bytes", b"bytes", "file_path", b"file_path", "host", b"host", "last_modified", b"last_modified", "lines", b"lines" + ], + ) -> None: ... global___DebuggedSourceFile = DebuggedSourceFile @@ -162,11 +193,7 @@ class DebuggedSourceFiles(google.protobuf.message.Message): def source_files(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DebuggedSourceFile]: """A collection of source code files.""" - def __init__( - self, - *, - source_files: collections.abc.Iterable[global___DebuggedSourceFile] | None = ..., - ) -> None: ... + def __init__(self, *, source_files: collections.abc.Iterable[global___DebuggedSourceFile] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["source_files", b"source_files"]) -> None: ... global___DebuggedSourceFiles = DebuggedSourceFiles diff --git a/stubs/tensorflow/tensorflow/core/protobuf/device_filters_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/device_filters_pb2.pyi index f4e0c8efe920..a9fd3a142db7 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/device_filters_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/device_filters_pb2.pyi @@ -67,11 +67,7 @@ class TaskDeviceFilters(google.protobuf.message.Message): DEVICE_FILTERS_FIELD_NUMBER: builtins.int @property def device_filters(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... - def __init__( - self, - *, - device_filters: collections.abc.Iterable[builtins.str] | None = ..., - ) -> None: ... + def __init__(self, *, device_filters: collections.abc.Iterable[builtins.str] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["device_filters", b"device_filters"]) -> None: ... global___TaskDeviceFilters = TaskDeviceFilters @@ -91,12 +87,7 @@ class JobDeviceFilters(google.protobuf.message.Message): key: builtins.int @property def value(self) -> global___TaskDeviceFilters: ... - def __init__( - self, - *, - key: builtins.int | None = ..., - value: global___TaskDeviceFilters | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.int | None = ..., value: global___TaskDeviceFilters | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -127,11 +118,7 @@ class ClusterDeviceFilters(google.protobuf.message.Message): JOBS_FIELD_NUMBER: builtins.int @property def jobs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___JobDeviceFilters]: ... 
- def __init__( - self, - *, - jobs: collections.abc.Iterable[global___JobDeviceFilters] | None = ..., - ) -> None: ... + def __init__(self, *, jobs: collections.abc.Iterable[global___JobDeviceFilters] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["jobs", b"jobs"]) -> None: ... global___ClusterDeviceFilters = ClusterDeviceFilters diff --git a/stubs/tensorflow/tensorflow/core/protobuf/device_properties_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/device_properties_pb2.pyi index af4bdd7a1f88..3aadb10563ec 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/device_properties_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/device_properties_pb2.pyi @@ -39,12 +39,7 @@ class DeviceProperties(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... TYPE_FIELD_NUMBER: builtins.int @@ -109,7 +104,37 @@ class DeviceProperties(google.protobuf.message.Message): memory_size: builtins.int | None = ..., bandwidth: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bandwidth", b"bandwidth", "environment", b"environment", "frequency", b"frequency", "l1_cache_size", b"l1_cache_size", "l2_cache_size", b"l2_cache_size", "l3_cache_size", b"l3_cache_size", "memory_size", b"memory_size", "model", b"model", "num_cores", b"num_cores", "num_registers", b"num_registers", "shared_memory_size_per_multiprocessor", b"shared_memory_size_per_multiprocessor", "type", b"type", "vendor", b"vendor"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bandwidth", + b"bandwidth", + "environment", + b"environment", + "frequency", + b"frequency", + "l1_cache_size", + b"l1_cache_size", + "l2_cache_size", + b"l2_cache_size", + "l3_cache_size", + b"l3_cache_size", + "memory_size", + b"memory_size", + "model", + b"model", + "num_cores", + b"num_cores", + "num_registers", + b"num_registers", + "shared_memory_size_per_multiprocessor", + b"shared_memory_size_per_multiprocessor", + "type", + b"type", + "vendor", + b"vendor", + ], + ) -> None: ... global___DeviceProperties = DeviceProperties @@ -122,12 +147,7 @@ class NamedDevice(google.protobuf.message.Message): name: builtins.str @property def properties(self) -> global___DeviceProperties: ... - def __init__( - self, - *, - name: builtins.str | None = ..., - properties: global___DeviceProperties | None = ..., - ) -> None: ... + def __init__(self, *, name: builtins.str | None = ..., properties: global___DeviceProperties | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["properties", b"properties"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["name", b"name", "properties", b"properties"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/core/protobuf/fingerprint_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/fingerprint_pb2.pyi index 83956b31c972..e5c00ec8506d 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/fingerprint_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/fingerprint_pb2.pyi @@ -55,6 +55,22 @@ class FingerprintDef(google.protobuf.message.Message): version: tensorflow.core.framework.versions_pb2.VersionDef | None = ..., ) -> None: ... 
def HasField(self, field_name: typing.Literal["version", b"version"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["checkpoint_hash", b"checkpoint_hash", "graph_def_program_hash", b"graph_def_program_hash", "saved_model_checksum", b"saved_model_checksum", "saved_object_graph_hash", b"saved_object_graph_hash", "signature_def_hash", b"signature_def_hash", "version", b"version"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "checkpoint_hash", + b"checkpoint_hash", + "graph_def_program_hash", + b"graph_def_program_hash", + "saved_model_checksum", + b"saved_model_checksum", + "saved_object_graph_hash", + b"saved_object_graph_hash", + "signature_def_hash", + b"signature_def_hash", + "version", + b"version", + ], + ) -> None: ... global___FingerprintDef = FingerprintDef diff --git a/stubs/tensorflow/tensorflow/core/protobuf/meta_graph_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/meta_graph_pb2.pyi index 78ea938af3f4..e5357894c7a2 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/meta_graph_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/meta_graph_pb2.pyi @@ -55,12 +55,7 @@ class MetaGraphDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... META_GRAPH_VERSION_FIELD_NUMBER: builtins.int @@ -127,8 +122,30 @@ class MetaGraphDef(google.protobuf.message.Message): stripped_default_attrs: builtins.bool | None = ..., function_aliases: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["any_info", b"any_info", "stripped_op_list", b"stripped_op_list"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["any_info", b"any_info", "function_aliases", b"function_aliases", "meta_graph_version", b"meta_graph_version", "stripped_default_attrs", b"stripped_default_attrs", "stripped_op_list", b"stripped_op_list", "tags", b"tags", "tensorflow_git_version", b"tensorflow_git_version", "tensorflow_version", b"tensorflow_version"]) -> None: ... + def HasField( + self, field_name: typing.Literal["any_info", b"any_info", "stripped_op_list", b"stripped_op_list"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "any_info", + b"any_info", + "function_aliases", + b"function_aliases", + "meta_graph_version", + b"meta_graph_version", + "stripped_default_attrs", + b"stripped_default_attrs", + "stripped_op_list", + b"stripped_op_list", + "tags", + b"tags", + "tensorflow_git_version", + b"tensorflow_git_version", + "tensorflow_version", + b"tensorflow_version", + ], + ) -> None: ... @typing.final class CollectionDefEntry(google.protobuf.message.Message): @@ -139,12 +156,7 @@ class MetaGraphDef(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___CollectionDef: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___CollectionDef | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___CollectionDef | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... 
def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -157,12 +169,7 @@ class MetaGraphDef(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___SignatureDef: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___SignatureDef | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___SignatureDef | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -214,8 +221,38 @@ class MetaGraphDef(google.protobuf.message.Message): asset_file_def: collections.abc.Iterable[global___AssetFileDef] | None = ..., object_graph_def: tensorflow.core.protobuf.saved_object_graph_pb2.SavedObjectGraph | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["graph_def", b"graph_def", "meta_info_def", b"meta_info_def", "object_graph_def", b"object_graph_def", "saver_def", b"saver_def"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["asset_file_def", b"asset_file_def", "collection_def", b"collection_def", "graph_def", b"graph_def", "meta_info_def", b"meta_info_def", "object_graph_def", b"object_graph_def", "saver_def", b"saver_def", "signature_def", b"signature_def"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "graph_def", + b"graph_def", + "meta_info_def", + b"meta_info_def", + "object_graph_def", + b"object_graph_def", + "saver_def", + b"saver_def", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "asset_file_def", + b"asset_file_def", + "collection_def", + b"collection_def", + "graph_def", + b"graph_def", + "meta_info_def", + b"meta_info_def", + "object_graph_def", + b"object_graph_def", + "saver_def", + b"saver_def", + "signature_def", + b"signature_def", + ], + ) -> None: ... global___MetaGraphDef = MetaGraphDef @@ -306,11 +343,7 @@ class CollectionDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int @property def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... - def __init__( - self, - *, - value: collections.abc.Iterable[builtins.str] | None = ..., - ) -> None: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.str] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... @typing.final @@ -335,11 +368,7 @@ class CollectionDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int @property def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ... - def __init__( - self, - *, - value: collections.abc.Iterable[builtins.bytes] | None = ..., - ) -> None: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.bytes] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... @typing.final @@ -351,11 +380,7 @@ class CollectionDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int @property def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - def __init__( - self, - *, - value: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... 
def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... @typing.final @@ -367,11 +392,7 @@ class CollectionDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int @property def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... - def __init__( - self, - *, - value: collections.abc.Iterable[builtins.float] | None = ..., - ) -> None: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.float] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... @typing.final @@ -383,11 +404,7 @@ class CollectionDef(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int @property def value(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.any_pb2.Any]: ... - def __init__( - self, - *, - value: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ..., - ) -> None: ... + def __init__(self, *, value: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... NODE_LIST_FIELD_NUMBER: builtins.int @@ -414,9 +431,43 @@ class CollectionDef(google.protobuf.message.Message): float_list: global___CollectionDef.FloatList | None = ..., any_list: global___CollectionDef.AnyList | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["any_list", b"any_list", "bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind", "node_list", b"node_list"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["any_list", b"any_list", "bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind", "node_list", b"node_list"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["kind", b"kind"]) -> typing.Literal["node_list", "bytes_list", "int64_list", "float_list", "any_list"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "any_list", + b"any_list", + "bytes_list", + b"bytes_list", + "float_list", + b"float_list", + "int64_list", + b"int64_list", + "kind", + b"kind", + "node_list", + b"node_list", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "any_list", + b"any_list", + "bytes_list", + b"bytes_list", + "float_list", + b"float_list", + "int64_list", + b"int64_list", + "kind", + b"kind", + "node_list", + b"node_list", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["kind", b"kind"] + ) -> typing.Literal["node_list", "bytes_list", "int64_list", "float_list", "any_list"] | None: ... global___CollectionDef = CollectionDef @@ -454,7 +505,17 @@ class TensorInfo(google.protobuf.message.Message): indices_tensor_name: builtins.str | None = ..., dense_shape_tensor_name: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["dense_shape_tensor_name", b"dense_shape_tensor_name", "indices_tensor_name", b"indices_tensor_name", "values_tensor_name", b"values_tensor_name"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "dense_shape_tensor_name", + b"dense_shape_tensor_name", + "indices_tensor_name", + b"indices_tensor_name", + "values_tensor_name", + b"values_tensor_name", + ], + ) -> None: ... 
@typing.final class CompositeTensor(google.protobuf.message.Message): @@ -517,9 +578,41 @@ class TensorInfo(google.protobuf.message.Message): dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., tensor_shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["composite_tensor", b"composite_tensor", "coo_sparse", b"coo_sparse", "encoding", b"encoding", "name", b"name", "tensor_shape", b"tensor_shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["composite_tensor", b"composite_tensor", "coo_sparse", b"coo_sparse", "dtype", b"dtype", "encoding", b"encoding", "name", b"name", "tensor_shape", b"tensor_shape"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["encoding", b"encoding"]) -> typing.Literal["name", "coo_sparse", "composite_tensor"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "composite_tensor", + b"composite_tensor", + "coo_sparse", + b"coo_sparse", + "encoding", + b"encoding", + "name", + b"name", + "tensor_shape", + b"tensor_shape", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "composite_tensor", + b"composite_tensor", + "coo_sparse", + b"coo_sparse", + "dtype", + b"dtype", + "encoding", + b"encoding", + "name", + b"name", + "tensor_shape", + b"tensor_shape", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["encoding", b"encoding"] + ) -> typing.Literal["name", "coo_sparse", "composite_tensor"] | None: ... global___TensorInfo = TensorInfo @@ -540,12 +633,7 @@ class SignatureDef(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___TensorInfo: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___TensorInfo | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___TensorInfo | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -558,12 +646,7 @@ class SignatureDef(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___TensorInfo: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___TensorInfo | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___TensorInfo | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -577,10 +660,7 @@ class SignatureDef(google.protobuf.message.Message): @property def value(self) -> tensorflow.core.framework.tensor_pb2.TensorProto: ... def __init__( - self, - *, - key: builtins.str | None = ..., - value: tensorflow.core.framework.tensor_pb2.TensorProto | None = ..., + self, *, key: builtins.str | None = ..., value: tensorflow.core.framework.tensor_pb2.TensorProto | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
@@ -607,7 +687,9 @@ class SignatureDef(google.protobuf.message.Message): """Named output parameters.""" @property - def defaults(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.tensor_pb2.TensorProto]: + def defaults( + self, + ) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.tensor_pb2.TensorProto]: """Named input to corresponding default values if any.""" def __init__( @@ -618,7 +700,12 @@ class SignatureDef(google.protobuf.message.Message): method_name: builtins.str | None = ..., defaults: collections.abc.Mapping[builtins.str, tensorflow.core.framework.tensor_pb2.TensorProto] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["defaults", b"defaults", "inputs", b"inputs", "method_name", b"method_name", "outputs", b"outputs"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "defaults", b"defaults", "inputs", b"inputs", "method_name", b"method_name", "outputs", b"outputs" + ], + ) -> None: ... global___SignatureDef = SignatureDef @@ -641,12 +728,7 @@ class AssetFileDef(google.protobuf.message.Message): def tensor_info(self) -> global___TensorInfo: """The tensor to bind the asset filename to.""" - def __init__( - self, - *, - tensor_info: global___TensorInfo | None = ..., - filename: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, tensor_info: global___TensorInfo | None = ..., filename: builtins.str | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["tensor_info", b"tensor_info"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["filename", b"filename", "tensor_info", b"tensor_info"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/core/protobuf/named_tensor_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/named_tensor_pb2.pyi index c1c17417d31c..5128864c6868 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/named_tensor_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/named_tensor_pb2.pyi @@ -33,10 +33,7 @@ class NamedTensorProto(google.protobuf.message.Message): """ def __init__( - self, - *, - name: builtins.str | None = ..., - tensor: tensorflow.core.framework.tensor_pb2.TensorProto | None = ..., + self, *, name: builtins.str | None = ..., tensor: tensorflow.core.framework.tensor_pb2.TensorProto | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["tensor", b"tensor"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["name", b"name", "tensor", b"tensor"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/core/protobuf/queue_runner_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/queue_runner_pb2.pyi index f93fe61c3752..68d0cda5c153 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/queue_runner_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/queue_runner_pb2.pyi @@ -36,7 +36,9 @@ class QueueRunnerDef(google.protobuf.message.Message): """A list of enqueue operations.""" @property - def queue_closed_exception_types(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType]: + def queue_closed_exception_types( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType]: """A list of exception types considered to signal a safely closed queue if raised during enqueue operations. 
""" @@ -48,8 +50,24 @@ class QueueRunnerDef(google.protobuf.message.Message): enqueue_op_name: collections.abc.Iterable[builtins.str] | None = ..., close_op_name: builtins.str | None = ..., cancel_op_name: builtins.str | None = ..., - queue_closed_exception_types: collections.abc.Iterable[tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType] | None = ..., + queue_closed_exception_types: ( + collections.abc.Iterable[tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType] | None + ) = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "cancel_op_name", + b"cancel_op_name", + "close_op_name", + b"close_op_name", + "enqueue_op_name", + b"enqueue_op_name", + "queue_closed_exception_types", + b"queue_closed_exception_types", + "queue_name", + b"queue_name", + ], ) -> None: ... - def ClearField(self, field_name: typing.Literal["cancel_op_name", b"cancel_op_name", "close_op_name", b"close_op_name", "enqueue_op_name", b"enqueue_op_name", "queue_closed_exception_types", b"queue_closed_exception_types", "queue_name", b"queue_name"]) -> None: ... global___QueueRunnerDef = QueueRunnerDef diff --git a/stubs/tensorflow/tensorflow/core/protobuf/remote_tensor_handle_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/remote_tensor_handle_pb2.pyi index 6558122b328b..4450b2e472ff 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/remote_tensor_handle_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/remote_tensor_handle_pb2.pyi @@ -60,7 +60,9 @@ class RemoteTensorHandle(google.protobuf.message.Message): dtype: tensorflow.core.framework.types_pb2.DataType.ValueType """Tensor type.""" @property - def resource_dtypes_and_shapes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceDtypeAndShape]: + def resource_dtypes_and_shapes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceDtypeAndShape]: """Optional data types and shapes of a remote resource variable.""" def __init__( @@ -73,6 +75,22 @@ class RemoteTensorHandle(google.protobuf.message.Message): dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., resource_dtypes_and_shapes: collections.abc.Iterable[global___ResourceDtypeAndShape] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["device", b"device", "dtype", b"dtype", "op_device", b"op_device", "op_id", b"op_id", "output_num", b"output_num", "resource_dtypes_and_shapes", b"resource_dtypes_and_shapes"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "device", + b"device", + "dtype", + b"dtype", + "op_device", + b"op_device", + "op_id", + b"op_id", + "output_num", + b"output_num", + "resource_dtypes_and_shapes", + b"resource_dtypes_and_shapes", + ], + ) -> None: ... global___RemoteTensorHandle = RemoteTensorHandle diff --git a/stubs/tensorflow/tensorflow/core/protobuf/rewriter_config_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/rewriter_config_pb2.pyi index ebe2295bc872..0779991c7dd3 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/rewriter_config_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/rewriter_config_pb2.pyi @@ -30,12 +30,7 @@ class AutoParallelOptions(google.protobuf.message.Message): NUM_REPLICAS_FIELD_NUMBER: builtins.int enable: builtins.bool num_replicas: builtins.int - def __init__( - self, - *, - enable: builtins.bool | None = ..., - num_replicas: builtins.int | None = ..., - ) -> None: ... 
+ def __init__(self, *, enable: builtins.bool | None = ..., num_replicas: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["enable", b"enable", "num_replicas", b"num_replicas"]) -> None: ... global___AutoParallelOptions = AutoParallelOptions @@ -49,11 +44,7 @@ class ScopedAllocatorOptions(google.protobuf.message.Message): def enable_op(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """If present, only perform optimization for these ops.""" - def __init__( - self, - *, - enable_op: collections.abc.Iterable[builtins.str] | None = ..., - ) -> None: ... + def __init__(self, *, enable_op: collections.abc.Iterable[builtins.str] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["enable_op", b"enable_op"]) -> None: ... global___ScopedAllocatorOptions = ScopedAllocatorOptions @@ -70,7 +61,9 @@ class RewriterConfig(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ToggleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._Toggle.ValueType], builtins.type): + class _ToggleEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._Toggle.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT: RewriterConfig._Toggle.ValueType # 0 ON: RewriterConfig._Toggle.ValueType # 1 @@ -118,7 +111,9 @@ class RewriterConfig(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _CpuLayoutEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._CpuLayout.ValueType], builtins.type): + class _CpuLayoutEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._CpuLayout.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor NO_CONVERSION_ON_CPU: RewriterConfig._CpuLayout.ValueType # 0 NCHW_TO_NHWC: RewriterConfig._CpuLayout.ValueType # 1 @@ -135,7 +130,9 @@ class RewriterConfig(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _NumIterationsTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._NumIterationsType.ValueType], builtins.type): + class _NumIterationsTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._NumIterationsType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT_NUM_ITERS: RewriterConfig._NumIterationsType.ValueType # 0 ONE: RewriterConfig._NumIterationsType.ValueType # 1 @@ -154,7 +151,9 @@ class RewriterConfig(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _MemOptTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._MemOptType.ValueType], builtins.type): + class _MemOptTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._MemOptType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT_MEM_OPT: RewriterConfig._MemOptType.ValueType # 0 """The default setting (SCHEDULING and SWAPPING HEURISTICS only)""" @@ -225,10 +224,7 @@ class RewriterConfig(google.protobuf.message.Message): @property 
def value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue: ... def __init__( - self, - *, - key: builtins.str | None = ..., - value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ..., + self, *, key: builtins.str | None = ..., value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -237,7 +233,9 @@ class RewriterConfig(google.protobuf.message.Message): PARAMETER_MAP_FIELD_NUMBER: builtins.int name: builtins.str @property - def parameter_map(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: ... + def parameter_map( + self, + ) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: ... def __init__( self, *, @@ -438,7 +436,9 @@ class RewriterConfig(google.protobuf.message.Message): """ @property - def custom_optimizers(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RewriterConfig.CustomGraphOptimizer]: + def custom_optimizers( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RewriterConfig.CustomGraphOptimizer]: """list of CustomGraphOptimizers to apply.""" @property @@ -492,7 +492,97 @@ class RewriterConfig(google.protobuf.message.Message): inter_optimizer_verifier_config: tensorflow.core.protobuf.verifier_config_pb2.VerifierConfig | None = ..., post_optimization_verifier_config: tensorflow.core.protobuf.verifier_config_pb2.VerifierConfig | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["auto_parallel", b"auto_parallel", "inter_optimizer_verifier_config", b"inter_optimizer_verifier_config", "post_optimization_verifier_config", b"post_optimization_verifier_config", "scoped_allocator_opts", b"scoped_allocator_opts"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["arithmetic_optimization", b"arithmetic_optimization", "auto_mixed_precision", b"auto_mixed_precision", "auto_mixed_precision_cpu", b"auto_mixed_precision_cpu", "auto_mixed_precision_mkl", b"auto_mixed_precision_mkl", "auto_mixed_precision_onednn_bfloat16", b"auto_mixed_precision_onednn_bfloat16", "auto_parallel", b"auto_parallel", "common_subgraph_elimination", b"common_subgraph_elimination", "constant_folding", b"constant_folding", "cpu_layout_conversion", b"cpu_layout_conversion", "custom_optimizers", b"custom_optimizers", "debug_stripper", b"debug_stripper", "dependency_optimization", b"dependency_optimization", "disable_meta_optimizer", b"disable_meta_optimizer", "disable_model_pruning", b"disable_model_pruning", "disable_tfg_optimizer", b"disable_tfg_optimizer", "experimental_conditional_code_motion", b"experimental_conditional_code_motion", "experimental_disable_compressed_tensor_optimization", b"experimental_disable_compressed_tensor_optimization", "experimental_disable_folding_quantization_emulation", b"experimental_disable_folding_quantization_emulation", "fail_on_optimizer_errors", b"fail_on_optimizer_errors", "function_optimization", b"function_optimization", "implementation_selector", b"implementation_selector", "inter_optimizer_verifier_config", b"inter_optimizer_verifier_config", "layout_optimizer", b"layout_optimizer", "loop_optimization", b"loop_optimization", "memory_optimization", b"memory_optimization", "memory_optimizer_target_node_name_scope", b"memory_optimizer_target_node_name_scope", "meta_optimizer_iterations", b"meta_optimizer_iterations", "meta_optimizer_timeout_ms", b"meta_optimizer_timeout_ms", "min_graph_nodes", b"min_graph_nodes", "optimizers", b"optimizers", "pin_to_host_optimization", b"pin_to_host_optimization", "post_optimization_verifier_config", b"post_optimization_verifier_config", "remapping", b"remapping", "scoped_allocator_optimization", b"scoped_allocator_optimization", "scoped_allocator_opts", b"scoped_allocator_opts", "shape_optimization", b"shape_optimization", "use_plugin_optimizers", b"use_plugin_optimizers"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "auto_parallel", + b"auto_parallel", + "inter_optimizer_verifier_config", + b"inter_optimizer_verifier_config", + "post_optimization_verifier_config", + b"post_optimization_verifier_config", + "scoped_allocator_opts", + b"scoped_allocator_opts", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "arithmetic_optimization", + b"arithmetic_optimization", + "auto_mixed_precision", + b"auto_mixed_precision", + "auto_mixed_precision_cpu", + b"auto_mixed_precision_cpu", + "auto_mixed_precision_mkl", + b"auto_mixed_precision_mkl", + "auto_mixed_precision_onednn_bfloat16", + b"auto_mixed_precision_onednn_bfloat16", + "auto_parallel", + b"auto_parallel", + "common_subgraph_elimination", + b"common_subgraph_elimination", + "constant_folding", + b"constant_folding", + "cpu_layout_conversion", + b"cpu_layout_conversion", + "custom_optimizers", + b"custom_optimizers", + "debug_stripper", + b"debug_stripper", + "dependency_optimization", + b"dependency_optimization", + "disable_meta_optimizer", + b"disable_meta_optimizer", + "disable_model_pruning", + b"disable_model_pruning", + "disable_tfg_optimizer", + b"disable_tfg_optimizer", + "experimental_conditional_code_motion", + b"experimental_conditional_code_motion", + "experimental_disable_compressed_tensor_optimization", + b"experimental_disable_compressed_tensor_optimization", + "experimental_disable_folding_quantization_emulation", + b"experimental_disable_folding_quantization_emulation", + "fail_on_optimizer_errors", + b"fail_on_optimizer_errors", + "function_optimization", + b"function_optimization", + "implementation_selector", + b"implementation_selector", + "inter_optimizer_verifier_config", + b"inter_optimizer_verifier_config", + "layout_optimizer", + b"layout_optimizer", + "loop_optimization", + b"loop_optimization", + "memory_optimization", + b"memory_optimization", + "memory_optimizer_target_node_name_scope", + b"memory_optimizer_target_node_name_scope", + "meta_optimizer_iterations", + b"meta_optimizer_iterations", + "meta_optimizer_timeout_ms", + b"meta_optimizer_timeout_ms", + "min_graph_nodes", + b"min_graph_nodes", + "optimizers", + b"optimizers", + "pin_to_host_optimization", + b"pin_to_host_optimization", + "post_optimization_verifier_config", + b"post_optimization_verifier_config", + "remapping", + b"remapping", + "scoped_allocator_optimization", + b"scoped_allocator_optimization", + "scoped_allocator_opts", + b"scoped_allocator_opts", + "shape_optimization", + b"shape_optimization", + "use_plugin_optimizers", + b"use_plugin_optimizers", + ], + ) -> None: ... global___RewriterConfig = RewriterConfig diff --git a/stubs/tensorflow/tensorflow/core/protobuf/saved_model_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/saved_model_pb2.pyi index cd2bdb7a2a13..e387ee87563d 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/saved_model_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/saved_model_pb2.pyi @@ -30,7 +30,11 @@ class SavedModel(google.protobuf.message.Message): at release will be 1. """ @property - def meta_graphs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.protobuf.meta_graph_pb2.MetaGraphDef]: + def meta_graphs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.protobuf.meta_graph_pb2.MetaGraphDef + ]: """One or more MetaGraphs.""" def __init__( @@ -39,6 +43,9 @@ class SavedModel(google.protobuf.message.Message): saved_model_schema_version: builtins.int | None = ..., meta_graphs: collections.abc.Iterable[tensorflow.core.protobuf.meta_graph_pb2.MetaGraphDef] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["meta_graphs", b"meta_graphs", "saved_model_schema_version", b"saved_model_schema_version"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal["meta_graphs", b"meta_graphs", "saved_model_schema_version", b"saved_model_schema_version"], + ) -> None: ... global___SavedModel = SavedModel diff --git a/stubs/tensorflow/tensorflow/core/protobuf/saved_object_graph_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/saved_object_graph_pb2.pyi index 4a6f0c1d51e0..ddc1f9d6a6ca 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/saved_object_graph_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/saved_object_graph_pb2.pyi @@ -46,12 +46,7 @@ class SavedObjectGraph(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___SavedConcreteFunction: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___SavedConcreteFunction | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___SavedConcreteFunction | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -94,12 +89,7 @@ class SavedObject(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___SaveableObject: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___SaveableObject | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___SaveableObject | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -141,7 +131,11 @@ class SavedObject(google.protobuf.message.Message): `registered_saver` is defined for each SavedObject. """ @property - def children(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference]: + def children( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference + ]: """Objects which this object depends on: named edges in the dependency graph. @@ -150,14 +144,22 @@ class SavedObject(google.protobuf.message.Message): """ @property - def dependencies(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference]: + def dependencies( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference + ]: """Ordered list of dependencies that must be loaded before this object. SavedModel loads with the bottom-up approach, by first creating all objects (in the order defined by the dependencies), then connecting the edges. """ @property - def slot_variables(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.SlotVariableReference]: + def slot_variables( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.SlotVariableReference + ]: """Slot variables owned by this object. 
This describes the three-way (optimizer, variable, slot variable) relationship; none of the three depend on the others directly. @@ -199,9 +201,24 @@ class SavedObject(google.protobuf.message.Message): def __init__( self, *, - children: collections.abc.Iterable[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference] | None = ..., - dependencies: collections.abc.Iterable[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference] | None = ..., - slot_variables: collections.abc.Iterable[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.SlotVariableReference] | None = ..., + children: ( + collections.abc.Iterable[ + tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference + ] + | None + ) = ..., + dependencies: ( + collections.abc.Iterable[ + tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference + ] + | None + ) = ..., + slot_variables: ( + collections.abc.Iterable[ + tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.SlotVariableReference + ] + | None + ) = ..., user_object: global___SavedUserObject | None = ..., asset: global___SavedAsset | None = ..., function: global___SavedFunction | None = ..., @@ -215,9 +232,76 @@ class SavedObject(google.protobuf.message.Message): serialized_user_proto: google.protobuf.any_pb2.Any | None = ..., registered_saver: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["asset", b"asset", "bare_concrete_function", b"bare_concrete_function", "captured_tensor", b"captured_tensor", "constant", b"constant", "function", b"function", "kind", b"kind", "resource", b"resource", "serialized_user_proto", b"serialized_user_proto", "user_object", b"user_object", "variable", b"variable"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["asset", b"asset", "bare_concrete_function", b"bare_concrete_function", "captured_tensor", b"captured_tensor", "children", b"children", "constant", b"constant", "dependencies", b"dependencies", "function", b"function", "kind", b"kind", "registered_name", b"registered_name", "registered_saver", b"registered_saver", "resource", b"resource", "saveable_objects", b"saveable_objects", "serialized_user_proto", b"serialized_user_proto", "slot_variables", b"slot_variables", "user_object", b"user_object", "variable", b"variable"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["kind", b"kind"]) -> typing.Literal["user_object", "asset", "function", "variable", "bare_concrete_function", "constant", "resource", "captured_tensor"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "asset", + b"asset", + "bare_concrete_function", + b"bare_concrete_function", + "captured_tensor", + b"captured_tensor", + "constant", + b"constant", + "function", + b"function", + "kind", + b"kind", + "resource", + b"resource", + "serialized_user_proto", + b"serialized_user_proto", + "user_object", + b"user_object", + "variable", + b"variable", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "asset", + b"asset", + "bare_concrete_function", + b"bare_concrete_function", + "captured_tensor", + b"captured_tensor", + "children", + b"children", + "constant", + b"constant", + "dependencies", + b"dependencies", + "function", + b"function", + "kind", + b"kind", + "registered_name", + b"registered_name", + "registered_saver", + b"registered_saver", + "resource", + b"resource", + "saveable_objects", + b"saveable_objects", + "serialized_user_proto", + b"serialized_user_proto", + "slot_variables", + b"slot_variables", + "user_object", + b"user_object", + "variable", + b"variable", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["kind", b"kind"] + ) -> ( + typing.Literal[ + "user_object", "asset", "function", "variable", "bare_concrete_function", "constant", "resource", "captured_tensor" + ] + | None + ): ... global___SavedObject = SavedObject @@ -257,7 +341,9 @@ class SavedUserObject(google.protobuf.message.Message): metadata: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["version", b"version"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["identifier", b"identifier", "metadata", b"metadata", "version", b"version"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["identifier", b"identifier", "metadata", b"metadata", "version", b"version"] + ) -> None: ... global___SavedUserObject = SavedUserObject @@ -279,11 +365,7 @@ class SavedAsset(google.protobuf.message.Message): Only the field `AssetFileDef.filename` is used. Other fields, such as `AssetFileDef.tensor_info`, MUST be ignored. """ - def __init__( - self, - *, - asset_file_def_index: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, asset_file_def_index: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["asset_file_def_index", b"asset_file_def_index"]) -> None: ... global___SavedAsset = SavedAsset @@ -307,7 +389,9 @@ class SavedFunction(google.protobuf.message.Message): function_spec: global___FunctionSpec | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["function_spec", b"function_spec"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["concrete_functions", b"concrete_functions", "function_spec", b"function_spec"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["concrete_functions", b"concrete_functions", "function_spec", b"function_spec"] + ) -> None: ... global___SavedFunction = SavedFunction @@ -321,12 +405,7 @@ class CapturedTensor(google.protobuf.message.Message): """Name of captured tensor""" concrete_function: builtins.str """Name of concrete function which contains the computed graph tensor.""" - def __init__( - self, - *, - name: builtins.str | None = ..., - concrete_function: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, name: builtins.str | None = ..., concrete_function: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["concrete_function", b"concrete_function", "name", b"name"]) -> None: ... global___CapturedTensor = CapturedTensor @@ -364,8 +443,23 @@ class SavedConcreteFunction(google.protobuf.message.Message): canonicalized_input_signature: tensorflow.core.protobuf.struct_pb2.StructuredValue | None = ..., output_signature: tensorflow.core.protobuf.struct_pb2.StructuredValue | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["canonicalized_input_signature", b"canonicalized_input_signature", "output_signature", b"output_signature"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bound_inputs", b"bound_inputs", "canonicalized_input_signature", b"canonicalized_input_signature", "output_signature", b"output_signature"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "canonicalized_input_signature", b"canonicalized_input_signature", "output_signature", b"output_signature" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "bound_inputs", + b"bound_inputs", + "canonicalized_input_signature", + b"canonicalized_input_signature", + "output_signature", + b"output_signature", + ], + ) -> None: ... global___SavedConcreteFunction = SavedConcreteFunction @@ -404,7 +498,19 @@ class SavedBareConcreteFunction(google.protobuf.message.Message): function_spec: global___FunctionSpec | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["function_spec", b"function_spec"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["allowed_positional_arguments", b"allowed_positional_arguments", "argument_keywords", b"argument_keywords", "concrete_function_name", b"concrete_function_name", "function_spec", b"function_spec"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "allowed_positional_arguments", + b"allowed_positional_arguments", + "argument_keywords", + b"argument_keywords", + "concrete_function_name", + b"concrete_function_name", + "function_spec", + b"function_spec", + ], + ) -> None: ... global___SavedBareConcreteFunction = SavedBareConcreteFunction @@ -415,11 +521,7 @@ class SavedConstant(google.protobuf.message.Message): OPERATION_FIELD_NUMBER: builtins.int operation: builtins.str """An Operation name for a ConstantOp in this SavedObjectGraph's MetaGraph.""" - def __init__( - self, - *, - operation: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, operation: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["operation", b"operation"]) -> None: ... global___SavedConstant = SavedConstant @@ -449,7 +551,9 @@ class SavedVariable(google.protobuf.message.Message): @property def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ... @property - def experimental_distributed_variable_components(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SavedVariable]: + def experimental_distributed_variable_components( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SavedVariable]: """List of component variables for a distributed variable. When this field is non-empty, the SavedVariable will be assumed @@ -471,7 +575,27 @@ class SavedVariable(google.protobuf.message.Message): experimental_distributed_variable_components: collections.abc.Iterable[global___SavedVariable] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["shape", b"shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["aggregation", b"aggregation", "device", b"device", "dtype", b"dtype", "experimental_distributed_variable_components", b"experimental_distributed_variable_components", "name", b"name", "shape", b"shape", "synchronization", b"synchronization", "trainable", b"trainable"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "aggregation", + b"aggregation", + "device", + b"device", + "dtype", + b"dtype", + "experimental_distributed_variable_components", + b"experimental_distributed_variable_components", + "name", + b"name", + "shape", + b"shape", + "synchronization", + b"synchronization", + "trainable", + b"trainable", + ], + ) -> None: ... global___SavedVariable = SavedVariable @@ -487,7 +611,9 @@ class FunctionSpec(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _JitCompileEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FunctionSpec._JitCompile.ValueType], builtins.type): + class _JitCompileEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FunctionSpec._JitCompile.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT: FunctionSpec._JitCompile.ValueType # 0 ON: FunctionSpec._JitCompile.ValueType # 1 @@ -531,8 +657,22 @@ class FunctionSpec(google.protobuf.message.Message): input_signature: tensorflow.core.protobuf.struct_pb2.StructuredValue | None = ..., jit_compile: global___FunctionSpec.JitCompile.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["fullargspec", b"fullargspec", "input_signature", b"input_signature"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["fullargspec", b"fullargspec", "input_signature", b"input_signature", "is_method", b"is_method", "jit_compile", b"jit_compile"]) -> None: ... + def HasField( + self, field_name: typing.Literal["fullargspec", b"fullargspec", "input_signature", b"input_signature"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "fullargspec", + b"fullargspec", + "input_signature", + b"input_signature", + "is_method", + b"is_method", + "jit_compile", + b"jit_compile", + ], + ) -> None: ... global___FunctionSpec = FunctionSpec @@ -551,11 +691,7 @@ class SavedResource(google.protobuf.message.Message): creation function, e.g. "CPU". An empty string allows the user to select a device. """ - def __init__( - self, - *, - device: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, device: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["device", b"device"]) -> None: ... global___SavedResource = SavedResource @@ -571,12 +707,9 @@ class SaveableObject(google.protobuf.message.Message): These functions save and restore directly from tensors. """ restore_function: builtins.int - def __init__( - self, - *, - save_function: builtins.int | None = ..., - restore_function: builtins.int | None = ..., + def __init__(self, *, save_function: builtins.int | None = ..., restore_function: builtins.int | None = ...) -> None: ... + def ClearField( + self, field_name: typing.Literal["restore_function", b"restore_function", "save_function", b"save_function"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["restore_function", b"restore_function", "save_function", b"save_function"]) -> None: ... 
global___SaveableObject = SaveableObject diff --git a/stubs/tensorflow/tensorflow/core/protobuf/saver_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/saver_pb2.pyi index 1f89eebf8f98..42159a4dbb60 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/saver_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/saver_pb2.pyi @@ -28,7 +28,9 @@ class SaverDef(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _CheckpointFormatVersionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[SaverDef._CheckpointFormatVersion.ValueType], builtins.type): + class _CheckpointFormatVersionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[SaverDef._CheckpointFormatVersion.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor LEGACY: SaverDef._CheckpointFormatVersion.ValueType # 0 """Internal legacy format.""" @@ -88,6 +90,24 @@ class SaverDef(google.protobuf.message.Message): keep_checkpoint_every_n_hours: builtins.float | None = ..., version: global___SaverDef.CheckpointFormatVersion.ValueType | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["filename_tensor_name", b"filename_tensor_name", "keep_checkpoint_every_n_hours", b"keep_checkpoint_every_n_hours", "max_to_keep", b"max_to_keep", "restore_op_name", b"restore_op_name", "save_tensor_name", b"save_tensor_name", "sharded", b"sharded", "version", b"version"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "filename_tensor_name", + b"filename_tensor_name", + "keep_checkpoint_every_n_hours", + b"keep_checkpoint_every_n_hours", + "max_to_keep", + b"max_to_keep", + "restore_op_name", + b"restore_op_name", + "save_tensor_name", + b"save_tensor_name", + "sharded", + b"sharded", + "version", + b"version", + ], + ) -> None: ... global___SaverDef = SaverDef diff --git a/stubs/tensorflow/tensorflow/core/protobuf/service_config_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/service_config_pb2.pyi index d5e74f6d52f9..af7c8f598a43 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/service_config_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/service_config_pb2.pyi @@ -110,7 +110,35 @@ class DispatcherConfig(google.protobuf.message.Message): worker_timeout_ms: builtins.int | None = ..., worker_max_concurrent_snapshots: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["client_timeout_ms", b"client_timeout_ms", "deployment_mode", b"deployment_mode", "fault_tolerant_mode", b"fault_tolerant_mode", "gc_dynamic_sharding_jobs", b"gc_dynamic_sharding_jobs", "job_gc_check_interval_ms", b"job_gc_check_interval_ms", "job_gc_timeout_ms", b"job_gc_timeout_ms", "port", b"port", "protocol", b"protocol", "work_dir", b"work_dir", "worker_addresses", b"worker_addresses", "worker_max_concurrent_snapshots", b"worker_max_concurrent_snapshots", "worker_timeout_ms", b"worker_timeout_ms"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "client_timeout_ms", + b"client_timeout_ms", + "deployment_mode", + b"deployment_mode", + "fault_tolerant_mode", + b"fault_tolerant_mode", + "gc_dynamic_sharding_jobs", + b"gc_dynamic_sharding_jobs", + "job_gc_check_interval_ms", + b"job_gc_check_interval_ms", + "job_gc_timeout_ms", + b"job_gc_timeout_ms", + "port", + b"port", + "protocol", + b"protocol", + "work_dir", + b"work_dir", + "worker_addresses", + b"worker_addresses", + "worker_max_concurrent_snapshots", + b"worker_max_concurrent_snapshots", + "worker_timeout_ms", + b"worker_timeout_ms", + ], + ) -> None: ... global___DispatcherConfig = DispatcherConfig @@ -212,6 +240,36 @@ class WorkerConfig(google.protobuf.message.Message): snapshot_max_chunk_size_bytes: builtins.int | None = ..., shutdown_quiet_period_ms: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["cross_trainer_cache_size_bytes", b"cross_trainer_cache_size_bytes", "data_transfer_address", b"data_transfer_address", "data_transfer_port", b"data_transfer_port", "data_transfer_protocol", b"data_transfer_protocol", "dispatcher_address", b"dispatcher_address", "dispatcher_timeout_ms", b"dispatcher_timeout_ms", "heartbeat_interval_ms", b"heartbeat_interval_ms", "port", b"port", "protocol", b"protocol", "shutdown_quiet_period_ms", b"shutdown_quiet_period_ms", "snapshot_max_chunk_size_bytes", b"snapshot_max_chunk_size_bytes", "worker_address", b"worker_address", "worker_tags", b"worker_tags"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "cross_trainer_cache_size_bytes", + b"cross_trainer_cache_size_bytes", + "data_transfer_address", + b"data_transfer_address", + "data_transfer_port", + b"data_transfer_port", + "data_transfer_protocol", + b"data_transfer_protocol", + "dispatcher_address", + b"dispatcher_address", + "dispatcher_timeout_ms", + b"dispatcher_timeout_ms", + "heartbeat_interval_ms", + b"heartbeat_interval_ms", + "port", + b"port", + "protocol", + b"protocol", + "shutdown_quiet_period_ms", + b"shutdown_quiet_period_ms", + "snapshot_max_chunk_size_bytes", + b"snapshot_max_chunk_size_bytes", + "worker_address", + b"worker_address", + "worker_tags", + b"worker_tags", + ], + ) -> None: ... global___WorkerConfig = WorkerConfig diff --git a/stubs/tensorflow/tensorflow/core/protobuf/snapshot_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/snapshot_pb2.pyi index daeb2da4ad65..10ff6f28e41f 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/snapshot_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/snapshot_pb2.pyi @@ -27,11 +27,13 @@ class SnapshotRecord(google.protobuf.message.Message): TENSOR_FIELD_NUMBER: builtins.int @property - def tensor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_pb2.TensorProto]: ... - def __init__( + def tensor( self, - *, - tensor: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ..., + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.tensor_pb2.TensorProto + ]: ... + def __init__( + self, *, tensor: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["tensor", b"tensor"]) -> None: ... 
@@ -64,7 +66,9 @@ class SnapshotMetadataRecord(google.protobuf.message.Message): """The number of elements in the snapshot.""" finalized: builtins.bool @property - def dtype(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.core.framework.types_pb2.DataType.ValueType]: + def dtype( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.core.framework.types_pb2.DataType.ValueType]: """A list of tensor dtype corresponding to each element of the snapshot.""" def __init__( @@ -78,7 +82,25 @@ class SnapshotMetadataRecord(google.protobuf.message.Message): num_elements: builtins.int | None = ..., finalized: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["creation_timestamp", b"creation_timestamp", "dtype", b"dtype", "finalized", b"finalized", "graph_hash", b"graph_hash", "num_elements", b"num_elements", "run_id", b"run_id", "version", b"version"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "creation_timestamp", + b"creation_timestamp", + "dtype", + b"dtype", + "finalized", + b"finalized", + "graph_hash", + b"graph_hash", + "num_elements", + b"num_elements", + "run_id", + b"run_id", + "version", + b"version", + ], + ) -> None: ... global___SnapshotMetadataRecord = SnapshotMetadataRecord @@ -101,7 +123,9 @@ class TensorMetadata(google.protobuf.message.Message): tensor_size_bytes: builtins.int | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["tensor_shape", b"tensor_shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["tensor_shape", b"tensor_shape", "tensor_size_bytes", b"tensor_size_bytes"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["tensor_shape", b"tensor_shape", "tensor_size_bytes", b"tensor_size_bytes"] + ) -> None: ... global___TensorMetadata = TensorMetadata @@ -114,11 +138,7 @@ class SnapshotTensorMetadata(google.protobuf.message.Message): TENSOR_METADATA_FIELD_NUMBER: builtins.int @property def tensor_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TensorMetadata]: ... - def __init__( - self, - *, - tensor_metadata: collections.abc.Iterable[global___TensorMetadata] | None = ..., - ) -> None: ... + def __init__(self, *, tensor_metadata: collections.abc.Iterable[global___TensorMetadata] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["tensor_metadata", b"tensor_metadata"]) -> None: ... global___SnapshotTensorMetadata = SnapshotTensorMetadata @@ -138,12 +158,7 @@ class DistributedSnapshotMetadata(google.protobuf.message.Message): `tsl::io::compression`. In particular, an empty string specifies not to compress. """ - def __init__( - self, - *, - element_spec: builtins.bytes | None = ..., - compression: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, element_spec: builtins.bytes | None = ..., compression: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["compression", b"compression", "element_spec", b"element_spec"]) -> None: ... 
global___DistributedSnapshotMetadata = DistributedSnapshotMetadata diff --git a/stubs/tensorflow/tensorflow/core/protobuf/struct_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/struct_pb2.pyi index 0081e3011610..3d3bdfcaf808 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/struct_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/struct_pb2.pyi @@ -151,9 +151,107 @@ class StructuredValue(google.protobuf.message.Message): tensor_value: tensorflow.core.framework.tensor_pb2.TensorProto | None = ..., numpy_value: tensorflow.core.framework.tensor_pb2.TensorProto | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["bool_value", b"bool_value", "bounded_tensor_spec_value", b"bounded_tensor_spec_value", "dict_value", b"dict_value", "float64_value", b"float64_value", "int64_value", b"int64_value", "kind", b"kind", "list_value", b"list_value", "named_tuple_value", b"named_tuple_value", "none_value", b"none_value", "numpy_value", b"numpy_value", "string_value", b"string_value", "tensor_dtype_value", b"tensor_dtype_value", "tensor_shape_value", b"tensor_shape_value", "tensor_spec_value", b"tensor_spec_value", "tensor_value", b"tensor_value", "tuple_value", b"tuple_value", "type_spec_value", b"type_spec_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bool_value", b"bool_value", "bounded_tensor_spec_value", b"bounded_tensor_spec_value", "dict_value", b"dict_value", "float64_value", b"float64_value", "int64_value", b"int64_value", "kind", b"kind", "list_value", b"list_value", "named_tuple_value", b"named_tuple_value", "none_value", b"none_value", "numpy_value", b"numpy_value", "string_value", b"string_value", "tensor_dtype_value", b"tensor_dtype_value", "tensor_shape_value", b"tensor_shape_value", "tensor_spec_value", b"tensor_spec_value", "tensor_value", b"tensor_value", "tuple_value", b"tuple_value", "type_spec_value", b"type_spec_value"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["kind", b"kind"]) -> typing.Literal["none_value", "float64_value", "int64_value", "string_value", "bool_value", "tensor_shape_value", "tensor_dtype_value", "tensor_spec_value", "type_spec_value", "bounded_tensor_spec_value", "list_value", "tuple_value", "dict_value", "named_tuple_value", "tensor_value", "numpy_value"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "bool_value", + b"bool_value", + "bounded_tensor_spec_value", + b"bounded_tensor_spec_value", + "dict_value", + b"dict_value", + "float64_value", + b"float64_value", + "int64_value", + b"int64_value", + "kind", + b"kind", + "list_value", + b"list_value", + "named_tuple_value", + b"named_tuple_value", + "none_value", + b"none_value", + "numpy_value", + b"numpy_value", + "string_value", + b"string_value", + "tensor_dtype_value", + b"tensor_dtype_value", + "tensor_shape_value", + b"tensor_shape_value", + "tensor_spec_value", + b"tensor_spec_value", + "tensor_value", + b"tensor_value", + "tuple_value", + b"tuple_value", + "type_spec_value", + b"type_spec_value", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "bool_value", + b"bool_value", + "bounded_tensor_spec_value", + b"bounded_tensor_spec_value", + "dict_value", + b"dict_value", + "float64_value", + b"float64_value", + "int64_value", + b"int64_value", + "kind", + b"kind", + "list_value", + b"list_value", + "named_tuple_value", + b"named_tuple_value", + "none_value", + b"none_value", + "numpy_value", + b"numpy_value", + "string_value", + b"string_value", + "tensor_dtype_value", + b"tensor_dtype_value", + "tensor_shape_value", + b"tensor_shape_value", + "tensor_spec_value", + b"tensor_spec_value", + "tensor_value", + b"tensor_value", + "tuple_value", + b"tuple_value", + "type_spec_value", + b"type_spec_value", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["kind", b"kind"] + ) -> ( + typing.Literal[ + "none_value", + "float64_value", + "int64_value", + "string_value", + "bool_value", + "tensor_shape_value", + "tensor_dtype_value", + "tensor_spec_value", + "type_spec_value", + "bounded_tensor_spec_value", + "list_value", + "tuple_value", + "dict_value", + "named_tuple_value", + "tensor_value", + "numpy_value", + ] + | None + ): ... global___StructuredValue = StructuredValue @@ -163,9 +261,7 @@ class NoneValue(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___NoneValue = NoneValue @@ -178,11 +274,7 @@ class ListValue(google.protobuf.message.Message): VALUES_FIELD_NUMBER: builtins.int @property def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StructuredValue]: ... - def __init__( - self, - *, - values: collections.abc.Iterable[global___StructuredValue] | None = ..., - ) -> None: ... + def __init__(self, *, values: collections.abc.Iterable[global___StructuredValue] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["values", b"values"]) -> None: ... global___ListValue = ListValue @@ -196,11 +288,7 @@ class TupleValue(google.protobuf.message.Message): VALUES_FIELD_NUMBER: builtins.int @property def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StructuredValue]: ... - def __init__( - self, - *, - values: collections.abc.Iterable[global___StructuredValue] | None = ..., - ) -> None: ... + def __init__(self, *, values: collections.abc.Iterable[global___StructuredValue] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["values", b"values"]) -> None: ... global___TupleValue = TupleValue @@ -222,23 +310,14 @@ class DictValue(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___StructuredValue: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___StructuredValue | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___StructuredValue | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... FIELDS_FIELD_NUMBER: builtins.int @property def fields(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___StructuredValue]: ... - def __init__( - self, - *, - fields: collections.abc.Mapping[builtins.str, global___StructuredValue] | None = ..., - ) -> None: ... 
+ def __init__(self, *, fields: collections.abc.Mapping[builtins.str, global___StructuredValue] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["fields", b"fields"]) -> None: ... global___DictValue = DictValue @@ -254,12 +333,7 @@ class PairValue(google.protobuf.message.Message): key: builtins.str @property def value(self) -> global___StructuredValue: ... - def __init__( - self, - *, - key: builtins.str | None = ..., - value: global___StructuredValue | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___StructuredValue | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -277,10 +351,7 @@ class NamedTupleValue(google.protobuf.message.Message): @property def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PairValue]: ... def __init__( - self, - *, - name: builtins.str | None = ..., - values: collections.abc.Iterable[global___PairValue] | None = ..., + self, *, name: builtins.str | None = ..., values: collections.abc.Iterable[global___PairValue] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["name", b"name", "values", b"values"]) -> None: ... @@ -339,8 +410,15 @@ class BoundedTensorSpecProto(google.protobuf.message.Message): minimum: tensorflow.core.framework.tensor_pb2.TensorProto | None = ..., maximum: tensorflow.core.framework.tensor_pb2.TensorProto | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["maximum", b"maximum", "minimum", b"minimum", "shape", b"shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["dtype", b"dtype", "maximum", b"maximum", "minimum", b"minimum", "name", b"name", "shape", b"shape"]) -> None: ... + def HasField( + self, field_name: typing.Literal["maximum", b"maximum", "minimum", b"minimum", "shape", b"shape"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "dtype", b"dtype", "maximum", b"maximum", "minimum", b"minimum", "name", b"name", "shape", b"shape" + ], + ) -> None: ... global___BoundedTensorSpecProto = BoundedTensorSpecProto @@ -354,7 +432,9 @@ class TypeSpecProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TypeSpecClassEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TypeSpecProto._TypeSpecClass.ValueType], builtins.type): + class _TypeSpecClassEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TypeSpecProto._TypeSpecClass.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNKNOWN: TypeSpecProto._TypeSpecClass.ValueType # 0 SPARSE_TENSOR_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 1 @@ -439,6 +519,18 @@ class TypeSpecProto(google.protobuf.message.Message): num_flat_components: builtins.int | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["type_state", b"type_state"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["num_flat_components", b"num_flat_components", "type_spec_class", b"type_spec_class", "type_spec_class_name", b"type_spec_class_name", "type_state", b"type_state"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "num_flat_components", + b"num_flat_components", + "type_spec_class", + b"type_spec_class", + "type_spec_class_name", + b"type_spec_class_name", + "type_state", + b"type_state", + ], + ) -> None: ... global___TypeSpecProto = TypeSpecProto diff --git a/stubs/tensorflow/tensorflow/core/protobuf/tensor_bundle_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/tensor_bundle_pb2.pyi index b1b95ae3fab4..58f69470b734 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/tensor_bundle_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/tensor_bundle_pb2.pyi @@ -43,7 +43,9 @@ class BundleHeaderProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _EndiannessEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[BundleHeaderProto._Endianness.ValueType], builtins.type): + class _EndiannessEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[BundleHeaderProto._Endianness.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor LITTLE: BundleHeaderProto._Endianness.ValueType # 0 BIG: BundleHeaderProto._Endianness.ValueType # 1 @@ -77,7 +79,9 @@ class BundleHeaderProto(google.protobuf.message.Message): version: tensorflow.core.framework.versions_pb2.VersionDef | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["version", b"version"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["endianness", b"endianness", "num_shards", b"num_shards", "version", b"version"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["endianness", b"endianness", "num_shards", b"num_shards", "version", b"version"] + ) -> None: ... global___BundleHeaderProto = BundleHeaderProto @@ -107,7 +111,11 @@ class BundleEntryProto(google.protobuf.message.Message): @property def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ... @property - def slices(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto]: + def slices( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto + ]: """Iff present, this entry represents a partitioned tensor. The previous fields are interpreted as follows: @@ -129,6 +137,24 @@ class BundleEntryProto(google.protobuf.message.Message): slices: collections.abc.Iterable[tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["shape", b"shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["crc32c", b"crc32c", "dtype", b"dtype", "offset", b"offset", "shape", b"shape", "shard_id", b"shard_id", "size", b"size", "slices", b"slices"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "crc32c", + b"crc32c", + "dtype", + b"dtype", + "offset", + b"offset", + "shape", + b"shape", + "shard_id", + b"shard_id", + "size", + b"size", + "slices", + b"slices", + ], + ) -> None: ... 
global___BundleEntryProto = BundleEntryProto diff --git a/stubs/tensorflow/tensorflow/core/protobuf/tensorflow_server_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/tensorflow_server_pb2.pyi index e5120748aeef..470738c92cb2 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/tensorflow_server_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/tensorflow_server_pb2.pyi @@ -89,7 +89,37 @@ class ServerDef(google.protobuf.message.Message): port: builtins.int | None = ..., cluster_device_filters: tensorflow.core.protobuf.device_filters_pb2.ClusterDeviceFilters | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["cluster", b"cluster", "cluster_device_filters", b"cluster_device_filters", "default_session_config", b"default_session_config"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["cluster", b"cluster", "cluster_device_filters", b"cluster_device_filters", "default_session_config", b"default_session_config", "job_name", b"job_name", "port", b"port", "protocol", b"protocol", "replica", b"replica", "task_index", b"task_index"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "cluster", + b"cluster", + "cluster_device_filters", + b"cluster_device_filters", + "default_session_config", + b"default_session_config", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "cluster", + b"cluster", + "cluster_device_filters", + b"cluster_device_filters", + "default_session_config", + b"default_session_config", + "job_name", + b"job_name", + "port", + b"port", + "protocol", + b"protocol", + "replica", + b"replica", + "task_index", + b"task_index", + ], + ) -> None: ... global___ServerDef = ServerDef diff --git a/stubs/tensorflow/tensorflow/core/protobuf/tpu/compilation_result_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/tpu/compilation_result_pb2.pyi index 6d04ebf01637..1220c78dc5f5 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/tpu/compilation_result_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/tpu/compilation_result_pb2.pyi @@ -35,7 +35,9 @@ class CompilationResultProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorCodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CompilationResultProto._ErrorCode.ValueType], builtins.type): + class _ErrorCodeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CompilationResultProto._ErrorCode.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNKNOWN: CompilationResultProto._ErrorCode.ValueType # 0 OUT_OF_MEMORY: CompilationResultProto._ErrorCode.ValueType # 1 @@ -53,7 +55,9 @@ class CompilationResultProto(google.protobuf.message.Message): status_error_message: builtins.str error_code: global___CompilationResultProto.ErrorCode.ValueType @property - def hlo_protos(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.service.hlo_pb2.HloProto]: + def hlo_protos( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.service.hlo_pb2.HloProto]: """HLO proto.""" def __init__( @@ -64,6 +68,18 @@ class CompilationResultProto(google.protobuf.message.Message): hlo_protos: collections.abc.Iterable[tensorflow.compiler.xla.service.hlo_pb2.HloProto] | None = ..., error_code: global___CompilationResultProto.ErrorCode.ValueType | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["error_code", b"error_code", "hlo_protos", b"hlo_protos", "status_code", b"status_code", "status_error_message", b"status_error_message"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "error_code", + b"error_code", + "hlo_protos", + b"hlo_protos", + "status_code", + b"status_code", + "status_error_message", + b"status_error_message", + ], + ) -> None: ... global___CompilationResultProto = CompilationResultProto diff --git a/stubs/tensorflow/tensorflow/core/protobuf/tpu/dynamic_padding_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/tpu/dynamic_padding_pb2.pyi index e8e639bc0b2d..d898f55864e3 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/tpu/dynamic_padding_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/tpu/dynamic_padding_pb2.pyi @@ -37,6 +37,11 @@ class PaddingMap(google.protobuf.message.Message): shape_index: builtins.int | None = ..., padding_arg_index: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["arg_index", b"arg_index", "padding_arg_index", b"padding_arg_index", "shape_index", b"shape_index"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "arg_index", b"arg_index", "padding_arg_index", b"padding_arg_index", "shape_index", b"shape_index" + ], + ) -> None: ... global___PaddingMap = PaddingMap diff --git a/stubs/tensorflow/tensorflow/core/protobuf/tpu/optimization_parameters_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/tpu/optimization_parameters_pb2.pyi index 5e08f7ce0e06..9b914134520d 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/tpu/optimization_parameters_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/tpu/optimization_parameters_pb2.pyi @@ -83,7 +83,10 @@ class SimulatedQuantization(google.protobuf.message.Message): num_buckets: builtins.int | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["clipping_limits", b"clipping_limits"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["clipping_limits", b"clipping_limits", "enabled", b"enabled", "num_buckets", b"num_buckets"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal["clipping_limits", b"clipping_limits", "enabled", b"enabled", "num_buckets", b"num_buckets"], + ) -> None: ... global___SimulatedQuantization = SimulatedQuantization @@ -126,11 +129,7 @@ class DynamicLearningRate(google.protobuf.message.Message): particular tag is specified by populating its corresponding index in the list of learning_rate scalars. """ - def __init__( - self, - *, - tag: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, tag: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["tag", b"tag"]) -> None: ... global___DynamicLearningRate = DynamicLearningRate @@ -146,15 +145,16 @@ class LearningRate(google.protobuf.message.Message): constant: builtins.float @property def dynamic(self) -> global___DynamicLearningRate: ... - def __init__( - self, - *, - constant: builtins.float | None = ..., - dynamic: global___DynamicLearningRate | None = ..., + def __init__(self, *, constant: builtins.float | None = ..., dynamic: global___DynamicLearningRate | None = ...) -> None: ... + def HasField( + self, field_name: typing.Literal["constant", b"constant", "dynamic", b"dynamic", "learning_rate", b"learning_rate"] + ) -> builtins.bool: ... 
+ def ClearField( + self, field_name: typing.Literal["constant", b"constant", "dynamic", b"dynamic", "learning_rate", b"learning_rate"] ) -> None: ... - def HasField(self, field_name: typing.Literal["constant", b"constant", "dynamic", b"dynamic", "learning_rate", b"learning_rate"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["constant", b"constant", "dynamic", b"dynamic", "learning_rate", b"learning_rate"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["learning_rate", b"learning_rate"]) -> typing.Literal["constant", "dynamic"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["learning_rate", b"learning_rate"] + ) -> typing.Literal["constant", "dynamic"] | None: ... global___LearningRate = LearningRate @@ -169,9 +169,7 @@ class AdagradParameters(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___AdagradParameters = AdagradParameters @@ -216,7 +214,21 @@ class AdagradMomentumParameters(google.protobuf.message.Message): beta2: builtins.float | None = ..., epsilon: builtins.float | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["beta2", b"beta2", "epsilon", b"epsilon", "exponent", b"exponent", "momentum", b"momentum", "use_nesterov", b"use_nesterov"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "beta2", + b"beta2", + "epsilon", + b"epsilon", + "exponent", + b"exponent", + "momentum", + b"momentum", + "use_nesterov", + b"use_nesterov", + ], + ) -> None: ... global___AdagradMomentumParameters = AdagradMomentumParameters @@ -249,7 +261,17 @@ class BoundedAdagradParameters(google.protobuf.message.Message): max_var_update: builtins.float | None = ..., max_accumulator: builtins.float | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["max_accumulator", b"max_accumulator", "max_var_update", b"max_var_update", "update_accumulator_first", b"update_accumulator_first"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "max_accumulator", + b"max_accumulator", + "max_var_update", + b"max_var_update", + "update_accumulator_first", + b"update_accumulator_first", + ], + ) -> None: ... global___BoundedAdagradParameters = BoundedAdagradParameters @@ -261,9 +283,7 @@ class StochasticGradientDescentParameters(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___StochasticGradientDescentParameters = StochasticGradientDescentParameters @@ -317,7 +337,23 @@ class FtrlParameters(google.protobuf.message.Message): multiply_linear_by_lr: builtins.bool | None = ..., allow_zero_accumulator: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["allow_zero_accumulator", b"allow_zero_accumulator", "beta", b"beta", "l1", b"l1", "l2", b"l2", "lr_power", b"lr_power", "multiply_linear_by_lr", b"multiply_linear_by_lr"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "allow_zero_accumulator", + b"allow_zero_accumulator", + "beta", + b"beta", + "l1", + b"l1", + "l2", + b"l2", + "lr_power", + b"lr_power", + "multiply_linear_by_lr", + b"multiply_linear_by_lr", + ], + ) -> None: ... 
global___FtrlParameters = FtrlParameters @@ -366,7 +402,21 @@ class AdamParameters(google.protobuf.message.Message): use_non_lazy_adam: builtins.bool | None = ..., use_sum_inside_sqrt: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["beta1", b"beta1", "beta2", b"beta2", "epsilon", b"epsilon", "use_non_lazy_adam", b"use_non_lazy_adam", "use_sum_inside_sqrt", b"use_sum_inside_sqrt"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "beta1", + b"beta1", + "beta2", + b"beta2", + "epsilon", + b"epsilon", + "use_non_lazy_adam", + b"use_non_lazy_adam", + "use_sum_inside_sqrt", + b"use_sum_inside_sqrt", + ], + ) -> None: ... global___AdamParameters = AdamParameters @@ -382,12 +432,7 @@ class MomentumParameters(google.protobuf.message.Message): USE_NESTEROV_FIELD_NUMBER: builtins.int momentum: builtins.float use_nesterov: builtins.bool - def __init__( - self, - *, - momentum: builtins.float | None = ..., - use_nesterov: builtins.bool | None = ..., - ) -> None: ... + def __init__(self, *, momentum: builtins.float | None = ..., use_nesterov: builtins.bool | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["momentum", b"momentum", "use_nesterov", b"use_nesterov"]) -> None: ... global___MomentumParameters = MomentumParameters @@ -416,7 +461,9 @@ class LionParameters(google.protobuf.message.Message): beta2: builtins.float | None = ..., use_non_lazy_lion: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["beta1", b"beta1", "beta2", b"beta2", "use_non_lazy_lion", b"use_non_lazy_lion"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["beta1", b"beta1", "beta2", b"beta2", "use_non_lazy_lion", b"use_non_lazy_lion"] + ) -> None: ... global___LionParameters = LionParameters @@ -435,11 +482,7 @@ class RmsPropParameters(google.protobuf.message.Message): momentum: builtins.float epsilon: builtins.float def __init__( - self, - *, - rho: builtins.float | None = ..., - momentum: builtins.float | None = ..., - epsilon: builtins.float | None = ..., + self, *, rho: builtins.float | None = ..., momentum: builtins.float | None = ..., epsilon: builtins.float | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["epsilon", b"epsilon", "momentum", b"momentum", "rho", b"rho"]) -> None: ... @@ -460,11 +503,7 @@ class CenteredRmsPropParameters(google.protobuf.message.Message): momentum: builtins.float epsilon: builtins.float def __init__( - self, - *, - rho: builtins.float | None = ..., - momentum: builtins.float | None = ..., - epsilon: builtins.float | None = ..., + self, *, rho: builtins.float | None = ..., momentum: builtins.float | None = ..., epsilon: builtins.float | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["epsilon", b"epsilon", "momentum", b"momentum", "rho", b"rho"]) -> None: ... @@ -516,7 +555,35 @@ class MdlAdagradLightParameters(google.protobuf.message.Message): hard_limit_min_benefit: builtins.bool | None = ..., mdl_regularize: builtins.bool | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["benefit_revisit_scale", b"benefit_revisit_scale", "hard_limit_min_benefit", b"hard_limit_min_benefit", "l2", b"l2", "lr_power", b"lr_power", "max_event_benefit", b"max_event_benefit", "max_total_benefit", b"max_total_benefit", "mdl_benefit_rampup_coeff", b"mdl_benefit_rampup_coeff", "mdl_hard_limit", b"mdl_hard_limit", "mdl_min_weight", b"mdl_min_weight", "mdl_mix_in_margin", b"mdl_mix_in_margin", "mdl_regularize", b"mdl_regularize", "min_servable_mdl_benefit", b"min_servable_mdl_benefit"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "benefit_revisit_scale", + b"benefit_revisit_scale", + "hard_limit_min_benefit", + b"hard_limit_min_benefit", + "l2", + b"l2", + "lr_power", + b"lr_power", + "max_event_benefit", + b"max_event_benefit", + "max_total_benefit", + b"max_total_benefit", + "mdl_benefit_rampup_coeff", + b"mdl_benefit_rampup_coeff", + "mdl_hard_limit", + b"mdl_hard_limit", + "mdl_min_weight", + b"mdl_min_weight", + "mdl_mix_in_margin", + b"mdl_mix_in_margin", + "mdl_regularize", + b"mdl_regularize", + "min_servable_mdl_benefit", + b"min_servable_mdl_benefit", + ], + ) -> None: ... global___MdlAdagradLightParameters = MdlAdagradLightParameters @@ -532,12 +599,7 @@ class AdadeltaParameters(google.protobuf.message.Message): EPSILON_FIELD_NUMBER: builtins.int rho: builtins.float epsilon: builtins.float - def __init__( - self, - *, - rho: builtins.float | None = ..., - epsilon: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, rho: builtins.float | None = ..., epsilon: builtins.float | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["epsilon", b"epsilon", "rho", b"rho"]) -> None: ... global___AdadeltaParameters = AdadeltaParameters @@ -554,12 +616,7 @@ class ProximalAdagradParameters(google.protobuf.message.Message): L2_FIELD_NUMBER: builtins.int l1: builtins.float l2: builtins.float - def __init__( - self, - *, - l1: builtins.float | None = ..., - l2: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, l1: builtins.float | None = ..., l2: builtins.float | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["l1", b"l1", "l2", b"l2"]) -> None: ... global___ProximalAdagradParameters = ProximalAdagradParameters @@ -589,11 +646,7 @@ class OnlineYogiParameters(google.protobuf.message.Message): beta2: builtins.float """\\beta_2 from Algorithm 2 in the paper.""" def __init__( - self, - *, - l1: builtins.float | None = ..., - l2: builtins.float | None = ..., - beta2: builtins.float | None = ..., + self, *, l1: builtins.float | None = ..., l2: builtins.float | None = ..., beta2: builtins.float | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["beta2", b"beta2", "l1", b"l1", "l2", b"l2"]) -> None: ... @@ -638,7 +691,9 @@ class ProximalYogiParameters(google.protobuf.message.Message): beta2: builtins.float | None = ..., epsilon: builtins.float | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["beta1", b"beta1", "beta2", b"beta2", "epsilon", b"epsilon", "l1", b"l1", "l2", b"l2"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["beta1", b"beta1", "beta2", b"beta2", "epsilon", b"epsilon", "l1", b"l1", "l2", b"l2"] + ) -> None: ... 
global___ProximalYogiParameters = ProximalYogiParameters @@ -696,7 +751,19 @@ class FrequencyEstimatorParameters(google.protobuf.message.Message): outlier_threshold: builtins.float | None = ..., weight_exponent: builtins.float | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["max_delta", b"max_delta", "outlier_threshold", b"outlier_threshold", "tau", b"tau", "weight_exponent", b"weight_exponent"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "max_delta", + b"max_delta", + "outlier_threshold", + b"outlier_threshold", + "tau", + b"tau", + "weight_exponent", + b"weight_exponent", + ], + ) -> None: ... global___FrequencyEstimatorParameters = FrequencyEstimatorParameters @@ -728,11 +795,7 @@ class UserDefinedProgramParameters(google.protobuf.message.Message): PROGRAM_FIELD_NUMBER: builtins.int @property def program(self) -> tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto: ... - def __init__( - self, - *, - program: tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto | None = ..., - ) -> None: ... + def __init__(self, *, program: tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["program", b"program"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["program", b"program"]) -> None: ... @@ -748,9 +811,7 @@ class AssignParameters(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___AssignParameters = AssignParameters @@ -768,7 +829,9 @@ class GradientAccumulationStatus(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GradientAccumulationStatus._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GradientAccumulationStatus._Status.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNSPECIFIED: GradientAccumulationStatus._Status.ValueType # 0 ENABLED: GradientAccumulationStatus._Status.ValueType # 1 @@ -781,9 +844,7 @@ class GradientAccumulationStatus(google.protobuf.message.Message): ENABLED: GradientAccumulationStatus.Status.ValueType # 1 DISABLED: GradientAccumulationStatus.Status.ValueType # 2 - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___GradientAccumulationStatus = GradientAccumulationStatus @@ -835,7 +896,9 @@ class LowDimensionalPackingStatus(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[LowDimensionalPackingStatus._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[LowDimensionalPackingStatus._Status.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNSPECIFIED: LowDimensionalPackingStatus._Status.ValueType # 0 ENABLED: LowDimensionalPackingStatus._Status.ValueType # 1 @@ -858,9 +921,7 @@ class LowDimensionalPackingStatus(google.protobuf.message.Message): ENABLED: LowDimensionalPackingStatus.Status.ValueType # 1 DISABLED: LowDimensionalPackingStatus.Status.ValueType # 2 - def __init__( - self, - ) -> None: ... 
+ def __init__(self) -> None: ... global___LowDimensionalPackingStatus = LowDimensionalPackingStatus @@ -876,7 +937,10 @@ class HotIdReplicationConfiguration(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HotIdReplicationConfiguration._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HotIdReplicationConfiguration._Status.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNSPECIFIED: HotIdReplicationConfiguration._Status.ValueType # 0 ENABLED: HotIdReplicationConfiguration._Status.ValueType # 1 @@ -897,11 +961,7 @@ class HotIdReplicationConfiguration(google.protobuf.message.Message): STATUS_FIELD_NUMBER: builtins.int status: global___HotIdReplicationConfiguration.Status.ValueType - def __init__( - self, - *, - status: global___HotIdReplicationConfiguration.Status.ValueType | None = ..., - ) -> None: ... + def __init__(self, *, status: global___HotIdReplicationConfiguration.Status.ValueType | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["status", b"status"]) -> None: ... global___HotIdReplicationConfiguration = HotIdReplicationConfiguration @@ -1056,9 +1116,145 @@ class OptimizationParameters(google.protobuf.message.Message): user_defined_program: global___UserDefinedProgramParameters | None = ..., assign: global___AssignParameters | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["adadelta", b"adadelta", "adagrad", b"adagrad", "adagrad_momentum", b"adagrad_momentum", "adam", b"adam", "assign", b"assign", "bounded_adagrad", b"bounded_adagrad", "centered_rms_prop", b"centered_rms_prop", "clipping_limits", b"clipping_limits", "frequency_estimator", b"frequency_estimator", "ftrl", b"ftrl", "gradient_clipping_limits", b"gradient_clipping_limits", "hot_id_replication_configuration", b"hot_id_replication_configuration", "learning_rate", b"learning_rate", "lion", b"lion", "mdl_adagrad_light", b"mdl_adagrad_light", "momentum", b"momentum", "online_yogi", b"online_yogi", "parameters", b"parameters", "proximal_adagrad", b"proximal_adagrad", "proximal_yogi", b"proximal_yogi", "rms_prop", b"rms_prop", "simulated_quantization", b"simulated_quantization", "stochastic_gradient_descent", b"stochastic_gradient_descent", "user_defined_program", b"user_defined_program"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["adadelta", b"adadelta", "adagrad", b"adagrad", "adagrad_momentum", b"adagrad_momentum", "adam", b"adam", "assign", b"assign", "bounded_adagrad", b"bounded_adagrad", "centered_rms_prop", b"centered_rms_prop", "clipping_limits", b"clipping_limits", "frequency_estimator", b"frequency_estimator", "ftrl", b"ftrl", "gradient_accumulation_status", b"gradient_accumulation_status", "gradient_clipping_limits", b"gradient_clipping_limits", "hot_id_replication_configuration", b"hot_id_replication_configuration", "learning_rate", b"learning_rate", "lion", b"lion", "low_dimensional_packing_status", b"low_dimensional_packing_status", "mdl_adagrad_light", b"mdl_adagrad_light", "momentum", b"momentum", "multiply_weight_decay_factor_by_learning_rate", b"multiply_weight_decay_factor_by_learning_rate", "online_yogi", b"online_yogi", "parameters", b"parameters", "proximal_adagrad", b"proximal_adagrad", "proximal_yogi", b"proximal_yogi", "rms_prop", b"rms_prop", "simulated_quantization", b"simulated_quantization", "stochastic_gradient_descent", b"stochastic_gradient_descent", "user_defined_program", b"user_defined_program", "weight_decay_factor", b"weight_decay_factor"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["parameters", b"parameters"]) -> typing.Literal["adagrad", "adagrad_momentum", "bounded_adagrad", "stochastic_gradient_descent", "ftrl", "adam", "momentum", "lion", "rms_prop", "centered_rms_prop", "mdl_adagrad_light", "adadelta", "proximal_adagrad", "online_yogi", "proximal_yogi", "frequency_estimator", "user_defined_program", "assign"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "adadelta", + b"adadelta", + "adagrad", + b"adagrad", + "adagrad_momentum", + b"adagrad_momentum", + "adam", + b"adam", + "assign", + b"assign", + "bounded_adagrad", + b"bounded_adagrad", + "centered_rms_prop", + b"centered_rms_prop", + "clipping_limits", + b"clipping_limits", + "frequency_estimator", + b"frequency_estimator", + "ftrl", + b"ftrl", + "gradient_clipping_limits", + b"gradient_clipping_limits", + "hot_id_replication_configuration", + b"hot_id_replication_configuration", + "learning_rate", + b"learning_rate", + "lion", + b"lion", + "mdl_adagrad_light", + b"mdl_adagrad_light", + "momentum", + b"momentum", + "online_yogi", + b"online_yogi", + "parameters", + b"parameters", + "proximal_adagrad", + b"proximal_adagrad", + "proximal_yogi", + b"proximal_yogi", + "rms_prop", + b"rms_prop", + "simulated_quantization", + b"simulated_quantization", + "stochastic_gradient_descent", + b"stochastic_gradient_descent", + "user_defined_program", + b"user_defined_program", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "adadelta", + b"adadelta", + "adagrad", + b"adagrad", + "adagrad_momentum", + b"adagrad_momentum", + "adam", + b"adam", + "assign", + b"assign", + "bounded_adagrad", + b"bounded_adagrad", + "centered_rms_prop", + b"centered_rms_prop", + "clipping_limits", + b"clipping_limits", + "frequency_estimator", + b"frequency_estimator", + "ftrl", + b"ftrl", + "gradient_accumulation_status", + b"gradient_accumulation_status", + "gradient_clipping_limits", + b"gradient_clipping_limits", + "hot_id_replication_configuration", + b"hot_id_replication_configuration", + "learning_rate", + b"learning_rate", + "lion", + b"lion", + "low_dimensional_packing_status", + b"low_dimensional_packing_status", + "mdl_adagrad_light", + b"mdl_adagrad_light", + "momentum", + b"momentum", + "multiply_weight_decay_factor_by_learning_rate", + b"multiply_weight_decay_factor_by_learning_rate", + "online_yogi", + b"online_yogi", + "parameters", + b"parameters", + "proximal_adagrad", + b"proximal_adagrad", + "proximal_yogi", + b"proximal_yogi", + "rms_prop", + b"rms_prop", + "simulated_quantization", + b"simulated_quantization", + "stochastic_gradient_descent", + b"stochastic_gradient_descent", + "user_defined_program", + b"user_defined_program", + "weight_decay_factor", + b"weight_decay_factor", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["parameters", b"parameters"] + ) -> ( + typing.Literal[ + "adagrad", + "adagrad_momentum", + "bounded_adagrad", + "stochastic_gradient_descent", + "ftrl", + "adam", + "momentum", + "lion", + "rms_prop", + "centered_rms_prop", + "mdl_adagrad_light", + "adadelta", + "proximal_adagrad", + "online_yogi", + "proximal_yogi", + "frequency_estimator", + "user_defined_program", + "assign", + ] + | None + ): ... global___OptimizationParameters = OptimizationParameters @@ -1079,9 +1275,7 @@ class StateVariableSpecification(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... @typing.final class FillWithConstant(google.protobuf.message.Message): @@ -1094,11 +1288,7 @@ class StateVariableSpecification(google.protobuf.message.Message): INITIAL_VALUE_FIELD_NUMBER: builtins.int initial_value: builtins.float - def __init__( - self, - *, - initial_value: builtins.float | None = ..., - ) -> None: ... + def __init__(self, *, initial_value: builtins.float | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["initial_value", b"initial_value"]) -> None: ... NAME_FIELD_NUMBER: builtins.int @@ -1117,8 +1307,20 @@ class StateVariableSpecification(google.protobuf.message.Message): user_defined: global___StateVariableSpecification.UserDefined | None = ..., fill_with_constant: global___StateVariableSpecification.FillWithConstant | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["fill_with_constant", b"fill_with_constant", "usage", b"usage", "user_defined", b"user_defined"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["fill_with_constant", b"fill_with_constant", "name", b"name", "usage", b"usage", "user_defined", b"user_defined"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["usage", b"usage"]) -> typing.Literal["user_defined", "fill_with_constant"] | None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "fill_with_constant", b"fill_with_constant", "usage", b"usage", "user_defined", b"user_defined" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "fill_with_constant", b"fill_with_constant", "name", b"name", "usage", b"usage", "user_defined", b"user_defined" + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["usage", b"usage"] + ) -> typing.Literal["user_defined", "fill_with_constant"] | None: ... global___StateVariableSpecification = StateVariableSpecification diff --git a/stubs/tensorflow/tensorflow/core/protobuf/tpu/topology_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/tpu/topology_pb2.pyi index d9d75f0b13da..574cd7a67e8e 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/tpu/topology_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/tpu/topology_pb2.pyi @@ -30,7 +30,9 @@ class TPUHardwareFeature(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _EmbeddingFeatureEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TPUHardwareFeature._EmbeddingFeature.ValueType], builtins.type): + class _EmbeddingFeatureEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TPUHardwareFeature._EmbeddingFeature.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNSUPPORTED: TPUHardwareFeature._EmbeddingFeature.ValueType # 0 """No embedding lookup accelerator available on the tpu.""" @@ -72,7 +74,12 @@ class TPUHardwareFeature(google.protobuf.message.Message): embedding_feature: global___TPUHardwareFeature.EmbeddingFeature.ValueType | None = ..., num_embedding_devices_per_chip: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["embedding_feature", b"embedding_feature", "num_embedding_devices_per_chip", b"num_embedding_devices_per_chip"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "embedding_feature", b"embedding_feature", "num_embedding_devices_per_chip", b"num_embedding_devices_per_chip" + ], + ) -> None: ... global___TPUHardwareFeature = TPUHardwareFeature @@ -123,6 +130,20 @@ class TopologyProto(google.protobuf.message.Message): tpu_hardware_feature: global___TPUHardwareFeature | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["tpu_hardware_feature", b"tpu_hardware_feature"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["device_coordinates", b"device_coordinates", "mesh_shape", b"mesh_shape", "num_tasks", b"num_tasks", "num_tpu_devices_per_task", b"num_tpu_devices_per_task", "tpu_hardware_feature", b"tpu_hardware_feature"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "device_coordinates", + b"device_coordinates", + "mesh_shape", + b"mesh_shape", + "num_tasks", + b"num_tasks", + "num_tpu_devices_per_task", + b"num_tpu_devices_per_task", + "tpu_hardware_feature", + b"tpu_hardware_feature", + ], + ) -> None: ... 
global___TopologyProto = TopologyProto diff --git a/stubs/tensorflow/tensorflow/core/protobuf/tpu/tpu_embedding_configuration_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/tpu/tpu_embedding_configuration_pb2.pyi index c33925975268..32d67050f571 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/tpu/tpu_embedding_configuration_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/tpu/tpu_embedding_configuration_pb2.pyi @@ -29,7 +29,9 @@ class TPUEmbeddingConfiguration(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TPUEmbeddingConfiguration._Mode.ValueType], builtins.type): + class _ModeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TPUEmbeddingConfiguration._Mode.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNSPECIFIED: TPUEmbeddingConfiguration._Mode.ValueType # 0 INFERENCE: TPUEmbeddingConfiguration._Mode.ValueType # 1 @@ -50,7 +52,10 @@ class TPUEmbeddingConfiguration(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ShardingStrategyEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TPUEmbeddingConfiguration._ShardingStrategy.ValueType], builtins.type): + class _ShardingStrategyEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TPUEmbeddingConfiguration._ShardingStrategy.ValueType], + builtins.type, + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DIV_DEFAULT: TPUEmbeddingConfiguration._ShardingStrategy.ValueType # 0 MOD: TPUEmbeddingConfiguration._ShardingStrategy.ValueType # 1 @@ -107,8 +112,24 @@ class TPUEmbeddingConfiguration(google.protobuf.message.Message): num_features: builtins.int | None = ..., optimization_parameters: tensorflow.core.protobuf.tpu.optimization_parameters_pb2.OptimizationParameters | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["optimization_parameters", b"optimization_parameters"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["dimension", b"dimension", "name", b"name", "num_features", b"num_features", "optimization_parameters", b"optimization_parameters", "vocabulary_size", b"vocabulary_size"]) -> None: ... + def HasField( + self, field_name: typing.Literal["optimization_parameters", b"optimization_parameters"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "dimension", + b"dimension", + "name", + b"name", + "num_features", + b"num_features", + "optimization_parameters", + b"optimization_parameters", + "vocabulary_size", + b"vocabulary_size", + ], + ) -> None: ... @typing.final class FeatureDescriptor(google.protobuf.message.Message): @@ -141,7 +162,9 @@ class TPUEmbeddingConfiguration(google.protobuf.message.Message): table_id: builtins.int | None = ..., input_shape: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["input_shape", b"input_shape", "name", b"name", "table_id", b"table_id"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["input_shape", b"input_shape", "name", b"name", "table_id", b"table_id"] + ) -> None: ... 
@typing.final class SpmdSharding(google.protobuf.message.Message): @@ -159,13 +182,10 @@ class TPUEmbeddingConfiguration(google.protobuf.message.Message): """Whether SPMD sharding is enabled.""" num_cores_per_replica: builtins.int """Number of cores per replica.""" - def __init__( - self, - *, - enabled: builtins.bool | None = ..., - num_cores_per_replica: builtins.int | None = ..., + def __init__(self, *, enabled: builtins.bool | None = ..., num_cores_per_replica: builtins.int | None = ...) -> None: ... + def ClearField( + self, field_name: typing.Literal["enabled", b"enabled", "num_cores_per_replica", b"num_cores_per_replica"] ) -> None: ... - def ClearField(self, field_name: typing.Literal["enabled", b"enabled", "num_cores_per_replica", b"num_cores_per_replica"]) -> None: ... TABLE_DESCRIPTOR_FIELD_NUMBER: builtins.int MODE_FIELD_NUMBER: builtins.int @@ -232,9 +252,17 @@ class TPUEmbeddingConfiguration(google.protobuf.message.Message): models to reuse embedding lookup statistics. """ @property - def table_descriptor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TPUEmbeddingConfiguration.TableDescriptor]: ... + def table_descriptor( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___TPUEmbeddingConfiguration.TableDescriptor + ]: ... @property - def feature_descriptor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TPUEmbeddingConfiguration.FeatureDescriptor]: + def feature_descriptor( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___TPUEmbeddingConfiguration.FeatureDescriptor + ]: """If the feature_descriptor field is populated, the model should NOT populate TableDescriptor.num_features and batch_size_per_tensor_core. These two fields will be auto-populated by the TPUEmbedding rewrite passes. @@ -257,7 +285,31 @@ class TPUEmbeddingConfiguration(google.protobuf.message.Message): spmd_sharding: global___TPUEmbeddingConfiguration.SpmdSharding | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["spmd_sharding", b"spmd_sharding"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["batch_size_per_tensor_core", b"batch_size_per_tensor_core", "feature_descriptor", b"feature_descriptor", "mode", b"mode", "num_hosts", b"num_hosts", "num_tensor_cores", b"num_tensor_cores", "pipeline_execution_with_tensor_core", b"pipeline_execution_with_tensor_core", "profile_data_directory", b"profile_data_directory", "sharding_strategy", b"sharding_strategy", "spmd_sharding", b"spmd_sharding", "table_descriptor", b"table_descriptor"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "batch_size_per_tensor_core", + b"batch_size_per_tensor_core", + "feature_descriptor", + b"feature_descriptor", + "mode", + b"mode", + "num_hosts", + b"num_hosts", + "num_tensor_cores", + b"num_tensor_cores", + "pipeline_execution_with_tensor_core", + b"pipeline_execution_with_tensor_core", + "profile_data_directory", + b"profile_data_directory", + "sharding_strategy", + b"sharding_strategy", + "spmd_sharding", + b"spmd_sharding", + "table_descriptor", + b"table_descriptor", + ], + ) -> None: ... global___TPUEmbeddingConfiguration = TPUEmbeddingConfiguration @@ -269,8 +321,6 @@ class TPUEmbeddingError(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... 
global___TPUEmbeddingError = TPUEmbeddingError diff --git a/stubs/tensorflow/tensorflow/core/protobuf/trackable_object_graph_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/trackable_object_graph_pb2.pyi index 63730b6360c6..f25a3bfda24f 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/trackable_object_graph_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/trackable_object_graph_pb2.pyi @@ -39,12 +39,7 @@ class TrackableObjectGraph(google.protobuf.message.Message): """ local_name: builtins.str """A user-provided name for the edge.""" - def __init__( - self, - *, - node_id: builtins.int | None = ..., - local_name: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, node_id: builtins.int | None = ..., local_name: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["local_name", b"local_name", "node_id", b"node_id"]) -> None: ... @typing.final @@ -74,7 +69,9 @@ class TrackableObjectGraph(google.protobuf.message.Message): full_name: builtins.str | None = ..., checkpoint_key: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["checkpoint_key", b"checkpoint_key", "full_name", b"full_name", "name", b"name"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["checkpoint_key", b"checkpoint_key", "full_name", b"full_name", "name", b"name"] + ) -> None: ... @typing.final class SlotVariableReference(google.protobuf.message.Message): @@ -100,7 +97,17 @@ class TrackableObjectGraph(google.protobuf.message.Message): slot_name: builtins.str | None = ..., slot_variable_node_id: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["original_variable_node_id", b"original_variable_node_id", "slot_name", b"slot_name", "slot_variable_node_id", b"slot_variable_node_id"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "original_variable_node_id", + b"original_variable_node_id", + "slot_name", + b"slot_name", + "slot_variable_node_id", + b"slot_variable_node_id", + ], + ) -> None: ... 
CHILDREN_FIELD_NUMBER: builtins.int ATTRIBUTES_FIELD_NUMBER: builtins.int @@ -108,15 +115,27 @@ class TrackableObjectGraph(google.protobuf.message.Message): REGISTERED_SAVER_FIELD_NUMBER: builtins.int HAS_CHECKPOINT_VALUES_FIELD_NUMBER: builtins.int @property - def children(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TrackableObjectGraph.TrackableObject.ObjectReference]: + def children( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___TrackableObjectGraph.TrackableObject.ObjectReference + ]: """Objects which this object depends on.""" @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TrackableObjectGraph.TrackableObject.SerializedTensor]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___TrackableObjectGraph.TrackableObject.SerializedTensor + ]: """Serialized data specific to this object.""" @property - def slot_variables(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TrackableObjectGraph.TrackableObject.SlotVariableReference]: + def slot_variables( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___TrackableObjectGraph.TrackableObject.SlotVariableReference + ]: """Slot variables owned by this object.""" @property @@ -138,20 +157,41 @@ class TrackableObjectGraph(google.protobuf.message.Message): *, children: collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject.ObjectReference] | None = ..., attributes: collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject.SerializedTensor] | None = ..., - slot_variables: collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject.SlotVariableReference] | None = ..., + slot_variables: ( + collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject.SlotVariableReference] | None + ) = ..., registered_saver: global___RegisteredSaver | None = ..., has_checkpoint_values: google.protobuf.wrappers_pb2.BoolValue | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["has_checkpoint_values", b"has_checkpoint_values", "registered_saver", b"registered_saver"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["attributes", b"attributes", "children", b"children", "has_checkpoint_values", b"has_checkpoint_values", "registered_saver", b"registered_saver", "slot_variables", b"slot_variables"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "has_checkpoint_values", b"has_checkpoint_values", "registered_saver", b"registered_saver" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "attributes", + b"attributes", + "children", + b"children", + "has_checkpoint_values", + b"has_checkpoint_values", + "registered_saver", + b"registered_saver", + "slot_variables", + b"slot_variables", + ], + ) -> None: ... NODES_FIELD_NUMBER: builtins.int @property - def nodes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TrackableObjectGraph.TrackableObject]: ... - def __init__( + def nodes( self, - *, - nodes: collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject] | None = ..., + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TrackableObjectGraph.TrackableObject]: ... 
+ def __init__( + self, *, nodes: collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["nodes", b"nodes"]) -> None: ... @@ -167,12 +207,7 @@ class RegisteredSaver(google.protobuf.message.Message): """The name of the registered saver/restore function.""" object_name: builtins.str """Unique auto-generated name of the object.""" - def __init__( - self, - *, - name: builtins.str | None = ..., - object_name: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, name: builtins.str | None = ..., object_name: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["name", b"name", "object_name", b"object_name"]) -> None: ... global___RegisteredSaver = RegisteredSaver diff --git a/stubs/tensorflow/tensorflow/core/protobuf/transport_options_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/transport_options_pb2.pyi index 26d7b1406dae..38596e5e8783 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/transport_options_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/transport_options_pb2.pyi @@ -22,11 +22,7 @@ class RecvBufRespExtra(google.protobuf.message.Message): TENSOR_CONTENT_FIELD_NUMBER: builtins.int @property def tensor_content(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ... - def __init__( - self, - *, - tensor_content: collections.abc.Iterable[builtins.bytes] | None = ..., - ) -> None: ... + def __init__(self, *, tensor_content: collections.abc.Iterable[builtins.bytes] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["tensor_content", b"tensor_content"]) -> None: ... global___RecvBufRespExtra = RecvBufRespExtra diff --git a/stubs/tensorflow/tensorflow/core/protobuf/verifier_config_pb2.pyi b/stubs/tensorflow/tensorflow/core/protobuf/verifier_config_pb2.pyi index 405ab99a2dd5..c65f7c9cdb24 100644 --- a/stubs/tensorflow/tensorflow/core/protobuf/verifier_config_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/protobuf/verifier_config_pb2.pyi @@ -28,7 +28,9 @@ class VerifierConfig(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ToggleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[VerifierConfig._Toggle.ValueType], builtins.type): + class _ToggleEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[VerifierConfig._Toggle.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT: VerifierConfig._Toggle.ValueType # 0 ON: VerifierConfig._Toggle.ValueType # 1 @@ -53,6 +55,11 @@ class VerifierConfig(google.protobuf.message.Message): verification_timeout_in_ms: builtins.int | None = ..., structure_verifier: global___VerifierConfig.Toggle.ValueType | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["structure_verifier", b"structure_verifier", "verification_timeout_in_ms", b"verification_timeout_in_ms"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "structure_verifier", b"structure_verifier", "verification_timeout_in_ms", b"verification_timeout_in_ms" + ], + ) -> None: ... 
global___VerifierConfig = VerifierConfig diff --git a/stubs/tensorflow/tensorflow/core/util/event_pb2.pyi b/stubs/tensorflow/tensorflow/core/util/event_pb2.pyi index 9d6a986624bc..7a8ac4afd856 100644 --- a/stubs/tensorflow/tensorflow/core/util/event_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/util/event_pb2.pyi @@ -25,7 +25,9 @@ class _WorkerHealth: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _WorkerHealthEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_WorkerHealth.ValueType], builtins.type): +class _WorkerHealthEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_WorkerHealth.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: _WorkerHealth.ValueType # 0 """By default a worker is healthy.""" @@ -53,7 +55,9 @@ class _WorkerShutdownMode: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _WorkerShutdownModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_WorkerShutdownMode.ValueType], builtins.type): +class _WorkerShutdownModeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_WorkerShutdownMode.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT: _WorkerShutdownMode.ValueType # 0 NOT_CONFIGURED: _WorkerShutdownMode.ValueType # 1 @@ -142,9 +146,64 @@ class Event(google.protobuf.message.Message): meta_graph_def: builtins.bytes | None = ..., source_metadata: global___SourceMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["file_version", b"file_version", "graph_def", b"graph_def", "log_message", b"log_message", "meta_graph_def", b"meta_graph_def", "session_log", b"session_log", "source_metadata", b"source_metadata", "summary", b"summary", "tagged_run_metadata", b"tagged_run_metadata", "what", b"what"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["file_version", b"file_version", "graph_def", b"graph_def", "log_message", b"log_message", "meta_graph_def", b"meta_graph_def", "session_log", b"session_log", "source_metadata", b"source_metadata", "step", b"step", "summary", b"summary", "tagged_run_metadata", b"tagged_run_metadata", "wall_time", b"wall_time", "what", b"what"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["what", b"what"]) -> typing.Literal["file_version", "graph_def", "summary", "log_message", "session_log", "tagged_run_metadata", "meta_graph_def"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "file_version", + b"file_version", + "graph_def", + b"graph_def", + "log_message", + b"log_message", + "meta_graph_def", + b"meta_graph_def", + "session_log", + b"session_log", + "source_metadata", + b"source_metadata", + "summary", + b"summary", + "tagged_run_metadata", + b"tagged_run_metadata", + "what", + b"what", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "file_version", + b"file_version", + "graph_def", + b"graph_def", + "log_message", + b"log_message", + "meta_graph_def", + b"meta_graph_def", + "session_log", + b"session_log", + "source_metadata", + b"source_metadata", + "step", + b"step", + "summary", + b"summary", + "tagged_run_metadata", + b"tagged_run_metadata", + "wall_time", + b"wall_time", + "what", + b"what", + ], + ) -> None: ... 
+ def WhichOneof( + self, oneof_group: typing.Literal["what", b"what"] + ) -> ( + typing.Literal[ + "file_version", "graph_def", "summary", "log_message", "session_log", "tagged_run_metadata", "meta_graph_def" + ] + | None + ): ... global___Event = Event @@ -159,11 +218,7 @@ class SourceMetadata(google.protobuf.message.Message): """Low level name of the summary writer, such as `tensorflow.core.util.events_writer`. """ - def __init__( - self, - *, - writer: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, writer: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["writer", b"writer"]) -> None: ... global___SourceMetadata = SourceMetadata @@ -182,7 +237,9 @@ class LogMessage(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _LevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[LogMessage._Level.ValueType], builtins.type): + class _LevelEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[LogMessage._Level.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNKNOWN: LogMessage._Level.ValueType # 0 DEBUGGING: LogMessage._Level.ValueType # 10 @@ -214,10 +271,7 @@ class LogMessage(google.protobuf.message.Message): level: global___LogMessage.Level.ValueType message: builtins.str def __init__( - self, - *, - level: global___LogMessage.Level.ValueType | None = ..., - message: builtins.str | None = ..., + self, *, level: global___LogMessage.Level.ValueType | None = ..., message: builtins.str | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["level", b"level", "message", b"message"]) -> None: ... @@ -233,7 +287,9 @@ class SessionLog(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _SessionStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[SessionLog._SessionStatus.ValueType], builtins.type): + class _SessionStatusEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[SessionLog._SessionStatus.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor STATUS_UNSPECIFIED: SessionLog._SessionStatus.ValueType # 0 START: SessionLog._SessionStatus.ValueType # 1 @@ -260,7 +316,9 @@ class SessionLog(google.protobuf.message.Message): checkpoint_path: builtins.str | None = ..., msg: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["checkpoint_path", b"checkpoint_path", "msg", b"msg", "status", b"status"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["checkpoint_path", b"checkpoint_path", "msg", b"msg", "status", b"status"] + ) -> None: ... global___SessionLog = SessionLog @@ -278,12 +336,7 @@ class TaggedRunMetadata(google.protobuf.message.Message): """Byte-encoded version of the `RunMetadata` proto in order to allow lazy deserialization. """ - def __init__( - self, - *, - tag: builtins.str | None = ..., - run_metadata: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, tag: builtins.str | None = ..., run_metadata: builtins.bytes | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["run_metadata", b"run_metadata", "tag", b"tag"]) -> None: ... 
global___TaggedRunMetadata = TaggedRunMetadata @@ -294,11 +347,7 @@ class WatchdogConfig(google.protobuf.message.Message): TIMEOUT_MS_FIELD_NUMBER: builtins.int timeout_ms: builtins.int - def __init__( - self, - *, - timeout_ms: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, timeout_ms: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["timeout_ms", b"timeout_ms"]) -> None: ... global___WatchdogConfig = WatchdogConfig @@ -309,11 +358,7 @@ class RequestedExitCode(google.protobuf.message.Message): EXIT_CODE_FIELD_NUMBER: builtins.int exit_code: builtins.int - def __init__( - self, - *, - exit_code: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, exit_code: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["exit_code", b"exit_code"]) -> None: ... global___RequestedExitCode = RequestedExitCode @@ -337,8 +382,15 @@ class WorkerHeartbeatRequest(google.protobuf.message.Message): watchdog_config: global___WatchdogConfig | None = ..., exit_code: global___RequestedExitCode | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["exit_code", b"exit_code", "watchdog_config", b"watchdog_config"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["exit_code", b"exit_code", "shutdown_mode", b"shutdown_mode", "watchdog_config", b"watchdog_config"]) -> None: ... + def HasField( + self, field_name: typing.Literal["exit_code", b"exit_code", "watchdog_config", b"watchdog_config"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "exit_code", b"exit_code", "shutdown_mode", b"shutdown_mode", "watchdog_config", b"watchdog_config" + ], + ) -> None: ... global___WorkerHeartbeatRequest = WorkerHeartbeatRequest @@ -360,6 +412,8 @@ class WorkerHeartbeatResponse(google.protobuf.message.Message): worker_log: collections.abc.Iterable[global___Event] | None = ..., hostname: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["health_status", b"health_status", "hostname", b"hostname", "worker_log", b"worker_log"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["health_status", b"health_status", "hostname", b"hostname", "worker_log", b"worker_log"] + ) -> None: ... global___WorkerHeartbeatResponse = WorkerHeartbeatResponse diff --git a/stubs/tensorflow/tensorflow/core/util/memmapped_file_system_pb2.pyi b/stubs/tensorflow/tensorflow/core/util/memmapped_file_system_pb2.pyi index 266b0ed6e5d3..c3b2b662d9d4 100644 --- a/stubs/tensorflow/tensorflow/core/util/memmapped_file_system_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/util/memmapped_file_system_pb2.pyi @@ -40,11 +40,7 @@ class MemmappedFileSystemDirectoryElement(google.protobuf.message.Message): name: builtins.str length: builtins.int def __init__( - self, - *, - offset: builtins.int | None = ..., - name: builtins.str | None = ..., - length: builtins.int | None = ..., + self, *, offset: builtins.int | None = ..., name: builtins.str | None = ..., length: builtins.int | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["length", b"length", "name", b"name", "offset", b"offset"]) -> None: ... @@ -58,11 +54,11 @@ class MemmappedFileSystemDirectory(google.protobuf.message.Message): ELEMENT_FIELD_NUMBER: builtins.int @property - def element(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MemmappedFileSystemDirectoryElement]: ... 
- def __init__( + def element( self, - *, - element: collections.abc.Iterable[global___MemmappedFileSystemDirectoryElement] | None = ..., + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MemmappedFileSystemDirectoryElement]: ... + def __init__( + self, *, element: collections.abc.Iterable[global___MemmappedFileSystemDirectoryElement] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["element", b"element"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/core/util/saved_tensor_slice_pb2.pyi b/stubs/tensorflow/tensorflow/core/util/saved_tensor_slice_pb2.pyi index 9b492465ac4f..03a7ef0e2e94 100644 --- a/stubs/tensorflow/tensorflow/core/util/saved_tensor_slice_pb2.pyi +++ b/stubs/tensorflow/tensorflow/core/util/saved_tensor_slice_pb2.pyi @@ -52,7 +52,11 @@ class SavedSliceMeta(google.protobuf.message.Message): """Shape of the tensor""" @property - def slice(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto]: + def slice( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto + ]: """Explicit list of slices saved in the checkpoint file.""" def __init__( @@ -64,7 +68,9 @@ class SavedSliceMeta(google.protobuf.message.Message): slice: collections.abc.Iterable[tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto] | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["shape", b"shape"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["name", b"name", "shape", b"shape", "slice", b"slice", "type", b"type"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["name", b"name", "shape", b"shape", "slice", b"slice", "type", b"type"] + ) -> None: ... global___SavedSliceMeta = SavedSliceMeta @@ -158,12 +164,7 @@ class SavedTensorSlices(google.protobuf.message.Message): def data(self) -> global___SavedSlice: """This exists in all but the first item of each checkpoint file.""" - def __init__( - self, - *, - meta: global___SavedTensorSliceMeta | None = ..., - data: global___SavedSlice | None = ..., - ) -> None: ... + def __init__(self, *, meta: global___SavedTensorSliceMeta | None = ..., data: global___SavedSlice | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["data", b"data", "meta", b"meta"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["data", b"data", "meta", b"meta"]) -> None: ... diff --git a/stubs/tensorflow/tensorflow/python/keras/protobuf/projector_config_pb2.pyi b/stubs/tensorflow/tensorflow/python/keras/protobuf/projector_config_pb2.pyi index 22be35d7d6bc..9141272b1c52 100644 --- a/stubs/tensorflow/tensorflow/python/keras/protobuf/projector_config_pb2.pyi +++ b/stubs/tensorflow/tensorflow/python/keras/protobuf/projector_config_pb2.pyi @@ -28,12 +28,11 @@ class SpriteMetadata(google.protobuf.message.Message): """[width, height] of a single image in the sprite.""" def __init__( - self, - *, - image_path: builtins.str | None = ..., - single_image_dim: collections.abc.Iterable[builtins.int] | None = ..., + self, *, image_path: builtins.str | None = ..., single_image_dim: collections.abc.Iterable[builtins.int] | None = ... + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["image_path", b"image_path", "single_image_dim", b"single_image_dim"] ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["image_path", b"image_path", "single_image_dim", b"single_image_dim"]) -> None: ... global___SpriteMetadata = SpriteMetadata @@ -73,7 +72,23 @@ class EmbeddingInfo(google.protobuf.message.Message): tensor_path: builtins.str | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["sprite", b"sprite"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bookmarks_path", b"bookmarks_path", "metadata_path", b"metadata_path", "sprite", b"sprite", "tensor_name", b"tensor_name", "tensor_path", b"tensor_path", "tensor_shape", b"tensor_shape"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bookmarks_path", + b"bookmarks_path", + "metadata_path", + b"metadata_path", + "sprite", + b"sprite", + "tensor_name", + b"tensor_name", + "tensor_path", + b"tensor_path", + "tensor_shape", + b"tensor_shape", + ], + ) -> None: ... global___EmbeddingInfo = EmbeddingInfo @@ -99,6 +114,16 @@ class ProjectorConfig(google.protobuf.message.Message): embeddings: collections.abc.Iterable[global___EmbeddingInfo] | None = ..., model_checkpoint_dir: builtins.str | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["embeddings", b"embeddings", "model_checkpoint_dir", b"model_checkpoint_dir", "model_checkpoint_path", b"model_checkpoint_path"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "embeddings", + b"embeddings", + "model_checkpoint_dir", + b"model_checkpoint_dir", + "model_checkpoint_path", + b"model_checkpoint_path", + ], + ) -> None: ... global___ProjectorConfig = ProjectorConfig diff --git a/stubs/tensorflow/tensorflow/python/keras/protobuf/saved_metadata_pb2.pyi b/stubs/tensorflow/tensorflow/python/keras/protobuf/saved_metadata_pb2.pyi index 0133517d273f..e4a13c9db33a 100644 --- a/stubs/tensorflow/tensorflow/python/keras/protobuf/saved_metadata_pb2.pyi +++ b/stubs/tensorflow/tensorflow/python/keras/protobuf/saved_metadata_pb2.pyi @@ -25,11 +25,7 @@ class SavedMetadata(google.protobuf.message.Message): Keras object is stored. """ - def __init__( - self, - *, - nodes: collections.abc.Iterable[global___SavedObject] | None = ..., - ) -> None: ... + def __init__(self, *, nodes: collections.abc.Iterable[global___SavedObject] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["nodes", b"nodes"]) -> None: ... global___SavedMetadata = SavedMetadata @@ -74,6 +70,20 @@ class SavedObject(google.protobuf.message.Message): version: tensorflow.python.keras.protobuf.versions_pb2.VersionDef | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["version", b"version"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["identifier", b"identifier", "metadata", b"metadata", "node_id", b"node_id", "node_path", b"node_path", "version", b"version"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "identifier", + b"identifier", + "metadata", + b"metadata", + "node_id", + b"node_id", + "node_path", + b"node_path", + "version", + b"version", + ], + ) -> None: ... 
global___SavedObject = SavedObject diff --git a/stubs/tensorflow/tensorflow/python/keras/protobuf/versions_pb2.pyi b/stubs/tensorflow/tensorflow/python/keras/protobuf/versions_pb2.pyi index 55d2e725bc0d..10936e5157fd 100644 --- a/stubs/tensorflow/tensorflow/python/keras/protobuf/versions_pb2.pyi +++ b/stubs/tensorflow/tensorflow/python/keras/protobuf/versions_pb2.pyi @@ -55,6 +55,9 @@ class VersionDef(google.protobuf.message.Message): min_consumer: builtins.int | None = ..., bad_consumers: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bad_consumers", b"bad_consumers", "min_consumer", b"min_consumer", "producer", b"producer"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal["bad_consumers", b"bad_consumers", "min_consumer", b"min_consumer", "producer", b"producer"], + ) -> None: ... global___VersionDef = VersionDef diff --git a/stubs/tensorflow/tensorflow/tsl/protobuf/coordination_config_pb2.pyi b/stubs/tensorflow/tensorflow/tsl/protobuf/coordination_config_pb2.pyi index a2eb29dda885..f784e1621046 100644 --- a/stubs/tensorflow/tensorflow/tsl/protobuf/coordination_config_pb2.pyi +++ b/stubs/tensorflow/tensorflow/tsl/protobuf/coordination_config_pb2.pyi @@ -25,12 +25,7 @@ class CoordinatedJob(google.protobuf.message.Message): NUM_TASKS_FIELD_NUMBER: builtins.int name: builtins.str num_tasks: builtins.int - def __init__( - self, - *, - name: builtins.str | None = ..., - num_tasks: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, name: builtins.str | None = ..., num_tasks: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["name", b"name", "num_tasks", b"num_tasks"]) -> None: ... global___CoordinatedJob = CoordinatedJob @@ -101,7 +96,9 @@ class CoordinationServiceConfig(google.protobuf.message.Message): propagation mechanism. """ @property - def coordinated_job_list(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CoordinatedJob]: ... + def coordinated_job_list( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CoordinatedJob]: ... @property def recoverable_jobs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """The list of jobs which are recoverable. If a task in this list fails, @@ -126,6 +123,34 @@ class CoordinationServiceConfig(google.protobuf.message.Message): force_disable: builtins.bool | None = ..., poll_for_error_from_service_at_startup: builtins.bool | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["agent_destruction_without_shutdown", b"agent_destruction_without_shutdown", "allow_new_incarnation_to_reconnect", b"allow_new_incarnation_to_reconnect", "cluster_register_timeout_in_ms", b"cluster_register_timeout_in_ms", "coordinated_job_list", b"coordinated_job_list", "enable_health_check", b"enable_health_check", "force_disable", b"force_disable", "heartbeat_timeout_in_ms", b"heartbeat_timeout_in_ms", "poll_for_error_from_service_at_startup", b"poll_for_error_from_service_at_startup", "recoverable_jobs", b"recoverable_jobs", "service_leader", b"service_leader", "service_type", b"service_type", "shutdown_barrier_timeout_in_ms", b"shutdown_barrier_timeout_in_ms"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "agent_destruction_without_shutdown", + b"agent_destruction_without_shutdown", + "allow_new_incarnation_to_reconnect", + b"allow_new_incarnation_to_reconnect", + "cluster_register_timeout_in_ms", + b"cluster_register_timeout_in_ms", + "coordinated_job_list", + b"coordinated_job_list", + "enable_health_check", + b"enable_health_check", + "force_disable", + b"force_disable", + "heartbeat_timeout_in_ms", + b"heartbeat_timeout_in_ms", + "poll_for_error_from_service_at_startup", + b"poll_for_error_from_service_at_startup", + "recoverable_jobs", + b"recoverable_jobs", + "service_leader", + b"service_leader", + "service_type", + b"service_type", + "shutdown_barrier_timeout_in_ms", + b"shutdown_barrier_timeout_in_ms", + ], + ) -> None: ... global___CoordinationServiceConfig = CoordinationServiceConfig diff --git a/stubs/tensorflow/tensorflow/tsl/protobuf/coordination_service_pb2.pyi b/stubs/tensorflow/tensorflow/tsl/protobuf/coordination_service_pb2.pyi index 82468c997f24..27721cfbefe4 100644 --- a/stubs/tensorflow/tensorflow/tsl/protobuf/coordination_service_pb2.pyi +++ b/stubs/tensorflow/tensorflow/tsl/protobuf/coordination_service_pb2.pyi @@ -25,7 +25,9 @@ class _CoordinatedTaskState: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _CoordinatedTaskStateEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CoordinatedTaskState.ValueType], builtins.type): +class _CoordinatedTaskStateEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CoordinatedTaskState.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor TASKSTATE_UNSPECIFIED: _CoordinatedTaskState.ValueType # 0 """TASKSTATE_UNSPECIFIED is an invalid state such that indicates a bug.""" @@ -63,12 +65,7 @@ class CoordinatedTask(google.protobuf.message.Message): TASK_ID_FIELD_NUMBER: builtins.int job_name: builtins.str task_id: builtins.int - def __init__( - self, - *, - job_name: builtins.str | None = ..., - task_id: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, job_name: builtins.str | None = ..., task_id: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["job_name", b"job_name", "task_id", b"task_id"]) -> None: ... global___CoordinatedTask = CoordinatedTask @@ -95,13 +92,12 @@ class CoordinationServiceError(google.protobuf.message.Message): """ def __init__( - self, - *, - is_reported_error: builtins.bool | None = ..., - source_task: global___CoordinatedTask | None = ..., + self, *, is_reported_error: builtins.bool | None = ..., source_task: global___CoordinatedTask | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["source_task", b"source_task"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["is_reported_error", b"is_reported_error", "source_task", b"source_task"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["is_reported_error", b"is_reported_error", "source_task", b"source_task"] + ) -> None: ... global___CoordinationServiceError = CoordinationServiceError @@ -131,7 +127,21 @@ class CoordinatedTaskStateInfo(google.protobuf.message.Message): error_payload: global___CoordinationServiceError | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["error_payload", b"error_payload", "task", b"task"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["error_code", b"error_code", "error_message", b"error_message", "error_payload", b"error_payload", "state", b"state", "task", b"task"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "error_code", + b"error_code", + "error_message", + b"error_message", + "error_payload", + b"error_payload", + "state", + b"state", + "task", + b"task", + ], + ) -> None: ... global___CoordinatedTaskStateInfo = CoordinatedTaskStateInfo @@ -144,11 +154,7 @@ class DeviceInfo(google.protobuf.message.Message): DEVICE_FIELD_NUMBER: builtins.int @property def device(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.any_pb2.Any]: ... - def __init__( - self, - *, - device: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ..., - ) -> None: ... + def __init__(self, *, device: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["device", b"device"]) -> None: ... global___DeviceInfo = DeviceInfo @@ -168,12 +174,7 @@ class RegisterTaskRequest(google.protobuf.message.Message): incarnation: builtins.int @property def source_task(self) -> global___CoordinatedTask: ... - def __init__( - self, - *, - incarnation: builtins.int | None = ..., - source_task: global___CoordinatedTask | None = ..., - ) -> None: ... + def __init__(self, *, incarnation: builtins.int | None = ..., source_task: global___CoordinatedTask | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["source_task", b"source_task"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["incarnation", b"incarnation", "source_task", b"source_task"]) -> None: ... @@ -185,11 +186,7 @@ class RegisterTaskResponse(google.protobuf.message.Message): LEADER_INCARNATION_FIELD_NUMBER: builtins.int leader_incarnation: builtins.int - def __init__( - self, - *, - leader_incarnation: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, leader_incarnation: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["leader_incarnation", b"leader_incarnation"]) -> None: ... global___RegisterTaskResponse = RegisterTaskResponse @@ -205,12 +202,7 @@ class HeartbeatRequest(google.protobuf.message.Message): incarnation: builtins.int @property def source_task(self) -> global___CoordinatedTask: ... - def __init__( - self, - *, - incarnation: builtins.int | None = ..., - source_task: global___CoordinatedTask | None = ..., - ) -> None: ... + def __init__(self, *, incarnation: builtins.int | None = ..., source_task: global___CoordinatedTask | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["source_task", b"source_task"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["incarnation", b"incarnation", "source_task", b"source_task"]) -> None: ... @@ -225,11 +217,7 @@ class HeartbeatResponse(google.protobuf.message.Message): """If there are failures in cluster, use additional metadata in response to broadcast error code and message to other tasks. """ - def __init__( - self, - *, - leader_incarnation: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, leader_incarnation: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["leader_incarnation", b"leader_incarnation"]) -> None: ... 
global___HeartbeatResponse = HeartbeatResponse @@ -241,11 +229,7 @@ class PollForErrorRequest(google.protobuf.message.Message): SOURCE_TASK_FIELD_NUMBER: builtins.int @property def source_task(self) -> global___CoordinatedTask: ... - def __init__( - self, - *, - source_task: global___CoordinatedTask | None = ..., - ) -> None: ... + def __init__(self, *, source_task: global___CoordinatedTask | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["source_task", b"source_task"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["source_task", b"source_task"]) -> None: ... @@ -255,9 +239,7 @@ global___PollForErrorRequest = PollForErrorRequest class PollForErrorResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___PollForErrorResponse = PollForErrorResponse @@ -276,12 +258,11 @@ class WaitForAllTasksRequest(google.protobuf.message.Message): """All local device attributes on the request sender;""" def __init__( - self, - *, - source_task: global___CoordinatedTask | None = ..., - device_info: global___DeviceInfo | None = ..., + self, *, source_task: global___CoordinatedTask | None = ..., device_info: global___DeviceInfo | None = ... ) -> None: ... - def HasField(self, field_name: typing.Literal["device_info", b"device_info", "source_task", b"source_task"]) -> builtins.bool: ... + def HasField( + self, field_name: typing.Literal["device_info", b"device_info", "source_task", b"source_task"] + ) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["device_info", b"device_info", "source_task", b"source_task"]) -> None: ... global___WaitForAllTasksRequest = WaitForAllTasksRequest @@ -298,13 +279,12 @@ class WaitForAllTasksResponse(google.protobuf.message.Message): """All devices in the cluster.""" def __init__( - self, - *, - leader_incarnation: builtins.int | None = ..., - device_info: global___DeviceInfo | None = ..., + self, *, leader_incarnation: builtins.int | None = ..., device_info: global___DeviceInfo | None = ... ) -> None: ... def HasField(self, field_name: typing.Literal["device_info", b"device_info"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["device_info", b"device_info", "leader_incarnation", b"leader_incarnation"]) -> None: ... + def ClearField( + self, field_name: typing.Literal["device_info", b"device_info", "leader_incarnation", b"leader_incarnation"] + ) -> None: ... global___WaitForAllTasksResponse = WaitForAllTasksResponse @@ -317,11 +297,7 @@ class ShutdownTaskRequest(google.protobuf.message.Message): SOURCE_TASK_FIELD_NUMBER: builtins.int @property def source_task(self) -> global___CoordinatedTask: ... - def __init__( - self, - *, - source_task: global___CoordinatedTask | None = ..., - ) -> None: ... + def __init__(self, *, source_task: global___CoordinatedTask | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["source_task", b"source_task"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["source_task", b"source_task"]) -> None: ... @@ -331,9 +307,7 @@ global___ShutdownTaskRequest = ShutdownTaskRequest class ShutdownTaskResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... 
global___ShutdownTaskResponse = ShutdownTaskResponse @@ -346,11 +320,7 @@ class ResetTaskRequest(google.protobuf.message.Message): SOURCE_TASK_FIELD_NUMBER: builtins.int @property def source_task(self) -> global___CoordinatedTask: ... - def __init__( - self, - *, - source_task: global___CoordinatedTask | None = ..., - ) -> None: ... + def __init__(self, *, source_task: global___CoordinatedTask | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["source_task", b"source_task"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["source_task", b"source_task"]) -> None: ... @@ -360,9 +330,7 @@ global___ResetTaskRequest = ResetTaskRequest class ResetTaskResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ResetTaskResponse = ResetTaskResponse @@ -387,7 +355,12 @@ class ReportErrorToTaskRequest(google.protobuf.message.Message): error_payload: global___CoordinationServiceError | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["error_payload", b"error_payload"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["error_code", b"error_code", "error_message", b"error_message", "error_payload", b"error_payload"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "error_code", b"error_code", "error_message", b"error_message", "error_payload", b"error_payload" + ], + ) -> None: ... global___ReportErrorToTaskRequest = ReportErrorToTaskRequest @@ -395,9 +368,7 @@ global___ReportErrorToTaskRequest = ReportErrorToTaskRequest class ReportErrorToTaskResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ReportErrorToTaskResponse = ReportErrorToTaskResponse @@ -422,7 +393,12 @@ class ReportErrorToServiceRequest(google.protobuf.message.Message): error_origin: global___CoordinatedTask | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["error_origin", b"error_origin"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["error_code", b"error_code", "error_message", b"error_message", "error_origin", b"error_origin"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "error_code", b"error_code", "error_message", b"error_message", "error_origin", b"error_origin" + ], + ) -> None: ... global___ReportErrorToServiceRequest = ReportErrorToServiceRequest @@ -430,9 +406,7 @@ global___ReportErrorToServiceRequest = ReportErrorToServiceRequest class ReportErrorToServiceResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___ReportErrorToServiceResponse = ReportErrorToServiceResponse @@ -445,11 +419,7 @@ class GetTaskStateRequest(google.protobuf.message.Message): SOURCE_TASK_FIELD_NUMBER: builtins.int @property def source_task(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CoordinatedTask]: ... - def __init__( - self, - *, - source_task: collections.abc.Iterable[global___CoordinatedTask] | None = ..., - ) -> None: ... + def __init__(self, *, source_task: collections.abc.Iterable[global___CoordinatedTask] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["source_task", b"source_task"]) -> None: ... 
global___GetTaskStateRequest = GetTaskStateRequest @@ -460,12 +430,10 @@ class GetTaskStateResponse(google.protobuf.message.Message): TASK_STATE_FIELD_NUMBER: builtins.int @property - def task_state(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CoordinatedTaskStateInfo]: ... - def __init__( + def task_state( self, - *, - task_state: collections.abc.Iterable[global___CoordinatedTaskStateInfo] | None = ..., - ) -> None: ... + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CoordinatedTaskStateInfo]: ... + def __init__(self, *, task_state: collections.abc.Iterable[global___CoordinatedTaskStateInfo] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["task_state", b"task_state"]) -> None: ... global___GetTaskStateResponse = GetTaskStateResponse @@ -483,12 +451,7 @@ class KeyValueEntry(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.bytes - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.bytes | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... global___KeyValueEntry = KeyValueEntry @@ -504,12 +467,7 @@ class InsertKeyValueRequest(google.protobuf.message.Message): allow_overwrite: builtins.bool @property def kv(self) -> global___KeyValueEntry: ... - def __init__( - self, - *, - kv: global___KeyValueEntry | None = ..., - allow_overwrite: builtins.bool | None = ..., - ) -> None: ... + def __init__(self, *, kv: global___KeyValueEntry | None = ..., allow_overwrite: builtins.bool | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["kv", b"kv"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["allow_overwrite", b"allow_overwrite", "kv", b"kv"]) -> None: ... @@ -519,9 +477,7 @@ global___InsertKeyValueRequest = InsertKeyValueRequest class InsertKeyValueResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___InsertKeyValueResponse = InsertKeyValueResponse @@ -533,11 +489,7 @@ class GetKeyValueRequest(google.protobuf.message.Message): KEY_FIELD_NUMBER: builtins.int key: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key"]) -> None: ... global___GetKeyValueRequest = GetKeyValueRequest @@ -549,11 +501,7 @@ class GetKeyValueResponse(google.protobuf.message.Message): KV_FIELD_NUMBER: builtins.int @property def kv(self) -> global___KeyValueEntry: ... - def __init__( - self, - *, - kv: global___KeyValueEntry | None = ..., - ) -> None: ... + def __init__(self, *, kv: global___KeyValueEntry | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["kv", b"kv"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["kv", b"kv"]) -> None: ... @@ -565,11 +513,7 @@ class TryGetKeyValueRequest(google.protobuf.message.Message): KEY_FIELD_NUMBER: builtins.int key: builtins.str - def __init__( - self, - *, - key: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key"]) -> None: ... 
global___TryGetKeyValueRequest = TryGetKeyValueRequest @@ -581,11 +525,7 @@ class TryGetKeyValueResponse(google.protobuf.message.Message): KV_FIELD_NUMBER: builtins.int @property def kv(self) -> global___KeyValueEntry: ... - def __init__( - self, - *, - kv: global___KeyValueEntry | None = ..., - ) -> None: ... + def __init__(self, *, kv: global___KeyValueEntry | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["kv", b"kv"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["kv", b"kv"]) -> None: ... @@ -597,11 +537,7 @@ class GetKeyValueDirRequest(google.protobuf.message.Message): DIRECTORY_KEY_FIELD_NUMBER: builtins.int directory_key: builtins.str - def __init__( - self, - *, - directory_key: builtins.str | None = ..., - ) -> None: ... + def __init__(self, *, directory_key: builtins.str | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["directory_key", b"directory_key"]) -> None: ... global___GetKeyValueDirRequest = GetKeyValueDirRequest @@ -616,10 +552,7 @@ class GetKeyValueDirResponse(google.protobuf.message.Message): @property def kv(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValueEntry]: ... def __init__( - self, - *, - directory_key: builtins.str | None = ..., - kv: collections.abc.Iterable[global___KeyValueEntry] | None = ..., + self, *, directory_key: builtins.str | None = ..., kv: collections.abc.Iterable[global___KeyValueEntry] | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["directory_key", b"directory_key", "kv", b"kv"]) -> None: ... @@ -637,12 +570,7 @@ class DeleteKeyValueRequest(google.protobuf.message.Message): IS_DIRECTORY_FIELD_NUMBER: builtins.int key: builtins.str is_directory: builtins.bool - def __init__( - self, - *, - key: builtins.str | None = ..., - is_directory: builtins.bool | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., is_directory: builtins.bool | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["is_directory", b"is_directory", "key", b"key"]) -> None: ... global___DeleteKeyValueRequest = DeleteKeyValueRequest @@ -651,9 +579,7 @@ global___DeleteKeyValueRequest = DeleteKeyValueRequest class DeleteKeyValueResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___DeleteKeyValueResponse = DeleteKeyValueResponse @@ -688,7 +614,19 @@ class BarrierRequest(google.protobuf.message.Message): source_task: global___CoordinatedTask | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["source_task", b"source_task"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["barrier_id", b"barrier_id", "barrier_timeout_in_ms", b"barrier_timeout_in_ms", "source_task", b"source_task", "tasks", b"tasks"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "barrier_id", + b"barrier_id", + "barrier_timeout_in_ms", + b"barrier_timeout_in_ms", + "source_task", + b"source_task", + "tasks", + b"tasks", + ], + ) -> None: ... global___BarrierRequest = BarrierRequest @@ -696,9 +634,7 @@ global___BarrierRequest = BarrierRequest class BarrierResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... 
global___BarrierResponse = BarrierResponse @@ -715,12 +651,7 @@ class CancelBarrierRequest(google.protobuf.message.Message): def source_task(self) -> global___CoordinatedTask: """Task that is making the request.""" - def __init__( - self, - *, - barrier_id: builtins.str | None = ..., - source_task: global___CoordinatedTask | None = ..., - ) -> None: ... + def __init__(self, *, barrier_id: builtins.str | None = ..., source_task: global___CoordinatedTask | None = ...) -> None: ... def HasField(self, field_name: typing.Literal["source_task", b"source_task"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["barrier_id", b"barrier_id", "source_task", b"source_task"]) -> None: ... @@ -730,8 +661,6 @@ global___CancelBarrierRequest = CancelBarrierRequest class CancelBarrierResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___CancelBarrierResponse = CancelBarrierResponse diff --git a/stubs/tensorflow/tensorflow/tsl/protobuf/distributed_runtime_payloads_pb2.pyi b/stubs/tensorflow/tensorflow/tsl/protobuf/distributed_runtime_payloads_pb2.pyi index 57202db5c159..54b11ce7c741 100644 --- a/stubs/tensorflow/tensorflow/tsl/protobuf/distributed_runtime_payloads_pb2.pyi +++ b/stubs/tensorflow/tensorflow/tsl/protobuf/distributed_runtime_payloads_pb2.pyi @@ -30,22 +30,13 @@ class GrpcPayloadContainer(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str value: builtins.bytes - def __init__( - self, - *, - key: builtins.str | None = ..., - value: builtins.bytes | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.str | None = ..., value: builtins.bytes | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... PAYLOADS_FIELD_NUMBER: builtins.int @property def payloads(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.bytes]: ... - def __init__( - self, - *, - payloads: collections.abc.Mapping[builtins.str, builtins.bytes] | None = ..., - ) -> None: ... + def __init__(self, *, payloads: collections.abc.Mapping[builtins.str, builtins.bytes] | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["payloads", b"payloads"]) -> None: ... global___GrpcPayloadContainer = GrpcPayloadContainer @@ -59,9 +50,7 @@ class GrpcPayloadsLost(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... global___GrpcPayloadsLost = GrpcPayloadsLost @@ -75,8 +64,6 @@ class WorkerPossiblyRestarted(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - def __init__( - self, - ) -> None: ... + def __init__(self) -> None: ... 
global___WorkerPossiblyRestarted = WorkerPossiblyRestarted diff --git a/stubs/tensorflow/tensorflow/tsl/protobuf/dnn_pb2.pyi b/stubs/tensorflow/tensorflow/tsl/protobuf/dnn_pb2.pyi index 929709311d04..c8d947994d74 100644 --- a/stubs/tensorflow/tensorflow/tsl/protobuf/dnn_pb2.pyi +++ b/stubs/tensorflow/tensorflow/tsl/protobuf/dnn_pb2.pyi @@ -63,7 +63,9 @@ class _DataLayout: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _DataLayoutEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataLayout.ValueType], builtins.type): +class _DataLayoutEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataLayout.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor kYXDepthBatch: _DataLayout.ValueType # 0 """Naming convention: @@ -121,7 +123,9 @@ class _FilterLayout: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _FilterLayoutEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FilterLayout.ValueType], builtins.type): +class _FilterLayoutEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FilterLayout.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor kOutputInputYX: _FilterLayout.ValueType # 0 """Naming convention: @@ -175,7 +179,9 @@ class _ActivationMode: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _ActivationModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ActivationMode.ValueType], builtins.type): +class _ActivationModeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ActivationMode.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor kNone: _ActivationMode.ValueType # 0 kSigmoid: _ActivationMode.ValueType # 1 @@ -229,7 +235,9 @@ class _ConvolutionMode: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _ConvolutionModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ConvolutionMode.ValueType], builtins.type): +class _ConvolutionModeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ConvolutionMode.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor CROSS_CORRELATION: _ConvolutionMode.ValueType # 0 CONVOLUTION: _ConvolutionMode.ValueType # 1 @@ -248,7 +256,9 @@ class _ConvolutionKind: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _ConvolutionKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ConvolutionKind.ValueType], builtins.type): +class _ConvolutionKindEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ConvolutionKind.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor INVALID: _ConvolutionKind.ValueType # 0 FORWARD: _ConvolutionKind.ValueType # 1 @@ -289,7 +299,9 @@ class _FusedMHAKind: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _FusedMHAKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FusedMHAKind.ValueType], builtins.type): +class _FusedMHAKindEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FusedMHAKind.ValueType], builtins.type +): DESCRIPTOR: 
google.protobuf.descriptor.EnumDescriptor BMM1_OUTPUT_UNKNOWN: _FusedMHAKind.ValueType # 0 BMM1_OUTPUT_INPUT_TYPE: _FusedMHAKind.ValueType # 1 @@ -307,7 +319,9 @@ class _FMHAMaskKind: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _FMHAMaskKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FMHAMaskKind.ValueType], builtins.type): +class _FMHAMaskKindEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FMHAMaskKind.ValueType], builtins.type +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor NO_MASK: _FMHAMaskKind.ValueType # 0 PADDING: _FMHAMaskKind.ValueType # 1 @@ -348,9 +362,30 @@ class TensorDescriptorProto(google.protobuf.message.Message): data_layout: global___DataLayout.ValueType | None = ..., filter_layout: global___FilterLayout.ValueType | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["data_layout", b"data_layout", "filter_layout", b"filter_layout", "layout_oneof", b"layout_oneof"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["data_layout", b"data_layout", "data_type", b"data_type", "dimensions", b"dimensions", "filter_layout", b"filter_layout", "layout_oneof", b"layout_oneof"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["layout_oneof", b"layout_oneof"]) -> typing.Literal["data_layout", "filter_layout"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "data_layout", b"data_layout", "filter_layout", b"filter_layout", "layout_oneof", b"layout_oneof" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "data_layout", + b"data_layout", + "data_type", + b"data_type", + "dimensions", + b"dimensions", + "filter_layout", + b"filter_layout", + "layout_oneof", + b"layout_oneof", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["layout_oneof", b"layout_oneof"] + ) -> typing.Literal["data_layout", "filter_layout"] | None: ... global___TensorDescriptorProto = TensorDescriptorProto @@ -364,7 +399,9 @@ class AlgorithmProto(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _MathTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AlgorithmProto._MathType.ValueType], builtins.type): + class _MathTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AlgorithmProto._MathType.ValueType], builtins.type + ): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT_MATH: AlgorithmProto._MathType.ValueType # 0 TENSOR_OP_MATH: AlgorithmProto._MathType.ValueType # 1 @@ -387,12 +424,7 @@ class AlgorithmProto(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.int value: builtins.int - def __init__( - self, - *, - key: builtins.int | None = ..., - value: builtins.int | None = ..., - ) -> None: ... + def __init__(self, *, key: builtins.int | None = ..., value: builtins.int | None = ...) -> None: ... def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... ALGO_ID_FIELD_NUMBER: builtins.int @@ -433,7 +465,21 @@ class AlgorithmProto(google.protobuf.message.Message): workspace_size: google.protobuf.wrappers_pb2.UInt64Value | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["workspace_size", b"workspace_size"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["algo_id", b"algo_id", "is_cudnn_frontend", b"is_cudnn_frontend", "math_type", b"math_type", "tuning_knobs", b"tuning_knobs", "workspace_size", b"workspace_size"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "algo_id", + b"algo_id", + "is_cudnn_frontend", + b"is_cudnn_frontend", + "math_type", + b"math_type", + "tuning_knobs", + b"tuning_knobs", + "workspace_size", + b"workspace_size", + ], + ) -> None: ... global___AlgorithmProto = AlgorithmProto @@ -461,14 +507,52 @@ class AlgorithmConfigProto(google.protobuf.message.Message): algorithm_no_scratch: global___AlgorithmProto | None = ..., scratch_size: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["algorithm", b"algorithm", "algorithm_no_scratch", b"algorithm_no_scratch", "optional_algorithm", b"optional_algorithm", "optional_algorithm_no_scratch", b"optional_algorithm_no_scratch", "optional_scratch_size", b"optional_scratch_size", "scratch_size", b"scratch_size"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["algorithm", b"algorithm", "algorithm_no_scratch", b"algorithm_no_scratch", "optional_algorithm", b"optional_algorithm", "optional_algorithm_no_scratch", b"optional_algorithm_no_scratch", "optional_scratch_size", b"optional_scratch_size", "scratch_size", b"scratch_size"]) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "algorithm", + b"algorithm", + "algorithm_no_scratch", + b"algorithm_no_scratch", + "optional_algorithm", + b"optional_algorithm", + "optional_algorithm_no_scratch", + b"optional_algorithm_no_scratch", + "optional_scratch_size", + b"optional_scratch_size", + "scratch_size", + b"scratch_size", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "algorithm", + b"algorithm", + "algorithm_no_scratch", + b"algorithm_no_scratch", + "optional_algorithm", + b"optional_algorithm", + "optional_algorithm_no_scratch", + b"optional_algorithm_no_scratch", + "optional_scratch_size", + b"optional_scratch_size", + "scratch_size", + b"scratch_size", + ], + ) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_algorithm", b"optional_algorithm"]) -> typing.Literal["algorithm"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_algorithm", b"optional_algorithm"] + ) -> typing.Literal["algorithm"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_algorithm_no_scratch", b"optional_algorithm_no_scratch"]) -> typing.Literal["algorithm_no_scratch"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_algorithm_no_scratch", b"optional_algorithm_no_scratch"] + ) -> typing.Literal["algorithm_no_scratch"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["optional_scratch_size", b"optional_scratch_size"]) -> typing.Literal["scratch_size"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_scratch_size", b"optional_scratch_size"] + ) -> typing.Literal["scratch_size"] | None: ... global___AlgorithmConfigProto = AlgorithmConfigProto @@ -512,6 +596,24 @@ class ConvolutionDescriptorProto(google.protobuf.message.Message): convolution_mode: global___ConvolutionMode.ValueType | None = ..., name: builtins.str | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["compute_mode", b"compute_mode", "convolution_mode", b"convolution_mode", "dilations", b"dilations", "group_count", b"group_count", "name", b"name", "paddings", b"paddings", "strides", b"strides"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "compute_mode", + b"compute_mode", + "convolution_mode", + b"convolution_mode", + "dilations", + b"dilations", + "group_count", + b"group_count", + "name", + b"name", + "paddings", + b"paddings", + "strides", + b"strides", + ], + ) -> None: ... global___ConvolutionDescriptorProto = ConvolutionDescriptorProto diff --git a/stubs/tensorflow/tensorflow/tsl/protobuf/histogram_pb2.pyi b/stubs/tensorflow/tensorflow/tsl/protobuf/histogram_pb2.pyi index a33728dc47e4..986ebfcdc44f 100644 --- a/stubs/tensorflow/tensorflow/tsl/protobuf/histogram_pb2.pyi +++ b/stubs/tensorflow/tensorflow/tsl/protobuf/histogram_pb2.pyi @@ -55,6 +55,24 @@ class HistogramProto(google.protobuf.message.Message): bucket_limit: collections.abc.Iterable[builtins.float] | None = ..., bucket: collections.abc.Iterable[builtins.float] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bucket", b"bucket", "bucket_limit", b"bucket_limit", "max", b"max", "min", b"min", "num", b"num", "sum", b"sum", "sum_squares", b"sum_squares"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bucket", + b"bucket", + "bucket_limit", + b"bucket_limit", + "max", + b"max", + "min", + b"min", + "num", + b"num", + "sum", + b"sum", + "sum_squares", + b"sum_squares", + ], + ) -> None: ... global___HistogramProto = HistogramProto diff --git a/stubs/tensorflow/tensorflow/tsl/protobuf/rpc_options_pb2.pyi b/stubs/tensorflow/tensorflow/tsl/protobuf/rpc_options_pb2.pyi index 6e08a2bf99ef..062ea4bd4610 100644 --- a/stubs/tensorflow/tensorflow/tsl/protobuf/rpc_options_pb2.pyi +++ b/stubs/tensorflow/tensorflow/tsl/protobuf/rpc_options_pb2.pyi @@ -64,6 +64,22 @@ class RPCOptions(google.protobuf.message.Message): disable_session_connection_sharing: builtins.bool | None = ..., num_channels_per_target: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["cache_rpc_response", b"cache_rpc_response", "compression_algorithm", b"compression_algorithm", "compression_level", b"compression_level", "disable_session_connection_sharing", b"disable_session_connection_sharing", "num_channels_per_target", b"num_channels_per_target", "use_rpc_for_inprocess_master", b"use_rpc_for_inprocess_master"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "cache_rpc_response", + b"cache_rpc_response", + "compression_algorithm", + b"compression_algorithm", + "compression_level", + b"compression_level", + "disable_session_connection_sharing", + b"disable_session_connection_sharing", + "num_channels_per_target", + b"num_channels_per_target", + "use_rpc_for_inprocess_master", + b"use_rpc_for_inprocess_master", + ], + ) -> None: ... 
global___RPCOptions = RPCOptions diff --git a/stubs/tensorflow/tensorflow/tsl/protobuf/status_pb2.pyi b/stubs/tensorflow/tensorflow/tsl/protobuf/status_pb2.pyi index 77e0f5b7bf39..b2df45b32086 100644 --- a/stubs/tensorflow/tensorflow/tsl/protobuf/status_pb2.pyi +++ b/stubs/tensorflow/tensorflow/tsl/protobuf/status_pb2.pyi @@ -27,10 +27,7 @@ class StatusProto(google.protobuf.message.Message): message: builtins.str """Detail error message.""" def __init__( - self, - *, - code: tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType | None = ..., - message: builtins.str | None = ..., + self, *, code: tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType | None = ..., message: builtins.str | None = ... ) -> None: ... def ClearField(self, field_name: typing.Literal["code", b"code", "message", b"message"]) -> None: ... From 6567e0a119322fffd1aa4e1782f1e1673d7f8e10 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 5 May 2025 09:24:42 -0400 Subject: [PATCH 286/388] Openpyxl: type cell values (#13929) --- stubs/openpyxl/openpyxl/cell/__init__.pyi | 6 ++- stubs/openpyxl/openpyxl/cell/cell.pyi | 13 +++--- stubs/openpyxl/openpyxl/cell/read_only.pyi | 6 +-- .../openpyxl/chart/series_factory.pyi | 8 ++-- .../openpyxl/worksheet/_read_only.pyi | 4 +- .../openpyxl/openpyxl/worksheet/worksheet.pyi | 45 +++++++++---------- 6 files changed, 40 insertions(+), 42 deletions(-) diff --git a/stubs/openpyxl/openpyxl/cell/__init__.pyi b/stubs/openpyxl/openpyxl/cell/__init__.pyi index 54641897e49a..5501a28fd6eb 100644 --- a/stubs/openpyxl/openpyxl/cell/__init__.pyi +++ b/stubs/openpyxl/openpyxl/cell/__init__.pyi @@ -10,7 +10,7 @@ from .cell import Cell as Cell, MergedCell as MergedCell, WriteOnlyCell as Write from .read_only import ReadOnlyCell as ReadOnlyCell _TimeTypes: TypeAlias = datetime | date | time | timedelta -_CellValue: TypeAlias = ( # noqa: Y047 # Used in other modules +_CellGetValue: TypeAlias = ( # noqa: Y047 # Used in other modules # if numpy is installed also numpy bool and number types bool | float @@ -20,7 +20,9 @@ _CellValue: TypeAlias = ( # noqa: Y047 # Used in other modules | _TimeTypes | DataTableFormula | ArrayFormula + | None ) -_AnyCellValue: TypeAlias = Any # Any of _CellValue # noqa: Y047 # Used in other modules +_AnyCellValue: TypeAlias = Any # AnyOf _CellGetValue # noqa: Y047 # Used in other modules +_CellSetValue: TypeAlias = _CellGetValue | bytes # noqa: Y047 # Used in other modules _CellOrMergedCell: TypeAlias = Cell | MergedCell # noqa: Y047 # Used in other modules diff --git a/stubs/openpyxl/openpyxl/cell/cell.pyi b/stubs/openpyxl/openpyxl/cell/cell.pyi index dea71e03d8a8..fb5cbb54545c 100644 --- a/stubs/openpyxl/openpyxl/cell/cell.pyi +++ b/stubs/openpyxl/openpyxl/cell/cell.pyi @@ -3,7 +3,7 @@ from datetime import datetime from re import Pattern from typing import Final, Literal, overload -from openpyxl.cell import _CellOrMergedCell, _CellValue, _TimeTypes +from openpyxl.cell import _CellGetValue, _CellOrMergedCell, _CellSetValue, _TimeTypes from openpyxl.comments.comments import Comment from openpyxl.compat.numbers import NUMERIC_TYPES as NUMERIC_TYPES # cell numeric types from openpyxl.styles.cell_style import StyleArray @@ -45,7 +45,7 @@ class Cell(StyleableObject): worksheet: _WorkbookChild | ReadOnlyWorksheet, row: int, column: int, - value: str | float | datetime | None = None, + value: _CellSetValue = None, style_array: StyleArray | None = None, ) -> None: ... 
@property @@ -64,11 +64,11 @@ class Cell(StyleableObject): def check_string(self, value: str | ReadableBuffer) -> str: ... def check_error(self, value: object) -> str: ... @property - def value(self) -> _CellValue | None: ... + def value(self) -> _CellGetValue: ... @value.setter - def value(self, value: _CellValue | bytes | None) -> None: ... + def value(self, value: _CellSetValue) -> None: ... @property - def internal_value(self) -> _CellValue | None: ... + def internal_value(self) -> _CellGetValue: ... @property def hyperlink(self) -> Hyperlink | None: ... @hyperlink.setter @@ -94,6 +94,7 @@ class MergedCell(StyleableObject): # https://github.com/python/mypy/issues/6700 @property def coordinate(self) -> str: ... - value: str | float | int | datetime | None + # The value of a MergedCell is always None. + value: None def WriteOnlyCell(ws: _WorkbookChild | ReadOnlyWorksheet, value: str | float | datetime | None = None) -> Cell: ... diff --git a/stubs/openpyxl/openpyxl/cell/read_only.pyi b/stubs/openpyxl/openpyxl/cell/read_only.pyi index 170624f03860..b8dd7cf3226e 100644 --- a/stubs/openpyxl/openpyxl/cell/read_only.pyi +++ b/stubs/openpyxl/openpyxl/cell/read_only.pyi @@ -1,7 +1,7 @@ from _typeshed import Incomplete from typing import Final -from openpyxl.cell import _CellValue +from openpyxl.cell import _CellGetValue from openpyxl.styles.alignment import Alignment from openpyxl.styles.borders import Border from openpyxl.styles.cell_style import StyleArray @@ -51,9 +51,9 @@ class ReadOnlyCell: @property def is_date(self) -> bool: ... @property - def internal_value(self) -> _CellValue | None: ... + def internal_value(self) -> _CellGetValue: ... @property - def value(self) -> _CellValue | None: ... + def value(self) -> _CellGetValue: ... @value.setter def value(self, value: None) -> None: ... diff --git a/stubs/openpyxl/openpyxl/chart/series_factory.pyi b/stubs/openpyxl/openpyxl/chart/series_factory.pyi index 93bc957ff400..9faddc33a413 100644 --- a/stubs/openpyxl/openpyxl/chart/series_factory.pyi +++ b/stubs/openpyxl/openpyxl/chart/series_factory.pyi @@ -1,9 +1,9 @@ -from _typeshed import Incomplete +from .reference import Reference def SeriesFactory( - values, - xvalues: Incomplete | None = None, - zvalues: Incomplete | None = None, + values: Reference | str, + xvalues: Reference | str | None = None, + zvalues: Reference | str | None = None, title: object = None, title_from_data: bool = False, ): ... diff --git a/stubs/openpyxl/openpyxl/worksheet/_read_only.pyi b/stubs/openpyxl/openpyxl/worksheet/_read_only.pyi index 0579afc366a4..fbd34ec813a3 100644 --- a/stubs/openpyxl/openpyxl/worksheet/_read_only.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/_read_only.pyi @@ -2,7 +2,7 @@ from _typeshed import SupportsGetItem from collections.abc import Generator from openpyxl import _VisibilityType -from openpyxl.cell import _CellOrMergedCell, _CellValue +from openpyxl.cell import _CellGetValue, _CellOrMergedCell from openpyxl.utils.cell import _RangeBoundariesTuple from openpyxl.workbook.workbook import Workbook from openpyxl.worksheet.worksheet import Worksheet @@ -15,7 +15,7 @@ class ReadOnlyWorksheet: # Same as Worksheet.values # https://github.com/python/mypy/issues/6700 @property - def values(self) -> Generator[tuple[_CellValue, ...], None, None]: ... + def values(self) -> Generator[tuple[_CellGetValue, ...], None, None]: ... 
# Same as Worksheet.rows # https://github.com/python/mypy/issues/6700 @property diff --git a/stubs/openpyxl/openpyxl/worksheet/worksheet.pyi b/stubs/openpyxl/openpyxl/worksheet/worksheet.pyi index 6b40a85a7be6..87819280da3a 100644 --- a/stubs/openpyxl/openpyxl/worksheet/worksheet.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/worksheet.pyi @@ -1,12 +1,11 @@ from _typeshed import ConvertibleToInt, Incomplete from collections.abc import Generator, Iterable, Iterator -from datetime import datetime from types import GeneratorType from typing import Any, Final, Literal, NoReturn, overload from typing_extensions import deprecated from openpyxl import _Decodable, _VisibilityType -from openpyxl.cell import _CellOrMergedCell, _CellValue +from openpyxl.cell import _AnyCellValue, _CellGetValue, _CellOrMergedCell, _CellSetValue from openpyxl.cell.cell import Cell from openpyxl.chart._chart import ChartBase from openpyxl.drawing.image import Image @@ -87,7 +86,11 @@ class Worksheet(_WorkbookChild): def freeze_panes(self) -> str | None: ... @freeze_panes.setter def freeze_panes(self, topLeftCell: str | Cell | None = None) -> None: ... - def cell(self, row: int, column: int, value: _CellValue | None = None) -> _CellOrMergedCell: ... + # A MergedCell value should be kept to None + @overload + def cell(self, row: int, column: int, value: None = None) -> _CellOrMergedCell: ... + @overload + def cell(self, row: int, column: int, value: _CellSetValue = None) -> Cell: ... # An int is necessarily a row selection @overload def __getitem__(self, key: int) -> tuple[_CellOrMergedCell, ...]: ... @@ -99,7 +102,7 @@ class Worksheet(_WorkbookChild): def __getitem__( self, key: str ) -> Any: ... # AnyOf[_CellOrMergedCell, tuple[_CellOrMergedCell, ...], tuple[tuple[_CellOrMergedCell, ...], ...]] - def __setitem__(self, key: str, value: _CellValue) -> None: ... + def __setitem__(self, key: str, value: _CellSetValue) -> None: ... def __iter__(self) -> Iterator[tuple[_CellOrMergedCell, ...]]: ... def __delitem__(self, key: str) -> None: ... @property @@ -116,7 +119,7 @@ class Worksheet(_WorkbookChild): @overload def iter_rows( self, min_row: int | None, max_row: int | None, min_col: int | None, max_col: int | None, values_only: Literal[True] - ) -> Generator[tuple[str | float | datetime | None, ...], None, None]: ... + ) -> Generator[tuple[_CellGetValue, ...], None, None]: ... @overload def iter_rows( self, @@ -126,7 +129,7 @@ class Worksheet(_WorkbookChild): max_col: int | None = None, *, values_only: Literal[True], - ) -> Generator[tuple[str | float | datetime | None, ...], None, None]: ... + ) -> Generator[tuple[_CellGetValue, ...], None, None]: ... @overload def iter_rows( self, @@ -139,9 +142,7 @@ class Worksheet(_WorkbookChild): @overload def iter_rows( self, min_row: int | None, max_row: int | None, min_col: int | None, max_col: int | None, values_only: bool - ) -> ( - Generator[tuple[_CellOrMergedCell, ...], None, None] | Generator[tuple[str | float | datetime | None, ...], None, None] - ): ... + ) -> Generator[tuple[_CellOrMergedCell, ...], None, None] | Generator[tuple[_CellGetValue, ...], None, None]: ... @overload def iter_rows( self, @@ -151,17 +152,15 @@ class Worksheet(_WorkbookChild): max_col: int | None = None, *, values_only: bool, - ) -> ( - Generator[tuple[_CellOrMergedCell, ...], None, None] | Generator[tuple[str | float | datetime | None, ...], None, None] - ): ... + ) -> Generator[tuple[_CellOrMergedCell, ...], None, None] | Generator[tuple[_CellGetValue, ...], None, None]: ... 
@property def rows(self) -> Generator[tuple[_CellOrMergedCell, ...], None, None]: ... @property - def values(self) -> Generator[tuple[_CellValue | None, ...]]: ... + def values(self) -> Generator[tuple[_CellGetValue, ...]]: ... @overload def iter_cols( self, min_col: int | None, max_col: int | None, min_row: int | None, max_row: int | None, values_only: Literal[True] - ) -> Generator[tuple[str | float | datetime | None, ...], None, None]: ... + ) -> Generator[tuple[_CellGetValue, ...], None, None]: ... @overload def iter_cols( self, @@ -171,7 +170,7 @@ class Worksheet(_WorkbookChild): max_row: int | None = None, *, values_only: Literal[True], - ) -> Generator[tuple[str | float | datetime | None, ...], None, None]: ... + ) -> Generator[tuple[_CellGetValue, ...], None, None]: ... @overload def iter_cols( self, @@ -184,9 +183,7 @@ class Worksheet(_WorkbookChild): @overload def iter_cols( self, min_col: int | None, max_col: int | None, min_row: int | None, max_row: int | None, values_only: bool - ) -> ( - Generator[tuple[_CellOrMergedCell, ...], None, None] | Generator[tuple[str | float | datetime | None, ...], None, None] - ): ... + ) -> Generator[tuple[_CellOrMergedCell, ...], None, None] | Generator[tuple[_CellGetValue, ...], None, None]: ... @overload def iter_cols( self, @@ -196,9 +193,7 @@ class Worksheet(_WorkbookChild): max_row: int | None = None, *, values_only: bool, - ) -> ( - Generator[tuple[_CellOrMergedCell, ...], None, None] | Generator[tuple[str | float | datetime | None, ...], None, None] - ): ... + ) -> Generator[tuple[_CellOrMergedCell, ...], None, None] | Generator[tuple[_CellGetValue, ...], None, None]: ... @property def columns(self) -> Generator[tuple[_CellOrMergedCell, ...], None, None]: ... @property @@ -252,11 +247,11 @@ class Worksheet(_WorkbookChild): def append( self, iterable: ( - list[Any] # lists are invariant, but any subtype or union will do - | tuple[_CellOrMergedCell | str | float | datetime | None, ...] + list[_AnyCellValue] + | tuple[_CellOrMergedCell | _CellGetValue, ...] | range - | GeneratorType[_CellOrMergedCell | str | float | datetime | None, object, object] - | dict[int | str, str | float | datetime | None] + | GeneratorType[_CellOrMergedCell | _CellGetValue, object, object] + | dict[int | str, _AnyCellValue] ), ) -> None: ... def insert_rows(self, idx: int, amount: int = 1) -> None: ... From 795f3513aae762debb129030ac3bd0ed3eb2a68d Mon Sep 17 00:00:00 2001 From: Micah Denbraver Date: Mon, 5 May 2025 06:45:25 -0700 Subject: [PATCH 287/388] grpc `_CallIterator` is an iterator (#13925) --- stubs/grpcio/@tests/test_cases/check_grpc.py | 1 + stubs/grpcio/grpc/__init__.pyi | 1 + 2 files changed, 2 insertions(+) diff --git a/stubs/grpcio/@tests/test_cases/check_grpc.py b/stubs/grpcio/@tests/test_cases/check_grpc.py index e413ba974930..4ff365685afc 100644 --- a/stubs/grpcio/@tests/test_cases/check_grpc.py +++ b/stubs/grpcio/@tests/test_cases/check_grpc.py @@ -44,3 +44,4 @@ call_iter = cast(grpc._CallIterator[str], None) for call in call_iter: assert_type(call, str) +assert_type(next(call_iter), str) diff --git a/stubs/grpcio/grpc/__init__.pyi b/stubs/grpcio/grpc/__init__.pyi index caf5e9884257..47de021b06b5 100644 --- a/stubs/grpcio/grpc/__init__.pyi +++ b/stubs/grpcio/grpc/__init__.pyi @@ -426,6 +426,7 @@ class UnaryUnaryClientInterceptor(abc.ABC, Generic[_TRequest, _TResponse]): @type_check_only class _CallIterator(Call, Generic[_TResponse], metaclass=abc.ABCMeta): def __iter__(self) -> Iterator[_TResponse]: ... + def __next__(self) -> _TResponse: ... 
class UnaryStreamClientInterceptor(abc.ABC, Generic[_TRequest, _TResponse]): @abc.abstractmethod From 77a16a8df0df257d87a47269d3777956d060bcf6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 5 May 2025 15:46:49 +0200 Subject: [PATCH 288/388] [stubsabot] Bump zstd to 1.5.6.8 (#13917) --- stubs/zstd/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/zstd/METADATA.toml b/stubs/zstd/METADATA.toml index 76bf2c1ba8b6..1013123647dd 100644 --- a/stubs/zstd/METADATA.toml +++ b/stubs/zstd/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.5.6.7" +version = "1.5.6.8" upstream_repository = "https://github.com/sergey-dryabzhinsky/python-zstd" From dabb1f1329ead5b1d134049bdbe04961defa35cf Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 5 May 2025 18:15:03 +0400 Subject: [PATCH 289/388] Bump qrcode to 8.2.* (#13918) --- stubs/qrcode/METADATA.toml | 2 +- stubs/qrcode/qrcode/image/styledpil.pyi | 2 +- stubs/qrcode/qrcode/image/styles/colormasks.pyi | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/stubs/qrcode/METADATA.toml b/stubs/qrcode/METADATA.toml index 152711e7a255..d08ef719fb7a 100644 --- a/stubs/qrcode/METADATA.toml +++ b/stubs/qrcode/METADATA.toml @@ -1,4 +1,4 @@ -version = "8.1.*" +version = "8.2.*" upstream_repository = "https://github.com/lincolnloop/python-qrcode" # must be a version of Pillow that is py.typed requires = ["Pillow>=10.3.0"] diff --git a/stubs/qrcode/qrcode/image/styledpil.pyi b/stubs/qrcode/qrcode/image/styledpil.pyi index 655b57b7589c..c8e28463352e 100644 --- a/stubs/qrcode/qrcode/image/styledpil.pyi +++ b/stubs/qrcode/qrcode/image/styledpil.pyi @@ -39,7 +39,7 @@ class StyledPilImage(base.BaseImageWithDrawer): # the new_image method accepts arbitrary keyword arguments to accommodate # subclasses with additional arguments. def new_image(self, **kwargs: Any) -> Image.Image: ... - def draw_embeded_image(self) -> None: ... + def draw_embedded_image(self) -> None: ... # kwargs are passed on to PIL.Image.save, which also accepts arbitrary keyword arguments. def save( # type: ignore[override] self, diff --git a/stubs/qrcode/qrcode/image/styles/colormasks.pyi b/stubs/qrcode/qrcode/image/styles/colormasks.pyi index 8fdd08621f4f..4c66dd25e97a 100644 --- a/stubs/qrcode/qrcode/image/styles/colormasks.pyi +++ b/stubs/qrcode/qrcode/image/styles/colormasks.pyi @@ -12,7 +12,7 @@ class QRColorMask: paint_color: Ink # image is not actually used by any of the initialize implementations in this project. def initialize(self, styledPilImage: StyledPilImage, image: Image.Image) -> None: ... - def apply_mask(self, image: Image.Image) -> None: ... + def apply_mask(self, image: Image.Image, use_cache: bool = False) -> None: ... def get_fg_pixel(self, image: Image.Image, x: int, y: int) -> Ink: ... def get_bg_pixel(self, image: Image.Image, x: int, y: int) -> Ink: ... def interp_num(self, n1: int, n2: int, norm: float) -> int: ... @@ -23,6 +23,7 @@ class QRColorMask: class SolidFillColorMask(QRColorMask): front_color: Ink def __init__(self, back_color: Ink = (255, 255, 255), front_color: Ink = (0, 0, 0)) -> None: ... + def apply_mask(self, image: Image.Image) -> None: ... 
# type: ignore[override] class RadialGradiantColorMask(QRColorMask): center_color: Ink From d4e7872ceba9caa934336ac27d6448fc530f3760 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 5 May 2025 18:17:54 +0400 Subject: [PATCH 290/388] Bump braintree to 4.35.* (#13914) --- stubs/braintree/METADATA.toml | 2 +- stubs/braintree/braintree/__init__.pyi | 2 + stubs/braintree/braintree/error_codes.pyi | 98 ------------------- .../braintree/merchant_account/__init__.pyi | 3 - .../merchant_account/business_details.pyi | 9 -- .../merchant_account/funding_details.pyi | 7 -- .../merchant_account/individual_details.pyi | 9 -- .../merchant_account/merchant_account.pyi | 4 - .../braintree/merchant_account_gateway.pyi | 4 - .../braintree/payment_facilitator.pyi | 6 ++ stubs/braintree/braintree/sub_merchant.pyi | 4 + stubs/braintree/braintree/testing_gateway.pyi | 1 - stubs/braintree/braintree/transaction.pyi | 15 +-- .../braintree/transaction_gateway.pyi | 2 - .../braintree/webhook_notification.pyi | 2 - 15 files changed, 15 insertions(+), 153 deletions(-) delete mode 100644 stubs/braintree/braintree/merchant_account/business_details.pyi delete mode 100644 stubs/braintree/braintree/merchant_account/funding_details.pyi delete mode 100644 stubs/braintree/braintree/merchant_account/individual_details.pyi create mode 100644 stubs/braintree/braintree/payment_facilitator.pyi create mode 100644 stubs/braintree/braintree/sub_merchant.pyi diff --git a/stubs/braintree/METADATA.toml b/stubs/braintree/METADATA.toml index 1fea0e751c75..efee48e9e87f 100644 --- a/stubs/braintree/METADATA.toml +++ b/stubs/braintree/METADATA.toml @@ -1,2 +1,2 @@ -version = "4.34.*" +version = "4.35.*" upstream_repository = "https://github.com/braintree/braintree_python" diff --git a/stubs/braintree/braintree/__init__.pyi b/stubs/braintree/braintree/__init__.pyi index 1c360cd4ff34..7080ac619e8d 100644 --- a/stubs/braintree/braintree/__init__.pyi +++ b/stubs/braintree/braintree/__init__.pyi @@ -48,6 +48,7 @@ from braintree.merchant_account import MerchantAccount as MerchantAccount from braintree.merchant_account_gateway import MerchantAccountGateway as MerchantAccountGateway from braintree.oauth_access_revocation import OAuthAccessRevocation as OAuthAccessRevocation from braintree.partner_merchant import PartnerMerchant as PartnerMerchant +from braintree.payment_facilitator import PaymentFacilitator as PaymentFacilitator from braintree.payment_instrument_type import PaymentInstrumentType as PaymentInstrumentType from braintree.payment_method import PaymentMethod as PaymentMethod from braintree.payment_method_customer_data_updated_metadata import ( @@ -68,6 +69,7 @@ from braintree.sepa_direct_debit_account import SepaDirectDebitAccount as SepaDi from braintree.settlement_batch_summary import SettlementBatchSummary as SettlementBatchSummary from braintree.signature_service import SignatureService as SignatureService from braintree.status_event import StatusEvent as StatusEvent +from braintree.sub_merchant import SubMerchant as SubMerchant from braintree.subscription import Subscription as Subscription from braintree.subscription_gateway import SubscriptionGateway as SubscriptionGateway from braintree.subscription_search import SubscriptionSearch as SubscriptionSearch diff --git a/stubs/braintree/braintree/error_codes.pyi b/stubs/braintree/braintree/error_codes.pyi index 4f1fffcbc1e4..401746560cb1 100644 --- a/stubs/braintree/braintree/error_codes.pyi +++ b/stubs/braintree/braintree/error_codes.pyi @@ -141,7 +141,6 @@ class ErrorCodes: 
VerificationMerchantAccountIdIsInvalid: Final = "91728" VerificationMerchantAccountIsForbidden: Final = "91743" VerificationMerchantAccountIsSuspended: Final = "91742" - VerificationMerchantAccountCannotBeSubMerchantAccount: Final = "91755" class Customer: CompanyIsTooLong: Final = "81601" @@ -228,105 +227,13 @@ class ErrorCodes: MerchantAccountExistsForId: Final = "93620" class MerchantAccount: - IdFormatIsInvalid: Final = "82603" - IdIsInUse: Final = "82604" - IdIsNotAllowed: Final = "82605" - IdIsTooLong: Final = "82602" - MasterMerchantAccountIdIsInvalid: Final = "82607" - MasterMerchantAccountIdIsRequired: Final = "82606" - MasterMerchantAccountMustBeActive: Final = "82608" - TosAcceptedIsRequired: Final = "82610" - CannotBeUpdated: Final = "82674" - IdCannotBeUpdated: Final = "82675" - MasterMerchantAccountIdCannotBeUpdated: Final = "82676" - Declined: Final = "82626" - DeclinedMasterCardMatch: Final = "82622" - DeclinedOFAC: Final = "82621" - DeclinedFailedKYC: Final = "82623" - DeclinedSsnInvalid: Final = "82624" - DeclinedSsnMatchesDeceased: Final = "82625" - class ApplicantDetails: - AccountNumberIsRequired: Final = "82614" - CompanyNameIsInvalid: Final = "82631" - CompanyNameIsRequiredWithTaxId: Final = "82633" - DateOfBirthIsRequired: Final = "82612" Declined: Final = "82626" DeclinedMasterCardMatch: Final = "82622" DeclinedOFAC: Final = "82621" DeclinedFailedKYC: Final = "82623" DeclinedSsnInvalid: Final = "82624" DeclinedSsnMatchesDeceased: Final = "82625" - EmailAddressIsInvalid: Final = "82616" - FirstNameIsInvalid: Final = "82627" - FirstNameIsRequired: Final = "82609" - LastNameIsInvalid: Final = "82628" - LastNameIsRequired: Final = "82611" - PhoneIsInvalid: Final = "82636" - RoutingNumberIsInvalid: Final = "82635" - RoutingNumberIsRequired: Final = "82613" - SsnIsInvalid: Final = "82615" - TaxIdIsInvalid: Final = "82632" - TaxIdIsRequiredWithCompanyName: Final = "82634" - DateOfBirthIsInvalid: Final = "82663" - EmailAddressIsRequired: Final = "82665" - AccountNumberIsInvalid: Final = "82670" - TaxIdMustBeBlank: Final = "82673" - - class Address: - LocalityIsRequired: Final = "82618" - PostalCodeIsInvalid: Final = "82630" - PostalCodeIsRequired: Final = "82619" - RegionIsRequired: Final = "82620" - StreetAddressIsInvalid: Final = "82629" - StreetAddressIsRequired: Final = "82617" - RegionIsInvalid: Final = "82664" - - class Individual: - FirstNameIsRequired: Final = "82637" - LastNameIsRequired: Final = "82638" - DateOfBirthIsRequired: Final = "82639" - SsnIsInvalid: Final = "82642" - EmailAddressIsInvalid: Final = "82643" - FirstNameIsInvalid: Final = "82644" - LastNameIsInvalid: Final = "82645" - PhoneIsInvalid: Final = "82656" - DateOfBirthIsInvalid: Final = "82666" - EmailAddressIsRequired: Final = "82667" - - class Address: - StreetAddressIsRequired: Final = "82657" - LocalityIsRequired: Final = "82658" - PostalCodeIsRequired: Final = "82659" - RegionIsRequired: Final = "82660" - StreetAddressIsInvalid: Final = "82661" - PostalCodeIsInvalid: Final = "82662" - RegionIsInvalid: Final = "82668" - - class Business: - DbaNameIsInvalid: Final = "82646" - LegalNameIsInvalid: Final = "82677" - LegalNameIsRequiredWithTaxId: Final = "82669" - TaxIdIsInvalid: Final = "82647" - TaxIdIsRequiredWithLegalName: Final = "82648" - TaxIdMustBeBlank: Final = "82672" - - class Address: - StreetAddressIsInvalid: Final = "82685" - PostalCodeIsInvalid: Final = "82686" - RegionIsInvalid: Final = "82684" - - class Funding: - RoutingNumberIsRequired: Final = "82640" - AccountNumberIsRequired: 
Final = "82641" - RoutingNumberIsInvalid: Final = "82649" - AccountNumberIsInvalid: Final = "82671" - DestinationIsInvalid: Final = "82679" - DestinationIsRequired: Final = "82678" - EmailAddressIsInvalid: Final = "82681" - EmailAddressIsRequired: Final = "82680" - MobilePhoneIsInvalid: Final = "82683" - MobilePhoneIsRequired: Final = "82682" class OAuth: InvalidGrant: Final = "93801" @@ -365,7 +272,6 @@ class ErrorCodes: MerchantAccountIdIsInvalid: Final = "94204" MerchantAccountIsSuspended: Final = "94205" MerchantAccountIsForbidden: Final = "94206" - MerchantAccountCannotBeSubMerchantAccount: Final = "94208" AccountTypeIsInvalid: Final = "942184" AccountTypeNotSupported: Final = "942185" @@ -553,14 +459,11 @@ class ErrorCodes: CannotCloneTransactionWithVaultCreditCard: Final = "91540" CannotCloneUnsuccessfulTransaction: Final = "91542" CannotCloneVoiceAuthorizations: Final = "91541" - CannotHoldInEscrow: Final = "91560" - CannotPartiallyRefundEscrowedTransaction: Final = "91563" CannotRefundCredit: Final = "91505" CannotRefundSettlingTransaction: Final = "91574" CannotRefundUnlessSettled: Final = "91506" CannotRefundWithPendingMerchantAccount: Final = "91559" CannotRefundWithSuspendedMerchantAccount: Final = "91538" - CannotReleaseFromEscrow: Final = "91561" CannotSimulateTransactionSettlement: Final = "91575" CannotSubmitForPartialSettlement: Final = "915103" CannotSubmitForSettlement: Final = "91507" @@ -643,7 +546,6 @@ class ErrorCodes: ShipsFromPostalCodeInvalidCharacters: Final = "915167" ShipsFromPostalCodeIsInvalid: Final = "915166" ShipsFromPostalCodeIsTooLong: Final = "915165" - SubMerchantAccountRequiresServiceFeeAmount: Final = "91553" SubscriptionDoesNotBelongToCustomer: Final = "91529" SubscriptionIdIsInvalid: Final = "91528" SubscriptionStatusMustBePastDue: Final = "91531" diff --git a/stubs/braintree/braintree/merchant_account/__init__.pyi b/stubs/braintree/braintree/merchant_account/__init__.pyi index 743f6fa816b9..85c7724f4d94 100644 --- a/stubs/braintree/braintree/merchant_account/__init__.pyi +++ b/stubs/braintree/braintree/merchant_account/__init__.pyi @@ -1,4 +1 @@ -from braintree.merchant_account.business_details import BusinessDetails as BusinessDetails -from braintree.merchant_account.funding_details import FundingDetails as FundingDetails -from braintree.merchant_account.individual_details import IndividualDetails as IndividualDetails from braintree.merchant_account.merchant_account import MerchantAccount as MerchantAccount diff --git a/stubs/braintree/braintree/merchant_account/business_details.pyi b/stubs/braintree/braintree/merchant_account/business_details.pyi deleted file mode 100644 index a28a36962cbc..000000000000 --- a/stubs/braintree/braintree/merchant_account/business_details.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from typing import ClassVar - -from braintree.attribute_getter import AttributeGetter -from braintree.merchant_account.address_details import AddressDetails - -class BusinessDetails(AttributeGetter): - detail_list: ClassVar[list[str]] - address_details: AddressDetails - def __init__(self, attributes) -> None: ... 
diff --git a/stubs/braintree/braintree/merchant_account/funding_details.pyi b/stubs/braintree/braintree/merchant_account/funding_details.pyi deleted file mode 100644 index fb94e7fa06f9..000000000000 --- a/stubs/braintree/braintree/merchant_account/funding_details.pyi +++ /dev/null @@ -1,7 +0,0 @@ -from typing import ClassVar - -from braintree.attribute_getter import AttributeGetter - -class FundingDetails(AttributeGetter): - detail_list: ClassVar[list[str]] - def __init__(self, attributes) -> None: ... diff --git a/stubs/braintree/braintree/merchant_account/individual_details.pyi b/stubs/braintree/braintree/merchant_account/individual_details.pyi deleted file mode 100644 index c96feb7ee95f..000000000000 --- a/stubs/braintree/braintree/merchant_account/individual_details.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from typing import ClassVar - -from braintree.attribute_getter import AttributeGetter -from braintree.merchant_account.address_details import AddressDetails - -class IndividualDetails(AttributeGetter): - detail_list: ClassVar[list[str]] - address_details: AddressDetails - def __init__(self, attributes) -> None: ... diff --git a/stubs/braintree/braintree/merchant_account/merchant_account.pyi b/stubs/braintree/braintree/merchant_account/merchant_account.pyi index cd82f5c81cd9..dac21af07dcb 100644 --- a/stubs/braintree/braintree/merchant_account/merchant_account.pyi +++ b/stubs/braintree/braintree/merchant_account/merchant_account.pyi @@ -1,7 +1,6 @@ from _typeshed import Incomplete from typing import Final -from braintree.merchant_account import BusinessDetails, FundingDetails, IndividualDetails from braintree.resource import Resource class MerchantAccount(Resource): @@ -16,9 +15,6 @@ class MerchantAccount(Resource): MobilePhone: Final = "mobile_phone" FundingDestinations: type[FundingDestination] - individual_details: IndividualDetails - business_details: BusinessDetails - funding_details: FundingDetails master_merchant_account: MerchantAccount def __init__(self, gateway, attributes) -> None: ... @staticmethod diff --git a/stubs/braintree/braintree/merchant_account_gateway.pyi b/stubs/braintree/braintree/merchant_account_gateway.pyi index b467975c03e5..5d426f6b4108 100644 --- a/stubs/braintree/braintree/merchant_account_gateway.pyi +++ b/stubs/braintree/braintree/merchant_account_gateway.pyi @@ -8,10 +8,6 @@ class MerchantAccountGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... - def update( - self, merchant_account_id: str, params: dict[str, Incomplete] | None = None - ) -> SuccessfulResult | ErrorResult | None: ... def find(self, merchant_account_id: str) -> MerchantAccount: ... def create_for_currency(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... def all(self) -> SuccessfulResult: ... diff --git a/stubs/braintree/braintree/payment_facilitator.pyi b/stubs/braintree/braintree/payment_facilitator.pyi new file mode 100644 index 000000000000..905ced231d28 --- /dev/null +++ b/stubs/braintree/braintree/payment_facilitator.pyi @@ -0,0 +1,6 @@ +from braintree.attribute_getter import AttributeGetter +from braintree.sub_merchant import SubMerchant + +class PaymentFacilitator(AttributeGetter): + sub_merchant: SubMerchant + def __init__(self, attributes) -> None: ... 
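
A short, hypothetical sketch of how the new PaymentFacilitator stub composes with SubMerchant (added in the next hunk); the function name facilitator_sub_merchant is made up for illustration and is not part of the patch.

from braintree.payment_facilitator import PaymentFacilitator
from braintree.sub_merchant import SubMerchant

def facilitator_sub_merchant(pf: PaymentFacilitator) -> SubMerchant:
    # PaymentFacilitator is an AttributeGetter whose sub_merchant
    # attribute is typed as SubMerchant in the new stub.
    return pf.sub_merchant
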
diff --git a/stubs/braintree/braintree/sub_merchant.pyi b/stubs/braintree/braintree/sub_merchant.pyi new file mode 100644 index 000000000000..ad790ceae83e --- /dev/null +++ b/stubs/braintree/braintree/sub_merchant.pyi @@ -0,0 +1,4 @@ +from braintree.attribute_getter import AttributeGetter + +class SubMerchant(AttributeGetter): + def __init__(self, attributes) -> None: ... diff --git a/stubs/braintree/braintree/testing_gateway.pyi b/stubs/braintree/braintree/testing_gateway.pyi index 53065d8ef07e..54727fdffc97 100644 --- a/stubs/braintree/braintree/testing_gateway.pyi +++ b/stubs/braintree/braintree/testing_gateway.pyi @@ -5,7 +5,6 @@ class TestingGateway: config: Incomplete def __init__(self, gateway) -> None: ... def make_past_due(self, subscription_id, number_of_days_past_due: int = 1) -> None: ... - def escrow_transaction(self, transaction_id) -> None: ... def settle_transaction(self, transaction_id): ... def settlement_confirm_transaction(self, transaction_id): ... def settlement_decline_transaction(self, transaction_id): ... diff --git a/stubs/braintree/braintree/transaction.pyi b/stubs/braintree/braintree/transaction.pyi index 9fc29be1b5d8..e2dc4045b9d4 100644 --- a/stubs/braintree/braintree/transaction.pyi +++ b/stubs/braintree/braintree/transaction.pyi @@ -22,6 +22,7 @@ from braintree.masterpass_card import MasterpassCard from braintree.meta_checkout_card import MetaCheckoutCard from braintree.meta_checkout_token import MetaCheckoutToken from braintree.package_details import PackageDetails +from braintree.payment_facilitator import PaymentFacilitator from braintree.paypal_account import PayPalAccount from braintree.paypal_here import PayPalHere from braintree.resource import Resource @@ -60,13 +61,6 @@ class Transaction(Resource): ControlPanel: Final = "control_panel" Recurring: Final = "recurring" - class EscrowStatus: - HoldPending: Final = "hold_pending" - Held: Final = "held" - ReleasePending: Final = "release_pending" - Released: Final = "released" - Refunded: Final = "refunded" - class Status: AuthorizationExpired: Final = "authorization_expired" Authorized: Final = "authorized" @@ -105,22 +99,16 @@ class Transaction(Resource): @staticmethod def clone_transaction(transaction_id, params): ... @staticmethod - def cancel_release(transaction_id): ... - @staticmethod def credit(params: Incomplete | None = None): ... @staticmethod def find(transaction_id): ... @staticmethod - def hold_in_escrow(transaction_id): ... - @staticmethod def refund(transaction_id, amount_or_options: Incomplete | None = None): ... @staticmethod def sale(params: Incomplete | None = None): ... @staticmethod def search(*query): ... @staticmethod - def release_from_escrow(transaction_id): ... - @staticmethod def submit_for_settlement(transaction_id, amount: Incomplete | None = None, params: Incomplete | None = None): ... @staticmethod def update_details(transaction_id, params: Incomplete | None = None): ... @@ -185,6 +173,7 @@ class Transaction(Resource): facilitated_details: FacilitatedDetails facilitator_details: FacilitatorDetails network_transaction_id: Incomplete + payment_facilitator: PaymentFacilitator def __init__(self, gateway, attributes) -> None: ... @property def vault_billing_address(self): ... 
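
A sketch of the Transaction-level effect of this bump, assuming the stubs above: the escrow helpers (hold_in_escrow, release_from_escrow, cancel_release) are gone from the typed surface, while payment_facilitator is now available. The helper name transaction_sub_merchant is hypothetical.

from braintree.payment_facilitator import PaymentFacilitator
from braintree.sub_merchant import SubMerchant
from braintree.transaction import Transaction

def transaction_sub_merchant(txn: Transaction) -> SubMerchant:
    # Transaction.payment_facilitator is now typed as PaymentFacilitator.
    pf: PaymentFacilitator = txn.payment_facilitator
    return pf.sub_merchant
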
diff --git a/stubs/braintree/braintree/transaction_gateway.pyi b/stubs/braintree/braintree/transaction_gateway.pyi index e1dd995ae576..a0b2cb8cbaf0 100644 --- a/stubs/braintree/braintree/transaction_gateway.pyi +++ b/stubs/braintree/braintree/transaction_gateway.pyi @@ -10,11 +10,9 @@ class TransactionGateway: def create(self, params): ... def credit(self, params): ... def find(self, transaction_id): ... - def hold_in_escrow(self, transaction_id): ... def refund(self, transaction_id, amount_or_options: Incomplete | None = None): ... def sale(self, params): ... def search(self, *query): ... - def release_from_escrow(self, transaction_id): ... def submit_for_settlement(self, transaction_id, amount: Incomplete | None = None, params: Incomplete | None = None): ... def update_details(self, transaction_id, params: Incomplete | None = None): ... def submit_for_partial_settlement(self, transaction_id, amount, params: Incomplete | None = None): ... diff --git a/stubs/braintree/braintree/webhook_notification.pyi b/stubs/braintree/braintree/webhook_notification.pyi index 476818075c5b..085fa5de4b6c 100644 --- a/stubs/braintree/braintree/webhook_notification.pyi +++ b/stubs/braintree/braintree/webhook_notification.pyi @@ -52,8 +52,6 @@ class WebhookNotification(Resource): PaymentMethodRevokedByCustomer: Final = "payment_method_revoked_by_customer" RecipientUpdatedGrantedPaymentMethod: Final = "recipient_updated_granted_payment_method" RefundFailed: Final = "refund_failed" - SubMerchantAccountApproved: Final = "sub_merchant_account_approved" - SubMerchantAccountDeclined: Final = "sub_merchant_account_declined" SubscriptionBillingSkipped: Final = "subscription_billing_skipped" SubscriptionCanceled: Final = "subscription_canceled" SubscriptionChargedSuccessfully: Final = "subscription_charged_successfully" From 864c5352060a6fb256c1ffe10cbe3625fea78805 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 5 May 2025 10:34:44 -0400 Subject: [PATCH 291/388] Run Black after grpcio merge (#13944) --- .../grpcio/grpc_channelz/v1/channelz_pb2.pyi | 163 ++++++++++++++++-- .../v1alpha/reflection_pb2.pyi | 21 ++- 2 files changed, 166 insertions(+), 18 deletions(-) diff --git a/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi b/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi index b867e0e8c0b8..262fa0f3f89c 100644 --- a/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi +++ b/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi @@ -7,6 +7,7 @@ from google.protobuf import any_pb2, duration_pb2, message, timestamp_pb2, wrapp from google.protobuf.internal import containers DESCRIPTOR: FileDescriptor + @final class Channel(message.Message, metaclass=MessageMeta): REF_FIELD_NUMBER: ClassVar[int] @@ -19,7 +20,14 @@ class Channel(message.Message, metaclass=MessageMeta): channel_ref: containers.RepeatedCompositeFieldContainer[ChannelRef] subchannel_ref: containers.RepeatedCompositeFieldContainer[SubchannelRef] socket_ref: containers.RepeatedCompositeFieldContainer[SocketRef] - def __init__(self, ref: ChannelRef | Mapping[Incomplete, Incomplete] | None = ..., data: ChannelData | Mapping[Incomplete, Incomplete] | None = ..., channel_ref: Iterable[ChannelRef | Mapping[Incomplete, Incomplete]] | None = ..., subchannel_ref: Iterable[SubchannelRef | Mapping[Incomplete, Incomplete]] | None = ..., socket_ref: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... 
+ def __init__( + self, + ref: ChannelRef | Mapping[Incomplete, Incomplete] | None = ..., + data: ChannelData | Mapping[Incomplete, Incomplete] | None = ..., + channel_ref: Iterable[ChannelRef | Mapping[Incomplete, Incomplete]] | None = ..., + subchannel_ref: Iterable[SubchannelRef | Mapping[Incomplete, Incomplete]] | None = ..., + socket_ref: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -34,7 +42,14 @@ class Subchannel(message.Message, metaclass=MessageMeta): channel_ref: containers.RepeatedCompositeFieldContainer[ChannelRef] subchannel_ref: containers.RepeatedCompositeFieldContainer[SubchannelRef] socket_ref: containers.RepeatedCompositeFieldContainer[SocketRef] - def __init__(self, ref: SubchannelRef | Mapping[Incomplete, Incomplete] | None = ..., data: ChannelData | Mapping[Incomplete, Incomplete] | None = ..., channel_ref: Iterable[ChannelRef | Mapping[Incomplete, Incomplete]] | None = ..., subchannel_ref: Iterable[SubchannelRef | Mapping[Incomplete, Incomplete]] | None = ..., socket_ref: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... + def __init__( + self, + ref: SubchannelRef | Mapping[Incomplete, Incomplete] | None = ..., + data: ChannelData | Mapping[Incomplete, Incomplete] | None = ..., + channel_ref: Iterable[ChannelRef | Mapping[Incomplete, Incomplete]] | None = ..., + subchannel_ref: Iterable[SubchannelRef | Mapping[Incomplete, Incomplete]] | None = ..., + socket_ref: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -67,7 +82,16 @@ class ChannelData(message.Message, metaclass=MessageMeta): calls_succeeded: int calls_failed: int last_call_started_timestamp: timestamp_pb2.Timestamp - def __init__(self, state: ChannelConnectivityState | Mapping[Incomplete, Incomplete] | None = ..., target: str | None = ..., trace: ChannelTrace | Mapping[Incomplete, Incomplete] | None = ..., calls_started: int | None = ..., calls_succeeded: int | None = ..., calls_failed: int | None = ..., last_call_started_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + def __init__( + self, + state: ChannelConnectivityState | Mapping[Incomplete, Incomplete] | None = ..., + target: str | None = ..., + trace: ChannelTrace | Mapping[Incomplete, Incomplete] | None = ..., + calls_started: int | None = ..., + calls_succeeded: int | None = ..., + calls_failed: int | None = ..., + last_call_started_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -87,7 +111,14 @@ class ChannelTraceEvent(message.Message, metaclass=MessageMeta): timestamp: timestamp_pb2.Timestamp channel_ref: ChannelRef subchannel_ref: SubchannelRef - def __init__(self, description: str | None = ..., severity: Incomplete | str | None = ..., timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., channel_ref: ChannelRef | Mapping[Incomplete, Incomplete] | None = ..., subchannel_ref: SubchannelRef | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + def __init__( + self, + description: str | None = ..., + severity: Incomplete | str | None = ..., + timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., + channel_ref: ChannelRef | Mapping[Incomplete, Incomplete] | None = ..., + subchannel_ref: SubchannelRef | Mapping[Incomplete, Incomplete] | None = ..., + ) -> None: ... 
DESCRIPTOR: Descriptor @final @@ -98,7 +129,12 @@ class ChannelTrace(message.Message, metaclass=MessageMeta): num_events_logged: int creation_timestamp: timestamp_pb2.Timestamp events: containers.RepeatedCompositeFieldContainer[ChannelTraceEvent] - def __init__(self, num_events_logged: int | None = ..., creation_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., events: Iterable[ChannelTraceEvent | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... + def __init__( + self, + num_events_logged: int | None = ..., + creation_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., + events: Iterable[ChannelTraceEvent | Mapping[Incomplete, Incomplete]] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -145,7 +181,12 @@ class Server(message.Message, metaclass=MessageMeta): ref: ServerRef data: ServerData listen_socket: containers.RepeatedCompositeFieldContainer[SocketRef] - def __init__(self, ref: ServerRef | Mapping[Incomplete, Incomplete] | None = ..., data: ServerData | Mapping[Incomplete, Incomplete] | None = ..., listen_socket: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... + def __init__( + self, + ref: ServerRef | Mapping[Incomplete, Incomplete] | None = ..., + data: ServerData | Mapping[Incomplete, Incomplete] | None = ..., + listen_socket: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -160,7 +201,14 @@ class ServerData(message.Message, metaclass=MessageMeta): calls_succeeded: int calls_failed: int last_call_started_timestamp: timestamp_pb2.Timestamp - def __init__(self, trace: ChannelTrace | Mapping[Incomplete, Incomplete] | None = ..., calls_started: int | None = ..., calls_succeeded: int | None = ..., calls_failed: int | None = ..., last_call_started_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + def __init__( + self, + trace: ChannelTrace | Mapping[Incomplete, Incomplete] | None = ..., + calls_started: int | None = ..., + calls_succeeded: int | None = ..., + calls_failed: int | None = ..., + last_call_started_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -177,7 +225,15 @@ class Socket(message.Message, metaclass=MessageMeta): remote: Address security: Security remote_name: str - def __init__(self, ref: SocketRef | Mapping[Incomplete, Incomplete] | None = ..., data: SocketData | Mapping[Incomplete, Incomplete] | None = ..., local: Address | Mapping[Incomplete, Incomplete] | None = ..., remote: Address | Mapping[Incomplete, Incomplete] | None = ..., security: Security | Mapping[Incomplete, Incomplete] | None = ..., remote_name: str | None = ...) -> None: ... + def __init__( + self, + ref: SocketRef | Mapping[Incomplete, Incomplete] | None = ..., + data: SocketData | Mapping[Incomplete, Incomplete] | None = ..., + local: Address | Mapping[Incomplete, Incomplete] | None = ..., + remote: Address | Mapping[Incomplete, Incomplete] | None = ..., + security: Security | Mapping[Incomplete, Incomplete] | None = ..., + remote_name: str | None = ..., + ) -> None: ... 
DESCRIPTOR: Descriptor @final @@ -208,7 +264,22 @@ class SocketData(message.Message, metaclass=MessageMeta): local_flow_control_window: wrappers_pb2.Int64Value remote_flow_control_window: wrappers_pb2.Int64Value option: containers.RepeatedCompositeFieldContainer[SocketOption] - def __init__(self, streams_started: int | None = ..., streams_succeeded: int | None = ..., streams_failed: int | None = ..., messages_sent: int | None = ..., messages_received: int | None = ..., keep_alives_sent: int | None = ..., last_local_stream_created_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., last_remote_stream_created_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., last_message_sent_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., last_message_received_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., local_flow_control_window: wrappers_pb2.Int64Value | Mapping[Incomplete, Incomplete] | None = ..., remote_flow_control_window: wrappers_pb2.Int64Value | Mapping[Incomplete, Incomplete] | None = ..., option: Iterable[SocketOption | Mapping[Incomplete, Incomplete]] | None = ...) -> None: ... + def __init__( + self, + streams_started: int | None = ..., + streams_succeeded: int | None = ..., + streams_failed: int | None = ..., + messages_sent: int | None = ..., + messages_received: int | None = ..., + keep_alives_sent: int | None = ..., + last_local_stream_created_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., + last_remote_stream_created_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., + last_message_sent_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., + last_message_received_timestamp: timestamp_pb2.Timestamp | Mapping[Incomplete, Incomplete] | None = ..., + local_flow_control_window: wrappers_pb2.Int64Value | Mapping[Incomplete, Incomplete] | None = ..., + remote_flow_control_window: wrappers_pb2.Int64Value | Mapping[Incomplete, Incomplete] | None = ..., + option: Iterable[SocketOption | Mapping[Incomplete, Incomplete]] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -220,11 +291,13 @@ class Address(message.Message, metaclass=MessageMeta): ip_address: bytes port: int def __init__(self, ip_address: bytes | None = ..., port: int | None = ...) -> None: ... + @final class UdsAddress(message.Message, metaclass=MessageMeta): FILENAME_FIELD_NUMBER: ClassVar[int] filename: str def __init__(self, filename: str | None = ...) -> None: ... + @final class OtherAddress(message.Message, metaclass=MessageMeta): NAME_FIELD_NUMBER: ClassVar[int] @@ -232,13 +305,19 @@ class Address(message.Message, metaclass=MessageMeta): name: str value: any_pb2.Any def __init__(self, name: str | None = ..., value: any_pb2.Any | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + TCPIP_ADDRESS_FIELD_NUMBER: ClassVar[int] UDS_ADDRESS_FIELD_NUMBER: ClassVar[int] OTHER_ADDRESS_FIELD_NUMBER: ClassVar[int] tcpip_address: Address.TcpIpAddress uds_address: Address.UdsAddress other_address: Address.OtherAddress - def __init__(self, tcpip_address: Address.TcpIpAddress | Mapping[Incomplete, Incomplete] | None = ..., uds_address: Address.UdsAddress | Mapping[Incomplete, Incomplete] | None = ..., other_address: Address.OtherAddress | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... 
+ def __init__( + self, + tcpip_address: Address.TcpIpAddress | Mapping[Incomplete, Incomplete] | None = ..., + uds_address: Address.UdsAddress | Mapping[Incomplete, Incomplete] | None = ..., + other_address: Address.OtherAddress | Mapping[Incomplete, Incomplete] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -253,7 +332,14 @@ class Security(message.Message, metaclass=MessageMeta): other_name: str local_certificate: bytes remote_certificate: bytes - def __init__(self, standard_name: str | None = ..., other_name: str | None = ..., local_certificate: bytes | None = ..., remote_certificate: bytes | None = ...) -> None: ... + def __init__( + self, + standard_name: str | None = ..., + other_name: str | None = ..., + local_certificate: bytes | None = ..., + remote_certificate: bytes | None = ..., + ) -> None: ... + @final class OtherSecurity(message.Message, metaclass=MessageMeta): NAME_FIELD_NUMBER: ClassVar[int] @@ -261,11 +347,16 @@ class Security(message.Message, metaclass=MessageMeta): name: str value: any_pb2.Any def __init__(self, name: str | None = ..., value: any_pb2.Any | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + TLS_FIELD_NUMBER: ClassVar[int] OTHER_FIELD_NUMBER: ClassVar[int] tls: Security.Tls other: Security.OtherSecurity - def __init__(self, tls: Security.Tls | Mapping[Incomplete, Incomplete] | None = ..., other: Security.OtherSecurity | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + def __init__( + self, + tls: Security.Tls | Mapping[Incomplete, Incomplete] | None = ..., + other: Security.OtherSecurity | Mapping[Incomplete, Incomplete] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -276,7 +367,12 @@ class SocketOption(message.Message, metaclass=MessageMeta): name: str value: str additional: any_pb2.Any - def __init__(self, name: str | None = ..., value: str | None = ..., additional: any_pb2.Any | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + def __init__( + self, + name: str | None = ..., + value: str | None = ..., + additional: any_pb2.Any | Mapping[Incomplete, Incomplete] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -292,7 +388,9 @@ class SocketOptionLinger(message.Message, metaclass=MessageMeta): DURATION_FIELD_NUMBER: ClassVar[int] active: bool duration: duration_pb2.Duration - def __init__(self, active: bool = ..., duration: duration_pb2.Duration | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + def __init__( + self, active: bool = ..., duration: duration_pb2.Duration | Mapping[Incomplete, Incomplete] | None = ... + ) -> None: ... 
DESCRIPTOR: Descriptor @final @@ -355,7 +453,38 @@ class SocketOptionTcpInfo(message.Message, metaclass=MessageMeta): tcpi_snd_cwnd: int tcpi_advmss: int tcpi_reordering: int - def __init__(self, tcpi_state: int | None = ..., tcpi_ca_state: int | None = ..., tcpi_retransmits: int | None = ..., tcpi_probes: int | None = ..., tcpi_backoff: int | None = ..., tcpi_options: int | None = ..., tcpi_snd_wscale: int | None = ..., tcpi_rcv_wscale: int | None = ..., tcpi_rto: int | None = ..., tcpi_ato: int | None = ..., tcpi_snd_mss: int | None = ..., tcpi_rcv_mss: int | None = ..., tcpi_unacked: int | None = ..., tcpi_sacked: int | None = ..., tcpi_lost: int | None = ..., tcpi_retrans: int | None = ..., tcpi_fackets: int | None = ..., tcpi_last_data_sent: int | None = ..., tcpi_last_ack_sent: int | None = ..., tcpi_last_data_recv: int | None = ..., tcpi_last_ack_recv: int | None = ..., tcpi_pmtu: int | None = ..., tcpi_rcv_ssthresh: int | None = ..., tcpi_rtt: int | None = ..., tcpi_rttvar: int | None = ..., tcpi_snd_ssthresh: int | None = ..., tcpi_snd_cwnd: int | None = ..., tcpi_advmss: int | None = ..., tcpi_reordering: int | None = ...) -> None: ... + def __init__( + self, + tcpi_state: int | None = ..., + tcpi_ca_state: int | None = ..., + tcpi_retransmits: int | None = ..., + tcpi_probes: int | None = ..., + tcpi_backoff: int | None = ..., + tcpi_options: int | None = ..., + tcpi_snd_wscale: int | None = ..., + tcpi_rcv_wscale: int | None = ..., + tcpi_rto: int | None = ..., + tcpi_ato: int | None = ..., + tcpi_snd_mss: int | None = ..., + tcpi_rcv_mss: int | None = ..., + tcpi_unacked: int | None = ..., + tcpi_sacked: int | None = ..., + tcpi_lost: int | None = ..., + tcpi_retrans: int | None = ..., + tcpi_fackets: int | None = ..., + tcpi_last_data_sent: int | None = ..., + tcpi_last_ack_sent: int | None = ..., + tcpi_last_data_recv: int | None = ..., + tcpi_last_ack_recv: int | None = ..., + tcpi_pmtu: int | None = ..., + tcpi_rcv_ssthresh: int | None = ..., + tcpi_rtt: int | None = ..., + tcpi_rttvar: int | None = ..., + tcpi_snd_ssthresh: int | None = ..., + tcpi_snd_cwnd: int | None = ..., + tcpi_advmss: int | None = ..., + tcpi_reordering: int | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final @@ -425,7 +554,9 @@ class GetServerSocketsResponse(message.Message, metaclass=MessageMeta): END_FIELD_NUMBER: ClassVar[int] socket_ref: containers.RepeatedCompositeFieldContainer[SocketRef] end: bool - def __init__(self, socket_ref: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ..., end: bool = ...) -> None: ... + def __init__( + self, socket_ref: Iterable[SocketRef | Mapping[Incomplete, Incomplete]] | None = ..., end: bool = ... + ) -> None: ... DESCRIPTOR: Descriptor @final diff --git a/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi b/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi index 35eff35cdb30..94f335771385 100644 --- a/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi +++ b/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi @@ -22,8 +22,17 @@ class ServerReflectionRequest(message.Message, metaclass=MessageMeta): file_containing_extension: ExtensionRequest all_extension_numbers_of_type: str list_services: str - def __init__(self, host: str | None = ..., file_by_filename: str | None = ..., file_containing_symbol: str | None = ..., file_containing_extension: ExtensionRequest | Mapping[Incomplete, Incomplete] | None = ..., all_extension_numbers_of_type: str | None = ..., list_services: str | None = ...) -> None: ... 
+ def __init__( + self, + host: str | None = ..., + file_by_filename: str | None = ..., + file_containing_symbol: str | None = ..., + file_containing_extension: ExtensionRequest | Mapping[Incomplete, Incomplete] | None = ..., + all_extension_numbers_of_type: str | None = ..., + list_services: str | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor + @final class ExtensionRequest(message.Message, metaclass=MessageMeta): CONTAINING_TYPE_FIELD_NUMBER: ClassVar[int] @@ -47,7 +56,15 @@ class ServerReflectionResponse(message.Message, metaclass=MessageMeta): all_extension_numbers_response: ExtensionNumberResponse list_services_response: ListServiceResponse error_response: ErrorResponse - def __init__(self, valid_host: str | None = ..., original_request: ServerReflectionRequest | Mapping[Incomplete, Incomplete] | None = ..., file_descriptor_response: FileDescriptorResponse | Mapping[Incomplete, Incomplete] | None = ..., all_extension_numbers_response: ExtensionNumberResponse | Mapping[Incomplete, Incomplete] | None = ..., list_services_response: ListServiceResponse | Mapping[Incomplete, Incomplete] | None = ..., error_response: ErrorResponse | Mapping[Incomplete, Incomplete] | None = ...) -> None: ... + def __init__( + self, + valid_host: str | None = ..., + original_request: ServerReflectionRequest | Mapping[Incomplete, Incomplete] | None = ..., + file_descriptor_response: FileDescriptorResponse | Mapping[Incomplete, Incomplete] | None = ..., + all_extension_numbers_response: ExtensionNumberResponse | Mapping[Incomplete, Incomplete] | None = ..., + list_services_response: ListServiceResponse | Mapping[Incomplete, Incomplete] | None = ..., + error_response: ErrorResponse | Mapping[Incomplete, Incomplete] | None = ..., + ) -> None: ... DESCRIPTOR: Descriptor @final From e0de065c13db6c44307e4939ad4702803ccb0d6e Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 5 May 2025 10:48:46 -0400 Subject: [PATCH 292/388] Extract grpcio plugins (#13896) --- pyrightconfig.stricter.json | 8 ++++---- stubs/grpcio-channelz/METADATA.toml | 3 +++ .../grpc_channelz/__init__.pyi | 0 .../grpc_channelz/v1/__init__.pyi | 0 .../grpc_channelz/v1/_async.pyi | 0 .../grpc_channelz/v1/_servicer.pyi | 0 .../grpc_channelz/v1/channelz.pyi | 0 .../grpc_channelz/v1/channelz_pb2.pyi | 0 .../grpc_channelz/v1/channelz_pb2_grpc.pyi | 0 stubs/grpcio-health-checking/METADATA.toml | 3 +++ .../grpc_health/__init__.pyi | 0 .../grpc_health/v1/__init__.pyi | 0 .../grpc_health/v1/health.pyi | 0 .../grpc_health/v1/health_pb2.pyi | 0 .../grpc_health/v1/health_pb2_grpc.pyi | 0 .../@tests/test_cases/check_reflection.py | 0 .../@tests/test_cases/check_reflection_aio.py | 0 stubs/grpcio-reflection/METADATA.toml | 3 +++ .../grpc_reflection/__init__.pyi | 0 .../grpc_reflection/v1alpha/__init__.pyi | 0 .../grpc_reflection/v1alpha/_async.pyi | 0 .../grpc_reflection/v1alpha/_base.pyi | 0 .../v1alpha/proto_reflection_descriptor_database.pyi | 0 .../grpc_reflection/v1alpha/reflection.pyi | 0 .../grpc_reflection/v1alpha/reflection_pb2.pyi | 0 .../grpc_reflection/v1alpha/reflection_pb2_grpc.pyi | 0 stubs/grpcio-status/METADATA.toml | 3 +++ stubs/{grpcio => grpcio-status}/grpc_status/__init__.pyi | 0 stubs/{grpcio => grpcio-status}/grpc_status/_async.pyi | 0 .../{grpcio => grpcio-status}/grpc_status/rpc_status.pyi | 0 stubs/grpcio/METADATA.toml | 9 --------- 31 files changed, 16 insertions(+), 13 deletions(-) create mode 100644 stubs/grpcio-channelz/METADATA.toml rename stubs/{grpcio => grpcio-channelz}/grpc_channelz/__init__.pyi (100%) rename stubs/{grpcio 
=> grpcio-channelz}/grpc_channelz/v1/__init__.pyi (100%) rename stubs/{grpcio => grpcio-channelz}/grpc_channelz/v1/_async.pyi (100%) rename stubs/{grpcio => grpcio-channelz}/grpc_channelz/v1/_servicer.pyi (100%) rename stubs/{grpcio => grpcio-channelz}/grpc_channelz/v1/channelz.pyi (100%) rename stubs/{grpcio => grpcio-channelz}/grpc_channelz/v1/channelz_pb2.pyi (100%) rename stubs/{grpcio => grpcio-channelz}/grpc_channelz/v1/channelz_pb2_grpc.pyi (100%) create mode 100644 stubs/grpcio-health-checking/METADATA.toml rename stubs/{grpcio => grpcio-health-checking}/grpc_health/__init__.pyi (100%) rename stubs/{grpcio => grpcio-health-checking}/grpc_health/v1/__init__.pyi (100%) rename stubs/{grpcio => grpcio-health-checking}/grpc_health/v1/health.pyi (100%) rename stubs/{grpcio => grpcio-health-checking}/grpc_health/v1/health_pb2.pyi (100%) rename stubs/{grpcio => grpcio-health-checking}/grpc_health/v1/health_pb2_grpc.pyi (100%) rename stubs/{grpcio => grpcio-reflection}/@tests/test_cases/check_reflection.py (100%) rename stubs/{grpcio => grpcio-reflection}/@tests/test_cases/check_reflection_aio.py (100%) create mode 100644 stubs/grpcio-reflection/METADATA.toml rename stubs/{grpcio => grpcio-reflection}/grpc_reflection/__init__.pyi (100%) rename stubs/{grpcio => grpcio-reflection}/grpc_reflection/v1alpha/__init__.pyi (100%) rename stubs/{grpcio => grpcio-reflection}/grpc_reflection/v1alpha/_async.pyi (100%) rename stubs/{grpcio => grpcio-reflection}/grpc_reflection/v1alpha/_base.pyi (100%) rename stubs/{grpcio => grpcio-reflection}/grpc_reflection/v1alpha/proto_reflection_descriptor_database.pyi (100%) rename stubs/{grpcio => grpcio-reflection}/grpc_reflection/v1alpha/reflection.pyi (100%) rename stubs/{grpcio => grpcio-reflection}/grpc_reflection/v1alpha/reflection_pb2.pyi (100%) rename stubs/{grpcio => grpcio-reflection}/grpc_reflection/v1alpha/reflection_pb2_grpc.pyi (100%) create mode 100644 stubs/grpcio-status/METADATA.toml rename stubs/{grpcio => grpcio-status}/grpc_status/__init__.pyi (100%) rename stubs/{grpcio => grpcio-status}/grpc_status/_async.pyi (100%) rename stubs/{grpcio => grpcio-status}/grpc_status/rpc_status.pyi (100%) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 08ff5a63e091..74196972b4ee 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -44,11 +44,11 @@ "stubs/gdb", "stubs/geopandas", "stubs/google-cloud-ndb", + "stubs/grpcio-channelz/grpc_channelz/v1", + "stubs/grpcio-health-checking/grpc_health/v1/health_pb2_grpc.pyi", + "stubs/grpcio-reflection/grpc_reflection/v1alpha", + "stubs/grpcio-status/grpc_status", "stubs/grpcio/grpc/__init__.pyi", - "stubs/grpcio/grpc_channelz/v1", - "stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi", - "stubs/grpcio/grpc_reflection/v1alpha", - "stubs/grpcio/grpc_status", "stubs/hdbcli/hdbcli/dbapi.pyi", "stubs/html5lib", "stubs/httplib2", diff --git a/stubs/grpcio-channelz/METADATA.toml b/stubs/grpcio-channelz/METADATA.toml new file mode 100644 index 000000000000..bac12ed6b8d8 --- /dev/null +++ b/stubs/grpcio-channelz/METADATA.toml @@ -0,0 +1,3 @@ +version = "1.*" +upstream_repository = "https://github.com/grpc/grpc" +requires = ["types-grpcio", "types-protobuf"] diff --git a/stubs/grpcio/grpc_channelz/__init__.pyi b/stubs/grpcio-channelz/grpc_channelz/__init__.pyi similarity index 100% rename from stubs/grpcio/grpc_channelz/__init__.pyi rename to stubs/grpcio-channelz/grpc_channelz/__init__.pyi diff --git a/stubs/grpcio/grpc_channelz/v1/__init__.pyi 
b/stubs/grpcio-channelz/grpc_channelz/v1/__init__.pyi similarity index 100% rename from stubs/grpcio/grpc_channelz/v1/__init__.pyi rename to stubs/grpcio-channelz/grpc_channelz/v1/__init__.pyi diff --git a/stubs/grpcio/grpc_channelz/v1/_async.pyi b/stubs/grpcio-channelz/grpc_channelz/v1/_async.pyi similarity index 100% rename from stubs/grpcio/grpc_channelz/v1/_async.pyi rename to stubs/grpcio-channelz/grpc_channelz/v1/_async.pyi diff --git a/stubs/grpcio/grpc_channelz/v1/_servicer.pyi b/stubs/grpcio-channelz/grpc_channelz/v1/_servicer.pyi similarity index 100% rename from stubs/grpcio/grpc_channelz/v1/_servicer.pyi rename to stubs/grpcio-channelz/grpc_channelz/v1/_servicer.pyi diff --git a/stubs/grpcio/grpc_channelz/v1/channelz.pyi b/stubs/grpcio-channelz/grpc_channelz/v1/channelz.pyi similarity index 100% rename from stubs/grpcio/grpc_channelz/v1/channelz.pyi rename to stubs/grpcio-channelz/grpc_channelz/v1/channelz.pyi diff --git a/stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi b/stubs/grpcio-channelz/grpc_channelz/v1/channelz_pb2.pyi similarity index 100% rename from stubs/grpcio/grpc_channelz/v1/channelz_pb2.pyi rename to stubs/grpcio-channelz/grpc_channelz/v1/channelz_pb2.pyi diff --git a/stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi b/stubs/grpcio-channelz/grpc_channelz/v1/channelz_pb2_grpc.pyi similarity index 100% rename from stubs/grpcio/grpc_channelz/v1/channelz_pb2_grpc.pyi rename to stubs/grpcio-channelz/grpc_channelz/v1/channelz_pb2_grpc.pyi diff --git a/stubs/grpcio-health-checking/METADATA.toml b/stubs/grpcio-health-checking/METADATA.toml new file mode 100644 index 000000000000..bac12ed6b8d8 --- /dev/null +++ b/stubs/grpcio-health-checking/METADATA.toml @@ -0,0 +1,3 @@ +version = "1.*" +upstream_repository = "https://github.com/grpc/grpc" +requires = ["types-grpcio", "types-protobuf"] diff --git a/stubs/grpcio/grpc_health/__init__.pyi b/stubs/grpcio-health-checking/grpc_health/__init__.pyi similarity index 100% rename from stubs/grpcio/grpc_health/__init__.pyi rename to stubs/grpcio-health-checking/grpc_health/__init__.pyi diff --git a/stubs/grpcio/grpc_health/v1/__init__.pyi b/stubs/grpcio-health-checking/grpc_health/v1/__init__.pyi similarity index 100% rename from stubs/grpcio/grpc_health/v1/__init__.pyi rename to stubs/grpcio-health-checking/grpc_health/v1/__init__.pyi diff --git a/stubs/grpcio/grpc_health/v1/health.pyi b/stubs/grpcio-health-checking/grpc_health/v1/health.pyi similarity index 100% rename from stubs/grpcio/grpc_health/v1/health.pyi rename to stubs/grpcio-health-checking/grpc_health/v1/health.pyi diff --git a/stubs/grpcio/grpc_health/v1/health_pb2.pyi b/stubs/grpcio-health-checking/grpc_health/v1/health_pb2.pyi similarity index 100% rename from stubs/grpcio/grpc_health/v1/health_pb2.pyi rename to stubs/grpcio-health-checking/grpc_health/v1/health_pb2.pyi diff --git a/stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi b/stubs/grpcio-health-checking/grpc_health/v1/health_pb2_grpc.pyi similarity index 100% rename from stubs/grpcio/grpc_health/v1/health_pb2_grpc.pyi rename to stubs/grpcio-health-checking/grpc_health/v1/health_pb2_grpc.pyi diff --git a/stubs/grpcio/@tests/test_cases/check_reflection.py b/stubs/grpcio-reflection/@tests/test_cases/check_reflection.py similarity index 100% rename from stubs/grpcio/@tests/test_cases/check_reflection.py rename to stubs/grpcio-reflection/@tests/test_cases/check_reflection.py diff --git a/stubs/grpcio/@tests/test_cases/check_reflection_aio.py b/stubs/grpcio-reflection/@tests/test_cases/check_reflection_aio.py 
similarity index 100% rename from stubs/grpcio/@tests/test_cases/check_reflection_aio.py rename to stubs/grpcio-reflection/@tests/test_cases/check_reflection_aio.py diff --git a/stubs/grpcio-reflection/METADATA.toml b/stubs/grpcio-reflection/METADATA.toml new file mode 100644 index 000000000000..bac12ed6b8d8 --- /dev/null +++ b/stubs/grpcio-reflection/METADATA.toml @@ -0,0 +1,3 @@ +version = "1.*" +upstream_repository = "https://github.com/grpc/grpc" +requires = ["types-grpcio", "types-protobuf"] diff --git a/stubs/grpcio/grpc_reflection/__init__.pyi b/stubs/grpcio-reflection/grpc_reflection/__init__.pyi similarity index 100% rename from stubs/grpcio/grpc_reflection/__init__.pyi rename to stubs/grpcio-reflection/grpc_reflection/__init__.pyi diff --git a/stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi b/stubs/grpcio-reflection/grpc_reflection/v1alpha/__init__.pyi similarity index 100% rename from stubs/grpcio/grpc_reflection/v1alpha/__init__.pyi rename to stubs/grpcio-reflection/grpc_reflection/v1alpha/__init__.pyi diff --git a/stubs/grpcio/grpc_reflection/v1alpha/_async.pyi b/stubs/grpcio-reflection/grpc_reflection/v1alpha/_async.pyi similarity index 100% rename from stubs/grpcio/grpc_reflection/v1alpha/_async.pyi rename to stubs/grpcio-reflection/grpc_reflection/v1alpha/_async.pyi diff --git a/stubs/grpcio/grpc_reflection/v1alpha/_base.pyi b/stubs/grpcio-reflection/grpc_reflection/v1alpha/_base.pyi similarity index 100% rename from stubs/grpcio/grpc_reflection/v1alpha/_base.pyi rename to stubs/grpcio-reflection/grpc_reflection/v1alpha/_base.pyi diff --git a/stubs/grpcio/grpc_reflection/v1alpha/proto_reflection_descriptor_database.pyi b/stubs/grpcio-reflection/grpc_reflection/v1alpha/proto_reflection_descriptor_database.pyi similarity index 100% rename from stubs/grpcio/grpc_reflection/v1alpha/proto_reflection_descriptor_database.pyi rename to stubs/grpcio-reflection/grpc_reflection/v1alpha/proto_reflection_descriptor_database.pyi diff --git a/stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi b/stubs/grpcio-reflection/grpc_reflection/v1alpha/reflection.pyi similarity index 100% rename from stubs/grpcio/grpc_reflection/v1alpha/reflection.pyi rename to stubs/grpcio-reflection/grpc_reflection/v1alpha/reflection.pyi diff --git a/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi b/stubs/grpcio-reflection/grpc_reflection/v1alpha/reflection_pb2.pyi similarity index 100% rename from stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2.pyi rename to stubs/grpcio-reflection/grpc_reflection/v1alpha/reflection_pb2.pyi diff --git a/stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2_grpc.pyi b/stubs/grpcio-reflection/grpc_reflection/v1alpha/reflection_pb2_grpc.pyi similarity index 100% rename from stubs/grpcio/grpc_reflection/v1alpha/reflection_pb2_grpc.pyi rename to stubs/grpcio-reflection/grpc_reflection/v1alpha/reflection_pb2_grpc.pyi diff --git a/stubs/grpcio-status/METADATA.toml b/stubs/grpcio-status/METADATA.toml new file mode 100644 index 000000000000..f70c0410a6a0 --- /dev/null +++ b/stubs/grpcio-status/METADATA.toml @@ -0,0 +1,3 @@ +version = "1.*" +upstream_repository = "https://github.com/grpc/grpc" +requires = ["types-grpcio"] diff --git a/stubs/grpcio/grpc_status/__init__.pyi b/stubs/grpcio-status/grpc_status/__init__.pyi similarity index 100% rename from stubs/grpcio/grpc_status/__init__.pyi rename to stubs/grpcio-status/grpc_status/__init__.pyi diff --git a/stubs/grpcio/grpc_status/_async.pyi b/stubs/grpcio-status/grpc_status/_async.pyi similarity index 100% rename from 
stubs/grpcio/grpc_status/_async.pyi rename to stubs/grpcio-status/grpc_status/_async.pyi diff --git a/stubs/grpcio/grpc_status/rpc_status.pyi b/stubs/grpcio-status/grpc_status/rpc_status.pyi similarity index 100% rename from stubs/grpcio/grpc_status/rpc_status.pyi rename to stubs/grpcio-status/grpc_status/rpc_status.pyi diff --git a/stubs/grpcio/METADATA.toml b/stubs/grpcio/METADATA.toml index 9c94f65fb2ef..b81a42967303 100644 --- a/stubs/grpcio/METADATA.toml +++ b/stubs/grpcio/METADATA.toml @@ -1,15 +1,6 @@ version = "1.*" upstream_repository = "https://github.com/grpc/grpc" partial_stub = true -requires = [ - "types-protobuf", -] [tool.stubtest] ignore_missing_stub = true -stubtest_requirements = [ - "grpcio-channelz", - "grpcio-health-checking", - "grpcio-reflection", - "grpcio-status", -] From 3ba8907fda3200be3b20975688b21d4728d7611e Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Mon, 5 May 2025 17:59:51 +0200 Subject: [PATCH 293/388] Clean up and fix email message types (#13532) * Unify the `_MessageT` type var in `email._policybase`. * Use explicit type arguments for `Message` type in `_MessageT` type var. In particular, change bound from `Message[str, str]` to `Message[Any, Any]`. * Change `__init__()` overloads of `Parser` and `BytesParser` to accept `Message` objects that are not `Message[str, str]` if `_class` is not also given. --- stdlib/@tests/test_cases/email/check_parser.py | 16 ++++++++++++++++ stdlib/email/__init__.pyi | 3 ++- stdlib/email/_policybase.pyi | 6 +++--- stdlib/email/feedparser.pyi | 5 ++--- stdlib/email/generator.pyi | 2 +- stdlib/email/mime/message.pyi | 3 ++- stdlib/email/mime/multipart.pyi | 3 ++- stdlib/email/parser.pyi | 17 +++++++++-------- stdlib/email/policy.pyi | 8 +++----- stdlib/http/client.pyi | 2 +- stdlib/mailbox.pyi | 2 +- 11 files changed, 42 insertions(+), 25 deletions(-) create mode 100644 stdlib/@tests/test_cases/email/check_parser.py diff --git a/stdlib/@tests/test_cases/email/check_parser.py b/stdlib/@tests/test_cases/email/check_parser.py new file mode 100644 index 000000000000..fd5c24a9f6de --- /dev/null +++ b/stdlib/@tests/test_cases/email/check_parser.py @@ -0,0 +1,16 @@ +import email.policy +from email.message import EmailMessage, Message +from email.parser import BytesParser, Parser +from typing_extensions import assert_type + +p1 = Parser() +p2 = Parser(policy=email.policy.default) + +assert_type(p1, Parser[Message[str, str]]) +assert_type(p2, Parser[EmailMessage]) + +bp1 = BytesParser() +bp2 = BytesParser(policy=email.policy.default) + +assert_type(bp1, BytesParser[Message[str, str]]) +assert_type(bp2, BytesParser[EmailMessage]) diff --git a/stdlib/email/__init__.pyi b/stdlib/email/__init__.pyi index 628ffb2b793a..53f8c350b01e 100644 --- a/stdlib/email/__init__.pyi +++ b/stdlib/email/__init__.pyi @@ -1,6 +1,7 @@ from collections.abc import Callable +from email._policybase import _MessageT from email.message import Message -from email.policy import Policy, _MessageT +from email.policy import Policy from typing import IO, overload from typing_extensions import TypeAlias diff --git a/stdlib/email/_policybase.pyi b/stdlib/email/_policybase.pyi index 5266609e597f..b345c84a95cb 100644 --- a/stdlib/email/_policybase.pyi +++ b/stdlib/email/_policybase.pyi @@ -2,12 +2,12 @@ from abc import ABCMeta, abstractmethod from email.errors import MessageDefect from email.header import Header from email.message import Message -from typing import Generic, Protocol, TypeVar, type_check_only +from typing import Any, Generic, Protocol, TypeVar, 
type_check_only from typing_extensions import Self __all__ = ["Policy", "Compat32", "compat32"] -_MessageT = TypeVar("_MessageT", bound=Message, default=Message) +_MessageT = TypeVar("_MessageT", bound=Message[Any, Any], default=Message[str, str]) @type_check_only class _MessageFactory(Protocol[_MessageT]): @@ -74,4 +74,4 @@ class Compat32(Policy[_MessageT]): def fold(self, name: str, value: str) -> str: ... def fold_binary(self, name: str, value: str) -> bytes: ... -compat32: Compat32[Message] +compat32: Compat32[Message[str, str]] diff --git a/stdlib/email/feedparser.pyi b/stdlib/email/feedparser.pyi index 8c268ca1ae18..d9279e9cd996 100644 --- a/stdlib/email/feedparser.pyi +++ b/stdlib/email/feedparser.pyi @@ -1,12 +1,11 @@ from collections.abc import Callable +from email._policybase import _MessageT from email.message import Message from email.policy import Policy -from typing import Generic, TypeVar, overload +from typing import Generic, overload __all__ = ["FeedParser", "BytesFeedParser"] -_MessageT = TypeVar("_MessageT", bound=Message, default=Message) - class FeedParser(Generic[_MessageT]): @overload def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: ... diff --git a/stdlib/email/generator.pyi b/stdlib/email/generator.pyi index dfa0604a20a9..d30e686299fa 100644 --- a/stdlib/email/generator.pyi +++ b/stdlib/email/generator.pyi @@ -7,7 +7,7 @@ from typing_extensions import Self __all__ = ["Generator", "DecodedGenerator", "BytesGenerator"] # By default, generators do not have a message policy. -_MessageT = TypeVar("_MessageT", bound=Message, default=Any) +_MessageT = TypeVar("_MessageT", bound=Message[Any, Any], default=Any) class Generator(Generic[_MessageT]): maxheaderlen: int | None diff --git a/stdlib/email/mime/message.pyi b/stdlib/email/mime/message.pyi index 2a5f46296150..a1e370e2eab5 100644 --- a/stdlib/email/mime/message.pyi +++ b/stdlib/email/mime/message.pyi @@ -1,5 +1,6 @@ +from email._policybase import _MessageT from email.mime.nonmultipart import MIMENonMultipart -from email.policy import Policy, _MessageT +from email.policy import Policy __all__ = ["MIMEMessage"] diff --git a/stdlib/email/mime/multipart.pyi b/stdlib/email/mime/multipart.pyi index 1c229f7436a8..fb9599edbcb8 100644 --- a/stdlib/email/mime/multipart.pyi +++ b/stdlib/email/mime/multipart.pyi @@ -1,7 +1,8 @@ from collections.abc import Sequence from email import _ParamsType +from email._policybase import _MessageT from email.mime.base import MIMEBase -from email.policy import Policy, _MessageT +from email.policy import Policy __all__ = ["MIMEMultipart"] diff --git a/stdlib/email/parser.pyi b/stdlib/email/parser.pyi index a1a57b4eef4b..a4924a6cbd88 100644 --- a/stdlib/email/parser.pyi +++ b/stdlib/email/parser.pyi @@ -1,20 +1,21 @@ from _typeshed import SupportsRead from collections.abc import Callable +from email._policybase import _MessageT from email.feedparser import BytesFeedParser as BytesFeedParser, FeedParser as FeedParser from email.message import Message from email.policy import Policy from io import _WrappedBuffer -from typing import Generic, TypeVar, overload +from typing import Generic, overload __all__ = ["Parser", "HeaderParser", "BytesParser", "BytesHeaderParser", "FeedParser", "BytesFeedParser"] -_MessageT = TypeVar("_MessageT", bound=Message, default=Message) - class Parser(Generic[_MessageT]): @overload - def __init__(self: Parser[Message[str, str]], _class: None = None, *, policy: Policy[Message[str, str]] = ...) -> None: ... 
+ def __init__(self: Parser[Message[str, str]], _class: None = None) -> None: ... @overload - def __init__(self, _class: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... + def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... + @overload + def __init__(self, _class: Callable[[], _MessageT] | None, *, policy: Policy[_MessageT] = ...) -> None: ... def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> _MessageT: ... def parsestr(self, text: str, headersonly: bool = False) -> _MessageT: ... @@ -25,9 +26,9 @@ class HeaderParser(Parser[_MessageT]): class BytesParser(Generic[_MessageT]): parser: Parser[_MessageT] @overload - def __init__( - self: BytesParser[Message[str, str]], _class: None = None, *, policy: Policy[Message[str, str]] = ... - ) -> None: ... + def __init__(self: BytesParser[Message[str, str]], _class: None = None) -> None: ... + @overload + def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... @overload def __init__(self, _class: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... def parse(self, fp: _WrappedBuffer, headersonly: bool = False) -> _MessageT: ... diff --git a/stdlib/email/policy.pyi b/stdlib/email/policy.pyi index 5005483edf86..35c999919eed 100644 --- a/stdlib/email/policy.pyi +++ b/stdlib/email/policy.pyi @@ -1,14 +1,12 @@ from collections.abc import Callable -from email._policybase import Compat32 as Compat32, Policy as Policy, _MessageFactory, compat32 as compat32 +from email._policybase import Compat32 as Compat32, Policy as Policy, _MessageFactory, _MessageT, compat32 as compat32 from email.contentmanager import ContentManager -from email.message import EmailMessage, Message -from typing import Any, TypeVar, overload +from email.message import EmailMessage +from typing import Any, overload from typing_extensions import Self __all__ = ["Compat32", "compat32", "Policy", "EmailPolicy", "default", "strict", "SMTP", "HTTP"] -_MessageT = TypeVar("_MessageT", bound=Message, default=Message) - class EmailPolicy(Policy[_MessageT]): utf8: bool refold_source: str diff --git a/stdlib/http/client.pyi b/stdlib/http/client.pyi index 9e0f61598cb8..5c35dff28d43 100644 --- a/stdlib/http/client.pyi +++ b/stdlib/http/client.pyi @@ -5,6 +5,7 @@ import sys import types from _typeshed import MaybeNone, ReadableBuffer, SupportsRead, SupportsReadline, WriteableBuffer from collections.abc import Callable, Iterable, Iterator, Mapping +from email._policybase import _MessageT from socket import socket from typing import BinaryIO, Literal, TypeVar, overload from typing_extensions import Self, TypeAlias @@ -33,7 +34,6 @@ __all__ = [ _DataType: TypeAlias = SupportsRead[bytes] | Iterable[ReadableBuffer] | ReadableBuffer _T = TypeVar("_T") -_MessageT = TypeVar("_MessageT", bound=email.message.Message) _HeaderValue: TypeAlias = ReadableBuffer | str | int HTTP_PORT: int diff --git a/stdlib/mailbox.pyi b/stdlib/mailbox.pyi index dc2fbd593d67..ff605c0661fb 100644 --- a/stdlib/mailbox.pyi +++ b/stdlib/mailbox.pyi @@ -4,6 +4,7 @@ import sys from _typeshed import StrPath, SupportsNoArgReadline, SupportsRead from abc import ABCMeta, abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from email._policybase import _MessageT from types import GenericAlias, TracebackType from typing import IO, Any, AnyStr, Generic, Literal, Protocol, TypeVar, overload from typing_extensions import Self, TypeAlias @@ -29,7 +30,6 @@ __all__ = [ ] _T = TypeVar("_T") 
-_MessageT = TypeVar("_MessageT", bound=Message) class _SupportsReadAndReadline(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... From 74028d79e4c2d48d776829c0f2cf2249adc59b24 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 5 May 2025 12:59:43 -0400 Subject: [PATCH 294/388] Enable Ruff flake8-use-pathlib (PTH) (#13795) Port existing code to pathlib --- lib/ts_utils/paths.py | 1 + lib/ts_utils/utils.py | 7 +- pyproject.toml | 6 +- scripts/create_baseline_stubs.py | 44 ++++++------ scripts/sync_protobuf/_utils.py | 7 +- scripts/sync_protobuf/google_protobuf.py | 4 +- scripts/sync_protobuf/tensorflow.py | 9 +-- tests/check_typeshed_structure.py | 11 ++- tests/mypy_test.py | 15 ++-- tests/pytype_test.py | 87 +++++++++++------------- tests/runtests.py | 25 +++---- 11 files changed, 102 insertions(+), 114 deletions(-) diff --git a/lib/ts_utils/paths.py b/lib/ts_utils/paths.py index 2894aa24b2d7..568a091a5ae6 100644 --- a/lib/ts_utils/paths.py +++ b/lib/ts_utils/paths.py @@ -11,6 +11,7 @@ PYPROJECT_PATH: Final = TS_BASE_PATH / "pyproject.toml" REQUIREMENTS_PATH: Final = TS_BASE_PATH / "requirements-tests.txt" +GITIGNORE_PATH: Final = TS_BASE_PATH / ".gitignore" TESTS_DIR: Final = "@tests" TEST_CASES_DIR: Final = "test_cases" diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py index fba574d7557f..11d47acdfaff 100644 --- a/lib/ts_utils/utils.py +++ b/lib/ts_utils/utils.py @@ -3,7 +3,6 @@ from __future__ import annotations import functools -import os import re import sys import tempfile @@ -16,7 +15,7 @@ import pathspec from packaging.requirements import Requirement -from .paths import REQUIREMENTS_PATH, STDLIB_PATH, STUBS_PATH, TEST_CASES_DIR, allowlists_path, test_cases_path +from .paths import GITIGNORE_PATH, REQUIREMENTS_PATH, STDLIB_PATH, STUBS_PATH, TEST_CASES_DIR, allowlists_path, test_cases_path if TYPE_CHECKING: from _typeshed import OpenTextMode @@ -215,7 +214,7 @@ def allowlists(distribution_name: str) -> list[str]: def NamedTemporaryFile(mode: OpenTextMode) -> TemporaryFileWrapper[str]: # noqa: N802 def close(self: TemporaryFileWrapper[str]) -> None: TemporaryFileWrapper.close(self) # pyright: ignore[reportUnknownMemberType] - os.remove(self.name) + Path(self.name).unlink() temp = tempfile.NamedTemporaryFile(mode, delete=False) # noqa: SIM115, TID251 temp.close = MethodType(close, temp) # type: ignore[method-assign] @@ -229,7 +228,7 @@ def close(self: TemporaryFileWrapper[str]) -> None: @functools.cache def get_gitignore_spec() -> pathspec.PathSpec: - with open(".gitignore", encoding="UTF-8") as f: + with GITIGNORE_PATH.open(encoding="UTF-8") as f: return pathspec.GitIgnoreSpec.from_lines(f) diff --git a/pyproject.toml b/pyproject.toml index 0c443d8d9639..e1c07d518c89 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,6 +54,7 @@ select = [ "PGH", # pygrep-hooks "PIE", # flake8-pie "PL", # Pylint + "PTH", # flake8-use-pathlib "RSE", # flake8-raise "RUF", # Ruff-specific and unused-noqa "SLOT", # flake8-slots @@ -80,9 +81,6 @@ select = [ "FURB187", # Use of assignment of `reversed` on list `{name}` # Used for lint.flake8-import-conventions.aliases "ICN001", # `{name}` should be imported as `{asname}` - # Autofixable flake8-use-pathlib only - "PTH201", # Do not pass the current directory explicitly to `Path` - "PTH210", # Invalid suffix passed to `.with_suffix()` # PYI: only enable rules that have autofixes and that we always want to fix (even manually), # avoids duplicate # noqa with flake8-pyi "PYI009", # Empty body should contain `...`, not pass @@ -167,6 
+165,8 @@ ignore = [ "PLR2004", # Magic value used in comparison, consider replacing `{value}` with a constant variable # Keep codeflow path separation explicit "PLR5501", # Use `elif` instead of `else` then `if`, to reduce indentation + # Often just leads to redundant more verbose code when needing an actual str + "PTH208", # Use `pathlib.Path.iterdir()` instead. # Allow FIXME "TD001", # Invalid TODO tag: `{tag}` # Git blame is sufficient diff --git a/scripts/create_baseline_stubs.py b/scripts/create_baseline_stubs.py index 223ef89cfcb2..db4fd9f3bf08 100755 --- a/scripts/create_baseline_stubs.py +++ b/scripts/create_baseline_stubs.py @@ -12,19 +12,20 @@ import argparse import asyncio -import glob -import os.path import re import subprocess import sys import urllib.parse from http import HTTPStatus from importlib.metadata import distribution +from pathlib import Path import aiohttp import termcolor -PYRIGHT_CONFIG = "pyrightconfig.stricter.json" +from ts_utils.paths import STDLIB_PATH, STUBS_PATH + +PYRIGHT_CONFIG = Path("pyrightconfig.stricter.json") def search_pip_freeze_output(project: str, output: str) -> tuple[str, str] | None: @@ -52,22 +53,22 @@ def get_installed_package_info(project: str) -> tuple[str, str] | None: return search_pip_freeze_output(project, r.stdout) -def run_stubgen(package: str, output: str) -> None: +def run_stubgen(package: str, output: Path) -> None: print(f"Running stubgen: stubgen -o {output} -p {package}") subprocess.run(["stubgen", "-o", output, "-p", package, "--export-less"], check=True) -def run_stubdefaulter(stub_dir: str) -> None: +def run_stubdefaulter(stub_dir: Path) -> None: print(f"Running stubdefaulter: stubdefaulter --packages {stub_dir}") subprocess.run(["stubdefaulter", "--packages", stub_dir], check=False) -def run_black(stub_dir: str) -> None: +def run_black(stub_dir: Path) -> None: print(f"Running Black: black {stub_dir}") - subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")], check=False) + subprocess.run(["pre-commit", "run", "black", "--files", *stub_dir.rglob("*.pyi")], check=False) -def run_ruff(stub_dir: str) -> None: +def run_ruff(stub_dir: Path) -> None: print(f"Running Ruff: ruff check {stub_dir} --fix-only") subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"], check=False) @@ -115,14 +116,14 @@ async def get_upstream_repo_url(project: str) -> str | None: return None -def create_metadata(project: str, stub_dir: str, version: str) -> None: +def create_metadata(project: str, stub_dir: Path, version: str) -> None: """Create a METADATA.toml file.""" match = re.match(r"[0-9]+.[0-9]+", version) if match is None: sys.exit(f"Error: Cannot parse version number: {version}") - filename = os.path.join(stub_dir, "METADATA.toml") + filename = stub_dir / "METADATA.toml" version = match.group(0) - if os.path.exists(filename): + if filename.exists(): return metadata = f'version = "{version}.*"\n' upstream_repo_url = asyncio.run(get_upstream_repo_url(project)) @@ -135,13 +136,12 @@ def create_metadata(project: str, stub_dir: str, version: str) -> None: else: metadata += f'upstream_repository = "{upstream_repo_url}"\n' print(f"Writing {filename}") - with open(filename, "w", encoding="UTF-8") as file: - file.write(metadata) + filename.write_text(metadata, encoding="UTF-8") -def add_pyright_exclusion(stub_dir: str) -> None: +def add_pyright_exclusion(stub_dir: Path) -> None: """Exclude stub_dir from strict pyright checks.""" - with open(PYRIGHT_CONFIG, encoding="UTF-8") as f: + with 
PYRIGHT_CONFIG.open(encoding="UTF-8") as f: lines = f.readlines() i = 0 while i < len(lines) and not lines[i].strip().startswith('"exclude": ['): @@ -167,7 +167,7 @@ def add_pyright_exclusion(stub_dir: str) -> None: third_party_excludes[-1] = last_line + "\n" # Must use forward slash in the .json file - line_to_add = f' "{stub_dir}",\n'.replace("\\", "/") + line_to_add = f' "{stub_dir.as_posix()}",\n' if line_to_add in third_party_excludes: print(f"{PYRIGHT_CONFIG} already up-to-date") @@ -177,7 +177,7 @@ def add_pyright_exclusion(stub_dir: str) -> None: third_party_excludes.sort(key=str.lower) print(f"Updating {PYRIGHT_CONFIG}") - with open(PYRIGHT_CONFIG, "w", encoding="UTF-8") as f: + with PYRIGHT_CONFIG.open("w", encoding="UTF-8") as f: f.writelines(before_third_party_excludes) f.writelines(third_party_excludes) f.writelines(after_third_party_excludes) @@ -194,7 +194,7 @@ def main() -> None: parser.add_argument("--package", help="generate stubs for this Python package (default is autodetected)") args = parser.parse_args() project = args.project - package = args.package + package: str = args.package if not re.match(r"[a-zA-Z0-9-_.]+$", project): sys.exit(f"Invalid character in project name: {project!r}") @@ -214,7 +214,7 @@ def main() -> None: print(f'Using detected package "{package}" for project "{project}"', file=sys.stderr) print("Suggestion: Try again with --package argument if that's not what you wanted", file=sys.stderr) - if not os.path.isdir("stubs") or not os.path.isdir("stdlib"): + if not STUBS_PATH.is_dir() or not STDLIB_PATH.is_dir(): sys.exit("Error: Current working directory must be the root of typeshed repository") # Get normalized project name and version of installed package. @@ -226,9 +226,9 @@ def main() -> None: sys.exit(1) project, version = info - stub_dir = os.path.join("stubs", project) - package_dir = os.path.join(stub_dir, package) - if os.path.exists(package_dir): + stub_dir = STUBS_PATH / project + package_dir = stub_dir / package + if package_dir.exists(): sys.exit(f"Error: {package_dir} already exists (delete it first)") run_stubgen(package, stub_dir) diff --git a/scripts/sync_protobuf/_utils.py b/scripts/sync_protobuf/_utils.py index 2cab826f571a..f22bd10fb353 100644 --- a/scripts/sync_protobuf/_utils.py +++ b/scripts/sync_protobuf/_utils.py @@ -4,6 +4,7 @@ import sys from collections.abc import Iterable from http.client import HTTPResponse +from pathlib import Path from typing import TYPE_CHECKING from urllib.request import urlopen from zipfile import ZipFile @@ -18,11 +19,11 @@ MYPY_PROTOBUF_VERSION = mypy_protobuf__version__ -def download_file(url: str, destination: StrPath) -> None: +def download_file(url: str, destination: Path) -> None: print(f"Downloading '{url}' to '{destination}'") resp: HTTPResponse - with urlopen(url) as resp, open(destination, "wb") as file: - file.write(resp.read()) + with urlopen(url) as resp: + destination.write_bytes(resp.read()) def extract_archive(archive_path: StrPath, destination: StrPath) -> None: diff --git a/scripts/sync_protobuf/google_protobuf.py b/scripts/sync_protobuf/google_protobuf.py index e36373cff108..358e5c454e44 100755 --- a/scripts/sync_protobuf/google_protobuf.py +++ b/scripts/sync_protobuf/google_protobuf.py @@ -33,7 +33,7 @@ def extract_python_version(file_path: Path) -> str: """Extract the Python version from https://github.com/protocolbuffers/protobuf/blob/main/version.json .""" - with open(file_path) as file: + with file_path.open() as file: data: dict[str, Any] = json.load(file) # The root key 
will be the protobuf source code version version = next(iter(data.values()))["languages"]["python"] @@ -47,7 +47,7 @@ def extract_proto_file_paths(temp_dir: Path) -> list[str]: as described in py_proto_library calls in https://github.com/protocolbuffers/protobuf/blob/main/python/dist/BUILD.bazel . """ - with open(temp_dir / EXTRACTED_PACKAGE_DIR / "python" / "dist" / "BUILD.bazel") as file: + with (temp_dir / EXTRACTED_PACKAGE_DIR / "python" / "dist" / "BUILD.bazel").open() as file: matched_lines = filter(None, (re.search(PROTO_FILE_PATTERN, line) for line in file)) proto_files = [ EXTRACTED_PACKAGE_DIR + "/src/google/protobuf/" + match.group(1).replace("compiler_", "compiler/") + ".proto" diff --git a/scripts/sync_protobuf/tensorflow.py b/scripts/sync_protobuf/tensorflow.py index 3c84980f1cd8..fcb53226636e 100755 --- a/scripts/sync_protobuf/tensorflow.py +++ b/scripts/sync_protobuf/tensorflow.py @@ -6,7 +6,6 @@ from __future__ import annotations -import os import re import shutil import subprocess @@ -72,21 +71,19 @@ def post_creation() -> None: for path in STUBS_FOLDER.rglob("*_pb2.pyi"): print(f"Fixing imports in '{path}'") - with open(path, encoding="utf-8") as file: - filedata = file.read() + filedata = path.read_text(encoding="utf-8") # Replace the target string filedata = re.sub(TSL_IMPORT_PATTERN, "\\1tensorflow.tsl.", filedata) filedata = re.sub(XLA_IMPORT_PATTERN, "\\1tensorflow.compiler.xla.", filedata) # Write the file out again - with open(path, "w", encoding="utf-8") as file: - file.write(filedata) + path.write_text(filedata, encoding="utf-8") print() for to_remove in PROTOS_TO_REMOVE: file_path = STUBS_FOLDER / "tensorflow" / to_remove - os.remove(file_path) + file_path.unlink() print(f"Removed '{file_path}'") diff --git a/tests/check_typeshed_structure.py b/tests/check_typeshed_structure.py index bcb02061e055..e64123fb54ba 100755 --- a/tests/check_typeshed_structure.py +++ b/tests/check_typeshed_structure.py @@ -113,11 +113,10 @@ def check_test_cases() -> None: def check_no_symlinks() -> None: """Check that there are no symlinks in the typeshed repository.""" - files = [os.path.join(root, file) for root, _, files in os.walk(".") for file in files] + files = [Path(root, file) for root, _, files in os.walk(".") for file in files] no_symlink = "You cannot use symlinks in typeshed, please copy {} to its link." 
for file in files: - _, ext = os.path.splitext(file) - if ext == ".pyi" and os.path.islink(file): + if file.suffix == ".pyi" and file.is_symlink(): raise ValueError(no_symlink.format(file)) @@ -141,18 +140,18 @@ def _find_stdlib_modules() -> set[str]: modules = set[str]() for path, _, files in os.walk(STDLIB_PATH): for filename in files: - base_module = ".".join(os.path.normpath(path).split(os.sep)[1:]) + base_module = ".".join(Path(path).parts[1:]) if filename == "__init__.pyi": modules.add(base_module) elif filename.endswith(".pyi"): - mod, _ = os.path.splitext(filename) + mod = filename[:-4] modules.add(f"{base_module}.{mod}" if base_module else mod) return modules def check_metadata() -> None: """Check that all METADATA.toml files are valid.""" - for distribution in os.listdir("stubs"): + for distribution in os.listdir(STUBS_PATH): # This function does various sanity checks for METADATA.toml files read_metadata(distribution) diff --git a/tests/mypy_test.py b/tests/mypy_test.py index 84c8fa1467a8..3607199431d4 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -150,6 +150,8 @@ def match(path: Path, args: TestConfig) -> bool: def add_files(files: list[Path], module: Path, args: TestConfig) -> None: """Add all files in package or module represented by 'name' located in 'root'.""" + if module.name.startswith("."): + return if module.is_file() and module.suffix == ".pyi": if match(module, args): files.append(module) @@ -244,10 +246,8 @@ def add_third_party_files(distribution: str, files: list[Path], args: TestConfig seen_dists.add(distribution) seen_dists.update(r.name for r in typeshed_reqs) root = distribution_path(distribution) - for name in os.listdir(root): - if name.startswith("."): - continue - add_files(files, (root / name), args) + for path in root.iterdir(): + add_files(files, path, args) class TestResult(NamedTuple): @@ -295,7 +295,7 @@ def test_third_party_distribution( def test_stdlib(args: TestConfig) -> TestResult: files: list[Path] = [] for file in STDLIB_PATH.iterdir(): - if file.name in ("VERSIONS", TESTS_DIR) or file.name.startswith("."): + if file.name in ("VERSIONS", TESTS_DIR): continue add_files(files, file, args) @@ -525,15 +525,14 @@ def test_third_party_stubs(args: TestConfig, tempdir: Path) -> TestSummary: def test_typeshed(args: TestConfig, tempdir: Path) -> TestSummary: print(f"*** Testing Python {args.version} on {args.platform}") - stdlib_dir, stubs_dir = Path("stdlib"), Path("stubs") summary = TestSummary() - if stdlib_dir in args.filter or any(stdlib_dir in path.parents for path in args.filter): + if STDLIB_PATH in args.filter or any(STDLIB_PATH in path.parents for path in args.filter): mypy_result, files_checked = test_stdlib(args) summary.register_result(mypy_result, files_checked) print() - if stubs_dir in args.filter or any(stubs_dir in path.parents for path in args.filter): + if STUBS_PATH in args.filter or any(STUBS_PATH in path.parents for path in args.filter): tp_results = test_third_party_stubs(args, tempdir) summary.merge(tp_results) print() diff --git a/tests/pytype_test.py b/tests/pytype_test.py index 71b313641517..3de443dfabc0 100755 --- a/tests/pytype_test.py +++ b/tests/pytype_test.py @@ -24,7 +24,6 @@ print("pytype does not support Python 3.13+ yet.", file=sys.stderr) sys.exit(1) - import argparse import importlib.metadata import inspect @@ -38,21 +37,22 @@ from pytype.imports import typeshed # type: ignore[import] from ts_utils.metadata import read_dependencies +from ts_utils.paths import STDLIB_PATH, STUBS_PATH, TS_BASE_PATH from 
ts_utils.utils import SupportedVersionsDict, parse_stdlib_versions_file, supported_versions_for_module -TYPESHED_SUBDIRS = ["stdlib", "stubs"] +TYPESHED_SUBDIRS = [STDLIB_PATH.absolute(), STUBS_PATH.absolute()] TYPESHED_HOME = "TYPESHED_HOME" +EXCLUDE_LIST = TS_BASE_PATH / "tests" / "pytype_exclude_list.txt" _LOADERS: dict[str, tuple[pytype_config.Options, load_pytd.Loader]] = {} def main() -> None: args = create_parser().parse_args() - typeshed_location = args.typeshed_location or os.getcwd() - subdir_paths = [os.path.join(typeshed_location, d) for d in TYPESHED_SUBDIRS] - check_subdirs_discoverable(subdir_paths) + typeshed_location = Path(args.typeshed_location) or Path.cwd() + check_subdirs_discoverable(TYPESHED_SUBDIRS) old_typeshed_home = os.environ.get(TYPESHED_HOME) - os.environ[TYPESHED_HOME] = typeshed_location - files_to_test = determine_files_to_test(paths=args.files or subdir_paths) + os.environ[TYPESHED_HOME] = str(typeshed_location) + files_to_test = determine_files_to_test(paths=[Path(file) for file in args.files] or TYPESHED_SUBDIRS) run_all_tests(files_to_test=files_to_test, print_stderr=args.print_stderr, dry_run=args.dry_run) if old_typeshed_home is None: del os.environ[TYPESHED_HOME] @@ -75,12 +75,12 @@ def create_parser() -> argparse.ArgumentParser: return parser -def run_pytype(*, filename: str, python_version: str, missing_modules: Iterable[str]) -> str | None: +def run_pytype(*, filename: StrPath, python_version: str, missing_modules: Iterable[str]) -> str | None: """Run pytype, returning the stderr if any.""" if python_version not in _LOADERS: options = pytype_config.Options.create("", parse_pyi=True, python_version=python_version) # For simplicity, pretends missing modules are part of the stdlib. - missing_modules = tuple(os.path.join("stdlib", m) for m in missing_modules) + missing_modules = tuple(str(STDLIB_PATH / m) for m in missing_modules) loader = load_pytd.create_loader(options, missing_modules) _LOADERS[python_version] = (options, loader) options, loader = _LOADERS[python_version] @@ -100,13 +100,13 @@ def _get_relative(filename: StrPath) -> Path: filepath = Path(filename) for d in TYPESHED_SUBDIRS: try: - return filepath.absolute().relative_to(Path(d).absolute().parent) + return filepath.absolute().relative_to(d.parent) except ValueError: continue raise ValueError(f"{filepath} not relative to {TYPESHED_SUBDIRS}") -def _get_module_name(filename: str) -> str: +def _get_module_name(filename: StrPath) -> str: """Convert a filename {subdir}/m.n/module/foo to module.foo.""" parts = _get_relative(filename).parts if parts[0] == "stdlib": @@ -117,13 +117,13 @@ def _get_module_name(filename: str) -> str: return ".".join(module_parts).replace(".pyi", "").replace(".__init__", "") -def check_subdirs_discoverable(subdir_paths: list[str]) -> None: +def check_subdirs_discoverable(subdir_paths: Iterable[Path]) -> None: for p in subdir_paths: - if not os.path.isdir(p): + if not p.is_dir(): raise SystemExit(f"Cannot find typeshed subdir at {p} (specify parent dir via --typeshed-location)") -def determine_files_to_test(*, paths: Sequence[str]) -> list[str]: +def determine_files_to_test(*, paths: Iterable[Path]) -> list[Path]: """Determine all files to test. Checks for files in the pytype exclude list and for the stdlib VERSIONS file. 
@@ -132,28 +132,25 @@ def determine_files_to_test(*, paths: Sequence[str]) -> list[str]: ts = typeshed.Typeshed() exclude_list = set(ts.read_blacklist()) stdlib_module_versions = parse_stdlib_versions_file() - files = [] - for f in sorted(filenames): - if _get_relative(f).as_posix() in exclude_list: - continue - if not _is_supported_stdlib_version(stdlib_module_versions, f): - continue - files.append(f) - return files + return [ + f + for f in sorted(filenames) + if _get_relative(f).as_posix() not in exclude_list and _is_supported_stdlib_version(stdlib_module_versions, f) + ] -def find_stubs_in_paths(paths: Sequence[str]) -> list[str]: - filenames: list[str] = [] +def find_stubs_in_paths(paths: Iterable[Path]) -> list[Path]: + filenames: list[Path] = [] for path in paths: - if os.path.isdir(path): + if path.is_dir(): for root, _, fns in os.walk(path): - filenames.extend(os.path.join(root, fn) for fn in fns if fn.endswith(".pyi")) + filenames.extend(Path(root, fn) for fn in fns if fn.endswith(".pyi")) else: filenames.append(path) return filenames -def _is_supported_stdlib_version(module_versions: SupportedVersionsDict, filename: str) -> bool: +def _is_supported_stdlib_version(module_versions: SupportedVersionsDict, filename: StrPath) -> bool: parts = _get_relative(filename).parts if parts[0] != "stdlib": return True @@ -181,7 +178,7 @@ def _get_pkgs_associated_with_requirement(req_name: str) -> list[str]: return sorted({package.removesuffix("-stubs") for package in packages}) -def get_missing_modules(files_to_test: Sequence[str]) -> Iterable[str]: +def get_missing_modules(files_to_test: Iterable[Path]) -> Iterable[str]: """Get names of modules that should be treated as missing. Some typeshed stubs depend on dependencies outside of typeshed. Since pytype @@ -191,37 +188,35 @@ def get_missing_modules(files_to_test: Sequence[str]) -> Iterable[str]: Similarly, pytype cannot parse files on its exclude list, so we also treat those as missing. """ - stub_distributions = set() + stub_distributions = set[str]() for fi in files_to_test: - parts = fi.split(os.sep) + parts = fi.parts try: idx = parts.index("stubs") except ValueError: continue stub_distributions.add(parts[idx + 1]) - missing_modules = set() - for distribution in stub_distributions: - for external_req in read_dependencies(distribution).external_pkgs: - associated_packages = _get_pkgs_associated_with_requirement(external_req.name) - missing_modules.update(associated_packages) - - test_dir = os.path.dirname(__file__) - exclude_list = os.path.join(test_dir, "pytype_exclude_list.txt") - with open(exclude_list) as f: - excluded_files = f.readlines() - for fi in excluded_files: - if not fi.startswith("stubs/"): + missing_modules = { + associated_package + for distribution in stub_distributions + for external_req in read_dependencies(distribution).external_pkgs + for associated_package in _get_pkgs_associated_with_requirement(external_req.name) + } + + with EXCLUDE_LIST.open() as f: + for line in f: + if not line.startswith("stubs/"): # Skips comments, empty lines, and stdlib files, which are in # the exclude list because pytype has its own version. 
continue - unused_stubs_prefix, unused_pkg, mod_path = fi.split("/", 2) # pyright: ignore[reportUnusedVariable] - missing_modules.add(os.path.splitext(mod_path)[0]) + _ts_subdir, _distribution, module_path = line.strip().split("/", 2) + missing_modules.add(module_path.removesuffix(".pyi")) return missing_modules -def run_all_tests(*, files_to_test: Sequence[str], print_stderr: bool, dry_run: bool) -> None: - bad = [] +def run_all_tests(*, files_to_test: Sequence[Path], print_stderr: bool, dry_run: bool) -> None: + bad: list[tuple[StrPath, str, str]] = [] errors = 0 total_tests = len(files_to_test) missing_modules = get_missing_modules(files_to_test) diff --git a/tests/runtests.py b/tests/runtests.py index e81fb848c7c4..5e1e9e96cc99 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -3,7 +3,6 @@ import argparse import json -import os import re import subprocess import sys @@ -13,8 +12,8 @@ from ts_utils.paths import TEST_CASES_DIR, test_cases_path from ts_utils.utils import colored -_STRICTER_CONFIG_FILE = "pyrightconfig.stricter.json" -_TESTCASES_CONFIG_FILE = "pyrightconfig.testcases.json" +_STRICTER_CONFIG_FILE = Path("pyrightconfig.stricter.json") +_TESTCASES_CONFIG_FILE = Path("pyrightconfig.testcases.json") _NPX_ERROR_PATTERN = r"error (runn|find)ing npx" _NPX_ERROR_MESSAGE = colored("\nSkipping Pyright tests: npx is not installed or can't be run!", "yellow") _SUCCESS = colored("Success", "green") @@ -33,10 +32,9 @@ def _parse_jsonc(json_text: str) -> str: return valid_json -def _get_strict_params(stub_path: str) -> list[str]: - with open(_STRICTER_CONFIG_FILE, encoding="UTF-8") as file: - data = json.loads(_parse_jsonc(file.read())) - lower_stub_path = stub_path.lower() +def _get_strict_params(stub_path: Path) -> list[str | Path]: + data = json.loads(_parse_jsonc(_STRICTER_CONFIG_FILE.read_text(encoding="UTF-8"))) + lower_stub_path = stub_path.as_posix().lower() if any(lower_stub_path == stub.lower() for stub in data["exclude"]): return [] return ["-p", _STRICTER_CONFIG_FILE] @@ -60,23 +58,22 @@ def main() -> None: ) parser.add_argument("path", help="Path of the stub to test in format /, from the root of the project.") args = parser.parse_args() - path: str = args.path + path = Path(args.path) run_stubtest: bool = args.run_stubtest python_version: str = args.python_version - path_tokens = Path(path).parts - if len(path_tokens) != 2: + if len(path.parts) != 2: parser.error("'path' argument should be in format /.") - folder, stub = path_tokens + folder, stub = path.parts if folder not in {"stdlib", "stubs"}: parser.error("Only the 'stdlib' and 'stubs' folders are supported.") - if not os.path.exists(path): + if not path.exists(): parser.error(f"{path=} does not exist.") stubtest_result: subprocess.CompletedProcess[bytes] | None = None pytype_result: subprocess.CompletedProcess[bytes] | None = None print("\nRunning pre-commit...") - pre_commit_result = subprocess.run(["pre-commit", "run", "--files", *Path(path).rglob("*")], check=False) + pre_commit_result = subprocess.run(["pre-commit", "run", "--files", *path.rglob("*")], check=False) print("\nRunning check_typeshed_structure.py...") check_structure_result = subprocess.run([sys.executable, "tests/check_typeshed_structure.py"], check=False) @@ -141,7 +138,7 @@ def main() -> None: regr_test_returncode = 0 else: print(f"\nRunning Pyright regression tests for Python {python_version}...") - command = [ + command: list[str | Path] = [ sys.executable, "tests/pyright_test.py", str(cases_path), From 
d0bef333fd00357a24ee69984b0adf6cea970632 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 6 May 2025 08:31:22 +0200 Subject: [PATCH 295/388] [stubsabot] Bump zstd to 1.5.7.0 (#13946) --- stubs/zstd/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/zstd/METADATA.toml b/stubs/zstd/METADATA.toml index 1013123647dd..37447439d079 100644 --- a/stubs/zstd/METADATA.toml +++ b/stubs/zstd/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.5.6.8" +version = "1.5.7.0" upstream_repository = "https://github.com/sergey-dryabzhinsky/python-zstd" From 0cceffc03150e209dfc8e3507c17ea77b13e2a1c Mon Sep 17 00:00:00 2001 From: Tatsh Date: Tue, 6 May 2025 02:36:34 -0400 Subject: [PATCH 296/388] Add binaryornot stubs (#13920) --- stubs/binaryornot/@tests/stubtest_allowlist.txt | 1 + stubs/binaryornot/METADATA.toml | 2 ++ stubs/binaryornot/binaryornot/__init__.pyi | 5 +++++ stubs/binaryornot/binaryornot/check.pyi | 3 +++ stubs/binaryornot/binaryornot/helpers.pyi | 5 +++++ 5 files changed, 16 insertions(+) create mode 100644 stubs/binaryornot/@tests/stubtest_allowlist.txt create mode 100644 stubs/binaryornot/METADATA.toml create mode 100644 stubs/binaryornot/binaryornot/__init__.pyi create mode 100644 stubs/binaryornot/binaryornot/check.pyi create mode 100644 stubs/binaryornot/binaryornot/helpers.pyi diff --git a/stubs/binaryornot/@tests/stubtest_allowlist.txt b/stubs/binaryornot/@tests/stubtest_allowlist.txt new file mode 100644 index 000000000000..742a3bb53e80 --- /dev/null +++ b/stubs/binaryornot/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +binaryornot\..+?\.logger diff --git a/stubs/binaryornot/METADATA.toml b/stubs/binaryornot/METADATA.toml new file mode 100644 index 000000000000..bc45896c1a04 --- /dev/null +++ b/stubs/binaryornot/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.4.*" +upstream_repository = "https://github.com/binaryornot/binaryornot" diff --git a/stubs/binaryornot/binaryornot/__init__.pyi b/stubs/binaryornot/binaryornot/__init__.pyi new file mode 100644 index 000000000000..4c304c608dbc --- /dev/null +++ b/stubs/binaryornot/binaryornot/__init__.pyi @@ -0,0 +1,5 @@ +from typing import Final + +__author__: Final[str] +__email__: Final[str] +__version__: Final[str] diff --git a/stubs/binaryornot/binaryornot/check.pyi b/stubs/binaryornot/binaryornot/check.pyi new file mode 100644 index 000000000000..8ebb58019b10 --- /dev/null +++ b/stubs/binaryornot/binaryornot/check.pyi @@ -0,0 +1,3 @@ +from _typeshed import StrOrBytesPath + +def is_binary(filename: StrOrBytesPath) -> bool: ... diff --git a/stubs/binaryornot/binaryornot/helpers.pyi b/stubs/binaryornot/binaryornot/helpers.pyi new file mode 100644 index 000000000000..4534c3ffe99c --- /dev/null +++ b/stubs/binaryornot/binaryornot/helpers.pyi @@ -0,0 +1,5 @@ +from _typeshed import StrOrBytesPath + +def print_as_hex(s: str) -> None: ... +def get_starting_chunk(filename: StrOrBytesPath, length: int = 1024) -> bytes: ... +def is_binary_string(bytes_to_check: bytes | bytearray) -> bool: ... 
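A minimal, illustrative type-checking sketch for the new binaryornot stubs, written in the style of a typeshed @tests/test_cases file. It is not part of the upstream patch; the file names are placeholders and the snippet is meant to be type-checked, not executed. The signatures used are exactly the ones added above (StrOrBytesPath inputs, bytes/bool results).

from pathlib import Path

from binaryornot.check import is_binary
from binaryornot.helpers import get_starting_chunk, is_binary_string
from typing_extensions import assert_type

# is_binary and get_starting_chunk accept _typeshed.StrOrBytesPath,
# so str, bytes and os.PathLike arguments all type-check.
assert_type(is_binary("notes.txt"), bool)
assert_type(is_binary(Path("archive.tar.gz")), bool)

# get_starting_chunk returns the raw bytes that is_binary_string inspects.
chunk = get_starting_chunk("notes.txt", length=1024)
assert_type(chunk, bytes)
assert_type(is_binary_string(chunk), bool)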
From 04b80188fb8599f842f670c1381efa06f410fdfb Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 6 May 2025 02:39:14 -0400 Subject: [PATCH 297/388] networkx: Most nodelist params are collections (#13945) --- .../algorithms/centrality/laplacian.pyi | 5 ++--- .../algorithms/link_analysis/pagerank_alg.pyi | 4 ++-- .../algorithms/shortest_paths/dense.pyi | 4 ++-- stubs/networkx/networkx/algorithms/triads.pyi | 4 ++-- stubs/networkx/networkx/convert.pyi | 10 ++++++---- stubs/networkx/networkx/drawing/nx_pylab.pyi | 7 ++++--- .../networkx/networkx/generators/community.pyi | 3 ++- .../networkx/linalg/bethehessianmatrix.pyi | 3 ++- stubs/networkx/networkx/linalg/graphmatrix.pyi | 7 +++++-- .../networkx/linalg/laplacianmatrix.pyi | 17 +++++++++++++---- .../networkx/linalg/modularitymatrix.pyi | 5 +++-- 11 files changed, 43 insertions(+), 26 deletions(-) diff --git a/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi b/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi index 8dfdb2d0d11a..6869ac801ce3 100644 --- a/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi @@ -1,5 +1,4 @@ -from _typeshed import Incomplete -from collections.abc import Iterable +from collections.abc import Collection from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -8,7 +7,7 @@ from networkx.utils.backends import _dispatchable def laplacian_centrality( G: Graph[_Node], normalized: bool = True, - nodelist: Iterable[Incomplete] | None = None, + nodelist: Collection[_Node] | None = None, weight: str | None = "weight", walk_type: str | None = None, alpha: float = 0.95, diff --git a/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi b/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi index cb3ab19ce19d..f79671d5cffb 100644 --- a/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi +++ b/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete, SupportsGetItem -from collections.abc import Iterable +from collections.abc import Collection from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -20,7 +20,7 @@ def google_matrix( G: Graph[_Node], alpha: float = 0.85, personalization: SupportsGetItem[Incomplete, Incomplete] | None = None, - nodelist: Iterable[Incomplete] | None = None, + nodelist: Collection[_Node] | None = None, weight: str | None = "weight", dangling: SupportsGetItem[Incomplete, Incomplete] | None = None, ): ... diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi index 88e66d572eb0..f40ce0ac0403 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi @@ -1,11 +1,11 @@ from _typeshed import Incomplete, SupportsGetItem -from collections.abc import Iterable +from collections.abc import Collection from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @_dispatchable -def floyd_warshall_numpy(G: Graph[_Node], nodelist: Iterable[Incomplete] | None = None, weight: str | None = "weight"): ... +def floyd_warshall_numpy(G: Graph[_Node], nodelist: Collection[_Node] | None = None, weight: str | None = "weight"): ... @_dispatchable def floyd_warshall_predecessor_and_distance(G: Graph[_Node], weight: str | None = "weight"): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/triads.pyi b/stubs/networkx/networkx/algorithms/triads.pyi index d6bea369c744..1942e68f9170 100644 --- a/stubs/networkx/networkx/algorithms/triads.pyi +++ b/stubs/networkx/networkx/algorithms/triads.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from collections.abc import Generator, Iterable +from collections.abc import Collection, Generator from networkx.classes.digraph import DiGraph from networkx.classes.graph import Graph, _Node @@ -7,7 +7,7 @@ from networkx.utils.backends import _dispatchable from numpy.random import RandomState @_dispatchable -def triadic_census(G: DiGraph[_Node], nodelist: Iterable[Incomplete] | None = None): ... +def triadic_census(G: DiGraph[_Node], nodelist: Collection[_Node] | None = None): ... @_dispatchable def is_triad(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/convert.pyi b/stubs/networkx/networkx/convert.pyi index d02a612f993c..a85a117a939c 100644 --- a/stubs/networkx/networkx/convert.pyi +++ b/stubs/networkx/networkx/convert.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from collections.abc import Callable, Iterable +from collections.abc import Callable, Collection, Iterable from networkx.classes.graph import Graph, _Data, _Node from networkx.utils.backends import _dispatchable @@ -18,13 +18,15 @@ def to_networkx_graph( data: _Data[_Node], create_using: Graph[_Node] | Callable[[], Graph[_Node]] | None = None, multigraph_input: bool = False ) -> Graph[_Node]: ... @_dispatchable -def to_dict_of_lists(G: Graph[_Node], nodelist: None | Iterable[_Node] = None) -> dict[_Node, list[_Node]]: ... +def to_dict_of_lists(G: Graph[_Node], nodelist: Collection[_Node] | None = None) -> dict[_Node, list[_Node]]: ... @_dispatchable def from_dict_of_lists(d: dict[_Node, Iterable[_Node]], create_using: Incomplete | None = None) -> Graph[_Node]: ... -def to_dict_of_dicts(G: Graph[_Node], nodelist=None, edge_data=None) -> dict[Incomplete, Incomplete]: ... +def to_dict_of_dicts( + G: Graph[_Node], nodelist: Collection[_Node] | None = None, edge_data=None +) -> dict[Incomplete, Incomplete]: ... @_dispatchable def from_dict_of_dicts(d, create_using=None, multigraph_input=False) -> Graph[Incomplete]: ... @_dispatchable -def to_edgelist(G: Graph[_Node], nodelist=None): ... +def to_edgelist(G: Graph[_Node], nodelist: Collection[_Node] | None = None): ... @_dispatchable def from_edgelist(edgelist, create_using=None) -> Graph[Incomplete]: ... diff --git a/stubs/networkx/networkx/drawing/nx_pylab.pyi b/stubs/networkx/networkx/drawing/nx_pylab.pyi index 1852dcdd96a2..cb530376b15a 100644 --- a/stubs/networkx/networkx/drawing/nx_pylab.pyi +++ b/stubs/networkx/networkx/drawing/nx_pylab.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete +from collections.abc import Collection def draw(G, pos: Incomplete | None = None, ax: Incomplete | None = None, **kwds) -> None: ... 
def draw_networkx( @@ -7,7 +8,7 @@ def draw_networkx( def draw_networkx_nodes( G, pos, - nodelist: Incomplete | None = None, + nodelist: Collection[Incomplete] | None = None, node_size: Incomplete | int = 300, node_color: str = "#1f78b4", node_shape: str = "o", @@ -39,7 +40,7 @@ def draw_networkx_edges( arrows: Incomplete | None = None, label: Incomplete | None = None, node_size: Incomplete | int = 300, - nodelist: Incomplete | None = None, + nodelist: list[Incomplete] | None = None, node_shape: str = "o", connectionstyle: str = "arc3", min_source_margin: int = 0, @@ -79,7 +80,7 @@ def draw_networkx_edge_labels( rotate: bool = True, clip_on: bool = True, node_size: int = 300, - nodelist: Incomplete | None = None, + nodelist: list[Incomplete] | None = None, connectionstyle: str = "arc3", hide_ticks: bool = True, ): ... diff --git a/stubs/networkx/networkx/generators/community.pyi b/stubs/networkx/networkx/generators/community.pyi index 55e386934db1..8cfedcb8aefb 100644 --- a/stubs/networkx/networkx/generators/community.pyi +++ b/stubs/networkx/networkx/generators/community.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete +from collections.abc import Collection from networkx.utils.backends import _dispatchable @@ -22,7 +23,7 @@ def windmill_graph(n, k): ... def stochastic_block_model( sizes, p, - nodelist: Incomplete | None = None, + nodelist: Collection[Incomplete] | None = None, seed: Incomplete | None = None, directed: bool = False, selfloops: bool = False, diff --git a/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi b/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi index a122ca706a24..70bafa4f9957 100644 --- a/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi +++ b/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi @@ -1,6 +1,7 @@ from _typeshed import Incomplete +from collections.abc import Collection from networkx.utils.backends import _dispatchable @_dispatchable -def bethe_hessian_matrix(G, r: Incomplete | None = None, nodelist: Incomplete | None = None): ... +def bethe_hessian_matrix(G, r: Incomplete | None = None, nodelist: Collection[Incomplete] | None = None): ... diff --git a/stubs/networkx/networkx/linalg/graphmatrix.pyi b/stubs/networkx/networkx/linalg/graphmatrix.pyi index 6eb0c4243974..a26cdff363e4 100644 --- a/stubs/networkx/networkx/linalg/graphmatrix.pyi +++ b/stubs/networkx/networkx/linalg/graphmatrix.pyi @@ -1,14 +1,17 @@ from _typeshed import Incomplete +from collections.abc import Collection from networkx.utils.backends import _dispatchable @_dispatchable def incidence_matrix( G, - nodelist: Incomplete | None = None, + nodelist: Collection[Incomplete] | None = None, edgelist: Incomplete | None = None, oriented: bool = False, weight: Incomplete | None = None, ): ... @_dispatchable -def adjacency_matrix(G, nodelist: Incomplete | None = None, dtype: Incomplete | None = None, weight: str = "weight"): ... +def adjacency_matrix( + G, nodelist: Collection[Incomplete] | None = None, dtype: Incomplete | None = None, weight: str = "weight" +): ... diff --git a/stubs/networkx/networkx/linalg/laplacianmatrix.pyi b/stubs/networkx/networkx/linalg/laplacianmatrix.pyi index 5e607d91b15d..db9a7745c215 100644 --- a/stubs/networkx/networkx/linalg/laplacianmatrix.pyi +++ b/stubs/networkx/networkx/linalg/laplacianmatrix.pyi @@ -1,18 +1,27 @@ from _typeshed import Incomplete +from collections.abc import Collection from networkx.utils.backends import _dispatchable @_dispatchable -def laplacian_matrix(G, nodelist: Incomplete | None = None, weight: str = "weight"): ... 
+def laplacian_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: str = "weight"): ... @_dispatchable -def normalized_laplacian_matrix(G, nodelist: Incomplete | None = None, weight: str = "weight"): ... +def normalized_laplacian_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: str = "weight"): ... @_dispatchable def total_spanning_tree_weight(G, weight: Incomplete | None = None): ... @_dispatchable def directed_laplacian_matrix( - G, nodelist: Incomplete | None = None, weight: str = "weight", walk_type: Incomplete | None = None, alpha: float = 0.95 + G, + nodelist: Collection[Incomplete] | None = None, + weight: str = "weight", + walk_type: Incomplete | None = None, + alpha: float = 0.95, ): ... @_dispatchable def directed_combinatorial_laplacian_matrix( - G, nodelist: Incomplete | None = None, weight: str = "weight", walk_type: Incomplete | None = None, alpha: float = 0.95 + G, + nodelist: Collection[Incomplete] | None = None, + weight: str = "weight", + walk_type: Incomplete | None = None, + alpha: float = 0.95, ): ... diff --git a/stubs/networkx/networkx/linalg/modularitymatrix.pyi b/stubs/networkx/networkx/linalg/modularitymatrix.pyi index 5ef6eec7c3bf..7cc73a5f0550 100644 --- a/stubs/networkx/networkx/linalg/modularitymatrix.pyi +++ b/stubs/networkx/networkx/linalg/modularitymatrix.pyi @@ -1,8 +1,9 @@ from _typeshed import Incomplete +from collections.abc import Collection from networkx.utils.backends import _dispatchable @_dispatchable -def modularity_matrix(G, nodelist: Incomplete | None = None, weight: Incomplete | None = None): ... +def modularity_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: Incomplete | None = None): ... @_dispatchable -def directed_modularity_matrix(G, nodelist: Incomplete | None = None, weight: Incomplete | None = None): ... +def directed_modularity_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: Incomplete | None = None): ... 
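A minimal sketch of what the tightened nodelist annotations mean for callers (illustrative only, not part of the upstream patch; it assumes networkx and these stubs are installed). Collection adds __len__ and __contains__ on top of iteration, so ordinary containers still type-check while one-shot iterators are now flagged, which matches how most of these implementations take len() of nodelist or test membership in it.

import networkx as nx

G = nx.path_graph(4)

# Lists, sets and tuples are Collections, so these calls type-check as before.
nx.to_dict_of_lists(G, nodelist=[0, 1, 2])
nx.laplacian_matrix(G, nodelist=[0, 1, 2, 3])

# A generator is only an Iterator, not a Collection, so under the new stubs a
# type checker rejects this call instead of letting it misbehave at runtime.
nx.to_dict_of_lists(G, nodelist=(n for n in G))  # type: ignore[arg-type]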
From 74eff3ab58436a5d6465b5ef2d579cc90a1e110a Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 8 May 2025 22:25:25 -0400 Subject: [PATCH 298/388] networkx: add all missing `__all__` (#13956) --- .../algorithms/approximation/clique.pyi | 2 + .../approximation/clustering_coefficient.pyi | 2 + .../algorithms/approximation/connectivity.pyi | 2 + .../approximation/distance_measures.pyi | 2 + .../approximation/dominating_set.pyi | 2 + .../algorithms/approximation/kcomponents.pyi | 2 + .../algorithms/approximation/matching.pyi | 2 + .../algorithms/approximation/maxcut.pyi | 2 + .../algorithms/approximation/ramsey.pyi | 2 + .../algorithms/approximation/steinertree.pyi | 2 + .../approximation/traveling_salesman.pyi | 9 +++++ .../algorithms/approximation/vertex_cover.pyi | 2 + .../algorithms/assortativity/connectivity.pyi | 2 + .../algorithms/assortativity/correlation.pyi | 7 ++++ .../algorithms/assortativity/mixing.pyi | 2 + .../assortativity/neighbor_degree.pyi | 2 + .../algorithms/assortativity/pairs.pyi | 2 + .../networkx/algorithms/asteroidal.pyi | 2 + .../networkx/algorithms/bipartite/basic.pyi | 2 + .../algorithms/bipartite/centrality.pyi | 2 + .../networkx/algorithms/bipartite/cluster.pyi | 2 + .../algorithms/bipartite/covering.pyi | 2 + .../algorithms/bipartite/edgelist.pyi | 2 + .../algorithms/bipartite/extendability.pyi | 2 + .../algorithms/bipartite/generators.pyi | 11 ++++++ .../algorithms/bipartite/matching.pyi | 2 + .../networkx/algorithms/bipartite/matrix.pyi | 2 + .../algorithms/bipartite/projection.pyi | 8 ++++ .../algorithms/bipartite/redundancy.pyi | 2 + .../algorithms/bipartite/spectral.pyi | 2 + .../networkx/networkx/algorithms/boundary.pyi | 1 + .../networkx/networkx/algorithms/bridges.pyi | 2 + .../algorithms/centrality/betweenness.pyi | 2 + .../centrality/betweenness_subset.pyi | 2 + .../algorithms/centrality/closeness.pyi | 2 + .../centrality/current_flow_betweenness.pyi | 6 +++ .../current_flow_betweenness_subset.pyi | 2 + .../centrality/current_flow_closeness.pyi | 2 + .../algorithms/centrality/degree_alg.pyi | 2 + .../algorithms/centrality/dispersion.pyi | 2 + .../algorithms/centrality/eigenvector.pyi | 2 + .../networkx/algorithms/centrality/group.pyi | 9 +++++ .../algorithms/centrality/harmonic.pyi | 2 + .../networkx/algorithms/centrality/katz.pyi | 2 + .../algorithms/centrality/laplacian.pyi | 2 + .../algorithms/centrality/percolation.pyi | 2 + .../algorithms/centrality/reaching.pyi | 2 + .../algorithms/centrality/second_order.pyi | 2 + .../algorithms/centrality/subgraph_alg.pyi | 2 + .../algorithms/centrality/trophic.pyi | 2 + .../algorithms/centrality/voterank_alg.pyi | 2 + stubs/networkx/networkx/algorithms/chains.pyi | 2 + .../networkx/networkx/algorithms/chordal.pyi | 12 ++++++ stubs/networkx/networkx/algorithms/clique.pyi | 14 +++++++ .../networkx/networkx/algorithms/cluster.pyi | 2 + .../coloring/equitable_coloring.pyi | 2 + .../algorithms/communicability_alg.pyi | 2 + .../algorithms/community/asyn_fluid.pyi | 2 + .../algorithms/community/centrality.pyi | 2 + .../algorithms/community/community_utils.pyi | 2 + .../networkx/algorithms/community/kclique.pyi | 2 + .../algorithms/community/kernighan_lin.pyi | 2 + .../community/label_propagation.pyi | 4 ++ .../networkx/algorithms/community/louvain.pyi | 2 + .../networkx/algorithms/community/lukes.pyi | 2 + .../algorithms/community/modularity_max.pyi | 2 + .../algorithms/components/attracting.pyi | 2 + .../algorithms/components/biconnected.pyi | 2 + .../algorithms/components/connected.pyi | 2 + 
 .../algorithms/components/semiconnected.pyi | 2 +
 .../components/strongly_connected.pyi | 8 ++++
 .../components/weakly_connected.pyi | 2 +
 .../connectivity/edge_augmentation.pyi | 2 +
 .../connectivity/edge_kcomponents.pyi | 2 +
 .../algorithms/connectivity/stoerwagner.pyi | 2 +
 .../algorithms/connectivity/utils.pyi | 2 +
 stubs/networkx/networkx/algorithms/core.pyi | 2 +
 .../networkx/networkx/algorithms/covering.pyi | 2 +
 stubs/networkx/networkx/algorithms/cuts.pyi | 11 ++++++
 stubs/networkx/networkx/algorithms/cycles.pyi | 12 ++++++
 .../networkx/algorithms/d_separation.pyi | 8 ++++
 stubs/networkx/networkx/algorithms/dag.pyi | 21 ++++++++++
 .../networkx/algorithms/distance_measures.pyi | 19 ++++++++++
 .../networkx/algorithms/distance_regular.pyi | 2 +
 .../networkx/algorithms/dominance.pyi | 2 +
 .../networkx/algorithms/dominating.pyi | 2 +
 .../algorithms/efficiency_measures.pyi | 2 +
 stubs/networkx/networkx/algorithms/euler.pyi | 2 +
 .../algorithms/flow/boykovkolmogorov.pyi | 2 +
 .../algorithms/flow/capacityscaling.pyi | 2 +
 .../networkx/algorithms/flow/dinitz_alg.pyi | 2 +
 .../networkx/algorithms/flow/edmondskarp.pyi | 2 +
 .../networkx/algorithms/flow/mincost.pyi | 2 +
 .../algorithms/flow/networksimplex.pyi | 2 +
 .../networkx/algorithms/flow/preflowpush.pyi | 2 +
 .../flow/shortestaugmentingpath.pyi | 2 +
 .../networkx/algorithms/flow/utils.pyi | 2 +
 .../networkx/algorithms/graph_hashing.pyi | 2 +
 .../networkx/algorithms/graphical.pyi | 9 +++++
 .../networkx/algorithms/hierarchy.pyi | 2 +
 stubs/networkx/networkx/algorithms/hybrid.pyi | 2 +
 .../networkx/networkx/algorithms/isolate.pyi | 2 +
 .../algorithms/isomorphism/ismags.pyi | 2 +
 .../algorithms/isomorphism/matchhelpers.pyi | 12 ++++++
 .../isomorphism/tree_isomorphism.pyi | 2 +
 .../networkx/algorithms/isomorphism/vf2pp.pyi | 2 +
 .../algorithms/link_analysis/hits_alg.pyi | 2 +
 .../algorithms/link_analysis/pagerank_alg.pyi | 2 +
 .../networkx/algorithms/link_prediction.pyi | 11 ++++++
 .../algorithms/lowest_common_ancestors.pyi | 2 +
 .../networkx/networkx/algorithms/matching.pyi | 9 +++++
 .../networkx/algorithms/minors/__init__.pyi | 2 +
 .../algorithms/minors/contraction.pyi | 2 +
 stubs/networkx/networkx/algorithms/mis.pyi | 2 +
 stubs/networkx/networkx/algorithms/moral.pyi | 2 +
 .../algorithms/node_classification.pyi | 2 +
 .../networkx/algorithms/non_randomness.pyi | 2 +
 .../networkx/algorithms/operators/all.pyi | 2 +
 .../networkx/algorithms/operators/binary.pyi | 4 ++
 .../networkx/algorithms/operators/product.pyi | 14 +++++++
 .../networkx/algorithms/operators/unary.pyi | 2 +
 .../networkx/algorithms/planar_drawing.pyi | 2 +
 .../networkx/algorithms/polynomials.pyi | 2 +
 .../networkx/algorithms/reciprocity.pyi | 2 +
 .../networkx/networkx/algorithms/regular.pyi | 2 +
 .../networkx/networkx/algorithms/richclub.pyi | 2 +
 .../algorithms/shortest_paths/astar.pyi | 2 +
 .../algorithms/shortest_paths/dense.pyi | 2 +
 .../algorithms/shortest_paths/generic.pyi | 18 +++++++++
 .../algorithms/shortest_paths/unweighted.pyi | 11 ++++++
 .../algorithms/shortest_paths/weighted.pyi | 28 ++++++++++++++
 .../networkx/algorithms/similarity.pyi | 10 +++++
 .../networkx/algorithms/smallworld.pyi | 2 +
 .../networkx/networkx/algorithms/smetric.pyi | 2 +
 .../networkx/algorithms/sparsifiers.pyi | 2 +
 .../networkx/algorithms/structuralholes.pyi | 2 +
 .../networkx/algorithms/summarization.pyi | 2 +
 stubs/networkx/networkx/algorithms/swap.pyi | 2 +
 .../networkx/algorithms/threshold.pyi | 2 +
 .../networkx/algorithms/time_dependent.pyi | 2 +
 .../networkx/algorithms/tournament.pyi | 2 +
 .../algorithms/traversal/beamsearch.pyi | 2 +
 .../traversal/breadth_first_search.pyi | 16 ++++++++
 .../traversal/depth_first_search.pyi | 10 +++++
 .../networkx/algorithms/traversal/edgebfs.pyi | 2 +
 .../networkx/algorithms/traversal/edgedfs.pyi | 2 +
 .../networkx/algorithms/tree/branchings.pyi | 3 ++
 .../networkx/algorithms/tree/coding.pyi | 2 +
 .../algorithms/tree/decomposition.pyi | 2 +
 .../networkx/networkx/algorithms/tree/mst.pyi | 16 ++++++++
 .../networkx/algorithms/tree/operations.pyi | 2 +
 .../networkx/algorithms/tree/recognition.pyi | 2 +
 stubs/networkx/networkx/algorithms/triads.pyi | 2 +
 .../networkx/networkx/algorithms/vitality.pyi | 2 +
 .../networkx/networkx/algorithms/voronoi.pyi | 2 +
 stubs/networkx/networkx/algorithms/walks.pyi | 2 +
 stubs/networkx/networkx/algorithms/wiener.pyi | 6 +++
 stubs/networkx/networkx/classes/coreviews.pyi | 14 +++++++
 stubs/networkx/networkx/classes/digraph.pyi | 2 +
 stubs/networkx/networkx/classes/filters.pyi | 14 +++++++
 stubs/networkx/networkx/classes/function.pyi | 5 +++
 stubs/networkx/networkx/classes/graph.pyi | 2 +
 .../networkx/networkx/classes/graphviews.pyi | 2 +
 .../networkx/classes/multidigraph.pyi | 2 +
 .../networkx/networkx/classes/multigraph.pyi | 2 +
 .../networkx/networkx/classes/reportviews.pyi | 25 ++++++++++++
 stubs/networkx/networkx/convert_matrix.pyi | 15 ++++++++
 stubs/networkx/networkx/drawing/layout.pyi | 38 +++++++++++++++++++
 stubs/networkx/networkx/drawing/nx_agraph.pyi | 2 +
 stubs/networkx/networkx/drawing/nx_latex.pyi | 2 +
 stubs/networkx/networkx/drawing/nx_pydot.pyi | 2 +
 stubs/networkx/networkx/drawing/nx_pylab.pyi | 18 +++++++++
 stubs/networkx/networkx/exception.pyi | 17 +++++++++
 stubs/networkx/networkx/generators/atlas.pyi | 2 +
 .../networkx/networkx/generators/classic.pyi | 29 ++++++++++++++
 .../networkx/networkx/generators/cographs.pyi | 2 +
 .../networkx/generators/community.pyi | 13 +++++++
 .../networkx/networkx/generators/directed.pyi | 2 +
 .../networkx/generators/duplication.pyi | 2 +
 stubs/networkx/networkx/generators/ego.pyi | 2 +
 .../networkx/generators/expanders.pyi | 15 ++++++++
 .../networkx/generators/geometric.pyi | 16 ++++++++
 .../networkx/generators/harary_graph.pyi | 2 +
 .../networkx/generators/intersection.pyi | 2 +
 .../networkx/generators/interval_graph.pyi | 2 +
 .../networkx/generators/joint_degree_seq.pyi | 2 +
 .../networkx/networkx/generators/lattice.pyi | 2 +
 stubs/networkx/networkx/generators/line.pyi | 2 +
 .../networkx/generators/mycielski.pyi | 2 +
 .../generators/nonisomorphic_trees.pyi | 2 +
 .../networkx/generators/random_clustered.pyi | 1 +
 .../networkx/generators/random_graphs.pyi | 22 +++++++++++
 stubs/networkx/networkx/generators/small.pyi | 26 +++++++++++++
 stubs/networkx/networkx/generators/social.pyi | 2 +
 .../generators/spectral_graph_forge.pyi | 2 +
 .../networkx/generators/stochastic.pyi | 2 +
 stubs/networkx/networkx/generators/sudoku.pyi | 2 +
 .../networkx/generators/time_series.pyi | 2 +
 stubs/networkx/networkx/generators/trees.pyi | 25 ++++++++++++
 stubs/networkx/networkx/generators/triads.pyi | 2 +
 .../networkx/linalg/algebraicconnectivity.pyi | 2 +
 stubs/networkx/networkx/linalg/attrmatrix.pyi | 2 +
 .../networkx/linalg/bethehessianmatrix.pyi | 2 +
 .../networkx/networkx/linalg/graphmatrix.pyi | 2 +
 .../networkx/linalg/laplacianmatrix.pyi | 8 ++++
 .../networkx/linalg/modularitymatrix.pyi | 2 +
 stubs/networkx/networkx/linalg/spectrum.pyi | 8 ++++
 stubs/networkx/networkx/readwrite/adjlist.pyi | 2 +
 .../networkx/networkx/readwrite/edgelist.pyi | 9 +++++
 stubs/networkx/networkx/readwrite/graph6.pyi | 2 +
 .../readwrite/json_graph/adjacency.pyi | 2 +
 .../readwrite/json_graph/cytoscape.pyi | 2 +
 .../readwrite/json_graph/node_link.pyi | 2 +
 .../networkx/readwrite/json_graph/tree.pyi | 2 +
 stubs/networkx/networkx/readwrite/leda.pyi | 2 +
 .../networkx/readwrite/multiline_adjlist.pyi | 2 +
 stubs/networkx/networkx/readwrite/pajek.pyi | 2 +
 stubs/networkx/networkx/readwrite/sparse6.pyi | 2 +
 stubs/networkx/networkx/relabel.pyi | 2 +
 stubs/networkx/networkx/utils/__init__.pyi | 3 ++
 stubs/networkx/networkx/utils/configs.pyi | 2 +-
 stubs/networkx/networkx/utils/decorators.pyi | 2 +
 stubs/networkx/networkx/utils/heaps.pyi | 2 +
 .../networkx/networkx/utils/mapped_queue.pyi | 2 +
 .../networkx/utils/random_sequence.pyi | 9 +++++
 stubs/networkx/networkx/utils/rcm.pyi | 2 +
 226 files changed, 1014 insertions(+), 1 deletion(-)

diff --git a/stubs/networkx/networkx/algorithms/approximation/clique.pyi b/stubs/networkx/networkx/algorithms/approximation/clique.pyi
index 99c86b3a1c87..4587354a1c07 100644
--- a/stubs/networkx/networkx/algorithms/approximation/clique.pyi
+++ b/stubs/networkx/networkx/algorithms/approximation/clique.pyi
@@ -1,6 +1,8 @@
 from networkx.classes.graph import Graph, _Node
 from networkx.utils.backends import _dispatchable
 
+__all__ = ["clique_removal", "max_clique", "large_clique_size", "maximum_independent_set"]
+
 @_dispatchable
 def maximum_independent_set(G: Graph[_Node]): ...
 @_dispatchable
diff --git a/stubs/networkx/networkx/algorithms/approximation/clustering_coefficient.pyi b/stubs/networkx/networkx/algorithms/approximation/clustering_coefficient.pyi
index b02ea8144d50..4c1a7d55c117 100644
--- a/stubs/networkx/networkx/algorithms/approximation/clustering_coefficient.pyi
+++ b/stubs/networkx/networkx/algorithms/approximation/clustering_coefficient.pyi
@@ -2,5 +2,7 @@ from networkx.classes.graph import Graph, _Node
 from networkx.utils.backends import _dispatchable
 from numpy.random import RandomState
 
+__all__ = ["average_clustering"]
+
 @_dispatchable
 def average_clustering(G: Graph[_Node], trials: int = 1000, seed: int | RandomState | None = None): ...
diff --git a/stubs/networkx/networkx/algorithms/approximation/connectivity.pyi b/stubs/networkx/networkx/algorithms/approximation/connectivity.pyi
index b20466e208e5..7adcdcb978f2 100644
--- a/stubs/networkx/networkx/algorithms/approximation/connectivity.pyi
+++ b/stubs/networkx/networkx/algorithms/approximation/connectivity.pyi
@@ -4,6 +4,8 @@ from collections.abc import Iterable
 from networkx.classes.graph import Graph, _Node
 from networkx.utils.backends import _dispatchable
 
+__all__ = ["local_node_connectivity", "node_connectivity", "all_pairs_node_connectivity"]
+
 @_dispatchable
 def local_node_connectivity(G: Graph[_Node], source: _Node, target: _Node, cutoff: int | None = None): ...
 @_dispatchable
diff --git a/stubs/networkx/networkx/algorithms/approximation/distance_measures.pyi b/stubs/networkx/networkx/algorithms/approximation/distance_measures.pyi
index 08662306c401..06ecfd181cc2 100644
--- a/stubs/networkx/networkx/algorithms/approximation/distance_measures.pyi
+++ b/stubs/networkx/networkx/algorithms/approximation/distance_measures.pyi
@@ -2,5 +2,7 @@ from networkx.classes.graph import Graph, _Node
 from networkx.utils.backends import _dispatchable
 from numpy.random import RandomState
 
+__all__ = ["diameter"]
+
 @_dispatchable
 def diameter(G: Graph[_Node], seed: int | RandomState | None = None): ...
diff --git a/stubs/networkx/networkx/algorithms/approximation/dominating_set.pyi b/stubs/networkx/networkx/algorithms/approximation/dominating_set.pyi index 0eda30759ea1..a24428f63b8e 100644 --- a/stubs/networkx/networkx/algorithms/approximation/dominating_set.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/dominating_set.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"] + @_dispatchable def min_weighted_dominating_set(G: Graph[_Node], weight: str | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/approximation/kcomponents.pyi b/stubs/networkx/networkx/algorithms/approximation/kcomponents.pyi index 27bdc9efc106..b1965791435a 100644 --- a/stubs/networkx/networkx/algorithms/approximation/kcomponents.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/kcomponents.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["k_components"] + @_dispatchable def k_components(G: Graph[_Node], min_density: float = 0.95): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/matching.pyi b/stubs/networkx/networkx/algorithms/approximation/matching.pyi index 798b8f0ac6be..799818b498f2 100644 --- a/stubs/networkx/networkx/algorithms/approximation/matching.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/matching.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["min_maximal_matching"] + @_dispatchable def min_maximal_matching(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/maxcut.pyi b/stubs/networkx/networkx/algorithms/approximation/maxcut.pyi index e84b601c8f21..af1c01f9ec8e 100644 --- a/stubs/networkx/networkx/algorithms/approximation/maxcut.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/maxcut.pyi @@ -4,6 +4,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["randomized_partitioning", "one_exchange"] + @_dispatchable def randomized_partitioning( G: Graph[_Node], seed: int | RandomState | None = None, p: float = 0.5, weight: str | None = None diff --git a/stubs/networkx/networkx/algorithms/approximation/ramsey.pyi b/stubs/networkx/networkx/algorithms/approximation/ramsey.pyi index 1769ec99285b..cd8d97f11cbe 100644 --- a/stubs/networkx/networkx/algorithms/approximation/ramsey.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/ramsey.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["ramsey_R2"] + @_dispatchable def ramsey_R2(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi b/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi index f75bd9023db9..2b3bedd6fe31 100644 --- a/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["metric_closure", "steiner_tree"] + @_dispatchable def metric_closure(G: Graph[_Node], weight="weight"): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/approximation/traveling_salesman.pyi b/stubs/networkx/networkx/algorithms/approximation/traveling_salesman.pyi index bccba05d805d..7d0b61f6f407 100644 --- a/stubs/networkx/networkx/algorithms/approximation/traveling_salesman.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/traveling_salesman.pyi @@ -6,6 +6,15 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = [ + "traveling_salesman_problem", + "christofides", + "asadpour_atsp", + "greedy_tsp", + "simulated_annealing_tsp", + "threshold_accepting_tsp", +] + @_dispatchable def christofides(G: Graph[_Node], weight: str | None = "weight", tree: Graph[_Node] | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/approximation/vertex_cover.pyi b/stubs/networkx/networkx/algorithms/approximation/vertex_cover.pyi index 26d3fb82b1bb..ffd5206df85d 100644 --- a/stubs/networkx/networkx/algorithms/approximation/vertex_cover.pyi +++ b/stubs/networkx/networkx/algorithms/approximation/vertex_cover.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["min_weighted_vertex_cover"] + @_dispatchable def min_weighted_vertex_cover(G: Graph[_Node], weight: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/assortativity/connectivity.pyi b/stubs/networkx/networkx/algorithms/assortativity/connectivity.pyi index 3c0c202c56e8..a4782a825d00 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/connectivity.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/connectivity.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["average_degree_connectivity"] + @_dispatchable def average_degree_connectivity( G: Graph[_Node], source="in+out", target="in+out", nodes: Iterable[Incomplete] | None = None, weight: str | None = None diff --git a/stubs/networkx/networkx/algorithms/assortativity/correlation.pyi b/stubs/networkx/networkx/algorithms/assortativity/correlation.pyi index 9fe4866db3d8..2789386434d7 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/correlation.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/correlation.pyi @@ -4,6 +4,13 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "degree_pearson_correlation_coefficient", + "degree_assortativity_coefficient", + "attribute_assortativity_coefficient", + "numeric_assortativity_coefficient", +] + @_dispatchable def degree_assortativity_coefficient( G: Graph[_Node], x: str = "out", y: str = "in", weight: str | None = None, nodes: Iterable[Incomplete] | None = None diff --git a/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi b/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi index c80fdae4eadb..4c5ced294890 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["attribute_mixing_matrix", "attribute_mixing_dict", "degree_mixing_matrix", "degree_mixing_dict", "mixing_dict"] + @_dispatchable def attribute_mixing_dict( G: 
Graph[_Node], attribute: str, nodes: Iterable[Incomplete] | None = None, normalized: bool = False diff --git a/stubs/networkx/networkx/algorithms/assortativity/neighbor_degree.pyi b/stubs/networkx/networkx/algorithms/assortativity/neighbor_degree.pyi index 042777067212..d86ce3da8211 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/neighbor_degree.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/neighbor_degree.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["average_neighbor_degree"] + @_dispatchable def average_neighbor_degree( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/assortativity/pairs.pyi b/stubs/networkx/networkx/algorithms/assortativity/pairs.pyi index 4e9fb3c7516b..08898fc2f30a 100644 --- a/stubs/networkx/networkx/algorithms/assortativity/pairs.pyi +++ b/stubs/networkx/networkx/algorithms/assortativity/pairs.pyi @@ -4,6 +4,8 @@ from collections.abc import Generator, Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["node_attribute_xy", "node_degree_xy"] + @_dispatchable def node_attribute_xy( G: Graph[_Node], attribute, nodes: Iterable[Incomplete] | None = None diff --git a/stubs/networkx/networkx/algorithms/asteroidal.pyi b/stubs/networkx/networkx/algorithms/asteroidal.pyi index 21fdc6879686..eaa859a3bc22 100644 --- a/stubs/networkx/networkx/algorithms/asteroidal.pyi +++ b/stubs/networkx/networkx/algorithms/asteroidal.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_at_free", "find_asteroidal_triple"] + @_dispatchable def find_asteroidal_triple(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/bipartite/basic.pyi b/stubs/networkx/networkx/algorithms/bipartite/basic.pyi index c8d80a1b642d..280d44b12d91 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/basic.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/basic.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_bipartite", "is_bipartite_node_set", "color", "sets", "density", "degrees"] + @_dispatchable def color(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/bipartite/centrality.pyi b/stubs/networkx/networkx/algorithms/bipartite/centrality.pyi index fdd6212831f1..976948059f2b 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/centrality.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/centrality.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"] + @_dispatchable def degree_centrality(G: Graph[_Node], nodes): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/bipartite/cluster.pyi b/stubs/networkx/networkx/algorithms/bipartite/cluster.pyi index 7fed50d545d0..d4fffe4ed70a 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/cluster.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/cluster.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["clustering", "average_clustering", "latapy_clustering", "robins_alexander_clustering"] + @_dispatchable def latapy_clustering(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None, mode: str = "dot"): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/covering.pyi b/stubs/networkx/networkx/algorithms/bipartite/covering.pyi index 9d9fe9758baf..c915c19c00f0 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/covering.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/covering.pyi @@ -4,5 +4,7 @@ from collections.abc import Callable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["min_edge_cover"] + @_dispatchable def min_edge_cover(G: Graph[_Node], matching_algorithm: Callable[..., Incomplete] | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/edgelist.pyi b/stubs/networkx/networkx/algorithms/bipartite/edgelist.pyi index 73ca8035db9e..18fb7d40473f 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/edgelist.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/edgelist.pyi @@ -4,6 +4,8 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"] + @_dispatchable def write_edgelist(G, path, comments: str = "#", delimiter: str = " ", data: bool = True, encoding: str = "utf-8") -> None: ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/bipartite/extendability.pyi b/stubs/networkx/networkx/algorithms/bipartite/extendability.pyi index 9bfee7872513..155cb1cb05b3 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/extendability.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/extendability.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["maximal_extendability"] + @_dispatchable def maximal_extendability(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/generators.pyi b/stubs/networkx/networkx/algorithms/bipartite/generators.pyi index 3fe8b22ca59e..f9aef242b895 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/generators.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/generators.pyi @@ -5,6 +5,17 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = [ + "configuration_model", + "havel_hakimi_graph", + "reverse_havel_hakimi_graph", + "alternating_havel_hakimi_graph", + "preferential_attachment_graph", + "random_graph", + "gnmk_random_graph", + "complete_bipartite_graph", +] + @_dispatchable def complete_bipartite_graph(n1, n2, create_using: Graph[_Node] | None = None): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/bipartite/matching.pyi b/stubs/networkx/networkx/algorithms/bipartite/matching.pyi index b744c44f2e34..91ef136107e4 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/matching.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/matching.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["maximum_matching", "hopcroft_karp_matching", "eppstein_matching", "to_vertex_cover", "minimum_weight_full_matching"] + @_dispatchable def hopcroft_karp_matching(G: Graph[_Node], top_nodes: Iterable[_Node] | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi b/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi index f8a7af78c99b..2e484482aa8d 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["biadjacency_matrix", "from_biadjacency_matrix"] + @_dispatchable def biadjacency_matrix( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/bipartite/projection.pyi b/stubs/networkx/networkx/algorithms/bipartite/projection.pyi index c545b78ff61e..9f1fd27d3880 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/projection.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/projection.pyi @@ -4,6 +4,14 @@ from collections.abc import Callable, Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "projected_graph", + "weighted_projected_graph", + "collaboration_weighted_projected_graph", + "overlap_weighted_projected_graph", + "generic_weighted_projected_graph", +] + @_dispatchable def projected_graph(B: Graph[_Node], nodes: Iterable[Incomplete], multigraph: bool = False): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/bipartite/redundancy.pyi b/stubs/networkx/networkx/algorithms/bipartite/redundancy.pyi index 474329b6e68e..cb7233ef90ab 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/redundancy.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/redundancy.pyi @@ -4,5 +4,7 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["node_redundancy"] + @_dispatchable def node_redundancy(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/spectral.pyi b/stubs/networkx/networkx/algorithms/bipartite/spectral.pyi index c3060cce5cda..1778bd31cf5a 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/spectral.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/spectral.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["spectral_bipartivity"] + @_dispatchable def spectral_bipartivity(G: Graph[_Node], nodes=None, weight: str = "weight"): ... 
diff --git a/stubs/networkx/networkx/algorithms/boundary.pyi b/stubs/networkx/networkx/algorithms/boundary.pyi index e85644bef42f..08fb9203269f 100644 --- a/stubs/networkx/networkx/algorithms/boundary.pyi +++ b/stubs/networkx/networkx/algorithms/boundary.pyi @@ -6,6 +6,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable _U = TypeVar("_U") +__all__ = ["edge_boundary", "node_boundary"] @overload def edge_boundary( diff --git a/stubs/networkx/networkx/algorithms/bridges.pyi b/stubs/networkx/networkx/algorithms/bridges.pyi index 2beb94e80077..530802159370 100644 --- a/stubs/networkx/networkx/algorithms/bridges.pyi +++ b/stubs/networkx/networkx/algorithms/bridges.pyi @@ -4,6 +4,8 @@ from typing import overload from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["bridges", "has_bridges", "local_bridges"] + @_dispatchable def bridges(G: Graph[_Node], root: _Node | None = None) -> Generator[_Node, None, None]: ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/centrality/betweenness.pyi b/stubs/networkx/networkx/algorithms/centrality/betweenness.pyi index 7d8351a67c5a..ddaf1288a3d4 100644 --- a/stubs/networkx/networkx/algorithms/centrality/betweenness.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/betweenness.pyi @@ -2,6 +2,8 @@ from networkx.classes.graph import Graph, _Edge, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["betweenness_centrality", "edge_betweenness_centrality"] + @_dispatchable def betweenness_centrality( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/centrality/betweenness_subset.pyi b/stubs/networkx/networkx/algorithms/centrality/betweenness_subset.pyi index 3f3af3e5bbae..8f91e193b844 100644 --- a/stubs/networkx/networkx/algorithms/centrality/betweenness_subset.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/betweenness_subset.pyi @@ -3,6 +3,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Edge, _Node from networkx.utils.backends import _dispatchable +__all__ = ["betweenness_centrality_subset", "edge_betweenness_centrality_subset"] + @_dispatchable def betweenness_centrality_subset( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/centrality/closeness.pyi b/stubs/networkx/networkx/algorithms/centrality/closeness.pyi index edfae3a7b828..72201138a975 100644 --- a/stubs/networkx/networkx/algorithms/centrality/closeness.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/closeness.pyi @@ -3,6 +3,8 @@ from _typeshed import Incomplete, SupportsGetItem from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["closeness_centrality", "incremental_closeness_centrality"] + @_dispatchable def closeness_centrality( G: Graph[_Node], u: _Node | None = None, distance=None, wf_improved: bool | None = True diff --git a/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi b/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi index 3995a217fefc..7d39f64677cf 100644 --- a/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi @@ -2,6 +2,12 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = [ + "current_flow_betweenness_centrality", + 
"approximate_current_flow_betweenness_centrality", + "edge_current_flow_betweenness_centrality", +] + @_dispatchable def approximate_current_flow_betweenness_centrality( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness_subset.pyi b/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness_subset.pyi index 7712c02ac954..9fe21cad877d 100644 --- a/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness_subset.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness_subset.pyi @@ -3,6 +3,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["current_flow_betweenness_centrality_subset", "edge_current_flow_betweenness_centrality_subset"] + @_dispatchable def current_flow_betweenness_centrality_subset( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/centrality/current_flow_closeness.pyi b/stubs/networkx/networkx/algorithms/centrality/current_flow_closeness.pyi index 534c06845d13..00ff9c43f1b1 100644 --- a/stubs/networkx/networkx/algorithms/centrality/current_flow_closeness.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/current_flow_closeness.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["current_flow_closeness_centrality", "information_centrality"] + @_dispatchable def current_flow_closeness_centrality(G: Graph[_Node], weight: str | None = None, dtype: type = ..., solver: str = "lu"): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/degree_alg.pyi b/stubs/networkx/networkx/algorithms/centrality/degree_alg.pyi index 30618e780bfe..a3d4719c8eba 100644 --- a/stubs/networkx/networkx/algorithms/centrality/degree_alg.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/degree_alg.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"] + @_dispatchable def degree_centrality(G: Graph[_Node]) -> dict[_Node, float]: ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/centrality/dispersion.pyi b/stubs/networkx/networkx/algorithms/centrality/dispersion.pyi index b673beb8778b..4cc9ed0f4de0 100644 --- a/stubs/networkx/networkx/algorithms/centrality/dispersion.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/dispersion.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["dispersion"] + @_dispatchable def dispersion( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi b/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi index 8c9ccd34c568..5e1af9216cf8 100644 --- a/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi @@ -3,6 +3,8 @@ from _typeshed import Incomplete, SupportsGetItem from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["eigenvector_centrality", "eigenvector_centrality_numpy"] + @_dispatchable def eigenvector_centrality( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/centrality/group.pyi b/stubs/networkx/networkx/algorithms/centrality/group.pyi index 3229bdec3789..0efcdbbe4dcf 100644 --- a/stubs/networkx/networkx/algorithms/centrality/group.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/group.pyi @@ -4,6 +4,15 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "group_betweenness_centrality", + "group_closeness_centrality", + "group_degree_centrality", + "group_in_degree_centrality", + "group_out_degree_centrality", + "prominent_group", +] + @_dispatchable def group_betweenness_centrality( G: Graph[_Node], C, normalized: bool | None = True, weight: str | None = None, endpoints: bool | None = False diff --git a/stubs/networkx/networkx/algorithms/centrality/harmonic.pyi b/stubs/networkx/networkx/algorithms/centrality/harmonic.pyi index af566d1c4d7d..7b75d7ee75ee 100644 --- a/stubs/networkx/networkx/algorithms/centrality/harmonic.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/harmonic.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["harmonic_centrality"] + @_dispatchable def harmonic_centrality( G: Graph[_Node], nbunch: Iterable[Incomplete] | None = None, distance=None, sources: Iterable[Incomplete] | None = None diff --git a/stubs/networkx/networkx/algorithms/centrality/katz.pyi b/stubs/networkx/networkx/algorithms/centrality/katz.pyi index ba353da7a8ce..6f0f4f3bae8b 100644 --- a/stubs/networkx/networkx/algorithms/centrality/katz.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/katz.pyi @@ -3,6 +3,8 @@ from _typeshed import Incomplete, SupportsGetItem from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["katz_centrality", "katz_centrality_numpy"] + @_dispatchable def katz_centrality( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi b/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi index 6869ac801ce3..b68497906824 100644 --- a/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/laplacian.pyi @@ -3,6 +3,8 @@ from collections.abc import Collection from networkx.classes.graph import Graph, _Node 
from networkx.utils.backends import _dispatchable +__all__ = ["laplacian_centrality"] + @_dispatchable def laplacian_centrality( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/centrality/percolation.pyi b/stubs/networkx/networkx/algorithms/centrality/percolation.pyi index b9124b5e7a0a..f6bee5c6bd05 100644 --- a/stubs/networkx/networkx/algorithms/centrality/percolation.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/percolation.pyi @@ -3,6 +3,8 @@ from _typeshed import Incomplete, SupportsGetItem from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["percolation_centrality"] + @_dispatchable def percolation_centrality( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/centrality/reaching.pyi b/stubs/networkx/networkx/algorithms/centrality/reaching.pyi index b2a1a2a0deeb..f99040b4f6ef 100644 --- a/stubs/networkx/networkx/algorithms/centrality/reaching.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/reaching.pyi @@ -4,6 +4,8 @@ from networkx.classes.digraph import DiGraph from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable +__all__ = ["global_reaching_centrality", "local_reaching_centrality"] + @_dispatchable def global_reaching_centrality(G: DiGraph[_Node], weight: str | None = None, normalized: bool | None = True): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/centrality/second_order.pyi b/stubs/networkx/networkx/algorithms/centrality/second_order.pyi index df8b706360c8..cda7cac85c00 100644 --- a/stubs/networkx/networkx/algorithms/centrality/second_order.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/second_order.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["second_order_centrality"] + @_dispatchable def second_order_centrality(G: Graph[_Node], weight: str | None = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi b/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi index 4035e39ea41d..94075f2541b7 100644 --- a/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["subgraph_centrality_exp", "subgraph_centrality", "communicability_betweenness_centrality", "estrada_index"] + @_dispatchable def subgraph_centrality_exp(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/centrality/trophic.pyi b/stubs/networkx/networkx/algorithms/centrality/trophic.pyi index 654ecef945be..f0ae0a5b53f1 100644 --- a/stubs/networkx/networkx/algorithms/centrality/trophic.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/trophic.pyi @@ -2,6 +2,8 @@ from networkx.classes.digraph import DiGraph from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable +__all__ = ["trophic_levels", "trophic_differences", "trophic_incoherence_parameter"] + @_dispatchable def trophic_levels(G: DiGraph[_Node], weight="weight"): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/centrality/voterank_alg.pyi b/stubs/networkx/networkx/algorithms/centrality/voterank_alg.pyi index c313b9c8e60d..bf5969f63ea0 100644 --- a/stubs/networkx/networkx/algorithms/centrality/voterank_alg.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/voterank_alg.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["voterank"] + @_dispatchable def voterank(G: Graph[_Node], number_of_nodes: int | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/chains.pyi b/stubs/networkx/networkx/algorithms/chains.pyi index e7f99119c9d4..a91aed52d82c 100644 --- a/stubs/networkx/networkx/algorithms/chains.pyi +++ b/stubs/networkx/networkx/algorithms/chains.pyi @@ -3,5 +3,7 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["chain_decomposition"] + @_dispatchable def chain_decomposition(G: Graph[_Node], root: _Node | None = None) -> Generator[list[tuple[_Node, _Node]], None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/chordal.pyi b/stubs/networkx/networkx/algorithms/chordal.pyi index 264e5c769a5d..628df41c19a8 100644 --- a/stubs/networkx/networkx/algorithms/chordal.pyi +++ b/stubs/networkx/networkx/algorithms/chordal.pyi @@ -1,10 +1,20 @@ import sys +from _typeshed import Incomplete from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.exception import NetworkXException from networkx.utils.backends import _dispatchable +__all__ = [ + "is_chordal", + "find_induced_nodes", + "chordal_graph_cliques", + "chordal_graph_treewidth", + "NetworkXTreewidthBoundExceeded", + "complete_to_chordal_graph", +] + class NetworkXTreewidthBoundExceeded(NetworkXException): ... @_dispatchable @@ -15,3 +25,5 @@ def find_induced_nodes(G: Graph[_Node], s: _Node, t: _Node, treewidth_bound: flo def chordal_graph_cliques(G: Graph[_Node]) -> Generator[frozenset[_Node], None, None]: ... @_dispatchable def chordal_graph_treewidth(G: Graph[_Node]) -> int: ... +@_dispatchable +def complete_to_chordal_graph(G) -> tuple[Incomplete, dict[Incomplete, int]]: ... diff --git a/stubs/networkx/networkx/algorithms/clique.pyi b/stubs/networkx/networkx/algorithms/clique.pyi index 72af1e40f811..3fa315dcb846 100644 --- a/stubs/networkx/networkx/algorithms/clique.pyi +++ b/stubs/networkx/networkx/algorithms/clique.pyi @@ -5,6 +5,17 @@ from typing import overload from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "find_cliques", + "find_cliques_recursive", + "make_max_clique_graph", + "make_clique_bipartite", + "node_clique_number", + "number_of_cliques", + "enumerate_all_cliques", + "max_weight_clique", +] + @_dispatchable def enumerate_all_cliques(G: Graph[_Node]) -> Generator[list[_Node], None, None]: ... @_dispatchable @@ -23,3 +34,6 @@ def node_clique_number( ) -> dict[_Node, int]: ... @overload def node_clique_number(G: Graph[_Node], nodes=None, cliques: Iterable[Incomplete] | None = None, separate_nodes=False) -> int: ... +def number_of_cliques(G, nodes=None, cliques=None) -> int | dict[Incomplete, Incomplete]: ... +@_dispatchable +def max_weight_clique(G, weight="weight") -> tuple[Incomplete, Incomplete]: ... 
diff --git a/stubs/networkx/networkx/algorithms/cluster.pyi b/stubs/networkx/networkx/algorithms/cluster.pyi index 4558712c7761..dfa594aca74b 100644 --- a/stubs/networkx/networkx/algorithms/cluster.pyi +++ b/stubs/networkx/networkx/algorithms/cluster.pyi @@ -3,6 +3,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["triangles", "average_clustering", "clustering", "transitivity", "square_clustering", "generalized_degree"] + @_dispatchable def triangles(G: Graph[_Node], nodes=None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi b/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi index 488bcd7fa6ba..1e768dd502b5 100644 --- a/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi +++ b/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["equitable_color"] + @_dispatchable def equitable_color(G: Graph[_Node], num_colors): ... diff --git a/stubs/networkx/networkx/algorithms/communicability_alg.pyi b/stubs/networkx/networkx/algorithms/communicability_alg.pyi index a36df0c93379..4cbbc0adb2e4 100644 --- a/stubs/networkx/networkx/algorithms/communicability_alg.pyi +++ b/stubs/networkx/networkx/algorithms/communicability_alg.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["communicability", "communicability_exp"] + @_dispatchable def communicability(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/community/asyn_fluid.pyi b/stubs/networkx/networkx/algorithms/community/asyn_fluid.pyi index 17a087cd6ce5..5729204d3b82 100644 --- a/stubs/networkx/networkx/algorithms/community/asyn_fluid.pyi +++ b/stubs/networkx/networkx/algorithms/community/asyn_fluid.pyi @@ -2,5 +2,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["asyn_fluidc"] + @_dispatchable def asyn_fluidc(G: Graph[_Node], k: int, max_iter: int = 100, seed: int | RandomState | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/community/centrality.pyi b/stubs/networkx/networkx/algorithms/community/centrality.pyi index 8fbd47609966..ec3b07141fe1 100644 --- a/stubs/networkx/networkx/algorithms/community/centrality.pyi +++ b/stubs/networkx/networkx/algorithms/community/centrality.pyi @@ -4,6 +4,8 @@ from collections.abc import Callable, Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["girvan_newman"] + @_dispatchable def girvan_newman( G: Graph[_Node], most_valuable_edge: Callable[..., Incomplete] | None = None diff --git a/stubs/networkx/networkx/algorithms/community/community_utils.pyi b/stubs/networkx/networkx/algorithms/community/community_utils.pyi index 2528f102b3b6..032161edf31c 100644 --- a/stubs/networkx/networkx/algorithms/community/community_utils.pyi +++ b/stubs/networkx/networkx/algorithms/community/community_utils.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_partition"] + @_dispatchable def is_partition(G: Graph[_Node], communities): ... 
diff --git a/stubs/networkx/networkx/algorithms/community/kclique.pyi b/stubs/networkx/networkx/algorithms/community/kclique.pyi index 13c777be295e..31eaf1d71b6a 100644 --- a/stubs/networkx/networkx/algorithms/community/kclique.pyi +++ b/stubs/networkx/networkx/algorithms/community/kclique.pyi @@ -4,5 +4,7 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["k_clique_communities"] + @_dispatchable def k_clique_communities(G: Graph[_Node], k: int, cliques=None) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi b/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi index 4bb3fce53e9a..00133f83134e 100644 --- a/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi +++ b/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi @@ -4,6 +4,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["kernighan_lin_bisection"] + @_dispatchable def kernighan_lin_bisection( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/community/label_propagation.pyi b/stubs/networkx/networkx/algorithms/community/label_propagation.pyi index 7679d01bc6d3..23840b8cbde7 100644 --- a/stubs/networkx/networkx/algorithms/community/label_propagation.pyi +++ b/stubs/networkx/networkx/algorithms/community/label_propagation.pyi @@ -5,6 +5,10 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["label_propagation_communities", "asyn_lpa_communities", "fast_label_propagation_communities"] + +@_dispatchable +def fast_label_propagation_communities(G, *, weight=None, seed=None) -> Generator[Incomplete]: ... @_dispatchable def asyn_lpa_communities( G: Graph[_Node], weight: str | None = None, seed: int | RandomState | None = None diff --git a/stubs/networkx/networkx/algorithms/community/louvain.pyi b/stubs/networkx/networkx/algorithms/community/louvain.pyi index be5194a9174a..40cf510a5845 100644 --- a/stubs/networkx/networkx/algorithms/community/louvain.pyi +++ b/stubs/networkx/networkx/algorithms/community/louvain.pyi @@ -5,6 +5,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["louvain_communities", "louvain_partitions"] + @_dispatchable def louvain_communities( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/community/lukes.pyi b/stubs/networkx/networkx/algorithms/community/lukes.pyi index 81a7a1335170..766f478a0336 100644 --- a/stubs/networkx/networkx/algorithms/community/lukes.pyi +++ b/stubs/networkx/networkx/algorithms/community/lukes.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["lukes_partitioning"] + @_dispatchable def lukes_partitioning(G: Graph[_Node], max_size: int, node_weight=None, edge_weight=None): ... 
diff --git a/stubs/networkx/networkx/algorithms/community/modularity_max.pyi b/stubs/networkx/networkx/algorithms/community/modularity_max.pyi index b93dbf52033a..5cfedb7d5271 100644 --- a/stubs/networkx/networkx/algorithms/community/modularity_max.pyi +++ b/stubs/networkx/networkx/algorithms/community/modularity_max.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["greedy_modularity_communities", "naive_greedy_modularity_communities"] + @_dispatchable def greedy_modularity_communities( G: Graph[_Node], weight: str | None = None, resolution: float | None = 1, cutoff: int | None = 1, best_n: int | None = None diff --git a/stubs/networkx/networkx/algorithms/components/attracting.pyi b/stubs/networkx/networkx/algorithms/components/attracting.pyi index 93e97a4fb04d..88649cf94a16 100644 --- a/stubs/networkx/networkx/algorithms/components/attracting.pyi +++ b/stubs/networkx/networkx/algorithms/components/attracting.pyi @@ -3,6 +3,8 @@ from collections.abc import Generator from networkx.utils.backends import _dispatchable +__all__ = ["number_attracting_components", "attracting_components", "is_attracting_component"] + @_dispatchable def attracting_components(G) -> Generator[Incomplete, None, None]: ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/components/biconnected.pyi b/stubs/networkx/networkx/algorithms/components/biconnected.pyi index 782f1c69014b..b7056599a5a7 100644 --- a/stubs/networkx/networkx/algorithms/components/biconnected.pyi +++ b/stubs/networkx/networkx/algorithms/components/biconnected.pyi @@ -4,6 +4,8 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["biconnected_components", "biconnected_component_edges", "is_biconnected", "articulation_points"] + @_dispatchable def is_biconnected(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/components/connected.pyi b/stubs/networkx/networkx/algorithms/components/connected.pyi index 55dc4cd2b8fd..c13a09e93da7 100644 --- a/stubs/networkx/networkx/algorithms/components/connected.pyi +++ b/stubs/networkx/networkx/algorithms/components/connected.pyi @@ -4,6 +4,8 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["number_connected_components", "connected_components", "is_connected", "node_connected_component"] + @_dispatchable def connected_components(G: Graph[_Node]) -> Generator[Incomplete, None, None]: ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/components/semiconnected.pyi b/stubs/networkx/networkx/algorithms/components/semiconnected.pyi index 178a602e4e47..9a02ac4e7156 100644 --- a/stubs/networkx/networkx/algorithms/components/semiconnected.pyi +++ b/stubs/networkx/networkx/algorithms/components/semiconnected.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_semiconnected"] + @_dispatchable def is_semiconnected(G: Graph[_Node]): ... 
diff --git a/stubs/networkx/networkx/algorithms/components/strongly_connected.pyi b/stubs/networkx/networkx/algorithms/components/strongly_connected.pyi index 4747f4950e01..7bbf495ae517 100644 --- a/stubs/networkx/networkx/algorithms/components/strongly_connected.pyi +++ b/stubs/networkx/networkx/algorithms/components/strongly_connected.pyi @@ -4,6 +4,14 @@ from networkx.classes.digraph import DiGraph from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "number_strongly_connected_components", + "strongly_connected_components", + "is_strongly_connected", + "kosaraju_strongly_connected_components", + "condensation", +] + @_dispatchable def strongly_connected_components(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/components/weakly_connected.pyi b/stubs/networkx/networkx/algorithms/components/weakly_connected.pyi index 84cccf2f0250..b854f21acd06 100644 --- a/stubs/networkx/networkx/algorithms/components/weakly_connected.pyi +++ b/stubs/networkx/networkx/algorithms/components/weakly_connected.pyi @@ -3,6 +3,8 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["number_weakly_connected_components", "weakly_connected_components", "is_weakly_connected"] + @_dispatchable def weakly_connected_components(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi b/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi index de9d9d64763b..31ef0154fd5e 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi @@ -4,6 +4,8 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["k_edge_augmentation", "is_k_edge_connected", "is_locally_k_edge_connected"] + @_dispatchable def is_k_edge_connected(G: Graph[_Node], k: int): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/connectivity/edge_kcomponents.pyi b/stubs/networkx/networkx/algorithms/connectivity/edge_kcomponents.pyi index 67927d5cb64a..938cae6ec11a 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/edge_kcomponents.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/edge_kcomponents.pyi @@ -4,6 +4,8 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["k_edge_components", "k_edge_subgraphs", "bridge_components", "EdgeComponentAuxGraph"] + @_dispatchable def k_edge_components(G: Graph[_Node], k: int): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/connectivity/stoerwagner.pyi b/stubs/networkx/networkx/algorithms/connectivity/stoerwagner.pyi index c0b8cd3a733c..501e5bf82c88 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/stoerwagner.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/stoerwagner.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["stoer_wagner"] + @_dispatchable def stoer_wagner(G: Graph[_Node], weight: str = "weight", heap: type = ...): ... 
diff --git a/stubs/networkx/networkx/algorithms/connectivity/utils.pyi b/stubs/networkx/networkx/algorithms/connectivity/utils.pyi index b43489088325..bac3f5d9297d 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/utils.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/utils.pyi @@ -1,5 +1,7 @@ from networkx.utils.backends import _dispatchable +__all__ = ["build_auxiliary_node_connectivity", "build_auxiliary_edge_connectivity"] + @_dispatchable def build_auxiliary_node_connectivity(G): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/core.pyi b/stubs/networkx/networkx/algorithms/core.pyi index 401797dc8be1..acd4b9b7a2f0 100644 --- a/stubs/networkx/networkx/algorithms/core.pyi +++ b/stubs/networkx/networkx/algorithms/core.pyi @@ -3,6 +3,8 @@ from _typeshed import Incomplete, SupportsGetItem from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["core_number", "k_core", "k_shell", "k_crust", "k_corona", "k_truss", "onion_layers"] + @_dispatchable def core_number(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/covering.pyi b/stubs/networkx/networkx/algorithms/covering.pyi index 0680bbc27e41..3dd7daadc967 100644 --- a/stubs/networkx/networkx/algorithms/covering.pyi +++ b/stubs/networkx/networkx/algorithms/covering.pyi @@ -4,6 +4,8 @@ from collections.abc import Callable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["min_edge_cover", "is_edge_cover"] + @_dispatchable def min_edge_cover(G: Graph[_Node], matching_algorithm: Callable[..., Incomplete] | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/cuts.pyi b/stubs/networkx/networkx/algorithms/cuts.pyi index 4f3e3474b5b9..be3f75e20529 100644 --- a/stubs/networkx/networkx/algorithms/cuts.pyi +++ b/stubs/networkx/networkx/algorithms/cuts.pyi @@ -3,6 +3,17 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "boundary_expansion", + "conductance", + "cut_size", + "edge_expansion", + "mixing_expansion", + "node_expansion", + "normalized_cut_size", + "volume", +] + @_dispatchable def cut_size(G: Graph[_Node], S: Iterable[_Node], T: Iterable[_Node] | None = None, weight: str | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/cycles.pyi b/stubs/networkx/networkx/algorithms/cycles.pyi index 9f8b887516e7..0c4d9944f4fc 100644 --- a/stubs/networkx/networkx/algorithms/cycles.pyi +++ b/stubs/networkx/networkx/algorithms/cycles.pyi @@ -5,6 +5,16 @@ from networkx.classes.digraph import DiGraph from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "cycle_basis", + "simple_cycles", + "recursive_simple_cycles", + "find_cycle", + "minimum_cycle_basis", + "chordless_cycles", + "girth", +] + @_dispatchable def cycle_basis(G: Graph[_Node], root: _Node | None = None): ... @_dispatchable @@ -24,3 +34,5 @@ def recursive_simple_cycles(G: DiGraph[_Node]): ... def find_cycle(G: Graph[_Node], source=None, orientation=None): ... @_dispatchable def minimum_cycle_basis(G: Graph[_Node], weight: str | None = None): ... +@_dispatchable +def girth(G) -> float | int: ... 
diff --git a/stubs/networkx/networkx/algorithms/d_separation.pyi b/stubs/networkx/networkx/algorithms/d_separation.pyi index e1eb575409fd..ad2bf15dd428 100644 --- a/stubs/networkx/networkx/algorithms/d_separation.pyi +++ b/stubs/networkx/networkx/algorithms/d_separation.pyi @@ -1,7 +1,15 @@ +from _typeshed import Incomplete + from networkx.classes.digraph import DiGraph from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_d_separator", "is_minimal_d_separator", "find_minimal_d_separator", "d_separated", "minimal_d_separator"] + +@_dispatchable +def is_d_separator(G, x, y, z) -> bool: ... +@_dispatchable +def find_minimal_d_separator(G, x, y, *, included=None, restricted=None) -> set[Incomplete] | None: ... @_dispatchable def d_separated(G, x, y, z): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/dag.pyi b/stubs/networkx/networkx/algorithms/dag.pyi index 07c3b3deff43..b44d34db3690 100644 --- a/stubs/networkx/networkx/algorithms/dag.pyi +++ b/stubs/networkx/networkx/algorithms/dag.pyi @@ -5,6 +5,25 @@ from networkx.classes.digraph import DiGraph from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "descendants", + "ancestors", + "topological_sort", + "lexicographical_topological_sort", + "all_topological_sorts", + "topological_generations", + "is_directed_acyclic_graph", + "is_aperiodic", + "transitive_closure", + "transitive_closure_dag", + "transitive_reduction", + "antichains", + "dag_longest_path", + "dag_longest_path_length", + "dag_to_branching", + "compute_v_structures", +] + @_dispatchable def descendants(G: Graph[_Node], source) -> set[_Node]: ... @_dispatchable @@ -42,3 +61,5 @@ def dag_longest_path( def dag_longest_path_length(G: DiGraph[_Node], weight: str | None = "weight", default_weight: int | None = 1) -> int: ... @_dispatchable def dag_to_branching(G: Graph[_Node]) -> Graph[_Node]: ... +@_dispatchable +def compute_v_structures(G) -> Generator[tuple[Incomplete, Incomplete, Incomplete]]: ... diff --git a/stubs/networkx/networkx/algorithms/distance_measures.pyi b/stubs/networkx/networkx/algorithms/distance_measures.pyi index 1f768011a2e9..a74483194497 100644 --- a/stubs/networkx/networkx/algorithms/distance_measures.pyi +++ b/stubs/networkx/networkx/algorithms/distance_measures.pyi @@ -1,11 +1,26 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "eccentricity", + "diameter", + "harmonic_diameter", + "radius", + "periphery", + "center", + "barycenter", + "resistance_distance", + "kemeny_constant", + "effective_graph_resistance", +] + @_dispatchable def eccentricity(G: Graph[_Node], v: _Node | None = None, sp=None, weight: str | None = None): ... @_dispatchable def diameter(G: Graph[_Node], e=None, usebounds=False, weight: str | None = None): ... @_dispatchable +def harmonic_diameter(G, sp=None) -> float: ... +@_dispatchable def periphery(G: Graph[_Node], e=None, usebounds=False, weight: str | None = None): ... @_dispatchable def radius(G: Graph[_Node], e=None, usebounds=False, weight: str | None = None): ... @@ -15,3 +30,7 @@ def center(G: Graph[_Node], e=None, usebounds=False, weight: str | None = None): def barycenter(G, weight: str | None = None, attr=None, sp=None): ... @_dispatchable def resistance_distance(G: Graph[_Node], nodeA=None, nodeB=None, weight: str | None = None, invert_weight: bool = True): ... 
+@_dispatchable +def effective_graph_resistance(G, weight=None, invert_weight=True) -> float: ... +@_dispatchable +def kemeny_constant(G, *, weight=None) -> float: ... diff --git a/stubs/networkx/networkx/algorithms/distance_regular.pyi b/stubs/networkx/networkx/algorithms/distance_regular.pyi index 182d37110f07..616db02e900e 100644 --- a/stubs/networkx/networkx/algorithms/distance_regular.pyi +++ b/stubs/networkx/networkx/algorithms/distance_regular.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_distance_regular", "is_strongly_regular", "intersection_array", "global_parameters"] + @_dispatchable def is_distance_regular(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/dominance.pyi b/stubs/networkx/networkx/algorithms/dominance.pyi index cc431c676e73..76b3ed977bc9 100644 --- a/stubs/networkx/networkx/algorithms/dominance.pyi +++ b/stubs/networkx/networkx/algorithms/dominance.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["immediate_dominators", "dominance_frontiers"] + @_dispatchable def immediate_dominators(G: Graph[_Node], start: _Node): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/dominating.pyi b/stubs/networkx/networkx/algorithms/dominating.pyi index 3786bb538077..a1058271ad9b 100644 --- a/stubs/networkx/networkx/algorithms/dominating.pyi +++ b/stubs/networkx/networkx/algorithms/dominating.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["dominating_set", "is_dominating_set"] + @_dispatchable def dominating_set(G: Graph[_Node], start_with: _Node | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/efficiency_measures.pyi b/stubs/networkx/networkx/algorithms/efficiency_measures.pyi index 420795a59a58..13369a99ef06 100644 --- a/stubs/networkx/networkx/algorithms/efficiency_measures.pyi +++ b/stubs/networkx/networkx/algorithms/efficiency_measures.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable +__all__ = ["efficiency", "local_efficiency", "global_efficiency"] + @_dispatchable def efficiency(G, u: _Node, v: _Node): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/euler.pyi b/stubs/networkx/networkx/algorithms/euler.pyi index 0e8552402c8d..80f0d6e024e2 100644 --- a/stubs/networkx/networkx/algorithms/euler.pyi +++ b/stubs/networkx/networkx/algorithms/euler.pyi @@ -4,6 +4,8 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_eulerian", "eulerian_circuit", "eulerize", "is_semieulerian", "has_eulerian_path", "eulerian_path"] + @_dispatchable def is_eulerian(G: Graph[_Node]): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/flow/boykovkolmogorov.pyi b/stubs/networkx/networkx/algorithms/flow/boykovkolmogorov.pyi index 1389aac0009d..90a90d365a2d 100644 --- a/stubs/networkx/networkx/algorithms/flow/boykovkolmogorov.pyi +++ b/stubs/networkx/networkx/algorithms/flow/boykovkolmogorov.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["boykov_kolmogorov"] + @_dispatchable def boykov_kolmogorov( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/flow/capacityscaling.pyi b/stubs/networkx/networkx/algorithms/flow/capacityscaling.pyi index 8f5b3bef312a..13b7ad6da8a1 100644 --- a/stubs/networkx/networkx/algorithms/flow/capacityscaling.pyi +++ b/stubs/networkx/networkx/algorithms/flow/capacityscaling.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["capacity_scaling"] + @_dispatchable def capacity_scaling( G: Graph[_Node], demand: str = "demand", capacity: str = "capacity", weight: str = "weight", heap: type = ... diff --git a/stubs/networkx/networkx/algorithms/flow/dinitz_alg.pyi b/stubs/networkx/networkx/algorithms/flow/dinitz_alg.pyi index bd4c8bb1bdd6..03136846c7d2 100644 --- a/stubs/networkx/networkx/algorithms/flow/dinitz_alg.pyi +++ b/stubs/networkx/networkx/algorithms/flow/dinitz_alg.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["dinitz"] + @_dispatchable def dinitz( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/flow/edmondskarp.pyi b/stubs/networkx/networkx/algorithms/flow/edmondskarp.pyi index 95fb8d5a3809..c415d24327ab 100644 --- a/stubs/networkx/networkx/algorithms/flow/edmondskarp.pyi +++ b/stubs/networkx/networkx/algorithms/flow/edmondskarp.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["edmonds_karp"] + @_dispatchable def edmonds_karp( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/flow/mincost.pyi b/stubs/networkx/networkx/algorithms/flow/mincost.pyi index 86e0834f567e..86c8e66dcd6c 100644 --- a/stubs/networkx/networkx/algorithms/flow/mincost.pyi +++ b/stubs/networkx/networkx/algorithms/flow/mincost.pyi @@ -3,6 +3,8 @@ from _typeshed import Incomplete, SupportsGetItem from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["min_cost_flow_cost", "min_cost_flow", "cost_of_flow", "max_flow_min_cost"] + @_dispatchable def min_cost_flow_cost(G: Graph[_Node], demand: str = "demand", capacity: str = "capacity", weight: str = "weight"): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/flow/networksimplex.pyi b/stubs/networkx/networkx/algorithms/flow/networksimplex.pyi index b9cf6bae5b18..c6028757f936 100644 --- a/stubs/networkx/networkx/algorithms/flow/networksimplex.pyi +++ b/stubs/networkx/networkx/algorithms/flow/networksimplex.pyi @@ -4,6 +4,8 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["network_simplex"] + class _DataEssentialsAndFunctions: node_list: Incomplete node_indices: Incomplete diff --git a/stubs/networkx/networkx/algorithms/flow/preflowpush.pyi b/stubs/networkx/networkx/algorithms/flow/preflowpush.pyi index cd25f1cb00bd..e3f5b5bed7e4 100644 --- a/stubs/networkx/networkx/algorithms/flow/preflowpush.pyi +++ b/stubs/networkx/networkx/algorithms/flow/preflowpush.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["preflow_push"] + @_dispatchable def preflow_push( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/flow/shortestaugmentingpath.pyi b/stubs/networkx/networkx/algorithms/flow/shortestaugmentingpath.pyi index 43b5ae4af72c..643baf509cdd 100644 --- a/stubs/networkx/networkx/algorithms/flow/shortestaugmentingpath.pyi +++ b/stubs/networkx/networkx/algorithms/flow/shortestaugmentingpath.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["shortest_augmenting_path"] + @_dispatchable def shortest_augmenting_path( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/flow/utils.pyi b/stubs/networkx/networkx/algorithms/flow/utils.pyi index 3b041a5fc2b1..0ae3a6bba8cc 100644 --- a/stubs/networkx/networkx/algorithms/flow/utils.pyi +++ b/stubs/networkx/networkx/algorithms/flow/utils.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["CurrentEdge", "Level", "GlobalRelabelThreshold", "build_residual_network", "detect_unboundedness", "build_flow_dict"] + class CurrentEdge: def __init__(self, edges) -> None: ... def get(self): ... diff --git a/stubs/networkx/networkx/algorithms/graph_hashing.pyi b/stubs/networkx/networkx/algorithms/graph_hashing.pyi index 98b93f08f7b1..7232060d585f 100644 --- a/stubs/networkx/networkx/algorithms/graph_hashing.pyi +++ b/stubs/networkx/networkx/algorithms/graph_hashing.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["weisfeiler_lehman_graph_hash", "weisfeiler_lehman_subgraph_hashes"] + @_dispatchable def weisfeiler_lehman_graph_hash( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/graphical.pyi b/stubs/networkx/networkx/algorithms/graphical.pyi index 87c3a9068dfd..54936da7634a 100644 --- a/stubs/networkx/networkx/algorithms/graphical.pyi +++ b/stubs/networkx/networkx/algorithms/graphical.pyi @@ -3,6 +3,15 @@ from collections.abc import Iterable from networkx.utils.backends import _dispatchable +__all__ = [ + "is_graphical", + "is_multigraphical", + "is_pseudographical", + "is_digraphical", + "is_valid_degree_sequence_erdos_gallai", + "is_valid_degree_sequence_havel_hakimi", +] + @_dispatchable def is_graphical(sequence: Iterable[Incomplete], method="eg"): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/hierarchy.pyi b/stubs/networkx/networkx/algorithms/hierarchy.pyi index e747ffc5109e..e982bfa88a4f 100644 --- a/stubs/networkx/networkx/algorithms/hierarchy.pyi +++ b/stubs/networkx/networkx/algorithms/hierarchy.pyi @@ -1,4 +1,6 @@ from networkx.utils.backends import _dispatchable +__all__ = ["flow_hierarchy"] + @_dispatchable def flow_hierarchy(G, weight: str | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/hybrid.pyi b/stubs/networkx/networkx/algorithms/hybrid.pyi index 171162022da3..5e1326905d26 100644 --- a/stubs/networkx/networkx/algorithms/hybrid.pyi +++ b/stubs/networkx/networkx/algorithms/hybrid.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["kl_connected_subgraph", "is_kl_connected"] + @_dispatchable def kl_connected_subgraph(G: Graph[_Node], k: int, l: int, low_memory: bool = False, same_as_graph: bool = False): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/isolate.pyi b/stubs/networkx/networkx/algorithms/isolate.pyi index d08ac5b19588..4a5715d228f4 100644 --- a/stubs/networkx/networkx/algorithms/isolate.pyi +++ b/stubs/networkx/networkx/algorithms/isolate.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_isolate", "isolates", "number_of_isolates"] + @_dispatchable def is_isolate(G: Graph[_Node], n: _Node): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi b/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi index 4a64a8191674..e0dae00bd5fe 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi @@ -1,6 +1,8 @@ from _typeshed import Incomplete from collections.abc import Generator +__all__ = ["ISMAGS"] + class ISMAGS: graph: Incomplete subgraph: Incomplete diff --git a/stubs/networkx/networkx/algorithms/isomorphism/matchhelpers.pyi b/stubs/networkx/networkx/algorithms/isomorphism/matchhelpers.pyi index aab135be0fda..2175fb430a40 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/matchhelpers.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/matchhelpers.pyi @@ -2,6 +2,18 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = [ + "categorical_node_match", + "categorical_edge_match", + "categorical_multiedge_match", + "numerical_node_match", + "numerical_edge_match", + "numerical_multiedge_match", + "generic_node_match", + "generic_edge_match", + "generic_multiedge_match", +] + @_dispatchable def categorical_node_match(attr, default): ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/tree_isomorphism.pyi b/stubs/networkx/networkx/algorithms/isomorphism/tree_isomorphism.pyi index 5cfad7cd15cd..0497880a4136 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/tree_isomorphism.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/tree_isomorphism.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["rooted_tree_isomorphism", "tree_isomorphism"] + @_dispatchable def rooted_tree_isomorphism(t1, root1, t2, root2): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/isomorphism/vf2pp.pyi b/stubs/networkx/networkx/algorithms/isomorphism/vf2pp.pyi index 704bdcfc0f26..0ea2eff0b582 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/vf2pp.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/vf2pp.pyi @@ -5,6 +5,8 @@ from typing import NamedTuple from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["vf2pp_isomorphism", "vf2pp_is_isomorphic", "vf2pp_all_isomorphisms"] + class _GraphParameters(NamedTuple): G1: Incomplete G2: Incomplete diff --git a/stubs/networkx/networkx/algorithms/link_analysis/hits_alg.pyi b/stubs/networkx/networkx/algorithms/link_analysis/hits_alg.pyi index b91bc4610a9d..c3bbba2cea25 100644 --- a/stubs/networkx/networkx/algorithms/link_analysis/hits_alg.pyi +++ b/stubs/networkx/networkx/algorithms/link_analysis/hits_alg.pyi @@ -3,6 +3,8 @@ from _typeshed import Incomplete, SupportsGetItem from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["hits"] + @_dispatchable def hits( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi b/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi index f79671d5cffb..93a97d20ea4b 100644 --- a/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi +++ b/stubs/networkx/networkx/algorithms/link_analysis/pagerank_alg.pyi @@ -4,6 +4,8 @@ from collections.abc import Collection from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["pagerank", "google_matrix"] + @_dispatchable def pagerank( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/link_prediction.pyi b/stubs/networkx/networkx/algorithms/link_prediction.pyi index 6c298d3ca531..9a9a815dc998 100644 --- a/stubs/networkx/networkx/algorithms/link_prediction.pyi +++ b/stubs/networkx/networkx/algorithms/link_prediction.pyi @@ -1,6 +1,17 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "resource_allocation_index", + "jaccard_coefficient", + "adamic_adar_index", + "preferential_attachment", + "cn_soundarajan_hopcroft", + "ra_index_soundarajan_hopcroft", + "within_inter_cluster", + "common_neighbor_centrality", +] + @_dispatchable def resource_allocation_index(G: Graph[_Node], ebunch=None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi b/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi index b884ed0cdd2c..480a56cfdf5e 100644 --- a/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi +++ b/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi @@ -5,6 +5,8 @@ from networkx.classes.digraph import DiGraph from networkx.classes.graph import _Node from networkx.utils.backends import _dispatchable +__all__ = ["all_pairs_lowest_common_ancestor", "tree_all_pairs_lowest_common_ancestor", "lowest_common_ancestor"] + @_dispatchable def all_pairs_lowest_common_ancestor(G: DiGraph[_Node], pairs=None): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/matching.pyi b/stubs/networkx/networkx/algorithms/matching.pyi index cf7301679632..4753abee8876 100644 --- a/stubs/networkx/networkx/algorithms/matching.pyi +++ b/stubs/networkx/networkx/algorithms/matching.pyi @@ -1,6 +1,15 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "is_matching", + "is_maximal_matching", + "is_perfect_matching", + "max_weight_matching", + "min_weight_matching", + "maximal_matching", +] + @_dispatchable def maximal_matching(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/minors/__init__.pyi b/stubs/networkx/networkx/algorithms/minors/__init__.pyi index e8f96f71acd2..746914c0bcec 100644 --- a/stubs/networkx/networkx/algorithms/minors/__init__.pyi +++ b/stubs/networkx/networkx/algorithms/minors/__init__.pyi @@ -5,3 +5,5 @@ from networkx.algorithms.minors.contraction import ( identified_nodes as identified_nodes, quotient_graph as quotient_graph, ) + +__all__ = ["contracted_edge", "contracted_nodes", "equivalence_classes", "identified_nodes", "quotient_graph"] diff --git a/stubs/networkx/networkx/algorithms/minors/contraction.pyi b/stubs/networkx/networkx/algorithms/minors/contraction.pyi index 40dc9a0adda3..200e9f2fcdca 100644 --- a/stubs/networkx/networkx/algorithms/minors/contraction.pyi +++ b/stubs/networkx/networkx/algorithms/minors/contraction.pyi @@ -4,6 +4,8 @@ from collections.abc import Callable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["contracted_edge", "contracted_nodes", "equivalence_classes", "identified_nodes", "quotient_graph"] + @_dispatchable def equivalence_classes(iterable, relation): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/mis.pyi b/stubs/networkx/networkx/algorithms/mis.pyi index 66ec315178b7..bbe4abaeb266 100644 --- a/stubs/networkx/networkx/algorithms/mis.pyi +++ b/stubs/networkx/networkx/algorithms/mis.pyi @@ -5,6 +5,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["maximal_independent_set"] + @_dispatchable def maximal_independent_set( G: Graph[_Node], nodes: Iterable[Incomplete] | None = None, seed: int | RandomState | None = None diff --git a/stubs/networkx/networkx/algorithms/moral.pyi b/stubs/networkx/networkx/algorithms/moral.pyi index 626c6c3f0393..7727b2f68e54 100644 --- a/stubs/networkx/networkx/algorithms/moral.pyi +++ b/stubs/networkx/networkx/algorithms/moral.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["moral_graph"] + @_dispatchable def moral_graph(G: Graph[_Node]): ... diff --git a/stubs/networkx/networkx/algorithms/node_classification.pyi b/stubs/networkx/networkx/algorithms/node_classification.pyi index 45f0e4df7886..8d5089104a36 100644 --- a/stubs/networkx/networkx/algorithms/node_classification.pyi +++ b/stubs/networkx/networkx/algorithms/node_classification.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["harmonic_function", "local_and_global_consistency"] + @_dispatchable def harmonic_function(G: Graph[_Node], max_iter: int = 30, label_name: str = "label"): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/non_randomness.pyi b/stubs/networkx/networkx/algorithms/non_randomness.pyi index fc1f04b00f2e..b9b8f840aa0d 100644 --- a/stubs/networkx/networkx/algorithms/non_randomness.pyi +++ b/stubs/networkx/networkx/algorithms/non_randomness.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["non_randomness"] + @_dispatchable def non_randomness(G: Graph[_Node], k: int | None = None, weight: str | None = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/operators/all.pyi b/stubs/networkx/networkx/algorithms/operators/all.pyi index 67e66bfcf29c..9816017a7840 100644 --- a/stubs/networkx/networkx/algorithms/operators/all.pyi +++ b/stubs/networkx/networkx/algorithms/operators/all.pyi @@ -3,6 +3,8 @@ from collections.abc import Iterable from networkx.utils.backends import _dispatchable +__all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"] + @_dispatchable def union_all(graphs: Iterable[Incomplete], rename: Iterable[Incomplete] | None = ()): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/operators/binary.pyi b/stubs/networkx/networkx/algorithms/operators/binary.pyi index 7666bdbef236..493b1762c7ad 100644 --- a/stubs/networkx/networkx/algorithms/operators/binary.pyi +++ b/stubs/networkx/networkx/algorithms/operators/binary.pyi @@ -6,6 +6,8 @@ from networkx.classes.digraph import DiGraph from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["union", "compose", "disjoint_union", "intersection", "difference", "symmetric_difference", "full_join"] + @_dispatchable def disjoint_union(G: Graph[_Node], H: Graph[_Node]): ... @_dispatchable @@ -21,4 +23,6 @@ _Y_co = TypeVar("_Y_co", bound=Hashable, covariant=True) @_dispatchable def compose(G: Graph[_X_co], H: Graph[_Y_co]) -> DiGraph[_X_co | _Y_co]: ... @_dispatchable +def full_join(G, H, rename=(None, None)): ... +@_dispatchable def union(G: Graph[_X_co], H: Graph[_Y_co], rename: Iterable[Incomplete] | None = ()) -> DiGraph[_X_co | _Y_co]: ... diff --git a/stubs/networkx/networkx/algorithms/operators/product.pyi b/stubs/networkx/networkx/algorithms/operators/product.pyi index de12cd51bd36..ae3adbb54f4e 100644 --- a/stubs/networkx/networkx/algorithms/operators/product.pyi +++ b/stubs/networkx/networkx/algorithms/operators/product.pyi @@ -1,3 +1,4 @@ +from _typeshed import Incomplete from collections.abc import Hashable from typing import TypeVar @@ -7,6 +8,17 @@ from networkx.utils.backends import _dispatchable _X = TypeVar("_X", bound=Hashable) _Y = TypeVar("_Y", bound=Hashable) +__all__ = [ + "tensor_product", + "cartesian_product", + "lexicographic_product", + "strong_product", + "power", + "rooted_product", + "corona_product", + "modular_product", +] + @_dispatchable def tensor_product(G: Graph[_X], H: Graph[_Y]) -> Graph[tuple[_X, _Y]]: ... @_dispatchable @@ -21,3 +33,5 @@ def power(G: Graph[_Node], k): ... def rooted_product(G: Graph[_X], H: Graph[_Y], root: _Y) -> Graph[tuple[_X, _Y]]: ... @_dispatchable def corona_product(G: Graph[_X], H: Graph[_Y]) -> Graph[tuple[_X, _Y]]: ... +@_dispatchable +def modular_product(G, H) -> Graph[Incomplete]: ... 
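For reference, a minimal usage sketch of the operator functions whose stubs are updated above; this is illustrative only and assumes a recent networkx release (modular_product is available from roughly networkx 3.4 onward, exported at the top level):

import networkx as nx

# Two small graphs to combine.
G = nx.path_graph(3)   # 0 - 1 - 2
H = nx.path_graph(2)   # 0 - 1

# full_join takes the disjoint union and then connects every node of G
# to every node of H; rename avoids label collisions between operands.
J = nx.full_join(G, H, rename=("G", "H"))
assert J.number_of_nodes() == 5 and J.number_of_edges() == 3 + 3 * 2

# modular_product (assumed networkx >= 3.4) returns a graph on (u, v) pairs.
M = nx.modular_product(G, H)
assert all(isinstance(node, tuple) for node in M)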
diff --git a/stubs/networkx/networkx/algorithms/operators/unary.pyi b/stubs/networkx/networkx/algorithms/operators/unary.pyi index cda8ebbf6b45..0a42c013322d 100644 --- a/stubs/networkx/networkx/algorithms/operators/unary.pyi +++ b/stubs/networkx/networkx/algorithms/operators/unary.pyi @@ -6,6 +6,8 @@ from networkx.utils.backends import _dispatchable _G = TypeVar("_G", bound=Graph[Hashable]) +__all__ = ["complement", "reverse"] + @_dispatchable def complement(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/planar_drawing.pyi b/stubs/networkx/networkx/algorithms/planar_drawing.pyi index 028c7aec2520..197e912ec440 100644 --- a/stubs/networkx/networkx/algorithms/planar_drawing.pyi +++ b/stubs/networkx/networkx/algorithms/planar_drawing.pyi @@ -1,4 +1,6 @@ from networkx.utils.backends import _dispatchable +__all__ = ["combinatorial_embedding_to_pos"] + @_dispatchable def combinatorial_embedding_to_pos(embedding, fully_triangulate: bool = False): ... diff --git a/stubs/networkx/networkx/algorithms/polynomials.pyi b/stubs/networkx/networkx/algorithms/polynomials.pyi index cadffc38a170..9c40146e8d5b 100644 --- a/stubs/networkx/networkx/algorithms/polynomials.pyi +++ b/stubs/networkx/networkx/algorithms/polynomials.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["tutte_polynomial", "chromatic_polynomial"] + @_dispatchable def tutte_polynomial(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/reciprocity.pyi b/stubs/networkx/networkx/algorithms/reciprocity.pyi index 547dc885d5cc..2208e9e00507 100644 --- a/stubs/networkx/networkx/algorithms/reciprocity.pyi +++ b/stubs/networkx/networkx/algorithms/reciprocity.pyi @@ -3,6 +3,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["reciprocity", "overall_reciprocity"] + @_dispatchable def reciprocity(G: Graph[_Node], nodes: Iterable[_Node] | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/regular.pyi b/stubs/networkx/networkx/algorithms/regular.pyi index 61a19ea04a3b..04d966f9416f 100644 --- a/stubs/networkx/networkx/algorithms/regular.pyi +++ b/stubs/networkx/networkx/algorithms/regular.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_regular", "is_k_regular", "k_factor"] + @_dispatchable def is_regular(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/richclub.pyi b/stubs/networkx/networkx/algorithms/richclub.pyi index 2b7b8b21fb2c..abf5cd680b9a 100644 --- a/stubs/networkx/networkx/algorithms/richclub.pyi +++ b/stubs/networkx/networkx/algorithms/richclub.pyi @@ -2,5 +2,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["rich_club_coefficient"] + @_dispatchable def rich_club_coefficient(G: Graph[_Node], normalized: bool = True, Q: float = 100, seed: int | RandomState | None = None): ... 
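As a quick sanity check of the long-standing APIs typed in the unary-operator, regular, and reciprocity stubs above (illustrative sketch only):

import networkx as nx

# A 5-cycle is 2-regular, and C5 is self-complementary, so its complement
# is 2-regular as well.
C = nx.cycle_graph(5)
assert nx.is_regular(C) and nx.is_k_regular(C, k=2)
assert nx.is_regular(nx.complement(C))

# overall_reciprocity: fraction of directed edges whose reverse also exists.
D = nx.DiGraph([(0, 1), (1, 0), (1, 2)])
assert abs(nx.overall_reciprocity(D) - 2 / 3) < 1e-9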
diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/astar.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/astar.pyi index 63260ea30639..0faccba54c02 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/astar.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/astar.pyi @@ -5,6 +5,8 @@ from typing import Any from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["astar_path", "astar_path_length"] + @_dispatchable def astar_path( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi index f40ce0ac0403..ce36193312ab 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/dense.pyi @@ -4,6 +4,8 @@ from collections.abc import Collection from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["floyd_warshall", "floyd_warshall_predecessor_and_distance", "reconstruct_path", "floyd_warshall_numpy"] + @_dispatchable def floyd_warshall_numpy(G: Graph[_Node], nodelist: Collection[_Node] | None = None, weight: str | None = "weight"): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/generic.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/generic.pyi index f623aeb9ee90..0857f5602331 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/generic.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/generic.pyi @@ -5,6 +5,16 @@ from typing import overload from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "shortest_path", + "all_shortest_paths", + "single_source_all_shortest_paths", + "all_pairs_all_shortest_paths", + "shortest_path_length", + "average_shortest_path_length", + "has_path", +] + @_dispatchable def has_path(G: Graph[_Node], source: _Node, target: _Node) -> bool: ... @overload @@ -51,3 +61,11 @@ def all_shortest_paths( weight: str | Callable[..., Incomplete] | None = None, method: str | None = "dijkstra", ) -> Generator[list[_Node], None, None]: ... +@_dispatchable +def single_source_all_shortest_paths( + G, source, weight=None, method="dijkstra" +) -> Generator[tuple[Incomplete, list[list[Incomplete]]]]: ... +@_dispatchable +def all_pairs_all_shortest_paths( + G, weight=None, method="dijkstra" +) -> Generator[tuple[Incomplete, dict[Incomplete, Incomplete]]]: ... diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi index 5e12a5322da0..7b5248b19985 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/unweighted.pyi @@ -4,6 +4,17 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "bidirectional_shortest_path", + "single_source_shortest_path", + "single_source_shortest_path_length", + "single_target_shortest_path", + "single_target_shortest_path_length", + "all_pairs_shortest_path", + "all_pairs_shortest_path_length", + "predecessor", +] + @_dispatchable def single_source_shortest_path_length(G: Graph[_Node], source: _Node, cutoff: int | None = None): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi b/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi index dd933aef97a5..28fb31c561ec 100644 --- a/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi +++ b/stubs/networkx/networkx/algorithms/shortest_paths/weighted.pyi @@ -5,6 +5,34 @@ from typing import Any from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "dijkstra_path", + "dijkstra_path_length", + "bidirectional_dijkstra", + "single_source_dijkstra", + "single_source_dijkstra_path", + "single_source_dijkstra_path_length", + "multi_source_dijkstra", + "multi_source_dijkstra_path", + "multi_source_dijkstra_path_length", + "all_pairs_dijkstra", + "all_pairs_dijkstra_path", + "all_pairs_dijkstra_path_length", + "dijkstra_predecessor_and_distance", + "bellman_ford_path", + "bellman_ford_path_length", + "single_source_bellman_ford", + "single_source_bellman_ford_path", + "single_source_bellman_ford_path_length", + "all_pairs_bellman_ford_path", + "all_pairs_bellman_ford_path_length", + "bellman_ford_predecessor_and_distance", + "negative_edge_cycle", + "find_negative_cycle", + "goldberg_radzik", + "johnson", +] + @_dispatchable def dijkstra_path( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/similarity.pyi b/stubs/networkx/networkx/algorithms/similarity.pyi index 92164d71d9d8..822a9e46dff8 100644 --- a/stubs/networkx/networkx/algorithms/similarity.pyi +++ b/stubs/networkx/networkx/algorithms/similarity.pyi @@ -5,6 +5,16 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = [ + "graph_edit_distance", + "optimal_edit_paths", + "optimize_graph_edit_distance", + "optimize_edit_paths", + "simrank_similarity", + "panther_similarity", + "generate_random_paths", +] + @_dispatchable def graph_edit_distance( G1: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/smallworld.pyi b/stubs/networkx/networkx/algorithms/smallworld.pyi index 1489f56198ae..665226a3d6ee 100644 --- a/stubs/networkx/networkx/algorithms/smallworld.pyi +++ b/stubs/networkx/networkx/algorithms/smallworld.pyi @@ -2,6 +2,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["random_reference", "lattice_reference", "sigma", "omega"] + @_dispatchable def random_reference(G: Graph[_Node], niter: int = 1, connectivity: bool = True, seed: int | RandomState | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/smetric.pyi b/stubs/networkx/networkx/algorithms/smetric.pyi index ee2e7115e94a..e1cf4fd2563f 100644 --- a/stubs/networkx/networkx/algorithms/smetric.pyi +++ b/stubs/networkx/networkx/algorithms/smetric.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["s_metric"] + @_dispatchable def s_metric(G: Graph[_Node]): ... 
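The generic shortest-path stubs a few hunks above add single_source_all_shortest_paths and all_pairs_all_shortest_paths; a rough usage sketch, assuming networkx >= 3.3 where both are exported at the top level:

import networkx as nx

# In a 4-cycle there are two equally short paths between opposite nodes.
G = nx.cycle_graph(4)
paths = dict(nx.single_source_all_shortest_paths(G, source=0))
assert sorted(paths[2]) == [[0, 1, 2], [0, 3, 2]]

# The all-pairs variant lazily yields (source, {target: [path, ...]}) pairs.
for source, by_target in nx.all_pairs_all_shortest_paths(G):
    assert source in by_target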
diff --git a/stubs/networkx/networkx/algorithms/sparsifiers.pyi b/stubs/networkx/networkx/algorithms/sparsifiers.pyi index e2c456c9c1c1..67670ddcce21 100644 --- a/stubs/networkx/networkx/algorithms/sparsifiers.pyi +++ b/stubs/networkx/networkx/algorithms/sparsifiers.pyi @@ -2,5 +2,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["spanner"] + @_dispatchable def spanner(G: Graph[_Node], stretch: float, weight: str | None = None, seed: int | RandomState | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/structuralholes.pyi b/stubs/networkx/networkx/algorithms/structuralholes.pyi index 76e026c4415d..8d1519350ad6 100644 --- a/stubs/networkx/networkx/algorithms/structuralholes.pyi +++ b/stubs/networkx/networkx/algorithms/structuralholes.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["constraint", "local_constraint", "effective_size"] + @_dispatchable def effective_size(G: Graph[_Node], nodes: Iterable[Incomplete] | None = None, weight: str | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/summarization.pyi b/stubs/networkx/networkx/algorithms/summarization.pyi index 05116996663b..db044b9271f7 100644 --- a/stubs/networkx/networkx/algorithms/summarization.pyi +++ b/stubs/networkx/networkx/algorithms/summarization.pyi @@ -4,6 +4,8 @@ from collections.abc import Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["dedensify", "snap_aggregation"] + @_dispatchable def dedensify(G: Graph[_Node], threshold: int, prefix=None, copy: bool | None = True): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/swap.pyi b/stubs/networkx/networkx/algorithms/swap.pyi index 006bb38c9cd0..529b895d61c0 100644 --- a/stubs/networkx/networkx/algorithms/swap.pyi +++ b/stubs/networkx/networkx/algorithms/swap.pyi @@ -3,6 +3,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["double_edge_swap", "connected_double_edge_swap", "directed_edge_swap"] + @_dispatchable def directed_edge_swap(G: DiGraph[_Node], *, nswap: int = 1, max_tries: int = 100, seed: int | RandomState | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/threshold.pyi b/stubs/networkx/networkx/algorithms/threshold.pyi index 8a06957f891a..d92695ec45d9 100644 --- a/stubs/networkx/networkx/algorithms/threshold.pyi +++ b/stubs/networkx/networkx/algorithms/threshold.pyi @@ -1,6 +1,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_threshold_graph", "find_threshold_graph"] + @_dispatchable def is_threshold_graph(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/time_dependent.pyi b/stubs/networkx/networkx/algorithms/time_dependent.pyi index 4a77f32bb5dc..554159d1d4bf 100644 --- a/stubs/networkx/networkx/algorithms/time_dependent.pyi +++ b/stubs/networkx/networkx/algorithms/time_dependent.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["cd_index"] + @_dispatchable def cd_index(G: Graph[_Node], node: _Node, time_delta, *, time: str = "time", weight: str | None = None): ... 
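A short sketch of the degree-preserving swap routines typed in the swap stub above; the rewiring itself is random, but the degree sequence is invariant (illustrative, using standard networkx calls):

import networkx as nx

# double_edge_swap rewires edges in place without changing any node's degree.
G = nx.gnm_random_graph(20, 40, seed=42)
degrees_before = dict(G.degree())
nx.double_edge_swap(G, nswap=10, max_tries=500, seed=42)
assert dict(G.degree()) == degrees_before

# directed_edge_swap is the directed analogue, preserving in- and out-degrees.
D = nx.gnp_random_graph(20, 0.2, seed=7, directed=True)
in_before, out_before = dict(D.in_degree()), dict(D.out_degree())
nx.directed_edge_swap(D, nswap=5, max_tries=1000, seed=7)
assert dict(D.in_degree()) == in_before
assert dict(D.out_degree()) == out_before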
diff --git a/stubs/networkx/networkx/algorithms/tournament.pyi b/stubs/networkx/networkx/algorithms/tournament.pyi index 9d92b1a46429..f80e6e5b9834 100644 --- a/stubs/networkx/networkx/algorithms/tournament.pyi +++ b/stubs/networkx/networkx/algorithms/tournament.pyi @@ -2,6 +2,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["hamiltonian_path", "is_reachable", "is_strongly_connected", "is_tournament", "random_tournament", "score_sequence"] + @_dispatchable def is_tournament(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/traversal/beamsearch.pyi b/stubs/networkx/networkx/algorithms/traversal/beamsearch.pyi index 7040e297b596..50b393fddccd 100644 --- a/stubs/networkx/networkx/algorithms/traversal/beamsearch.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/beamsearch.pyi @@ -4,6 +4,8 @@ from collections.abc import Callable, Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["bfs_beam_edges"] + @_dispatchable def bfs_beam_edges( G: Graph[_Node], source: _Node, value: Callable[..., Incomplete], width: int | None = None diff --git a/stubs/networkx/networkx/algorithms/traversal/breadth_first_search.pyi b/stubs/networkx/networkx/algorithms/traversal/breadth_first_search.pyi index 9832210f2917..226ebaaa07f1 100644 --- a/stubs/networkx/networkx/algorithms/traversal/breadth_first_search.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/breadth_first_search.pyi @@ -1,9 +1,23 @@ from _typeshed import Incomplete from collections.abc import Callable, Generator +from typing import Literal from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "bfs_edges", + "bfs_tree", + "bfs_predecessors", + "bfs_successors", + "descendants_at_distance", + "bfs_layers", + "bfs_labeled_edges", + "generic_bfs_edges", +] + +@_dispatchable +def generic_bfs_edges(G, source, neighbors=None, depth_limit=None) -> Generator[tuple[Incomplete, Incomplete]]: ... @_dispatchable def bfs_edges( G: Graph[_Node], @@ -31,4 +45,6 @@ def bfs_successors( @_dispatchable def bfs_layers(G: Graph[_Node], sources) -> Generator[Incomplete, None, None]: ... @_dispatchable +def bfs_labeled_edges(G, sources) -> Generator[tuple[Incomplete, Incomplete, Literal["tree", "level", "forward", "reverse"]]]: ... +@_dispatchable def descendants_at_distance(G: Graph[_Node], source, distance): ... 
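The breadth-first-search stub above adds generic_bfs_edges and bfs_labeled_edges; a minimal sketch of what the labels mean, assuming networkx >= 3.1 where both functions are available:

import networkx as nx

G = nx.cycle_graph(5)

# bfs_labeled_edges classifies every edge relative to the BFS forest rooted
# at the given source(s): "tree", "level", "forward" or "reverse".
labels = {(u, v): kind for u, v, kind in nx.bfs_labeled_edges(G, 0)}
assert set(labels.values()) <= {"tree", "level", "forward", "reverse"}

# generic_bfs_edges yields the BFS tree edges; a connected n-node graph
# always produces n - 1 of them.
tree_edges = list(nx.generic_bfs_edges(G, source=0))
assert len(tree_edges) == G.number_of_nodes() - 1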
diff --git a/stubs/networkx/networkx/algorithms/traversal/depth_first_search.pyi b/stubs/networkx/networkx/algorithms/traversal/depth_first_search.pyi index b4f3e1bc0a21..f66eee33c824 100644 --- a/stubs/networkx/networkx/algorithms/traversal/depth_first_search.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/depth_first_search.pyi @@ -4,6 +4,16 @@ from collections.abc import Callable, Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = [ + "dfs_edges", + "dfs_tree", + "dfs_predecessors", + "dfs_successors", + "dfs_preorder_nodes", + "dfs_postorder_nodes", + "dfs_labeled_edges", +] + @_dispatchable def dfs_edges( G: Graph[_Node], source: _Node | None = None, depth_limit=None, *, sort_neighbors: Callable[..., Incomplete] | None = None diff --git a/stubs/networkx/networkx/algorithms/traversal/edgebfs.pyi b/stubs/networkx/networkx/algorithms/traversal/edgebfs.pyi index 1829e0bbe401..19c0db4ef67d 100644 --- a/stubs/networkx/networkx/algorithms/traversal/edgebfs.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/edgebfs.pyi @@ -4,5 +4,7 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["edge_bfs"] + @_dispatchable def edge_bfs(G: Graph[_Node], source=None, orientation=None) -> Generator[Incomplete, None, Incomplete]: ... diff --git a/stubs/networkx/networkx/algorithms/traversal/edgedfs.pyi b/stubs/networkx/networkx/algorithms/traversal/edgedfs.pyi index 3e8b6485fd2d..d46c0229d271 100644 --- a/stubs/networkx/networkx/algorithms/traversal/edgedfs.pyi +++ b/stubs/networkx/networkx/algorithms/traversal/edgedfs.pyi @@ -4,5 +4,7 @@ from collections.abc import Generator from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["edge_dfs"] + @_dispatchable def edge_dfs(G: Graph[_Node], source=None, orientation=None) -> Generator[Incomplete, None, Incomplete]: ... diff --git a/stubs/networkx/networkx/algorithms/tree/branchings.pyi b/stubs/networkx/networkx/algorithms/tree/branchings.pyi index 00a09bdafdbe..f04b808be8ba 100644 --- a/stubs/networkx/networkx/algorithms/tree/branchings.pyi +++ b/stubs/networkx/networkx/algorithms/tree/branchings.pyi @@ -12,6 +12,7 @@ __all__ = [ "greedy_branching", "maximum_branching", "minimum_branching", + "minimal_branching", "maximum_spanning_arborescence", "minimum_spanning_arborescence", "ArborescenceIterator", @@ -32,6 +33,8 @@ def minimum_branching( G: DiGraph[_Node], attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: str | None = None ): ... @_dispatchable +def minimal_branching(G, /, *, attr="weight", default=1, preserve_attrs=False, partition=None): ... +@_dispatchable def maximum_spanning_arborescence( G: DiGraph[_Node], attr: str = "weight", default: float = 1, preserve_attrs: bool = False, partition: str | None = None ): ... diff --git a/stubs/networkx/networkx/algorithms/tree/coding.pyi b/stubs/networkx/networkx/algorithms/tree/coding.pyi index 14906e0ba3c3..afd2ee79e2a1 100644 --- a/stubs/networkx/networkx/algorithms/tree/coding.pyi +++ b/stubs/networkx/networkx/algorithms/tree/coding.pyi @@ -5,6 +5,8 @@ from networkx.classes.graph import Graph, _Node from networkx.exception import NetworkXException from networkx.utils.backends import _dispatchable +__all__ = ["from_nested_tuple", "from_prufer_sequence", "NotATree", "to_nested_tuple", "to_prufer_sequence"] + class NotATree(NetworkXException): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/tree/decomposition.pyi b/stubs/networkx/networkx/algorithms/tree/decomposition.pyi index 998d6241c363..79889442969e 100644 --- a/stubs/networkx/networkx/algorithms/tree/decomposition.pyi +++ b/stubs/networkx/networkx/algorithms/tree/decomposition.pyi @@ -1,4 +1,6 @@ from networkx.utils.backends import _dispatchable +__all__ = ["junction_tree"] + @_dispatchable def junction_tree(G): ... diff --git a/stubs/networkx/networkx/algorithms/tree/mst.pyi b/stubs/networkx/networkx/algorithms/tree/mst.pyi index 2a55ed99c20a..5cd518995676 100644 --- a/stubs/networkx/networkx/algorithms/tree/mst.pyi +++ b/stubs/networkx/networkx/algorithms/tree/mst.pyi @@ -2,11 +2,24 @@ from _typeshed import Incomplete from collections.abc import Iterator from dataclasses import dataclass from enum import Enum +from typing import Literal from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = [ + "minimum_spanning_edges", + "maximum_spanning_edges", + "minimum_spanning_tree", + "maximum_spanning_tree", + "number_of_spanning_trees", + "random_spanning_tree", + "partition_spanning_tree", + "EdgePartition", + "SpanningTreeIterator", +] + class EdgePartition(Enum): OPEN = 0 INCLUDED = 1 @@ -60,3 +73,6 @@ class SpanningTreeIterator: def __iter__(self) -> Iterator[Incomplete]: ... def __next__(self): ... + +@_dispatchable +def number_of_spanning_trees(G, *, root=None, weight=None) -> float | Literal[0]: ... diff --git a/stubs/networkx/networkx/algorithms/tree/operations.pyi b/stubs/networkx/networkx/algorithms/tree/operations.pyi index a88e48eca6e1..0388ce40d84b 100644 --- a/stubs/networkx/networkx/algorithms/tree/operations.pyi +++ b/stubs/networkx/networkx/algorithms/tree/operations.pyi @@ -3,5 +3,7 @@ from collections.abc import Iterable from networkx.utils.backends import _dispatchable +__all__ = ["join_trees"] + @_dispatchable def join_trees(rooted_trees: Iterable[Incomplete], *, label_attribute: str | None = None, first_label: int | None = 0): ... diff --git a/stubs/networkx/networkx/algorithms/tree/recognition.pyi b/stubs/networkx/networkx/algorithms/tree/recognition.pyi index 219e48d02f33..efacca7c55ba 100644 --- a/stubs/networkx/networkx/algorithms/tree/recognition.pyi +++ b/stubs/networkx/networkx/algorithms/tree/recognition.pyi @@ -2,6 +2,8 @@ from networkx.classes.digraph import DiGraph from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["is_arborescence", "is_branching", "is_forest", "is_tree"] + @_dispatchable def is_arborescence(G: Graph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/triads.pyi b/stubs/networkx/networkx/algorithms/triads.pyi index 1942e68f9170..8c0c61034b7c 100644 --- a/stubs/networkx/networkx/algorithms/triads.pyi +++ b/stubs/networkx/networkx/algorithms/triads.pyi @@ -6,6 +6,8 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable from numpy.random import RandomState +__all__ = ["triadic_census", "is_triad", "all_triplets", "all_triads", "triads_by_type", "triad_type", "random_triad"] + @_dispatchable def triadic_census(G: DiGraph[_Node], nodelist: Collection[_Node] | None = None): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/vitality.pyi b/stubs/networkx/networkx/algorithms/vitality.pyi index ec105de8f4be..a9b84a11c10e 100644 --- a/stubs/networkx/networkx/algorithms/vitality.pyi +++ b/stubs/networkx/networkx/algorithms/vitality.pyi @@ -3,6 +3,8 @@ from _typeshed import Incomplete from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["closeness_vitality"] + @_dispatchable def closeness_vitality( G: Graph[_Node], node: Incomplete | None = None, weight: str | None = None, wiener_index: float | None = None diff --git a/stubs/networkx/networkx/algorithms/voronoi.pyi b/stubs/networkx/networkx/algorithms/voronoi.pyi index 806b4b12656c..cd21b3d0c246 100644 --- a/stubs/networkx/networkx/algorithms/voronoi.pyi +++ b/stubs/networkx/networkx/algorithms/voronoi.pyi @@ -5,6 +5,8 @@ from typing import Any from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["voronoi_cells"] + @_dispatchable def voronoi_cells( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/walks.pyi b/stubs/networkx/networkx/algorithms/walks.pyi index 9c5d25a38a22..7e296bd5129e 100644 --- a/stubs/networkx/networkx/algorithms/walks.pyi +++ b/stubs/networkx/networkx/algorithms/walks.pyi @@ -1,5 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["number_of_walks"] + @_dispatchable def number_of_walks(G: Graph[_Node], walk_length: int): ... diff --git a/stubs/networkx/networkx/algorithms/wiener.pyi b/stubs/networkx/networkx/algorithms/wiener.pyi index e55f890203a8..87e87f305c3a 100644 --- a/stubs/networkx/networkx/algorithms/wiener.pyi +++ b/stubs/networkx/networkx/algorithms/wiener.pyi @@ -1,5 +1,11 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable +__all__ = ["wiener_index", "schultz_index", "gutman_index"] + @_dispatchable def wiener_index(G: Graph[_Node], weight: str | None = None): ... +@_dispatchable +def schultz_index(G, weight=None) -> float: ... +@_dispatchable +def gutman_index(G, weight=None) -> float: ... diff --git a/stubs/networkx/networkx/classes/coreviews.pyi b/stubs/networkx/networkx/classes/coreviews.pyi index 5c10338c679a..286c7db65c4d 100644 --- a/stubs/networkx/networkx/classes/coreviews.pyi +++ b/stubs/networkx/networkx/classes/coreviews.pyi @@ -6,6 +6,20 @@ _T = TypeVar("_T") _U = TypeVar("_U") _V = TypeVar("_V") +__all__ = [ + "AtlasView", + "AdjacencyView", + "MultiAdjacencyView", + "UnionAtlas", + "UnionAdjacency", + "UnionMultiInner", + "UnionMultiAdjacency", + "FilterAtlas", + "FilterAdjacency", + "FilterMultiInner", + "FilterMultiAdjacency", +] + class AtlasView(Mapping[_T, dict[_U, _V]]): def __getstate__(self) -> dict[str, Mapping[_T, dict[_U, _V]]]: ... def __setstate__(self, state: dict[str, Mapping[_T, dict[_U, _V]]]) -> None: ... diff --git a/stubs/networkx/networkx/classes/digraph.pyi b/stubs/networkx/networkx/classes/digraph.pyi index 2c3476e7035b..281da1a62514 100644 --- a/stubs/networkx/networkx/classes/digraph.pyi +++ b/stubs/networkx/networkx/classes/digraph.pyi @@ -6,6 +6,8 @@ from networkx.classes.coreviews import AdjacencyView from networkx.classes.graph import Graph, _Node from networkx.classes.reportviews import DiDegreeView, OutEdgeView +__all__ = ["DiGraph"] + class DiGraph(Graph[_Node]): @cached_property def succ(self) -> AdjacencyView[_Node, _Node, dict[str, Incomplete]]: ... 
diff --git a/stubs/networkx/networkx/classes/filters.pyi b/stubs/networkx/networkx/classes/filters.pyi index 0d8e26af83cc..c8689991e6f4 100644 --- a/stubs/networkx/networkx/classes/filters.pyi +++ b/stubs/networkx/networkx/classes/filters.pyi @@ -1,5 +1,19 @@ from _typeshed import Incomplete +__all__ = [ + "no_filter", + "hide_nodes", + "hide_edges", + "hide_multiedges", + "hide_diedges", + "hide_multidiedges", + "show_nodes", + "show_edges", + "show_multiedges", + "show_diedges", + "show_multidiedges", +] + def no_filter(*items): ... def hide_nodes(nodes): ... def hide_diedges(edges): ... diff --git a/stubs/networkx/networkx/classes/function.pyi b/stubs/networkx/networkx/classes/function.pyi index 459cd8baa8d9..aa2d8623aec2 100644 --- a/stubs/networkx/networkx/classes/function.pyi +++ b/stubs/networkx/networkx/classes/function.pyi @@ -32,8 +32,10 @@ __all__ = [ "create_empty_copy", "set_node_attributes", "get_node_attributes", + "remove_node_attributes", "set_edge_attributes", "get_edge_attributes", + "remove_edge_attributes", "all_neighbors", "non_neighbors", "non_edges", @@ -47,6 +49,7 @@ __all__ = [ "path_weight", "is_path", ] + _U = TypeVar("_U") def nodes(G): ... @@ -86,6 +89,7 @@ def set_node_attributes( name: None = None, ) -> None: ... def get_node_attributes(G: Graph[_Node], name: str, default=None) -> dict[_Node, Incomplete]: ... +def remove_node_attributes(G, *attr_names, nbunch=None) -> None: ... @overload def set_edge_attributes(G: Graph[_Node], values: SupportsItems[tuple[_Node, _Node], Incomplete], name: str) -> None: ... @overload @@ -93,6 +97,7 @@ def set_edge_attributes(G: MultiGraph[_Node], values: dict[tuple[_Node, _Node, I @overload def set_edge_attributes(G: Graph[Hashable], values, name: None = None) -> None: ... def get_edge_attributes(G: Graph[_Node], name: str, default=None) -> dict[tuple[_Node, _Node], Incomplete]: ... +def remove_edge_attributes(G, *attr_names, ebunch=None) -> None: ... def all_neighbors(graph: Graph[_Node], node: _Node) -> Iterator[_Node]: ... def non_neighbors(graph: Graph[_Node], node: _Node) -> Generator[_Node, None, None]: ... def non_edges(graph: Graph[_Node]) -> Generator[tuple[_Node, _Node], None, None]: ... diff --git a/stubs/networkx/networkx/classes/graph.pyi b/stubs/networkx/networkx/classes/graph.pyi index 981d61fd0b34..5fda0dcc5843 100644 --- a/stubs/networkx/networkx/classes/graph.pyi +++ b/stubs/networkx/networkx/classes/graph.pyi @@ -26,6 +26,8 @@ _Data: TypeAlias = ( # | scipy.sparse.base.spmatrix ) +__all__ = ["Graph"] + class Graph(Collection[_Node]): node_dict_factory: ClassVar[_MapFactory] = ... node_attr_dict_factory: ClassVar[_MapFactory] = ... diff --git a/stubs/networkx/networkx/classes/graphviews.pyi b/stubs/networkx/networkx/classes/graphviews.pyi index 798eed5726ba..460e2df058c9 100644 --- a/stubs/networkx/networkx/classes/graphviews.pyi +++ b/stubs/networkx/networkx/classes/graphviews.pyi @@ -9,6 +9,8 @@ from networkx.classes.multigraph import MultiGraph _G = TypeVar("_G", bound=Graph[Hashable]) _D = TypeVar("_D", bound=DiGraph[Hashable]) +__all__ = ["generic_graph_view", "subgraph_view", "reverse_view"] + @overload def generic_graph_view(G: _G, create_using: None = None) -> _G: ... 
@overload diff --git a/stubs/networkx/networkx/classes/multidigraph.pyi b/stubs/networkx/networkx/classes/multidigraph.pyi index ebfe40bc2df9..0bf2f2fe87cd 100644 --- a/stubs/networkx/networkx/classes/multidigraph.pyi +++ b/stubs/networkx/networkx/classes/multidigraph.pyi @@ -7,6 +7,8 @@ from networkx.classes.graph import _Node from networkx.classes.multigraph import MultiGraph from networkx.classes.reportviews import InMultiDegreeView, OutMultiDegreeView, OutMultiEdgeView +__all__ = ["MultiDiGraph"] + class MultiDiGraph(MultiGraph[_Node], DiGraph[_Node]): @cached_property def succ(self) -> MultiAdjacencyView[_Node, _Node, dict[str, Incomplete]]: ... diff --git a/stubs/networkx/networkx/classes/multigraph.pyi b/stubs/networkx/networkx/classes/multigraph.pyi index f6b7766ccf9d..63b159ad06d9 100644 --- a/stubs/networkx/networkx/classes/multigraph.pyi +++ b/stubs/networkx/networkx/classes/multigraph.pyi @@ -9,6 +9,8 @@ from networkx.classes.reportviews import OutMultiEdgeView _MultiEdge: TypeAlias = tuple[_Node, _Node, int] # noqa: Y047 +__all__ = ["MultiGraph"] + class MultiGraph(Graph[_Node]): def __init__(self, incoming_graph_data: Incomplete | None = None, multigraph_input: bool | None = None, **attr) -> None: ... @cached_property diff --git a/stubs/networkx/networkx/classes/reportviews.pyi b/stubs/networkx/networkx/classes/reportviews.pyi index 6a654dcccfe3..2a2a5c6d7286 100644 --- a/stubs/networkx/networkx/classes/reportviews.pyi +++ b/stubs/networkx/networkx/classes/reportviews.pyi @@ -9,6 +9,31 @@ from networkx.classes.graph import Graph, _Edge, _NBunch, _Node _D = TypeVar("_D") _U = TypeVar("_U") +__all__ = [ + "NodeView", + "NodeDataView", + "EdgeView", + "OutEdgeView", + "InEdgeView", + "EdgeDataView", + "OutEdgeDataView", + "InEdgeDataView", + "MultiEdgeView", + "OutMultiEdgeView", + "InMultiEdgeView", + "MultiEdgeDataView", + "OutMultiEdgeDataView", + "InMultiEdgeDataView", + "DegreeView", + "DiDegreeView", + "InDegreeView", + "OutDegreeView", + "MultiDegreeView", + "DiMultiDegreeView", + "InMultiDegreeView", + "OutMultiDegreeView", +] + class NodeView(Mapping[_Node, dict[str, Any]], AbstractSet[_Node]): def __init__(self, graph: Graph[_Node]) -> None: ... def __len__(self) -> int: ... diff --git a/stubs/networkx/networkx/convert_matrix.pyi b/stubs/networkx/networkx/convert_matrix.pyi index a94e00e7ada8..2cc8aab6e8b6 100644 --- a/stubs/networkx/networkx/convert_matrix.pyi +++ b/stubs/networkx/networkx/convert_matrix.pyi @@ -17,6 +17,17 @@ _ExtensionDtype: TypeAlias = Incomplete _Axes: TypeAlias = Collection[_Node] _G = TypeVar("_G", bound=Graph[Hashable]) +__all__ = [ + "from_pandas_adjacency", + "to_pandas_adjacency", + "from_pandas_edgelist", + "to_pandas_edgelist", + "from_scipy_sparse_array", + "to_scipy_sparse_array", + "from_numpy_array", + "to_numpy_array", +] + @_dispatchable def to_pandas_adjacency( G: Graph[_Node], @@ -69,6 +80,10 @@ def from_pandas_edgelist( edge_key: str | None = None, ) -> Graph[Incomplete]: ... @_dispatchable +def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"): ... +@_dispatchable +def from_scipy_sparse_array(A, parallel_edges=False, create_using=None, edge_attribute="weight"): ... 
+@_dispatchable def to_numpy_array( G: Graph[_Node], nodelist: Collection[_Node] | None = None, diff --git a/stubs/networkx/networkx/drawing/layout.pyi b/stubs/networkx/networkx/drawing/layout.pyi index 8d5f3dd1d1e9..fae316ed4d09 100644 --- a/stubs/networkx/networkx/drawing/layout.pyi +++ b/stubs/networkx/networkx/drawing/layout.pyi @@ -2,6 +2,25 @@ from _typeshed import Incomplete import numpy +__all__ = [ + "bipartite_layout", + "circular_layout", + "forceatlas2_layout", + "kamada_kawai_layout", + "random_layout", + "rescale_layout", + "rescale_layout_dict", + "shell_layout", + "spring_layout", + "spectral_layout", + "planar_layout", + "fruchterman_reingold_layout", + "spiral_layout", + "multipartite_layout", + "bfs_layout", + "arf_layout", +] + def random_layout(G, center: Incomplete | None = None, dim: int = 2, seed: Incomplete | None = None): ... def circular_layout(G, scale: float = 1, center: Incomplete | None = None, dim: int = 2): ... def shell_layout( @@ -59,5 +78,24 @@ def arf_layout( *, seed: int | numpy.random.RandomState | None = None, ): ... +def forceatlas2_layout( + G, + pos=None, + *, + max_iter=100, + jitter_tolerance=1.0, + scaling_ratio=2.0, + gravity=1.0, + distributed_action=False, + strong_gravity=False, + node_mass=None, + node_size=None, + weight=None, + dissuade_hubs=False, + linlog=False, + seed=None, + dim=2, +) -> dict[Incomplete, Incomplete]: ... def rescale_layout(pos, scale: float = 1): ... def rescale_layout_dict(pos, scale: float = 1): ... +def bfs_layout(G, start, *, align="vertical", scale=1, center=None) -> dict[Incomplete, Incomplete]: ... diff --git a/stubs/networkx/networkx/drawing/nx_agraph.pyi b/stubs/networkx/networkx/drawing/nx_agraph.pyi index 025489a5759a..0c744f10e408 100644 --- a/stubs/networkx/networkx/drawing/nx_agraph.pyi +++ b/stubs/networkx/networkx/drawing/nx_agraph.pyi @@ -9,6 +9,8 @@ from networkx.utils.backends import _dispatchable # from pygraphviz.agraph import AGraph as _AGraph _AGraph: TypeAlias = Incomplete +__all__ = ["from_agraph", "to_agraph", "write_dot", "read_dot", "graphviz_layout", "pygraphviz_layout", "view_pygraphviz"] + @_dispatchable def from_agraph(A, create_using: Incomplete | None = None) -> Graph[Incomplete]: ... def to_agraph(N: Graph[Hashable]) -> _AGraph: ... diff --git a/stubs/networkx/networkx/drawing/nx_latex.pyi b/stubs/networkx/networkx/drawing/nx_latex.pyi index ae19dd148b76..a7ca26c18821 100644 --- a/stubs/networkx/networkx/drawing/nx_latex.pyi +++ b/stubs/networkx/networkx/drawing/nx_latex.pyi @@ -1,5 +1,7 @@ from _typeshed import Incomplete +__all__ = ["to_latex_raw", "to_latex", "write_latex"] + def to_latex_raw( G, pos: str = "pos", diff --git a/stubs/networkx/networkx/drawing/nx_pydot.pyi b/stubs/networkx/networkx/drawing/nx_pydot.pyi index 0d8a59e9044f..dbb039f03b56 100644 --- a/stubs/networkx/networkx/drawing/nx_pydot.pyi +++ b/stubs/networkx/networkx/drawing/nx_pydot.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["write_dot", "read_dot", "graphviz_layout", "pydot_layout", "to_pydot", "from_pydot"] + def write_dot(G, path) -> None: ... @_dispatchable def read_dot(path): ... 
diff --git a/stubs/networkx/networkx/drawing/nx_pylab.pyi b/stubs/networkx/networkx/drawing/nx_pylab.pyi index cb530376b15a..9ed051b6707b 100644 --- a/stubs/networkx/networkx/drawing/nx_pylab.pyi +++ b/stubs/networkx/networkx/drawing/nx_pylab.pyi @@ -1,6 +1,23 @@ from _typeshed import Incomplete from collections.abc import Collection +__all__ = [ + "draw", + "draw_networkx", + "draw_networkx_nodes", + "draw_networkx_edges", + "draw_networkx_labels", + "draw_networkx_edge_labels", + "draw_circular", + "draw_kamada_kawai", + "draw_random", + "draw_spectral", + "draw_spring", + "draw_planar", + "draw_shell", + "draw_forceatlas2", +] + def draw(G, pos: Incomplete | None = None, ax: Incomplete | None = None, **kwds) -> None: ... def draw_networkx( G, pos: Incomplete | None = None, arrows: Incomplete | None = None, with_labels: bool = True, **kwds @@ -91,3 +108,4 @@ def draw_spectral(G, **kwargs) -> None: ... def draw_spring(G, **kwargs) -> None: ... def draw_shell(G, nlist: Incomplete | None = None, **kwargs) -> None: ... def draw_planar(G, **kwargs) -> None: ... +def draw_forceatlas2(G, **kwargs) -> None: ... diff --git a/stubs/networkx/networkx/exception.pyi b/stubs/networkx/networkx/exception.pyi index ab6a0e2e0c26..c1ebc86eee91 100644 --- a/stubs/networkx/networkx/exception.pyi +++ b/stubs/networkx/networkx/exception.pyi @@ -1,3 +1,20 @@ +__all__ = [ + "HasACycle", + "NodeNotFound", + "PowerIterationFailedConvergence", + "ExceededMaxIterations", + "AmbiguousSolution", + "NetworkXAlgorithmError", + "NetworkXException", + "NetworkXError", + "NetworkXNoCycle", + "NetworkXNoPath", + "NetworkXNotImplemented", + "NetworkXPointlessConcept", + "NetworkXUnbounded", + "NetworkXUnfeasible", +] + class NetworkXException(Exception): ... class NetworkXError(NetworkXException): ... class NetworkXPointlessConcept(NetworkXException): ... diff --git a/stubs/networkx/networkx/generators/atlas.pyi b/stubs/networkx/networkx/generators/atlas.pyi index f97cf915b717..4fffc13ca131 100644 --- a/stubs/networkx/networkx/generators/atlas.pyi +++ b/stubs/networkx/networkx/generators/atlas.pyi @@ -1,5 +1,7 @@ from networkx.utils.backends import _dispatchable +__all__ = ["graph_atlas", "graph_atlas_g"] + @_dispatchable def graph_atlas(i): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/classic.pyi b/stubs/networkx/networkx/generators/classic.pyi index b35c8b4adb64..99b8a6d8413b 100644 --- a/stubs/networkx/networkx/generators/classic.pyi +++ b/stubs/networkx/networkx/generators/classic.pyi @@ -1,10 +1,37 @@ from _typeshed import Incomplete +from networkx.classes import Graph from networkx.utils.backends import _dispatchable +__all__ = [ + "balanced_tree", + "barbell_graph", + "binomial_tree", + "complete_graph", + "complete_multipartite_graph", + "circular_ladder_graph", + "circulant_graph", + "cycle_graph", + "dorogovtsev_goltsev_mendes_graph", + "empty_graph", + "full_rary_tree", + "kneser_graph", + "ladder_graph", + "lollipop_graph", + "null_graph", + "path_graph", + "star_graph", + "tadpole_graph", + "trivial_graph", + "turan_graph", + "wheel_graph", +] + @_dispatchable def full_rary_tree(r, n, create_using: Incomplete | None = None): ... @_dispatchable +def kneser_graph(n, k) -> Graph[Incomplete]: ... +@_dispatchable def balanced_tree(r, h, create_using: Incomplete | None = None): ... @_dispatchable def barbell_graph(m1, m2, create_using: Incomplete | None = None): ... @@ -33,6 +60,8 @@ def path_graph(n, create_using: Incomplete | None = None): ... 
@_dispatchable def star_graph(n, create_using: Incomplete | None = None): ... @_dispatchable +def tadpole_graph(m, n, create_using=None) -> Graph[Incomplete] | Incomplete: ... +@_dispatchable def trivial_graph(create_using: Incomplete | None = None): ... @_dispatchable def turan_graph(n, r): ... diff --git a/stubs/networkx/networkx/generators/cographs.pyi b/stubs/networkx/networkx/generators/cographs.pyi index 2349a9dbe77d..a3081cae6274 100644 --- a/stubs/networkx/networkx/generators/cographs.pyi +++ b/stubs/networkx/networkx/generators/cographs.pyi @@ -2,5 +2,7 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["random_cograph"] + @_dispatchable def random_cograph(n, seed: Incomplete | None = None): ... diff --git a/stubs/networkx/networkx/generators/community.pyi b/stubs/networkx/networkx/generators/community.pyi index 8cfedcb8aefb..d95c6eeee30b 100644 --- a/stubs/networkx/networkx/generators/community.pyi +++ b/stubs/networkx/networkx/generators/community.pyi @@ -3,6 +3,19 @@ from collections.abc import Collection from networkx.utils.backends import _dispatchable +__all__ = [ + "caveman_graph", + "connected_caveman_graph", + "relaxed_caveman_graph", + "random_partition_graph", + "planted_partition_graph", + "gaussian_random_partition_graph", + "ring_of_cliques", + "windmill_graph", + "stochastic_block_model", + "LFR_benchmark_graph", +] + @_dispatchable def caveman_graph(l, k): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/directed.pyi b/stubs/networkx/networkx/generators/directed.pyi index 4be5ac627222..bb8f174dcde3 100644 --- a/stubs/networkx/networkx/generators/directed.pyi +++ b/stubs/networkx/networkx/generators/directed.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["gn_graph", "gnc_graph", "gnr_graph", "random_k_out_graph", "scale_free_graph"] + @_dispatchable def gn_graph(n, kernel: Incomplete | None = None, create_using: Incomplete | None = None, seed: Incomplete | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/duplication.pyi b/stubs/networkx/networkx/generators/duplication.pyi index ec3248a8b9f1..4b62c92c75e2 100644 --- a/stubs/networkx/networkx/generators/duplication.pyi +++ b/stubs/networkx/networkx/generators/duplication.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["partial_duplication_graph", "duplication_divergence_graph"] + @_dispatchable def partial_duplication_graph(N, n, p, q, seed: Incomplete | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/ego.pyi b/stubs/networkx/networkx/generators/ego.pyi index f64b9ec10d4b..83725a552c50 100644 --- a/stubs/networkx/networkx/generators/ego.pyi +++ b/stubs/networkx/networkx/generators/ego.pyi @@ -2,5 +2,7 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["ego_graph"] + @_dispatchable def ego_graph(G, n, radius: float = 1, center: bool = True, undirected: bool = False, distance: Incomplete | None = None): ... 
diff --git a/stubs/networkx/networkx/generators/expanders.pyi b/stubs/networkx/networkx/generators/expanders.pyi index 9b2d9b1cd597..f59596b797b2 100644 --- a/stubs/networkx/networkx/generators/expanders.pyi +++ b/stubs/networkx/networkx/generators/expanders.pyi @@ -2,9 +2,24 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = [ + "margulis_gabber_galil_graph", + "chordal_cycle_graph", + "paley_graph", + "maybe_regular_expander", + "is_regular_expander", + "random_regular_expander_graph", +] + @_dispatchable def margulis_gabber_galil_graph(n, create_using: Incomplete | None = None): ... @_dispatchable def chordal_cycle_graph(p, create_using: Incomplete | None = None): ... @_dispatchable def paley_graph(p, create_using: Incomplete | None = None): ... +@_dispatchable +def maybe_regular_expander(n, d, *, create_using=None, max_tries=100, seed=None): ... +@_dispatchable +def is_regular_expander(G, *, epsilon=0) -> bool: ... +@_dispatchable +def random_regular_expander_graph(n, d, *, epsilon=0, create_using=None, max_tries=100, seed=None): ... diff --git a/stubs/networkx/networkx/generators/geometric.pyi b/stubs/networkx/networkx/generators/geometric.pyi index 0442140b39bf..abaaba251721 100644 --- a/stubs/networkx/networkx/generators/geometric.pyi +++ b/stubs/networkx/networkx/generators/geometric.pyi @@ -1,7 +1,19 @@ from _typeshed import Incomplete +from networkx.classes.graph import Graph from networkx.utils.backends import _dispatchable +__all__ = [ + "geometric_edges", + "geographical_threshold_graph", + "navigable_small_world_graph", + "random_geometric_graph", + "soft_random_geometric_graph", + "thresholded_random_geometric_graph", + "waxman_graph", + "geometric_soft_configuration_graph", +] + @_dispatchable def geometric_edges(G, radius, p: float = 2): ... @_dispatchable @@ -54,3 +66,7 @@ def thresholded_random_geometric_graph( p: float = 2, seed: Incomplete | None = None, ): ... +@_dispatchable +def geometric_soft_configuration_graph( + *, beta, n=None, gamma=None, mean_degree=None, kappas=None, seed=None +) -> Graph[Incomplete]: ... diff --git a/stubs/networkx/networkx/generators/harary_graph.pyi b/stubs/networkx/networkx/generators/harary_graph.pyi index b62d2e2ce0e0..341308b8992d 100644 --- a/stubs/networkx/networkx/generators/harary_graph.pyi +++ b/stubs/networkx/networkx/generators/harary_graph.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["hnm_harary_graph", "hkn_harary_graph"] + @_dispatchable def hnm_harary_graph(n, m, create_using: Incomplete | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/intersection.pyi b/stubs/networkx/networkx/generators/intersection.pyi index a58cbf5befcf..73a5a7528c22 100644 --- a/stubs/networkx/networkx/generators/intersection.pyi +++ b/stubs/networkx/networkx/generators/intersection.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["uniform_random_intersection_graph", "k_random_intersection_graph", "general_random_intersection_graph"] + @_dispatchable def uniform_random_intersection_graph(n, m, p, seed: Incomplete | None = None): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/generators/interval_graph.pyi b/stubs/networkx/networkx/generators/interval_graph.pyi index 8a0d89c9e33e..2901b87d003b 100644 --- a/stubs/networkx/networkx/generators/interval_graph.pyi +++ b/stubs/networkx/networkx/generators/interval_graph.pyi @@ -1,4 +1,6 @@ from networkx.utils.backends import _dispatchable +__all__ = ["interval_graph"] + @_dispatchable def interval_graph(intervals): ... diff --git a/stubs/networkx/networkx/generators/joint_degree_seq.pyi b/stubs/networkx/networkx/generators/joint_degree_seq.pyi index 2f4ce6eeba33..b9f976ac343b 100644 --- a/stubs/networkx/networkx/generators/joint_degree_seq.pyi +++ b/stubs/networkx/networkx/generators/joint_degree_seq.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["is_valid_joint_degree", "is_valid_directed_joint_degree", "joint_degree_graph", "directed_joint_degree_graph"] + @_dispatchable def is_valid_joint_degree(joint_degrees): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/lattice.pyi b/stubs/networkx/networkx/generators/lattice.pyi index 50ac54691da4..29d8e5fa972f 100644 --- a/stubs/networkx/networkx/generators/lattice.pyi +++ b/stubs/networkx/networkx/generators/lattice.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["grid_2d_graph", "grid_graph", "hypercube_graph", "triangular_lattice_graph", "hexagonal_lattice_graph"] + @_dispatchable def grid_2d_graph(m, n, periodic: bool = False, create_using: Incomplete | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/line.pyi b/stubs/networkx/networkx/generators/line.pyi index 9d3625e22c7c..01a36dc8312f 100644 --- a/stubs/networkx/networkx/generators/line.pyi +++ b/stubs/networkx/networkx/generators/line.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["line_graph", "inverse_line_graph"] + @_dispatchable def line_graph(G, create_using: Incomplete | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/mycielski.pyi b/stubs/networkx/networkx/generators/mycielski.pyi index 31ced02dcc80..cc40796b7627 100644 --- a/stubs/networkx/networkx/generators/mycielski.pyi +++ b/stubs/networkx/networkx/generators/mycielski.pyi @@ -1,5 +1,7 @@ from networkx.utils.backends import _dispatchable +__all__ = ["mycielskian", "mycielski_graph"] + @_dispatchable def mycielskian(G, iterations: int = 1): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/nonisomorphic_trees.pyi b/stubs/networkx/networkx/generators/nonisomorphic_trees.pyi index 57e277dde533..bf4ee819b70c 100644 --- a/stubs/networkx/networkx/generators/nonisomorphic_trees.pyi +++ b/stubs/networkx/networkx/generators/nonisomorphic_trees.pyi @@ -3,6 +3,8 @@ from collections.abc import Generator from networkx.utils.backends import _dispatchable +__all__ = ["nonisomorphic_trees", "number_of_nonisomorphic_trees"] + @_dispatchable def nonisomorphic_trees(order, create: str = "graph") -> Generator[Incomplete, None, None]: ... 
@_dispatchable diff --git a/stubs/networkx/networkx/generators/random_clustered.pyi b/stubs/networkx/networkx/generators/random_clustered.pyi index 1a914e12bc04..3fa7ff59543b 100644 --- a/stubs/networkx/networkx/generators/random_clustered.pyi +++ b/stubs/networkx/networkx/generators/random_clustered.pyi @@ -6,6 +6,7 @@ from networkx.classes.graph import Graph from networkx.utils.misc import _RandomState _G = TypeVar("_G", bound=Graph[int]) +__all__ = ["random_clustered_graph"] @overload def random_clustered_graph( diff --git a/stubs/networkx/networkx/generators/random_graphs.pyi b/stubs/networkx/networkx/generators/random_graphs.pyi index e3b557c785a0..a0e78626e796 100644 --- a/stubs/networkx/networkx/generators/random_graphs.pyi +++ b/stubs/networkx/networkx/generators/random_graphs.pyi @@ -2,6 +2,28 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = [ + "fast_gnp_random_graph", + "gnp_random_graph", + "dense_gnm_random_graph", + "gnm_random_graph", + "erdos_renyi_graph", + "binomial_graph", + "newman_watts_strogatz_graph", + "watts_strogatz_graph", + "connected_watts_strogatz_graph", + "random_regular_graph", + "barabasi_albert_graph", + "dual_barabasi_albert_graph", + "extended_barabasi_albert_graph", + "powerlaw_cluster_graph", + "random_lobster", + "random_shell_graph", + "random_powerlaw_tree", + "random_powerlaw_tree_sequence", + "random_kernel_graph", +] + @_dispatchable def fast_gnp_random_graph(n, p, seed: Incomplete | None = None, directed: bool = False): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/small.pyi b/stubs/networkx/networkx/generators/small.pyi index 4563f2308e58..97c1057d8a02 100644 --- a/stubs/networkx/networkx/generators/small.pyi +++ b/stubs/networkx/networkx/generators/small.pyi @@ -2,6 +2,32 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = [ + "LCF_graph", + "bull_graph", + "chvatal_graph", + "cubical_graph", + "desargues_graph", + "diamond_graph", + "dodecahedral_graph", + "frucht_graph", + "heawood_graph", + "hoffman_singleton_graph", + "house_graph", + "house_x_graph", + "icosahedral_graph", + "krackhardt_kite_graph", + "moebius_kantor_graph", + "octahedral_graph", + "pappus_graph", + "petersen_graph", + "sedgewick_maze_graph", + "tetrahedral_graph", + "truncated_cube_graph", + "truncated_tetrahedron_graph", + "tutte_graph", +] + @_dispatchable def LCF_graph(n, shift_list, repeats, create_using: Incomplete | None = None): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/social.pyi b/stubs/networkx/networkx/generators/social.pyi index b145710668b6..7a67b31c5c2e 100644 --- a/stubs/networkx/networkx/generators/social.pyi +++ b/stubs/networkx/networkx/generators/social.pyi @@ -1,5 +1,7 @@ from networkx.utils.backends import _dispatchable +__all__ = ["karate_club_graph", "davis_southern_women_graph", "florentine_families_graph", "les_miserables_graph"] + @_dispatchable def karate_club_graph(): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/generators/spectral_graph_forge.pyi b/stubs/networkx/networkx/generators/spectral_graph_forge.pyi index 23e1e5bffaa5..6fc6902a354f 100644 --- a/stubs/networkx/networkx/generators/spectral_graph_forge.pyi +++ b/stubs/networkx/networkx/generators/spectral_graph_forge.pyi @@ -2,5 +2,7 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["spectral_graph_forge"] + @_dispatchable def spectral_graph_forge(G, alpha, transformation: str = "identity", seed: Incomplete | None = None): ... diff --git a/stubs/networkx/networkx/generators/stochastic.pyi b/stubs/networkx/networkx/generators/stochastic.pyi index 4dd8f1ecb2b3..045818852fea 100644 --- a/stubs/networkx/networkx/generators/stochastic.pyi +++ b/stubs/networkx/networkx/generators/stochastic.pyi @@ -1,4 +1,6 @@ from networkx.utils.backends import _dispatchable +__all__ = ["stochastic_graph"] + @_dispatchable def stochastic_graph(G, copy: bool = True, weight: str = "weight"): ... diff --git a/stubs/networkx/networkx/generators/sudoku.pyi b/stubs/networkx/networkx/generators/sudoku.pyi index 15eb488e7f75..4538521973cc 100644 --- a/stubs/networkx/networkx/generators/sudoku.pyi +++ b/stubs/networkx/networkx/generators/sudoku.pyi @@ -1,4 +1,6 @@ from networkx.utils.backends import _dispatchable +__all__ = ["sudoku_graph"] + @_dispatchable def sudoku_graph(n: int = 3): ... diff --git a/stubs/networkx/networkx/generators/time_series.pyi b/stubs/networkx/networkx/generators/time_series.pyi index b5655e820964..b240695b18c4 100644 --- a/stubs/networkx/networkx/generators/time_series.pyi +++ b/stubs/networkx/networkx/generators/time_series.pyi @@ -1,4 +1,6 @@ from networkx.utils.backends import _dispatchable +__all__ = ["visibility_graph"] + @_dispatchable def visibility_graph(series): ... diff --git a/stubs/networkx/networkx/generators/trees.pyi b/stubs/networkx/networkx/generators/trees.pyi index 238bc03ef9ee..8652af12a6a7 100644 --- a/stubs/networkx/networkx/generators/trees.pyi +++ b/stubs/networkx/networkx/generators/trees.pyi @@ -1,6 +1,31 @@ +from _typeshed import Incomplete + from networkx.utils.backends import _dispatchable +__all__ = [ + "prefix_tree", + "prefix_tree_recursive", + "random_labeled_tree", + "random_labeled_rooted_tree", + "random_labeled_rooted_forest", + "random_unlabeled_tree", + "random_unlabeled_rooted_tree", + "random_unlabeled_rooted_forest", +] + @_dispatchable def prefix_tree(paths): ... @_dispatchable def prefix_tree_recursive(paths): ... +@_dispatchable +def random_labeled_tree(n, *, seed=None): ... +@_dispatchable +def random_labeled_rooted_tree(n, *, seed=None): ... +@_dispatchable +def random_unlabeled_rooted_tree(n, *, number_of_trees=None, seed=None) -> Incomplete | list[Incomplete]: ... +@_dispatchable +def random_labeled_rooted_forest(n, *, seed=None): ... +@_dispatchable +def random_unlabeled_rooted_forest(n, *, q=None, number_of_forests=None, seed=None) -> Incomplete | list[Incomplete]: ... +@_dispatchable +def random_unlabeled_tree(n, *, number_of_trees=None, seed=None) -> Incomplete | list[Incomplete]: ... diff --git a/stubs/networkx/networkx/generators/triads.pyi b/stubs/networkx/networkx/generators/triads.pyi index cdc648ee378d..e0ce3d43241b 100644 --- a/stubs/networkx/networkx/generators/triads.pyi +++ b/stubs/networkx/networkx/generators/triads.pyi @@ -1,4 +1,6 @@ from networkx.utils.backends import _dispatchable +__all__ = ["triad_graph"] + @_dispatchable def triad_graph(triad_name): ... 
diff --git a/stubs/networkx/networkx/linalg/algebraicconnectivity.pyi b/stubs/networkx/networkx/linalg/algebraicconnectivity.pyi index 52961225175a..fa12fa8c1acd 100644 --- a/stubs/networkx/networkx/linalg/algebraicconnectivity.pyi +++ b/stubs/networkx/networkx/linalg/algebraicconnectivity.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["algebraic_connectivity", "fiedler_vector", "spectral_ordering", "spectral_bisection"] + class _PCGSolver: def __init__(self, A, M) -> None: ... def solve(self, B, tol): ... diff --git a/stubs/networkx/networkx/linalg/attrmatrix.pyi b/stubs/networkx/networkx/linalg/attrmatrix.pyi index 360a53ffbdd6..3e645842ec26 100644 --- a/stubs/networkx/networkx/linalg/attrmatrix.pyi +++ b/stubs/networkx/networkx/linalg/attrmatrix.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["attr_matrix", "attr_sparse_matrix"] + @_dispatchable def attr_matrix( G, diff --git a/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi b/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi index 70bafa4f9957..2e98b1cdb995 100644 --- a/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi +++ b/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi @@ -3,5 +3,7 @@ from collections.abc import Collection from networkx.utils.backends import _dispatchable +__all__ = ["bethe_hessian_matrix"] + @_dispatchable def bethe_hessian_matrix(G, r: Incomplete | None = None, nodelist: Collection[Incomplete] | None = None): ... diff --git a/stubs/networkx/networkx/linalg/graphmatrix.pyi b/stubs/networkx/networkx/linalg/graphmatrix.pyi index a26cdff363e4..97fb7371cb81 100644 --- a/stubs/networkx/networkx/linalg/graphmatrix.pyi +++ b/stubs/networkx/networkx/linalg/graphmatrix.pyi @@ -3,6 +3,8 @@ from collections.abc import Collection from networkx.utils.backends import _dispatchable +__all__ = ["incidence_matrix", "adjacency_matrix"] + @_dispatchable def incidence_matrix( G, diff --git a/stubs/networkx/networkx/linalg/laplacianmatrix.pyi b/stubs/networkx/networkx/linalg/laplacianmatrix.pyi index db9a7745c215..ebbfd847905e 100644 --- a/stubs/networkx/networkx/linalg/laplacianmatrix.pyi +++ b/stubs/networkx/networkx/linalg/laplacianmatrix.pyi @@ -3,6 +3,14 @@ from collections.abc import Collection from networkx.utils.backends import _dispatchable +__all__ = [ + "laplacian_matrix", + "normalized_laplacian_matrix", + "total_spanning_tree_weight", + "directed_laplacian_matrix", + "directed_combinatorial_laplacian_matrix", +] + @_dispatchable def laplacian_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: str = "weight"): ... @_dispatchable diff --git a/stubs/networkx/networkx/linalg/modularitymatrix.pyi b/stubs/networkx/networkx/linalg/modularitymatrix.pyi index 7cc73a5f0550..03b0e1ef1193 100644 --- a/stubs/networkx/networkx/linalg/modularitymatrix.pyi +++ b/stubs/networkx/networkx/linalg/modularitymatrix.pyi @@ -3,6 +3,8 @@ from collections.abc import Collection from networkx.utils.backends import _dispatchable +__all__ = ["modularity_matrix", "directed_modularity_matrix"] + @_dispatchable def modularity_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: Incomplete | None = None): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/linalg/spectrum.pyi b/stubs/networkx/networkx/linalg/spectrum.pyi index dae0e63001e8..3051a272c3ec 100644 --- a/stubs/networkx/networkx/linalg/spectrum.pyi +++ b/stubs/networkx/networkx/linalg/spectrum.pyi @@ -2,6 +2,14 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = [ + "laplacian_spectrum", + "adjacency_spectrum", + "modularity_spectrum", + "normalized_laplacian_spectrum", + "bethe_hessian_spectrum", +] + @_dispatchable def laplacian_spectrum(G, weight: str = "weight"): ... @_dispatchable diff --git a/stubs/networkx/networkx/readwrite/adjlist.pyi b/stubs/networkx/networkx/readwrite/adjlist.pyi index ad2f31f2e9a5..b9cbfef32166 100644 --- a/stubs/networkx/networkx/readwrite/adjlist.pyi +++ b/stubs/networkx/networkx/readwrite/adjlist.pyi @@ -3,6 +3,8 @@ from collections.abc import Generator from networkx.utils.backends import _dispatchable +__all__ = ["generate_adjlist", "write_adjlist", "parse_adjlist", "read_adjlist"] + def generate_adjlist(G, delimiter: str = " ") -> Generator[Incomplete, None, None]: ... def write_adjlist(G, path, comments: str = "#", delimiter: str = " ", encoding: str = "utf-8") -> None: ... @_dispatchable diff --git a/stubs/networkx/networkx/readwrite/edgelist.pyi b/stubs/networkx/networkx/readwrite/edgelist.pyi index c8587b307881..ddb2b6d889a9 100644 --- a/stubs/networkx/networkx/readwrite/edgelist.pyi +++ b/stubs/networkx/networkx/readwrite/edgelist.pyi @@ -3,6 +3,15 @@ from collections.abc import Generator from networkx.utils.backends import _dispatchable +__all__ = [ + "generate_edgelist", + "write_edgelist", + "parse_edgelist", + "read_edgelist", + "read_weighted_edgelist", + "write_weighted_edgelist", +] + def generate_edgelist(G, delimiter: str = " ", data: bool = True) -> Generator[Incomplete, None, None]: ... def write_edgelist(G, path, comments: str = "#", delimiter: str = " ", data: bool = True, encoding: str = "utf-8") -> None: ... @_dispatchable diff --git a/stubs/networkx/networkx/readwrite/graph6.pyi b/stubs/networkx/networkx/readwrite/graph6.pyi index e1379b4568c9..092bffdf2d77 100644 --- a/stubs/networkx/networkx/readwrite/graph6.pyi +++ b/stubs/networkx/networkx/readwrite/graph6.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["from_graph6_bytes", "read_graph6", "to_graph6_bytes", "write_graph6"] + @_dispatchable def from_graph6_bytes(bytes_in): ... def to_graph6_bytes(G, nodes: Incomplete | None = None, header: bool = True): ... diff --git a/stubs/networkx/networkx/readwrite/json_graph/adjacency.pyi b/stubs/networkx/networkx/readwrite/json_graph/adjacency.pyi index 35953a37d85c..f1745015fad3 100644 --- a/stubs/networkx/networkx/readwrite/json_graph/adjacency.pyi +++ b/stubs/networkx/networkx/readwrite/json_graph/adjacency.pyi @@ -1,5 +1,7 @@ from networkx.utils.backends import _dispatchable +__all__ = ["adjacency_data", "adjacency_graph"] + @_dispatchable def adjacency_data(G, attrs={"id": "id", "key": "key"}): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/readwrite/json_graph/cytoscape.pyi b/stubs/networkx/networkx/readwrite/json_graph/cytoscape.pyi index 59ce696c83c6..6b5155209711 100644 --- a/stubs/networkx/networkx/readwrite/json_graph/cytoscape.pyi +++ b/stubs/networkx/networkx/readwrite/json_graph/cytoscape.pyi @@ -1,5 +1,7 @@ from networkx.utils.backends import _dispatchable +__all__ = ["cytoscape_data", "cytoscape_graph"] + def cytoscape_data(G, name: str = "name", ident: str = "id"): ... @_dispatchable def cytoscape_graph(data, name: str = "name", ident: str = "id"): ... diff --git a/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi b/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi index cb4c7e6766c2..2f15c525d5bb 100644 --- a/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi +++ b/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["node_link_data", "node_link_graph"] + def node_link_data( G, *, diff --git a/stubs/networkx/networkx/readwrite/json_graph/tree.pyi b/stubs/networkx/networkx/readwrite/json_graph/tree.pyi index c9e503fa7270..1b215465316e 100644 --- a/stubs/networkx/networkx/readwrite/json_graph/tree.pyi +++ b/stubs/networkx/networkx/readwrite/json_graph/tree.pyi @@ -1,5 +1,7 @@ from networkx.utils.backends import _dispatchable +__all__ = ["tree_data", "tree_graph"] + def tree_data(G, root, ident: str = "id", children: str = "children"): ... @_dispatchable def tree_graph(data, ident: str = "id", children: str = "children"): ... diff --git a/stubs/networkx/networkx/readwrite/leda.pyi b/stubs/networkx/networkx/readwrite/leda.pyi index 604a687efcd3..733005d1ce64 100644 --- a/stubs/networkx/networkx/readwrite/leda.pyi +++ b/stubs/networkx/networkx/readwrite/leda.pyi @@ -1,5 +1,7 @@ from networkx.utils.backends import _dispatchable +__all__ = ["read_leda", "parse_leda"] + @_dispatchable def read_leda(path, encoding: str = "UTF-8"): ... @_dispatchable diff --git a/stubs/networkx/networkx/readwrite/multiline_adjlist.pyi b/stubs/networkx/networkx/readwrite/multiline_adjlist.pyi index 7764cac9ec73..a83096ae39d3 100644 --- a/stubs/networkx/networkx/readwrite/multiline_adjlist.pyi +++ b/stubs/networkx/networkx/readwrite/multiline_adjlist.pyi @@ -3,6 +3,8 @@ from collections.abc import Generator from networkx.utils.backends import _dispatchable +__all__ = ["generate_multiline_adjlist", "write_multiline_adjlist", "parse_multiline_adjlist", "read_multiline_adjlist"] + def generate_multiline_adjlist(G, delimiter: str = " ") -> Generator[Incomplete, None, None]: ... def write_multiline_adjlist(G, path, delimiter: str = " ", comments: str = "#", encoding: str = "utf-8") -> None: ... @_dispatchable diff --git a/stubs/networkx/networkx/readwrite/pajek.pyi b/stubs/networkx/networkx/readwrite/pajek.pyi index f48590aa91ef..b34dfb83a4d8 100644 --- a/stubs/networkx/networkx/readwrite/pajek.pyi +++ b/stubs/networkx/networkx/readwrite/pajek.pyi @@ -3,6 +3,8 @@ from collections.abc import Generator from networkx.utils.backends import _dispatchable +__all__ = ["read_pajek", "parse_pajek", "generate_pajek", "write_pajek"] + def generate_pajek(G) -> Generator[Incomplete, None, None]: ... def write_pajek(G, path, encoding: str = "UTF-8") -> None: ... 
@_dispatchable diff --git a/stubs/networkx/networkx/readwrite/sparse6.pyi b/stubs/networkx/networkx/readwrite/sparse6.pyi index a1f2e30e64c1..10a140c0afca 100644 --- a/stubs/networkx/networkx/readwrite/sparse6.pyi +++ b/stubs/networkx/networkx/readwrite/sparse6.pyi @@ -2,6 +2,8 @@ from _typeshed import Incomplete from networkx.utils.backends import _dispatchable +__all__ = ["from_sparse6_bytes", "read_sparse6", "to_sparse6_bytes", "write_sparse6"] + @_dispatchable def from_sparse6_bytes(string): ... def to_sparse6_bytes(G, nodes: Incomplete | None = None, header: bool = True): ... diff --git a/stubs/networkx/networkx/relabel.pyi b/stubs/networkx/networkx/relabel.pyi index 57aa6384395c..4951676ca0e1 100644 --- a/stubs/networkx/networkx/relabel.pyi +++ b/stubs/networkx/networkx/relabel.pyi @@ -11,6 +11,8 @@ from networkx.utils.backends import _dispatchable _X = TypeVar("_X", bound=Hashable) _Y = TypeVar("_Y", bound=Hashable) +__all__ = ["convert_node_labels_to_integers", "relabel_nodes"] + @overload def relabel_nodes(G: MultiDiGraph[_X], mapping: Mapping[_X, _Y], copy: bool = True) -> MultiDiGraph[_X | _Y]: ... @overload diff --git a/stubs/networkx/networkx/utils/__init__.pyi b/stubs/networkx/networkx/utils/__init__.pyi index 7e234b3f75a0..5d8fb921b076 100644 --- a/stubs/networkx/networkx/utils/__init__.pyi +++ b/stubs/networkx/networkx/utils/__init__.pyi @@ -1,5 +1,6 @@ from networkx.utils.backends import _dispatchable as _dispatchable from networkx.utils.configs import * +from networkx.utils.configs import NetworkXConfig from networkx.utils.decorators import * from networkx.utils.heaps import * @@ -23,3 +24,5 @@ from networkx.utils.misc import ( from networkx.utils.random_sequence import * from networkx.utils.rcm import * from networkx.utils.union_find import * + +config: NetworkXConfig # Set by networkx/__init__.py diff --git a/stubs/networkx/networkx/utils/configs.pyi b/stubs/networkx/networkx/utils/configs.pyi index 1e6548df50b9..42f48e2dc433 100644 --- a/stubs/networkx/networkx/utils/configs.pyi +++ b/stubs/networkx/networkx/utils/configs.pyi @@ -2,7 +2,7 @@ from _typeshed import Incomplete from collections.abc import ItemsView, Iterable, Iterator, KeysView, Mapping, ValuesView from typing_extensions import Self -__all__ = ["Config", "config"] +__all__ = ["Config"] class Config(Mapping[str, Incomplete]): def __init_subclass__(cls, strict: bool = True) -> None: ... diff --git a/stubs/networkx/networkx/utils/decorators.pyi b/stubs/networkx/networkx/utils/decorators.pyi index 82cd9492d643..af8b401c8600 100644 --- a/stubs/networkx/networkx/utils/decorators.pyi +++ b/stubs/networkx/networkx/utils/decorators.pyi @@ -1,6 +1,8 @@ from _typeshed import Incomplete from typing import NamedTuple +__all__ = ["not_implemented_for", "open_file", "nodes_or_number", "np_random_state", "py_random_state", "argmap"] + def not_implemented_for(*graph_types): ... def open_file(path_arg, mode: str = "r"): ... def nodes_or_number(which_args): ... 
diff --git a/stubs/networkx/networkx/utils/heaps.pyi b/stubs/networkx/networkx/utils/heaps.pyi
index e7d610aa922b..43cd69b1b358 100644
--- a/stubs/networkx/networkx/utils/heaps.pyi
+++ b/stubs/networkx/networkx/utils/heaps.pyi
@@ -1,5 +1,7 @@
 from _typeshed import Incomplete
 
+__all__ = ["MinHeap", "PairingHeap", "BinaryHeap"]
+
 class MinHeap:
     class _Item:
         key: Incomplete
diff --git a/stubs/networkx/networkx/utils/mapped_queue.pyi b/stubs/networkx/networkx/utils/mapped_queue.pyi
index 9d9fadb46ea5..e5eb305abb25 100644
--- a/stubs/networkx/networkx/utils/mapped_queue.pyi
+++ b/stubs/networkx/networkx/utils/mapped_queue.pyi
@@ -1,6 +1,8 @@
 from _typeshed import Incomplete
 from collections.abc import Iterator
 
+__all__ = ["MappedQueue"]
+
 class _HeapElement:
     priority: Incomplete
     element: Incomplete
diff --git a/stubs/networkx/networkx/utils/random_sequence.pyi b/stubs/networkx/networkx/utils/random_sequence.pyi
index 921b79a04079..3d89df4281c0 100644
--- a/stubs/networkx/networkx/utils/random_sequence.pyi
+++ b/stubs/networkx/networkx/utils/random_sequence.pyi
@@ -1,5 +1,14 @@
 from _typeshed import Incomplete
 
+__all__ = [
+    "powerlaw_sequence",
+    "zipf_rv",
+    "cumulative_distribution",
+    "discrete_sequence",
+    "random_weighted_sample",
+    "weighted_choice",
+]
+
 def powerlaw_sequence(n, exponent: float = 2.0, seed: Incomplete | None = None): ...
 def zipf_rv(alpha, xmin: int = 1, seed: Incomplete | None = None): ...
 def cumulative_distribution(distribution): ...
diff --git a/stubs/networkx/networkx/utils/rcm.pyi b/stubs/networkx/networkx/utils/rcm.pyi
index 35ff04993603..cfe7c2debc0b 100644
--- a/stubs/networkx/networkx/utils/rcm.pyi
+++ b/stubs/networkx/networkx/utils/rcm.pyi
@@ -1,5 +1,7 @@
 from _typeshed import Incomplete
 from collections.abc import Generator
 
+__all__ = ["cuthill_mckee_ordering", "reverse_cuthill_mckee_ordering"]
+
 def cuthill_mckee_ordering(G, heuristic: Incomplete | None = None) -> Generator[Incomplete, Incomplete, None]: ...
 def reverse_cuthill_mckee_ordering(G, heuristic: Incomplete | None = None): ...
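The NetworkX stub changes in the patch above add explicit __all__ lists and declarations for newer layout helpers (forceatlas2_layout, bfs_layout, draw_forceatlas2). A minimal usage sketch of the newly stubbed signatures, assuming NetworkX >= 3.3 is installed at runtime; the example graph, seed, and start node below are illustrative choices, not taken from the patch:

    import networkx as nx

    # Small example graph; karate_club_graph is also re-exported via the new __all__ in social.pyi.
    G = nx.karate_club_graph()

    # forceatlas2_layout (declared in the stub) returns a dict mapping each node to a position.
    pos = nx.forceatlas2_layout(G, max_iter=100, seed=42)

    # bfs_layout requires an explicit start node; node 0 is an arbitrary choice here.
    layers = nx.bfs_layout(G, 0, align="horizontal")

    print(len(pos), len(layers))  # both dicts cover every node of G
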
From 22f5114e1f331a554629fe01046b6a502165f54a Mon Sep 17 00:00:00 2001 From: Hunter Hogan Date: Thu, 8 May 2025 22:40:29 -0500 Subject: [PATCH 299/388] remove _Identifier: typing_extensions.TypeAlias = str (#13954) (#13964) --- stdlib/ast.pyi | 174 ++++++++++++++++++++----------------------------- 1 file changed, 72 insertions(+), 102 deletions(-) diff --git a/stdlib/ast.pyi b/stdlib/ast.pyi index 1a3d3e97d11e..cb6fce435226 100644 --- a/stdlib/ast.pyi +++ b/stdlib/ast.pyi @@ -14,12 +14,6 @@ from typing_extensions import Self, Unpack, deprecated if sys.version_info >= (3, 13): from _ast import PyCF_OPTIMIZED_AST as PyCF_OPTIMIZED_AST -# Alias used for fields that must always be valid identifiers -# A string `x` counts as a valid identifier if both the following are True -# (1) `x.isidentifier()` evaluates to `True` -# (2) `keyword.iskeyword(x)` evaluates to `False` -_Identifier: typing_extensions.TypeAlias = str - # Used for node end positions in constructor keyword arguments _EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None) @@ -111,7 +105,7 @@ class FunctionDef(stmt): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") elif sys.version_info >= (3, 10): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") - name: _Identifier + name: str args: arguments body: list[stmt] decorator_list: list[expr] @@ -122,7 +116,7 @@ class FunctionDef(stmt): if sys.version_info >= (3, 13): def __init__( self, - name: _Identifier, + name: str, args: arguments, body: list[stmt] = ..., decorator_list: list[expr] = ..., @@ -135,7 +129,7 @@ class FunctionDef(stmt): @overload def __init__( self, - name: _Identifier, + name: str, args: arguments, body: list[stmt], decorator_list: list[expr], @@ -147,7 +141,7 @@ class FunctionDef(stmt): @overload def __init__( self, - name: _Identifier, + name: str, args: arguments, body: list[stmt], decorator_list: list[expr], @@ -160,7 +154,7 @@ class FunctionDef(stmt): else: def __init__( self, - name: _Identifier, + name: str, args: arguments, body: list[stmt], decorator_list: list[expr], @@ -173,7 +167,7 @@ class FunctionDef(stmt): def __replace__( self, *, - name: _Identifier = ..., + name: str = ..., args: arguments = ..., body: list[stmt] = ..., decorator_list: list[expr] = ..., @@ -187,7 +181,7 @@ class AsyncFunctionDef(stmt): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") elif sys.version_info >= (3, 10): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") - name: _Identifier + name: str args: arguments body: list[stmt] decorator_list: list[expr] @@ -198,7 +192,7 @@ class AsyncFunctionDef(stmt): if sys.version_info >= (3, 13): def __init__( self, - name: _Identifier, + name: str, args: arguments, body: list[stmt] = ..., decorator_list: list[expr] = ..., @@ -211,7 +205,7 @@ class AsyncFunctionDef(stmt): @overload def __init__( self, - name: _Identifier, + name: str, args: arguments, body: list[stmt], decorator_list: list[expr], @@ -223,7 +217,7 @@ class AsyncFunctionDef(stmt): @overload def __init__( self, - name: _Identifier, + name: str, args: arguments, body: list[stmt], decorator_list: list[expr], @@ -236,7 +230,7 @@ class AsyncFunctionDef(stmt): else: def __init__( self, - name: _Identifier, + name: str, args: arguments, body: list[stmt], decorator_list: list[expr], @@ -249,7 +243,7 @@ class AsyncFunctionDef(stmt): def __replace__( self, *, - name: 
_Identifier = ..., + name: str = ..., args: arguments = ..., body: list[stmt], decorator_list: list[expr], @@ -263,7 +257,7 @@ class ClassDef(stmt): __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params") elif sys.version_info >= (3, 10): __match_args__ = ("name", "bases", "keywords", "body", "decorator_list") - name: _Identifier + name: str bases: list[expr] keywords: list[keyword] body: list[stmt] @@ -273,7 +267,7 @@ class ClassDef(stmt): if sys.version_info >= (3, 13): def __init__( self, - name: _Identifier, + name: str, bases: list[expr] = ..., keywords: list[keyword] = ..., body: list[stmt] = ..., @@ -284,7 +278,7 @@ class ClassDef(stmt): elif sys.version_info >= (3, 12): def __init__( self, - name: _Identifier, + name: str, bases: list[expr], keywords: list[keyword], body: list[stmt], @@ -295,7 +289,7 @@ class ClassDef(stmt): else: def __init__( self, - name: _Identifier, + name: str, bases: list[expr], keywords: list[keyword], body: list[stmt], @@ -307,7 +301,7 @@ class ClassDef(stmt): def __replace__( self, *, - name: _Identifier, + name: str, bases: list[expr], keywords: list[keyword], body: list[stmt], @@ -774,26 +768,26 @@ class ImportFrom(stmt): class Global(stmt): if sys.version_info >= (3, 10): __match_args__ = ("names",) - names: list[_Identifier] + names: list[str] if sys.version_info >= (3, 13): - def __init__(self, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + def __init__(self, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> None: ... else: - def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ... + def __init__(self, names: list[str], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, names: list[str], **kwargs: Unpack[_Attributes]) -> Self: ... class Nonlocal(stmt): if sys.version_info >= (3, 10): __match_args__ = ("names",) - names: list[_Identifier] + names: list[str] if sys.version_info >= (3, 13): - def __init__(self, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + def __init__(self, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> None: ... else: - def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ... + def __init__(self, names: list[str], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Expr(stmt): if sys.version_info >= (3, 10): @@ -1084,13 +1078,13 @@ class Attribute(expr): if sys.version_info >= (3, 10): __match_args__ = ("value", "attr", "ctx") value: expr - attr: _Identifier + attr: str ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` - def __init__(self, value: expr, attr: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + def __init__(self, value: expr, attr: str, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): def __replace__( - self, *, value: expr = ..., attr: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] + self, *, value: expr = ..., attr: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] ) -> Self: ... 
class Subscript(expr): @@ -1119,12 +1113,12 @@ class Starred(expr): class Name(expr): if sys.version_info >= (3, 10): __match_args__ = ("id", "ctx") - id: _Identifier + id: str ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` - def __init__(self, id: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + def __init__(self, id: str, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, id: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, id: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class List(expr): if sys.version_info >= (3, 10): @@ -1272,30 +1266,23 @@ class ExceptHandler(excepthandler): if sys.version_info >= (3, 10): __match_args__ = ("type", "name", "body") type: expr | None - name: _Identifier | None + name: str | None body: list[stmt] if sys.version_info >= (3, 13): def __init__( - self, type: expr | None = None, name: _Identifier | None = None, body: list[stmt] = ..., **kwargs: Unpack[_Attributes] + self, type: expr | None = None, name: str | None = None, body: list[stmt] = ..., **kwargs: Unpack[_Attributes] ) -> None: ... else: @overload - def __init__( - self, type: expr | None, name: _Identifier | None, body: list[stmt], **kwargs: Unpack[_Attributes] - ) -> None: ... + def __init__(self, type: expr | None, name: str | None, body: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ... @overload def __init__( - self, type: expr | None = None, name: _Identifier | None = None, *, body: list[stmt], **kwargs: Unpack[_Attributes] + self, type: expr | None = None, name: str | None = None, *, body: list[stmt], **kwargs: Unpack[_Attributes] ) -> None: ... if sys.version_info >= (3, 14): def __replace__( - self, - *, - type: expr | None = ..., - name: _Identifier | None = ..., - body: list[stmt] = ..., - **kwargs: Unpack[_Attributes], + self, *, type: expr | None = ..., name: str | None = ..., body: list[stmt] = ..., **kwargs: Unpack[_Attributes] ) -> Self: ... class arguments(AST): @@ -1376,21 +1363,16 @@ class arg(AST): end_col_offset: int | None if sys.version_info >= (3, 10): __match_args__ = ("arg", "annotation", "type_comment") - arg: _Identifier + arg: str annotation: expr | None type_comment: str | None def __init__( - self, arg: _Identifier, annotation: expr | None = None, type_comment: str | None = None, **kwargs: Unpack[_Attributes] + self, arg: str, annotation: expr | None = None, type_comment: str | None = None, **kwargs: Unpack[_Attributes] ) -> None: ... if sys.version_info >= (3, 14): def __replace__( - self, - *, - arg: _Identifier = ..., - annotation: expr | None = ..., - type_comment: str | None = ..., - **kwargs: Unpack[_Attributes], + self, *, arg: str = ..., annotation: expr | None = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] ) -> Self: ... class keyword(AST): @@ -1400,15 +1382,15 @@ class keyword(AST): end_col_offset: int | None if sys.version_info >= (3, 10): __match_args__ = ("arg", "value") - arg: _Identifier | None + arg: str | None value: expr @overload - def __init__(self, arg: _Identifier | None, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + def __init__(self, arg: str | None, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... @overload - def __init__(self, arg: _Identifier | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... 
+ def __init__(self, arg: str | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, arg: _Identifier | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, arg: str | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class alias(AST): lineno: int @@ -1418,11 +1400,11 @@ class alias(AST): if sys.version_info >= (3, 10): __match_args__ = ("name", "asname") name: str - asname: _Identifier | None - def __init__(self, name: str, asname: _Identifier | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + asname: str | None + def __init__(self, name: str, asname: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, name: str = ..., asname: _Identifier | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, name: str = ..., asname: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class withitem(AST): if sys.version_info >= (3, 10): @@ -1497,22 +1479,18 @@ if sys.version_info >= (3, 10): __match_args__ = ("keys", "patterns", "rest") keys: list[expr] patterns: list[pattern] - rest: _Identifier | None + rest: str | None if sys.version_info >= (3, 13): def __init__( self, keys: list[expr] = ..., patterns: list[pattern] = ..., - rest: _Identifier | None = None, + rest: str | None = None, **kwargs: Unpack[_Attributes[int]], ) -> None: ... else: def __init__( - self, - keys: list[expr], - patterns: list[pattern], - rest: _Identifier | None = None, - **kwargs: Unpack[_Attributes[int]], + self, keys: list[expr], patterns: list[pattern], rest: str | None = None, **kwargs: Unpack[_Attributes[int]] ) -> None: ... if sys.version_info >= (3, 14): @@ -1521,7 +1499,7 @@ if sys.version_info >= (3, 10): *, keys: list[expr] = ..., patterns: list[pattern] = ..., - rest: _Identifier | None = ..., + rest: str | None = ..., **kwargs: Unpack[_Attributes[int]], ) -> Self: ... @@ -1529,14 +1507,14 @@ if sys.version_info >= (3, 10): __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns") cls: expr patterns: list[pattern] - kwd_attrs: list[_Identifier] + kwd_attrs: list[str] kwd_patterns: list[pattern] if sys.version_info >= (3, 13): def __init__( self, cls: expr, patterns: list[pattern] = ..., - kwd_attrs: list[_Identifier] = ..., + kwd_attrs: list[str] = ..., kwd_patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]], ) -> None: ... @@ -1545,7 +1523,7 @@ if sys.version_info >= (3, 10): self, cls: expr, patterns: list[pattern], - kwd_attrs: list[_Identifier], + kwd_attrs: list[str], kwd_patterns: list[pattern], **kwargs: Unpack[_Attributes[int]], ) -> None: ... @@ -1556,30 +1534,30 @@ if sys.version_info >= (3, 10): *, cls: expr = ..., patterns: list[pattern] = ..., - kwd_attrs: list[_Identifier] = ..., + kwd_attrs: list[str] = ..., kwd_patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]], ) -> Self: ... class MatchStar(pattern): __match_args__ = ("name",) - name: _Identifier | None - def __init__(self, name: _Identifier | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + name: str | None + def __init__(self, name: str | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + def __replace__(self, *, name: str | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... 
class MatchAs(pattern): __match_args__ = ("pattern", "name") pattern: _Pattern | None - name: _Identifier | None + name: str | None def __init__( - self, pattern: _Pattern | None = None, name: _Identifier | None = None, **kwargs: Unpack[_Attributes[int]] + self, pattern: _Pattern | None = None, name: str | None = None, **kwargs: Unpack[_Attributes[int]] ) -> None: ... if sys.version_info >= (3, 14): def __replace__( - self, *, pattern: _Pattern | None = ..., name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]] + self, *, pattern: _Pattern | None = ..., name: str | None = ..., **kwargs: Unpack[_Attributes[int]] ) -> Self: ... class MatchOr(pattern): @@ -1621,25 +1599,21 @@ if sys.version_info >= (3, 12): __match_args__ = ("name", "bound", "default_value") else: __match_args__ = ("name", "bound") - name: _Identifier + name: str bound: expr | None if sys.version_info >= (3, 13): default_value: expr | None def __init__( - self, - name: _Identifier, - bound: expr | None = None, - default_value: expr | None = None, - **kwargs: Unpack[_Attributes[int]], + self, name: str, bound: expr | None = None, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]] ) -> None: ... else: - def __init__(self, name: _Identifier, bound: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + def __init__(self, name: str, bound: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): def __replace__( self, *, - name: _Identifier = ..., + name: str = ..., bound: expr | None = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]], @@ -1650,18 +1624,16 @@ if sys.version_info >= (3, 12): __match_args__ = ("name", "default_value") else: __match_args__ = ("name",) - name: _Identifier + name: str if sys.version_info >= (3, 13): default_value: expr | None - def __init__( - self, name: _Identifier, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]] - ) -> None: ... + def __init__(self, name: str, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... else: - def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ... + def __init__(self, name: str, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): def __replace__( - self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] + self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] ) -> Self: ... class TypeVarTuple(type_param): @@ -1669,18 +1641,16 @@ if sys.version_info >= (3, 12): __match_args__ = ("name", "default_value") else: __match_args__ = ("name",) - name: _Identifier + name: str if sys.version_info >= (3, 13): default_value: expr | None - def __init__( - self, name: _Identifier, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]] - ) -> None: ... + def __init__(self, name: str, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... else: - def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ... + def __init__(self, name: str, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): def __replace__( - self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] + self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] ) -> Self: ... 
class _ABC(type): From 123505a182caf9acb95fc44c9d6706fbb1d6435f Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Fri, 9 May 2025 15:49:21 +0000 Subject: [PATCH 300/388] click-spinner: Replace `Any` return type (#13970) --- stubs/click-spinner/click_spinner/_version.pyi | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/stubs/click-spinner/click_spinner/_version.pyi b/stubs/click-spinner/click_spinner/_version.pyi index feb0b1653bab..1ab0ac18a44b 100644 --- a/stubs/click-spinner/click_spinner/_version.pyi +++ b/stubs/click-spinner/click_spinner/_version.pyi @@ -1,5 +1,12 @@ -from typing import Any +from typing import TypedDict, type_check_only + +@type_check_only +class _Versions(TypedDict): + dirty: bool + error: None + full_revisionid: str + version: str version_json: str -def get_versions() -> dict[str, Any]: ... +def get_versions() -> _Versions: ... From cd234460e20a2b82086582332dc039683d84865c Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 9 May 2025 19:31:03 +0200 Subject: [PATCH 301/388] [CI] Install apt dependencies when running "pyright: Run test cases" (#13976) --- .github/workflows/tests.yml | 7 +++++++ tests/get_external_apt_dependencies.py | 20 ++++++++++++++++++++ 2 files changed, 27 insertions(+) create mode 100755 tests/get_external_apt_dependencies.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5031b000ddb5..e9ef6b394a4e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -103,6 +103,13 @@ jobs: - name: Install typeshed test-suite requirements # Install these so we can run `get_external_stub_requirements.py` run: uv pip install -r requirements-tests.txt --system + - name: Install required APT packages + run: | + DEPENDENCIES=$( python tests/get_external_apt_dependencies.py ) + if [ -n "$DEPENDENCIES" ]; then + printf "Installing APT packages:\n $(echo $DEPENDENCIES | sed 's/ /\n /g')\n" + sudo apt-get install -qy $DEPENDENCIES + fi - name: Create an isolated venv for testing run: uv venv .venv - name: Install 3rd-party stub dependencies diff --git a/tests/get_external_apt_dependencies.py b/tests/get_external_apt_dependencies.py new file mode 100755 index 000000000000..8e3f4ddfdeb1 --- /dev/null +++ b/tests/get_external_apt_dependencies.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 + +import itertools +import os +import sys + +from ts_utils.metadata import read_metadata +from ts_utils.paths import STUBS_PATH + +if __name__ == "__main__": + distributions = sys.argv[1:] + if not distributions: + distributions = os.listdir(STUBS_PATH) + dependencies = set( + itertools.chain.from_iterable( + read_metadata(distribution).stubtest_settings.apt_dependencies for distribution in distributions + ) + ) + for dependency in sorted(dependencies): + print(dependency) From 5beda816f77d1de4fd372e43229962fd3191118d Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 9 May 2025 20:26:16 +0200 Subject: [PATCH 302/388] [geopandas] Fix CI tests in some circumstances (#13978) Add "libproj-dev" and "proj-bin" to apt_dependencies. These packages are necessary to build the pyproj dependency if a pre-built wheel is not available. 
--- stubs/geopandas/METADATA.toml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/stubs/geopandas/METADATA.toml b/stubs/geopandas/METADATA.toml index 01330a7ed61b..43adce04a0d4 100644 --- a/stubs/geopandas/METADATA.toml +++ b/stubs/geopandas/METADATA.toml @@ -2,3 +2,8 @@ version = "1.0.1" # Requires a version of numpy with a `py.typed` file requires = ["numpy>=1.20", "pandas-stubs", "types-shapely", "pyproj"] upstream_repository = "https://github.com/geopandas/geopandas" + +[tool.stubtest] +# libproj-dev and proj-bin are required to build pyproj if wheels for the +# target Python version are not available. +apt_dependencies = ["libproj-dev", "proj-bin"] From 6aec7c13336bdbb60c4d6e1fb7ba31cbe15a093c Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 9 May 2025 20:26:53 +0200 Subject: [PATCH 303/388] [CI] Fix "mypy: Check stubs" when using specific Python version (#13975) --- .github/workflows/tests.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e9ef6b394a4e..c2078c4b6302 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -70,7 +70,11 @@ jobs: python-version: ${{ matrix.python-version }} - run: curl -LsSf https://astral.sh/uv/install.sh | sh - run: uv pip install -r requirements-tests.txt --system - - run: python ./tests/mypy_test.py --platform=${{ matrix.platform }} --python-version=${{ matrix.python-version }} + - run: | + # python-version can sometimes be pinned to a specific version or to "-dev", but + # mypy understands only X.Y version numbers. + MYPY_PY_VERSION=$(echo ${{ matrix.python-version }} | cut -d - -f 1 | cut -d . -f 1-2) + python ./tests/mypy_test.py --platform=${{ matrix.platform }} --python-version=${MYPY_PY_VERSION} regression-tests: name: "mypy: Run test cases" From 2bb48b4c67b77f086d4c84a6385bdb39c5529e25 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 9 May 2025 21:04:35 +0200 Subject: [PATCH 304/388] Fix errors when type checking stdlib with Python 3.14 (#13977) --- stdlib/ast.pyi | 33 ++++++++++++++++++--------------- stdlib/asyncio/events.pyi | 4 +++- stdlib/sqlite3/__init__.pyi | 4 +++- stdlib/typing.pyi | 4 +++- 4 files changed, 27 insertions(+), 18 deletions(-) diff --git a/stdlib/ast.pyi b/stdlib/ast.pyi index cb6fce435226..be7788edfd50 100644 --- a/stdlib/ast.pyi +++ b/stdlib/ast.pyi @@ -174,6 +174,7 @@ class FunctionDef(stmt): returns: expr | None = ..., type_comment: str | None = ..., type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], ) -> Self: ... class AsyncFunctionDef(stmt): @@ -245,11 +246,11 @@ class AsyncFunctionDef(stmt): *, name: str = ..., args: arguments = ..., - body: list[stmt], - decorator_list: list[expr], - returns: expr | None, - type_comment: str | None, - type_params: list[type_param], + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + returns: expr | None = ..., + type_comment: str | None = ..., + type_params: list[type_param] = ..., ) -> Self: ... class ClassDef(stmt): @@ -301,12 +302,12 @@ class ClassDef(stmt): def __replace__( self, *, - name: str, - bases: list[expr], - keywords: list[keyword], - body: list[stmt], - decorator_list: list[expr], - type_params: list[type_param], + name: str = ..., + bases: list[expr] = ..., + keywords: list[keyword] = ..., + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], ) -> Self: ... 
@@ -377,7 +378,7 @@ if sys.version_info >= (3, 12): ) -> None: ... if sys.version_info >= (3, 14): - def __replace__( + def __replace__( # type: ignore[override] self, *, name: Name = ..., @@ -540,7 +541,9 @@ class While(stmt): def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__( + self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... class If(stmt): if sys.version_info >= (3, 10): @@ -725,7 +728,7 @@ class Assert(stmt): def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, test: expr, msg: expr | None, **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, test: expr = ..., msg: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Import(stmt): if sys.version_info >= (3, 10): @@ -775,7 +778,7 @@ class Global(stmt): def __init__(self, names: list[str], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, names: list[str], **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Nonlocal(stmt): if sys.version_info >= (3, 10): diff --git a/stdlib/asyncio/events.pyi b/stdlib/asyncio/events.pyi index afe912d01fe1..af43d2f5937d 100644 --- a/stdlib/asyncio/events.pyi +++ b/stdlib/asyncio/events.pyi @@ -21,7 +21,9 @@ from .futures import Future from .protocols import BaseProtocol from .tasks import Task from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport -from .unix_events import AbstractChildWatcher + +if sys.version_info < (3, 14): + from .unix_events import AbstractChildWatcher # Keep asyncio.__all__ updated with any changes to __all__ here if sys.version_info >= (3, 14): diff --git a/stdlib/sqlite3/__init__.pyi b/stdlib/sqlite3/__init__.pyi index b83516b4d4eb..5d3c2330be5e 100644 --- a/stdlib/sqlite3/__init__.pyi +++ b/stdlib/sqlite3/__init__.pyi @@ -60,12 +60,14 @@ from sqlite3.dbapi2 import ( sqlite_version as sqlite_version, sqlite_version_info as sqlite_version_info, threadsafety as threadsafety, - version_info as version_info, ) from types import TracebackType from typing import Any, Literal, Protocol, SupportsIndex, TypeVar, final, overload, type_check_only from typing_extensions import Self, TypeAlias +if sys.version_info < (3, 14): + from sqlite3.dbapi2 import version_info as version_info + if sys.version_info >= (3, 12): from sqlite3.dbapi2 import ( LEGACY_TRANSACTION_CONTROL as LEGACY_TRANSACTION_CONTROL, diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi index df753cfd9bca..189ff3e89720 100644 --- a/stdlib/typing.pyi +++ b/stdlib/typing.pyi @@ -37,7 +37,6 @@ __all__ = [ "AsyncIterator", "Awaitable", "BinaryIO", - "ByteString", "Callable", "ChainMap", "ClassVar", @@ -106,6 +105,9 @@ __all__ = [ "runtime_checkable", ] +if sys.version_info < (3, 14): + __all__ += ["ByteString"] + if sys.version_info >= (3, 10): __all__ += ["Concatenate", "ParamSpec", "ParamSpecArgs", "ParamSpecKwargs", "TypeAlias", "TypeGuard", "is_typeddict"] From 80a118d6a5044d92c08b45c3493417d7f3fbdd8a Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Fri, 9 May 2025 21:46:00 
+0200 Subject: [PATCH 305/388] [CI] Install apt dependencies when running "mypy: Check stubs" (#13979) Extracted from #13957 --- .github/workflows/tests.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c2078c4b6302..d81261fc8a71 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -70,6 +70,13 @@ jobs: python-version: ${{ matrix.python-version }} - run: curl -LsSf https://astral.sh/uv/install.sh | sh - run: uv pip install -r requirements-tests.txt --system + - name: Install required APT packages + run: | + DEPENDENCIES=$( python tests/get_external_apt_dependencies.py ) + if [ -n "$DEPENDENCIES" ]; then + printf "Installing APT packages:\n $(echo $DEPENDENCIES | sed 's/ /\n /g')\n" + sudo apt-get install -qy $DEPENDENCIES + fi - run: | # python-version can sometimes be pinned to a specific version or to "-dev", but # mypy understands only X.Y version numbers. From 571167c9f6ef96639a9d4c274d16a009e3e5b5b4 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 10 May 2025 11:45:53 +0200 Subject: [PATCH 306/388] [stubsabot] Bump setuptools to 80.4.* (#13980) --- stubs/setuptools/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index 9e2fd9acc0a1..3000300b6f35 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,4 +1,4 @@ -version = "80.3.*" +version = "80.4.*" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ From 29fe2af052f76bb4917c4988027fdf279d0bfd0f Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Sat, 10 May 2025 15:27:40 +0200 Subject: [PATCH 307/388] Support Python 3.14 (#13957) --- .github/workflows/daily.yml | 2 +- .github/workflows/stubtest_stdlib.yml | 2 +- .github/workflows/tests.yml | 20 +- README.md | 2 +- requirements-tests.txt | 11 +- stdlib/@tests/stubtest_allowlists/common.txt | 17 - .../stubtest_allowlists/darwin-py314.txt | 23 + .../stubtest_allowlists/linux-py314.txt | 36 + stdlib/@tests/stubtest_allowlists/py310.txt | 26 + stdlib/@tests/stubtest_allowlists/py311.txt | 26 + stdlib/@tests/stubtest_allowlists/py312.txt | 26 + stdlib/@tests/stubtest_allowlists/py313.txt | 26 + stdlib/@tests/stubtest_allowlists/py314.txt | 798 ++++++++++++++++++ stdlib/@tests/stubtest_allowlists/py39.txt | 26 + .../stubtest_allowlists/win32-py314.txt | 59 ++ stdlib/ast.pyi | 1 + stubs/pyflakes/pyflakes/checker.pyi | 7 +- tests/mypy_test.py | 2 +- tests/regr_test.py | 2 +- tests/runtests.py | 2 +- tests/typecheck_typeshed.py | 2 +- 21 files changed, 1082 insertions(+), 34 deletions(-) create mode 100644 stdlib/@tests/stubtest_allowlists/darwin-py314.txt create mode 100644 stdlib/@tests/stubtest_allowlists/linux-py314.txt create mode 100644 stdlib/@tests/stubtest_allowlists/py314.txt create mode 100644 stdlib/@tests/stubtest_allowlists/win32-py314.txt diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index be0d5b7cad71..f42527f995aa 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -35,7 +35,7 @@ jobs: strategy: matrix: os: ["ubuntu-latest", "windows-latest", "macos-latest"] - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14-dev"] fail-fast: false steps: diff --git 
a/.github/workflows/stubtest_stdlib.yml b/.github/workflows/stubtest_stdlib.yml index b2ac305aefd2..2a574584ff08 100644 --- a/.github/workflows/stubtest_stdlib.yml +++ b/.github/workflows/stubtest_stdlib.yml @@ -31,7 +31,7 @@ jobs: strategy: matrix: os: ["ubuntu-latest", "windows-latest", "macos-latest"] - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14-dev"] fail-fast: false steps: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d81261fc8a71..e5e037be2393 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -61,7 +61,8 @@ jobs: strategy: matrix: platform: ["linux", "win32", "darwin"] - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + # TODO (2025-05-10) "3.13.2" should be "3.14-dev", see below. + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.13.2"] fail-fast: false steps: - uses: actions/checkout@v4 @@ -77,10 +78,17 @@ jobs: printf "Installing APT packages:\n $(echo $DEPENDENCIES | sed 's/ /\n /g')\n" sudo apt-get install -qy $DEPENDENCIES fi - - run: | - # python-version can sometimes be pinned to a specific version or to "-dev", but - # mypy understands only X.Y version numbers. - MYPY_PY_VERSION=$(echo ${{ matrix.python-version }} | cut -d - -f 1 | cut -d . -f 1-2) + - name: Run mypy_test.py + run: | + # TODO: (2025-05-10) This is a bad hack to work around mypy crashing + # when running on Python 3.14. See https://github.com/python/mypy/pull/19020. + if [[ "${{ matrix.python-version }}" == "3.13.2" ]]; then + MYPY_PY_VERSION="3.14" + else + # python-version can sometimes be pinned to a specific version or to "-dev", but + # mypy understands only X.Y version numbers. + MYPY_PY_VERSION=$(echo ${{ matrix.python-version }} | cut -d - -f 1 | cut -d . -f 1-2) + fi python ./tests/mypy_test.py --platform=${{ matrix.platform }} --python-version=${MYPY_PY_VERSION} regression-tests: @@ -103,7 +111,7 @@ jobs: strategy: matrix: python-platform: ["Linux", "Windows", "Darwin"] - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] fail-fast: false steps: - uses: actions/checkout@v4 diff --git a/README.md b/README.md index b52ecf3a5de9..ee09529b967a 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ the project the stubs are for, but instead report them here to typeshed.** Further documentation on stub files, typeshed, and Python's typing system in general, can also be found at https://typing.readthedocs.io/en/latest/. -Typeshed supports Python versions 3.9 to 3.13. +Typeshed supports Python versions 3.9 to 3.14. ## Using diff --git a/requirements-tests.txt b/requirements-tests.txt index a81e7cf21f89..6005614b24d8 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -6,15 +6,20 @@ pyright==1.1.400 pytype==2024.10.11; platform_system != "Windows" and python_version >= "3.10" and python_version < "3.13" # Libraries used by our various scripts. -aiohttp==3.11.15 -grpcio-tools>=1.66.2 # For grpc_tools.protoc +# TODO (2025-05-09): Installing this on Python 3.14 on Windows fails at +# the moment. +aiohttp==3.11.15; python_version < "3.14" +# TODO (2025-05-09): No wheels exist for Python 3.14 yet, slowing down CI +# considerably and prone to fail. +grpcio-tools>=1.66.2; python_version < "3.14" # For grpc_tools.protoc mypy-protobuf==3.6.0 packaging==24.2 pathspec>=0.11.1 pre-commit # Required by create_baseline_stubs.py. Must match .pre-commit-config.yaml. 
ruff==0.11.4 -stubdefaulter==0.1.0 +# TODO (2025-05-07): Dependency libcst doesn't support Python 3.14 yet. +stubdefaulter==0.1.0; python_version < "3.14" termcolor>=2.3 tomli==2.2.1 tomlkit==0.13.2 diff --git a/stdlib/@tests/stubtest_allowlists/common.txt b/stdlib/@tests/stubtest_allowlists/common.txt index 7746a8082a51..a036c227f118 100644 --- a/stdlib/@tests/stubtest_allowlists/common.txt +++ b/stdlib/@tests/stubtest_allowlists/common.txt @@ -183,11 +183,6 @@ _collections_abc.AsyncGenerator.__anext__ _collections_abc.AsyncGenerator.aclose _collections_abc.AsyncIterator.__anext__ -# Pretend typing.ByteString is a Union, to better match its documented semantics. -# As a side effect, this changes the definition of collections.abc.ByteString, which is okay, -# because it's not an ABC that makes any sense and was deprecated in 3.12 -_collections_abc.ByteString - _collections_abc.Callable # Typing-related weirdness _collections_abc.Mapping.get # Adding None to the Union messed up mypy _collections_abc.Sequence.index # Supporting None in end is not mandatory @@ -215,12 +210,7 @@ _?ast.stmt.__init__ _ast.ImportFrom.level # None on the class, but never None on instances -ast.Bytes.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs -ast.Ellipsis.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs ast.ImportFrom.level # None on the class, but never None on instances -ast.NameConstant.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs -ast.Num.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs -ast.Str.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs ast.NodeVisitor.visit_\w+ # Methods are discovered dynamically, see #3796 _?asyncio.Future.__init__ # Usually initialized from c object asyncio.futures.Future.__init__ # Usually initialized from c object @@ -312,7 +302,6 @@ enum.auto.__xor__ # enum.auto is magic, see comments functools._lru_cache_wrapper.cache_parameters # Cannot be detected statically functools.cached_property.__set__ # doesn't exist, but cached_property is settable by another mechanism -_?hashlib.scrypt # Raises TypeError if salt, n, r or p are None hmac.new # Raises TypeError if optional argument digestmod is not provided http.HTTPStatus.description # set in __new__; work-around for enum wierdness http.HTTPStatus.phrase # set in __new__; work-around for enum wierdness @@ -325,7 +314,6 @@ importlib._bootstrap_external.FileLoader.get_resource_reader importlib._bootstrap_external.FileLoader.load_module importlib.abc.FileLoader.get_filename importlib.abc.FileLoader.load_module -importlib.abc.Traversable.open # Problematic protocol signature at runtime, see source code comments. importlib.machinery.ExtensionFileLoader.get_filename # We can't distinguish not having a default value from having a default value of inspect.Parameter.empty @@ -472,7 +460,6 @@ typing_extensions\.TypeVar.* typing_extensions\._SpecialForm.* # Special primitives -typing\.ByteString typing(_extensions)?\.AbstractSet typing(_extensions)?\.AsyncGenerator typing(_extensions)?\.AsyncIterable @@ -527,10 +514,6 @@ typing(_extensions)?\.(Async)?ContextManager typing(_extensions)?\.IO\.__iter__ typing(_extensions)?\.IO\.__next__ -# Will always raise. Not included to avoid type checkers inferring that -# TypeAliasType instances are callable. 
-typing_extensions.TypeAliasType.__call__ - types.MethodType.__closure__ # read-only but not actually a property; stubtest thinks it doesn't exist. types.MethodType.__code__ # read-only but not actually a property; stubtest thinks it doesn't exist. types.MethodType.__defaults__ # read-only but not actually a property; stubtest thinks it doesn't exist. diff --git a/stdlib/@tests/stubtest_allowlists/darwin-py314.txt b/stdlib/@tests/stubtest_allowlists/darwin-py314.txt new file mode 100644 index 000000000000..31c60195d9b9 --- /dev/null +++ b/stdlib/@tests/stubtest_allowlists/darwin-py314.txt @@ -0,0 +1,23 @@ +# ==================================================================== +# TODO: New errors in Python 3.14 that need to be fixed or moved below +# ==================================================================== + +_curses.assume_default_colors +_posixsubprocess.fork_exec +asyncio.tools +asyncio.unix_events.__all__ +asyncio.unix_events.DefaultEventLoopPolicy +asyncio.unix_events._DefaultEventLoopPolicy +ctypes.c_double_complex._type_ +ctypes.c_float_complex._type_ +ctypes.c_longdouble_complex._type_ +multiprocessing.popen_fork.Popen.interrupt +multiprocessing.reduction.ACKNOWLEDGE +posix.readinto + + +# ======= +# >= 3.13 +# ======= + +(mmap.MAP_32BIT)? # Exists locally on MacOS but not on GitHub diff --git a/stdlib/@tests/stubtest_allowlists/linux-py314.txt b/stdlib/@tests/stubtest_allowlists/linux-py314.txt new file mode 100644 index 000000000000..c6e1bc3bda5f --- /dev/null +++ b/stdlib/@tests/stubtest_allowlists/linux-py314.txt @@ -0,0 +1,36 @@ +# ==================================================================== +# TODO: New errors in Python 3.14 that need to be fixed or moved below +# ==================================================================== + +_curses.assume_default_colors +_posixsubprocess.fork_exec +_socket.CAN_RAW_ERR_FILTER +_socket.IPV6_RECVERR +_socket.IP_FREEBIND +_socket.IP_RECVERR +_socket.IP_RECVORIGDSTADDR +_socket.SO_ORIGINAL_DST +_socket.VMADDR_CID_LOCAL +asyncio.tools +asyncio.unix_events.__all__ +asyncio.unix_events.DefaultEventLoopPolicy +asyncio.unix_events._DefaultEventLoopPolicy +ctypes.c_double_complex._type_ +ctypes.c_float_complex._type_ +ctypes.c_longdouble_complex._type_ +errno.EHWPOISON +multiprocessing.popen_fork.Popen.interrupt +multiprocessing.reduction.ACKNOWLEDGE +os.SCHED_DEADLINE +os.SCHED_NORMAL +posix.SCHED_DEADLINE +posix.SCHED_NORMAL +posix.readinto +select.EPOLLWAKEUP +socket.CAN_RAW_ERR_FILTER +socket.IPV6_RECVERR +socket.IP_FREEBIND +socket.IP_RECVERR +socket.IP_RECVORIGDSTADDR +socket.SO_ORIGINAL_DST +socket.VMADDR_CID_LOCAL diff --git a/stdlib/@tests/stubtest_allowlists/py310.txt b/stdlib/@tests/stubtest_allowlists/py310.txt index 73bc3da9eb6e..cb686b140098 100644 --- a/stdlib/@tests/stubtest_allowlists/py310.txt +++ b/stdlib/@tests/stubtest_allowlists/py310.txt @@ -150,6 +150,32 @@ tkinter.tix.TclVersion tkinter.tix.TkVersion +# ======= +# <= 3.13 +# ======= + +# Pretend typing.ByteString is a Union, to better match its documented semantics. 
+# As a side effect, this changes the definition of collections.abc.ByteString, which is okay, +# because it's not an ABC that makes any sense and was deprecated in 3.12 +_collections_abc.ByteString + +ast.Bytes.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Ellipsis.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.NameConstant.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Num.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Str.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs + +_?hashlib.scrypt # Raises TypeError if salt, n, r or p are None + +importlib.abc.Traversable.open # Problematic protocol signature at runtime, see source code comments. + +typing\.ByteString + +# Will always raise. Not included to avoid type checkers inferring that +# TypeAliasType instances are callable. +typing_extensions.TypeAliasType.__call__ + + # ============================================================= # Allowlist entries that cannot or should not be fixed; >= 3.10 # ============================================================= diff --git a/stdlib/@tests/stubtest_allowlists/py311.txt b/stdlib/@tests/stubtest_allowlists/py311.txt index 655e603c06d1..80b55805b3e6 100644 --- a/stdlib/@tests/stubtest_allowlists/py311.txt +++ b/stdlib/@tests/stubtest_allowlists/py311.txt @@ -117,6 +117,32 @@ tkinter.tix.TclVersion tkinter.tix.TkVersion +# ======= +# <= 3.13 +# ======= + +# Pretend typing.ByteString is a Union, to better match its documented semantics. +# As a side effect, this changes the definition of collections.abc.ByteString, which is okay, +# because it's not an ABC that makes any sense and was deprecated in 3.12 +_collections_abc.ByteString + +ast.Bytes.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Ellipsis.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.NameConstant.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Num.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Str.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs + +_?hashlib.scrypt # Raises TypeError if salt, n, r or p are None + +importlib.abc.Traversable.open # Problematic protocol signature at runtime, see source code comments. + +typing\.ByteString + +# Will always raise. Not included to avoid type checkers inferring that +# TypeAliasType instances are callable. +typing_extensions.TypeAliasType.__call__ + + # ============================================================= # Allowlist entries that cannot or should not be fixed; >= 3.11 # ============================================================= diff --git a/stdlib/@tests/stubtest_allowlists/py312.txt b/stdlib/@tests/stubtest_allowlists/py312.txt index 3222fed0d8aa..6b225d0594bd 100644 --- a/stdlib/@tests/stubtest_allowlists/py312.txt +++ b/stdlib/@tests/stubtest_allowlists/py312.txt @@ -100,6 +100,32 @@ tkinter.tix.TclVersion tkinter.tix.TkVersion +# ======= +# <= 3.13 +# ======= + +# Pretend typing.ByteString is a Union, to better match its documented semantics. 
+# As a side effect, this changes the definition of collections.abc.ByteString, which is okay, +# because it's not an ABC that makes any sense and was deprecated in 3.12 +_collections_abc.ByteString + +ast.Bytes.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Ellipsis.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.NameConstant.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Num.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Str.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs + +_?hashlib.scrypt # Raises TypeError if salt, n, r or p are None + +importlib.abc.Traversable.open # Problematic protocol signature at runtime, see source code comments. + +typing\.ByteString + +# Will always raise. Not included to avoid type checkers inferring that +# TypeAliasType instances are callable. +typing_extensions.TypeAliasType.__call__ + + # ============================================================= # Allowlist entries that cannot or should not be fixed; >= 3.12 # ============================================================= diff --git a/stdlib/@tests/stubtest_allowlists/py313.txt b/stdlib/@tests/stubtest_allowlists/py313.txt index ba44606f9179..9ef94b218899 100644 --- a/stdlib/@tests/stubtest_allowlists/py313.txt +++ b/stdlib/@tests/stubtest_allowlists/py313.txt @@ -61,6 +61,32 @@ typing(_extensions)?\.IO\.write typing(_extensions)?\.IO\.writelines +# ======= +# <= 3.13 +# ======= + +# Pretend typing.ByteString is a Union, to better match its documented semantics. +# As a side effect, this changes the definition of collections.abc.ByteString, which is okay, +# because it's not an ABC that makes any sense and was deprecated in 3.12 +_collections_abc.ByteString + +ast.Bytes.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Ellipsis.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.NameConstant.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Num.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Str.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs + +_?hashlib.scrypt # Raises TypeError if salt, n, r or p are None + +importlib.abc.Traversable.open # Problematic protocol signature at runtime, see source code comments. + +typing\.ByteString + +# Will always raise. Not included to avoid type checkers inferring that +# TypeAliasType instances are callable. 
+typing_extensions.TypeAliasType.__call__ + + # ============================================================= # Allowlist entries that cannot or should not be fixed; >= 3.13 # ============================================================= diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt new file mode 100644 index 000000000000..5c6f2fd01493 --- /dev/null +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -0,0 +1,798 @@ +# ==================================================================== +# TODO: New errors in Python 3.14 that need to be fixed or moved below +# ==================================================================== + +_ast.Add.__annotations_cache__ +_ast.And.__annotations_cache__ +_ast.AnnAssign.__annotations_cache__ +_ast.Assert.__annotations_cache__ +_ast.Assign.__annotations_cache__ +_ast.AsyncFor.__annotations_cache__ +_ast.AsyncFunctionDef.__annotations_cache__ +_ast.AsyncWith.__annotations_cache__ +_ast.Attribute.__annotations_cache__ +_ast.AugAssign.__annotations_cache__ +_ast.Await.__annotations_cache__ +_ast.BinOp.__annotations_cache__ +_ast.BitAnd.__annotations_cache__ +_ast.BitOr.__annotations_cache__ +_ast.BitXor.__annotations_cache__ +_ast.BoolOp.__annotations_cache__ +_ast.Break.__annotations_cache__ +_ast.Call.__annotations_cache__ +_ast.ClassDef.__annotations_cache__ +_ast.Compare.__annotations_cache__ +_ast.Constant.__annotations_cache__ +_ast.Continue.__annotations_cache__ +_ast.Del.__annotations_cache__ +_ast.Delete.__annotations_cache__ +_ast.Dict.__annotations_cache__ +_ast.DictComp.__annotations_cache__ +_ast.Div.__annotations_cache__ +_ast.Eq.__annotations_cache__ +_ast.ExceptHandler.__annotations_cache__ +_ast.Expr.__annotations_cache__ +_ast.Expression.__annotations_cache__ +_ast.FloorDiv.__annotations_cache__ +_ast.For.__annotations_cache__ +_ast.FormattedValue.__annotations_cache__ +_ast.FunctionDef.__annotations_cache__ +_ast.FunctionType.__annotations_cache__ +_ast.GeneratorExp.__annotations_cache__ +_ast.Global.__annotations_cache__ +_ast.Gt.__annotations_cache__ +_ast.GtE.__annotations_cache__ +_ast.If.__annotations_cache__ +_ast.IfExp.__annotations_cache__ +_ast.Import.__annotations_cache__ +_ast.ImportFrom.__annotations_cache__ +_ast.In.__annotations_cache__ +_ast.Interactive.__annotations_cache__ +_ast.Invert.__annotations_cache__ +_ast.Is.__annotations_cache__ +_ast.IsNot.__annotations_cache__ +_ast.JoinedStr.__annotations_cache__ +_ast.LShift.__annotations_cache__ +_ast.Lambda.__annotations_cache__ +_ast.List.__annotations_cache__ +_ast.ListComp.__annotations_cache__ +_ast.Load.__annotations_cache__ +_ast.Lt.__annotations_cache__ +_ast.LtE.__annotations_cache__ +_ast.MatMult.__annotations_cache__ +_ast.Match.__annotations_cache__ +_ast.MatchAs.__annotations_cache__ +_ast.MatchClass.__annotations_cache__ +_ast.MatchMapping.__annotations_cache__ +_ast.MatchOr.__annotations_cache__ +_ast.MatchSequence.__annotations_cache__ +_ast.MatchSingleton.__annotations_cache__ +_ast.MatchStar.__annotations_cache__ +_ast.MatchValue.__annotations_cache__ +_ast.Mod.__annotations_cache__ +_ast.Module.__annotations_cache__ +_ast.Mult.__annotations_cache__ +_ast.Name.__annotations_cache__ +_ast.NamedExpr.__annotations_cache__ +_ast.Nonlocal.__annotations_cache__ +_ast.Not.__annotations_cache__ +_ast.NotEq.__annotations_cache__ +_ast.NotIn.__annotations_cache__ +_ast.Or.__annotations_cache__ +_ast.ParamSpec.__annotations_cache__ +_ast.Pass.__annotations_cache__ +_ast.Pow.__annotations_cache__ 
+_ast.RShift.__annotations_cache__ +_ast.Raise.__annotations_cache__ +_ast.Return.__annotations_cache__ +_ast.Set.__annotations_cache__ +_ast.SetComp.__annotations_cache__ +_ast.Slice.__annotations_cache__ +_ast.Starred.__annotations_cache__ +_ast.Store.__annotations_cache__ +_ast.Sub.__annotations_cache__ +_ast.Subscript.__annotations_cache__ +_ast.Try.__annotations_cache__ +_ast.TryStar.__annotations_cache__ +_ast.Tuple.__annotations_cache__ +_ast.TypeAlias.__annotations_cache__ +_ast.TypeIgnore.__annotations_cache__ +_ast.TypeVar.__annotations_cache__ +_ast.TypeVarTuple.__annotations_cache__ +_ast.UAdd.__annotations_cache__ +_ast.USub.__annotations_cache__ +_ast.UnaryOp.__annotations_cache__ +_ast.While.__annotations_cache__ +_ast.With.__annotations_cache__ +_ast.Yield.__annotations_cache__ +_ast.YieldFrom.__annotations_cache__ +_ast.alias.__annotations_cache__ +_ast.arg.__annotations_cache__ +_ast.arguments.__annotations_cache__ +_ast.comprehension.__annotations_cache__ +_ast.keyword.__annotations_cache__ +_ast.match_case.__annotations_cache__ +_ast.withitem.__annotations_cache__ +_asyncio.all_tasks +_asyncio.future_add_to_awaited_by +_asyncio.future_discard_from_awaited_by +_compression +_contextvars.Token.__enter__ +_contextvars.Token.__exit__ +_ctypes.POINTER +_ctypes.byref +_ctypes.byref +_ctypes.pointer +_decimal.Decimal.from_number +_decimal.IEEEContext +_decimal.IEEE_CONTEXT_MAX_BITS +_heapq.heapify_max +_heapq.heappop_max +_heapq.heappush_max +_heapq.heappushpop_max +_heapq.heapreplace_max +_imp.pyc_magic_number_token +_socket.IP_RECVTTL +_socket.if_indextoname +_ssl.HAS_PHA +_thread.RLock.locked +_thread.set_name +annotationlib +argparse.HelpFormatter.__init__ +argparse.HelpFormatter.__init__ +ast.Add.__annotations_cache__ +ast.And.__annotations_cache__ +ast.AnnAssign.__annotations_cache__ +ast.Assert.__annotations_cache__ +ast.Assign.__annotations_cache__ +ast.AsyncFor.__annotations_cache__ +ast.AsyncFunctionDef.__annotations_cache__ +ast.AsyncWith.__annotations_cache__ +ast.Attribute.__annotations_cache__ +ast.AugAssign.__annotations_cache__ +ast.Await.__annotations_cache__ +ast.BinOp.__annotations_cache__ +ast.BitAnd.__annotations_cache__ +ast.BitOr.__annotations_cache__ +ast.BitXor.__annotations_cache__ +ast.BoolOp.__annotations_cache__ +ast.Break.__annotations_cache__ +ast.Call.__annotations_cache__ +ast.ClassDef.__annotations_cache__ +ast.Compare.__annotations_cache__ +ast.Constant.__annotations_cache__ +ast.Continue.__annotations_cache__ +ast.Del.__annotations_cache__ +ast.Delete.__annotations_cache__ +ast.Dict.__annotations_cache__ +ast.DictComp.__annotations_cache__ +ast.Div.__annotations_cache__ +ast.Eq.__annotations_cache__ +ast.ExceptHandler.__annotations_cache__ +ast.Expr.__annotations_cache__ +ast.Expression.__annotations_cache__ +ast.FloorDiv.__annotations_cache__ +ast.For.__annotations_cache__ +ast.FormattedValue.__annotations_cache__ +ast.FunctionDef.__annotations_cache__ +ast.FunctionType.__annotations_cache__ +ast.GeneratorExp.__annotations_cache__ +ast.Global.__annotations_cache__ +ast.Gt.__annotations_cache__ +ast.GtE.__annotations_cache__ +ast.If.__annotations_cache__ +ast.IfExp.__annotations_cache__ +ast.Import.__annotations_cache__ +ast.ImportFrom.__annotations_cache__ +ast.In.__annotations_cache__ +ast.Interactive.__annotations_cache__ +ast.Interpolation +ast.Invert.__annotations_cache__ +ast.Is.__annotations_cache__ +ast.IsNot.__annotations_cache__ +ast.JoinedStr.__annotations_cache__ +ast.LShift.__annotations_cache__ +ast.Lambda.__annotations_cache__ 
+ast.List.__annotations_cache__ +ast.ListComp.__annotations_cache__ +ast.Load.__annotations_cache__ +ast.Lt.__annotations_cache__ +ast.LtE.__annotations_cache__ +ast.MatMult.__annotations_cache__ +ast.Match.__annotations_cache__ +ast.MatchAs.__annotations_cache__ +ast.MatchClass.__annotations_cache__ +ast.MatchMapping.__annotations_cache__ +ast.MatchOr.__annotations_cache__ +ast.MatchSequence.__annotations_cache__ +ast.MatchSingleton.__annotations_cache__ +ast.MatchStar.__annotations_cache__ +ast.MatchValue.__annotations_cache__ +ast.Mod.__annotations_cache__ +ast.Module.__annotations_cache__ +ast.Mult.__annotations_cache__ +ast.Name.__annotations_cache__ +ast.NamedExpr.__annotations_cache__ +ast.Nonlocal.__annotations_cache__ +ast.Not.__annotations_cache__ +ast.NotEq.__annotations_cache__ +ast.NotIn.__annotations_cache__ +ast.Or.__annotations_cache__ +ast.ParamSpec.__annotations_cache__ +ast.Pass.__annotations_cache__ +ast.Pow.__annotations_cache__ +ast.RShift.__annotations_cache__ +ast.Raise.__annotations_cache__ +ast.Return.__annotations_cache__ +ast.Set.__annotations_cache__ +ast.SetComp.__annotations_cache__ +ast.Slice.__annotations_cache__ +ast.Starred.__annotations_cache__ +ast.Store.__annotations_cache__ +ast.Sub.__annotations_cache__ +ast.Subscript.__annotations_cache__ +ast.TemplateStr +ast.Try.__annotations_cache__ +ast.TryStar.__annotations_cache__ +ast.Tuple.__annotations_cache__ +ast.TypeAlias.__annotations_cache__ +ast.TypeIgnore.__annotations_cache__ +ast.TypeVar.__annotations_cache__ +ast.TypeVarTuple.__annotations_cache__ +ast.UAdd.__annotations_cache__ +ast.USub.__annotations_cache__ +ast.UnaryOp.__annotations_cache__ +ast.While.__annotations_cache__ +ast.With.__annotations_cache__ +ast.Yield.__annotations_cache__ +ast.YieldFrom.__annotations_cache__ +ast.alias.__annotations_cache__ +ast.arg.__annotations_cache__ +ast.arguments.__annotations_cache__ +ast.comprehension.__annotations_cache__ +ast.keyword.__annotations_cache__ +ast.main +ast.match_case.__annotations_cache__ +ast.withitem.__annotations_cache__ +asyncio.__all__ +asyncio.FrameCallGraphEntry +asyncio.FutureCallGraph +asyncio._AbstractEventLoopPolicy +asyncio._DefaultEventLoopPolicy +asyncio.__all__ +asyncio._get_event_loop_policy +asyncio._set_event_loop_policy +asyncio.capture_call_graph +asyncio.eager_task_factory +asyncio.eager_task_factory +asyncio.eager_task_factory +asyncio.eager_task_factory +asyncio.format_call_graph +asyncio.future_add_to_awaited_by +asyncio.future_discard_from_awaited_by +asyncio.print_call_graph +asyncio.events.__all__ +asyncio.events.AbstractEventLoopPolicy +asyncio.events.BaseDefaultEventLoopPolicy +asyncio.events._AbstractEventLoopPolicy +asyncio.events.__all__ +asyncio.events._get_event_loop_policy +asyncio.events._set_event_loop_policy +asyncio.futures.__all__ +asyncio.futures.__all__ +asyncio.futures.future_add_to_awaited_by +asyncio.futures.future_discard_from_awaited_by +asyncio.graph +asyncio.tasks.eager_task_factory +asyncio.tasks.eager_task_factory +asyncio.tasks.eager_task_factory +asyncio.tasks.eager_task_factory +bdb.Bdb.__init__ +bdb.Bdb.disable_current_event +bdb.Bdb.restart_events +bdb.Bdb.start_trace +bdb.Bdb.stop_trace +builtins.BaseExceptionGroup.split +builtins.BaseExceptionGroup.subgroup +builtins.ExceptionGroup.split +builtins.ExceptionGroup.subgroup +builtins.bytearray.resize +builtins.classmethod.__annotate__ +builtins.classmethod.__class_getitem__ +builtins.complex.from_number +builtins.float.from_number +builtins.int.__round__ 
+builtins.memoryview.__class_getitem__ +builtins.staticmethod.__annotate__ +builtins.staticmethod.__class_getitem__ +builtins.type.__annotate__ +code.compile_command +codeop.compile_command +compression +compression.bz2 +compression.gzip +compression.lzma +compression.zlib +compression.zstd +concurrent.futures.__all__ +concurrent.futures.Executor.map +concurrent.futures.InterpreterPoolExecutor +concurrent.futures.ProcessPoolExecutor.kill_workers +concurrent.futures.ProcessPoolExecutor.map +concurrent.futures.ProcessPoolExecutor.terminate_workers +concurrent.futures.ThreadPoolExecutor.BROKEN +concurrent.futures.ThreadPoolExecutor.prepare_context +concurrent.futures.__all__ +concurrent.futures._base.Executor.map +concurrent.futures.interpreter +concurrent.futures.process.ProcessPoolExecutor.kill_workers +concurrent.futures.process.ProcessPoolExecutor.map +concurrent.futures.process.ProcessPoolExecutor.terminate_workers +concurrent.futures.thread.ThreadPoolExecutor.BROKEN +concurrent.futures.thread.ThreadPoolExecutor.prepare_context +concurrent.futures.thread.WorkerContext +concurrent.futures.thread._WorkItem.__init__ +concurrent.futures.thread._WorkItem.__init__ +concurrent.futures.thread._WorkItem.__init__ +concurrent.futures.thread._WorkItem.run +concurrent.futures.thread._worker +concurrent.futures.thread._worker +concurrent.futures.thread._worker +configparser.__all__ +configparser.InvalidWriteError +configparser.UnnamedSectionDisabledError +configparser.__all__ +contextvars.Token.__enter__ +contextvars.Token.__exit__ +ctypes.POINTER +ctypes.POINTER +ctypes.byref +ctypes.byref +ctypes.memoryview_at +ctypes.pointer +ctypes.py_object.__class_getitem__ +ctypes.util.dllist +ctypes.wintypes.HCONV +ctypes.wintypes.HCONVLIST +ctypes.wintypes.HCURSOR +ctypes.wintypes.HDDEDATA +ctypes.wintypes.HDROP +ctypes.wintypes.HFILE +ctypes.wintypes.HRESULT +ctypes.wintypes.HSZ +dataclasses.Field.__init__ +dataclasses.Field.doc +dataclasses.field +dataclasses.make_dataclass +datetime.date.strptime +datetime.time.strptime +decimal.Decimal.from_number +decimal.DecimalTuple.__annotate_func__ +decimal.DecimalTuple.__annotations_cache__ +decimal.IEEE_CONTEXT_MAX_BITS +dis.Bytecode.__init__ +dis.Instruction.make +dis.dis +dis.disassemble +dis.distb +enum.Enum.__signature__ +enum.EnumMeta.__signature__ +enum.EnumType.__signature__ +faulthandler.dump_c_stack +fnmatch.__all__ +fnmatch.filterfalse +fractions.Fraction.__pow__ +fractions.Fraction.__rpow__ +fractions.Fraction.from_number +functools.__all__ +functools.Placeholder +functools.WRAPPER_ASSIGNMENTS +functools.partialmethod.__new__ +functools.partialmethod.__new__ +functools.partialmethod.__new__ +functools.reduce +functools.reduce +functools.update_wrapper +functools.wraps +getpass.getpass +gzip.GzipFile.readinto +gzip.GzipFile.readinto +gzip.GzipFile.readinto1 +gzip.GzipFile.readinto1 +gzip.compress +http.server.__all__ +http.server.HTTPSServer +http.server.ThreadingHTTPSServer +imaplib.IMAP4.file +imaplib.IMAP4.idle +imaplib.IMAP4_SSL.file +imaplib.IMAP4_stream.file +importlib.abc.ResourceReader +importlib.abc.Traversable +importlib.abc.TraversableResources +importlib.machinery.__all__ +importlib.machinery.AppleFrameworkLoader +importlib.metadata.PackageMetadata.__annotate_func__ +importlib.metadata.PackageMetadata.__annotations_cache__ +importlib.metadata._meta.PackageMetadata.__annotate_func__ +importlib.metadata._meta.PackageMetadata.__annotations_cache__ +importlib.metadata._meta.SimplePath.__annotate_func__ 
+importlib.metadata._meta.SimplePath.__annotations_cache__ +importlib.resources.abc.Traversable.__annotate_func__ +importlib.resources.abc.Traversable.__annotations_cache__ +importlib.util.__all__ +importlib.util.Loader +inspect.__all__ +inspect.CO_HAS_DOCSTRING +inspect.CO_METHOD +inspect.Signature.format +inspect.Signature.from_callable +inspect.formatannotation +inspect.get_annotations +inspect.ispackage +inspect.signature +io.__all__ +io.Reader +io.Writer +ipaddress._IPAddressBase.version +logging.handlers.QueueListener.__enter__ +logging.handlers.QueueListener.__exit__ +logging.handlers.SysLogHandler.__init__ +marshal.dump +marshal.dumps +multiprocessing.forkserver.main +multiprocessing.managers.BaseListProxy.clear +multiprocessing.managers.BaseListProxy.copy +multiprocessing.managers.DictProxy.__ior__ +multiprocessing.managers.SyncManager.set +multiprocessing.managers._BaseDictProxy.__ior__ +multiprocessing.managers._BaseDictProxy.__or__ +multiprocessing.managers._BaseDictProxy.__reversed__ +multiprocessing.managers._BaseDictProxy.__ror__ +multiprocessing.managers._BaseDictProxy.fromkeys +multiprocessing.process.BaseProcess.interrupt +multiprocessing.synchronize.SemLock.locked +os.__all__ +os.readinto +pathlib.Path.copy_into +pathlib.Path.copytree +pathlib.Path.delete +pathlib.Path.info +pathlib.Path.move +pathlib.Path.move_into +pathlib.Path.rmtree +pathlib.PurePath.is_relative_to +pathlib.PurePath.is_relative_to +pathlib.PurePath.relative_to +pathlib.PurePath.relative_to +pathlib.types +pdb.__all__ +pdb.Pdb.__init__ +pdb.Pdb.__init__ +pdb.Pdb.__init__ +pdb.Pdb.checkline +pdb.Pdb.complete_multiline_names +pdb.Pdb.print_stack_trace +pdb.Pdb.set_trace +pdb.Pdb.set_trace_async +pdb.get_default_backend +pdb.set_default_backend +pdb.set_trace +pkgutil.__all__ +pkgutil.find_loader +pkgutil.get_loader +platform.invalidate_caches +pstats.FunctionProfile.__annotate_func__ +pstats.FunctionProfile.__annotations_cache__ +pstats.StatsProfile.__annotate_func__ +pstats.StatsProfile.__annotations_cache__ +pyexpat.errors.XML_ERROR_NOT_STARTED +shutil.__all__ +socket.__all__ +socket.IP_RECVTTL +socket.if_indextoname +sre_compile.CH_NEGATE +sre_constants.CH_NEGATE +sre_parse.CH_NEGATE +string.Template.flags +string.templatelib +sys.is_remote_debug_enabled +sys.remote_exec +tarfile.TarFile.zstopen +threading.Thread.__init__ +threading._RLock.locked +tkinter.Event.__class_getitem__ +token.__all__ +token.TSTRING_END +token.TSTRING_MIDDLE +token.TSTRING_START +tokenize.__all__ +tokenize.TSTRING_END +tokenize.TSTRING_MIDDLE +tokenize.TSTRING_START +tomllib.TOMLDecodeError.__init__ +tomllib.TOMLDecodeError.__init__ +tomllib.TOMLDecodeError.__init__ +traceback.__all__ +turtle.__all__ +turtle.RawTurtle.fill +turtle.RawTurtle.poly +turtle.TurtleScreen.no_animation +turtle.TurtleScreen.save +turtle.fill +turtle.no_animation +turtle.poly +turtle.save +types.CodeType.co_branches +types.FrameType.f_generator +types.FunctionType.__annotate__ +types.LambdaType.__annotate__ +types.ModuleType.__annotate__ +types.UnionType.__class_getitem__ +types.UnionType.__mro_entries__ +types.UnionType.__name__ +types.UnionType.__qualname__ +typing.__all__ +typing.ForwardRef.__arg__ +typing.ForwardRef.__ast_node__ +typing.ForwardRef.__cell__ +typing.ForwardRef.__code__ +typing.ForwardRef.__extra_names__ +typing.ForwardRef.__forward_arg__ +typing.ForwardRef.__forward_code__ +typing.ForwardRef.__globals__ +typing.ForwardRef.__init__ +typing.ForwardRef.__init__ +typing.ForwardRef.__init__ +typing.ForwardRef.__init_subclass__ 
+typing.ForwardRef.__owner__ +typing.ForwardRef.__stringifier_dict__ +typing.ForwardRef.evaluate +typing.ParamSpec.evaluate_default +typing.SupportsAbs.__annotate_func__ +typing.SupportsAbs.__annotations_cache__ +typing.SupportsBytes.__annotate_func__ +typing.SupportsBytes.__annotations_cache__ +typing.SupportsComplex.__annotate_func__ +typing.SupportsComplex.__annotations_cache__ +typing.SupportsFloat.__annotate_func__ +typing.SupportsFloat.__annotations_cache__ +typing.SupportsIndex.__annotate_func__ +typing.SupportsIndex.__annotations_cache__ +typing.SupportsInt.__annotate_func__ +typing.SupportsInt.__annotations_cache__ +typing.SupportsRound.__annotate_func__ +typing.SupportsRound.__annotations_cache__ +typing.TypeAliasType.evaluate_value +typing.TypeVar.evaluate_bound +typing.TypeVar.evaluate_constraints +typing.TypeVar.evaluate_default +typing.TypeVarTuple.evaluate_default +typing.Union +typing.evaluate_forward_ref +typing.get_type_hints +typing_extensions.ForwardRef.__arg__ +typing_extensions.ForwardRef.__ast_node__ +typing_extensions.ForwardRef.__cell__ +typing_extensions.ForwardRef.__code__ +typing_extensions.ForwardRef.__extra_names__ +typing_extensions.ForwardRef.__forward_arg__ +typing_extensions.ForwardRef.__forward_code__ +typing_extensions.ForwardRef.__globals__ +typing_extensions.ForwardRef.__init__ +typing_extensions.ForwardRef.__init__ +typing_extensions.ForwardRef.__init__ +typing_extensions.ForwardRef.__init_subclass__ +typing_extensions.ForwardRef.__owner__ +typing_extensions.ForwardRef.__stringifier_dict__ +typing_extensions.ForwardRef.evaluate +typing_extensions.SupportsAbs.__annotate_func__ +typing_extensions.SupportsAbs.__annotations_cache__ +typing_extensions.SupportsBytes.__annotate_func__ +typing_extensions.SupportsBytes.__annotations_cache__ +typing_extensions.SupportsComplex.__annotate_func__ +typing_extensions.SupportsComplex.__annotations_cache__ +typing_extensions.SupportsFloat.__annotate_func__ +typing_extensions.SupportsFloat.__annotations_cache__ +typing_extensions.SupportsIndex.__annotate_func__ +typing_extensions.SupportsIndex.__annotations_cache__ +typing_extensions.SupportsInt.__annotate_func__ +typing_extensions.SupportsInt.__annotations_cache__ +typing_extensions.SupportsRound.__annotate_func__ +typing_extensions.SupportsRound.__annotations_cache__ +typing_extensions.TypeAliasType.evaluate_value +typing_extensions.Union +typing_extensions.evaluate_forward_ref +typing_extensions.get_type_hints +unittest.TestCase.assertEndsWith +unittest.TestCase.assertHasAttr +unittest.TestCase.assertIsSubclass +unittest.TestCase.assertNotEndsWith +unittest.TestCase.assertNotHasAttr +unittest.TestCase.assertNotIsSubclass +unittest.TestCase.assertNotStartsWith +unittest.TestCase.assertStartsWith +unittest.case.TestCase.assertEndsWith +unittest.case.TestCase.assertHasAttr +unittest.case.TestCase.assertIsSubclass +unittest.case.TestCase.assertNotEndsWith +unittest.case.TestCase.assertNotHasAttr +unittest.case.TestCase.assertNotIsSubclass +unittest.case.TestCase.assertNotStartsWith +unittest.case.TestCase.assertStartsWith +urllib.request.__all__ +urllib.request.FancyURLopener +urllib.request.URLopener +urllib.request.pathname2url +urllib.request.url2pathname +urllib.request.url2pathname +urllib.request.url2pathname +uuid.MAX +uuid.NIL +uuid.uuid6 +uuid.uuid7 +uuid.uuid8 +wsgiref.types.ErrorStream.__annotate_func__ +wsgiref.types.ErrorStream.__annotations_cache__ +wsgiref.types.FileWrapper.__annotate_func__ +wsgiref.types.FileWrapper.__annotations_cache__ 
+wsgiref.types.InputStream.__annotate_func__ +wsgiref.types.InputStream.__annotations_cache__ +wsgiref.types.StartResponse.__annotate_func__ +wsgiref.types.StartResponse.__annotations_cache__ +wsgiref.types._Readable.__annotate_func__ +wsgiref.types._Readable.__annotations_cache__ +xml.parsers.expat.errors.XML_ERROR_NOT_STARTED +xml.sax.__all__ +xml.sax.InputSource +zipfile.ZipFile.data_offset +zipfile._path.glob.Translator.__annotate_func__ + + +# ========================= +# New errors in Python 3.14 +# ========================= + + +# ==================================== +# Pre-existing errors from Python 3.13 +# ==================================== + + +# ======= +# >= 3.12 +# ======= + +zoneinfo.ZoneInfo.from_file # Pos-only parameters had different "names" in different Python versions + + +# ======= +# >= 3.11 +# ======= + +typing.NewType.__mro_entries__ + + +# ======= +# >= 3.10 +# ======= + +builtins.ellipsis # type is not exposed anywhere +importlib._abc.Loader.exec_module # See Lib/importlib/_abc.py. Might be defined for backwards compatibility + +# positional-only complaints caused by differences between typing aliases and the "real" classes in the stdlib +_collections_abc.Coroutine.send +_collections_abc.Coroutine.throw +_collections_abc.Generator.send +_collections_abc.Generator.throw + +# These are not positional-only at runtime, but we treat them as positional-only to match dict. +_collections_abc.MutableMapping.pop +_collections_abc.MutableMapping.setdefault + +# These three have a pos-or-keyword first parameter at runtime, but deliberately have a pos-only first parameter in the stub. #6812 +posixpath.join +ntpath.join +os.path.join + +# typing.IO uses positional-or-keyword arguments, but in the stubs we prefer +# to mark these as positional-only for compatibility with existing sub-classes. 
+typing(_extensions)?\.BinaryIO\.write +typing(_extensions)?\.IO\.read +typing(_extensions)?\.IO\.readline +typing(_extensions)?\.IO\.readlines +typing(_extensions)?\.IO\.seek +typing(_extensions)?\.IO\.truncate +typing(_extensions)?\.IO\.write +typing(_extensions)?\.IO\.writelines + + +# ============================================================= +# Allowlist entries that cannot or should not be fixed; >= 3.13 +# ============================================================= + +_pyrepl\..+ # The internal implementation of the REPL on py313+; not for public consumption +codecs.backslashreplace_errors # Runtime incorrectly has `self` +codecs.ignore_errors # Runtime incorrectly has `self` +codecs.namereplace_errors # Runtime incorrectly has `self` +codecs.replace_errors # Runtime incorrectly has `self` +codecs.strict_errors # Runtime incorrectly has `self` +codecs.xmlcharrefreplace_errors # Runtime incorrectly has `self` + +# These multiprocessing proxy methods have *args, **kwargs signatures at runtime, +# But have more precise (accurate) signatures in the stub +multiprocessing.managers._BaseDictProxy.__iter__ +multiprocessing.managers._BaseDictProxy.__len__ +multiprocessing.managers._BaseDictProxy.clear +multiprocessing.managers._BaseDictProxy.copy +multiprocessing.managers._BaseDictProxy.items +multiprocessing.managers._BaseDictProxy.keys +multiprocessing.managers._BaseDictProxy.popitem +multiprocessing.managers._BaseDictProxy.values + +# To match `dict`, we lie about the runtime, but use overloads to match the correct behavior +types.MappingProxyType.get + +typing_extensions.Protocol # Super-special typing primitive + + +# ============================================================= +# Allowlist entries that cannot or should not be fixed; >= 3.12 +# ============================================================= + +# Runtime AST node runtime constructor behaviour is too loose. +# For static typing, the loose behaviour is undesirable (https://github.com/python/typeshed/issues/8378). +# For the runtime, the loose behaviour is deprecated in Python 3.13 (https://github.com/python/cpython/issues/105858) +_?ast.type_param.__init__ + +# Deprecation wrapper classes; their methods are just pass-through, so we can ignore them. +importlib.metadata.DeprecatedNonAbstract.__new__ + +# Deprecated argument is supported at runtime by renaming it through a decorator. +importlib.resources._common.files +importlib.resources.files + +sys._monitoring # Doesn't really exist. See comments in the stub. 
+sys.last_exc # not always defined + +# These only exist to give a better error message if you try to subclass an instance +typing.ParamSpec.__mro_entries__ +typing.ParamSpecArgs.__mro_entries__ +typing.ParamSpecKwargs.__mro_entries__ +typing.TypeVar.__mro_entries__ +typing.TypeVarTuple.__mro_entries__ + +# These exist at runtime because the protocol uses PEP-695 syntax in CPython +typing.SupportsAbs.__type_params__ +typing.SupportsRound.__type_params__ +typing_extensions.SupportsAbs.__type_params__ +typing_extensions.SupportsRound.__type_params__ + + +# ============================================================= +# Allowlist entries that cannot or should not be fixed; >= 3.11 +# ============================================================= + +enum.auto.__init__ # The stub for enum.auto is nothing like the implementation +enum.auto.value # The stub for enum.auto is nothing like the implementation +http.HTTPMethod.description # mutable instance attribute at runtime but we pretend it's a property +importlib.resources.abc.Traversable.open # Problematic protocol signature at runtime, see source code comments. +inspect._ParameterKind.description # Still exists, but stubtest can't see it +typing\._SpecialForm.* # Super-special typing primitive +typing\.LiteralString # Super-special typing primitive + + +# ============================================================= +# Allowlist entries that cannot or should not be fixed; >= 3.10 +# ============================================================= + +# Runtime AST node runtime constructor behaviour is too loose. +# For static typing, the loose behaviour is undesirable (https://github.com/python/typeshed/issues/8378). +# For the runtime, the loose behaviour is deprecated in Python 3.13 (https://github.com/python/cpython/issues/105858) +_?ast.pattern.__init__ + +_collections_abc.AsyncGenerator.athrow # async at runtime, deliberately not in the stub, see #7491. Pos-only differences also. +builtins.property.__set_name__ # Doesn't actually exist +collections\.UserList\.index # ignoring pos-or-keyword parameter +dataclasses.KW_ONLY # white lies around defaults +importlib.metadata._meta.SimplePath.joinpath # Runtime definition of protocol is incorrect diff --git a/stdlib/@tests/stubtest_allowlists/py39.txt b/stdlib/@tests/stubtest_allowlists/py39.txt index a912b6f85b5b..496eb7231453 100644 --- a/stdlib/@tests/stubtest_allowlists/py39.txt +++ b/stdlib/@tests/stubtest_allowlists/py39.txt @@ -97,6 +97,32 @@ tkinter.tix.TclVersion tkinter.tix.TkVersion +# ======= +# <= 3.13 +# ======= + +# Pretend typing.ByteString is a Union, to better match its documented semantics. 
+# As a side effect, this changes the definition of collections.abc.ByteString, which is okay, +# because it's not an ABC that makes any sense and was deprecated in 3.12 +_collections_abc.ByteString + +ast.Bytes.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Ellipsis.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.NameConstant.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Num.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs +ast.Str.__new__ # runtime is *args, **kwargs due to a wrapper, but we have more accurate signatures in the stubs + +_?hashlib.scrypt # Raises TypeError if salt, n, r or p are None + +importlib.abc.Traversable.open # Problematic protocol signature at runtime, see source code comments. + +typing\.ByteString + +# Will always raise. Not included to avoid type checkers inferring that +# TypeAliasType instances are callable. +typing_extensions.TypeAliasType.__call__ + + # ============================================================== # Allowlist entries that cannot or should not be fixed; 3.9 only # ============================================================== diff --git a/stdlib/@tests/stubtest_allowlists/win32-py314.txt b/stdlib/@tests/stubtest_allowlists/win32-py314.txt new file mode 100644 index 000000000000..9e2f612dbd39 --- /dev/null +++ b/stdlib/@tests/stubtest_allowlists/win32-py314.txt @@ -0,0 +1,59 @@ +# ==================================================================== +# TODO: New errors in Python 3.14 that need to be fixed or moved below +# ==================================================================== + +_socket.IPV6_RECVERR +_socket.IP_RECVERR +_socket.SOL_RFCOMM +_socket.SO_ORIGINAL_DST +_socket.SO_BTH_ENCRYPT +_socket.SO_BTH_MTU +_socket.SO_BTH_MTU_MAX +_socket.SO_BTH_MTU_MIN +_socket.TCP_QUICKACK +_winapi.COPY_FILE_DIRECTORY +asyncio.WindowsProactorEventLoopPolicy.get_child_watcher +asyncio.WindowsProactorEventLoopPolicy.set_child_watcher +asyncio._WindowsProactorEventLoopPolicy +asyncio._WindowsSelectorEventLoopPolicy +asyncio.windows_events.__all__ +asyncio.windows_events.DefaultEventLoopPolicy +asyncio.windows_events.WindowsProactorEventLoopPolicy +asyncio.windows_events.WindowsSelectorEventLoopPolicy +asyncio.windows_events._DefaultEventLoopPolicy +asyncio.windows_events._WindowsProactorEventLoopPolicy +asyncio.windows_events._WindowsSelectorEventLoopPolicy +ctypes.c_double_complex +ctypes.c_float_complex +ctypes.c_longdouble_complex +encodings.win32_code_page_search_function +nt.readinto +pathlib.Path.group +pathlib.Path.owner +socket.IPV6_RECVERR +socket.IP_RECVERR +socket.SOL_RFCOMM +socket.SO_ORIGINAL_DST +socket.SO_BTH_ENCRYPT +socket.SO_BTH_MTU +socket.SO_BTH_MTU_MAX +socket.SO_BTH_MTU_MIN +socket.TCP_QUICKACK +winsound.MB_ICONERROR +winsound.MB_ICONINFORMATION +winsound.MB_ICONSTOP +winsound.MB_ICONWARNING +winsound.SND_SENTRY +winsound.SND_SYNC +winsound.SND_SYSTEM +zlib.ZLIBNG_VERSION + + +# ======= +# >= 3.12 +# ======= + +# Undocumented internal method, not really for public consumption. +# (Hard to add types for unless we add stubs for the undocumented _overlapped module...) 
+asyncio.IocpProactor.finish_socket_func +asyncio.windows_events.IocpProactor.finish_socket_func diff --git a/stdlib/ast.pyi b/stdlib/ast.pyi index be7788edfd50..bced1cb45c96 100644 --- a/stdlib/ast.pyi +++ b/stdlib/ast.pyi @@ -251,6 +251,7 @@ class AsyncFunctionDef(stmt): returns: expr | None = ..., type_comment: str | None = ..., type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], ) -> Self: ... class ClassDef(stmt): diff --git a/stubs/pyflakes/pyflakes/checker.pyi b/stubs/pyflakes/pyflakes/checker.pyi index 3f6dbc726b3f..45486100fa6c 100644 --- a/stubs/pyflakes/pyflakes/checker.pyi +++ b/stubs/pyflakes/pyflakes/checker.pyi @@ -185,6 +185,11 @@ else: _TypeVarTuple: TypeAlias = Never _TypeAlias: TypeAlias = Never +if sys.version_info >= (3, 14): + _NameConstant: TypeAlias = Never +else: + _NameConstant: TypeAlias = ast.NameConstant + class Checker: nodeDepth: int offset: tuple[int, int] | None @@ -254,7 +259,7 @@ class Checker: def SET(self, tree: ast.Set, omit: _OmitType = None) -> None: ... def ATTRIBUTE(self, tree: ast.Attribute, omit: _OmitType = None) -> None: ... def STARRED(self, tree: ast.Starred, omit: _OmitType = None) -> None: ... - def NAMECONSTANT(self, tree: ast.NameConstant, omit: _OmitType = None) -> None: ... + def NAMECONSTANT(self, tree: _NameConstant, omit: _OmitType = None) -> None: ... def NAMEDEXPR(self, tree: ast.NamedExpr, omit: _OmitType = None) -> None: ... def SUBSCRIPT(self, node: ast.Subscript) -> None: ... def CALL(self, node: ast.Call) -> None: ... diff --git a/tests/mypy_test.py b/tests/mypy_test.py index 3607199431d4..fd26e05ca85e 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -44,7 +44,7 @@ print_error("Cannot import mypy. Did you install it?") sys.exit(1) -SUPPORTED_VERSIONS = ["3.13", "3.12", "3.11", "3.10", "3.9"] +SUPPORTED_VERSIONS = ["3.14", "3.13", "3.12", "3.11", "3.10", "3.9"] SUPPORTED_PLATFORMS = ("linux", "win32", "darwin") DIRECTORIES_TO_TEST = [STDLIB_PATH, STUBS_PATH] diff --git a/tests/regr_test.py b/tests/regr_test.py index 32bfb259390c..0693974a901f 100755 --- a/tests/regr_test.py +++ b/tests/regr_test.py @@ -41,7 +41,7 @@ TYPESHED = "typeshed" SUPPORTED_PLATFORMS = ["linux", "darwin", "win32"] -SUPPORTED_VERSIONS = ["3.13", "3.12", "3.11", "3.10", "3.9"] +SUPPORTED_VERSIONS = ["3.14", "3.13", "3.12", "3.11", "3.10", "3.9"] def distribution_with_test_cases(distribution_name: str) -> DistributionTests: diff --git a/tests/runtests.py b/tests/runtests.py index 5e1e9e96cc99..64346f4e58fd 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -53,7 +53,7 @@ def main() -> None: parser.add_argument( "--python-version", default=_PYTHON_VERSION, - choices=("3.9", "3.10", "3.11", "3.12", "3.13"), + choices=("3.9", "3.10", "3.11", "3.12", "3.13", "3.14"), help="Target Python version for the test (default: %(default)s).", ) parser.add_argument("path", help="Path of the stub to test in format /, from the root of the project.") diff --git a/tests/typecheck_typeshed.py b/tests/typecheck_typeshed.py index a70ddf9b4a83..90d7afcaf706 100755 --- a/tests/typecheck_typeshed.py +++ b/tests/typecheck_typeshed.py @@ -14,7 +14,7 @@ ReturnCode: TypeAlias = int SUPPORTED_PLATFORMS = ("linux", "darwin", "win32") -SUPPORTED_VERSIONS = ("3.13", "3.12", "3.11", "3.10", "3.9") +SUPPORTED_VERSIONS = ("3.14", "3.13", "3.12", "3.11", "3.10", "3.9") LOWEST_SUPPORTED_VERSION = min(SUPPORTED_VERSIONS, key=lambda x: int(x.split(".")[1])) DIRECTORIES_TO_TEST = ("scripts", "tests") EMPTY: list[str] = [] From 
23ca5fc125e62a301084bc8f5df7419c338059d5 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sat, 10 May 2025 15:17:04 +0000 Subject: [PATCH 308/388] Bump `getpass` to 3.14 (#13967) --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/getpass.pyi | 8 +++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 5c6f2fd01493..3cf65860e6a6 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -394,7 +394,6 @@ functools.reduce functools.reduce functools.update_wrapper functools.wraps -getpass.getpass gzip.GzipFile.readinto gzip.GzipFile.readinto gzip.GzipFile.readinto1 diff --git a/stdlib/getpass.pyi b/stdlib/getpass.pyi index 6104e0dedfee..bb3013dfbf39 100644 --- a/stdlib/getpass.pyi +++ b/stdlib/getpass.pyi @@ -1,8 +1,14 @@ +import sys from typing import TextIO __all__ = ["getpass", "getuser", "GetPassWarning"] -def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: ... +if sys.version_info >= (3, 14): + def getpass(prompt: str = "Password: ", stream: TextIO | None = None, *, echo_char: str | None = None) -> str: ... + +else: + def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: ... + def getuser() -> str: ... class GetPassWarning(UserWarning): ... From ad70ad11d6b3ccc76979434b299c57f0e7ec7b19 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sat, 10 May 2025 12:21:10 -0500 Subject: [PATCH 309/388] Use literal constant for `QuotingType` in `csv` Module (#13983) --- stdlib/_csv.pyi | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/stdlib/_csv.pyi b/stdlib/_csv.pyi index aa9fc538417e..ecea4878907c 100644 --- a/stdlib/_csv.pyi +++ b/stdlib/_csv.pyi @@ -2,7 +2,7 @@ import csv import sys from _typeshed import SupportsWrite from collections.abc import Iterable -from typing import Any, Final, type_check_only +from typing import Any, Final, Literal, type_check_only from typing_extensions import Self, TypeAlias __version__: Final[str] @@ -15,9 +15,10 @@ if sys.version_info >= (3, 12): QUOTE_STRINGS: Final = 4 QUOTE_NOTNULL: Final = 5 -# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC` -# However, using literals in situations like these can cause false-positives (see #7258) -_QuotingType: TypeAlias = int +if sys.version_info >= (3, 12): + _QuotingType: TypeAlias = Literal[0, 1, 2, 3, 4, 5] +else: + _QuotingType: TypeAlias = Literal[0, 1, 2, 3] class Error(Exception): ... 
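Aside on the getpass stub change in PATCH 308 above: a minimal, hedged usage sketch of the 3.14 signature. The `echo_char` keyword is taken from the stub added in that patch; the masking behavior described in the comments is an assumption, not something verified here, and the call only makes sense on an interactive terminal.

import sys
from getpass import getpass

# Hypothetical interactive usage; requires a real TTY to prompt on.
if sys.version_info >= (3, 14):
    # Assumption from the stub above: echo_char shows one mask character per keystroke.
    token = getpass("Token: ", echo_char="*")
else:
    # Older versions have no echo_char keyword; input is simply not echoed.
    token = getpass("Token: ")
print(len(token), "characters read")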
From b5520cddd7860c5e0480f0443660bf2f4d5d9052 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sat, 10 May 2025 13:26:23 -0400 Subject: [PATCH 310/388] Python 3.14: Remove `SupportsTrunc` from `ConvertibleToInt` (#13986) --- stdlib/_typeshed/__init__.pyi | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/stdlib/_typeshed/__init__.pyi b/stdlib/_typeshed/__init__.pyi index a503637998d0..7ed8a079ea09 100644 --- a/stdlib/_typeshed/__init__.pyi +++ b/stdlib/_typeshed/__init__.pyi @@ -353,7 +353,10 @@ class DataclassInstance(Protocol): __dataclass_fields__: ClassVar[dict[str, Field[Any]]] # Anything that can be passed to the int/float constructors -ConvertibleToInt: TypeAlias = str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc +if sys.version_info >= (3, 14): + ConvertibleToInt: TypeAlias = str | ReadableBuffer | SupportsInt | SupportsIndex +else: + ConvertibleToInt: TypeAlias = str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc ConvertibleToFloat: TypeAlias = str | ReadableBuffer | SupportsFloat | SupportsIndex # A few classes updated from Foo(str, Enum) to Foo(StrEnum). This is a convenience so these From a748cce6143e47320740ca615dee8ed7e9d6633e Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sat, 10 May 2025 13:21:41 -0500 Subject: [PATCH 311/388] Add `strict` argument for `map` (3.14) (#13987) --- stdlib/builtins.pyi | 144 +++++++++++++++++++++++++++++++------------- 1 file changed, 102 insertions(+), 42 deletions(-) diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index b75250aad3de..2091a76f8da3 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -1494,48 +1494,108 @@ license: _sitebuiltins._Printer def locals() -> dict[str, Any]: ... class map(Generic[_S]): - @overload - def __new__(cls, func: Callable[[_T1], _S], iterable: Iterable[_T1], /) -> Self: ... - @overload - def __new__(cls, func: Callable[[_T1, _T2], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], /) -> Self: ... - @overload - def __new__( - cls, func: Callable[[_T1, _T2, _T3], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], / - ) -> Self: ... - @overload - def __new__( - cls, - func: Callable[[_T1, _T2, _T3, _T4], _S], - iterable: Iterable[_T1], - iter2: Iterable[_T2], - iter3: Iterable[_T3], - iter4: Iterable[_T4], - /, - ) -> Self: ... - @overload - def __new__( - cls, - func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], - iterable: Iterable[_T1], - iter2: Iterable[_T2], - iter3: Iterable[_T3], - iter4: Iterable[_T4], - iter5: Iterable[_T5], - /, - ) -> Self: ... - @overload - def __new__( - cls, - func: Callable[..., _S], - iterable: Iterable[Any], - iter2: Iterable[Any], - iter3: Iterable[Any], - iter4: Iterable[Any], - iter5: Iterable[Any], - iter6: Iterable[Any], - /, - *iterables: Iterable[Any], - ) -> Self: ... + # 3.14 adds `strict` argument. + if sys.version_info >= (3, 14): + @overload + def __new__(cls, func: Callable[[_T1], _S], iterable: Iterable[_T1], /, *, strict: bool = False) -> Self: ... + @overload + def __new__( + cls, func: Callable[[_T1, _T2], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], /, *, strict: bool = False + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + /, + *, + strict: bool = False, + ) -> Self: ... 
+ @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3, _T4], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + /, + *, + strict: bool = False, + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + /, + *, + strict: bool = False, + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[..., _S], + iterable: Iterable[Any], + iter2: Iterable[Any], + iter3: Iterable[Any], + iter4: Iterable[Any], + iter5: Iterable[Any], + iter6: Iterable[Any], + /, + *iterables: Iterable[Any], + strict: bool = False, + ) -> Self: ... + else: + @overload + def __new__(cls, func: Callable[[_T1], _S], iterable: Iterable[_T1], /) -> Self: ... + @overload + def __new__(cls, func: Callable[[_T1, _T2], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], /) -> Self: ... + @overload + def __new__( + cls, func: Callable[[_T1, _T2, _T3], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], / + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3, _T4], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + /, + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], + iterable: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + /, + ) -> Self: ... + @overload + def __new__( + cls, + func: Callable[..., _S], + iterable: Iterable[Any], + iter2: Iterable[Any], + iter3: Iterable[Any], + iter4: Iterable[Any], + iter5: Iterable[Any], + iter6: Iterable[Any], + /, + *iterables: Iterable[Any], + ) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _S: ... From 437133274c26128d6a4ca8632e1a069cbd1d8cc1 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sat, 10 May 2025 13:22:34 -0500 Subject: [PATCH 312/388] Add `invalidate_caches` for 3.14 (#13988) --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/platform.pyi | 3 +++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 3cf65860e6a6..e9a9bb9ad501 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -480,7 +480,6 @@ pdb.set_trace pkgutil.__all__ pkgutil.find_loader pkgutil.get_loader -platform.invalidate_caches pstats.FunctionProfile.__annotate_func__ pstats.FunctionProfile.__annotations_cache__ pstats.StatsProfile.__annotate_func__ diff --git a/stdlib/platform.pyi b/stdlib/platform.pyi index 19fac26134eb..fbc73c6c9177 100644 --- a/stdlib/platform.pyi +++ b/stdlib/platform.pyi @@ -82,3 +82,6 @@ if sys.version_info >= (3, 13): is_emulator: bool = False, ) -> AndroidVer: ... def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: ... + +if sys.version_info >= (3, 14): + def invalidate_caches() -> None: ... 
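
A quick sketch of the `strict` keyword added to the `map()` overloads above, assuming Python 3.14 and that `strict=True` mirrors `zip(strict=True)` by raising `ValueError` on a length mismatch; the sample data is illustrative.

    import sys

    names = ["ada", "grace", "alan"]
    ids = [1, 2]

    if sys.version_info >= (3, 14):
        try:
            # strict=True surfaces the mismatch instead of truncating silently.
            list(map(lambda name, i: f"{i}:{name}", names, ids, strict=True))
        except ValueError as exc:
            print("length mismatch detected:", exc)
    else:
        # Pre-3.14 behaviour: the shorter iterable truncates the result.
        print(list(map(lambda name, i: f"{i}:{name}", names, ids)))
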
From b9a4bdcc1ac5036487628097d2626f4b3f0fdb73 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sat, 10 May 2025 13:51:32 -0500 Subject: [PATCH 313/388] Update `uuid` for 3.14, other minor improvements (#13990) --- stdlib/@tests/stubtest_allowlists/py314.txt | 5 ---- stdlib/uuid.pyi | 28 ++++++++++++++------- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index e9a9bb9ad501..aedbbcde0084 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -625,11 +625,6 @@ urllib.request.pathname2url urllib.request.url2pathname urllib.request.url2pathname urllib.request.url2pathname -uuid.MAX -uuid.NIL -uuid.uuid6 -uuid.uuid7 -uuid.uuid8 wsgiref.types.ErrorStream.__annotate_func__ wsgiref.types.ErrorStream.__annotations_cache__ wsgiref.types.FileWrapper.__annotate_func__ diff --git a/stdlib/uuid.pyi b/stdlib/uuid.pyi index 3202ae212cae..99ac6eb223ef 100644 --- a/stdlib/uuid.pyi +++ b/stdlib/uuid.pyi @@ -1,7 +1,8 @@ import builtins import sys from enum import Enum -from typing_extensions import TypeAlias +from typing import Final +from typing_extensions import LiteralString, TypeAlias _FieldsType: TypeAlias = tuple[int, int, int, int, int, int] @@ -67,6 +68,11 @@ class UUID: def getnode() -> int: ... def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: ... +if sys.version_info >= (3, 14): + def uuid6(node: int | None = None, clock_seq: int | None = None) -> UUID: ... + def uuid7() -> UUID: ... + def uuid8(a: int | None = None, b: int | None = None, c: int | None = None) -> UUID: ... + if sys.version_info >= (3, 12): def uuid3(namespace: UUID, name: str | bytes) -> UUID: ... @@ -81,14 +87,18 @@ if sys.version_info >= (3, 12): else: def uuid5(namespace: UUID, name: str) -> UUID: ... -NAMESPACE_DNS: UUID -NAMESPACE_URL: UUID -NAMESPACE_OID: UUID -NAMESPACE_X500: UUID -RESERVED_NCS: str -RFC_4122: str -RESERVED_MICROSOFT: str -RESERVED_FUTURE: str +if sys.version_info >= (3, 14): + NIL: Final[UUID] + MAX: Final[UUID] + +NAMESPACE_DNS: Final[UUID] +NAMESPACE_URL: Final[UUID] +NAMESPACE_OID: Final[UUID] +NAMESPACE_X500: Final[UUID] +RESERVED_NCS: Final[LiteralString] +RFC_4122: Final[LiteralString] +RESERVED_MICROSOFT: Final[LiteralString] +RESERVED_FUTURE: Final[LiteralString] if sys.version_info >= (3, 12): def main() -> None: ... From bbedbe79d6fff7caa9ac63d50bae20ec036a6d4e Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sat, 10 May 2025 14:02:43 -0500 Subject: [PATCH 314/388] Add strptime for `datetime.time` and `datetime.date` (3.14) (#13991) --- stdlib/@tests/stubtest_allowlists/py314.txt | 2 -- stdlib/datetime.pyi | 10 ++++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index aedbbcde0084..bfbf9cd9f9d5 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -364,8 +364,6 @@ dataclasses.Field.__init__ dataclasses.Field.doc dataclasses.field dataclasses.make_dataclass -datetime.date.strptime -datetime.time.strptime decimal.Decimal.from_number decimal.DecimalTuple.__annotate_func__ decimal.DecimalTuple.__annotations_cache__ diff --git a/stdlib/datetime.pyi b/stdlib/datetime.pyi index 72fb5fceb1fb..37d6a06dfff9 100644 --- a/stdlib/datetime.pyi +++ b/stdlib/datetime.pyi @@ -73,6 +73,11 @@ class date: @property def day(self) -> int: ... def ctime(self) -> str: ... 
+ + if sys.version_info >= (3, 14): + @classmethod + def strptime(cls, date_string: str, format: str, /) -> Self: ... + # On <3.12, the name of the parameter in the pure-Python implementation # didn't match the name in the C implementation, # meaning it is only *safe* to pass it as a keyword argument on 3.12+ @@ -142,6 +147,11 @@ class time: def isoformat(self, timespec: str = ...) -> str: ... @classmethod def fromisoformat(cls, time_string: str, /) -> Self: ... + + if sys.version_info >= (3, 14): + @classmethod + def strptime(cls, date_string: str, format: str, /) -> Self: ... + # On <3.12, the name of the parameter in the pure-Python implementation # didn't match the name in the C implementation, # meaning it is only *safe* to pass it as a keyword argument on 3.12+ From 40338991c216cf3af71437cdc880268fbc170384 Mon Sep 17 00:00:00 2001 From: Anton Pilipenko <9156442+VelikiiNehochuha@users.noreply.github.com> Date: Sat, 10 May 2025 23:27:07 +0400 Subject: [PATCH 315/388] Fix tkinter.PhotoImage.put arguments (#13971) * allow bytes * put to x1 y1 ?x2 y2? --------- Co-authored-by: Anton Pilipenko --- stdlib/tkinter/__init__.pyi | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/stdlib/tkinter/__init__.pyi b/stdlib/tkinter/__init__.pyi index dcac61d77e0a..e2b4eca1e62a 100644 --- a/stdlib/tkinter/__init__.pyi +++ b/stdlib/tkinter/__init__.pyi @@ -3736,6 +3736,7 @@ class PhotoImage(Image, _PhotoImageLike): self, data: ( str + | bytes | list[str] | list[list[str]] | list[tuple[str, ...]] @@ -3743,7 +3744,7 @@ class PhotoImage(Image, _PhotoImageLike): | tuple[list[str], ...] | tuple[tuple[str, ...], ...] ), - to: tuple[int, int] | None = None, + to: tuple[int, int] | tuple[int, int, int, int] | None = None, ) -> None: ... if sys.version_info >= (3, 13): def read( From f6a6ca2ea8f8e1255085edf2844b23988ae808db Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sat, 10 May 2025 19:32:20 +0000 Subject: [PATCH 316/388] Update `ast.main` function for Python 3.14 (#13982) --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/ast.pyi | 9 +++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index bfbf9cd9f9d5..8caa76c90d34 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -251,7 +251,6 @@ ast.arg.__annotations_cache__ ast.arguments.__annotations_cache__ ast.comprehension.__annotations_cache__ ast.keyword.__annotations_cache__ -ast.main ast.match_case.__annotations_cache__ ast.withitem.__annotations_cache__ asyncio.__all__ diff --git a/stdlib/ast.pyi b/stdlib/ast.pyi index bced1cb45c96..64dc5f2af95b 100644 --- a/stdlib/ast.pyi +++ b/stdlib/ast.pyi @@ -7,7 +7,7 @@ from _ast import ( PyCF_TYPE_COMMENTS as PyCF_TYPE_COMMENTS, ) from _typeshed import ReadableBuffer, Unused -from collections.abc import Iterable, Iterator +from collections.abc import Iterable, Iterator, Sequence from typing import Any, ClassVar, Generic, Literal, TypedDict, TypeVar as _TypeVar, overload from typing_extensions import Self, Unpack, deprecated @@ -2016,4 +2016,9 @@ class NodeTransformer(NodeVisitor): # is also allowed in some cases -- this needs to be mapped. def unparse(ast_obj: AST) -> str: ... -def main() -> None: ... + +if sys.version_info >= (3, 14): + def main(args: Sequence[str] | None = None) -> None: ... + +else: + def main() -> None: ... 
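
A short sketch of the `date.strptime()` and `time.strptime()` classmethods typed above, assuming Python 3.14; the date, time and format strings are only examples.

    import sys
    from datetime import date, time

    if sys.version_info >= (3, 14):
        # 3.14 parses directly into date/time objects.
        d = date.strptime("2025-05-10", "%Y-%m-%d")
        t = time.strptime("14:30:15", "%H:%M:%S")
    else:
        # Older versions go through datetime and convert.
        from datetime import datetime

        d = datetime.strptime("2025-05-10", "%Y-%m-%d").date()
        t = datetime.strptime("14:30:15", "%H:%M:%S").time()

    print(d, t)
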
From 14a99c8c93bd700c107a60b97d9d668225056a43 Mon Sep 17 00:00:00 2001 From: Akuli Date: Sat, 10 May 2025 23:08:02 +0300 Subject: [PATCH 317/388] Delete duplicate lines from stdlib/@tests/stubtest_allowlists/py314.txt (#13993) --- stdlib/@tests/stubtest_allowlists/py314.txt | 36 --------------------- 1 file changed, 36 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 8caa76c90d34..5f2563a34789 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -121,7 +121,6 @@ _contextvars.Token.__enter__ _contextvars.Token.__exit__ _ctypes.POINTER _ctypes.byref -_ctypes.byref _ctypes.pointer _decimal.Decimal.from_number _decimal.IEEEContext @@ -139,7 +138,6 @@ _thread.RLock.locked _thread.set_name annotationlib argparse.HelpFormatter.__init__ -argparse.HelpFormatter.__init__ ast.Add.__annotations_cache__ ast.And.__annotations_cache__ ast.AnnAssign.__annotations_cache__ @@ -258,14 +256,10 @@ asyncio.FrameCallGraphEntry asyncio.FutureCallGraph asyncio._AbstractEventLoopPolicy asyncio._DefaultEventLoopPolicy -asyncio.__all__ asyncio._get_event_loop_policy asyncio._set_event_loop_policy asyncio.capture_call_graph asyncio.eager_task_factory -asyncio.eager_task_factory -asyncio.eager_task_factory -asyncio.eager_task_factory asyncio.format_call_graph asyncio.future_add_to_awaited_by asyncio.future_discard_from_awaited_by @@ -274,18 +268,13 @@ asyncio.events.__all__ asyncio.events.AbstractEventLoopPolicy asyncio.events.BaseDefaultEventLoopPolicy asyncio.events._AbstractEventLoopPolicy -asyncio.events.__all__ asyncio.events._get_event_loop_policy asyncio.events._set_event_loop_policy asyncio.futures.__all__ -asyncio.futures.__all__ asyncio.futures.future_add_to_awaited_by asyncio.futures.future_discard_from_awaited_by asyncio.graph asyncio.tasks.eager_task_factory -asyncio.tasks.eager_task_factory -asyncio.tasks.eager_task_factory -asyncio.tasks.eager_task_factory bdb.Bdb.__init__ bdb.Bdb.disable_current_event bdb.Bdb.restart_events @@ -321,7 +310,6 @@ concurrent.futures.ProcessPoolExecutor.map concurrent.futures.ProcessPoolExecutor.terminate_workers concurrent.futures.ThreadPoolExecutor.BROKEN concurrent.futures.ThreadPoolExecutor.prepare_context -concurrent.futures.__all__ concurrent.futures._base.Executor.map concurrent.futures.interpreter concurrent.futures.process.ProcessPoolExecutor.kill_workers @@ -331,21 +319,14 @@ concurrent.futures.thread.ThreadPoolExecutor.BROKEN concurrent.futures.thread.ThreadPoolExecutor.prepare_context concurrent.futures.thread.WorkerContext concurrent.futures.thread._WorkItem.__init__ -concurrent.futures.thread._WorkItem.__init__ -concurrent.futures.thread._WorkItem.__init__ concurrent.futures.thread._WorkItem.run concurrent.futures.thread._worker -concurrent.futures.thread._worker -concurrent.futures.thread._worker configparser.__all__ configparser.InvalidWriteError configparser.UnnamedSectionDisabledError -configparser.__all__ contextvars.Token.__enter__ contextvars.Token.__exit__ ctypes.POINTER -ctypes.POINTER -ctypes.byref ctypes.byref ctypes.memoryview_at ctypes.pointer @@ -385,15 +366,10 @@ functools.__all__ functools.Placeholder functools.WRAPPER_ASSIGNMENTS functools.partialmethod.__new__ -functools.partialmethod.__new__ -functools.partialmethod.__new__ -functools.reduce functools.reduce functools.update_wrapper functools.wraps gzip.GzipFile.readinto -gzip.GzipFile.readinto -gzip.GzipFile.readinto1 gzip.GzipFile.readinto1 gzip.compress 
http.server.__all__ @@ -458,14 +434,10 @@ pathlib.Path.move pathlib.Path.move_into pathlib.Path.rmtree pathlib.PurePath.is_relative_to -pathlib.PurePath.is_relative_to -pathlib.PurePath.relative_to pathlib.PurePath.relative_to pathlib.types pdb.__all__ pdb.Pdb.__init__ -pdb.Pdb.__init__ -pdb.Pdb.__init__ pdb.Pdb.checkline pdb.Pdb.complete_multiline_names pdb.Pdb.print_stack_trace @@ -506,8 +478,6 @@ tokenize.TSTRING_END tokenize.TSTRING_MIDDLE tokenize.TSTRING_START tomllib.TOMLDecodeError.__init__ -tomllib.TOMLDecodeError.__init__ -tomllib.TOMLDecodeError.__init__ traceback.__all__ turtle.__all__ turtle.RawTurtle.fill @@ -537,8 +507,6 @@ typing.ForwardRef.__forward_arg__ typing.ForwardRef.__forward_code__ typing.ForwardRef.__globals__ typing.ForwardRef.__init__ -typing.ForwardRef.__init__ -typing.ForwardRef.__init__ typing.ForwardRef.__init_subclass__ typing.ForwardRef.__owner__ typing.ForwardRef.__stringifier_dict__ @@ -575,8 +543,6 @@ typing_extensions.ForwardRef.__forward_arg__ typing_extensions.ForwardRef.__forward_code__ typing_extensions.ForwardRef.__globals__ typing_extensions.ForwardRef.__init__ -typing_extensions.ForwardRef.__init__ -typing_extensions.ForwardRef.__init__ typing_extensions.ForwardRef.__init_subclass__ typing_extensions.ForwardRef.__owner__ typing_extensions.ForwardRef.__stringifier_dict__ @@ -620,8 +586,6 @@ urllib.request.FancyURLopener urllib.request.URLopener urllib.request.pathname2url urllib.request.url2pathname -urllib.request.url2pathname -urllib.request.url2pathname wsgiref.types.ErrorStream.__annotate_func__ wsgiref.types.ErrorStream.__annotations_cache__ wsgiref.types.FileWrapper.__annotate_func__ From b9b0369f92cd947a9f06ffe18fe8b79191c6ab41 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sat, 10 May 2025 15:43:14 -0500 Subject: [PATCH 318/388] Update `ProcessPoolExecutor` for 3.14 (#13994) --- stdlib/@tests/stubtest_allowlists/py314.txt | 8 -------- stdlib/concurrent/futures/_base.pyi | 17 ++++++++++++++--- stdlib/concurrent/futures/process.pyi | 4 ++++ 3 files changed, 18 insertions(+), 11 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 5f2563a34789..084d866182f6 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -303,18 +303,10 @@ compression.lzma compression.zlib compression.zstd concurrent.futures.__all__ -concurrent.futures.Executor.map concurrent.futures.InterpreterPoolExecutor -concurrent.futures.ProcessPoolExecutor.kill_workers -concurrent.futures.ProcessPoolExecutor.map -concurrent.futures.ProcessPoolExecutor.terminate_workers concurrent.futures.ThreadPoolExecutor.BROKEN concurrent.futures.ThreadPoolExecutor.prepare_context -concurrent.futures._base.Executor.map concurrent.futures.interpreter -concurrent.futures.process.ProcessPoolExecutor.kill_workers -concurrent.futures.process.ProcessPoolExecutor.map -concurrent.futures.process.ProcessPoolExecutor.terminate_workers concurrent.futures.thread.ThreadPoolExecutor.BROKEN concurrent.futures.thread.ThreadPoolExecutor.prepare_context concurrent.futures.thread.WorkerContext diff --git a/stdlib/concurrent/futures/_base.pyi b/stdlib/concurrent/futures/_base.pyi index 7294b69567d6..fbf07a3fc78f 100644 --- a/stdlib/concurrent/futures/_base.pyi +++ b/stdlib/concurrent/futures/_base.pyi @@ -54,9 +54,20 @@ class Future(Generic[_T]): class Executor: def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... 
- def map( - self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 - ) -> Iterator[_T]: ... + if sys.version_info >= (3, 14): + def map( + self, + fn: Callable[..., _T], + *iterables: Iterable[Any], + timeout: float | None = None, + chunksize: int = 1, + buffersize: int | None = None, + ) -> Iterator[_T]: ... + else: + def map( + self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 + ) -> Iterator[_T]: ... + def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ... def __enter__(self) -> Self: ... def __exit__( diff --git a/stdlib/concurrent/futures/process.pyi b/stdlib/concurrent/futures/process.pyi index 9c904f793fa9..607990100369 100644 --- a/stdlib/concurrent/futures/process.pyi +++ b/stdlib/concurrent/futures/process.pyi @@ -236,3 +236,7 @@ class ProcessPoolExecutor(Executor): def _start_executor_manager_thread(self) -> None: ... def _adjust_process_count(self) -> None: ... + + if sys.version_info >= (3, 14): + def kill_workers(self) -> None: ... + def terminate_workers(self) -> None: ... From bfb16abcc632a508f87d25c14e45cdf3898b1297 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sat, 10 May 2025 16:18:37 -0500 Subject: [PATCH 319/388] Add context manager support for QueueListener (3.14) (#13996) --- stdlib/@tests/stubtest_allowlists/py314.txt | 2 -- stdlib/logging/handlers.pyi | 8 ++++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 084d866182f6..a2bd88b696d0 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -399,8 +399,6 @@ io.__all__ io.Reader io.Writer ipaddress._IPAddressBase.version -logging.handlers.QueueListener.__enter__ -logging.handlers.QueueListener.__exit__ logging.handlers.SysLogHandler.__init__ marshal.dump marshal.dumps diff --git a/stdlib/logging/handlers.pyi b/stdlib/logging/handlers.pyi index 2c7ec05afe9a..b58999e9d995 100644 --- a/stdlib/logging/handlers.pyi +++ b/stdlib/logging/handlers.pyi @@ -8,7 +8,9 @@ from logging import FileHandler, Handler, LogRecord from re import Pattern from socket import SocketKind, socket from threading import Thread +from types import TracebackType from typing import Any, ClassVar, Final, Protocol, TypeVar +from typing_extensions import Self _T = TypeVar("_T") @@ -237,3 +239,9 @@ class QueueListener: def stop(self) -> None: ... def enqueue_sentinel(self) -> None: ... def handle(self, record: LogRecord) -> None: ... + + if sys.version_info >= (3, 14): + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... 
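
A minimal sketch of the `QueueListener` context-manager support stubbed above, assuming Python 3.14, where entering the context is expected to start the listener thread and exiting to stop it; the logger name and message are illustrative.

    import logging
    import logging.handlers
    import queue
    import sys

    log_queue: queue.Queue[logging.LogRecord] = queue.Queue()
    listener = logging.handlers.QueueListener(log_queue, logging.StreamHandler())

    logger = logging.getLogger("demo")
    logger.addHandler(logging.handlers.QueueHandler(log_queue))
    logger.setLevel(logging.INFO)

    if sys.version_info >= (3, 14):
        with listener:  # __enter__ starts the worker thread, __exit__ stops it
            logger.info("handled via the queue")
    else:
        # Older versions manage the thread explicitly.
        listener.start()
        try:
            logger.info("handled via the queue")
        finally:
            listener.stop()
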
From 721f637a63266d8008a4bd394fdf12983cfeacef Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Sat, 10 May 2025 14:25:06 -0700 Subject: [PATCH 320/388] 3.14: add annotationlib, update typing and inspect (#13985) --- stdlib/@tests/stubtest_allowlists/py314.txt | 359 ++------------------ stdlib/VERSIONS | 1 + stdlib/_typeshed/__init__.pyi | 11 + stdlib/annotationlib.pyi | 132 +++++++ stdlib/builtins.pyi | 11 +- stdlib/inspect.pyi | 60 +++- stdlib/types.pyi | 13 +- stdlib/typing.pyi | 224 +++++++----- stdlib/typing_extensions.pyi | 153 ++++++--- tests/stubtest_stdlib.py | 1 + 10 files changed, 488 insertions(+), 477 deletions(-) create mode 100644 stdlib/annotationlib.pyi diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index a2bd88b696d0..bb6b5ecf795a 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -2,117 +2,6 @@ # TODO: New errors in Python 3.14 that need to be fixed or moved below # ==================================================================== -_ast.Add.__annotations_cache__ -_ast.And.__annotations_cache__ -_ast.AnnAssign.__annotations_cache__ -_ast.Assert.__annotations_cache__ -_ast.Assign.__annotations_cache__ -_ast.AsyncFor.__annotations_cache__ -_ast.AsyncFunctionDef.__annotations_cache__ -_ast.AsyncWith.__annotations_cache__ -_ast.Attribute.__annotations_cache__ -_ast.AugAssign.__annotations_cache__ -_ast.Await.__annotations_cache__ -_ast.BinOp.__annotations_cache__ -_ast.BitAnd.__annotations_cache__ -_ast.BitOr.__annotations_cache__ -_ast.BitXor.__annotations_cache__ -_ast.BoolOp.__annotations_cache__ -_ast.Break.__annotations_cache__ -_ast.Call.__annotations_cache__ -_ast.ClassDef.__annotations_cache__ -_ast.Compare.__annotations_cache__ -_ast.Constant.__annotations_cache__ -_ast.Continue.__annotations_cache__ -_ast.Del.__annotations_cache__ -_ast.Delete.__annotations_cache__ -_ast.Dict.__annotations_cache__ -_ast.DictComp.__annotations_cache__ -_ast.Div.__annotations_cache__ -_ast.Eq.__annotations_cache__ -_ast.ExceptHandler.__annotations_cache__ -_ast.Expr.__annotations_cache__ -_ast.Expression.__annotations_cache__ -_ast.FloorDiv.__annotations_cache__ -_ast.For.__annotations_cache__ -_ast.FormattedValue.__annotations_cache__ -_ast.FunctionDef.__annotations_cache__ -_ast.FunctionType.__annotations_cache__ -_ast.GeneratorExp.__annotations_cache__ -_ast.Global.__annotations_cache__ -_ast.Gt.__annotations_cache__ -_ast.GtE.__annotations_cache__ -_ast.If.__annotations_cache__ -_ast.IfExp.__annotations_cache__ -_ast.Import.__annotations_cache__ -_ast.ImportFrom.__annotations_cache__ -_ast.In.__annotations_cache__ -_ast.Interactive.__annotations_cache__ -_ast.Invert.__annotations_cache__ -_ast.Is.__annotations_cache__ -_ast.IsNot.__annotations_cache__ -_ast.JoinedStr.__annotations_cache__ -_ast.LShift.__annotations_cache__ -_ast.Lambda.__annotations_cache__ -_ast.List.__annotations_cache__ -_ast.ListComp.__annotations_cache__ -_ast.Load.__annotations_cache__ -_ast.Lt.__annotations_cache__ -_ast.LtE.__annotations_cache__ -_ast.MatMult.__annotations_cache__ -_ast.Match.__annotations_cache__ -_ast.MatchAs.__annotations_cache__ -_ast.MatchClass.__annotations_cache__ -_ast.MatchMapping.__annotations_cache__ -_ast.MatchOr.__annotations_cache__ -_ast.MatchSequence.__annotations_cache__ -_ast.MatchSingleton.__annotations_cache__ -_ast.MatchStar.__annotations_cache__ -_ast.MatchValue.__annotations_cache__ -_ast.Mod.__annotations_cache__ -_ast.Module.__annotations_cache__ 
-_ast.Mult.__annotations_cache__ -_ast.Name.__annotations_cache__ -_ast.NamedExpr.__annotations_cache__ -_ast.Nonlocal.__annotations_cache__ -_ast.Not.__annotations_cache__ -_ast.NotEq.__annotations_cache__ -_ast.NotIn.__annotations_cache__ -_ast.Or.__annotations_cache__ -_ast.ParamSpec.__annotations_cache__ -_ast.Pass.__annotations_cache__ -_ast.Pow.__annotations_cache__ -_ast.RShift.__annotations_cache__ -_ast.Raise.__annotations_cache__ -_ast.Return.__annotations_cache__ -_ast.Set.__annotations_cache__ -_ast.SetComp.__annotations_cache__ -_ast.Slice.__annotations_cache__ -_ast.Starred.__annotations_cache__ -_ast.Store.__annotations_cache__ -_ast.Sub.__annotations_cache__ -_ast.Subscript.__annotations_cache__ -_ast.Try.__annotations_cache__ -_ast.TryStar.__annotations_cache__ -_ast.Tuple.__annotations_cache__ -_ast.TypeAlias.__annotations_cache__ -_ast.TypeIgnore.__annotations_cache__ -_ast.TypeVar.__annotations_cache__ -_ast.TypeVarTuple.__annotations_cache__ -_ast.UAdd.__annotations_cache__ -_ast.USub.__annotations_cache__ -_ast.UnaryOp.__annotations_cache__ -_ast.While.__annotations_cache__ -_ast.With.__annotations_cache__ -_ast.Yield.__annotations_cache__ -_ast.YieldFrom.__annotations_cache__ -_ast.alias.__annotations_cache__ -_ast.arg.__annotations_cache__ -_ast.arguments.__annotations_cache__ -_ast.comprehension.__annotations_cache__ -_ast.keyword.__annotations_cache__ -_ast.match_case.__annotations_cache__ -_ast.withitem.__annotations_cache__ _asyncio.all_tasks _asyncio.future_add_to_awaited_by _asyncio.future_discard_from_awaited_by @@ -136,121 +25,9 @@ _socket.if_indextoname _ssl.HAS_PHA _thread.RLock.locked _thread.set_name -annotationlib argparse.HelpFormatter.__init__ -ast.Add.__annotations_cache__ -ast.And.__annotations_cache__ -ast.AnnAssign.__annotations_cache__ -ast.Assert.__annotations_cache__ -ast.Assign.__annotations_cache__ -ast.AsyncFor.__annotations_cache__ -ast.AsyncFunctionDef.__annotations_cache__ -ast.AsyncWith.__annotations_cache__ -ast.Attribute.__annotations_cache__ -ast.AugAssign.__annotations_cache__ -ast.Await.__annotations_cache__ -ast.BinOp.__annotations_cache__ -ast.BitAnd.__annotations_cache__ -ast.BitOr.__annotations_cache__ -ast.BitXor.__annotations_cache__ -ast.BoolOp.__annotations_cache__ -ast.Break.__annotations_cache__ -ast.Call.__annotations_cache__ -ast.ClassDef.__annotations_cache__ -ast.Compare.__annotations_cache__ -ast.Constant.__annotations_cache__ -ast.Continue.__annotations_cache__ -ast.Del.__annotations_cache__ -ast.Delete.__annotations_cache__ -ast.Dict.__annotations_cache__ -ast.DictComp.__annotations_cache__ -ast.Div.__annotations_cache__ -ast.Eq.__annotations_cache__ -ast.ExceptHandler.__annotations_cache__ -ast.Expr.__annotations_cache__ -ast.Expression.__annotations_cache__ -ast.FloorDiv.__annotations_cache__ -ast.For.__annotations_cache__ -ast.FormattedValue.__annotations_cache__ -ast.FunctionDef.__annotations_cache__ -ast.FunctionType.__annotations_cache__ -ast.GeneratorExp.__annotations_cache__ -ast.Global.__annotations_cache__ -ast.Gt.__annotations_cache__ -ast.GtE.__annotations_cache__ -ast.If.__annotations_cache__ -ast.IfExp.__annotations_cache__ -ast.Import.__annotations_cache__ -ast.ImportFrom.__annotations_cache__ -ast.In.__annotations_cache__ -ast.Interactive.__annotations_cache__ ast.Interpolation -ast.Invert.__annotations_cache__ -ast.Is.__annotations_cache__ -ast.IsNot.__annotations_cache__ -ast.JoinedStr.__annotations_cache__ -ast.LShift.__annotations_cache__ -ast.Lambda.__annotations_cache__ 
-ast.List.__annotations_cache__ -ast.ListComp.__annotations_cache__ -ast.Load.__annotations_cache__ -ast.Lt.__annotations_cache__ -ast.LtE.__annotations_cache__ -ast.MatMult.__annotations_cache__ -ast.Match.__annotations_cache__ -ast.MatchAs.__annotations_cache__ -ast.MatchClass.__annotations_cache__ -ast.MatchMapping.__annotations_cache__ -ast.MatchOr.__annotations_cache__ -ast.MatchSequence.__annotations_cache__ -ast.MatchSingleton.__annotations_cache__ -ast.MatchStar.__annotations_cache__ -ast.MatchValue.__annotations_cache__ -ast.Mod.__annotations_cache__ -ast.Module.__annotations_cache__ -ast.Mult.__annotations_cache__ -ast.Name.__annotations_cache__ -ast.NamedExpr.__annotations_cache__ -ast.Nonlocal.__annotations_cache__ -ast.Not.__annotations_cache__ -ast.NotEq.__annotations_cache__ -ast.NotIn.__annotations_cache__ -ast.Or.__annotations_cache__ -ast.ParamSpec.__annotations_cache__ -ast.Pass.__annotations_cache__ -ast.Pow.__annotations_cache__ -ast.RShift.__annotations_cache__ -ast.Raise.__annotations_cache__ -ast.Return.__annotations_cache__ -ast.Set.__annotations_cache__ -ast.SetComp.__annotations_cache__ -ast.Slice.__annotations_cache__ -ast.Starred.__annotations_cache__ -ast.Store.__annotations_cache__ -ast.Sub.__annotations_cache__ -ast.Subscript.__annotations_cache__ ast.TemplateStr -ast.Try.__annotations_cache__ -ast.TryStar.__annotations_cache__ -ast.Tuple.__annotations_cache__ -ast.TypeAlias.__annotations_cache__ -ast.TypeIgnore.__annotations_cache__ -ast.TypeVar.__annotations_cache__ -ast.TypeVarTuple.__annotations_cache__ -ast.UAdd.__annotations_cache__ -ast.USub.__annotations_cache__ -ast.UnaryOp.__annotations_cache__ -ast.While.__annotations_cache__ -ast.With.__annotations_cache__ -ast.Yield.__annotations_cache__ -ast.YieldFrom.__annotations_cache__ -ast.alias.__annotations_cache__ -ast.arg.__annotations_cache__ -ast.arguments.__annotations_cache__ -ast.comprehension.__annotations_cache__ -ast.keyword.__annotations_cache__ -ast.match_case.__annotations_cache__ -ast.withitem.__annotations_cache__ asyncio.__all__ asyncio.FrameCallGraphEntry asyncio.FutureCallGraph @@ -293,7 +70,6 @@ builtins.int.__round__ builtins.memoryview.__class_getitem__ builtins.staticmethod.__annotate__ builtins.staticmethod.__class_getitem__ -builtins.type.__annotate__ code.compile_command codeop.compile_command compression @@ -337,8 +113,6 @@ dataclasses.Field.doc dataclasses.field dataclasses.make_dataclass decimal.Decimal.from_number -decimal.DecimalTuple.__annotate_func__ -decimal.DecimalTuple.__annotations_cache__ decimal.IEEE_CONTEXT_MAX_BITS dis.Bytecode.__init__ dis.Instruction.make @@ -376,25 +150,8 @@ importlib.abc.Traversable importlib.abc.TraversableResources importlib.machinery.__all__ importlib.machinery.AppleFrameworkLoader -importlib.metadata.PackageMetadata.__annotate_func__ -importlib.metadata.PackageMetadata.__annotations_cache__ -importlib.metadata._meta.PackageMetadata.__annotate_func__ -importlib.metadata._meta.PackageMetadata.__annotations_cache__ -importlib.metadata._meta.SimplePath.__annotate_func__ -importlib.metadata._meta.SimplePath.__annotations_cache__ -importlib.resources.abc.Traversable.__annotate_func__ -importlib.resources.abc.Traversable.__annotations_cache__ importlib.util.__all__ importlib.util.Loader -inspect.__all__ -inspect.CO_HAS_DOCSTRING -inspect.CO_METHOD -inspect.Signature.format -inspect.Signature.from_callable -inspect.formatannotation -inspect.get_annotations -inspect.ispackage -inspect.signature io.__all__ io.Reader io.Writer @@ -439,10 +196,6 @@ 
pdb.set_trace pkgutil.__all__ pkgutil.find_loader pkgutil.get_loader -pstats.FunctionProfile.__annotate_func__ -pstats.FunctionProfile.__annotations_cache__ -pstats.StatsProfile.__annotate_func__ -pstats.StatsProfile.__annotations_cache__ pyexpat.errors.XML_ERROR_NOT_STARTED shutil.__all__ socket.__all__ @@ -480,81 +233,6 @@ turtle.poly turtle.save types.CodeType.co_branches types.FrameType.f_generator -types.FunctionType.__annotate__ -types.LambdaType.__annotate__ -types.ModuleType.__annotate__ -types.UnionType.__class_getitem__ -types.UnionType.__mro_entries__ -types.UnionType.__name__ -types.UnionType.__qualname__ -typing.__all__ -typing.ForwardRef.__arg__ -typing.ForwardRef.__ast_node__ -typing.ForwardRef.__cell__ -typing.ForwardRef.__code__ -typing.ForwardRef.__extra_names__ -typing.ForwardRef.__forward_arg__ -typing.ForwardRef.__forward_code__ -typing.ForwardRef.__globals__ -typing.ForwardRef.__init__ -typing.ForwardRef.__init_subclass__ -typing.ForwardRef.__owner__ -typing.ForwardRef.__stringifier_dict__ -typing.ForwardRef.evaluate -typing.ParamSpec.evaluate_default -typing.SupportsAbs.__annotate_func__ -typing.SupportsAbs.__annotations_cache__ -typing.SupportsBytes.__annotate_func__ -typing.SupportsBytes.__annotations_cache__ -typing.SupportsComplex.__annotate_func__ -typing.SupportsComplex.__annotations_cache__ -typing.SupportsFloat.__annotate_func__ -typing.SupportsFloat.__annotations_cache__ -typing.SupportsIndex.__annotate_func__ -typing.SupportsIndex.__annotations_cache__ -typing.SupportsInt.__annotate_func__ -typing.SupportsInt.__annotations_cache__ -typing.SupportsRound.__annotate_func__ -typing.SupportsRound.__annotations_cache__ -typing.TypeAliasType.evaluate_value -typing.TypeVar.evaluate_bound -typing.TypeVar.evaluate_constraints -typing.TypeVar.evaluate_default -typing.TypeVarTuple.evaluate_default -typing.Union -typing.evaluate_forward_ref -typing.get_type_hints -typing_extensions.ForwardRef.__arg__ -typing_extensions.ForwardRef.__ast_node__ -typing_extensions.ForwardRef.__cell__ -typing_extensions.ForwardRef.__code__ -typing_extensions.ForwardRef.__extra_names__ -typing_extensions.ForwardRef.__forward_arg__ -typing_extensions.ForwardRef.__forward_code__ -typing_extensions.ForwardRef.__globals__ -typing_extensions.ForwardRef.__init__ -typing_extensions.ForwardRef.__init_subclass__ -typing_extensions.ForwardRef.__owner__ -typing_extensions.ForwardRef.__stringifier_dict__ -typing_extensions.ForwardRef.evaluate -typing_extensions.SupportsAbs.__annotate_func__ -typing_extensions.SupportsAbs.__annotations_cache__ -typing_extensions.SupportsBytes.__annotate_func__ -typing_extensions.SupportsBytes.__annotations_cache__ -typing_extensions.SupportsComplex.__annotate_func__ -typing_extensions.SupportsComplex.__annotations_cache__ -typing_extensions.SupportsFloat.__annotate_func__ -typing_extensions.SupportsFloat.__annotations_cache__ -typing_extensions.SupportsIndex.__annotate_func__ -typing_extensions.SupportsIndex.__annotations_cache__ -typing_extensions.SupportsInt.__annotate_func__ -typing_extensions.SupportsInt.__annotations_cache__ -typing_extensions.SupportsRound.__annotate_func__ -typing_extensions.SupportsRound.__annotations_cache__ -typing_extensions.TypeAliasType.evaluate_value -typing_extensions.Union -typing_extensions.evaluate_forward_ref -typing_extensions.get_type_hints unittest.TestCase.assertEndsWith unittest.TestCase.assertHasAttr unittest.TestCase.assertIsSubclass @@ -576,21 +254,10 @@ urllib.request.FancyURLopener urllib.request.URLopener 
urllib.request.pathname2url urllib.request.url2pathname -wsgiref.types.ErrorStream.__annotate_func__ -wsgiref.types.ErrorStream.__annotations_cache__ -wsgiref.types.FileWrapper.__annotate_func__ -wsgiref.types.FileWrapper.__annotations_cache__ -wsgiref.types.InputStream.__annotate_func__ -wsgiref.types.InputStream.__annotations_cache__ -wsgiref.types.StartResponse.__annotate_func__ -wsgiref.types.StartResponse.__annotations_cache__ -wsgiref.types._Readable.__annotate_func__ -wsgiref.types._Readable.__annotations_cache__ xml.parsers.expat.errors.XML_ERROR_NOT_STARTED xml.sax.__all__ xml.sax.InputSource zipfile.ZipFile.data_offset -zipfile._path.glob.Translator.__annotate_func__ # ========================= @@ -598,6 +265,14 @@ zipfile._path.glob.Translator.__annotate_func__ # ========================= +# Union and UnionType are aliases in 3.14 but type checkers need some changes +typing_extensions.Union +typing.Union +types.UnionType.__class_getitem__ +types.UnionType.__mro_entries__ +types.UnionType.__name__ +types.UnionType.__qualname__ + # ==================================== # Pre-existing errors from Python 3.13 # ==================================== @@ -650,6 +325,24 @@ typing(_extensions)?\.IO\.truncate typing(_extensions)?\.IO\.write typing(_extensions)?\.IO\.writelines +# ============================================================= +# Allowlist entries that cannot or should not be fixed; >= 3.14 +# ============================================================= + +# Internal annotations machinery +.*\.__annotate_func__ +.*\.__annotations_cache__ + +# Undocumented private attributes +.*\.ForwardRef\.__arg__ +.*\.ForwardRef\.__ast_node__ +.*\.ForwardRef\.__cell__ +.*\.ForwardRef\.__code__ +.*\.ForwardRef\.__extra_names__ +.*\.ForwardRef\.__globals__ +.*\.ForwardRef\.__init_subclass__ +.*\.ForwardRef\.__owner__ +.*\.ForwardRef\.__stringifier_dict__ # ============================================================= # Allowlist entries that cannot or should not be fixed; >= 3.13 diff --git a/stdlib/VERSIONS b/stdlib/VERSIONS index fec56ce59e36..717cf7b4d71a 100644 --- a/stdlib/VERSIONS +++ b/stdlib/VERSIONS @@ -78,6 +78,7 @@ _weakrefset: 3.0- _winapi: 3.3- abc: 3.0- aifc: 3.0-3.12 +annotationlib: 3.14- antigravity: 3.0- argparse: 3.0- array: 3.0- diff --git a/stdlib/_typeshed/__init__.pyi b/stdlib/_typeshed/__init__.pyi index 7ed8a079ea09..c37d55a7d9ec 100644 --- a/stdlib/_typeshed/__init__.pyi +++ b/stdlib/_typeshed/__init__.pyi @@ -367,3 +367,14 @@ else: from enum import Enum class StrEnum(str, Enum): ... + +# Objects that appear in annotations or in type expressions. +# Similar to PEP 747's TypeForm but a little broader. 
+AnnotationForm: TypeAlias = Any + +if sys.version_info >= (3, 14): + from annotationlib import Format + + # These return annotations, which can be arbitrary objects + AnnotateFunc: TypeAlias = Callable[[Format], dict[str, AnnotationForm]] + EvaluateFunc: TypeAlias = Callable[[Format], AnnotationForm] diff --git a/stdlib/annotationlib.pyi b/stdlib/annotationlib.pyi new file mode 100644 index 000000000000..7590c632d785 --- /dev/null +++ b/stdlib/annotationlib.pyi @@ -0,0 +1,132 @@ +import sys +from typing import Literal + +if sys.version_info >= (3, 14): + import enum + import types + from _typeshed import AnnotateFunc, AnnotationForm, EvaluateFunc, SupportsItems + from collections.abc import Mapping + from typing import Any, ParamSpec, TypeVar, TypeVarTuple, final, overload + from warnings import deprecated + + __all__ = [ + "Format", + "ForwardRef", + "call_annotate_function", + "call_evaluate_function", + "get_annotate_from_class_namespace", + "get_annotations", + "annotations_to_string", + "type_repr", + ] + + class Format(enum.IntEnum): + VALUE = 1 + VALUE_WITH_FAKE_GLOBALS = 2 + FORWARDREF = 3 + STRING = 4 + + @final + class ForwardRef: + __forward_is_argument__: bool + __forward_is_class__: bool + __forward_module__: str | None + def __init__( + self, arg: str, *, module: str | None = None, owner: object = None, is_argument: bool = True, is_class: bool = False + ) -> None: ... + @overload + def evaluate( + self, + *, + globals: dict[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, + owner: object = None, + format: Literal[Format.STRING], + ) -> str: ... + @overload + def evaluate( + self, + *, + globals: dict[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, + owner: object = None, + format: Literal[Format.FORWARDREF], + ) -> AnnotationForm | ForwardRef: ... + @overload + def evaluate( + self, + *, + globals: dict[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, + owner: object = None, + format: Format = Format.VALUE, # noqa: Y011 + ) -> AnnotationForm: ... + @deprecated("Use ForwardRef.evaluate() or typing.evaluate_forward_ref() instead.") + def _evaluate( + self, + globalns: dict[str, Any] | None, + localns: Mapping[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = ..., + *, + recursive_guard: frozenset[str], + ) -> AnnotationForm: ... + @property + def __forward_arg__(self) -> str: ... + @property + def __forward_code__(self) -> types.CodeType: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def __or__(self, other: Any) -> types.UnionType: ... + def __ror__(self, other: Any) -> types.UnionType: ... + + @overload + def call_evaluate_function(evaluate: EvaluateFunc, format: Literal[Format.STRING], *, owner: object = None) -> str: ... + @overload + def call_evaluate_function( + evaluate: EvaluateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None + ) -> AnnotationForm | ForwardRef: ... + @overload + def call_evaluate_function(evaluate: EvaluateFunc, format: Format, *, owner: object = None) -> AnnotationForm: ... + @overload + def call_annotate_function( + annotate: AnnotateFunc, format: Literal[Format.STRING], *, owner: object = None + ) -> dict[str, str]: ... 
+ @overload + def call_annotate_function( + annotate: AnnotateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None + ) -> dict[str, AnnotationForm | ForwardRef]: ... + @overload + def call_annotate_function(annotate: AnnotateFunc, format: Format, *, owner: object = None) -> dict[str, AnnotationForm]: ... + def get_annotate_from_class_namespace(obj: Mapping[str, object]) -> AnnotateFunc | None: ... + @overload + def get_annotations( + obj: Any, # any object with __annotations__ or __annotate__ + *, + globals: dict[str, object] | None = None, + locals: Mapping[str, object] | None = None, + eval_str: bool = False, + format: Literal[Format.STRING], + ) -> dict[str, str]: ... + @overload + def get_annotations( + obj: Any, + *, + globals: dict[str, object] | None = None, + locals: Mapping[str, object] | None = None, + eval_str: bool = False, + format: Literal[Format.FORWARDREF], + ) -> dict[str, AnnotationForm | ForwardRef]: ... + @overload + def get_annotations( + obj: Any, + *, + globals: dict[str, object] | None = None, + locals: Mapping[str, object] | None = None, + eval_str: bool = False, + format: Format = Format.VALUE, # noqa: Y011 + ) -> dict[str, AnnotationForm]: ... + def type_repr(value: object) -> str: ... + def annotations_to_string(annotations: SupportsItems[str, object]) -> dict[str, str]: ... diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index 2091a76f8da3..5a1d4dd8afb9 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -5,6 +5,7 @@ import sys import types from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import ( + AnnotationForm, AnyStr_co, ConvertibleToFloat, ConvertibleToInt, @@ -72,6 +73,9 @@ from typing_extensions import ( # noqa: Y023 deprecated, ) +if sys.version_info >= (3, 14): + from _typeshed import AnnotateFunc + _T = TypeVar("_T") _I = TypeVar("_I", default=int) _T_co = TypeVar("_T_co", covariant=True) @@ -215,6 +219,9 @@ class type: def __ror__(self, value: Any, /) -> types.UnionType: ... if sys.version_info >= (3, 12): __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] + __annotations__: dict[str, AnnotationForm] + if sys.version_info >= (3, 14): + __annotate__: AnnotateFunc | None class super: @overload @@ -1017,7 +1024,9 @@ class function: def __globals__(self) -> dict[str, Any]: ... 
__name__: str __qualname__: str - __annotations__: dict[str, Any] + __annotations__: dict[str, AnnotationForm] + if sys.version_info >= (3, 14): + __annotate__: AnnotateFunc | None __kwdefaults__: dict[str, Any] if sys.version_info >= (3, 10): @property diff --git a/stdlib/inspect.pyi b/stdlib/inspect.pyi index c525418c104b..e19c2a634aa0 100644 --- a/stdlib/inspect.pyi +++ b/stdlib/inspect.pyi @@ -2,7 +2,7 @@ import dis import enum import sys import types -from _typeshed import StrPath +from _typeshed import AnnotationForm, StrPath from collections import OrderedDict from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine, Generator, Mapping, Sequence, Set as AbstractSet from types import ( @@ -28,6 +28,9 @@ from types import ( from typing import Any, ClassVar, Final, Literal, NamedTuple, Protocol, TypeVar, overload from typing_extensions import ParamSpec, Self, TypeAlias, TypeGuard, TypeIs +if sys.version_info >= (3, 14): + from annotationlib import Format + if sys.version_info >= (3, 11): __all__ = [ "ArgInfo", @@ -139,6 +142,8 @@ if sys.version_info >= (3, 11): "getasyncgenstate", "BufferFlags", ] + if sys.version_info >= (3, 14): + __all__ += ["CO_HAS_DOCSTRING", "CO_METHOD", "ispackage"] _P = ParamSpec("_P") _T = TypeVar("_T") @@ -172,6 +177,9 @@ CO_COROUTINE: Final = 128 CO_ITERABLE_COROUTINE: Final = 256 CO_ASYNC_GENERATOR: Final = 512 TPFLAGS_IS_ABSTRACT: Final = 1048576 +if sys.version_info >= (3, 14): + CO_HAS_DOCSTRING: Final = 67108864 + CO_METHOD: Final = 134217728 modulesbyfile: dict[str, Any] @@ -199,6 +207,11 @@ def getmodulename(path: StrPath) -> str | None: ... def ismodule(object: object) -> TypeIs[ModuleType]: ... def isclass(object: object) -> TypeIs[type[Any]]: ... def ismethod(object: object) -> TypeIs[MethodType]: ... + +if sys.version_info >= (3, 14): + # Not TypeIs because it does not return True for all modules + def ispackage(object: object) -> TypeGuard[ModuleType]: ... + def isfunction(object: object) -> TypeIs[FunctionType]: ... if sys.version_info >= (3, 12): @@ -294,7 +307,18 @@ _IntrospectableCallable: TypeAlias = Callable[..., Any] # # Introspecting callables with the Signature object # -if sys.version_info >= (3, 10): +if sys.version_info >= (3, 14): + def signature( + obj: _IntrospectableCallable, + *, + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, + annotation_format: Format = Format.VALUE, # noqa: Y011 + ) -> Signature: ... + +elif sys.version_info >= (3, 10): def signature( obj: _IntrospectableCallable, *, @@ -323,7 +347,19 @@ class Signature: def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: ... def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: ... __replace__ = replace - if sys.version_info >= (3, 10): + if sys.version_info >= (3, 14): + @classmethod + def from_callable( + cls, + obj: _IntrospectableCallable, + *, + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, + annotation_format: Format = Format.VALUE, # noqa: Y011 + ) -> Self: ... + elif sys.version_info >= (3, 10): @classmethod def from_callable( cls, @@ -337,20 +373,24 @@ class Signature: else: @classmethod def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... 
- if sys.version_info >= (3, 13): + if sys.version_info >= (3, 14): + def format(self, *, max_width: int | None = None, quote_annotation_strings: bool = True) -> str: ... + elif sys.version_info >= (3, 13): def format(self, *, max_width: int | None = None) -> str: ... def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... -if sys.version_info >= (3, 10): +if sys.version_info >= (3, 14): + from annotationlib import get_annotations as get_annotations +elif sys.version_info >= (3, 10): def get_annotations( obj: Callable[..., object] | type[object] | ModuleType, # any callable, class, or module *, globals: Mapping[str, Any] | None = None, # value types depend on the key locals: Mapping[str, Any] | None = None, # value types depend on the key eval_str: bool = False, - ) -> dict[str, Any]: ... # values are type expressions + ) -> dict[str, AnnotationForm]: ... # values are type expressions # The name is the same as the enum's name in CPython class _ParameterKind(enum.IntEnum): @@ -461,7 +501,13 @@ class ArgInfo(NamedTuple): locals: dict[str, Any] def getargvalues(frame: FrameType) -> ArgInfo: ... -def formatannotation(annotation: object, base_module: str | None = None) -> str: ... + +if sys.version_info >= (3, 14): + def formatannotation(annotation: object, base_module: str | None = None, *, quote_annotation_strings: bool = True) -> str: ... + +else: + def formatannotation(annotation: object, base_module: str | None = None) -> str: ... + def formatannotationrelativeto(object: object) -> Callable[[object], str]: ... if sys.version_info < (3, 11): diff --git a/stdlib/types.pyi b/stdlib/types.pyi index fe443be27121..1163d71d2c95 100644 --- a/stdlib/types.pyi +++ b/stdlib/types.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import MaybeNone, SupportsKeysAndGetItem +from _typeshed import AnnotationForm, MaybeNone, SupportsKeysAndGetItem from _typeshed.importlib import LoaderProtocol from collections.abc import ( AsyncGenerator, @@ -19,6 +19,9 @@ from importlib.machinery import ModuleSpec from typing import Any, ClassVar, Literal, TypeVar, final, overload from typing_extensions import ParamSpec, Self, TypeAliasType, TypeVarTuple, deprecated +if sys.version_info >= (3, 14): + from _typeshed import AnnotateFunc + __all__ = [ "FunctionType", "LambdaType", @@ -77,7 +80,9 @@ class FunctionType: def __globals__(self) -> dict[str, Any]: ... __name__: str __qualname__: str - __annotations__: dict[str, Any] + __annotations__: dict[str, AnnotationForm] + if sys.version_info >= (3, 14): + __annotate__: AnnotateFunc | None __kwdefaults__: dict[str, Any] | None if sys.version_info >= (3, 10): @property @@ -352,6 +357,10 @@ class ModuleType: # Redeclaring `__doc__` here helps some type checkers understand that `__doc__` is available # as an implicit global in all modules, similar to `__name__`, `__file__`, `__spec__`, etc. __doc__: str | None + __annotations__: dict[str, AnnotationForm] + if sys.version_info >= (3, 14): + __annotate__: AnnotateFunc | None + def __init__(self, name: str, doc: str | None = ...) -> None: ... 
# __getattr__ doesn't exist at runtime, # but having it here in typeshed makes dynamic imports diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi index 189ff3e89720..6b6c2654d247 100644 --- a/stdlib/typing.pyi +++ b/stdlib/typing.pyi @@ -6,7 +6,7 @@ import collections # noqa: F401 # pyright: ignore[reportUnusedImport] import sys import typing_extensions from _collections_abc import dict_items, dict_keys, dict_values -from _typeshed import IdentityFunction, ReadableBuffer, SupportsKeysAndGetItem +from _typeshed import AnnotationForm, IdentityFunction, ReadableBuffer, SupportsKeysAndGetItem from abc import ABCMeta, abstractmethod from re import Match as Match, Pattern as Pattern from types import ( @@ -23,6 +23,11 @@ from types import ( ) from typing_extensions import Never as _Never, ParamSpec as _ParamSpec, deprecated +if sys.version_info >= (3, 14): + from _typeshed import EvaluateFunc + + from annotationlib import Format + if sys.version_info >= (3, 10): from types import UnionType @@ -108,6 +113,9 @@ __all__ = [ if sys.version_info < (3, 14): __all__ += ["ByteString"] +if sys.version_info >= (3, 14): + __all__ += ["evaluate_forward_ref"] + if sys.version_info >= (3, 10): __all__ += ["Concatenate", "ParamSpec", "ParamSpecArgs", "ParamSpecKwargs", "TypeAlias", "TypeGuard", "is_typeddict"] @@ -143,9 +151,9 @@ class TypeVar: @property def __name__(self) -> str: ... @property - def __bound__(self) -> Any | None: ... + def __bound__(self) -> AnnotationForm | None: ... @property - def __constraints__(self) -> tuple[Any, ...]: ... + def __constraints__(self) -> tuple[AnnotationForm, ...]: ... @property def __covariant__(self) -> bool: ... @property @@ -155,44 +163,61 @@ class TypeVar: def __infer_variance__(self) -> bool: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> Any: ... + def __default__(self) -> AnnotationForm: ... if sys.version_info >= (3, 13): def __new__( cls, name: str, - *constraints: Any, - bound: Any | None = None, + *constraints: AnnotationForm, + bound: AnnotationForm | None = None, contravariant: bool = False, covariant: bool = False, infer_variance: bool = False, - default: Any = ..., + default: AnnotationForm = ..., ) -> Self: ... elif sys.version_info >= (3, 12): def __new__( cls, name: str, - *constraints: Any, - bound: Any | None = None, + *constraints: AnnotationForm, + bound: AnnotationForm | None = None, covariant: bool = False, contravariant: bool = False, infer_variance: bool = False, ) -> Self: ... elif sys.version_info >= (3, 11): def __new__( - cls, name: str, *constraints: Any, bound: Any | None = None, covariant: bool = False, contravariant: bool = False + cls, + name: str, + *constraints: AnnotationForm, + bound: AnnotationForm | None = None, + covariant: bool = False, + contravariant: bool = False, ) -> Self: ... else: def __init__( - self, name: str, *constraints: Any, bound: Any | None = None, covariant: bool = False, contravariant: bool = False + self, + name: str, + *constraints: AnnotationForm, + bound: AnnotationForm | None = None, + covariant: bool = False, + contravariant: bool = False, ) -> None: ... if sys.version_info >= (3, 10): - def __or__(self, right: Any) -> _SpecialForm: ... - def __ror__(self, left: Any) -> _SpecialForm: ... + def __or__(self, right: AnnotationForm) -> _SpecialForm: ... + def __ror__(self, left: AnnotationForm) -> _SpecialForm: ... if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any) -> Any: ... 
if sys.version_info >= (3, 13): def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... def has_default(self) -> bool: ... + if sys.version_info >= (3, 14): + @property + def evaluate_bound(self) -> EvaluateFunc | None: ... + @property + def evaluate_constraints(self) -> EvaluateFunc | None: ... + @property + def evaluate_default(self) -> EvaluateFunc | None: ... # Used for an undocumented mypy feature. Does not exist at runtime. _promote = object() @@ -234,10 +259,10 @@ if sys.version_info >= (3, 11): def __name__(self) -> str: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> Any: ... + def __default__(self) -> AnnotationForm: ... def has_default(self) -> bool: ... if sys.version_info >= (3, 13): - def __new__(cls, name: str, *, default: Any = ...) -> Self: ... + def __new__(cls, name: str, *, default: AnnotationForm = ...) -> Self: ... elif sys.version_info >= (3, 12): def __new__(cls, name: str) -> Self: ... else: @@ -246,6 +271,9 @@ if sys.version_info >= (3, 11): def __iter__(self) -> Any: ... def __typing_subst__(self, arg: Never) -> Never: ... def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... + if sys.version_info >= (3, 14): + @property + def evaluate_default(self) -> EvaluateFunc | None: ... if sys.version_info >= (3, 10): @final @@ -277,7 +305,7 @@ if sys.version_info >= (3, 10): @property def __name__(self) -> str: ... @property - def __bound__(self) -> Any | None: ... + def __bound__(self) -> AnnotationForm | None: ... @property def __covariant__(self) -> bool: ... @property @@ -287,35 +315,35 @@ if sys.version_info >= (3, 10): def __infer_variance__(self) -> bool: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> Any: ... + def __default__(self) -> AnnotationForm: ... if sys.version_info >= (3, 13): def __new__( cls, name: str, *, - bound: Any | None = None, + bound: AnnotationForm | None = None, contravariant: bool = False, covariant: bool = False, infer_variance: bool = False, - default: Any = ..., + default: AnnotationForm = ..., ) -> Self: ... elif sys.version_info >= (3, 12): def __new__( cls, name: str, *, - bound: Any | None = None, + bound: AnnotationForm | None = None, contravariant: bool = False, covariant: bool = False, infer_variance: bool = False, ) -> Self: ... elif sys.version_info >= (3, 11): def __new__( - cls, name: str, *, bound: Any | None = None, contravariant: bool = False, covariant: bool = False + cls, name: str, *, bound: AnnotationForm | None = None, contravariant: bool = False, covariant: bool = False ) -> Self: ... else: def __init__( - self, name: str, *, bound: Any | None = None, contravariant: bool = False, covariant: bool = False + self, name: str, *, bound: AnnotationForm | None = None, contravariant: bool = False, covariant: bool = False ) -> None: ... @property @@ -330,13 +358,16 @@ if sys.version_info >= (3, 10): def __ror__(self, left: Any) -> _SpecialForm: ... if sys.version_info >= (3, 13): def has_default(self) -> bool: ... + if sys.version_info >= (3, 14): + @property + def evaluate_default(self) -> EvaluateFunc | None: ... Concatenate: _SpecialForm TypeAlias: _SpecialForm TypeGuard: _SpecialForm class NewType: - def __init__(self, name: str, tp: Any) -> None: ... + def __init__(self, name: str, tp: AnnotationForm) -> None: ... if sys.version_info >= (3, 11): @staticmethod def __call__(x: _T, /) -> _T: ... 
@@ -860,13 +891,25 @@ _get_type_hints_obj_allowed_types: typing_extensions.TypeAlias = ( # noqa: Y042 | MethodDescriptorType ) -def get_type_hints( - obj: _get_type_hints_obj_allowed_types, - globalns: dict[str, Any] | None = None, - localns: Mapping[str, Any] | None = None, - include_extras: bool = False, -) -> dict[str, Any]: ... -def get_args(tp: Any) -> tuple[Any, ...]: ... +if sys.version_info >= (3, 14): + def get_type_hints( + obj: _get_type_hints_obj_allowed_types, + globalns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, + include_extras: bool = False, + *, + format: Format | None = None, + ) -> dict[str, AnnotationForm]: ... + +else: + def get_type_hints( + obj: _get_type_hints_obj_allowed_types, + globalns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, + include_extras: bool = False, + ) -> dict[str, AnnotationForm]: ... + +def get_args(tp: AnnotationForm) -> tuple[AnnotationForm, ...]: ... if sys.version_info >= (3, 10): @overload @@ -877,7 +920,7 @@ if sys.version_info >= (3, 10): @overload def get_origin(tp: GenericAlias) -> type: ... @overload -def get_origin(tp: Any) -> Any | None: ... +def get_origin(tp: AnnotationForm) -> AnnotationForm | None: ... @overload def cast(typ: type[_T], val: Any) -> _T: ... @overload @@ -888,7 +931,7 @@ def cast(typ: object, val: Any) -> Any: ... if sys.version_info >= (3, 11): def reveal_type(obj: _T, /) -> _T: ... def assert_never(arg: Never, /) -> Never: ... - def assert_type(val: _T, typ: Any, /) -> _T: ... + def assert_type(val: _T, typ: AnnotationForm, /) -> _T: ... def clear_overloads() -> None: ... def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... def dataclass_transform( @@ -962,56 +1005,70 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): # supposedly incompatible definitions of __or__ and __ior__ def __ior__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... # type: ignore[misc] -@final -class ForwardRef(_Final): - __forward_arg__: str - __forward_code__: CodeType - __forward_evaluated__: bool - __forward_value__: Any | None - __forward_is_argument__: bool - __forward_is_class__: bool - __forward_module__: Any | None +if sys.version_info >= (3, 14): + from annotationlib import ForwardRef as ForwardRef - def __init__(self, arg: str, is_argument: bool = True, module: Any | None = None, *, is_class: bool = False) -> None: ... + def evaluate_forward_ref( + forward_ref: ForwardRef, + *, + owner: object = None, + globals: dict[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + type_params: tuple[TypeVar, ParamSpec, TypeVarTuple] | None = None, + format: Format | None = None, + ) -> AnnotationForm: ... - if sys.version_info >= (3, 13): - @overload - @deprecated( - "Failing to pass a value to the 'type_params' parameter of ForwardRef._evaluate() is deprecated, " - "as it leads to incorrect behaviour when evaluating a stringified annotation " - "that references a PEP 695 type parameter. It will be disallowed in Python 3.15." - ) - def _evaluate( - self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, *, recursive_guard: frozenset[str] - ) -> Any | None: ... - @overload - def _evaluate( - self, - globalns: dict[str, Any] | None, - localns: Mapping[str, Any] | None, - type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...], - *, - recursive_guard: frozenset[str], - ) -> Any | None: ... 
- elif sys.version_info >= (3, 12): - def _evaluate( - self, - globalns: dict[str, Any] | None, - localns: Mapping[str, Any] | None, - type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, - *, - recursive_guard: frozenset[str], - ) -> Any | None: ... - else: - def _evaluate( - self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, recursive_guard: frozenset[str] - ) -> Any | None: ... +else: + @final + class ForwardRef(_Final): + __forward_arg__: str + __forward_code__: CodeType + __forward_evaluated__: bool + __forward_value__: AnnotationForm | None + __forward_is_argument__: bool + __forward_is_class__: bool + __forward_module__: Any | None - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - if sys.version_info >= (3, 11): - def __or__(self, other: Any) -> _SpecialForm: ... - def __ror__(self, other: Any) -> _SpecialForm: ... + def __init__(self, arg: str, is_argument: bool = True, module: Any | None = None, *, is_class: bool = False) -> None: ... + + if sys.version_info >= (3, 13): + @overload + @deprecated( + "Failing to pass a value to the 'type_params' parameter of ForwardRef._evaluate() is deprecated, " + "as it leads to incorrect behaviour when evaluating a stringified annotation " + "that references a PEP 695 type parameter. It will be disallowed in Python 3.15." + ) + def _evaluate( + self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, *, recursive_guard: frozenset[str] + ) -> AnnotationForm | None: ... + @overload + def _evaluate( + self, + globalns: dict[str, Any] | None, + localns: Mapping[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...], + *, + recursive_guard: frozenset[str], + ) -> AnnotationForm | None: ... + elif sys.version_info >= (3, 12): + def _evaluate( + self, + globalns: dict[str, Any] | None, + localns: Mapping[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, + *, + recursive_guard: frozenset[str], + ) -> AnnotationForm | None: ... + else: + def _evaluate( + self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, recursive_guard: frozenset[str] + ) -> AnnotationForm | None: ... + + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + if sys.version_info >= (3, 11): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... if sys.version_info >= (3, 10): def is_typeddict(tp: object) -> bool: ... @@ -1024,19 +1081,22 @@ if sys.version_info >= (3, 12): class TypeAliasType: def __new__(cls, name: str, value: Any, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = ()) -> Self: ... @property - def __value__(self) -> Any: ... + def __value__(self) -> AnnotationForm: ... @property def __type_params__(self) -> tuple[TypeVar | ParamSpec | TypeVarTuple, ...]: ... @property - def __parameters__(self) -> tuple[Any, ...]: ... + def __parameters__(self) -> tuple[AnnotationForm, ...]: ... @property def __name__(self) -> str: ... # It's writable on types, but not on instances of TypeAliasType. @property def __module__(self) -> str | None: ... # type: ignore[override] - def __getitem__(self, parameters: Any) -> GenericAlias: ... + def __getitem__(self, parameters: AnnotationForm) -> GenericAlias: ... def __or__(self, right: Any) -> _SpecialForm: ... def __ror__(self, left: Any) -> _SpecialForm: ... + if sys.version_info >= (3, 14): + @property + def evaluate_value(self) -> EvaluateFunc: ... 
if sys.version_info >= (3, 13): def is_protocol(tp: type, /) -> bool: ... diff --git a/stdlib/typing_extensions.pyi b/stdlib/typing_extensions.pyi index bad5fae880c0..37f8e8ba6a4b 100644 --- a/stdlib/typing_extensions.pyi +++ b/stdlib/typing_extensions.pyi @@ -2,7 +2,7 @@ import abc import enum import sys from _collections_abc import dict_items, dict_keys, dict_values -from _typeshed import IdentityFunction, Incomplete, Unused +from _typeshed import AnnotationForm, IdentityFunction, Incomplete, Unused from collections.abc import ( AsyncGenerator as AsyncGenerator, AsyncIterable as AsyncIterable, @@ -241,7 +241,7 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): __mutable_keys__: ClassVar[frozenset[str]] # PEP 728 __closed__: ClassVar[bool] - __extra_items__: ClassVar[Any] + __extra_items__: ClassVar[AnnotationForm] def copy(self) -> Self: ... # Using Never so that only calls using mypy plugin hook that specialize the signature # can go through. @@ -267,13 +267,14 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): OrderedDict = _Alias() -def get_type_hints( - obj: Callable[..., Any], - globalns: dict[str, Any] | None = None, - localns: Mapping[str, Any] | None = None, - include_extras: bool = False, -) -> dict[str, Any]: ... -def get_args(tp: Any) -> tuple[Any, ...]: ... +if sys.version_info >= (3, 13): + from typing import get_type_hints as get_type_hints +else: + def get_type_hints( + obj: Any, globalns: dict[str, Any] | None = None, localns: Mapping[str, Any] | None = None, include_extras: bool = False + ) -> dict[str, AnnotationForm]: ... + +def get_args(tp: AnnotationForm) -> tuple[AnnotationForm, ...]: ... if sys.version_info >= (3, 10): @overload @@ -284,7 +285,7 @@ def get_origin(tp: GenericAlias) -> type: ... @overload def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... @overload -def get_origin(tp: Any) -> Any | None: ... +def get_origin(tp: AnnotationForm) -> AnnotationForm | None: ... Annotated: _SpecialForm _AnnotatedAlias: Any # undocumented @@ -340,7 +341,7 @@ else: Never: _SpecialForm def reveal_type(obj: _T, /) -> _T: ... def assert_never(arg: Never, /) -> Never: ... - def assert_type(val: _T, typ: Any, /) -> _T: ... + def assert_type(val: _T, typ: AnnotationForm, /) -> _T: ... def clear_overloads() -> None: ... def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... @@ -373,7 +374,7 @@ else: def _replace(self, **kwargs: Any) -> Self: ... class NewType: - def __init__(self, name: str, tp: Any) -> None: ... + def __init__(self, name: str, tp: AnnotationForm) -> None: ... def __call__(self, obj: _T, /) -> _T: ... __supertype__: type | NewType if sys.version_info >= (3, 10): @@ -480,9 +481,9 @@ else: @property def __name__(self) -> str: ... @property - def __bound__(self) -> Any | None: ... + def __bound__(self) -> AnnotationForm | None: ... @property - def __constraints__(self) -> tuple[Any, ...]: ... + def __constraints__(self) -> tuple[AnnotationForm, ...]: ... @property def __covariant__(self) -> bool: ... @property @@ -490,15 +491,15 @@ else: @property def __infer_variance__(self) -> bool: ... @property - def __default__(self) -> Any: ... + def __default__(self) -> AnnotationForm: ... def __init__( self, name: str, - *constraints: Any, - bound: Any | None = None, + *constraints: AnnotationForm, + bound: AnnotationForm | None = None, covariant: bool = False, contravariant: bool = False, - default: Any = ..., + default: AnnotationForm = ..., infer_variance: bool = False, ) -> None: ... 
def has_default(self) -> bool: ... @@ -514,7 +515,7 @@ else: @property def __name__(self) -> str: ... @property - def __bound__(self) -> Any | None: ... + def __bound__(self) -> AnnotationForm | None: ... @property def __covariant__(self) -> bool: ... @property @@ -522,15 +523,15 @@ else: @property def __infer_variance__(self) -> bool: ... @property - def __default__(self) -> Any: ... + def __default__(self) -> AnnotationForm: ... def __init__( self, name: str, *, - bound: None | type[Any] | str = None, + bound: None | AnnotationForm | str = None, contravariant: bool = False, covariant: bool = False, - default: Any = ..., + default: AnnotationForm = ..., ) -> None: ... @property def args(self) -> ParamSpecArgs: ... @@ -547,8 +548,8 @@ else: @property def __name__(self) -> str: ... @property - def __default__(self) -> Any: ... - def __init__(self, name: str, *, default: Any = ...) -> None: ... + def __default__(self) -> AnnotationForm: ... + def __init__(self, name: str, *, default: AnnotationForm = ...) -> None: ... def __iter__(self) -> Any: ... # Unpack[Self] def has_default(self) -> bool: ... def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... @@ -563,23 +564,23 @@ else: @final class TypeAliasType: def __init__( - self, name: str, value: Any, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () - ) -> None: ... # value is a type expression + self, name: str, value: AnnotationForm, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () + ) -> None: ... @property - def __value__(self) -> Any: ... # a type expression + def __value__(self) -> AnnotationForm: ... @property def __type_params__(self) -> tuple[TypeVar | ParamSpec | TypeVarTuple, ...]: ... @property # `__parameters__` can include special forms if a `TypeVarTuple` was # passed as a `type_params` element to the constructor method. - def __parameters__(self) -> tuple[TypeVar | ParamSpec | Any, ...]: ... + def __parameters__(self) -> tuple[TypeVar | ParamSpec | AnnotationForm, ...]: ... @property def __name__(self) -> str: ... # It's writable on types, but not on instances of TypeAliasType. @property def __module__(self) -> str | None: ... # type: ignore[override] # Returns typing._GenericAlias, which isn't stubbed. - def __getitem__(self, parameters: Incomplete | tuple[Incomplete, ...]) -> Any: ... + def __getitem__(self, parameters: Incomplete | tuple[Incomplete, ...]) -> AnnotationForm: ... def __init_subclass__(cls, *args: Unused, **kwargs: Unused) -> NoReturn: ... if sys.version_info >= (3, 10): def __or__(self, right: Any) -> _SpecialForm: ... @@ -600,27 +601,75 @@ NoExtraItems: _NoExtraItemsType # PEP 747 TypeForm: _SpecialForm -class Format(enum.IntEnum): - VALUE = 1 - FORWARDREF = 2 - STRING = 3 - # PEP 649/749 -def get_annotations( - obj: Callable[..., object] | type[object] | ModuleType, # any callable, class, or module - *, - globals: Mapping[str, Any] | None = None, # value types depend on the key - locals: Mapping[str, Any] | None = None, # value types depend on the key - eval_str: bool = False, - format: Format = Format.VALUE, # noqa: Y011 -) -> dict[str, Any]: ... 
# values are type expressions -def evaluate_forward_ref( - forward_ref: ForwardRef, - *, - owner: Callable[..., object] | type[object] | ModuleType | None = None, # any callable, class, or module - globals: Mapping[str, Any] | None = None, # value types depend on the key - locals: Mapping[str, Any] | None = None, # value types depend on the key - type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, - format: Format = Format.VALUE, # noqa: Y011 - _recursive_guard: Container[str] = ..., -) -> Any: ... # str if format is Format.STRING, otherwise a type expression +if sys.version_info >= (3, 14): + from typing import evaluate_forward_ref as evaluate_forward_ref + + from annotationlib import Format as Format, get_annotations as get_annotations +else: + class Format(enum.IntEnum): + VALUE = 1 + VALUE_WITH_FAKE_GLOBALS = 2 + FORWARDREF = 3 + STRING = 4 + + @overload + def get_annotations( + obj: Any, # any object with __annotations__ or __annotate__ + *, + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + eval_str: bool = False, + format: Literal[Format.STRING], + ) -> dict[str, str]: ... + @overload + def get_annotations( + obj: Any, # any object with __annotations__ or __annotate__ + *, + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + eval_str: bool = False, + format: Literal[Format.FORWARDREF], + ) -> dict[str, AnnotationForm | ForwardRef]: ... + @overload + def get_annotations( + obj: Any, # any object with __annotations__ or __annotate__ + *, + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + eval_str: bool = False, + format: Format = Format.VALUE, # noqa: Y011 + ) -> dict[str, AnnotationForm]: ... + @overload + def evaluate_forward_ref( + forward_ref: ForwardRef, + *, + owner: Callable[..., object] | type[object] | ModuleType | None = None, # any callable, class, or module + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, + format: Literal[Format.STRING], + _recursive_guard: Container[str] = ..., + ) -> str: ... + @overload + def evaluate_forward_ref( + forward_ref: ForwardRef, + *, + owner: Callable[..., object] | type[object] | ModuleType | None = None, # any callable, class, or module + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, + format: Literal[Format.FORWARDREF], + _recursive_guard: Container[str] = ..., + ) -> AnnotationForm | ForwardRef: ... + @overload + def evaluate_forward_ref( + forward_ref: ForwardRef, + *, + owner: Callable[..., object] | type[object] | ModuleType | None = None, # any callable, class, or module + globals: Mapping[str, Any] | None = None, # value types depend on the key + locals: Mapping[str, Any] | None = None, # value types depend on the key + type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, + format: Format = Format.VALUE, # noqa: Y011 + _recursive_guard: Container[str] = ..., + ) -> AnnotationForm: ... 
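Illustrative usage, not part of the diff above: a minimal sketch of what the `Format`-keyed `get_annotations` overloads promise to a type checker, assuming a `typing_extensions` release that exports `Format` and `get_annotations` at runtime. The types in the comments follow directly from the overload return types declared in the stub.

    from typing_extensions import Format, get_annotations

    def greet(user: int) -> str:
        return "hello"

    # default Format.VALUE -> dict[str, AnnotationForm] (evaluated annotation objects)
    evaluated = get_annotations(greet)
    # Format.FORWARDREF -> dict[str, AnnotationForm | ForwardRef] (unresolved names stay wrapped)
    forward = get_annotations(greet, format=Format.FORWARDREF)
    # Format.STRING -> dict[str, str] (annotations rendered as source text)
    as_text = get_annotations(greet, format=Format.STRING)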
diff --git a/tests/stubtest_stdlib.py b/tests/stubtest_stdlib.py index a2d2afa90ba2..ac794eb4bacd 100755 --- a/tests/stubtest_stdlib.py +++ b/tests/stubtest_stdlib.py @@ -27,6 +27,7 @@ def run_stubtest(typeshed_dir: Path) -> int: "-m", "mypy.stubtest", "--check-typeshed", + "--show-traceback", "--custom-typeshed-dir", str(typeshed_dir), *allowlist_stubtest_arguments("stdlib"), From 69252c8b1450878ceab82e07d726319841dab146 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sat, 10 May 2025 16:33:33 -0500 Subject: [PATCH 321/388] Add context manager for `contextvars.Token` (3.14) (#13997) --- stdlib/@tests/stubtest_allowlists/py314.txt | 4 ---- stdlib/_contextvars.pyi | 8 +++++++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index bb6b5ecf795a..9f79c767bf7d 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -6,8 +6,6 @@ _asyncio.all_tasks _asyncio.future_add_to_awaited_by _asyncio.future_discard_from_awaited_by _compression -_contextvars.Token.__enter__ -_contextvars.Token.__exit__ _ctypes.POINTER _ctypes.byref _ctypes.pointer @@ -92,8 +90,6 @@ concurrent.futures.thread._worker configparser.__all__ configparser.InvalidWriteError configparser.UnnamedSectionDisabledError -contextvars.Token.__enter__ -contextvars.Token.__exit__ ctypes.POINTER ctypes.byref ctypes.memoryview_at diff --git a/stdlib/_contextvars.pyi b/stdlib/_contextvars.pyi index 33df799a768c..e2e2e4df9d08 100644 --- a/stdlib/_contextvars.pyi +++ b/stdlib/_contextvars.pyi @@ -1,5 +1,6 @@ +import sys from collections.abc import Callable, Iterator, Mapping -from types import GenericAlias +from types import GenericAlias, TracebackType from typing import Any, ClassVar, Generic, TypeVar, final, overload from typing_extensions import ParamSpec, Self @@ -35,6 +36,11 @@ class Token(Generic[_T]): MISSING: ClassVar[object] __hash__: ClassVar[None] # type: ignore[assignment] def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + if sys.version_info >= (3, 14): + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... def copy_context() -> Context: ... From 3d48e5aafc25e825fece4725778b2ce28586ef75 Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Sat, 10 May 2025 16:39:55 -0700 Subject: [PATCH 322/388] [click-web] Fixes after click 8.2.0 release (#14001) --- stubs/click-web/click_web/__init__.pyi | 2 +- stubs/click-web/click_web/resources/input_fields.pyi | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/stubs/click-web/click_web/__init__.pyi b/stubs/click-web/click_web/__init__.pyi index 3761937200df..3f97be32e833 100644 --- a/stubs/click-web/click_web/__init__.pyi +++ b/stubs/click-web/click_web/__init__.pyi @@ -13,4 +13,4 @@ OUTPUT_FOLDER: str _flask_app: flask.Flask | None logger: logging.Logger | None -def create_click_web_app(module: types.ModuleType, command: click.BaseCommand, root: str = "/") -> flask.Flask: ... +def create_click_web_app(module: types.ModuleType, command: click.Command, root: str = "/") -> flask.Flask: ... 
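Illustrative usage, not part of either diff here: the `Token.__enter__`/`__exit__` methods added for Python 3.14 in the `_contextvars` stub of PATCH 321 (#13997) above correspond to the usage sketched below (requires a 3.14 interpreter to run; resetting the variable on exit is the expected 3.14 runtime behaviour).

    import contextvars

    var = contextvars.ContextVar("var", default="outer")

    # On 3.14+ the Token returned by ContextVar.set() can be used as a context manager:
    # __enter__ returns the token itself (Self in the stub), __exit__ resets the variable.
    with var.set("inner"):
        assert var.get() == "inner"
    assert var.get() == "outer"  # previous value restored on exit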
diff --git a/stubs/click-web/click_web/resources/input_fields.pyi b/stubs/click-web/click_web/resources/input_fields.pyi index 92e2289e9b85..bffb313868b2 100644 --- a/stubs/click-web/click_web/resources/input_fields.pyi +++ b/stubs/click-web/click_web/resources/input_fields.pyi @@ -1,3 +1,4 @@ +import sys from typing import Any, ClassVar, Final import click @@ -46,7 +47,10 @@ class BaseInput: def _build_name(self, name: str): ... class ChoiceInput(BaseInput): - param_type_cls: type[click.Choice] + if sys.version_info >= (3, 10): + param_type_cls: type[click.Choice[Any]] + else: + param_type_cls: type[click.Choice] class FlagInput(BaseInput): param_type_cls: None From 4b8b01b1cbbcf4e1e31d851700658c19c8c93044 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sat, 10 May 2025 23:09:02 -0500 Subject: [PATCH 323/388] Update functools for 3.14 (#14002) --- stdlib/@tests/stubtest_allowlists/py314.txt | 10 +-- stdlib/functools.pyi | 71 ++++++++++++++++----- 2 files changed, 59 insertions(+), 22 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 9f79c767bf7d..161310ad18bb 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -124,13 +124,6 @@ fnmatch.filterfalse fractions.Fraction.__pow__ fractions.Fraction.__rpow__ fractions.Fraction.from_number -functools.__all__ -functools.Placeholder -functools.WRAPPER_ASSIGNMENTS -functools.partialmethod.__new__ -functools.reduce -functools.update_wrapper -functools.wraps gzip.GzipFile.readinto gzip.GzipFile.readinto1 gzip.compress @@ -269,6 +262,9 @@ types.UnionType.__mro_entries__ types.UnionType.__name__ types.UnionType.__qualname__ +# Assigning `__new__` causes `func` not to get recognized. +functools.partialmethod.__new__ + # ==================================== # Pre-existing errors from Python 3.13 # ==================================== diff --git a/stdlib/functools.pyi b/stdlib/functools.pyi index d35c295754e5..e31399fb8705 100644 --- a/stdlib/functools.pyi +++ b/stdlib/functools.pyi @@ -3,7 +3,7 @@ import types from _typeshed import SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sized from types import GenericAlias -from typing import Any, Generic, Literal, NamedTuple, TypedDict, TypeVar, final, overload +from typing import Any, Final, Generic, Literal, NamedTuple, TypedDict, TypeVar, final, overload from typing_extensions import ParamSpec, Self, TypeAlias __all__ = [ @@ -31,10 +31,16 @@ _RWrapped = TypeVar("_RWrapped") _PWrapper = ParamSpec("_PWrapper") _RWrapper = TypeVar("_RWrapper") +if sys.version_info >= (3, 14): + @overload + def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], /, initial: _T) -> _T: ... + +else: + @overload + def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initial: _T, /) -> _T: ... + @overload -def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T, /) -> _T: ... -@overload -def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T], /) -> _T: ... +def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], /) -> _T: ... class _CacheInfo(NamedTuple): hits: int @@ -61,19 +67,33 @@ def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Calla @overload def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wrapper[_T]: ... 
-if sys.version_info >= (3, 12): - WRAPPER_ASSIGNMENTS: tuple[ - Literal["__module__"], - Literal["__name__"], - Literal["__qualname__"], - Literal["__doc__"], - Literal["__annotations__"], - Literal["__type_params__"], +if sys.version_info >= (3, 14): + WRAPPER_ASSIGNMENTS: Final[ + tuple[ + Literal["__module__"], + Literal["__name__"], + Literal["__qualname__"], + Literal["__doc__"], + Literal["__annotate__"], + Literal["__type_params__"], + ] + ] +elif sys.version_info >= (3, 12): + WRAPPER_ASSIGNMENTS: Final[ + tuple[ + Literal["__module__"], + Literal["__name__"], + Literal["__qualname__"], + Literal["__doc__"], + Literal["__annotations__"], + Literal["__type_params__"], + ] ] else: - WRAPPER_ASSIGNMENTS: tuple[ - Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"] + WRAPPER_ASSIGNMENTS: Final[ + tuple[Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"]] ] + WRAPPER_UPDATES: tuple[Literal["__dict__"]] class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWrapper]): @@ -86,7 +106,20 @@ class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWrapper]): class _Wrapper(Generic[_PWrapped, _RWrapped]): def __call__(self, f: Callable[_PWrapper, _RWrapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 14): + def update_wrapper( + wrapper: Callable[_PWrapper, _RWrapper], + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotate__", "__type_params__"), + updated: Iterable[str] = ("__dict__",), + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + def wraps( + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotate__", "__type_params__"), + updated: Iterable[str] = ("__dict__",), + ) -> _Wrapper[_PWrapped, _RWrapped]: ... + +elif sys.version_info >= (3, 12): def update_wrapper( wrapper: Callable[_PWrapper, _RWrapper], wrapped: Callable[_PWrapped, _RWrapped], @@ -204,3 +237,11 @@ def _make_key( type: Any = ..., len: Callable[[Sized], int] = ..., ) -> Hashable: ... + +if sys.version_info >= (3, 14): + @final + class _PlaceholderType: ... 
+ + Placeholder: Final[_PlaceholderType] + + __all__ += ["Placeholder"] From cf714b7d6ea0f2d79b1fc0ee236b3e9039a5762e Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sun, 11 May 2025 05:39:53 +0000 Subject: [PATCH 324/388] Update tokens for 3.14 (#14004) --- stdlib/@tests/stubtest_allowlists/py314.txt | 8 -------- stdlib/token.pyi | 8 ++++++++ stdlib/tokenize.pyi | 3 +++ 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 161310ad18bb..fbfb7923245a 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -201,14 +201,6 @@ tarfile.TarFile.zstopen threading.Thread.__init__ threading._RLock.locked tkinter.Event.__class_getitem__ -token.__all__ -token.TSTRING_END -token.TSTRING_MIDDLE -token.TSTRING_START -tokenize.__all__ -tokenize.TSTRING_END -tokenize.TSTRING_MIDDLE -tokenize.TSTRING_START tomllib.TOMLDecodeError.__init__ traceback.__all__ turtle.__all__ diff --git a/stdlib/token.pyi b/stdlib/token.pyi index 741ce5b035b7..7c13b15d95b7 100644 --- a/stdlib/token.pyi +++ b/stdlib/token.pyi @@ -78,6 +78,9 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 12): __all__ += ["EXCLAMATION", "FSTRING_END", "FSTRING_MIDDLE", "FSTRING_START", "EXACT_TOKEN_TYPES"] +if sys.version_info >= (3, 14): + __all__ += ["TSTRING_START", "TSTRING_MIDDLE", "TSTRING_END"] + ENDMARKER: int NAME: int NUMBER: int @@ -155,6 +158,11 @@ if sys.version_info >= (3, 12): FSTRING_MIDDLE: int FSTRING_START: int +if sys.version_info >= (3, 14): + TSTRING_START: int + TSTRING_MIDDLE: int + TSTRING_END: int + def ISTERMINAL(x: int) -> bool: ... def ISNONTERMINAL(x: int) -> bool: ... def ISEOF(x: int) -> bool: ... diff --git a/stdlib/tokenize.pyi b/stdlib/tokenize.pyi index 86e87704eb02..b658740a1ad7 100644 --- a/stdlib/tokenize.pyi +++ b/stdlib/tokenize.pyi @@ -93,6 +93,9 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 13): __all__ += ["TokenError", "open"] +if sys.version_info >= (3, 14): + __all__ += ["TSTRING_START", "TSTRING_MIDDLE", "TSTRING_END"] + cookie_re: Pattern[str] blank_re: Pattern[bytes] From c048694dae6062b55bd9d82cfc88f76ac0f40496 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sun, 11 May 2025 07:03:41 +0000 Subject: [PATCH 325/388] Bump `configparser` to 3.14 (#14007) --- stdlib/@tests/stubtest_allowlists/py314.txt | 3 -- stdlib/configparser.pyi | 35 ++++++++++++++++++++- 2 files changed, 34 insertions(+), 4 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index fbfb7923245a..5beb26bac572 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -87,9 +87,6 @@ concurrent.futures.thread.WorkerContext concurrent.futures.thread._WorkItem.__init__ concurrent.futures.thread._WorkItem.run concurrent.futures.thread._worker -configparser.__all__ -configparser.InvalidWriteError -configparser.UnnamedSectionDisabledError ctypes.POINTER ctypes.byref ctypes.memoryview_at diff --git a/stdlib/configparser.pyi b/stdlib/configparser.pyi index 8996c85d9a53..15c564c02589 100644 --- a/stdlib/configparser.pyi +++ b/stdlib/configparser.pyi @@ -5,7 +5,33 @@ from re import Pattern from typing import Any, ClassVar, Final, Literal, TypeVar, overload from typing_extensions import TypeAlias -if sys.version_info >= (3, 13): +if sys.version_info >= (3, 14): + __all__ = ( + "NoSectionError", + "DuplicateOptionError", + 
"DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "MultilineContinuationError", + "UnnamedSectionDisabledError", + "InvalidWriteError", + "ConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", + "UNNAMED_SECTION", + ) +elif sys.version_info >= (3, 13): __all__ = ( "NoSectionError", "DuplicateOptionError", @@ -429,3 +455,10 @@ if sys.version_info >= (3, 13): lineno: int line: str def __init__(self, filename: str, lineno: int, line: str) -> None: ... + +if sys.version_info >= (3, 14): + class UnnamedSectionDisabledError(Error): + msg: Final = "Support for UNNAMED_SECTION is disabled." + def __init__(self) -> None: ... + + class InvalidWriteError(Error): ... From 23e7a07e190ef544ad6f787fc2a37ae2b2e3c7c9 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sun, 11 May 2025 07:23:54 +0000 Subject: [PATCH 326/388] Update `traceback.__all__` (#14011) --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/traceback.pyi | 5 +++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 5beb26bac572..ac150615575b 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -199,7 +199,6 @@ threading.Thread.__init__ threading._RLock.locked tkinter.Event.__class_getitem__ tomllib.TOMLDecodeError.__init__ -traceback.__all__ turtle.__all__ turtle.RawTurtle.fill turtle.RawTurtle.poly diff --git a/stdlib/traceback.pyi b/stdlib/traceback.pyi index 4f132d51c617..4553dbd08384 100644 --- a/stdlib/traceback.pyi +++ b/stdlib/traceback.pyi @@ -27,6 +27,9 @@ __all__ = [ "walk_tb", ] +if sys.version_info >= (3, 14): + __all__ += ["print_list"] + _FrameSummaryTuple: TypeAlias = tuple[str, int, str, str | None] def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... @@ -81,8 +84,6 @@ def print_stack(f: FrameType | None = None, limit: int | None = None, file: Supp def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: ... def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: ... def format_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> list[str]: ... - -# undocumented def print_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple], file: SupportsWrite[str] | None = None) -> None: ... 
if sys.version_info >= (3, 13): From 1dc2e21904f53d78cdd0ddf72e99e1fd874be110 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9n=C3=A9dikt=20Tran?= <10796600+picnixz@users.noreply.github.com> Date: Sun, 11 May 2025 13:15:33 +0200 Subject: [PATCH 327/388] Added the `show_positions` parameter to various `dis` interfaces (#14010) --- stdlib/@tests/stubtest_allowlists/py314.txt | 4 -- stdlib/dis.pyi | 70 ++++++++++++++++++--- 2 files changed, 61 insertions(+), 13 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index ac150615575b..a5d3a39e63aa 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -107,11 +107,7 @@ dataclasses.field dataclasses.make_dataclass decimal.Decimal.from_number decimal.IEEE_CONTEXT_MAX_BITS -dis.Bytecode.__init__ dis.Instruction.make -dis.dis -dis.disassemble -dis.distb enum.Enum.__signature__ enum.EnumMeta.__signature__ enum.EnumType.__signature__ diff --git a/stdlib/dis.pyi b/stdlib/dis.pyi index cb69eac89c92..afd5e00ca40c 100644 --- a/stdlib/dis.pyi +++ b/stdlib/dis.pyi @@ -110,7 +110,21 @@ class Instruction(_Instruction): class Bytecode: codeobj: types.CodeType first_line: int - if sys.version_info >= (3, 13): + if sys.version_info >= (3, 14): + show_positions: bool + # 3.14 added `show_positions` + def __init__( + self, + x: _HaveCodeType | str, + *, + first_line: int | None = None, + current_offset: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + show_positions: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 13): show_offsets: bool # 3.13 added `show_offsets` def __init__( @@ -156,7 +170,39 @@ def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ... def pretty_flags(flags: int) -> str: ... def code_info(x: _HaveCodeType | str) -> str: ... -if sys.version_info >= (3, 13): +if sys.version_info >= (3, 14): + # 3.14 added `show_positions` + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, + *, + file: IO[str] | None = None, + depth: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + show_positions: bool = False, + ) -> None: ... + def disassemble( + co: _HaveCodeType, + lasti: int = -1, + *, + file: IO[str] | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + show_positions: bool = False, + ) -> None: ... + def distb( + tb: types.TracebackType | None = None, + *, + file: IO[str] | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + show_positions: bool = False, + ) -> None: ... + +elif sys.version_info >= (3, 13): # 3.13 added `show_offsets` def dis( x: _HaveCodeType | str | bytes | bytearray | None = None, @@ -184,10 +230,6 @@ if sys.version_info >= (3, 13): adaptive: bool = False, show_offsets: bool = False, ) -> None: ... - # 3.13 made `show_cache` `None` by default - def get_instructions( - x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool | None = None, adaptive: bool = False - ) -> Iterator[Instruction]: ... elif sys.version_info >= (3, 11): # 3.11 added `show_caches` and `adaptive` @@ -205,9 +247,6 @@ elif sys.version_info >= (3, 11): def distb( tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False ) -> None: ... 
- def get_instructions( - x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False - ) -> Iterator[Instruction]: ... else: def dis( @@ -215,6 +254,19 @@ else: ) -> None: ... def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: ... + +if sys.version_info >= (3, 13): + # 3.13 made `show_cache` `None` by default + def get_instructions( + x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool | None = None, adaptive: bool = False + ) -> Iterator[Instruction]: ... + +elif sys.version_info >= (3, 11): + def get_instructions( + x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False + ) -> Iterator[Instruction]: ... + +else: def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: ... def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: ... From 221118f82f3fe05d92fca8294b145f3f890c4755 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sun, 11 May 2025 11:25:40 +0000 Subject: [PATCH 328/388] Update `argparse` to 3.14 (#14005) --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/argparse.pyi | 102 +++++++++++++++----- 2 files changed, 77 insertions(+), 26 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index a5d3a39e63aa..9c452cd70052 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -23,7 +23,6 @@ _socket.if_indextoname _ssl.HAS_PHA _thread.RLock.locked _thread.set_name -argparse.HelpFormatter.__init__ ast.Interpolation ast.TemplateStr asyncio.__all__ diff --git a/stdlib/argparse.pyi b/stdlib/argparse.pyi index 32beaff14696..0c84f52fff71 100644 --- a/stdlib/argparse.pyi +++ b/stdlib/argparse.pyi @@ -130,22 +130,44 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): _subparsers: _ArgumentGroup | None # Note: the constructor arguments are also used in _SubParsersAction.add_parser. - def __init__( - self, - prog: str | None = None, - usage: str | None = None, - description: str | None = None, - epilog: str | None = None, - parents: Sequence[ArgumentParser] = [], - formatter_class: _FormatterClass = ..., - prefix_chars: str = "-", - fromfile_prefix_chars: str | None = None, - argument_default: Any = None, - conflict_handler: str = "error", - add_help: bool = True, - allow_abbrev: bool = True, - exit_on_error: bool = True, - ) -> None: ... + if sys.version_info >= (3, 14): + def __init__( + self, + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, + parents: Sequence[ArgumentParser] = [], + formatter_class: _FormatterClass = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, + *, + suggest_on_error: bool = False, + color: bool = False, + ) -> None: ... 
+ else: + def __init__( + self, + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, + parents: Sequence[ArgumentParser] = [], + formatter_class: _FormatterClass = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, + ) -> None: ... + @overload def parse_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... @overload @@ -252,7 +274,21 @@ class HelpFormatter: def __init__(self, formatter: HelpFormatter, parent: Self | None, heading: str | None = None) -> None: ... def format_help(self) -> str: ... - def __init__(self, prog: str, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None) -> None: ... + if sys.version_info >= (3, 14): + def __init__( + self, + prog: str, + indent_increment: int = 2, + max_help_position: int = 24, + width: int | None = None, + prefix_chars: str = "-", + color: bool = False, + ) -> None: ... + else: + def __init__( + self, prog: str, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None + ) -> None: ... + def _indent(self) -> None: ... def _dedent(self) -> None: ... def _add_item(self, func: Callable[..., str], args: Iterable[Any]) -> None: ... @@ -431,14 +467,30 @@ class Namespace(_AttributeHolder): def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] -class FileType: - # undocumented - _mode: str - _bufsize: int - _encoding: str | None - _errors: str | None - def __init__(self, mode: str = "r", bufsize: int = -1, encoding: str | None = None, errors: str | None = None) -> None: ... - def __call__(self, string: str) -> IO[Any]: ... +if sys.version_info >= (3, 14): + @deprecated("Deprecated in Python 3.14; Simply open files after parsing arguments") + class FileType: + # undocumented + _mode: str + _bufsize: int + _encoding: str | None + _errors: str | None + def __init__( + self, mode: str = "r", bufsize: int = -1, encoding: str | None = None, errors: str | None = None + ) -> None: ... + def __call__(self, string: str) -> IO[Any]: ... + +else: + class FileType: + # undocumented + _mode: str + _bufsize: int + _encoding: str | None + _errors: str | None + def __init__( + self, mode: str = "r", bufsize: int = -1, encoding: str | None = None, errors: str | None = None + ) -> None: ... + def __call__(self, string: str) -> IO[Any]: ... 
# undocumented class _ArgumentGroup(_ActionsContainer): From 130b49b2fc9a1fd33f19887b48aa14772a994008 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sun, 11 May 2025 11:56:08 +0000 Subject: [PATCH 329/388] Bump `http.server` to 3.14 (#13981) --- stdlib/@tests/stubtest_allowlists/py314.txt | 3 -- stdlib/http/server.pyi | 56 +++++++++++++++++++-- 2 files changed, 51 insertions(+), 8 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 9c452cd70052..c635972d4d28 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -119,9 +119,6 @@ fractions.Fraction.from_number gzip.GzipFile.readinto gzip.GzipFile.readinto1 gzip.compress -http.server.__all__ -http.server.HTTPSServer -http.server.ThreadingHTTPSServer imaplib.IMAP4.file imaplib.IMAP4.idle imaplib.IMAP4_SSL.file diff --git a/stdlib/http/server.pyi b/stdlib/http/server.pyi index 1a6fde6000d9..429bb65bb0ef 100644 --- a/stdlib/http/server.pyi +++ b/stdlib/http/server.pyi @@ -3,12 +3,25 @@ import email.message import io import socketserver import sys -from _typeshed import StrPath, SupportsRead, SupportsWrite -from collections.abc import Mapping, Sequence -from typing import Any, AnyStr, BinaryIO, ClassVar -from typing_extensions import deprecated +from _ssl import _PasswordType +from _typeshed import ReadableBuffer, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite +from collections.abc import Callable, Iterable, Mapping, Sequence +from ssl import Purpose, SSLContext +from typing import Any, AnyStr, BinaryIO, ClassVar, Protocol, type_check_only +from typing_extensions import Self, deprecated -__all__ = ["HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler"] +if sys.version_info >= (3, 14): + __all__ = [ + "HTTPServer", + "ThreadingHTTPServer", + "HTTPSServer", + "ThreadingHTTPSServer", + "BaseHTTPRequestHandler", + "SimpleHTTPRequestHandler", + "CGIHTTPRequestHandler", + ] +else: + __all__ = ["HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler"] class HTTPServer(socketserver.TCPServer): server_name: str @@ -16,6 +29,39 @@ class HTTPServer(socketserver.TCPServer): class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer): ... +if sys.version_info >= (3, 14): + @type_check_only + class _SSLModule(Protocol): + @staticmethod + def create_default_context( + purpose: Purpose = ..., + *, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> SSLContext: ... + + class HTTPSServer(HTTPServer): + ssl: _SSLModule + certfile: StrOrBytesPath + keyfile: StrOrBytesPath | None + password: _PasswordType | None + alpn_protocols: Iterable[str] + def __init__( + self, + server_address: socketserver._AfInetAddress, + RequestHandlerClass: Callable[[Any, _socket._RetAddress, Self], socketserver.BaseRequestHandler], + bind_and_activate: bool = True, + *, + certfile: StrOrBytesPath, + keyfile: StrOrBytesPath | None = None, + password: _PasswordType | None = None, + alpn_protocols: Iterable[str] | None = None, + ) -> None: ... + def server_activate(self) -> None: ... + + class ThreadingHTTPSServer(socketserver.ThreadingMixIn, HTTPSServer): ... 
+ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): client_address: tuple[str, int] close_connection: bool From 1ec17233dc25ff7b9f8befc281dda4df79b68c8f Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sun, 11 May 2025 12:40:17 +0000 Subject: [PATCH 330/388] Bump `unittest` to 3.14 (#14012) --- stdlib/@tests/stubtest_allowlists/py314.txt | 16 ---------------- stdlib/unittest/case.pyi | 11 +++++++++++ 2 files changed, 11 insertions(+), 16 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index c635972d4d28..3c5002e2163b 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -202,22 +202,6 @@ turtle.poly turtle.save types.CodeType.co_branches types.FrameType.f_generator -unittest.TestCase.assertEndsWith -unittest.TestCase.assertHasAttr -unittest.TestCase.assertIsSubclass -unittest.TestCase.assertNotEndsWith -unittest.TestCase.assertNotHasAttr -unittest.TestCase.assertNotIsSubclass -unittest.TestCase.assertNotStartsWith -unittest.TestCase.assertStartsWith -unittest.case.TestCase.assertEndsWith -unittest.case.TestCase.assertHasAttr -unittest.case.TestCase.assertIsSubclass -unittest.case.TestCase.assertNotEndsWith -unittest.case.TestCase.assertNotHasAttr -unittest.case.TestCase.assertNotIsSubclass -unittest.case.TestCase.assertNotStartsWith -unittest.case.TestCase.assertStartsWith urllib.request.__all__ urllib.request.FancyURLopener urllib.request.URLopener diff --git a/stdlib/unittest/case.pyi b/stdlib/unittest/case.pyi index 7d1a382a54a4..89bcabf104c2 100644 --- a/stdlib/unittest/case.pyi +++ b/stdlib/unittest/case.pyi @@ -18,6 +18,7 @@ _T = TypeVar("_T") _S = TypeVar("_S", bound=SupportsSub[Any, Any]) _E = TypeVar("_E", bound=BaseException) _FT = TypeVar("_FT", bound=Callable[..., Any]) +_SB = TypeVar("_SB", str, bytes, bytearray) _P = ParamSpec("_P") DIFF_OMITTED: Final[str] @@ -289,6 +290,16 @@ class TestCase: # Runtime has *args, **kwargs, but will error if any are supplied def __init_subclass__(cls, *args: Never, **kwargs: Never) -> None: ... + if sys.version_info >= (3, 14): + def assertIsSubclass(self, cls: type, superclass: type | tuple[type, ...], msg: Any = None) -> None: ... + def assertNotIsSubclass(self, cls: type, superclass: type | tuple[type, ...], msg: Any = None) -> None: ... + def assertHasAttr(self, obj: object, name: str, msg: Any = None) -> None: ... + def assertNotHasAttr(self, obj: object, name: str, msg: Any = None) -> None: ... + def assertStartsWith(self, s: _SB, prefix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... + def assertNotStartsWith(self, s: _SB, prefix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... + def assertEndsWith(self, s: _SB, suffix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... + def assertNotEndsWith(self, s: _SB, suffix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... 
+ class FunctionTestCase(TestCase): def __init__( self, From e6ac04f7167cf0178d4dcf6438dc8baade981bc1 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Sun, 11 May 2025 12:42:21 +0000 Subject: [PATCH 331/388] Complete `bleach` (#13969) --- pyrightconfig.stricter.json | 1 - stubs/bleach/bleach/linkifier.pyi | 3 +-- stubs/bleach/bleach/sanitizer.pyi | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 74196972b4ee..a38c73ba6dd1 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -28,7 +28,6 @@ "stubs/Authlib", "stubs/aws-xray-sdk", "stubs/beautifulsoup4", - "stubs/bleach/bleach/sanitizer.pyi", "stubs/boltons", "stubs/braintree", "stubs/caldav", diff --git a/stubs/bleach/bleach/linkifier.pyi b/stubs/bleach/bleach/linkifier.pyi index efb0c83dbdc0..73fe653c1b62 100644 --- a/stubs/bleach/bleach/linkifier.pyi +++ b/stubs/bleach/bleach/linkifier.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Container, Iterable, Iterator, Sequence from re import Pattern from typing import Any, Final @@ -59,4 +58,4 @@ class LinkifyFilter(Filter): def handle_links(self, src_iter: Iterable[_Token]) -> Iterator[_Token]: ... def handle_a_tag(self, token_buffer: Sequence[_Token]) -> Iterator[_Token]: ... def extract_entities(self, token: _Token) -> Iterator[_Token]: ... - def __iter__(self) -> Iterator[Incomplete]: ... + def __iter__(self) -> Iterator[_Token]: ... diff --git a/stubs/bleach/bleach/sanitizer.pyi b/stubs/bleach/bleach/sanitizer.pyi index 6771387e71ad..f13e8637867b 100644 --- a/stubs/bleach/bleach/sanitizer.pyi +++ b/stubs/bleach/bleach/sanitizer.pyi @@ -24,7 +24,7 @@ class NoCssSanitizerWarning(UserWarning): ... # A html5lib Filter class class _Filter(Protocol): - def __call__(self, *, source: BleachSanitizerFilter): ... + def __call__(self, *, source: BleachSanitizerFilter) -> BleachSanitizerFilter: ... 
_AttributeFilter: TypeAlias = Callable[[str, str, str], bool] _AttributeDict: TypeAlias = dict[str, list[str] | _AttributeFilter] | dict[str, list[str]] | dict[str, _AttributeFilter] From 9b09018aacd4aeb687895b7a8b2d57d321073746 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 11 May 2025 14:43:34 +0200 Subject: [PATCH 332/388] [stubsabot] Bump peewee to 3.18.1 (#13910) --- stubs/peewee/METADATA.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/peewee/METADATA.toml b/stubs/peewee/METADATA.toml index 4ed22e0ed02e..361b69d0ed35 100644 --- a/stubs/peewee/METADATA.toml +++ b/stubs/peewee/METADATA.toml @@ -1,4 +1,4 @@ -version = "3.18.0" +version = "3.18.1" upstream_repository = "https://github.com/coleifer/peewee" # We're not providing stubs for all playhouse modules right now # https://github.com/python/typeshed/pull/11731#issuecomment-2065729058 From 6a1a837d201c5c0c411ff758a23b329807d0beab Mon Sep 17 00:00:00 2001 From: Ali Hamdan Date: Sun, 11 May 2025 14:45:05 +0200 Subject: [PATCH 333/388] Fix annotations of shapely.constructive.make_valid (#13913) --- stubs/shapely/shapely/constructive.pyi | 29 ++++++++++++++++---------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/stubs/shapely/shapely/constructive.pyi b/stubs/shapely/shapely/constructive.pyi index 36dc537dd586..f3a74f56fe17 100644 --- a/stubs/shapely/shapely/constructive.pyi +++ b/stubs/shapely/shapely/constructive.pyi @@ -1,6 +1,5 @@ from collections.abc import Sequence from typing import Any, Literal, SupportsIndex, overload -from typing_extensions import TypeAlias from ._enum import ParamEnum from ._typing import ArrayLike, ArrayLikeSeq, GeoArray, OptGeoArrayLike, OptGeoArrayLikeSeq, OptGeoT @@ -43,8 +42,6 @@ __all__ = [ "voronoi_polygons", ] -_Method: TypeAlias = Literal["linework", "structure"] - class BufferCapStyle(ParamEnum): round = 1 flat = 2 @@ -269,25 +266,35 @@ def build_area(geometry: None, **kwargs) -> None: ... def build_area(geometry: Geometry | None, **kwargs) -> BaseGeometry | None: ... @overload def build_area(geometry: OptGeoArrayLikeSeq, **kwargs) -> GeoArray: ... + +# make_valid with `method="linework"` only accepts `keep_collapsed=True` @overload -def make_valid(geometry: Geometry, *, method: _Method = "linework", keep_collapsed: bool = True, **kwargs) -> BaseGeometry: ... -@overload -def make_valid(geometry: None, *, method: _Method = "linework", keep_collapsed: bool = True, **kwargs) -> None: ... +def make_valid( + geometry: Geometry, *, method: Literal["linework"] = "linework", keep_collapsed: Literal[True] = True, **kwargs +) -> BaseGeometry: ... @overload def make_valid( - geometry: Geometry | None, *, method: Literal["structure"], keep_collapsed: bool = True, **kwargs -) -> BaseGeometry | None: ... + geometry: None, *, method: Literal["linework"] = "linework", keep_collapsed: Literal[True] = True, **kwargs +) -> None: ... @overload def make_valid( - geometry: Geometry | None, *, method: Literal["linework"], keep_collapsed: Literal[True], **kwargs + geometry: Geometry | None, *, method: Literal["linework"] = "linework", keep_collapsed: Literal[True] = True, **kwargs ) -> BaseGeometry | None: ... @overload def make_valid( - geometry: OptGeoArrayLikeSeq, *, method: Literal["structure"], keep_collapsed: bool = True, **kwargs + geometry: OptGeoArrayLikeSeq, *, method: Literal["linework"] = "linework", keep_collapsed: Literal[True] = True, **kwargs ) -> GeoArray: ... 
@overload +def make_valid(geometry: Geometry, *, method: Literal["structure"], keep_collapsed: bool = True, **kwargs) -> BaseGeometry: ... +@overload +def make_valid(geometry: None, *, method: Literal["structure"], keep_collapsed: bool = True, **kwargs) -> None: ... +@overload def make_valid( - geometry: OptGeoArrayLikeSeq, *, method: Literal["linework"], keep_collapsed: Literal[True], **kwargs + geometry: Geometry | None, *, method: Literal["structure"], keep_collapsed: bool = True, **kwargs +) -> BaseGeometry | None: ... +@overload +def make_valid( + geometry: OptGeoArrayLikeSeq, *, method: Literal["structure"], keep_collapsed: bool = True, **kwargs ) -> GeoArray: ... @overload def minimum_clearance_line(geometry: Point, **kwargs) -> Point: ... From d29e44959d8b78823cf0539dce70457ca8a30ab3 Mon Sep 17 00:00:00 2001 From: Huy Nguyen <49433085+huynguyengl99@users.noreply.github.com> Date: Sun, 11 May 2025 19:45:30 +0700 Subject: [PATCH 334/388] Add mypy plugin support to stubtest configuration (#13948) --- CONTRIBUTING.md | 6 ++++++ lib/ts_utils/metadata.py | 16 +++++++++++++++- lib/ts_utils/mypy.py | 17 +++++++++++++++-- tests/README.md | 17 +++++++++++++++++ tests/check_typeshed_structure.py | 8 +++++++- tests/stubtest_third_party.py | 2 +- 6 files changed, 61 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e6645ede68bc..47c40eb5e175 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -229,6 +229,12 @@ This has the following keys: If not specified, stubtest is run only on `linux`. Only add extra OSes to the test if there are platform-specific branches in a stubs package. +* `mypy_plugins` (default: `[]`): A list of Python modules to use as mypy plugins +when running stubtest. For example: `mypy_plugins = ["mypy_django_plugin.main"]` +* `mypy_plugins_config` (default: `{}`): A dictionary mapping plugin names to their +configuration dictionaries for use by mypy plugins. For example: +`mypy_plugins_config = {"django-stubs" = {"django_settings_module" = "@tests.django_settings"}}` + `*_dependencies` are usually packages needed to `pip install` the implementation distribution. 
diff --git a/lib/ts_utils/metadata.py b/lib/ts_utils/metadata.py index ec30f9301425..2cf093ffc4a4 100644 --- a/lib/ts_utils/metadata.py +++ b/lib/ts_utils/metadata.py @@ -11,7 +11,7 @@ from collections.abc import Mapping from dataclasses import dataclass from pathlib import Path -from typing import Annotated, Final, NamedTuple, final +from typing import Annotated, Any, Final, NamedTuple, final from typing_extensions import TypeGuard import tomli @@ -42,6 +42,10 @@ def _is_list_of_strings(obj: object) -> TypeGuard[list[str]]: return isinstance(obj, list) and all(isinstance(item, str) for item in obj) +def _is_nested_dict(obj: object) -> TypeGuard[dict[str, dict[str, Any]]]: + return isinstance(obj, dict) and all(isinstance(k, str) and isinstance(v, dict) for k, v in obj.items()) + + @functools.cache def _get_oldest_supported_python() -> str: with PYPROJECT_PATH.open("rb") as config: @@ -71,6 +75,8 @@ class StubtestSettings: ignore_missing_stub: bool platforms: list[str] stubtest_requirements: list[str] + mypy_plugins: list[str] + mypy_plugins_config: dict[str, dict[str, Any]] def system_requirements_for_platform(self, platform: str) -> list[str]: assert platform in _STUBTEST_PLATFORM_MAPPING, f"Unrecognised platform {platform!r}" @@ -93,6 +99,8 @@ def read_stubtest_settings(distribution: str) -> StubtestSettings: ignore_missing_stub: object = data.get("ignore_missing_stub", False) specified_platforms: object = data.get("platforms", ["linux"]) stubtest_requirements: object = data.get("stubtest_requirements", []) + mypy_plugins: object = data.get("mypy_plugins", []) + mypy_plugins_config: object = data.get("mypy_plugins_config", {}) assert type(skip) is bool assert type(ignore_missing_stub) is bool @@ -104,6 +112,8 @@ def read_stubtest_settings(distribution: str) -> StubtestSettings: assert _is_list_of_strings(choco_dependencies) assert _is_list_of_strings(extras) assert _is_list_of_strings(stubtest_requirements) + assert _is_list_of_strings(mypy_plugins) + assert _is_nested_dict(mypy_plugins_config) unrecognised_platforms = set(specified_platforms) - _STUBTEST_PLATFORM_MAPPING.keys() assert not unrecognised_platforms, f"Unrecognised platforms specified for {distribution!r}: {unrecognised_platforms}" @@ -124,6 +134,8 @@ def read_stubtest_settings(distribution: str) -> StubtestSettings: ignore_missing_stub=ignore_missing_stub, platforms=specified_platforms, stubtest_requirements=stubtest_requirements, + mypy_plugins=mypy_plugins, + mypy_plugins_config=mypy_plugins_config, ) @@ -179,6 +191,8 @@ def is_obsolete(self) -> bool: "ignore_missing_stub", "platforms", "stubtest_requirements", + "mypy_plugins", + "mypy_plugins_config", } } _DIST_NAME_RE: Final = re.compile(r"^[a-z0-9]([a-z0-9._-]*[a-z0-9])?$", re.IGNORECASE) diff --git a/lib/ts_utils/mypy.py b/lib/ts_utils/mypy.py index 7fc050b155d1..39f4255ec011 100644 --- a/lib/ts_utils/mypy.py +++ b/lib/ts_utils/mypy.py @@ -6,7 +6,7 @@ import tomli -from ts_utils.metadata import metadata_path +from ts_utils.metadata import StubtestSettings, metadata_path from ts_utils.utils import NamedTemporaryFile, TemporaryFileWrapper @@ -50,7 +50,9 @@ def validate_configuration(section_name: str, mypy_section: dict[str, Any]) -> M @contextmanager -def temporary_mypy_config_file(configurations: Iterable[MypyDistConf]) -> Generator[TemporaryFileWrapper[str]]: +def temporary_mypy_config_file( + configurations: Iterable[MypyDistConf], stubtest_settings: StubtestSettings | None = None +) -> Generator[TemporaryFileWrapper[str]]: temp = NamedTemporaryFile("w+") try: for 
dist_conf in configurations: @@ -58,6 +60,17 @@ def temporary_mypy_config_file(configurations: Iterable[MypyDistConf]) -> Genera for k, v in dist_conf.values.items(): temp.write(f"{k} = {v}\n") temp.write("[mypy]\n") + + if stubtest_settings: + if stubtest_settings.mypy_plugins: + temp.write(f"plugins = {'.'.join(stubtest_settings.mypy_plugins)}\n") + + if stubtest_settings.mypy_plugins_config: + for plugin_name, plugin_dict in stubtest_settings.mypy_plugins_config.items(): + temp.write(f"[mypy.plugins.{plugin_name}]\n") + for k, v in plugin_dict.items(): + temp.write(f"{k} = {v}\n") + temp.flush() yield temp finally: diff --git a/tests/README.md b/tests/README.md index a00b1733146c..e2fd81fb7362 100644 --- a/tests/README.md +++ b/tests/README.md @@ -196,6 +196,23 @@ that stubtest reports to be missing should necessarily be added to the stub. For some implementation details, it is often better to add allowlist entries for missing objects rather than trying to match the runtime in every detail. +### Support for mypy plugins in stubtest + +For stubs that require mypy plugins to check correctly (such as Django), stubtest +supports configuring mypy plugins through the METADATA.toml file. This allows stubtest to +leverage type information provided by these plugins when validating stubs. + +To use this feature, add the following configuration to the `tool.stubtest` section in your METADATA.toml: + +```toml +mypy_plugins = ["mypy_django_plugin.main"] +mypy_plugins_config = { "django-stubs" = { "django_settings_module" = "@tests.django_settings" } } +``` + +For Django stubs specifically, you'll need to create a `django_settings.py` file in your `@tests` directory +that contains the Django settings required by the plugin. This file will be referenced by the plugin +configuration to properly validate Django-specific types during stubtest execution. + ## typecheck\_typeshed.py Run using diff --git a/tests/check_typeshed_structure.py b/tests/check_typeshed_structure.py index e64123fb54ba..191173b4256b 100755 --- a/tests/check_typeshed_structure.py +++ b/tests/check_typeshed_structure.py @@ -27,6 +27,10 @@ # consistent CI runs. linters = {"mypy", "pyright", "pytype", "ruff"} +ALLOWED_PY_FILES_IN_TESTS_DIR = { + "django_settings.py" # This file contains Django settings used by the mypy_django_plugin during stubtest execution. 
+} + def assert_consistent_filetypes( directory: Path, *, kind: str, allowed: set[str], allow_nonidentifier_filenames: bool = False @@ -81,7 +85,9 @@ def check_stubs() -> None: def check_tests_dir(tests_dir: Path) -> None: - py_files_present = any(file.suffix == ".py" for file in tests_dir.iterdir()) + py_files_present = any( + file.suffix == ".py" and file.name not in ALLOWED_PY_FILES_IN_TESTS_DIR for file in tests_dir.iterdir() + ) error_message = f"Test-case files must be in an `{TESTS_DIR}/{TEST_CASES_DIR}` directory, not in the `{TESTS_DIR}` directory" assert not py_files_present, error_message diff --git a/tests/stubtest_third_party.py b/tests/stubtest_third_party.py index 0530f6279628..1b853c1c408e 100755 --- a/tests/stubtest_third_party.py +++ b/tests/stubtest_third_party.py @@ -97,7 +97,7 @@ def run_stubtest( return False mypy_configuration = mypy_configuration_from_distribution(dist_name) - with temporary_mypy_config_file(mypy_configuration) as temp: + with temporary_mypy_config_file(mypy_configuration, stubtest_settings) as temp: ignore_missing_stub = ["--ignore-missing-stub"] if stubtest_settings.ignore_missing_stub else [] packages_to_check = [d.name for d in dist.iterdir() if d.is_dir() and d.name.isidentifier()] modules_to_check = [d.stem for d in dist.iterdir() if d.is_file() and d.suffix == ".pyi"] From badc5b680d6ee89a253315b721db4abc6bf774c3 Mon Sep 17 00:00:00 2001 From: Rogdham <3994389+Rogdham@users.noreply.github.com> Date: Sun, 11 May 2025 15:23:20 +0200 Subject: [PATCH 335/388] 3.14: PEP-784 compression except zstd (#13992) --- pyproject.toml | 2 ++ stdlib/@tests/stubtest_allowlists/py314.txt | 11 ++++---- .../stubtest_allowlists/win32-py314.txt | 1 + stdlib/VERSIONS | 3 ++- stdlib/_compression.pyi | 8 +++--- stdlib/bz2.pyi | 10 +++++--- stdlib/compression/__init__.pyi | 0 stdlib/compression/_common/__init__.pyi | 0 stdlib/compression/_common/_streams.pyi | 25 +++++++++++++++++++ stdlib/compression/bz2/__init__.pyi | 1 + stdlib/compression/gzip/__init__.pyi | 1 + stdlib/compression/lzma/__init__.pyi | 1 + stdlib/compression/zlib/__init__.pyi | 1 + stdlib/gzip.pyi | 10 +++++--- stdlib/lzma.pyi | 7 +++++- 15 files changed, 64 insertions(+), 17 deletions(-) create mode 100644 stdlib/compression/__init__.pyi create mode 100644 stdlib/compression/_common/__init__.pyi create mode 100644 stdlib/compression/_common/_streams.pyi create mode 100644 stdlib/compression/bz2/__init__.pyi create mode 100644 stdlib/compression/gzip/__init__.pyi create mode 100644 stdlib/compression/lzma/__init__.pyi create mode 100644 stdlib/compression/zlib/__init__.pyi diff --git a/pyproject.toml b/pyproject.toml index e1c07d518c89..b4a430f7510c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -245,6 +245,8 @@ extra-standard-library = [ # Extra modules not recognized by Ruff # Added in Python 3.9 "zoneinfo", + # Added in Python 3.14 + "compression", ] known-first-party = ["_utils", "ts_utils"] diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 3c5002e2163b..f56e9d6db404 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -5,7 +5,6 @@ _asyncio.all_tasks _asyncio.future_add_to_awaited_by _asyncio.future_discard_from_awaited_by -_compression _ctypes.POINTER _ctypes.byref _ctypes.pointer @@ -69,11 +68,11 @@ builtins.staticmethod.__annotate__ builtins.staticmethod.__class_getitem__ code.compile_command codeop.compile_command -compression -compression.bz2 -compression.gzip 
-compression.lzma -compression.zlib +compression.gzip.GzipFile.readinto +compression.gzip.GzipFile.readinto +compression.gzip.GzipFile.readinto1 +compression.gzip.GzipFile.readinto1 +compression.gzip.compress compression.zstd concurrent.futures.__all__ concurrent.futures.InterpreterPoolExecutor diff --git a/stdlib/@tests/stubtest_allowlists/win32-py314.txt b/stdlib/@tests/stubtest_allowlists/win32-py314.txt index 9e2f612dbd39..cf30f5056c0d 100644 --- a/stdlib/@tests/stubtest_allowlists/win32-py314.txt +++ b/stdlib/@tests/stubtest_allowlists/win32-py314.txt @@ -23,6 +23,7 @@ asyncio.windows_events.WindowsSelectorEventLoopPolicy asyncio.windows_events._DefaultEventLoopPolicy asyncio.windows_events._WindowsProactorEventLoopPolicy asyncio.windows_events._WindowsSelectorEventLoopPolicy +compression.zlib.ZLIBNG_VERSION ctypes.c_double_complex ctypes.c_float_complex ctypes.c_longdouble_complex diff --git a/stdlib/VERSIONS b/stdlib/VERSIONS index 717cf7b4d71a..bea644c67a8b 100644 --- a/stdlib/VERSIONS +++ b/stdlib/VERSIONS @@ -28,7 +28,7 @@ _bz2: 3.3- _codecs: 3.0- _collections_abc: 3.3- _compat_pickle: 3.1- -_compression: 3.5- +_compression: 3.5-3.13 _contextvars: 3.7- _csv: 3.0- _ctypes: 3.0- @@ -118,6 +118,7 @@ collections: 3.0- collections.abc: 3.3- colorsys: 3.0- compileall: 3.0- +compression: 3.14- concurrent: 3.2- configparser: 3.0- contextlib: 3.0- diff --git a/stdlib/_compression.pyi b/stdlib/_compression.pyi index a41a8142cc3a..80d38b4db824 100644 --- a/stdlib/_compression.pyi +++ b/stdlib/_compression.pyi @@ -1,4 +1,6 @@ -from _typeshed import WriteableBuffer +# _compression is replaced by compression._common._streams on Python 3.14+ (PEP-784) + +from _typeshed import Incomplete, WriteableBuffer from collections.abc import Callable from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase from typing import Any, Protocol @@ -16,9 +18,9 @@ class DecompressReader(RawIOBase): def __init__( self, fp: _Reader, - decomp_factory: Callable[..., object], + decomp_factory: Callable[..., Incomplete], trailing_error: type[Exception] | tuple[type[Exception], ...] = (), - **decomp_args: Any, + **decomp_args: Any, # These are passed to decomp_factory. ) -> None: ... def readinto(self, b: WriteableBuffer) -> int: ... def read(self, size: int = -1) -> bytes: ... diff --git a/stdlib/bz2.pyi b/stdlib/bz2.pyi index 3b21fbcf7117..0f9d00fbc633 100644 --- a/stdlib/bz2.pyi +++ b/stdlib/bz2.pyi @@ -1,17 +1,21 @@ -import _compression +import sys from _bz2 import BZ2Compressor as BZ2Compressor, BZ2Decompressor as BZ2Decompressor -from _compression import BaseStream from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from collections.abc import Iterable from typing import IO, Literal, Protocol, SupportsIndex, TextIO, overload from typing_extensions import Self, TypeAlias +if sys.version_info >= (3, 14): + from compression._common._streams import BaseStream, _Reader +else: + from _compression import BaseStream, _Reader + __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "open", "compress", "decompress"] # The following attributes and methods are optional: # def fileno(self) -> int: ... # def close(self) -> object: ... -class _ReadableFileobj(_compression._Reader, Protocol): ... +class _ReadableFileobj(_Reader, Protocol): ... class _WritableFileobj(Protocol): def write(self, b: bytes, /) -> object: ... 
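The conditional imports above follow PEP 784: on Python 3.14+ the familiar modules are re-exported under the new `compression` namespace, and the new stub packages added below are one-line star re-exports. A minimal sketch of the resulting runtime relationship, assuming Python 3.14+ (illustrative only):

```python
# Sketch only; assumes Python 3.14+, where the "compression" namespace exists.
import bz2
import compression.bz2 as cbz2

blob = cbz2.compress(b"hello world")        # same callable as bz2.compress
assert bz2.decompress(blob) == b"hello world"
assert cbz2.BZ2File is bz2.BZ2File          # star re-export, so the objects are identical
```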
diff --git a/stdlib/compression/__init__.pyi b/stdlib/compression/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stdlib/compression/_common/__init__.pyi b/stdlib/compression/_common/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stdlib/compression/_common/_streams.pyi b/stdlib/compression/_common/_streams.pyi new file mode 100644 index 000000000000..6303a9b1d460 --- /dev/null +++ b/stdlib/compression/_common/_streams.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete, WriteableBuffer +from collections.abc import Callable +from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase +from typing import Any, Protocol + +BUFFER_SIZE = DEFAULT_BUFFER_SIZE + +class _Reader(Protocol): + def read(self, n: int, /) -> bytes: ... + def seekable(self) -> bool: ... + def seek(self, n: int, /) -> Any: ... + +class BaseStream(BufferedIOBase): ... + +class DecompressReader(RawIOBase): + def __init__( + self, + fp: _Reader, + decomp_factory: Callable[..., Incomplete], # Consider backporting changes to _compression + trailing_error: type[Exception] | tuple[type[Exception], ...] = (), + **decomp_args: Any, # These are passed to decomp_factory. + ) -> None: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def read(self, size: int = -1) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... diff --git a/stdlib/compression/bz2/__init__.pyi b/stdlib/compression/bz2/__init__.pyi new file mode 100644 index 000000000000..9ddc39f27c28 --- /dev/null +++ b/stdlib/compression/bz2/__init__.pyi @@ -0,0 +1 @@ +from bz2 import * diff --git a/stdlib/compression/gzip/__init__.pyi b/stdlib/compression/gzip/__init__.pyi new file mode 100644 index 000000000000..9422a735c590 --- /dev/null +++ b/stdlib/compression/gzip/__init__.pyi @@ -0,0 +1 @@ +from gzip import * diff --git a/stdlib/compression/lzma/__init__.pyi b/stdlib/compression/lzma/__init__.pyi new file mode 100644 index 000000000000..936c3813db4f --- /dev/null +++ b/stdlib/compression/lzma/__init__.pyi @@ -0,0 +1 @@ +from lzma import * diff --git a/stdlib/compression/zlib/__init__.pyi b/stdlib/compression/zlib/__init__.pyi new file mode 100644 index 000000000000..78d176c03ee8 --- /dev/null +++ b/stdlib/compression/zlib/__init__.pyi @@ -0,0 +1 @@ +from zlib import * diff --git a/stdlib/gzip.pyi b/stdlib/gzip.pyi index b7fb40fbd82e..883456b1ddc3 100644 --- a/stdlib/gzip.pyi +++ b/stdlib/gzip.pyi @@ -1,4 +1,3 @@ -import _compression import sys import zlib from _typeshed import ReadableBuffer, SizedBuffer, StrOrBytesPath @@ -6,6 +5,11 @@ from io import FileIO, TextIOWrapper from typing import Final, Literal, Protocol, overload from typing_extensions import TypeAlias +if sys.version_info >= (3, 14): + from compression._common._streams import BaseStream, DecompressReader +else: + from _compression import BaseStream, DecompressReader + __all__ = ["BadGzipFile", "GzipFile", "open", "compress", "decompress"] _ReadBinaryMode: TypeAlias = Literal["r", "rb"] @@ -84,7 +88,7 @@ class _PaddedFile: class BadGzipFile(OSError): ... -class GzipFile(_compression.BaseStream): +class GzipFile(BaseStream): myfileobj: FileIO | None mode: object name: str @@ -153,7 +157,7 @@ class GzipFile(_compression.BaseStream): def seek(self, offset: int, whence: int = 0) -> int: ... def readline(self, size: int | None = -1) -> bytes: ... -class _GzipReader(_compression.DecompressReader): +class _GzipReader(DecompressReader): def __init__(self, fp: _ReadableFileobj) -> None: ... 
def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: ... diff --git a/stdlib/lzma.pyi b/stdlib/lzma.pyi index 2f0279f5986b..b066d222466b 100644 --- a/stdlib/lzma.pyi +++ b/stdlib/lzma.pyi @@ -1,4 +1,4 @@ -from _compression import BaseStream +import sys from _lzma import ( CHECK_CRC32 as CHECK_CRC32, CHECK_CRC64 as CHECK_CRC64, @@ -38,6 +38,11 @@ from _typeshed import ReadableBuffer, StrOrBytesPath from typing import IO, Literal, TextIO, overload from typing_extensions import Self, TypeAlias +if sys.version_info >= (3, 14): + from compression._common._streams import BaseStream +else: + from _compression import BaseStream + __all__ = [ "CHECK_NONE", "CHECK_CRC32", From c1ef517667966a9b89d5c08a97206c34cf47d06e Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Sun, 11 May 2025 15:41:46 +0200 Subject: [PATCH 336/388] Add io.{Reader,Writer} (#14013) --- stdlib/@tests/stubtest_allowlists/py314.txt | 10 +++++++--- stdlib/io.pyi | 15 ++++++++++++++- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index f56e9d6db404..a2fb49e599e3 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -129,9 +129,6 @@ importlib.machinery.__all__ importlib.machinery.AppleFrameworkLoader importlib.util.__all__ importlib.util.Loader -io.__all__ -io.Reader -io.Writer ipaddress._IPAddressBase.version logging.handlers.SysLogHandler.__init__ marshal.dump @@ -299,6 +296,13 @@ typing(_extensions)?\.IO\.writelines .*\.ForwardRef\.__owner__ .*\.ForwardRef\.__stringifier_dict__ +# These protocols use ABC hackery at runtime. +io.Reader.__class_getitem__ +io.Reader.read +io.Writer.__class_getitem__ +io.Writer.write + + # ============================================================= # Allowlist entries that cannot or should not be fixed; >= 3.13 # ============================================================= diff --git a/stdlib/io.pyi b/stdlib/io.pyi index 5c26cb245a2f..1313df183d36 100644 --- a/stdlib/io.pyi +++ b/stdlib/io.pyi @@ -20,7 +20,7 @@ from _io import ( open as open, open_code as open_code, ) -from typing import Final +from typing import Final, Protocol, TypeVar __all__ = [ "BlockingIOError", @@ -44,11 +44,17 @@ __all__ = [ "SEEK_END", ] +if sys.version_info >= (3, 14): + __all__ += ["Reader", "Writer"] + if sys.version_info >= (3, 11): from _io import text_encoding as text_encoding __all__ += ["DEFAULT_BUFFER_SIZE", "IncrementalNewlineDecoder", "text_encoding"] +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) + SEEK_SET: Final = 0 SEEK_CUR: Final = 1 SEEK_END: Final = 2 @@ -58,3 +64,10 @@ class IOBase(_IOBase, metaclass=abc.ABCMeta): ... class RawIOBase(_RawIOBase, IOBase): ... class BufferedIOBase(_BufferedIOBase, IOBase): ... class TextIOBase(_TextIOBase, IOBase): ... + +if sys.version_info >= (3, 14): + class Reader(Protocol[_T_co]): + def read(self, size: int = ..., /) -> _T_co: ... + + class Writer(Protocol[_T_contra]): + def write(self, data: _T_contra, /) -> int: ... 
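The `io.Reader` and `io.Writer` protocols introduced above are structural, so any object with a compatible `read` or `write` method satisfies them. A small usage sketch, assuming Python 3.14+ where the protocols exist at runtime:

```python
# Sketch only; assumes Python 3.14+ (io.Reader / io.Writer are generic protocols there).
from io import BytesIO, Reader, Writer


def copy_stream(src: Reader[bytes], dst: Writer[bytes]) -> int:
    """Read everything from src and write it to dst, returning the byte count."""
    return dst.write(src.read())


sink = BytesIO()
copied = copy_stream(BytesIO(b"payload"), sink)
assert copied == 7 and sink.getvalue() == b"payload"
```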
From 1079875d2d9f4f2125209be2652b32f9998416ee Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Sun, 11 May 2025 16:07:53 +0200 Subject: [PATCH 337/388] Copy typechecker-internal symbols to `_typeshed._type_checker_internals` (#13816) --- stdlib/_typeshed/_type_checker_internals.pyi | 89 ++++++++++++++++++++ stdlib/builtins.pyi | 1 + stdlib/typing.pyi | 4 + 3 files changed, 94 insertions(+) create mode 100644 stdlib/_typeshed/_type_checker_internals.pyi diff --git a/stdlib/_typeshed/_type_checker_internals.pyi b/stdlib/_typeshed/_type_checker_internals.pyi new file mode 100644 index 000000000000..feb22aae0073 --- /dev/null +++ b/stdlib/_typeshed/_type_checker_internals.pyi @@ -0,0 +1,89 @@ +# Internals used by some type checkers. +# +# Don't use this module directly. It is only for type checkers to use. + +import sys +import typing_extensions +from _collections_abc import dict_items, dict_keys, dict_values +from abc import ABCMeta +from collections.abc import Awaitable, Generator, Iterable, Mapping +from typing import Any, ClassVar, Generic, TypeVar, overload +from typing_extensions import Never + +_T = TypeVar("_T") + +# Used for an undocumented mypy feature. Does not exist at runtime. +promote = object() + +# Fallback type providing methods and attributes that appear on all `TypedDict` types. +# N.B. Keep this mostly in sync with typing_extensions._TypedDict/mypy_extensions._TypedDict +class TypedDictFallback(Mapping[str, object], metaclass=ABCMeta): + __total__: ClassVar[bool] + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] + # __orig_bases__ sometimes exists on <3.12, but not consistently, + # so we only add it to the stub on 3.12+ + if sys.version_info >= (3, 12): + __orig_bases__: ClassVar[tuple[Any, ...]] + if sys.version_info >= (3, 13): + __readonly_keys__: ClassVar[frozenset[str]] + __mutable_keys__: ClassVar[frozenset[str]] + + def copy(self) -> typing_extensions.Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. + def pop(self, k: Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def update(self, m: typing_extensions.Self, /) -> None: ... + def __delitem__(self, k: Never) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + @overload + def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + @overload + def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... + @overload + def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + @overload + def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... + # supposedly incompatible definitions of __or__ and __ior__ + def __ior__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... # type: ignore[misc] + +# Fallback type providing methods and attributes that appear on all `NamedTuple` types. +class NamedTupleFallback(tuple[Any, ...]): + _field_defaults: ClassVar[dict[str, Any]] + _fields: ClassVar[tuple[str, ...]] + # __orig_bases__ sometimes exists on <3.12, but not consistently + # So we only add it to the stub on 3.12+. 
+ if sys.version_info >= (3, 12): + __orig_bases__: ClassVar[tuple[Any, ...]] + + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]], /) -> None: ... + @overload + @typing_extensions.deprecated( + "Creating a typing.NamedTuple using keyword arguments is deprecated and support will be removed in Python 3.15" + ) + def __init__(self, typename: str, fields: None = None, /, **kwargs: Any) -> None: ... + @classmethod + def _make(cls, iterable: Iterable[Any]) -> typing_extensions.Self: ... + def _asdict(self) -> dict[str, Any]: ... + def _replace(self, **kwargs: Any) -> typing_extensions.Self: ... + if sys.version_info >= (3, 13): + def __replace__(self, **kwargs: Any) -> typing_extensions.Self: ... + +# Non-default variations to accommodate couroutines, and `AwaitableGenerator` having a 4th type parameter. +_S = TypeVar("_S") +_YieldT_co = TypeVar("_YieldT_co", covariant=True) +_SendT_nd_contra = TypeVar("_SendT_nd_contra", contravariant=True) +_ReturnT_nd_co = TypeVar("_ReturnT_nd_co", covariant=True) + +# The parameters correspond to Generator, but the 4th is the original type. +class AwaitableGenerator( + Awaitable[_ReturnT_nd_co], + Generator[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co], + Generic[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co, _S], + metaclass=ABCMeta, +): ... diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi index 5a1d4dd8afb9..ad6994cf605b 100644 --- a/stdlib/builtins.pyi +++ b/stdlib/builtins.pyi @@ -1011,6 +1011,7 @@ class tuple(Sequence[_T_co]): # Doesn't exist at runtime, but deleting this breaks mypy and pyright. See: # https://github.com/python/typeshed/issues/7580 # https://github.com/python/mypy/issues/8240 +# Obsolete, use types.FunctionType instead. @final @type_check_only class function: diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi index 6b6c2654d247..1839df8f31dc 100644 --- a/stdlib/typing.pyi +++ b/stdlib/typing.pyi @@ -220,6 +220,7 @@ class TypeVar: def evaluate_default(self) -> EvaluateFunc | None: ... # Used for an undocumented mypy feature. Does not exist at runtime. +# Obsolete, use _typeshed._type_checker_internals.promote instead. _promote = object() # N.B. Keep this definition in sync with typing_extensions._SpecialForm @@ -564,6 +565,7 @@ class Coroutine(Awaitable[_ReturnT_nd_co], Generic[_YieldT_co, _SendT_nd_contra, # NOTE: This type does not exist in typing.py or PEP 484 but mypy needs it to exist. # The parameters correspond to Generator, but the 4th is the original type. +# Obsolete, use _typeshed._type_checker_internals.AwaitableGenerator instead. @type_check_only class AwaitableGenerator( Awaitable[_ReturnT_nd_co], @@ -946,6 +948,7 @@ if sys.version_info >= (3, 11): # Type constructors +# Obsolete, will be changed to a function. Use _typeshed._type_checker_internals.NamedTupleFallback instead. class NamedTuple(tuple[Any, ...]): _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] @@ -970,6 +973,7 @@ class NamedTuple(tuple[Any, ...]): # Internal mypy fallback type for all typed dicts (does not exist at runtime) # N.B. Keep this mostly in sync with typing_extensions._TypedDict/mypy_extensions._TypedDict +# Obsolete, use _typeshed._type_checker_internals.TypedDictFallback instead. 
@type_check_only class _TypedDict(Mapping[str, object], metaclass=ABCMeta): __total__: ClassVar[bool] From 6b766b613d4bec342fd8f5482dbfd43f3b0c7cca Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Sun, 11 May 2025 16:24:31 +0200 Subject: [PATCH 338/388] Make BufferedReader generic over a protocol (#13533) --- stdlib/@tests/test_cases/check_io.py | 5 +++- stdlib/_io.pyi | 40 +++++++++++++++++++++++----- 2 files changed, 37 insertions(+), 8 deletions(-) diff --git a/stdlib/@tests/test_cases/check_io.py b/stdlib/@tests/test_cases/check_io.py index d0713a26ae86..ce8c34aedbad 100644 --- a/stdlib/@tests/test_cases/check_io.py +++ b/stdlib/@tests/test_cases/check_io.py @@ -1,7 +1,10 @@ +from _io import BufferedReader from gzip import GzipFile -from io import FileIO, TextIOWrapper +from io import FileIO, RawIOBase, TextIOWrapper from typing_extensions import assert_type +BufferedReader(RawIOBase()) + assert_type(TextIOWrapper(FileIO("")).buffer, FileIO) assert_type(TextIOWrapper(FileIO(13)).detach(), FileIO) assert_type(TextIOWrapper(GzipFile("")).buffer, GzipFile) diff --git a/stdlib/_io.pyi b/stdlib/_io.pyi index 54efd3199760..c77d75287c25 100644 --- a/stdlib/_io.pyi +++ b/stdlib/_io.pyi @@ -88,9 +88,36 @@ class BytesIO(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] def readlines(self, size: int | None = None, /) -> list[bytes]: ... def seek(self, pos: int, whence: int = 0, /) -> int: ... -class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes - raw: RawIOBase - def __init__(self, raw: RawIOBase, buffer_size: int = 8192) -> None: ... +class _BufferedReaderStream(Protocol): + def read(self, n: int = ..., /) -> bytes: ... + # Optional: def readall(self) -> bytes: ... + def readinto(self, b: memoryview, /) -> int | None: ... + def seek(self, pos: int, whence: int, /) -> int: ... + def tell(self) -> int: ... + def truncate(self, size: int, /) -> int: ... + def flush(self) -> object: ... + def close(self) -> object: ... + @property + def closed(self) -> bool: ... + def readable(self) -> bool: ... + def seekable(self) -> bool: ... + + # The following methods just pass through to the underlying stream. Since + # not all streams support them, they are marked as optional here, and will + # raise an AttributeError if called on a stream that does not support them. + + # @property + # def name(self) -> Any: ... # Type is inconsistent between the various I/O types. + # @property + # def mode(self) -> str: ... + # def fileno(self) -> int: ... + # def isatty(self) -> bool: ... + +_BufferedReaderStreamT = TypeVar("_BufferedReaderStreamT", bound=_BufferedReaderStream, default=_BufferedReaderStream) + +class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO, Generic[_BufferedReaderStreamT]): # type: ignore[misc] # incompatible definitions of methods in the base classes + raw: _BufferedReaderStreamT + def __init__(self, raw: _BufferedReaderStreamT, buffer_size: int = 8192) -> None: ... def peek(self, size: int = 0, /) -> bytes: ... def seek(self, target: int, whence: int = 0, /) -> int: ... def truncate(self, pos: int | None = None, /) -> int: ... @@ -111,8 +138,8 @@ class BufferedRandom(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore def peek(self, size: int = 0, /) -> bytes: ... def truncate(self, pos: int | None = None, /) -> int: ... 
-class BufferedRWPair(BufferedIOBase, _BufferedIOBase): - def __init__(self, reader: RawIOBase, writer: RawIOBase, buffer_size: int = 8192, /) -> None: ... +class BufferedRWPair(BufferedIOBase, _BufferedIOBase, Generic[_BufferedReaderStreamT]): + def __init__(self, reader: _BufferedReaderStreamT, writer: RawIOBase, buffer_size: int = 8192, /) -> None: ... def peek(self, size: int = 0, /) -> bytes: ... class _TextIOBase(_IOBase): @@ -131,8 +158,7 @@ class _TextIOBase(_IOBase): @type_check_only class _WrappedBuffer(Protocol): # "name" is wrapped by TextIOWrapper. Its type is inconsistent between - # the various I/O types, see the comments on TextIOWrapper.name and - # TextIO.name. + # the various I/O types. @property def name(self) -> Any: ... @property From d9175a80a777c30d56e70828c33b70efe37c9dc5 Mon Sep 17 00:00:00 2001 From: LordGvozd <84399421+LordGvozd@users.noreply.github.com> Date: Mon, 12 May 2025 00:05:25 +0600 Subject: [PATCH 339/388] Bump gevent to 25.4.* (#13885) --- .../@tests/stubtest_allowlist_darwin.txt | 4 - .../@tests/stubtest_allowlist_linux.txt | 4 - .../@tests/stubtest_allowlist_win32.txt | 6 -- stubs/gevent/METADATA.toml | 2 +- stubs/gevent/gevent/_config.pyi | 5 + stubs/gevent/gevent/ares.pyi | 5 +- stubs/gevent/gevent/lock.pyi | 1 + stubs/gevent/gevent/pywsgi.pyi | 1 + stubs/gevent/gevent/queue.pyi | 38 +++++--- stubs/gevent/gevent/resolver/ares.pyi | 75 ++++++++------- stubs/gevent/gevent/resolver/cares.pyi | 91 +++++++++---------- stubs/gevent/gevent/resolver_ares.pyi | 5 +- 12 files changed, 113 insertions(+), 124 deletions(-) diff --git a/stubs/gevent/@tests/stubtest_allowlist_darwin.txt b/stubs/gevent/@tests/stubtest_allowlist_darwin.txt index 99849c10b71b..dd72ec9eff22 100644 --- a/stubs/gevent/@tests/stubtest_allowlist_darwin.txt +++ b/stubs/gevent/@tests/stubtest_allowlist_darwin.txt @@ -17,10 +17,6 @@ gevent.libev.watcher.watcher.feed # undocumented argument for internal use only gevent.libev.watcher.watcher.__init__ -# ares_host_result always has the same layout, so we set the arguments on __new__ -# to reflect that fact, we don't care that the implementation accepts any number -# of arguments -gevent.resolver.cares.ares_host_result.__new__ # we have punted on socket, the gevent version of these functions sometimes use # named parameters, while the base implementation only allows positional arguments diff --git a/stubs/gevent/@tests/stubtest_allowlist_linux.txt b/stubs/gevent/@tests/stubtest_allowlist_linux.txt index d768857225d1..83da879a8f8f 100644 --- a/stubs/gevent/@tests/stubtest_allowlist_linux.txt +++ b/stubs/gevent/@tests/stubtest_allowlist_linux.txt @@ -24,10 +24,6 @@ gevent.libev.watcher.watcher.feed # undocumented argument for internal use only gevent.libev.watcher.watcher.__init__ -# ares_host_result always has the same layout, so we set the arguments on __new__ -# to reflect that fact, we don't care that the implementation accepts any number -# of arguments -gevent.resolver.cares.ares_host_result.__new__ # we have punted on socket, the gevent version of these functions sometimes use # named parameters, while the base implementation only allows positional arguments diff --git a/stubs/gevent/@tests/stubtest_allowlist_win32.txt b/stubs/gevent/@tests/stubtest_allowlist_win32.txt index 52eddde03a7c..a8a54020a4c7 100644 --- a/stubs/gevent/@tests/stubtest_allowlist_win32.txt +++ b/stubs/gevent/@tests/stubtest_allowlist_win32.txt @@ -19,9 +19,3 @@ gevent.libev.corecext.* # these won't work until we find out if we can install libev somehow with choco 
gevent.libev.corecffi gevent.libev.watcher - -# these don't work on windows -gevent.ares -gevent.resolver.ares -gevent.resolver.cares -gevent.resolver_ares diff --git a/stubs/gevent/METADATA.toml b/stubs/gevent/METADATA.toml index 960f4c760e7f..456b719c6819 100644 --- a/stubs/gevent/METADATA.toml +++ b/stubs/gevent/METADATA.toml @@ -1,4 +1,4 @@ -version = "24.11.*" +version = "25.4.*" upstream_repository = "https://github.com/gevent/gevent" requires = ["types-greenlet", "types-psutil"] diff --git a/stubs/gevent/gevent/_config.pyi b/stubs/gevent/gevent/_config.pyi index 802f374fea38..a7824ba9714e 100644 --- a/stubs/gevent/gevent/_config.pyi +++ b/stubs/gevent/gevent/_config.pyi @@ -72,6 +72,7 @@ class Config: ares_udp_port: _SettingDescriptor[str | int | None] ares_tcp_port: _SettingDescriptor[str | int | None] ares_servers: _SettingDescriptor[Sequence[str] | str | None] + print_blocking_reports: _SettingDescriptor[bool] class ImportableSetting(Generic[_T]): default: str | Sequence[str] @@ -141,6 +142,10 @@ class MaxBlockingTime(FloatSettingMixin, Setting[float]): default: float desc: str +class PrintBlockingReports(BoolSettingMixin, Setting[bool]): + default: bool + desc: str + class MonitorMemoryPeriod(FloatSettingMixin, Setting[float]): default: int desc: str diff --git a/stubs/gevent/gevent/ares.pyi b/stubs/gevent/gevent/ares.pyi index 8fa5509e5fa6..6104efb65acc 100644 --- a/stubs/gevent/gevent/ares.pyi +++ b/stubs/gevent/gevent/ares.pyi @@ -1,6 +1,3 @@ -import sys - from gevent.resolver.cares import * -if sys.platform != "win32": - __all__ = ["channel"] +__all__ = ["channel"] diff --git a/stubs/gevent/gevent/lock.pyi b/stubs/gevent/gevent/lock.pyi index ddaf1ab85701..2daa782741fa 100644 --- a/stubs/gevent/gevent/lock.pyi +++ b/stubs/gevent/gevent/lock.pyi @@ -38,3 +38,4 @@ class RLock: def __enter__(self) -> bool: ... def release(self) -> None: ... def __exit__(self, typ: type[BaseException] | None, val: BaseException | None, tb: TracebackType | None) -> None: ... + def locked(self) -> bool: ... diff --git a/stubs/gevent/gevent/pywsgi.pyi b/stubs/gevent/gevent/pywsgi.pyi index 8c25035fd2cb..91400018eabd 100644 --- a/stubs/gevent/gevent/pywsgi.pyi +++ b/stubs/gevent/gevent/pywsgi.pyi @@ -28,6 +28,7 @@ class Input: position: int chunked_input: bool chunk_length: int + send_100_continue_enabled: bool def __init__( self, rfile: BufferedReader, content_length: int | None, socket: _GeventSocket | None = None, chunked_input: bool = False ) -> None: ... diff --git a/stubs/gevent/gevent/queue.pyi b/stubs/gevent/gevent/queue.pyi index 515aae6f5588..b95cb5212be6 100644 --- a/stubs/gevent/gevent/queue.pyi +++ b/stubs/gevent/gevent/queue.pyi @@ -1,9 +1,10 @@ import sys +import types from collections import deque from collections.abc import Iterable # technically it is using _PySimpleQueue, which has the same interface as SimpleQueue -from queue import Empty as Empty, Full as Full, SimpleQueue as SimpleQueue +from queue import Empty as Empty, Full as Full from typing import Any, Generic, Literal, TypeVar, final, overload from typing_extensions import Self @@ -19,13 +20,16 @@ else: _T = TypeVar("_T") -class Queue(Generic[_T]): +class SimpleQueue(Generic[_T]): @property def hub(self) -> Hub: ... # readonly in Cython @property def queue(self) -> deque[_T]: ... # readonly in Cython maxsize: int | None is_shutdown: bool + + @classmethod + def __class_getitem__(cls, item: Any, /) -> types.GenericAlias: ... @overload def __init__(self, maxsize: int | None = None) -> None: ... 
@overload @@ -42,13 +46,27 @@ class Queue(Generic[_T]): def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... def put_nowait(self, item: _T) -> None: ... def qsize(self) -> int: ... - def shutdown(self, immediate: bool = False) -> None: ... def __bool__(self) -> bool: ... def __iter__(self) -> Self: ... def __len__(self) -> int: ... def __next__(self) -> _T: ... next = __next__ +class Queue(SimpleQueue[_T]): + @property + def unfinished_tasks(self) -> int: ... # readonly in Cython + @overload + def __init__(self, maxsize: int | None = None, *, unfinished_tasks: int | None = None) -> None: ... + @overload + def __init__(self, maxsize: int | None, items: Iterable[_T], unfinished_tasks: int | None = None) -> None: ... + @overload + def __init__(self, maxsize: int | None = None, *, items: Iterable[_T], unfinished_tasks: int | None = None) -> None: ... + def join(self, timeout: float | None = None) -> bool: ... + def task_done(self) -> None: ... + def shutdown(self, immediate: bool = False) -> None: ... + +JoinableQueue = Queue + @final class UnboundQueue(Queue[_T]): @overload @@ -61,18 +79,6 @@ class UnboundQueue(Queue[_T]): class PriorityQueue(Queue[_T]): ... class LifoQueue(Queue[_T]): ... -class JoinableQueue(Queue[_T]): - @property - def unfinished_tasks(self) -> int: ... # readonly in Cython - @overload - def __init__(self, maxsize: int | None = None, *, unfinished_tasks: int | None = None) -> None: ... - @overload - def __init__(self, maxsize: int | None, items: Iterable[_T], unfinished_tasks: int | None = None) -> None: ... - @overload - def __init__(self, maxsize: int | None = None, *, items: Iterable[_T], unfinished_tasks: int | None = None) -> None: ... - def join(self, timeout: float | None = None) -> bool: ... - def task_done(self) -> None: ... - class Channel(Generic[_T]): @property def getters(self) -> deque[Waiter[Any]]: ... # readonly in Cython @@ -81,6 +87,8 @@ class Channel(Generic[_T]): @property def hub(self) -> Hub: ... # readonly in Cython def __init__(self, maxsize: Literal[1] = 1) -> None: ... + @classmethod + def __class_getitem__(cls, item: Any, /) -> types.GenericAlias: ... @property def balance(self) -> int: ... def qsize(self) -> Literal[0]: ... 
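Per the queue changes above, the joinable behaviour (`task_done()`/`join()`) now lives on `Queue` itself, `JoinableQueue` is kept as an alias, and the queue classes are generic. A short usage sketch, assuming gevent 25.4+ matching these stubs:

```python
# Illustrative sketch; assumes gevent 25.4+ (the version these stubs target).
from gevent.queue import Queue

q: "Queue[int]" = Queue()   # string annotation keeps this runnable even without runtime subscription
q.put(1)
print(q.get())              # -> 1
q.task_done()               # task_done()/join() are available on Queue itself
q.join()                    # returns immediately once all items are accounted for
```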
diff --git a/stubs/gevent/gevent/resolver/ares.pyi b/stubs/gevent/gevent/resolver/ares.pyi index a20e6b99a1e3..29c5ea0cb843 100644 --- a/stubs/gevent/gevent/resolver/ares.pyi +++ b/stubs/gevent/gevent/resolver/ares.pyi @@ -1,43 +1,40 @@ -import sys +from collections.abc import Sequence +from typing import TypedDict -if sys.platform != "win32": - from collections.abc import Sequence - from typing import TypedDict +from gevent._types import _Watcher +from gevent.hub import Hub +from gevent.resolver import AbstractResolver +from gevent.resolver.cares import channel - from gevent._types import _Watcher - from gevent.hub import Hub - from gevent.resolver import AbstractResolver - from gevent.resolver.cares import channel +class _ChannelArgs(TypedDict): + flags: str | int | None + timeout: str | float | None + tries: str | int | None + ndots: str | int | None + udp_port: str | int | None + tcp_port: str | int | None + servers: Sequence[str] | str | None - class _ChannelArgs(TypedDict): - flags: str | int | None - timeout: str | float | None - tries: str | int | None - ndots: str | int | None - udp_port: str | int | None - tcp_port: str | int | None - servers: Sequence[str] | str | None +class Resolver(AbstractResolver): + cares_class: type[channel] + hub: Hub + cares: channel + pid: int + params: _ChannelArgs + fork_watcher: _Watcher + def __init__( + self, + hub: Hub | None = None, + use_environ: bool = True, + *, + flags: str | int | None = None, + timeout: str | float | None = None, + tries: str | int | None = None, + ndots: str | int | None = None, + udp_port: str | int | None = None, + tcp_port: str | int | None = None, + servers: Sequence[str] | str | None = None, + ) -> None: ... + def __del__(self) -> None: ... - class Resolver(AbstractResolver): - cares_class: type[channel] - hub: Hub - cares: channel - pid: int - params: _ChannelArgs - fork_watcher: _Watcher - def __init__( - self, - hub: Hub | None = None, - use_environ: bool = True, - *, - flags: str | int | None = None, - timeout: str | float | None = None, - tries: str | int | None = None, - ndots: str | int | None = None, - udp_port: str | int | None = None, - tcp_port: str | int | None = None, - servers: Sequence[str] | str | None = None, - ) -> None: ... - def __del__(self) -> None: ... - - __all__ = ["Resolver"] +__all__ = ["Resolver"] diff --git a/stubs/gevent/gevent/resolver/cares.pyi b/stubs/gevent/gevent/resolver/cares.pyi index b8f3546bf97e..21f3e504c7e8 100644 --- a/stubs/gevent/gevent/resolver/cares.pyi +++ b/stubs/gevent/gevent/resolver/cares.pyi @@ -1,53 +1,50 @@ -import sys +from collections.abc import Callable, Iterable, Sequence +from typing import Any, Generic, TypeVar +from typing_extensions import Self -if sys.platform != "win32": - from collections.abc import Callable, Sequence - from typing import Any, Generic, TypeVar - from typing_extensions import Self +from gevent._types import _AddrinfoResult, _Loop, _NameinfoResult, _SockAddr - from gevent._types import _AddrinfoResult, _Loop, _NameinfoResult, _SockAddr +_T = TypeVar("_T") - _T = TypeVar("_T") +class ares_host_result(tuple[str, list[str], list[str]]): + family: int + def __new__(cls, family: int, iterable: Iterable[Any]) -> Self: ... - class Result(Generic[_T]): - exception: BaseException | None - value: _T | None - def __init__(self, value: _T | None = None, exception: BaseException | None = None) -> None: ... - def get(self) -> Any | None: ... - def successful(self) -> bool: ... 
+class Result(Generic[_T]): + exception: BaseException | None + value: _T | None + def __init__(self, value: _T | None = None, exception: BaseException | None = None) -> None: ... + def get(self) -> Any | None: ... + def successful(self) -> bool: ... - class ares_host_result(tuple[str, list[str], list[str]]): - family: int - def __new__(cls, family: int, hostname: str, aliases: list[str], addr_list: list[str], /) -> Self: ... +class channel: + @property + def loop(self) -> _Loop: ... + def __init__( + self, + loop: _Loop, + flags: str | int | None = None, + timeout: str | float | None = None, + tries: str | int | None = None, + ndots: str | int | None = None, + udp_port: str | int | None = None, + tcp_port: str | int | None = None, + servers: Sequence[str] | str | None = None, + ) -> None: ... + def destroy(self) -> None: ... + def getaddrinfo( + self, + callback: Callable[[Result[_AddrinfoResult]], object], + name: str, + service: str | None, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> None: ... + def gethostbyaddr(self, callback: Callable[[Result[ares_host_result]], object], addr: str) -> Any: ... + def gethostbyname(self, callback: Callable[[Result[ares_host_result]], object], name: str, family: int = ...) -> None: ... + def getnameinfo(self, callback: Callable[[Result[_NameinfoResult]], object], sockaddr: _SockAddr, flags: int) -> None: ... + def set_servers(self, servers: Sequence[str] | str | None = None) -> None: ... - class channel: - @property - def loop(self) -> _Loop: ... - def __init__( - self, - loop: _Loop, - flags: str | int | None = None, - timeout: str | float | None = None, - tries: str | int | None = None, - ndots: str | int | None = None, - udp_port: str | int | None = None, - tcp_port: str | int | None = None, - servers: Sequence[str] | str | None = None, - ) -> None: ... - def destroy(self) -> None: ... - def getaddrinfo( - self, - callback: Callable[[Result[_AddrinfoResult]], object], - name: str, - service: str | None, - family: int = 0, - type: int = 0, - proto: int = 0, - flags: int = 0, - ) -> None: ... - def gethostbyaddr(self, callback: Callable[[Result[ares_host_result]], object], addr: str) -> Any: ... - def gethostbyname(self, callback: Callable[[Result[ares_host_result]], object], name: str, family: int = ...) -> None: ... - def getnameinfo(self, callback: Callable[[Result[_NameinfoResult]], object], sockaddr: _SockAddr, flags: int) -> None: ... - def set_servers(self, servers: Sequence[str] | str | None = None) -> None: ... 
- - __all__ = ["channel"] +__all__ = ["channel"] diff --git a/stubs/gevent/gevent/resolver_ares.pyi b/stubs/gevent/gevent/resolver_ares.pyi index 5eb75234139c..4d1fbbaae20e 100644 --- a/stubs/gevent/gevent/resolver_ares.pyi +++ b/stubs/gevent/gevent/resolver_ares.pyi @@ -1,6 +1,3 @@ -import sys - from gevent.resolver.ares import * -if sys.platform != "win32": - __all__ = ["Resolver"] +__all__ = ["Resolver"] From 83ac75b67beea793753b81005d397501a569eb9d Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Sun, 11 May 2025 14:44:37 -0500 Subject: [PATCH 340/388] Update pathlib for 3.14 (#14006) --- stdlib/@tests/stubtest_allowlists/py314.txt | 10 ---- stdlib/@tests/test_cases/check_pathlib.py | 22 +++++++-- stdlib/VERSIONS | 1 + stdlib/{pathlib.pyi => pathlib/__init__.pyi} | 51 ++++++++++++-------- stdlib/pathlib/types.pyi | 8 +++ 5 files changed, 59 insertions(+), 33 deletions(-) rename stdlib/{pathlib.pyi => pathlib/__init__.pyi} (87%) create mode 100644 stdlib/pathlib/types.pyi diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index a2fb49e599e3..67a725257bbe 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -147,16 +147,6 @@ multiprocessing.process.BaseProcess.interrupt multiprocessing.synchronize.SemLock.locked os.__all__ os.readinto -pathlib.Path.copy_into -pathlib.Path.copytree -pathlib.Path.delete -pathlib.Path.info -pathlib.Path.move -pathlib.Path.move_into -pathlib.Path.rmtree -pathlib.PurePath.is_relative_to -pathlib.PurePath.relative_to -pathlib.types pdb.__all__ pdb.Pdb.__init__ pdb.Pdb.checkline diff --git a/stdlib/@tests/test_cases/check_pathlib.py b/stdlib/@tests/test_cases/check_pathlib.py index 9b4d681c9e96..d3e85188b97b 100644 --- a/stdlib/@tests/test_cases/check_pathlib.py +++ b/stdlib/@tests/test_cases/check_pathlib.py @@ -4,6 +4,10 @@ from pathlib import Path, PureWindowsPath from typing_extensions import assert_type + +class MyCustomPath(Path): ... + + if Path("asdf") == Path("asdf"): ... @@ -23,8 +27,20 @@ if sys.version_info >= (3, 13): - - class MyCustomPath(Path): ... - pth = MyCustomPath.from_uri("file:///tmp/abc.txt") assert_type(pth, MyCustomPath) + + +if sys.version_info >= (3, 14): + pth = MyCustomPath("asdf") + # With text path, type should be preserved. + assert_type(pth.move_into("asdf"), MyCustomPath) + assert_type(pth.move("asdf"), MyCustomPath) + assert_type(pth.copy("asdf"), MyCustomPath) + assert_type(pth.copy_into("asdf"), MyCustomPath) + + # With an actual path type, that type should be preserved. 
+ assert_type(pth.move_into(Path("asdf")), Path) + assert_type(pth.move(Path("asdf")), Path) + assert_type(pth.copy(Path("asdf")), Path) + assert_type(pth.copy_into(Path("asdf")), Path) diff --git a/stdlib/VERSIONS b/stdlib/VERSIONS index bea644c67a8b..0b312925465d 100644 --- a/stdlib/VERSIONS +++ b/stdlib/VERSIONS @@ -228,6 +228,7 @@ os: 3.0- ossaudiodev: 3.0-3.12 parser: 3.0-3.9 pathlib: 3.4- +pathlib.types: 3.14- pdb: 3.0- pickle: 3.0- pickletools: 3.0- diff --git a/stdlib/pathlib.pyi b/stdlib/pathlib/__init__.pyi similarity index 87% rename from stdlib/pathlib.pyi rename to stdlib/pathlib/__init__.pyi index 1e4d97770b7b..b84fc69313a1 100644 --- a/stdlib/pathlib.pyi +++ b/stdlib/pathlib/__init__.pyi @@ -15,11 +15,16 @@ from collections.abc import Callable, Generator, Iterator, Sequence from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from os import PathLike, stat_result from types import GenericAlias, TracebackType -from typing import IO, Any, BinaryIO, ClassVar, Literal, overload +from typing import IO, Any, BinaryIO, ClassVar, Literal, TypeVar, overload from typing_extensions import Never, Self, deprecated +_PathT = TypeVar("_PathT", bound=PurePath) + __all__ = ["PurePath", "PurePosixPath", "PureWindowsPath", "Path", "PosixPath", "WindowsPath"] +if sys.version_info >= (3, 14): + from pathlib.types import PathInfo + if sys.version_info >= (3, 13): __all__ += ["UnsupportedOperation"] @@ -63,7 +68,9 @@ class PurePath(PathLike[str]): def as_uri(self) -> str: ... def is_absolute(self) -> bool: ... def is_reserved(self) -> bool: ... - if sys.version_info >= (3, 12): + if sys.version_info >= (3, 14): + def is_relative_to(self, other: StrPath) -> bool: ... + elif sys.version_info >= (3, 12): def is_relative_to(self, other: StrPath, /, *_deprecated: StrPath) -> bool: ... else: def is_relative_to(self, *other: StrPath) -> bool: ... @@ -73,7 +80,9 @@ class PurePath(PathLike[str]): else: def match(self, path_pattern: str) -> bool: ... - if sys.version_info >= (3, 12): + if sys.version_info >= (3, 14): + def relative_to(self, other: StrPath, *, walk_up: bool = False) -> Self: ... + elif sys.version_info >= (3, 12): def relative_to(self, other: StrPath, /, *_deprecated: StrPath, walk_up: bool = False) -> Self: ... else: def relative_to(self, *other: StrPath) -> Self: ... @@ -154,17 +163,25 @@ class Path(PurePath): def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ... if sys.version_info >= (3, 14): - def copy(self, target: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> None: ... - def copytree( - self, - target: StrPath, - *, - follow_symlinks: bool = True, - preserve_metadata: bool = False, - dirs_exist_ok: bool = False, - ignore: Callable[[Self], bool] | None = None, - on_error: Callable[[OSError], object] | None = None, - ) -> None: ... + + @property + def info(self) -> PathInfo: ... + @overload + def move_into(self, target_dir: _PathT) -> _PathT: ... # type: ignore[overload-overlap] + @overload + def move_into(self, target_dir: StrPath) -> Self: ... # type: ignore[overload-overlap] + @overload + def move(self, target: _PathT) -> _PathT: ... # type: ignore[overload-overlap] + @overload + def move(self, target: StrPath) -> Self: ... # type: ignore[overload-overlap] + @overload + def copy_into(self, target_dir: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: ... 
# type: ignore[overload-overlap] + @overload + def copy_into(self, target_dir: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... # type: ignore[overload-overlap] + @overload + def copy(self, target: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: ... # type: ignore[overload-overlap] + @overload + def copy(self, target: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... # type: ignore[overload-overlap] # Adapted from builtins.open # Text mode: always returns a TextIOWrapper @@ -253,9 +270,6 @@ class Path(PurePath): def resolve(self, strict: bool = False) -> Self: ... def rmdir(self) -> None: ... - if sys.version_info >= (3, 14): - def delete(self, ignore_errors: bool = False, on_error: Callable[[OSError], object] | None = None) -> None: ... - def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: ... if sys.version_info >= (3, 10): def hardlink_to(self, target: StrOrBytesPath) -> None: ... @@ -286,9 +300,6 @@ class Path(PurePath): self, top_down: bool = ..., on_error: Callable[[OSError], object] | None = ..., follow_symlinks: bool = ... ) -> Iterator[tuple[Self, list[str], list[str]]]: ... - if sys.version_info >= (3, 14): - def rmtree(self, ignore_errors: bool = False, on_error: Callable[[OSError], object] | None = None) -> None: ... - class PosixPath(Path, PurePosixPath): ... class WindowsPath(Path, PureWindowsPath): ... diff --git a/stdlib/pathlib/types.pyi b/stdlib/pathlib/types.pyi new file mode 100644 index 000000000000..9f9a650846de --- /dev/null +++ b/stdlib/pathlib/types.pyi @@ -0,0 +1,8 @@ +from typing import Protocol, runtime_checkable + +@runtime_checkable +class PathInfo(Protocol): + def exists(self, *, follow_symlinks: bool = True) -> bool: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... + def is_symlink(self) -> bool: ... From eac0378b31d472074bbeb989e5ecd0c520cdf3d5 Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Sun, 11 May 2025 15:39:50 -0700 Subject: [PATCH 341/388] Give up on AnnotationForm in typing.pyi (#13999) From #13985 --- stdlib/typing.pyi | 92 +++++++++++++++++++++++++++-------------------- 1 file changed, 53 insertions(+), 39 deletions(-) diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi index 1839df8f31dc..5aa85543ed2c 100644 --- a/stdlib/typing.pyi +++ b/stdlib/typing.pyi @@ -6,7 +6,7 @@ import collections # noqa: F401 # pyright: ignore[reportUnusedImport] import sys import typing_extensions from _collections_abc import dict_items, dict_keys, dict_values -from _typeshed import AnnotationForm, IdentityFunction, ReadableBuffer, SupportsKeysAndGetItem +from _typeshed import IdentityFunction, ReadableBuffer, SupportsKeysAndGetItem from abc import ABCMeta, abstractmethod from re import Match as Match, Pattern as Pattern from types import ( @@ -142,6 +142,10 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 13): __all__ += ["get_protocol_members", "is_protocol", "NoDefault", "TypeIs", "ReadOnly"] +# We can't use this name here because it leads to issues with mypy, likely +# due to an import cycle. Below instead we use Any with a comment. +# from _typeshed import AnnotationForm + class Any: ... class _Final: ... @@ -151,9 +155,9 @@ class TypeVar: @property def __name__(self) -> str: ... @property - def __bound__(self) -> AnnotationForm | None: ... + def __bound__(self) -> Any | None: ... 
# AnnotationForm @property - def __constraints__(self) -> tuple[AnnotationForm, ...]: ... + def __constraints__(self) -> tuple[Any, ...]: ... # AnnotationForm @property def __covariant__(self) -> bool: ... @property @@ -163,24 +167,24 @@ class TypeVar: def __infer_variance__(self) -> bool: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> AnnotationForm: ... + def __default__(self) -> Any: ... # AnnotationForm if sys.version_info >= (3, 13): def __new__( cls, name: str, - *constraints: AnnotationForm, - bound: AnnotationForm | None = None, + *constraints: Any, # AnnotationForm + bound: Any | None = None, # AnnotationForm contravariant: bool = False, covariant: bool = False, infer_variance: bool = False, - default: AnnotationForm = ..., + default: Any = ..., # AnnotationForm ) -> Self: ... elif sys.version_info >= (3, 12): def __new__( cls, name: str, - *constraints: AnnotationForm, - bound: AnnotationForm | None = None, + *constraints: Any, # AnnotationForm + bound: Any | None = None, # AnnotationForm covariant: bool = False, contravariant: bool = False, infer_variance: bool = False, @@ -189,8 +193,8 @@ class TypeVar: def __new__( cls, name: str, - *constraints: AnnotationForm, - bound: AnnotationForm | None = None, + *constraints: Any, # AnnotationForm + bound: Any | None = None, # AnnotationForm covariant: bool = False, contravariant: bool = False, ) -> Self: ... @@ -198,14 +202,14 @@ class TypeVar: def __init__( self, name: str, - *constraints: AnnotationForm, - bound: AnnotationForm | None = None, + *constraints: Any, # AnnotationForm + bound: Any | None = None, # AnnotationForm covariant: bool = False, contravariant: bool = False, ) -> None: ... if sys.version_info >= (3, 10): - def __or__(self, right: AnnotationForm) -> _SpecialForm: ... - def __ror__(self, left: AnnotationForm) -> _SpecialForm: ... + def __or__(self, right: Any) -> _SpecialForm: ... # AnnotationForm + def __ror__(self, left: Any) -> _SpecialForm: ... # AnnotationForm if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any) -> Any: ... if sys.version_info >= (3, 13): @@ -260,10 +264,10 @@ if sys.version_info >= (3, 11): def __name__(self) -> str: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> AnnotationForm: ... + def __default__(self) -> Any: ... # AnnotationForm def has_default(self) -> bool: ... if sys.version_info >= (3, 13): - def __new__(cls, name: str, *, default: AnnotationForm = ...) -> Self: ... + def __new__(cls, name: str, *, default: Any = ...) -> Self: ... # AnnotationForm elif sys.version_info >= (3, 12): def __new__(cls, name: str) -> Self: ... else: @@ -306,7 +310,7 @@ if sys.version_info >= (3, 10): @property def __name__(self) -> str: ... @property - def __bound__(self) -> AnnotationForm | None: ... + def __bound__(self) -> Any | None: ... # AnnotationForm @property def __covariant__(self) -> bool: ... @property @@ -316,35 +320,45 @@ if sys.version_info >= (3, 10): def __infer_variance__(self) -> bool: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> AnnotationForm: ... + def __default__(self) -> Any: ... # AnnotationForm if sys.version_info >= (3, 13): def __new__( cls, name: str, *, - bound: AnnotationForm | None = None, + bound: Any | None = None, # AnnotationForm contravariant: bool = False, covariant: bool = False, infer_variance: bool = False, - default: AnnotationForm = ..., + default: Any = ..., # AnnotationForm ) -> Self: ... 
elif sys.version_info >= (3, 12): def __new__( cls, name: str, *, - bound: AnnotationForm | None = None, + bound: Any | None = None, # AnnotationForm contravariant: bool = False, covariant: bool = False, infer_variance: bool = False, ) -> Self: ... elif sys.version_info >= (3, 11): def __new__( - cls, name: str, *, bound: AnnotationForm | None = None, contravariant: bool = False, covariant: bool = False + cls, + name: str, + *, + bound: Any | None = None, # AnnotationForm + contravariant: bool = False, + covariant: bool = False, ) -> Self: ... else: def __init__( - self, name: str, *, bound: AnnotationForm | None = None, contravariant: bool = False, covariant: bool = False + self, + name: str, + *, + bound: Any | None = None, # AnnotationForm + contravariant: bool = False, + covariant: bool = False, ) -> None: ... @property @@ -368,7 +382,7 @@ if sys.version_info >= (3, 10): TypeGuard: _SpecialForm class NewType: - def __init__(self, name: str, tp: AnnotationForm) -> None: ... + def __init__(self, name: str, tp: Any) -> None: ... # AnnotationForm if sys.version_info >= (3, 11): @staticmethod def __call__(x: _T, /) -> _T: ... @@ -901,7 +915,7 @@ if sys.version_info >= (3, 14): include_extras: bool = False, *, format: Format | None = None, - ) -> dict[str, AnnotationForm]: ... + ) -> dict[str, Any]: ... # AnnotationForm else: def get_type_hints( @@ -909,9 +923,9 @@ else: globalns: dict[str, Any] | None = None, localns: Mapping[str, Any] | None = None, include_extras: bool = False, - ) -> dict[str, AnnotationForm]: ... + ) -> dict[str, Any]: ... # AnnotationForm -def get_args(tp: AnnotationForm) -> tuple[AnnotationForm, ...]: ... +def get_args(tp: Any) -> tuple[Any, ...]: ... # AnnotationForm if sys.version_info >= (3, 10): @overload @@ -922,7 +936,7 @@ if sys.version_info >= (3, 10): @overload def get_origin(tp: GenericAlias) -> type: ... @overload -def get_origin(tp: AnnotationForm) -> AnnotationForm | None: ... +def get_origin(tp: Any) -> Any | None: ... # AnnotationForm @overload def cast(typ: type[_T], val: Any) -> _T: ... @overload @@ -933,7 +947,7 @@ def cast(typ: object, val: Any) -> Any: ... if sys.version_info >= (3, 11): def reveal_type(obj: _T, /) -> _T: ... def assert_never(arg: Never, /) -> Never: ... - def assert_type(val: _T, typ: AnnotationForm, /) -> _T: ... + def assert_type(val: _T, typ: Any, /) -> _T: ... # AnnotationForm def clear_overloads() -> None: ... def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... def dataclass_transform( @@ -1020,7 +1034,7 @@ if sys.version_info >= (3, 14): locals: Mapping[str, Any] | None = None, type_params: tuple[TypeVar, ParamSpec, TypeVarTuple] | None = None, format: Format | None = None, - ) -> AnnotationForm: ... + ) -> Any: ... # AnnotationForm else: @final @@ -1028,7 +1042,7 @@ else: __forward_arg__: str __forward_code__: CodeType __forward_evaluated__: bool - __forward_value__: AnnotationForm | None + __forward_value__: Any | None # AnnotationForm __forward_is_argument__: bool __forward_is_class__: bool __forward_module__: Any | None @@ -1044,7 +1058,7 @@ else: ) def _evaluate( self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, *, recursive_guard: frozenset[str] - ) -> AnnotationForm | None: ... + ) -> Any | None: ... # AnnotationForm @overload def _evaluate( self, @@ -1053,7 +1067,7 @@ else: type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...], *, recursive_guard: frozenset[str], - ) -> AnnotationForm | None: ... + ) -> Any | None: ... 
# AnnotationForm elif sys.version_info >= (3, 12): def _evaluate( self, @@ -1062,11 +1076,11 @@ else: type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, *, recursive_guard: frozenset[str], - ) -> AnnotationForm | None: ... + ) -> Any | None: ... # AnnotationForm else: def _evaluate( self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, recursive_guard: frozenset[str] - ) -> AnnotationForm | None: ... + ) -> Any | None: ... # AnnotationForm def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @@ -1085,17 +1099,17 @@ if sys.version_info >= (3, 12): class TypeAliasType: def __new__(cls, name: str, value: Any, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = ()) -> Self: ... @property - def __value__(self) -> AnnotationForm: ... + def __value__(self) -> Any: ... # AnnotationForm @property def __type_params__(self) -> tuple[TypeVar | ParamSpec | TypeVarTuple, ...]: ... @property - def __parameters__(self) -> tuple[AnnotationForm, ...]: ... + def __parameters__(self) -> tuple[Any, ...]: ... # AnnotationForm @property def __name__(self) -> str: ... # It's writable on types, but not on instances of TypeAliasType. @property def __module__(self) -> str | None: ... # type: ignore[override] - def __getitem__(self, parameters: AnnotationForm) -> GenericAlias: ... + def __getitem__(self, parameters: Any) -> GenericAlias: ... # AnnotationForm def __or__(self, right: Any) -> _SpecialForm: ... def __ror__(self, left: Any) -> _SpecialForm: ... if sys.version_info >= (3, 14): From a25cb8abc075e8cb7bfbf348fbcd163eedd3a108 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 12 May 2025 05:23:21 +0000 Subject: [PATCH 342/388] Bump `decimal` to 3.14 (#14017) Co-authored-by: Jelle Zijlstra --- stdlib/@tests/stubtest_allowlists/py314.txt | 5 ----- stdlib/_decimal.pyi | 5 +++++ stdlib/_pydecimal.pyi | 4 ++++ stdlib/decimal.pyi | 8 ++++++++ 4 files changed, 17 insertions(+), 5 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 67a725257bbe..342b90283d24 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -8,9 +8,6 @@ _asyncio.future_discard_from_awaited_by _ctypes.POINTER _ctypes.byref _ctypes.pointer -_decimal.Decimal.from_number -_decimal.IEEEContext -_decimal.IEEE_CONTEXT_MAX_BITS _heapq.heapify_max _heapq.heappop_max _heapq.heappush_max @@ -103,8 +100,6 @@ dataclasses.Field.__init__ dataclasses.Field.doc dataclasses.field dataclasses.make_dataclass -decimal.Decimal.from_number -decimal.IEEE_CONTEXT_MAX_BITS dis.Instruction.make enum.Enum.__signature__ enum.EnumMeta.__signature__ diff --git a/stdlib/_decimal.pyi b/stdlib/_decimal.pyi index 06c0197dcf07..fd0e6e6ac091 100644 --- a/stdlib/_decimal.pyi +++ b/stdlib/_decimal.pyi @@ -41,6 +41,8 @@ MAX_EMAX: Final[int] MAX_PREC: Final[int] MIN_EMIN: Final[int] MIN_ETINY: Final[int] +if sys.version_info >= (3, 14): + IEEE_CONTEXT_MAX_BITS: Final[int] def setcontext(context: Context, /) -> None: ... def getcontext() -> Context: ... @@ -62,6 +64,9 @@ if sys.version_info >= (3, 11): else: def localcontext(ctx: Context | None = None) -> _ContextManager: ... +if sys.version_info >= (3, 14): + def IEEEContext(bits: int, /) -> Context: ... 
+ DefaultContext: Context BasicContext: Context ExtendedContext: Context diff --git a/stdlib/_pydecimal.pyi b/stdlib/_pydecimal.pyi index faff626ac0ba..a6723f749da6 100644 --- a/stdlib/_pydecimal.pyi +++ b/stdlib/_pydecimal.pyi @@ -1,5 +1,6 @@ # This is a slight lie, the implementations aren't exactly identical # However, in all likelihood, the differences are inconsequential +import sys from _decimal import * __all__ = [ @@ -41,3 +42,6 @@ __all__ = [ "HAVE_THREADS", "HAVE_CONTEXTVAR", ] + +if sys.version_info >= (3, 14): + __all__ += ["IEEEContext", "IEEE_CONTEXT_MAX_BITS"] diff --git a/stdlib/decimal.pyi b/stdlib/decimal.pyi index 4ded21e0b017..b85c00080092 100644 --- a/stdlib/decimal.pyi +++ b/stdlib/decimal.pyi @@ -1,4 +1,5 @@ import numbers +import sys from _decimal import ( HAVE_CONTEXTVAR as HAVE_CONTEXTVAR, HAVE_THREADS as HAVE_THREADS, @@ -28,6 +29,9 @@ from types import TracebackType from typing import Any, ClassVar, Literal, NamedTuple, final, overload, type_check_only from typing_extensions import Self, TypeAlias +if sys.version_info >= (3, 14): + from _decimal import IEEE_CONTEXT_MAX_BITS as IEEE_CONTEXT_MAX_BITS, IEEEContext as IEEEContext + _Decimal: TypeAlias = Decimal | int _DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int] _ComparableNum: TypeAlias = Decimal | float | numbers.Rational @@ -66,6 +70,10 @@ class FloatOperation(DecimalException, TypeError): ... class Decimal: def __new__(cls, value: _DecimalNew = "0", context: Context | None = None) -> Self: ... + if sys.version_info >= (3, 14): + @classmethod + def from_number(cls, number: Decimal | float, /) -> Self: ... + @classmethod def from_float(cls, f: float, /) -> Self: ... def __bool__(self) -> bool: ... From fc6a13f78d7a6b5311f9eb95fa6e954c97427d56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edgar=20Ram=C3=ADrez=20Mondrag=C3=B3n?= Date: Sun, 11 May 2025 23:26:28 -0600 Subject: [PATCH 343/388] Add new Python 3.14 `argparse.ArgumentParser` constructor parameters (#13947) These are - `suggest_on_error`, added by https://github.com/python/cpython/pull/124456, made keyword-only by https://github.com/python/cpython/pull/133302 - `color`, added by https://github.com/python/cpython/pull/132323 Co-authored-by: Sebastian Rittau Co-authored-by: Jelle Zijlstra --- stdlib/argparse.pyi | 33 ++++++++++++++++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/stdlib/argparse.pyi b/stdlib/argparse.pyi index 0c84f52fff71..95ad6c7da8eb 100644 --- a/stdlib/argparse.pyi +++ b/stdlib/argparse.pyi @@ -123,6 +123,11 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): fromfile_prefix_chars: str | None add_help: bool allow_abbrev: bool + exit_on_error: bool + + if sys.version_info >= (3, 14): + suggest_on_error: bool + color: bool # undocumented _positionals: _ArgumentGroup @@ -720,7 +725,33 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): # Note: `add_parser` accepts all kwargs of `ArgumentParser.__init__`. It also # accepts its own `help` and `aliases` kwargs. 
- if sys.version_info >= (3, 13): + if sys.version_info >= (3, 14): + def add_parser( + self, + name: str, + *, + deprecated: bool = False, + help: str | None = ..., + aliases: Sequence[str] = ..., + # Kwargs from ArgumentParser constructor + prog: str | None = ..., + usage: str | None = ..., + description: str | None = ..., + epilog: str | None = ..., + parents: Sequence[_ArgumentParserT] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = ..., + fromfile_prefix_chars: str | None = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + add_help: bool = ..., + allow_abbrev: bool = ..., + exit_on_error: bool = ..., + suggest_on_error: bool = False, + color: bool = False, + **kwargs: Any, # Accepting any additional kwargs for custom parser classes + ) -> _ArgumentParserT: ... + elif sys.version_info >= (3, 13): def add_parser( self, name: str, From bf54b07f5285bfa68979310e47a466ca8d385cf8 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Mon, 12 May 2025 01:52:33 -0500 Subject: [PATCH 344/388] Add `asyncio.graph`, updates to `asyncio.futures` (3.14) (#14003) --- stdlib/@tests/stubtest_allowlists/py314.txt | 11 --------- stdlib/VERSIONS | 1 + stdlib/asyncio/__init__.pyi | 18 ++++++++++++++ stdlib/asyncio/futures.pyi | 10 +++++++- stdlib/asyncio/graph.pyi | 26 +++++++++++++++++++++ 5 files changed, 54 insertions(+), 12 deletions(-) create mode 100644 stdlib/asyncio/graph.pyi diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 342b90283d24..0749cba39ba5 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -22,28 +22,17 @@ _thread.set_name ast.Interpolation ast.TemplateStr asyncio.__all__ -asyncio.FrameCallGraphEntry -asyncio.FutureCallGraph asyncio._AbstractEventLoopPolicy asyncio._DefaultEventLoopPolicy asyncio._get_event_loop_policy asyncio._set_event_loop_policy -asyncio.capture_call_graph asyncio.eager_task_factory -asyncio.format_call_graph -asyncio.future_add_to_awaited_by -asyncio.future_discard_from_awaited_by -asyncio.print_call_graph asyncio.events.__all__ asyncio.events.AbstractEventLoopPolicy asyncio.events.BaseDefaultEventLoopPolicy asyncio.events._AbstractEventLoopPolicy asyncio.events._get_event_loop_policy asyncio.events._set_event_loop_policy -asyncio.futures.__all__ -asyncio.futures.future_add_to_awaited_by -asyncio.futures.future_discard_from_awaited_by -asyncio.graph asyncio.tasks.eager_task_factory bdb.Bdb.__init__ bdb.Bdb.disable_current_event diff --git a/stdlib/VERSIONS b/stdlib/VERSIONS index 0b312925465d..9defa7c27a97 100644 --- a/stdlib/VERSIONS +++ b/stdlib/VERSIONS @@ -87,6 +87,7 @@ asynchat: 3.0-3.11 asyncio: 3.4- asyncio.exceptions: 3.8- asyncio.format_helpers: 3.7- +asyncio.graph: 3.14- asyncio.mixins: 3.10- asyncio.runners: 3.7- asyncio.staggered: 3.8- diff --git a/stdlib/asyncio/__init__.pyi b/stdlib/asyncio/__init__.pyi index c314acbea1ca..f9118608060e 100644 --- a/stdlib/asyncio/__init__.pyi +++ b/stdlib/asyncio/__init__.pyi @@ -21,6 +21,9 @@ from .tasks import * from .threads import * from .transports import * +if sys.version_info >= (3, 14): + from .graph import * + if sys.version_info >= (3, 11): from .taskgroups import * from .timeouts import * @@ -32,6 +35,7 @@ else: if sys.platform == "win32": if sys.version_info >= (3, 14): + __all__ = ( "BaseEventLoop", # from base_events "Server", # from base_events @@ -60,6 +64,13 @@ if sys.platform == "win32": "Future", # from futures "wrap_future", # from futures 
"isfuture", # from futures + "future_discard_from_awaited_by", # from futures + "future_add_to_awaited_by", # from futures + "capture_call_graph", # from graph + "format_call_graph", # from graph + "print_call_graph", # from graph + "FrameCallGraphEntry", # from graph + "FutureCallGraph", # from graph "Lock", # from locks "Event", # from locks "Condition", # from locks @@ -527,6 +538,13 @@ else: "Future", # from futures "wrap_future", # from futures "isfuture", # from futures + "future_discard_from_awaited_by", # from futures + "future_add_to_awaited_by", # from futures + "capture_call_graph", # from graph + "format_call_graph", # from graph + "print_call_graph", # from graph + "FrameCallGraphEntry", # from graph + "FutureCallGraph", # from graph "Lock", # from locks "Event", # from locks "Condition", # from locks diff --git a/stdlib/asyncio/futures.pyi b/stdlib/asyncio/futures.pyi index cb2785012fb2..a63de66f02e6 100644 --- a/stdlib/asyncio/futures.pyi +++ b/stdlib/asyncio/futures.pyi @@ -1,3 +1,4 @@ +import sys from _asyncio import Future as Future from concurrent.futures._base import Future as _ConcurrentFuture from typing import Any, TypeVar @@ -6,7 +7,10 @@ from typing_extensions import TypeIs from .events import AbstractEventLoop # Keep asyncio.__all__ updated with any changes to __all__ here -__all__ = ("Future", "wrap_future", "isfuture") +if sys.version_info >= (3, 14): + __all__ = ("Future", "wrap_future", "isfuture", "future_discard_from_awaited_by", "future_add_to_awaited_by") +else: + __all__ = ("Future", "wrap_future", "isfuture") _T = TypeVar("_T") @@ -15,3 +19,7 @@ _T = TypeVar("_T") # That's why the import order is reversed. def isfuture(obj: object) -> TypeIs[Future[Any]]: ... def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... + +if sys.version_info >= (3, 14): + def future_discard_from_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: ... + def future_add_to_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: ... diff --git a/stdlib/asyncio/graph.pyi b/stdlib/asyncio/graph.pyi new file mode 100644 index 000000000000..cb2cf0174995 --- /dev/null +++ b/stdlib/asyncio/graph.pyi @@ -0,0 +1,26 @@ +from _typeshed import SupportsWrite +from asyncio import Future +from dataclasses import dataclass +from types import FrameType +from typing import Any, overload + +__all__ = ("capture_call_graph", "format_call_graph", "print_call_graph", "FrameCallGraphEntry", "FutureCallGraph") + +@dataclass(frozen=True) +class FrameCallGraphEntry: + frame: FrameType + +@dataclass(frozen=True) +class FutureCallGraph: + future: Future[Any] + call_stack: tuple[FrameCallGraphEntry, ...] + awaited_by: tuple[FutureCallGraph, ...] + +@overload +def capture_call_graph(future: None = None, /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: ... +@overload +def capture_call_graph(future: Future[Any], /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: ... +def format_call_graph(future: Future[Any] | None = None, /, *, depth: int = 1, limit: int | None = None) -> str: ... +def print_call_graph( + future: Future[Any] | None = None, /, *, file: SupportsWrite[str] | None = None, depth: int = 1, limit: int | None = None +) -> None: ... 
From 54dcaf227fbab8eae42f72b42547f39ef313ce5d Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 12 May 2025 09:31:37 +0000 Subject: [PATCH 345/388] Bump `tomllib.TOMLDecodeError` to 3.14 (#14019) --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/tomllib.pyi | 20 ++++++++++++++++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 0749cba39ba5..31067099c70b 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -160,7 +160,6 @@ tarfile.TarFile.zstopen threading.Thread.__init__ threading._RLock.locked tkinter.Event.__class_getitem__ -tomllib.TOMLDecodeError.__init__ turtle.__all__ turtle.RawTurtle.fill turtle.RawTurtle.poly diff --git a/stdlib/tomllib.pyi b/stdlib/tomllib.pyi index d559568b912b..c160ffc38bfd 100644 --- a/stdlib/tomllib.pyi +++ b/stdlib/tomllib.pyi @@ -1,10 +1,26 @@ +import sys from _typeshed import SupportsRead from collections.abc import Callable -from typing import Any +from typing import Any, overload +from typing_extensions import deprecated __all__ = ("loads", "load", "TOMLDecodeError") -class TOMLDecodeError(ValueError): ... +if sys.version_info >= (3, 14): + class TOMLDecodeError(ValueError): + msg: str + doc: str + pos: int + lineno: int + colno: int + @overload + def __init__(self, msg: str, doc: str, pos: int) -> None: ... + @overload + @deprecated("Deprecated in Python 3.14; Please set 'msg', 'doc' and 'pos' arguments only.") + def __init__(self, msg: str | type = ..., doc: str | type = ..., pos: int | type = ..., *args: Any) -> None: ... + +else: + class TOMLDecodeError(ValueError): ... def load(fp: SupportsRead[bytes], /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... def loads(s: str, /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... 
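For context, a minimal sketch of what the richer `TOMLDecodeError` stub above enables on 3.14+. The broken TOML snippet is made up; on earlier Python versions only the plain `ValueError` message is available.

    import tomllib

    try:
        tomllib.loads("key = ")  # invalid: the value after '=' is missing
    except tomllib.TOMLDecodeError as exc:
        # Structured attributes exist on Python 3.14+ (see the stub above);
        # older versions only expose the formatted message via str(exc).
        print(exc.msg)
        print(exc.lineno, exc.colno, exc.pos)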
From ff68c29e2c9bd78a7828d5a138eb64959c3dc664 Mon Sep 17 00:00:00 2001 From: Julien Danjou Date: Mon, 12 May 2025 17:07:03 +0200 Subject: [PATCH 346/388] [click_default_group] default_cmd_name argument should be named default (#14023) --- stubs/click-default-group/click_default_group.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/click-default-group/click_default_group.pyi b/stubs/click-default-group/click_default_group.pyi index c8b46e4dabd7..aa521d48b564 100644 --- a/stubs/click-default-group/click_default_group.pyi +++ b/stubs/click-default-group/click_default_group.pyi @@ -18,7 +18,7 @@ class DefaultGroup(click.Group): commands: MutableMapping[str, click.Command] | Sequence[click.Command] | None = None, *, ignore_unknown_options: Literal[True] | None = True, - default_cmd_name: str | None = None, + default: str | None = None, default_if_no_args: bool = False, invoke_without_command: bool = False, no_args_is_help: bool | None = None, From b9645355326eef7886fdaff5c641a4abea5f5b71 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 12 May 2025 15:08:32 +0000 Subject: [PATCH 347/388] Bump `ipaddress` to 3.14 (#14024) --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/ipaddress.pyi | 29 ++++++++++++++------- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 31067099c70b..1a4c445ada69 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -113,7 +113,6 @@ importlib.machinery.__all__ importlib.machinery.AppleFrameworkLoader importlib.util.__all__ importlib.util.Loader -ipaddress._IPAddressBase.version logging.handlers.SysLogHandler.__init__ marshal.dump marshal.dumps diff --git a/stdlib/ipaddress.pyi b/stdlib/ipaddress.pyi index 6883895fd219..9df6bab7c167 100644 --- a/stdlib/ipaddress.pyi +++ b/stdlib/ipaddress.pyi @@ -28,8 +28,9 @@ class _IPAddressBase: def exploded(self) -> str: ... @property def reverse_pointer(self) -> str: ... - @property - def version(self) -> int: ... + if sys.version_info < (3, 14): + @property + def version(self) -> int: ... class _BaseAddress(_IPAddressBase): def __add__(self, other: int) -> Self: ... @@ -104,10 +105,14 @@ class _BaseNetwork(_IPAddressBase, Generic[_A]): def hostmask(self) -> _A: ... class _BaseV4: - @property - def version(self) -> Literal[4]: ... - @property - def max_prefixlen(self) -> Literal[32]: ... + if sys.version_info >= (3, 14): + version: Final = 4 + max_prefixlen: Final = 32 + else: + @property + def version(self) -> Literal[4]: ... + @property + def max_prefixlen(self) -> Literal[32]: ... class IPv4Address(_BaseV4, _BaseAddress): def __init__(self, address: object) -> None: ... @@ -151,10 +156,14 @@ class IPv4Interface(IPv4Address): def with_prefixlen(self) -> str: ... class _BaseV6: - @property - def version(self) -> Literal[6]: ... - @property - def max_prefixlen(self) -> Literal[128]: ... + if sys.version_info >= (3, 14): + version: Final = 6 + max_prefixlen: Final = 128 + else: + @property + def version(self) -> Literal[6]: ... + @property + def max_prefixlen(self) -> Literal[128]: ... class IPv6Address(_BaseV6, _BaseAddress): def __init__(self, address: object) -> None: ... 
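A small sketch of the runtime difference the ipaddress change above mirrors: on 3.14 `version` and `max_prefixlen` are plain class-level constants, while older versions expose them as properties. The addresses used are documentation examples.

    import ipaddress

    addr = ipaddress.ip_address("192.0.2.1")
    print(addr.version, addr.max_prefixlen)  # 4 32; instance access works on every version

    # On 3.14 class-level access yields the int directly; before 3.14 it
    # returns the property descriptor instead.
    print(ipaddress.IPv6Address.max_prefixlen)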
From aa15a15a4e2a0e0cc5ec6f15db6f617f2f59b61f Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 12 May 2025 11:31:45 -0400 Subject: [PATCH 348/388] networkx: `ignore_missing_stub = false` (#13968) --- stubs/networkx/@tests/stubtest_allowlist.txt | 14 ++ stubs/networkx/METADATA.toml | 4 +- .../networkx/algorithms/planarity.pyi | 1 + .../networkx/algorithms/tree/branchings.pyi | 3 +- .../networkx/networkx/algorithms/tree/mst.pyi | 3 +- stubs/networkx/networkx/classes/graph.pyi | 13 +- .../networkx/networkx/classes/multigraph.pyi | 4 +- .../networkx/networkx/classes/reportviews.pyi | 203 +++++++++++++++++- stubs/networkx/networkx/readwrite/text.pyi | 20 +- stubs/networkx/networkx/utils/configs.pyi | 75 +++++-- tests/stubtest_third_party.py | 8 +- 11 files changed, 307 insertions(+), 41 deletions(-) diff --git a/stubs/networkx/@tests/stubtest_allowlist.txt b/stubs/networkx/@tests/stubtest_allowlist.txt index b297bf9b2153..c55b28c08921 100644 --- a/stubs/networkx/@tests/stubtest_allowlist.txt +++ b/stubs/networkx/@tests/stubtest_allowlist.txt @@ -30,3 +30,17 @@ networkx\.algorithms\.bipartite\.(cluster\.)?clustering # failing to account for explicitly passing in the default value." # Which is true, but would require some way of concatenating `backend` to ParamSpec.kwargs networkx\.(utils\.)?(backends\.)?_dispatchable\.__call__ + +# Tests are excluded +networkx.conftest +networkx(\..+?)?\.tests(\..+?)? + +# "..._DT is not present at runtime" but we don't set it in stubs, I don't understand this one +networkx(\.algorithms)?(\.tree)?(\.mst)?\.SpanningTreeIterator\.Partition\._DT +networkx(\.algorithms)?(\.tree)?(\.branchings)?\.ArborescenceIterator\.Partition\._DT + +# variable differs from runtime type abc.ABCMeta +networkx.classes.reportviews.EdgeView.dataview +networkx.classes.reportviews.InEdgeView.dataview +networkx.classes.reportviews.OutEdgeView.dataview +networkx.classes.reportviews.OutMultiEdgeView.dataview diff --git a/stubs/networkx/METADATA.toml b/stubs/networkx/METADATA.toml index 97f4bd7321ca..387904a34abb 100644 --- a/stubs/networkx/METADATA.toml +++ b/stubs/networkx/METADATA.toml @@ -2,9 +2,9 @@ version = "3.4.2" upstream_repository = "https://github.com/networkx/networkx" # requires a version of numpy with a `py.typed` file requires = ["numpy>=1.20"] -partial_stub = true +# Uses more recent dataclass kwargs +requires_python = ">=3.10" [tool.stubtest] -ignore_missing_stub = true # stub_uploader won't allow pandas-stubs in the requires field https://github.com/typeshed-internal/stub_uploader/issues/90 stubtest_requirements = ["pandas"] diff --git a/stubs/networkx/networkx/algorithms/planarity.pyi b/stubs/networkx/networkx/algorithms/planarity.pyi index 9b1ba3b297fe..43d10d50a84e 100644 --- a/stubs/networkx/networkx/algorithms/planarity.pyi +++ b/stubs/networkx/networkx/algorithms/planarity.pyi @@ -67,6 +67,7 @@ class PlanarEmbedding(DiGraph[_Node]): def get_data(self) -> dict[_Node, list[_Node]]: ... def set_data(self, data: Mapping[_Node, Reversible[_Node]]) -> None: ... def neighbors_cw_order(self, v: _Node) -> Generator[_Node, None, None]: ... + def add_half_edge(self, start_node: _Node, end_node: _Node, *, cw: _Node | None = None, ccw: _Node | None = None): ... def check_structure(self) -> None: ... def add_half_edge_ccw(self, start_node: _Node, end_node: _Node, reference_neighbor: _Node) -> None: ... def add_half_edge_cw(self, start_node: _Node, end_node: _Node, reference_neighbor: _Node) -> None: ... 
diff --git a/stubs/networkx/networkx/algorithms/tree/branchings.pyi b/stubs/networkx/networkx/algorithms/tree/branchings.pyi index f04b808be8ba..bc07c60e06eb 100644 --- a/stubs/networkx/networkx/algorithms/tree/branchings.pyi +++ b/stubs/networkx/networkx/algorithms/tree/branchings.pyi @@ -44,10 +44,11 @@ def minimum_spanning_arborescence( ): ... class ArborescenceIterator: - @dataclass + @dataclass(order=True) class Partition: mst_weight: float partition_dict: dict[Incomplete, Incomplete] + def __copy__(self) -> ArborescenceIterator.Partition: ... G: Incomplete weight: Incomplete diff --git a/stubs/networkx/networkx/algorithms/tree/mst.pyi b/stubs/networkx/networkx/algorithms/tree/mst.pyi index 5cd518995676..30854ffd12e0 100644 --- a/stubs/networkx/networkx/algorithms/tree/mst.pyi +++ b/stubs/networkx/networkx/algorithms/tree/mst.pyi @@ -57,10 +57,11 @@ def random_spanning_tree( ): ... class SpanningTreeIterator: - @dataclass + @dataclass(order=True) class Partition: mst_weight: float partition_dict: dict[Incomplete, Incomplete] + def __copy__(self) -> SpanningTreeIterator.Partition: ... G: Incomplete weight: Incomplete diff --git a/stubs/networkx/networkx/classes/graph.pyi b/stubs/networkx/networkx/classes/graph.pyi index 5fda0dcc5843..0679630a9c07 100644 --- a/stubs/networkx/networkx/classes/graph.pyi +++ b/stubs/networkx/networkx/classes/graph.pyi @@ -29,12 +29,13 @@ _Data: TypeAlias = ( __all__ = ["Graph"] class Graph(Collection[_Node]): - node_dict_factory: ClassVar[_MapFactory] = ... - node_attr_dict_factory: ClassVar[_MapFactory] = ... - adjlist_outer_dict_factory: ClassVar[_MapFactory] = ... - adjlist_inner_dict_factory: ClassVar[_MapFactory] = ... - edge_attr_dict_factory: ClassVar[_MapFactory] = ... - graph_attr_dict_factory: ClassVar[_MapFactory] = ... + __networkx_backend__: ClassVar[str] + node_dict_factory: ClassVar[_MapFactory] + node_attr_dict_factory: ClassVar[_MapFactory] + adjlist_outer_dict_factory: ClassVar[_MapFactory] + adjlist_inner_dict_factory: ClassVar[_MapFactory] + edge_attr_dict_factory: ClassVar[_MapFactory] + graph_attr_dict_factory: ClassVar[_MapFactory] graph: dict[str, Any] diff --git a/stubs/networkx/networkx/classes/multigraph.pyi b/stubs/networkx/networkx/classes/multigraph.pyi index 63b159ad06d9..1fce3b56522e 100644 --- a/stubs/networkx/networkx/classes/multigraph.pyi +++ b/stubs/networkx/networkx/classes/multigraph.pyi @@ -1,9 +1,10 @@ from _typeshed import Incomplete from functools import cached_property +from typing import ClassVar from typing_extensions import TypeAlias from networkx.classes.coreviews import MultiAdjacencyView -from networkx.classes.graph import Graph, _Node +from networkx.classes.graph import Graph, _MapFactory, _Node from networkx.classes.multidigraph import MultiDiGraph from networkx.classes.reportviews import OutMultiEdgeView @@ -12,6 +13,7 @@ _MultiEdge: TypeAlias = tuple[_Node, _Node, int] # noqa: Y047 __all__ = ["MultiGraph"] class MultiGraph(Graph[_Node]): + edge_key_dict_factory: ClassVar[_MapFactory] def __init__(self, incoming_graph_data: Incomplete | None = None, multigraph_input: bool | None = None, **attr) -> None: ... @cached_property def adj(self) -> MultiAdjacencyView[_Node, _Node, dict[str, Incomplete]]: ... 
diff --git a/stubs/networkx/networkx/classes/reportviews.pyi b/stubs/networkx/networkx/classes/reportviews.pyi index 2a2a5c6d7286..0337ba24c45d 100644 --- a/stubs/networkx/networkx/classes/reportviews.pyi +++ b/stubs/networkx/networkx/classes/reportviews.pyi @@ -85,6 +85,7 @@ class EdgeDataView(OutEdgeDataView[_Node, _D]): ... class InEdgeDataView(OutEdgeDataView[_Node, _D]): ... class OutMultiEdgeDataView(OutEdgeDataView[_Node, _D]): + keys: bool def __init__( self, viewer, nbunch: _NBunch[_Node] = None, data: bool = False, *, default: Incomplete | None = None, keys: bool = False ) -> None: ... @@ -92,12 +93,13 @@ class OutMultiEdgeDataView(OutEdgeDataView[_Node, _D]): class MultiEdgeDataView(OutEdgeDataView[_Node, _D]): ... class InMultiEdgeDataView(OutEdgeDataView[_Node, _D]): ... -class OutEdgeView(AbstractSet[Incomplete], Mapping[Incomplete, Incomplete], Generic[_Node]): +class OutEdgeView(AbstractSet[Incomplete], Mapping[Incomplete, Incomplete], EdgeViewABC, Generic[_Node]): def __init__(self, G: Graph[_Node]) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[tuple[_Node, _Node]]: ... def __contains__(self, e: _Edge[_Node]) -> bool: ... # type: ignore[override] def __getitem__(self, e: _Edge[_Node]) -> dict[str, Any]: ... + dataview = OutEdgeDataView @overload def __call__(self, nbunch: None = None, data: Literal[False] = False, *, default: Unused = None) -> Self: ... # type: ignore[overload-overlap] @overload @@ -131,12 +133,82 @@ class OutEdgeView(AbstractSet[Incomplete], Mapping[Incomplete, Incomplete], Gene self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None ) -> OutEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... -class EdgeView(OutEdgeView[_Node]): ... -class InEdgeView(OutEdgeView[_Node]): ... +class EdgeView(OutEdgeView[_Node]): + dataview = EdgeDataView + # Have to override parent's overloads with the proper return type based on dataview + @overload + def __call__(self, nbunch: None = None, data: Literal[False] = False, *, default: Unused = None) -> Self: ... # type: ignore[overload-overlap] + @overload + def __call__( + self, nbunch: _Node | Iterable[_Node], data: Literal[False] = False, *, default: None = None + ) -> EdgeDataView[_Node, tuple[_Node, _Node]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node], data: Literal[True], *, default: None = None + ) -> EdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: Literal[True], default: None = None + ) -> EdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node], data: str, *, default: _U | None = None + ) -> EdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: str, default: _U | None = None + ) -> EdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload + def data(self, data: Literal[False], default: Unused = None, nbunch: None = None) -> Self: ... + @overload + def data( + self, data: Literal[True] = True, default: None = None, nbunch: _NBunch[_Node] = None + ) -> EdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def data( + self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None + ) -> EdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... 
+ +class InEdgeView(OutEdgeView[_Node]): + dataview = InEdgeDataView + # Have to override parent's overloads with the proper return type based on dataview + @overload + def __call__(self, nbunch: None = None, data: Literal[False] = False, *, default: Unused = None) -> Self: ... # type: ignore[overload-overlap] + @overload + def __call__( + self, nbunch: _Node | Iterable[_Node], data: Literal[False] = False, *, default: None = None + ) -> InEdgeDataView[_Node, tuple[_Node, _Node]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node], data: Literal[True], *, default: None = None + ) -> InEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: Literal[True], default: None = None + ) -> InEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node], data: str, *, default: _U | None = None + ) -> InEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: str, default: _U | None = None + ) -> InEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload + def data(self, data: Literal[False], default: Unused = None, nbunch: None = None) -> Self: ... + @overload + def data( + self, data: Literal[True] = True, default: None = None, nbunch: _NBunch[_Node] = None + ) -> InEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def data( + self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None + ) -> InEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... class OutMultiEdgeView(OutEdgeView[_Node]): def __iter__(self) -> Iterator[tuple[_Node, _Node, Incomplete]]: ... # type: ignore[override] def __getitem__(self, e: tuple[_Node, _Node, Incomplete]) -> dict[str, Any]: ... # type: ignore[override] + dataview = OutMultiEdgeDataView @overload # type: ignore[override] # Has an additional `keys` keyword argument def __call__( # type: ignore[overload-overlap] self, nbunch: None = None, data: Literal[False] = False, *, default: Unused = None, keys: Literal[True] @@ -196,5 +268,126 @@ class OutMultiEdgeView(OutEdgeView[_Node]): self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None, *, keys: Literal[True] ) -> OutMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, _U]]: ... -class MultiEdgeView(OutMultiEdgeView[_Node]): ... -class InMultiEdgeView(OutMultiEdgeView[_Node]): ... +class MultiEdgeView(OutMultiEdgeView[_Node]): + dataview = MultiEdgeDataView # type: ignore[assignment] + # Have to override parent's overloads with the proper return type based on dataview + @overload # type: ignore[override] # Has an additional `keys` keyword argument + def __call__( # type: ignore[overload-overlap] + self, nbunch: None = None, data: Literal[False] = False, *, default: Unused = None, keys: Literal[True] + ) -> Self: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, data: Literal[False] = False, *, default: None = None, keys: Literal[False] = False + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node]]: ... + @overload + def __call__( + self, nbunch: _Node | Iterable[_Node], data: Literal[False] = False, *, default: None = None, keys: Literal[True] + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete]]: ... 
+ @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: Literal[True], default: None = None, keys: Literal[True] + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node], data: Literal[True], *, default: None = None, keys: Literal[True] + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: Literal[True], default: None = None, keys: Literal[False] = False + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node], data: str, *, default: _U | None = None, keys: Literal[False] = False + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: str, default: _U | None = None, keys: Literal[True] + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, _U]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: str, default: _U | None = None, keys: Literal[False] = False + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload # type: ignore[override] + def data(self, data: Literal[False], default: Unused = None, nbunch: None = None, *, keys: Literal[True]) -> Self: ... + @overload + def data( + self, data: Literal[False], default: None = None, nbunch: _NBunch[_Node] = None, keys: Literal[False] = False + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node]]: ... + @overload + def data( + self, data: Literal[True] = True, default: None = None, nbunch: _NBunch[_Node] = None, keys: Literal[False] = False + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def data( + self, data: Literal[True] = True, default: None = None, nbunch: _NBunch[_Node] = None, *, keys: Literal[True] + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, dict[str, Incomplete]]]: ... + @overload + def data( + self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None, keys: Literal[False] = False + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload + def data( + self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None, *, keys: Literal[True] + ) -> MultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, _U]]: ... + +class InMultiEdgeView(OutMultiEdgeView[_Node]): + dataview = InMultiEdgeDataView # type: ignore[assignment] + # Have to override parent's overloads with the proper return type based on dataview + @overload # type: ignore[override] + def __call__( # type: ignore[overload-overlap] + self, nbunch: None = None, data: Literal[False] = False, *, default: Unused = None, keys: Literal[True] + ) -> Self: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, data: Literal[False] = False, *, default: None = None, keys: Literal[False] = False + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node]]: ... + @overload + def __call__( + self, nbunch: _Node | Iterable[_Node], data: Literal[False] = False, *, default: None = None, keys: Literal[True] + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: Literal[True], default: None = None, keys: Literal[True] + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, dict[str, Incomplete]]]: ... 
+ @overload + def __call__( + self, nbunch: _NBunch[_Node], data: Literal[True], *, default: None = None, keys: Literal[True] + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: Literal[True], default: None = None, keys: Literal[False] = False + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node], data: str, *, default: _U | None = None, keys: Literal[False] = False + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: str, default: _U | None = None, keys: Literal[True] + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, _U]]: ... + @overload + def __call__( + self, nbunch: _NBunch[_Node] = None, *, data: str, default: _U | None = None, keys: Literal[False] = False + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload # type: ignore[override] + def data(self, data: Literal[False], default: Unused = None, nbunch: None = None, *, keys: Literal[True]) -> Self: ... + @overload + def data( + self, data: Literal[False], default: None = None, nbunch: _NBunch[_Node] = None, keys: Literal[False] = False + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node]]: ... + @overload + def data( + self, data: Literal[True] = True, default: None = None, nbunch: _NBunch[_Node] = None, keys: Literal[False] = False + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, dict[str, Incomplete]]]: ... + @overload + def data( + self, data: Literal[True] = True, default: None = None, nbunch: _NBunch[_Node] = None, *, keys: Literal[True] + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, dict[str, Incomplete]]]: ... + @overload + def data( + self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None, keys: Literal[False] = False + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, _U]]: ... + @overload + def data( + self, data: str, default: _U | None = None, nbunch: _NBunch[_Node] = None, *, keys: Literal[True] + ) -> InMultiEdgeDataView[_Node, tuple[_Node, _Node, Incomplete, _U]]: ... diff --git a/stubs/networkx/networkx/readwrite/text.pyi b/stubs/networkx/networkx/readwrite/text.pyi index 1f96ae0a135a..8a2bf32fb8f0 100644 --- a/stubs/networkx/networkx/readwrite/text.pyi +++ b/stubs/networkx/networkx/readwrite/text.pyi @@ -4,7 +4,11 @@ from typing import ClassVar __all__ = ["generate_network_text", "write_network_text"] -class _AsciiBaseGlyphs: +class BaseGlyphs: + @classmethod + def as_dict(cls) -> dict[str, str]: ... 
+ +class AsciiBaseGlyphs(BaseGlyphs): empty: ClassVar[str] newtree_last: ClassVar[str] newtree_mid: ClassVar[str] @@ -12,17 +16,19 @@ class _AsciiBaseGlyphs: within_forest: ClassVar[str] within_tree: ClassVar[str] -class AsciiDirectedGlyphs(_AsciiBaseGlyphs): +class AsciiDirectedGlyphs(AsciiBaseGlyphs): last: ClassVar[str] mid: ClassVar[str] backedge: ClassVar[str] + vertical_edge: ClassVar[str] -class AsciiUndirectedGlyphs(_AsciiBaseGlyphs): +class AsciiUndirectedGlyphs(AsciiBaseGlyphs): last: ClassVar[str] mid: ClassVar[str] backedge: ClassVar[str] + vertical_edge: ClassVar[str] -class _UtfBaseGlyphs: +class UtfBaseGlyphs(BaseGlyphs): empty: ClassVar[str] newtree_last: ClassVar[str] newtree_mid: ClassVar[str] @@ -30,15 +36,17 @@ class _UtfBaseGlyphs: within_forest: ClassVar[str] within_tree: ClassVar[str] -class UtfDirectedGlyphs(_UtfBaseGlyphs): +class UtfDirectedGlyphs(UtfBaseGlyphs): last: ClassVar[str] mid: ClassVar[str] backedge: ClassVar[str] + vertical_edge: ClassVar[str] -class UtfUndirectedGlyphs(_UtfBaseGlyphs): +class UtfUndirectedGlyphs(UtfBaseGlyphs): last: ClassVar[str] mid: ClassVar[str] backedge: ClassVar[str] + vertical_edge: ClassVar[str] def generate_network_text( graph, diff --git a/stubs/networkx/networkx/utils/configs.pyi b/stubs/networkx/networkx/utils/configs.pyi index 42f48e2dc433..1d165b9fb1b5 100644 --- a/stubs/networkx/networkx/utils/configs.pyi +++ b/stubs/networkx/networkx/utils/configs.pyi @@ -1,26 +1,65 @@ +import sys from _typeshed import Incomplete -from collections.abc import ItemsView, Iterable, Iterator, KeysView, Mapping, ValuesView +from collections.abc import Callable, ItemsView, Iterable, Iterator, KeysView, ValuesView +from dataclasses import dataclass +from types import TracebackType from typing_extensions import Self __all__ = ["Config"] -class Config(Mapping[str, Incomplete]): - def __init_subclass__(cls, strict: bool = True) -> None: ... - def __new__(cls, **kwargs) -> Self: ... - def __dir__(self) -> Iterable[str]: ... - def __setattr__(self, name: str, value) -> None: ... - def __delattr__(self, name: str) -> None: ... - def __contains__(self, key: object) -> bool: ... - def __iter__(self) -> Iterator[str]: ... - def __len__(self) -> int: ... - def __reversed__(self) -> Iterator[str]: ... - def __getitem__(self, key: str): ... - def __setitem__(self, key: str, value) -> None: ... - def __delitem__(self, key: str) -> None: ... - def get(self, key: str, default: Incomplete | None = None): ... - def items(self) -> ItemsView[str, Incomplete]: ... - def keys(self) -> KeysView[str]: ... - def values(self) -> ValuesView[Incomplete]: ... +# TODO: Our pyright test doesn't understand `requires_python` in METADATA.toml +# https://github.com/python/typeshed/issues/14025 +if sys.version_info >= (3, 10): + @dataclass(init=False, eq=False, slots=True, kw_only=True, match_args=False) + class Config: + def __init_subclass__(cls, strict: bool = True) -> None: ... + def __new__(cls, **kwargs) -> Self: ... + def __dir__(self) -> Iterable[str]: ... + def __setattr__(self, name: str, value) -> None: ... + def __delattr__(self, name: str) -> None: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + def __reversed__(self) -> Iterator[str]: ... + def __getitem__(self, key: str): ... + def __setitem__(self, key: str, value) -> None: ... + def __delitem__(self, key: str) -> None: ... + def get(self, key: str, default: Incomplete | None = None): ... 
+ def items(self) -> ItemsView[str, Incomplete]: ... + def keys(self) -> KeysView[str]: ... + def values(self) -> ValuesView[Incomplete]: ... + def __reduce__(self) -> tuple[Callable[..., Self], tuple[type[Self], dict[Incomplete, Incomplete]]]: ... + def __call__(self, **kwargs) -> Self: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + +else: + @dataclass(init=False, eq=False) + class Config: + def __init_subclass__(cls, strict: bool = True) -> None: ... + def __new__(cls, **kwargs) -> Self: ... + def __dir__(self) -> Iterable[str]: ... + def __setattr__(self, name: str, value) -> None: ... + def __delattr__(self, name: str) -> None: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + def __reversed__(self) -> Iterator[str]: ... + def __getitem__(self, key: str): ... + def __setitem__(self, key: str, value) -> None: ... + def __delitem__(self, key: str) -> None: ... + def get(self, key: str, default: Incomplete | None = None): ... + def items(self) -> ItemsView[str, Incomplete]: ... + def keys(self) -> KeysView[str]: ... + def values(self) -> ValuesView[Incomplete]: ... + def __reduce__(self) -> tuple[Callable[..., Self], tuple[type[Self], dict[Incomplete, Incomplete]]]: ... + def __call__(self, **kwargs) -> Self: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... class NetworkXConfig(Config): backend_priority: list[str] diff --git a/tests/stubtest_third_party.py b/tests/stubtest_third_party.py index 1b853c1c408e..f477fb8e2c55 100755 --- a/tests/stubtest_third_party.py +++ b/tests/stubtest_third_party.py @@ -125,7 +125,13 @@ def run_stubtest( # It seems that some other environment variables are needed too, # because the CI fails if we pass only os.environ["DISPLAY"]. I didn't # "bisect" to see which variables are actually needed. 
- stubtest_env = os.environ | {"MYPYPATH": mypypath, "MYPY_FORCE_COLOR": "1"} + stubtest_env = os.environ | { + "MYPYPATH": mypypath, + "MYPY_FORCE_COLOR": "1", + # Prevent stubtest crash due to special unicode character + # https://github.com/python/mypy/issues/19071 + "PYTHONUTF8": "1", + } # Perform some black magic in order to run stubtest inside uWSGI if dist_name == "uWSGI": From 992ce9dc84a4119f7a3803902438be1a1682397d Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Mon, 12 May 2025 10:48:40 -0500 Subject: [PATCH 349/388] Update dataclasses for 3.14 (#14016) --- stdlib/@tests/stubtest_allowlists/py314.txt | 4 - stdlib/@tests/test_cases/check_dataclasses.py | 42 +++++++ stdlib/dataclasses.pyi | 106 +++++++++++++++++- 3 files changed, 145 insertions(+), 7 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 1a4c445ada69..d66b81762c41 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -85,10 +85,6 @@ ctypes.wintypes.HDROP ctypes.wintypes.HFILE ctypes.wintypes.HRESULT ctypes.wintypes.HSZ -dataclasses.Field.__init__ -dataclasses.Field.doc -dataclasses.field -dataclasses.make_dataclass dis.Instruction.make enum.Enum.__signature__ enum.EnumMeta.__signature__ diff --git a/stdlib/@tests/test_cases/check_dataclasses.py b/stdlib/@tests/test_cases/check_dataclasses.py index 4582e14ae26b..917331e4c084 100644 --- a/stdlib/@tests/test_cases/check_dataclasses.py +++ b/stdlib/@tests/test_cases/check_dataclasses.py @@ -1,6 +1,7 @@ from __future__ import annotations import dataclasses as dc +import sys from typing import TYPE_CHECKING, Any, Dict, FrozenSet, Tuple, Type, Union from typing_extensions import Annotated, assert_type @@ -99,3 +100,44 @@ def check_other_isdataclass_overloads(x: type, y: object) -> None: # in case a type checker decides to add some special-casing for # `make_dataclass` in the future) assert_type(D.__mro__, Tuple[type, ...]) + + +if sys.version_info >= (3, 14): + from typing import TypeVar + + _T = TypeVar("_T") + + def custom_dataclass( + cls: type[_T], + /, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> type[_T]: + custom_dc_maker = dc.dataclass( + init=init, + repr=repr, + eq=eq, + order=order, + unsafe_hash=unsafe_hash, + frozen=frozen, + match_args=match_args, + kw_only=kw_only, + slots=slots, + weakref_slot=weakref_slot, + ) + return custom_dc_maker(cls) + + dc.make_dataclass( + "D", + [("a", Union[int, None]), "y", ("z", Annotated[FrozenSet[bytes], "metadata"], dc.field(default=frozenset({b"foo"})))], + decorator=custom_dataclass, + ) diff --git a/stdlib/dataclasses.pyi b/stdlib/dataclasses.pyi index e08b1919d8e5..bba76c1af1b4 100644 --- a/stdlib/dataclasses.pyi +++ b/stdlib/dataclasses.pyi @@ -5,7 +5,7 @@ from _typeshed import DataclassInstance from builtins import type as Type # alias to avoid name clashes with fields named "type" from collections.abc import Callable, Iterable, Mapping from types import GenericAlias -from typing import Any, Generic, Literal, Protocol, TypeVar, overload +from typing import Any, Generic, Literal, Protocol, TypeVar, overload, type_check_only from typing_extensions import Never, TypeIs _T = TypeVar("_T") @@ -31,6 +31,25 @@ if sys.version_info >= (3, 10): _DataclassT = TypeVar("_DataclassT", 
bound=DataclassInstance) +@type_check_only +class _DataclassFactory(Protocol): + def __call__( + self, + cls: type[_T], + /, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> type[_T]: ... + # define _MISSING_TYPE as an enum within the type stubs, # even though that is not really its type at runtime # this allows us to use Literal[_MISSING_TYPE.MISSING] @@ -114,8 +133,27 @@ class Field(Generic[_T]): init: bool compare: bool metadata: types.MappingProxyType[Any, Any] + + if sys.version_info >= (3, 14): + doc: str | None + if sys.version_info >= (3, 10): kw_only: bool | Literal[_MISSING_TYPE.MISSING] + + if sys.version_info >= (3, 14): + def __init__( + self, + default: _T, + default_factory: Callable[[], _T], + init: bool, + repr: bool, + hash: bool | None, + compare: bool, + metadata: Mapping[Any, Any], + kw_only: bool, + doc: str | None, + ) -> None: ... + elif sys.version_info >= (3, 10): def __init__( self, default: _T, @@ -144,7 +182,48 @@ class Field(Generic[_T]): # NOTE: Actual return type is 'Field[_T]', but we want to help type checkers # to understand the magic that happens at runtime. -if sys.version_info >= (3, 10): +if sys.version_info >= (3, 14): + @overload # `default` and `default_factory` are optional and mutually exclusive. + def field( + *, + default: _T, + default_factory: Literal[_MISSING_TYPE.MISSING] = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., + doc: str | None = None, + ) -> _T: ... + @overload + def field( + *, + default: Literal[_MISSING_TYPE.MISSING] = ..., + default_factory: Callable[[], _T], + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., + doc: str | None = None, + ) -> _T: ... + @overload + def field( + *, + default: Literal[_MISSING_TYPE.MISSING] = ..., + default_factory: Literal[_MISSING_TYPE.MISSING] = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., + doc: str | None = None, + ) -> Any: ... + +elif sys.version_info >= (3, 10): @overload # `default` and `default_factory` are optional and mutually exclusive. def field( *, @@ -237,7 +316,28 @@ class InitVar(Generic[_T], metaclass=type): @overload def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm] -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 14): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, Any] | tuple[str, Any, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + module: str | None = None, + decorator: _DataclassFactory = ..., + ) -> type: ... 
+ +elif sys.version_info >= (3, 12): def make_dataclass( cls_name: str, fields: Iterable[str | tuple[str, Any] | tuple[str, Any, Any]], From 7c86592dc1a021c53b3893796eb460fd24cf8ac2 Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Mon, 12 May 2025 11:58:29 -0500 Subject: [PATCH 350/388] Add `InterpreterPoolExecutor` (3.14) (#14008) --- stdlib/@tests/stubtest_allowlists/py314.txt | 11 -- .../test_cases/check_concurrent_futures.py | 48 +++++++++ stdlib/VERSIONS | 1 + stdlib/concurrent/futures/__init__.pyi | 22 +++- stdlib/concurrent/futures/interpreter.pyi | 102 ++++++++++++++++++ stdlib/concurrent/futures/thread.pyi | 94 +++++++++++++--- 6 files changed, 249 insertions(+), 29 deletions(-) create mode 100644 stdlib/concurrent/futures/interpreter.pyi diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index d66b81762c41..d88a157b445d 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -60,17 +60,6 @@ compression.gzip.GzipFile.readinto1 compression.gzip.GzipFile.readinto1 compression.gzip.compress compression.zstd -concurrent.futures.__all__ -concurrent.futures.InterpreterPoolExecutor -concurrent.futures.ThreadPoolExecutor.BROKEN -concurrent.futures.ThreadPoolExecutor.prepare_context -concurrent.futures.interpreter -concurrent.futures.thread.ThreadPoolExecutor.BROKEN -concurrent.futures.thread.ThreadPoolExecutor.prepare_context -concurrent.futures.thread.WorkerContext -concurrent.futures.thread._WorkItem.__init__ -concurrent.futures.thread._WorkItem.run -concurrent.futures.thread._worker ctypes.POINTER ctypes.byref ctypes.memoryview_at diff --git a/stdlib/@tests/test_cases/check_concurrent_futures.py b/stdlib/@tests/test_cases/check_concurrent_futures.py index 962ec23c6b48..ba6ca0845719 100644 --- a/stdlib/@tests/test_cases/check_concurrent_futures.py +++ b/stdlib/@tests/test_cases/check_concurrent_futures.py @@ -1,7 +1,9 @@ from __future__ import annotations +import sys from collections.abc import Callable, Iterator from concurrent.futures import Future, ThreadPoolExecutor, as_completed +from typing import Literal from typing_extensions import assert_type @@ -28,3 +30,49 @@ def execute_callback(callback: Callable[[], Parent], future: Future[Parent]) -> fut: Future[Child] = Future() execute_callback(lambda: Parent(), fut) # type: ignore assert isinstance(fut.result(), Child) + + +if sys.version_info >= (3, 14): + + def _initializer(x: int) -> None: + pass + + def check_interpreter_pool_executor() -> None: + import concurrent.futures.interpreter + from concurrent.futures import InterpreterPoolExecutor + + with InterpreterPoolExecutor(initializer=_initializer, initargs=(1,)): + ... + + with InterpreterPoolExecutor(initializer=_initializer, initargs=("x",)): # type: ignore + ... + + context = InterpreterPoolExecutor.prepare_context(initializer=_initializer, initargs=(1,), shared={}) + worker_context = context[0]() + assert_type(worker_context, concurrent.futures.interpreter.WorkerContext) + resolve_task = context[1] + # Function should enfore that the arguments are correct. + res = resolve_task(_initializer, 1) + assert_type(res, tuple[bytes, Literal["function"]]) + # When the function is a script, the arguments should be a string. + str_res = resolve_task("print('Hello, world!')") + assert_type(str_res, tuple[bytes, Literal["script"]]) + # When a script is passed, no arguments should be provided. 
+ resolve_task("print('Hello, world!')", 1) # type: ignore + + # `WorkerContext.__init__` should accept the result of a resolved task. + concurrent.futures.interpreter.WorkerContext(initdata=res) + + # Run should also accept the result of a resolved task. + worker_context.run(res) + + def check_thread_worker_context() -> None: + import concurrent.futures.thread + + context = concurrent.futures.thread.WorkerContext.prepare(initializer=_initializer, initargs=(1,)) + worker_context = context[0]() + assert_type(worker_context, concurrent.futures.thread.WorkerContext) + resolve_task = context[1] + res = resolve_task(_initializer, (1,), {"test": 1}) + assert_type(res[1], tuple[int]) + assert_type(res[2], dict[str, int]) diff --git a/stdlib/VERSIONS b/stdlib/VERSIONS index 9defa7c27a97..d13340ab345d 100644 --- a/stdlib/VERSIONS +++ b/stdlib/VERSIONS @@ -121,6 +121,7 @@ colorsys: 3.0- compileall: 3.0- compression: 3.14- concurrent: 3.2- +concurrent.futures.interpreter: 3.14- configparser: 3.0- contextlib: 3.0- contextvars: 3.7- diff --git a/stdlib/concurrent/futures/__init__.pyi b/stdlib/concurrent/futures/__init__.pyi index 68fd0bc5acb4..dd1f6da80c4d 100644 --- a/stdlib/concurrent/futures/__init__.pyi +++ b/stdlib/concurrent/futures/__init__.pyi @@ -16,7 +16,27 @@ from ._base import ( from .process import ProcessPoolExecutor as ProcessPoolExecutor from .thread import ThreadPoolExecutor as ThreadPoolExecutor -if sys.version_info >= (3, 13): +if sys.version_info >= (3, 14): + from .interpreter import InterpreterPoolExecutor as InterpreterPoolExecutor + + __all__ = ( + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "CancelledError", + "TimeoutError", + "InvalidStateError", + "BrokenExecutor", + "Future", + "Executor", + "wait", + "as_completed", + "ProcessPoolExecutor", + "ThreadPoolExecutor", + "InterpreterPoolExecutor", + ) + +elif sys.version_info >= (3, 13): __all__ = ( "FIRST_COMPLETED", "FIRST_EXCEPTION", diff --git a/stdlib/concurrent/futures/interpreter.pyi b/stdlib/concurrent/futures/interpreter.pyi new file mode 100644 index 000000000000..c1a29e6b0552 --- /dev/null +++ b/stdlib/concurrent/futures/interpreter.pyi @@ -0,0 +1,102 @@ +import sys +from collections.abc import Callable, Mapping +from concurrent.futures import ThreadPoolExecutor +from typing import Final, Literal, Protocol, overload, type_check_only +from typing_extensions import ParamSpec, Self, TypeAlias, TypeVar, TypeVarTuple, Unpack + +_Task: TypeAlias = tuple[bytes, Literal["function", "script"]] + +@type_check_only +class _TaskFunc(Protocol): + @overload + def __call__(self, fn: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> tuple[bytes, Literal["function"]]: ... + @overload + def __call__(self, fn: str) -> tuple[bytes, Literal["script"]]: ... + +_Ts = TypeVarTuple("_Ts") +_P = ParamSpec("_P") +_R = TypeVar("_R") + +# A `type.simplenamespace` with `__name__` attribute. +@type_check_only +class _HasName(Protocol): + __name__: str + +# `_interpreters.exec` technically gives us a simple namespace. +@type_check_only +class _ExcInfo(Protocol): + formatted: str + msg: str + type: _HasName + +if sys.version_info >= (3, 14): + from concurrent.futures.thread import BrokenThreadPool, WorkerContext as ThreadWorkerContext + + from _interpreters import InterpreterError + + class ExecutionFailed(InterpreterError): + def __init__(self, excinfo: _ExcInfo) -> None: ... 
# type: ignore[override] + + UNBOUND: Final = 2 + + class WorkerContext(ThreadWorkerContext): + # Parent class doesn't have `shared` argument, + @overload # type: ignore[override] + @classmethod + def prepare( + cls, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]], shared: Mapping[str, object] + ) -> tuple[Callable[[], Self], _TaskFunc]: ... + @overload # type: ignore[override] + @classmethod + def prepare( + cls, initializer: Callable[[], object], initargs: tuple[()], shared: Mapping[str, object] + ) -> tuple[Callable[[], Self], _TaskFunc]: ... + def __init__( + self, initdata: tuple[bytes, Literal["function", "script"]], shared: Mapping[str, object] | None = None + ) -> None: ... # type: ignore[override] + def __del__(self) -> None: ... + def run(self, task: _Task) -> None: ... # type: ignore[override] + + class BrokenInterpreterPool(BrokenThreadPool): ... + + class InterpreterPoolExecutor(ThreadPoolExecutor): + BROKEN: type[BrokenInterpreterPool] + + @overload # type: ignore[override] + @classmethod + def prepare_context( + cls, initializer: Callable[[], object], initargs: tuple[()], shared: Mapping[str, object] + ) -> tuple[Callable[[], WorkerContext], _TaskFunc]: ... + @overload # type: ignore[override] + @classmethod + def prepare_context( + cls, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]], shared: Mapping[str, object] + ) -> tuple[Callable[[], WorkerContext], _TaskFunc]: ... + @overload + def __init__( + self, + max_workers: int | None = None, + thread_name_prefix: str = "", + initializer: Callable[[], object] | None = None, + initargs: tuple[()] = (), + shared: Mapping[str, object] | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None = None, + thread_name_prefix: str = "", + *, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + shared: Mapping[str, object] | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_workers: int | None, + thread_name_prefix: str, + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + shared: Mapping[str, object] | None = None, + ) -> None: ... diff --git a/stdlib/concurrent/futures/thread.pyi b/stdlib/concurrent/futures/thread.pyi index da3e006b6f13..22df0dca5a3f 100644 --- a/stdlib/concurrent/futures/thread.pyi +++ b/stdlib/concurrent/futures/thread.pyi @@ -1,9 +1,10 @@ import queue +import sys from collections.abc import Callable, Iterable, Mapping, Set as AbstractSet from threading import Lock, Semaphore, Thread from types import GenericAlias -from typing import Any, Generic, TypeVar, overload -from typing_extensions import TypeVarTuple, Unpack +from typing import Any, Generic, Protocol, TypeVar, overload, type_check_only +from typing_extensions import Self, TypeAlias, TypeVarTuple, Unpack from weakref import ref from ._base import BrokenExecutor, Executor, Future @@ -18,25 +19,71 @@ def _python_exit() -> None: ... _S = TypeVar("_S") -class _WorkItem(Generic[_S]): - future: Future[_S] - fn: Callable[..., _S] - args: Iterable[Any] - kwargs: Mapping[str, Any] - def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... - def run(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... - -def _worker( - executor_reference: ref[Any], - work_queue: queue.SimpleQueue[Any], - initializer: Callable[[Unpack[_Ts]], object], - initargs: tuple[Unpack[_Ts]], -) -> None: ... 
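
# Illustrative usage sketch, not part of the diff above or below. It assumes a
# CPython 3.14 interpreter, where concurrent.futures gained InterpreterPoolExecutor
# on top of the worker-context machinery typed here; builtins are used as the
# initializer and the task so the snippet stays resolvable from any worker interpreter.
import sys

if sys.version_info >= (3, 14):
    from concurrent.futures import InterpreterPoolExecutor

    # initargs must line up with the initializer's parameters: the pairing that the
    # prepare()/prepare_context() overloads below (and in interpreter.pyi) encode.
    with InterpreterPoolExecutor(max_workers=2, initializer=print, initargs=("worker ready",)) as pool:
        assert pool.submit(pow, 2, 10).result() == 1024
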
+_Task: TypeAlias = tuple[Callable[..., Any], tuple[Any, ...], dict[str, Any]] + +_C = TypeVar("_C", bound=Callable[..., object]) +_KT = TypeVar("_KT", bound=str) +_VT = TypeVar("_VT") + +@type_check_only +class _ResolveTaskFunc(Protocol): + def __call__( + self, func: _C, args: tuple[Unpack[_Ts]], kwargs: dict[_KT, _VT] + ) -> tuple[_C, tuple[Unpack[_Ts]], dict[_KT, _VT]]: ... + +if sys.version_info >= (3, 14): + class WorkerContext: + @overload + @classmethod + def prepare( + cls, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]] + ) -> tuple[Callable[[], Self], _ResolveTaskFunc]: ... + @overload + @classmethod + def prepare( + cls, initializer: Callable[[], object], initargs: tuple[()] + ) -> tuple[Callable[[], Self], _ResolveTaskFunc]: ... + @overload + def __init__(self, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]]) -> None: ... + @overload + def __init__(self, initializer: Callable[[], object], initargs: tuple[()]) -> None: ... + def initialize(self) -> None: ... + def finalize(self) -> None: ... + def run(self, task: _Task) -> None: ... + +if sys.version_info >= (3, 14): + class _WorkItem(Generic[_S]): + future: Future[Any] + task: _Task + def __init__(self, future: Future[Any], task: _Task) -> None: ... + def run(self, ctx: WorkerContext) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + + def _worker(executor_reference: ref[Any], ctx: WorkerContext, work_queue: queue.SimpleQueue[Any]) -> None: ... + +else: + class _WorkItem(Generic[_S]): + future: Future[_S] + fn: Callable[..., _S] + args: Iterable[Any] + kwargs: Mapping[str, Any] + def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... + def run(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + + def _worker( + executor_reference: ref[Any], + work_queue: queue.SimpleQueue[Any], + initializer: Callable[[Unpack[_Ts]], object], + initargs: tuple[Unpack[_Ts]], + ) -> None: ... class BrokenThreadPool(BrokenExecutor): ... class ThreadPoolExecutor(Executor): + if sys.version_info >= (3, 14): + BROKEN: type[BrokenThreadPool] + _max_workers: int _idle_semaphore: Semaphore _threads: AbstractSet[Thread] @@ -47,6 +94,19 @@ class ThreadPoolExecutor(Executor): _initializer: Callable[..., None] | None _initargs: tuple[Any, ...] _work_queue: queue.SimpleQueue[_WorkItem[Any]] + + if sys.version_info >= (3, 14): + @overload + @classmethod + def prepare_context( + cls, initializer: Callable[[], object], initargs: tuple[()] + ) -> tuple[Callable[[], Self], _ResolveTaskFunc]: ... + @overload + @classmethod + def prepare_context( + cls, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]] + ) -> tuple[Callable[[], Self], _ResolveTaskFunc]: ... + @overload def __init__( self, From eaa7dceb7696e631c7060f7787c7b0a2f3b186e7 Mon Sep 17 00:00:00 2001 From: Mickel Edward Date: Tue, 13 May 2025 00:43:48 +0700 Subject: [PATCH 351/388] tqdm: Fix return type of `async for` loop (#14028) --- stubs/tqdm/tqdm/asyncio.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/tqdm/tqdm/asyncio.pyi b/stubs/tqdm/tqdm/asyncio.pyi index 78161f0d4c72..c0c1af4f3314 100644 --- a/stubs/tqdm/tqdm/asyncio.pyi +++ b/stubs/tqdm/tqdm/asyncio.pyi @@ -16,7 +16,7 @@ class tqdm_asyncio(std_tqdm[_T]): iterable_iterator: Iterator[_T] def __aiter__(self) -> Self: ... - async def __anext__(self) -> Awaitable[_T]: ... + async def __anext__(self) -> _T: ... 
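
# Illustrative sketch, not part of the patch; it assumes the tqdm package is
# installed. The corrected return type above is about this pattern: each value
# produced by `async for` is a plain item, not an awaitable wrapping one.
import asyncio
from collections.abc import AsyncIterator

from tqdm.asyncio import tqdm_asyncio


async def numbers(n: int) -> AsyncIterator[int]:
    for i in range(n):
        await asyncio.sleep(0)
        yield i


async def main() -> None:
    total = 0
    async for i in tqdm_asyncio(numbers(5), total=5):
        total += i  # `i` is an int, which the fixed `__anext__` signature now reflects
    print(total)


asyncio.run(main())
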
def send(self, *args, **kwargs): ... @classmethod def as_completed( From 3e410407fc4a6a4c9ca4ea723bcecd6c3dc57505 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 13 May 2025 01:12:32 +0200 Subject: [PATCH 352/388] Fix variance of a few email-related classes (#13952) Closes #13919 --- stdlib/@tests/test_cases/email/check_mime.py | 4 +++ stdlib/email/_policybase.pyi | 19 +++++----- stdlib/email/message.pyi | 38 ++++++++++---------- stdlib/email/mime/text.pyi | 2 +- 4 files changed, 35 insertions(+), 28 deletions(-) create mode 100644 stdlib/@tests/test_cases/email/check_mime.py diff --git a/stdlib/@tests/test_cases/email/check_mime.py b/stdlib/@tests/test_cases/email/check_mime.py new file mode 100644 index 000000000000..e49d2bfacc21 --- /dev/null +++ b/stdlib/@tests/test_cases/email/check_mime.py @@ -0,0 +1,4 @@ +from email.mime.text import MIMEText +from email.policy import SMTP + +msg = MIMEText("", policy=SMTP) diff --git a/stdlib/email/_policybase.pyi b/stdlib/email/_policybase.pyi index b345c84a95cb..0fb890d424b1 100644 --- a/stdlib/email/_policybase.pyi +++ b/stdlib/email/_policybase.pyi @@ -8,6 +8,7 @@ from typing_extensions import Self __all__ = ["Policy", "Compat32", "compat32"] _MessageT = TypeVar("_MessageT", bound=Message[Any, Any], default=Message[str, str]) +_MessageT_co = TypeVar("_MessageT_co", covariant=True, bound=Message[Any, Any], default=Message[str, str]) @type_check_only class _MessageFactory(Protocol[_MessageT]): @@ -16,13 +17,13 @@ class _MessageFactory(Protocol[_MessageT]): # Policy below is the only known direct subclass of _PolicyBase. We therefore # assume that the __init__ arguments and attributes of _PolicyBase are # the same as those of Policy. -class _PolicyBase(Generic[_MessageT]): +class _PolicyBase(Generic[_MessageT_co]): max_line_length: int | None linesep: str cte_type: str raise_on_defect: bool mangle_from_: bool - message_factory: _MessageFactory[_MessageT] | None + message_factory: _MessageFactory[_MessageT_co] | None # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool @@ -34,7 +35,7 @@ class _PolicyBase(Generic[_MessageT]): cte_type: str = "8bit", raise_on_defect: bool = False, mangle_from_: bool = ..., # default depends on sub-class - message_factory: _MessageFactory[_MessageT] | None = None, + message_factory: _MessageFactory[_MessageT_co] | None = None, # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = True, ) -> None: ... @@ -46,15 +47,17 @@ class _PolicyBase(Generic[_MessageT]): cte_type: str = ..., raise_on_defect: bool = ..., mangle_from_: bool = ..., - message_factory: _MessageFactory[_MessageT] | None = ..., + message_factory: _MessageFactory[_MessageT_co] | None = ..., # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = ..., ) -> Self: ... def __add__(self, other: Policy) -> Self: ... -class Policy(_PolicyBase[_MessageT], metaclass=ABCMeta): - def handle_defect(self, obj: _MessageT, defect: MessageDefect) -> None: ... - def register_defect(self, obj: _MessageT, defect: MessageDefect) -> None: ... +class Policy(_PolicyBase[_MessageT_co], metaclass=ABCMeta): + # Every Message object has a `defects` attribute, so the following + # methods will work for any Message object. + def handle_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: ... + def register_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: ... def header_max_count(self, name: str) -> int | None: ... 
@abstractmethod def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... @@ -67,7 +70,7 @@ class Policy(_PolicyBase[_MessageT], metaclass=ABCMeta): @abstractmethod def fold_binary(self, name: str, value: str) -> bytes: ... -class Compat32(Policy[_MessageT]): +class Compat32(Policy[_MessageT_co]): def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... def header_fetch_parse(self, name: str, value: str) -> str | Header: ... # type: ignore[override] diff --git a/stdlib/email/message.pyi b/stdlib/email/message.pyi index ebad05a1cf7b..e4d14992168a 100644 --- a/stdlib/email/message.pyi +++ b/stdlib/email/message.pyi @@ -12,12 +12,12 @@ __all__ = ["Message", "EmailMessage"] _T = TypeVar("_T") # Type returned by Policy.header_fetch_parse, often str or Header. -_HeaderT = TypeVar("_HeaderT", default=str) -_HeaderParamT = TypeVar("_HeaderParamT", default=str) +_HeaderT_co = TypeVar("_HeaderT_co", covariant=True, default=str) +_HeaderParamT_contra = TypeVar("_HeaderParamT_contra", contravariant=True, default=str) # Represents headers constructed by HeaderRegistry. Those are sub-classes # of BaseHeader and another header type. -_HeaderRegistryT = TypeVar("_HeaderRegistryT", default=Any) -_HeaderRegistryParamT = TypeVar("_HeaderRegistryParamT", default=Any) +_HeaderRegistryT_co = TypeVar("_HeaderRegistryT_co", covariant=True, default=Any) +_HeaderRegistryParamT_contra = TypeVar("_HeaderRegistryParamT_contra", contravariant=True, default=Any) _PayloadType: TypeAlias = Message | str _EncodedPayloadType: TypeAlias = Message | bytes @@ -30,7 +30,7 @@ class _SupportsEncodeToPayload(Protocol): class _SupportsDecodeToPayload(Protocol): def decode(self, encoding: str, errors: str, /) -> _PayloadType | _MultipartPayloadType: ... -class Message(Generic[_HeaderT, _HeaderParamT]): +class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): # The policy attributes and arguments in this class and its subclasses # would ideally use Policy[Self], but this is not possible. policy: Policy[Any] # undocumented @@ -76,22 +76,22 @@ class Message(Generic[_HeaderT, _HeaderParamT]): # This is important for protocols using __getitem__, like SupportsKeysAndGetItem # Morally, the return type should be `AnyOf[_HeaderType, None]`, # so using "the Any trick" instead. - def __getitem__(self, name: str) -> _HeaderT | MaybeNone: ... - def __setitem__(self, name: str, val: _HeaderParamT) -> None: ... + def __getitem__(self, name: str) -> _HeaderT_co | MaybeNone: ... + def __setitem__(self, name: str, val: _HeaderParamT_contra) -> None: ... def __delitem__(self, name: str) -> None: ... def keys(self) -> list[str]: ... - def values(self) -> list[_HeaderT]: ... - def items(self) -> list[tuple[str, _HeaderT]]: ... + def values(self) -> list[_HeaderT_co]: ... + def items(self) -> list[tuple[str, _HeaderT_co]]: ... @overload - def get(self, name: str, failobj: None = None) -> _HeaderT | None: ... + def get(self, name: str, failobj: None = None) -> _HeaderT_co | None: ... @overload - def get(self, name: str, failobj: _T) -> _HeaderT | _T: ... + def get(self, name: str, failobj: _T) -> _HeaderT_co | _T: ... @overload - def get_all(self, name: str, failobj: None = None) -> list[_HeaderT] | None: ... + def get_all(self, name: str, failobj: None = None) -> list[_HeaderT_co] | None: ... @overload - def get_all(self, name: str, failobj: _T) -> list[_HeaderT] | _T: ... 
+ def get_all(self, name: str, failobj: _T) -> list[_HeaderT_co] | _T: ... def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... - def replace_header(self, _name: str, _value: _HeaderParamT) -> None: ... + def replace_header(self, _name: str, _value: _HeaderParamT_contra) -> None: ... def get_content_type(self) -> str: ... def get_content_maintype(self) -> str: ... def get_content_subtype(self) -> str: ... @@ -144,18 +144,18 @@ class Message(Generic[_HeaderT, _HeaderParamT]): replace: bool = False, ) -> None: ... # The following two methods are undocumented, but a source code comment states that they are public API - def set_raw(self, name: str, value: _HeaderParamT) -> None: ... - def raw_items(self) -> Iterator[tuple[str, _HeaderT]]: ... + def set_raw(self, name: str, value: _HeaderParamT_contra) -> None: ... + def raw_items(self) -> Iterator[tuple[str, _HeaderT_co]]: ... -class MIMEPart(Message[_HeaderRegistryT, _HeaderRegistryParamT]): +class MIMEPart(Message[_HeaderRegistryT_co, _HeaderRegistryParamT_contra]): def __init__(self, policy: Policy[Any] | None = None) -> None: ... - def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT] | None: ... + def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT_co] | None: ... def attach(self, payload: Self) -> None: ... # type: ignore[override] # The attachments are created via type(self) in the attach method. It's theoretically # possible to sneak other attachment types into a MIMEPart instance, but could cause # cause unforseen consequences. def iter_attachments(self) -> Iterator[Self]: ... - def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT]]: ... + def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT_co]]: ... def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ... def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ... def make_related(self, boundary: str | None = None) -> None: ... 
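
As background for the variance change above: with `_MessageT_co` covariant, a policy parametrized over a more specific message type, such as `email.policy.SMTP` (an `EmailPolicy` whose `message_factory` produces `EmailMessage` objects), is accepted wherever a plain `Policy` is annotated, which is the situation the new `check_mime.py` test case exercises. A minimal sketch of that pattern, using only the standard library (runtime behaviour is unchanged; only what type checkers accept widens):

    from email.message import EmailMessage
    from email.policy import SMTP, Policy


    def render(msg: EmailMessage, policy: Policy) -> bytes:
        # In these stubs, bare `Policy` defaults to Policy[Message[str, str]]; with the
        # message type parameter covariant, an EmailPolicy such as SMTP is accepted too.
        return msg.as_bytes(policy=policy)


    msg = EmailMessage()
    msg["Subject"] = "hello"
    msg.set_content("hi")
    print(render(msg, SMTP))
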
diff --git a/stdlib/email/mime/text.pyi b/stdlib/email/mime/text.pyi index 74d5ef4c5cae..edfa67a09242 100644 --- a/stdlib/email/mime/text.pyi +++ b/stdlib/email/mime/text.pyi @@ -1,5 +1,5 @@ +from email._policybase import Policy from email.mime.nonmultipart import MIMENonMultipart -from email.policy import Policy __all__ = ["MIMEText"] From 44b354a534713a80ea63bceeb4d30dfdd748030e Mon Sep 17 00:00:00 2001 From: Max Muoto Date: Mon, 12 May 2025 18:17:26 -0500 Subject: [PATCH 353/388] `threading` updates for 3.14 (#14032) --- stdlib/@tests/stubtest_allowlists/py314.txt | 2 -- stdlib/threading.pyi | 38 +++++++++++++++------ 2 files changed, 28 insertions(+), 12 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index d88a157b445d..8bcf628f4378 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -141,8 +141,6 @@ string.templatelib sys.is_remote_debug_enabled sys.remote_exec tarfile.TarFile.zstopen -threading.Thread.__init__ -threading._RLock.locked tkinter.Event.__class_getitem__ turtle.__all__ turtle.RawTurtle.fill diff --git a/stdlib/threading.pyi b/stdlib/threading.pyi index 99f5c8d2a516..d31351754d05 100644 --- a/stdlib/threading.pyi +++ b/stdlib/threading.pyi @@ -3,6 +3,7 @@ import sys from _thread import _excepthook, _ExceptHookArgs, get_native_id as get_native_id from _typeshed import ProfileFunction, TraceFunction from collections.abc import Callable, Iterable, Mapping +from contextvars import ContextVar from types import TracebackType from typing import Any, TypeVar, final from typing_extensions import deprecated @@ -76,16 +77,30 @@ class Thread: @property def ident(self) -> int | None: ... daemon: bool - def __init__( - self, - group: None = None, - target: Callable[..., object] | None = None, - name: str | None = None, - args: Iterable[Any] = (), - kwargs: Mapping[str, Any] | None = None, - *, - daemon: bool | None = None, - ) -> None: ... + if sys.version_info >= (3, 14): + def __init__( + self, + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] | None = None, + *, + daemon: bool | None = None, + context: ContextVar[Any] | None = None, + ) -> None: ... + else: + def __init__( + self, + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] | None = None, + *, + daemon: bool | None = None, + ) -> None: ... + def start(self) -> None: ... def run(self) -> None: ... def join(self, timeout: float | None = None) -> None: ... @@ -116,6 +131,9 @@ class _RLock: __enter__ = acquire def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + if sys.version_info >= (3, 14): + def locked(self) -> bool: ... + RLock = _thread.RLock # Actually a function at runtime. 
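
# Illustrative sketch, not part of the diff; it exercises the _RLock.locked()
# method typed above, which is new in Python 3.14: locked() reports whether the
# reentrant lock is currently held.
import sys
import threading

if sys.version_info >= (3, 14):
    lock = threading.RLock()
    assert not lock.locked()
    with lock:
        assert lock.locked()
    assert not lock.locked()
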
class Condition: From 623cc4cf022d4c9b8a78544407198ab26285aeb5 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Mon, 12 May 2025 23:20:07 +0000 Subject: [PATCH 354/388] Bump `imaplib` to 3.14 (#14022) --- stdlib/@tests/stubtest_allowlists/py314.txt | 4 -- stdlib/imaplib.pyi | 41 ++++++++++++++++++--- 2 files changed, 35 insertions(+), 10 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 8bcf628f4378..ccf1541bfb1a 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -87,10 +87,6 @@ fractions.Fraction.from_number gzip.GzipFile.readinto gzip.GzipFile.readinto1 gzip.compress -imaplib.IMAP4.file -imaplib.IMAP4.idle -imaplib.IMAP4_SSL.file -imaplib.IMAP4_stream.file importlib.abc.ResourceReader importlib.abc.Traversable importlib.abc.TraversableResources diff --git a/stdlib/imaplib.pyi b/stdlib/imaplib.pyi index ccee92bd5e88..536985a592b7 100644 --- a/stdlib/imaplib.pyi +++ b/stdlib/imaplib.pyi @@ -1,16 +1,16 @@ import subprocess import sys import time -from _typeshed import ReadableBuffer, SizedBuffer +from _typeshed import ReadableBuffer, SizedBuffer, Unused from builtins import list as _list # conflicts with a method named "list" -from collections.abc import Callable +from collections.abc import Callable, Generator from datetime import datetime from re import Pattern from socket import socket as _socket from ssl import SSLContext, SSLSocket from types import TracebackType from typing import IO, Any, Literal, SupportsAbs, SupportsInt -from typing_extensions import Self, TypeAlias +from typing_extensions import Self, TypeAlias, deprecated __all__ = ["IMAP4", "IMAP4_stream", "Internaldate2tuple", "Int2AP", "ParseFlags", "Time2Internaldate", "IMAP4_SSL"] @@ -42,11 +42,17 @@ class IMAP4: PROTOCOL_VERSION: str def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... + if sys.version_info >= (3, 14): + @property + @deprecated("IMAP4.file is unsupported, can cause errors, and may be removed.") + def file(self) -> IO[str] | IO[bytes]: ... + else: + file: IO[str] | IO[bytes] + def __getattr__(self, attr: str) -> Any: ... host: str port: int sock: _socket - file: IO[str] | IO[bytes] def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... def send(self, data: ReadableBuffer) -> None: ... @@ -72,6 +78,9 @@ class IMAP4: def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: ... def getquota(self, root: str) -> _CommandResults: ... def getquotaroot(self, mailbox: str) -> _CommandResults: ... + if sys.version_info >= (3, 14): + def idle(self, duration: float | None = None) -> Idler: ... + def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: ... def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: ... def login_cram_md5(self, user: str, password: str) -> _CommandResults: ... @@ -100,6 +109,15 @@ class IMAP4: def xatom(self, name: str, *args: str) -> _CommandResults: ... def print_log(self) -> None: ... +if sys.version_info >= (3, 14): + class Idler: + def __init__(self, imap: IMAP4, duration: float | None = None) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: object, exc_val: Unused, exc_tb: Unused) -> Literal[False]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[str, float | None]: ... 
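
# Illustrative sketch, not part of the diff; it shows how the Idler typed here is
# meant to be used. The host, credentials and mailbox are placeholders, and a real
# IMAP server supporting IDLE is required to run it.
import imaplib

with imaplib.IMAP4_SSL("imap.example.com") as imap:
    imap.login("user", "app-password")
    imap.select("INBOX")
    # IMAP4.idle() returns an Idler usable as a context manager and as an iterator.
    with imap.idle(duration=29 * 60) as idler:
        for response in idler.burst():
            print("received while idling:", response)
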
+ def burst(self, interval: float = 0.1) -> Generator[tuple[str, float | None]]: ... + class IMAP4_SSL(IMAP4): if sys.version_info < (3, 12): keyfile: str @@ -119,14 +137,25 @@ class IMAP4_SSL(IMAP4): timeout: float | None = None, ) -> None: ... sslobj: SSLSocket - file: IO[Any] + if sys.version_info >= (3, 14): + @property + @deprecated("IMAP4_SSL.file is unsupported, can cause errors, and may be removed.") + def file(self) -> IO[Any]: ... + else: + file: IO[Any] + def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: ... def ssl(self) -> SSLSocket: ... class IMAP4_stream(IMAP4): command: str def __init__(self, command: str) -> None: ... - file: IO[Any] + if sys.version_info >= (3, 14): + @property + @deprecated("IMAP4_stream.file is unsupported, can cause errors, and may be removed.") + def file(self) -> IO[Any]: ... + else: + file: IO[Any] process: subprocess.Popen[bytes] writefile: IO[Any] readfile: IO[Any] From aafd9758a83ee7ed7507349712d8c3b7cbd8c0ae Mon Sep 17 00:00:00 2001 From: sobolevn Date: Tue, 13 May 2025 03:14:35 +0300 Subject: [PATCH 355/388] Add `HAS_PHA` to `_ssl` (#14033) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/_ssl.pyi | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index ccf1541bfb1a..238ea1e3b58e 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -16,7 +16,6 @@ _heapq.heapreplace_max _imp.pyc_magic_number_token _socket.IP_RECVTTL _socket.if_indextoname -_ssl.HAS_PHA _thread.RLock.locked _thread.set_name ast.Interpolation diff --git a/stdlib/_ssl.pyi b/stdlib/_ssl.pyi index e39ab5eb6de8..7ab880e4def7 100644 --- a/stdlib/_ssl.pyi +++ b/stdlib/_ssl.pyi @@ -283,6 +283,8 @@ HAS_TLSv1: bool HAS_TLSv1_1: bool HAS_TLSv1_2: bool HAS_TLSv1_3: bool +if sys.version_info >= (3, 14): + HAS_PHA: bool # version info OPENSSL_VERSION_NUMBER: int From 512d2499c95a6f81f7b5525f48430a96fafc5010 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 13 May 2025 08:53:39 +0200 Subject: [PATCH 356/388] Replace incomplete module markers (#14030) --- pyrightconfig.stricter.json | 4 ++++ stdlib/__main__.pyi | 4 +--- stdlib/encodings/__init__.pyi | 3 +-- stubs/Pygments/pygments/lexers/__init__.pyi | 4 ++-- stubs/Pygments/pygments/styles/__init__.pyi | 3 +-- stubs/docutils/docutils/examples.pyi | 4 +--- stubs/docutils/docutils/parsers/recommonmark_wrapper.pyi | 4 +--- stubs/docutils/docutils/parsers/rst/directives/body.pyi | 4 +--- stubs/docutils/docutils/parsers/rst/directives/html.pyi | 4 +--- stubs/docutils/docutils/parsers/rst/directives/images.pyi | 4 +--- stubs/docutils/docutils/parsers/rst/directives/tables.pyi | 4 +--- stubs/docutils/docutils/parsers/rst/states.pyi | 2 +- stubs/docutils/docutils/readers/pep.pyi | 4 +--- stubs/docutils/docutils/readers/standalone.pyi | 4 +--- stubs/docutils/docutils/transforms/__init__.pyi | 2 +- stubs/docutils/docutils/writers/docutils_xml.pyi | 4 +--- stubs/docutils/docutils/writers/html4css1.pyi | 4 +--- stubs/docutils/docutils/writers/html5_polyglot.pyi | 4 +--- stubs/docutils/docutils/writers/latex2e.pyi | 3 +-- stubs/docutils/docutils/writers/manpage.pyi | 4 +--- stubs/docutils/docutils/writers/null.pyi | 4 +--- stubs/docutils/docutils/writers/odf_odt.pyi | 4 +--- stubs/docutils/docutils/writers/pep_html.pyi | 4 +--- 
stubs/docutils/docutils/writers/pseudoxml.pyi | 4 +--- stubs/docutils/docutils/writers/s5_html.pyi | 4 +--- stubs/docutils/docutils/writers/xetex.pyi | 4 +--- stubs/flake8-typing-imports/flake8_typing_imports.pyi | 3 +-- stubs/gdb/gdb/dap/breakpoint.pyi | 4 +--- stubs/gdb/gdb/dap/bt.pyi | 4 +--- stubs/gdb/gdb/dap/disassemble.pyi | 4 +--- stubs/gdb/gdb/dap/evaluate.pyi | 4 +--- stubs/gdb/gdb/dap/events.pyi | 4 +--- stubs/gdb/gdb/dap/frames.pyi | 4 +--- stubs/gdb/gdb/dap/io.pyi | 4 +--- stubs/gdb/gdb/dap/launch.pyi | 4 +--- stubs/gdb/gdb/dap/locations.pyi | 4 +--- stubs/gdb/gdb/dap/memory.pyi | 4 +--- stubs/gdb/gdb/dap/modules.pyi | 4 +--- stubs/gdb/gdb/dap/next.pyi | 4 +--- stubs/gdb/gdb/dap/pause.pyi | 4 +--- stubs/gdb/gdb/dap/scopes.pyi | 4 +--- stubs/gdb/gdb/dap/server.pyi | 4 +--- stubs/gdb/gdb/dap/sources.pyi | 4 +--- stubs/gdb/gdb/dap/startup.pyi | 4 +--- stubs/gdb/gdb/dap/state.pyi | 4 +--- stubs/gdb/gdb/dap/threads.pyi | 4 +--- stubs/gdb/gdb/dap/typecheck.pyi | 4 +--- stubs/gdb/gdb/dap/varref.pyi | 4 +--- stubs/python-datemath/datemath/helpers.pyi | 4 +--- stubs/tensorflow/tensorflow/__init__.pyi | 2 +- stubs/tensorflow/tensorflow/config/__init__.pyi | 3 +-- stubs/tensorflow/tensorflow/config/experimental.pyi | 3 +-- stubs/tensorflow/tensorflow/data/__init__.pyi | 2 +- stubs/tensorflow/tensorflow/data/experimental.pyi | 3 +-- stubs/tensorflow/tensorflow/distribute/__init__.pyi | 4 +--- stubs/tensorflow/tensorflow/distribute/coordinator.pyi | 4 +--- .../tensorflow/distribute/experimental/coordinator.pyi | 3 +-- stubs/tensorflow/tensorflow/dtypes.pyi | 2 +- stubs/tensorflow/tensorflow/experimental/__init__.pyi | 2 +- stubs/tensorflow/tensorflow/experimental/dtensor.pyi | 2 +- stubs/tensorflow/tensorflow/io/__init__.pyi | 3 +-- stubs/tensorflow/tensorflow/io/gfile.pyi | 4 ++-- stubs/tensorflow/tensorflow/keras/__init__.pyi | 4 +--- stubs/tensorflow/tensorflow/keras/constraints.pyi | 3 +-- stubs/tensorflow/tensorflow/keras/initializers.pyi | 3 +-- stubs/tensorflow/tensorflow/keras/layers/__init__.pyi | 2 +- stubs/tensorflow/tensorflow/keras/losses.pyi | 2 +- stubs/tensorflow/tensorflow/keras/metrics.pyi | 3 +-- stubs/tensorflow/tensorflow/keras/models.pyi | 2 +- stubs/tensorflow/tensorflow/keras/optimizers/__init__.pyi | 2 +- .../tensorflow/keras/optimizers/legacy/__init__.pyi | 3 +-- stubs/tensorflow/tensorflow/keras/regularizers.pyi | 3 +-- stubs/tensorflow/tensorflow/linalg.pyi | 3 +-- stubs/tensorflow/tensorflow/math.pyi | 3 +-- stubs/tensorflow/tensorflow/nn.pyi | 3 +-- stubs/tensorflow/tensorflow/python/__init__.pyi | 4 +--- .../tensorflow/python/distribute/distribute_lib.pyi | 2 +- stubs/tensorflow/tensorflow/python/keras/__init__.pyi | 4 +--- stubs/tensorflow/tensorflow/python/trackable/resource.pyi | 4 +--- stubs/tensorflow/tensorflow/raw_ops.pyi | 3 +-- stubs/tensorflow/tensorflow/sparse.pyi | 2 +- stubs/tensorflow/tensorflow/train/__init__.pyi | 2 +- stubs/tensorflow/tensorflow/train/experimental.pyi | 3 +-- stubs/tensorflow/tensorflow/types/experimental.pyi | 3 +-- 84 files changed, 89 insertions(+), 199 deletions(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index a38c73ba6dd1..b24a9abf2af3 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -8,8 +8,10 @@ "exclude": [ // test cases use a custom pyrightconfig file "**/@tests/test_cases", + "stdlib/__main__.pyi", "stdlib/distutils/command", "stdlib/distutils/dist.pyi", + "stdlib/encodings/__init__.pyi", "stdlib/lib2to3/fixes/*.pyi", "stdlib/numbers.pyi", 
"stdlib/optparse.pyi", @@ -38,6 +40,7 @@ "stubs/defusedxml", "stubs/docker", "stubs/docutils", + "stubs/flake8-typing-imports", "stubs/Flask-SocketIO", "stubs/fpdf2", "stubs/gdb", @@ -79,6 +82,7 @@ "stubs/Pygments", "stubs/PyMySQL", "stubs/python-crontab", + "stubs/python-datemath", "stubs/python-dateutil", "stubs/python-http-client", "stubs/python-jose", diff --git a/stdlib/__main__.pyi b/stdlib/__main__.pyi index e27843e53382..5b0f74feb261 100644 --- a/stdlib/__main__.pyi +++ b/stdlib/__main__.pyi @@ -1,3 +1 @@ -from typing import Any - -def __getattr__(name: str) -> Any: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stdlib/encodings/__init__.pyi b/stdlib/encodings/__init__.pyi index 2e83f0f65a71..12ec6792d49b 100644 --- a/stdlib/encodings/__init__.pyi +++ b/stdlib/encodings/__init__.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from codecs import CodecInfo class CodecRegistryError(LookupError, SystemError): ... @@ -7,4 +6,4 @@ def normalize_encoding(encoding: str | bytes) -> str: ... def search_function(encoding: str) -> CodecInfo | None: ... # Needed for submodules -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/Pygments/pygments/lexers/__init__.pyi b/stubs/Pygments/pygments/lexers/__init__.pyi index 12dadbcf3fd8..128c5dcaf71b 100644 --- a/stubs/Pygments/pygments/lexers/__init__.pyi +++ b/stubs/Pygments/pygments/lexers/__init__.pyi @@ -1,4 +1,4 @@ -from _typeshed import FileDescriptorOrPath, Incomplete, StrPath +from _typeshed import FileDescriptorOrPath, StrPath from collections.abc import Iterator from typing import Any @@ -16,4 +16,4 @@ def guess_lexer_for_filename(_fn: StrPath, _text: str, **options: Any) -> Lexer: def guess_lexer(_text: str | bytes, **options: Any) -> Lexer: ... # Having every lexer class here doesn't seem to be worth it -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/Pygments/pygments/styles/__init__.pyi b/stubs/Pygments/pygments/styles/__init__.pyi index 77d42db8f586..6aeb9cf90f95 100644 --- a/stubs/Pygments/pygments/styles/__init__.pyi +++ b/stubs/Pygments/pygments/styles/__init__.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Iterator, Mapping from pygments.style import Style @@ -10,4 +9,4 @@ def get_style_by_name(name) -> type[Style]: ... def get_all_styles() -> Iterator[str]: ... # Having every style class here doesn't seem to be worth it -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/examples.pyi b/stubs/docutils/docutils/examples.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/examples.pyi +++ b/stubs/docutils/docutils/examples.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/parsers/recommonmark_wrapper.pyi b/stubs/docutils/docutils/parsers/recommonmark_wrapper.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/parsers/recommonmark_wrapper.pyi +++ b/stubs/docutils/docutils/parsers/recommonmark_wrapper.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/docutils/docutils/parsers/rst/directives/body.pyi b/stubs/docutils/docutils/parsers/rst/directives/body.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/parsers/rst/directives/body.pyi +++ b/stubs/docutils/docutils/parsers/rst/directives/body.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/parsers/rst/directives/html.pyi b/stubs/docutils/docutils/parsers/rst/directives/html.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/parsers/rst/directives/html.pyi +++ b/stubs/docutils/docutils/parsers/rst/directives/html.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/parsers/rst/directives/images.pyi b/stubs/docutils/docutils/parsers/rst/directives/images.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/parsers/rst/directives/images.pyi +++ b/stubs/docutils/docutils/parsers/rst/directives/images.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/parsers/rst/directives/tables.pyi b/stubs/docutils/docutils/parsers/rst/directives/tables.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/parsers/rst/directives/tables.pyi +++ b/stubs/docutils/docutils/parsers/rst/directives/tables.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/parsers/rst/states.pyi b/stubs/docutils/docutils/parsers/rst/states.pyi index 93b706272254..33d80a454093 100644 --- a/stubs/docutils/docutils/parsers/rst/states.pyi +++ b/stubs/docutils/docutils/parsers/rst/states.pyi @@ -141,4 +141,4 @@ class Inliner: def implicit_inline(self, text: str, lineno: int) -> list[nodes.Text]: ... dispatch: dict[str, Callable[[Match[str], int], tuple[str, list[nodes.problematic], str, list[nodes.system_message]]]] = ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/readers/pep.pyi b/stubs/docutils/docutils/readers/pep.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/readers/pep.pyi +++ b/stubs/docutils/docutils/readers/pep.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/readers/standalone.pyi b/stubs/docutils/docutils/readers/standalone.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/readers/standalone.pyi +++ b/stubs/docutils/docutils/readers/standalone.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/docutils/docutils/transforms/__init__.pyi b/stubs/docutils/docutils/transforms/__init__.pyi index 13323fcbb468..589817290244 100644 --- a/stubs/docutils/docutils/transforms/__init__.pyi +++ b/stubs/docutils/docutils/transforms/__init__.pyi @@ -11,4 +11,4 @@ class Transformer: def add_transform(self, transform_class: type[Transform], priority: int | None = None, **kwargs) -> None: ... def __getattr__(self, name: str, /) -> Incomplete: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/docutils_xml.pyi b/stubs/docutils/docutils/writers/docutils_xml.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/docutils_xml.pyi +++ b/stubs/docutils/docutils/writers/docutils_xml.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/html4css1.pyi b/stubs/docutils/docutils/writers/html4css1.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/html4css1.pyi +++ b/stubs/docutils/docutils/writers/html4css1.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/html5_polyglot.pyi b/stubs/docutils/docutils/writers/html5_polyglot.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/html5_polyglot.pyi +++ b/stubs/docutils/docutils/writers/html5_polyglot.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/latex2e.pyi b/stubs/docutils/docutils/writers/latex2e.pyi index 9719f2759620..69f1fda5bc6f 100644 --- a/stubs/docutils/docutils/writers/latex2e.pyi +++ b/stubs/docutils/docutils/writers/latex2e.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import ClassVar from docutils.utils import Reporter @@ -17,4 +16,4 @@ class Babel: def language_name(self, language_code: str) -> str: ... def get_language(self) -> str: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/manpage.pyi b/stubs/docutils/docutils/writers/manpage.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/manpage.pyi +++ b/stubs/docutils/docutils/writers/manpage.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/null.pyi b/stubs/docutils/docutils/writers/null.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/null.pyi +++ b/stubs/docutils/docutils/writers/null.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/odf_odt.pyi b/stubs/docutils/docutils/writers/odf_odt.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/odf_odt.pyi +++ b/stubs/docutils/docutils/writers/odf_odt.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/docutils/docutils/writers/pep_html.pyi b/stubs/docutils/docutils/writers/pep_html.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/pep_html.pyi +++ b/stubs/docutils/docutils/writers/pep_html.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/pseudoxml.pyi b/stubs/docutils/docutils/writers/pseudoxml.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/pseudoxml.pyi +++ b/stubs/docutils/docutils/writers/pseudoxml.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/s5_html.pyi b/stubs/docutils/docutils/writers/s5_html.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/s5_html.pyi +++ b/stubs/docutils/docutils/writers/s5_html.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/docutils/docutils/writers/xetex.pyi b/stubs/docutils/docutils/writers/xetex.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/docutils/docutils/writers/xetex.pyi +++ b/stubs/docutils/docutils/writers/xetex.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/flake8-typing-imports/flake8_typing_imports.pyi b/stubs/flake8-typing-imports/flake8_typing_imports.pyi index 93212549f941..e9da0dae47c4 100644 --- a/stubs/flake8-typing-imports/flake8_typing_imports.pyi +++ b/stubs/flake8-typing-imports/flake8_typing_imports.pyi @@ -1,6 +1,5 @@ import argparse import ast -from _typeshed import Incomplete from collections.abc import Generator from typing import Any, ClassVar @@ -14,4 +13,4 @@ class Plugin: def __init__(self, tree: ast.AST) -> None: ... def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... -def __getattr__(name: str) -> Incomplete: ... # incomplete (other attributes are normally not accessed) +def __getattr__(name: str): ... # incomplete module (other attributes are normally not accessed) diff --git a/stubs/gdb/gdb/dap/breakpoint.pyi b/stubs/gdb/gdb/dap/breakpoint.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/breakpoint.pyi +++ b/stubs/gdb/gdb/dap/breakpoint.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/bt.pyi b/stubs/gdb/gdb/dap/bt.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/bt.pyi +++ b/stubs/gdb/gdb/dap/bt.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/disassemble.pyi b/stubs/gdb/gdb/dap/disassemble.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/disassemble.pyi +++ b/stubs/gdb/gdb/dap/disassemble.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/gdb/gdb/dap/evaluate.pyi b/stubs/gdb/gdb/dap/evaluate.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/evaluate.pyi +++ b/stubs/gdb/gdb/dap/evaluate.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/events.pyi b/stubs/gdb/gdb/dap/events.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/events.pyi +++ b/stubs/gdb/gdb/dap/events.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/frames.pyi b/stubs/gdb/gdb/dap/frames.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/frames.pyi +++ b/stubs/gdb/gdb/dap/frames.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/io.pyi b/stubs/gdb/gdb/dap/io.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/io.pyi +++ b/stubs/gdb/gdb/dap/io.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/launch.pyi b/stubs/gdb/gdb/dap/launch.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/launch.pyi +++ b/stubs/gdb/gdb/dap/launch.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/locations.pyi b/stubs/gdb/gdb/dap/locations.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/locations.pyi +++ b/stubs/gdb/gdb/dap/locations.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/memory.pyi b/stubs/gdb/gdb/dap/memory.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/memory.pyi +++ b/stubs/gdb/gdb/dap/memory.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/modules.pyi b/stubs/gdb/gdb/dap/modules.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/modules.pyi +++ b/stubs/gdb/gdb/dap/modules.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/next.pyi b/stubs/gdb/gdb/dap/next.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/next.pyi +++ b/stubs/gdb/gdb/dap/next.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/pause.pyi b/stubs/gdb/gdb/dap/pause.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/pause.pyi +++ b/stubs/gdb/gdb/dap/pause.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/gdb/gdb/dap/scopes.pyi b/stubs/gdb/gdb/dap/scopes.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/scopes.pyi +++ b/stubs/gdb/gdb/dap/scopes.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/server.pyi b/stubs/gdb/gdb/dap/server.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/server.pyi +++ b/stubs/gdb/gdb/dap/server.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/sources.pyi b/stubs/gdb/gdb/dap/sources.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/sources.pyi +++ b/stubs/gdb/gdb/dap/sources.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/startup.pyi b/stubs/gdb/gdb/dap/startup.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/startup.pyi +++ b/stubs/gdb/gdb/dap/startup.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/state.pyi b/stubs/gdb/gdb/dap/state.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/state.pyi +++ b/stubs/gdb/gdb/dap/state.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/threads.pyi b/stubs/gdb/gdb/dap/threads.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/threads.pyi +++ b/stubs/gdb/gdb/dap/threads.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/typecheck.pyi b/stubs/gdb/gdb/dap/typecheck.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/typecheck.pyi +++ b/stubs/gdb/gdb/dap/typecheck.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/gdb/gdb/dap/varref.pyi b/stubs/gdb/gdb/dap/varref.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/gdb/gdb/dap/varref.pyi +++ b/stubs/gdb/gdb/dap/varref.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/python-datemath/datemath/helpers.pyi b/stubs/python-datemath/datemath/helpers.pyi index ea600a2c4750..a8063825e2af 100644 --- a/stubs/python-datemath/datemath/helpers.pyi +++ b/stubs/python-datemath/datemath/helpers.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - import arrow class DateMathException(Exception): ... @@ -7,4 +5,4 @@ class DateMathException(Exception): ... def parse( expression: str, now: arrow.Arrow | None = None, tz: str = "UTC", type: str | None = None, roundDown: bool = True ) -> arrow.Arrow: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/tensorflow/tensorflow/__init__.pyi b/stubs/tensorflow/tensorflow/__init__.pyi index 298bc5d834e0..647e29ab0988 100644 --- a/stubs/tensorflow/tensorflow/__init__.pyi +++ b/stubs/tensorflow/tensorflow/__init__.pyi @@ -432,4 +432,4 @@ def gather_nd( name: str | None = None, bad_indices_policy: Literal["", "DEFAULT", "ERROR", "IGNORE"] = "", ) -> Tensor: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/config/__init__.pyi b/stubs/tensorflow/tensorflow/config/__init__.pyi index 78721953f462..1eb72d000ff9 100644 --- a/stubs/tensorflow/tensorflow/config/__init__.pyi +++ b/stubs/tensorflow/tensorflow/config/__init__.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import NamedTuple from tensorflow.config import experimental as experimental @@ -10,4 +9,4 @@ class PhysicalDevice(NamedTuple): def list_physical_devices(device_type: None | str = None) -> list[PhysicalDevice]: ... def get_visible_devices(device_type: None | str = None) -> list[PhysicalDevice]: ... def set_visible_devices(devices: list[PhysicalDevice] | PhysicalDevice, device_type: None | str = None) -> None: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/config/experimental.pyi b/stubs/tensorflow/tensorflow/config/experimental.pyi index 53b4277656ab..1cb74feba722 100644 --- a/stubs/tensorflow/tensorflow/config/experimental.pyi +++ b/stubs/tensorflow/tensorflow/config/experimental.pyi @@ -1,5 +1,4 @@ import typing_extensions -from _typeshed import Incomplete from typing import TypedDict from tensorflow.config import PhysicalDevice @@ -14,4 +13,4 @@ def reset_memory_stats(device: str) -> None: ... def get_memory_usage(device: PhysicalDevice) -> int: ... def get_memory_growth(device: PhysicalDevice) -> bool: ... def set_memory_growth(device: PhysicalDevice, enable: bool) -> None: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/data/__init__.pyi b/stubs/tensorflow/tensorflow/data/__init__.pyi index 305043f7248f..37520262c444 100644 --- a/stubs/tensorflow/tensorflow/data/__init__.pyi +++ b/stubs/tensorflow/tensorflow/data/__init__.pyi @@ -269,4 +269,4 @@ class TFRecordDataset(Dataset[tf.Tensor]): @property def element_spec(self) -> tf.TensorSpec: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/data/experimental.pyi b/stubs/tensorflow/tensorflow/data/experimental.pyi index ad2394dab4ec..ecae0f23c976 100644 --- a/stubs/tensorflow/tensorflow/data/experimental.pyi +++ b/stubs/tensorflow/tensorflow/data/experimental.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Callable, Sequence from typing import Final, TypeVar @@ -30,4 +29,4 @@ def sample_from_datasets( seed: int | None = None, stop_on_empty_dataset: bool = False, ) -> Dataset[_T1]: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/tensorflow/tensorflow/distribute/__init__.pyi b/stubs/tensorflow/tensorflow/distribute/__init__.pyi index e129db61aab8..2b3c667c643a 100644 --- a/stubs/tensorflow/tensorflow/distribute/__init__.pyi +++ b/stubs/tensorflow/tensorflow/distribute/__init__.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from tensorflow.python.distribute.distribute_lib import Strategy as Strategy -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/distribute/coordinator.pyi b/stubs/tensorflow/tensorflow/distribute/coordinator.pyi index 0cda9f390770..717bda55e32e 100644 --- a/stubs/tensorflow/tensorflow/distribute/coordinator.pyi +++ b/stubs/tensorflow/tensorflow/distribute/coordinator.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from .experimental.coordinator import RemoteValue as RemoteValue -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi b/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi index 92d1f6bcb9ed..a6058895d03f 100644 --- a/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi +++ b/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Generic, TypeVar from tensorflow._aliases import AnyArray @@ -9,4 +8,4 @@ class RemoteValue(Generic[_Value_co]): def fetch(self) -> AnyArray: ... def get(self) -> _Value_co: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/dtypes.pyi b/stubs/tensorflow/tensorflow/dtypes.pyi index a17fb5d4d16f..a65dbd1f68e3 100644 --- a/stubs/tensorflow/tensorflow/dtypes.pyi +++ b/stubs/tensorflow/tensorflow/dtypes.pyi @@ -54,4 +54,4 @@ quint16: DType string: DType def as_dtype(type_value: DTypeLike) -> DType: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/experimental/__init__.pyi b/stubs/tensorflow/tensorflow/experimental/__init__.pyi index 1f4add7b37ca..7ed331669113 100644 --- a/stubs/tensorflow/tensorflow/experimental/__init__.pyi +++ b/stubs/tensorflow/tensorflow/experimental/__init__.pyi @@ -7,4 +7,4 @@ _T_co = TypeVar("_T_co", covariant=True) class Optional(ABC, Generic[_T_co]): def __getattr__(self, name: str) -> Incomplete: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/experimental/dtensor.pyi b/stubs/tensorflow/tensorflow/experimental/dtensor.pyi index 178d1211f45a..5e374720e647 100644 --- a/stubs/tensorflow/tensorflow/experimental/dtensor.pyi +++ b/stubs/tensorflow/tensorflow/experimental/dtensor.pyi @@ -16,4 +16,4 @@ class Mesh: use_xla_spmd: bool = False, ) -> None: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/tensorflow/tensorflow/io/__init__.pyi b/stubs/tensorflow/tensorflow/io/__init__.pyi index 26499ce558fb..4f7a2f795d94 100644 --- a/stubs/tensorflow/tensorflow/io/__init__.pyi +++ b/stubs/tensorflow/tensorflow/io/__init__.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Iterable, Mapping from types import TracebackType from typing import Literal, NamedTuple @@ -105,4 +104,4 @@ class RaggedFeature(NamedTuple): def parse_example( serialized: TensorCompatible, features: _FeatureSpecs, example_names: Iterable[str] | None = None, name: str | None = None ) -> dict[str, TensorLike]: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/io/gfile.pyi b/stubs/tensorflow/tensorflow/io/gfile.pyi index ed5e3d6815d3..85ac9ae663c8 100644 --- a/stubs/tensorflow/tensorflow/io/gfile.pyi +++ b/stubs/tensorflow/tensorflow/io/gfile.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, StrOrBytesPath +from _typeshed import StrOrBytesPath from collections.abc import Iterable def rmtree(path: StrOrBytesPath) -> None: ... @@ -8,4 +8,4 @@ def exists(path: StrOrBytesPath) -> bool: ... def copy(src: StrOrBytesPath, dst: StrOrBytesPath, overwrite: bool = False) -> None: ... def makedirs(path: StrOrBytesPath) -> None: ... def glob(pattern: str | bytes | Iterable[str | bytes]) -> list[str]: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/__init__.pyi b/stubs/tensorflow/tensorflow/keras/__init__.pyi index 4aa06e9b9e67..c0fd26f618ad 100644 --- a/stubs/tensorflow/tensorflow/keras/__init__.pyi +++ b/stubs/tensorflow/tensorflow/keras/__init__.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from tensorflow.keras import ( activations as activations, callbacks as callbacks, @@ -14,4 +12,4 @@ from tensorflow.keras import ( ) from tensorflow.keras.models import Model as Model -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/constraints.pyi b/stubs/tensorflow/tensorflow/keras/constraints.pyi index 29e362ee2e35..ccc6a92f445b 100644 --- a/stubs/tensorflow/tensorflow/keras/constraints.pyi +++ b/stubs/tensorflow/tensorflow/keras/constraints.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Callable from typing import Any, overload @@ -14,4 +13,4 @@ def get(identifier: None) -> None: ... def get(identifier: str | dict[str, Any] | Constraint) -> Constraint: ... @overload def get(identifier: Callable[[Tensor], Tensor]) -> Callable[[Tensor], Tensor]: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/initializers.pyi b/stubs/tensorflow/tensorflow/keras/initializers.pyi index 4f79faaf285d..f24c31b42ca4 100644 --- a/stubs/tensorflow/tensorflow/keras/initializers.pyi +++ b/stubs/tensorflow/tensorflow/keras/initializers.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Callable from typing import Any, overload from typing_extensions import Self, TypeAlias @@ -48,4 +47,4 @@ def get(identifier: None) -> None: ... def get(identifier: str | Initializer | dict[str, Any] | type[Initializer]) -> Initializer: ... @overload def get(identifier: Callable[[ShapeLike], Tensor]) -> Callable[[ShapeLike], Tensor]: ... 
-def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi b/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi index 21e4606f6680..db7a6b91aed7 100644 --- a/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi +++ b/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi @@ -444,4 +444,4 @@ class GaussianDropout(Layer[tf.Tensor, tf.Tensor]): name: str | None = None, ) -> None: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/losses.pyi b/stubs/tensorflow/tensorflow/keras/losses.pyi index 5c07272f7906..e6ea69e47553 100644 --- a/stubs/tensorflow/tensorflow/keras/losses.pyi +++ b/stubs/tensorflow/tensorflow/keras/losses.pyi @@ -206,4 +206,4 @@ def get(identifier: _FuncT) -> _FuncT: ... # This is complete with respect to methods documented defined here, # but many methods get re-exported here from tf.keras.metrics that aren't # covered yet. -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/metrics.pyi b/stubs/tensorflow/tensorflow/keras/metrics.pyi index 9e3d1e4564de..1d2c21918ae9 100644 --- a/stubs/tensorflow/tensorflow/keras/metrics.pyi +++ b/stubs/tensorflow/tensorflow/keras/metrics.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from abc import ABCMeta, abstractmethod from collections.abc import Callable, Iterable, Sequence from typing import Any, Literal @@ -116,4 +115,4 @@ def binary_crossentropy( def categorical_crossentropy( y_true: TensorCompatible, y_pred: TensorCompatible, from_logits: bool = False, label_smoothing: float = 0.0, axis: int = -1 ) -> Tensor: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/models.pyi b/stubs/tensorflow/tensorflow/keras/models.pyi index 262bd49c897f..079066d4bf47 100644 --- a/stubs/tensorflow/tensorflow/keras/models.pyi +++ b/stubs/tensorflow/tensorflow/keras/models.pyi @@ -166,4 +166,4 @@ class Model(Layer[_InputT_contra, _OutputT_co]): def compile_from_config(self, config: dict[str, Any]) -> Self: ... def export(self, filepath: str | Path, format: str = "tf_saved_model", verbose: bool = True) -> None: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/optimizers/__init__.pyi b/stubs/tensorflow/tensorflow/keras/optimizers/__init__.pyi index a1ccef2f9a4f..1b2ae8fb8226 100644 --- a/stubs/tensorflow/tensorflow/keras/optimizers/__init__.pyi +++ b/stubs/tensorflow/tensorflow/keras/optimizers/__init__.pyi @@ -4,4 +4,4 @@ from tensorflow.keras.optimizers import legacy as legacy, schedules as schedules Optimizer = Incomplete -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/optimizers/legacy/__init__.pyi b/stubs/tensorflow/tensorflow/keras/optimizers/legacy/__init__.pyi index ad73797241c0..dc69ac7e2d51 100644 --- a/stubs/tensorflow/tensorflow/keras/optimizers/legacy/__init__.pyi +++ b/stubs/tensorflow/tensorflow/keras/optimizers/legacy/__init__.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Callable, Iterable from typing import Any from typing_extensions import TypeAlias @@ -59,4 +58,4 @@ class SGD(Optimizer): self, learning_rate: _LearningRate = 0.01, momentum: float = 0.0, nesterov: bool = False, name: str = "SGD", **kwargs: Any ) -> None: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/keras/regularizers.pyi b/stubs/tensorflow/tensorflow/keras/regularizers.pyi index b8cbac2506b3..448fb65870f3 100644 --- a/stubs/tensorflow/tensorflow/keras/regularizers.pyi +++ b/stubs/tensorflow/tensorflow/keras/regularizers.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Callable from typing import Any, overload from typing_extensions import Self, TypeAlias @@ -19,4 +18,4 @@ def get(identifier: None) -> None: ... def get(identifier: str | dict[str, Any] | Regularizer) -> Regularizer: ... @overload def get(identifier: Callable[[Tensor], Tensor]) -> Callable[[Tensor], Tensor]: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/linalg.pyi b/stubs/tensorflow/tensorflow/linalg.pyi index 6c364f27362a..7294144cf88d 100644 --- a/stubs/tensorflow/tensorflow/linalg.pyi +++ b/stubs/tensorflow/tensorflow/linalg.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from builtins import bool as _bool from collections.abc import Iterable from typing import Literal, overload @@ -53,4 +52,4 @@ def eye( name: str | None = None, ) -> Tensor: ... def band_part(input: TensorCompatible, num_lower: Integer, num_upper: Integer, name: str | None = None) -> Tensor: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/math.pyi b/stubs/tensorflow/tensorflow/math.pyi index 2e72ada0a8f2..b64aeae674b5 100644 --- a/stubs/tensorflow/tensorflow/math.pyi +++ b/stubs/tensorflow/tensorflow/math.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Iterable from typing import TypeVar, overload from typing_extensions import TypeAlias @@ -296,4 +295,4 @@ def count_nonzero( dtype: DTypeLike = ..., name: str | None = None, ) -> Tensor: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/nn.pyi b/stubs/tensorflow/tensorflow/nn.pyi index 4e902e6b6bf1..10b1b88b6111 100644 --- a/stubs/tensorflow/tensorflow/nn.pyi +++ b/stubs/tensorflow/tensorflow/nn.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Sequence from typing import Any, Literal, overload @@ -192,4 +191,4 @@ def safe_embedding_lookup_sparse( name: str | None = None, allow_fast_lookup: bool = False, ) -> Tensor: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... 
# incomplete module diff --git a/stubs/tensorflow/tensorflow/python/__init__.pyi b/stubs/tensorflow/tensorflow/python/__init__.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/tensorflow/tensorflow/python/__init__.pyi +++ b/stubs/tensorflow/tensorflow/python/__init__.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/python/distribute/distribute_lib.pyi b/stubs/tensorflow/tensorflow/python/distribute/distribute_lib.pyi index 87bbb0425c99..ee0c2fc8cbe8 100644 --- a/stubs/tensorflow/tensorflow/python/distribute/distribute_lib.pyi +++ b/stubs/tensorflow/tensorflow/python/distribute/distribute_lib.pyi @@ -2,4 +2,4 @@ from _typeshed import Incomplete Strategy = Incomplete -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/python/keras/__init__.pyi b/stubs/tensorflow/tensorflow/python/keras/__init__.pyi index 0f6820f054ea..5b0f74feb261 100644 --- a/stubs/tensorflow/tensorflow/python/keras/__init__.pyi +++ b/stubs/tensorflow/tensorflow/python/keras/__init__.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/python/trackable/resource.pyi b/stubs/tensorflow/tensorflow/python/trackable/resource.pyi index 2d50d091bfb4..bfa997840f30 100644 --- a/stubs/tensorflow/tensorflow/python/trackable/resource.pyi +++ b/stubs/tensorflow/tensorflow/python/trackable/resource.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from tensorflow.python.trackable.base import Trackable class _ResourceMetaclass(type): ... @@ -8,4 +6,4 @@ class _ResourceMetaclass(type): ... # it is needed for the public signatures of some APIs. class CapturableResource(Trackable, metaclass=_ResourceMetaclass): ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/raw_ops.pyi b/stubs/tensorflow/tensorflow/raw_ops.pyi index a7cf1cb71b70..8f84ed018f03 100644 --- a/stubs/tensorflow/tensorflow/raw_ops.pyi +++ b/stubs/tensorflow/tensorflow/raw_ops.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Literal from tensorflow import Operation, Tensor @@ -42,4 +41,4 @@ def ResourceApplyAdam( use_nesterov: bool = False, name: str | None = None, ) -> Operation: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/sparse.pyi b/stubs/tensorflow/tensorflow/sparse.pyi index dd6f3b74bf58..1cd1da7f361d 100644 --- a/stubs/tensorflow/tensorflow/sparse.pyi +++ b/stubs/tensorflow/tensorflow/sparse.pyi @@ -28,4 +28,4 @@ class SparseTensor(metaclass=ABCMeta): def __mul__(self, y: _SparseTensorCompatible) -> SparseTensor: ... def __getattr__(self, name: str) -> Incomplete: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/train/__init__.pyi b/stubs/tensorflow/tensorflow/train/__init__.pyi index 210272635eae..c40249a92c5e 100644 --- a/stubs/tensorflow/tensorflow/train/__init__.pyi +++ b/stubs/tensorflow/tensorflow/train/__init__.pyi @@ -72,4 +72,4 @@ class CheckpointManager: def latest_checkpoint(checkpoint_dir: str, latest_filename: str | None = None) -> str: ... 
def load_variable(ckpt_dir_or_file: str, name: str) -> np.ndarray[Any, Any]: ... def list_variables(ckpt_dir_or_file: str) -> list[tuple[str, list[int]]]: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/train/experimental.pyi b/stubs/tensorflow/tensorflow/train/experimental.pyi index c7f3c0ded5ec..46dc7d0dad11 100644 --- a/stubs/tensorflow/tensorflow/train/experimental.pyi +++ b/stubs/tensorflow/tensorflow/train/experimental.pyi @@ -1,5 +1,4 @@ import abc -from _typeshed import Incomplete from typing_extensions import Self from tensorflow.python.trackable.base import Trackable @@ -10,4 +9,4 @@ class PythonState(Trackable, metaclass=abc.ABCMeta): @abc.abstractmethod def deserialize(self, string_value: str) -> Self: ... -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module diff --git a/stubs/tensorflow/tensorflow/types/experimental.pyi b/stubs/tensorflow/tensorflow/types/experimental.pyi index 5d4a792cb2e3..178a3e92ba0b 100644 --- a/stubs/tensorflow/tensorflow/types/experimental.pyi +++ b/stubs/tensorflow/tensorflow/types/experimental.pyi @@ -1,5 +1,4 @@ import abc -from _typeshed import Incomplete from typing import Any, Generic, TypeVar, overload from typing_extensions import ParamSpec @@ -28,4 +27,4 @@ class PolymorphicFunction(Callable[_P, _R_co], metaclass=abc.ABCMeta): GenericFunction = PolymorphicFunction -def __getattr__(name: str) -> Incomplete: ... +def __getattr__(name: str): ... # incomplete module From 0f3ac8eb4d8b9d89a12106954791880b681eadb7 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 13 May 2025 09:15:58 +0000 Subject: [PATCH 357/388] Bump `pdb` and `bdb` to 3.14 (#14042) --- stdlib/@tests/stubtest_allowlists/py314.txt | 15 ----- stdlib/bdb.pyi | 17 ++++- stdlib/pdb.pyi | 74 ++++++++++++++++----- 3 files changed, 73 insertions(+), 33 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 238ea1e3b58e..9697fef404e9 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -33,11 +33,6 @@ asyncio.events._AbstractEventLoopPolicy asyncio.events._get_event_loop_policy asyncio.events._set_event_loop_policy asyncio.tasks.eager_task_factory -bdb.Bdb.__init__ -bdb.Bdb.disable_current_event -bdb.Bdb.restart_events -bdb.Bdb.start_trace -bdb.Bdb.stop_trace builtins.BaseExceptionGroup.split builtins.BaseExceptionGroup.subgroup builtins.ExceptionGroup.split @@ -110,16 +105,6 @@ multiprocessing.process.BaseProcess.interrupt multiprocessing.synchronize.SemLock.locked os.__all__ os.readinto -pdb.__all__ -pdb.Pdb.__init__ -pdb.Pdb.checkline -pdb.Pdb.complete_multiline_names -pdb.Pdb.print_stack_trace -pdb.Pdb.set_trace -pdb.Pdb.set_trace_async -pdb.get_default_backend -pdb.set_default_backend -pdb.set_trace pkgutil.__all__ pkgutil.find_loader pkgutil.get_loader diff --git a/stdlib/bdb.pyi b/stdlib/bdb.pyi index 2004874a52b2..b73f894093ce 100644 --- a/stdlib/bdb.pyi +++ b/stdlib/bdb.pyi @@ -3,13 +3,14 @@ from _typeshed import ExcInfo, TraceFunction, Unused from collections.abc import Callable, Iterable, Iterator, Mapping from contextlib import contextmanager from types import CodeType, FrameType, TracebackType -from typing import IO, Any, Final, SupportsInt, TypeVar -from typing_extensions import ParamSpec +from typing import IO, Any, Final, Literal, SupportsInt, TypeVar +from typing_extensions import ParamSpec, TypeAlias 
__all__ = ["BdbQuit", "Bdb", "Breakpoint"] _T = TypeVar("_T") _P = ParamSpec("_P") +_Backend: TypeAlias = Literal["settrace", "monitoring"] # A union of code-object flags at runtime. # The exact values of code-object flags are implementation details, @@ -28,7 +29,12 @@ class Bdb: stopframe: FrameType | None returnframe: FrameType | None stoplineno: int - def __init__(self, skip: Iterable[str] | None = None) -> None: ... + if sys.version_info >= (3, 14): + backend: _Backend + def __init__(self, skip: Iterable[str] | None = None, backend: _Backend = "settrace") -> None: ... + else: + def __init__(self, skip: Iterable[str] | None = None) -> None: ... + def canonic(self, filename: str) -> str: ... def reset(self) -> None: ... if sys.version_info >= (3, 12): @@ -85,6 +91,11 @@ class Bdb: def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: ... def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... + if sys.version_info >= (3, 14): + def start_trace(self) -> None: ... + def stop_trace(self) -> None: ... + def disable_current_event(self) -> None: ... + def restart_events(self) -> None: ... class Breakpoint: next: int diff --git a/stdlib/pdb.pyi b/stdlib/pdb.pyi index 61e8b7176e84..ad69fcab16de 100644 --- a/stdlib/pdb.pyi +++ b/stdlib/pdb.pyi @@ -1,17 +1,21 @@ import signal import sys -from bdb import Bdb +from bdb import Bdb, _Backend from cmd import Cmd from collections.abc import Callable, Iterable, Mapping, Sequence from inspect import _SourceObjectType +from linecache import _ModuleGlobals from types import CodeType, FrameType, TracebackType -from typing import IO, Any, ClassVar, Final, TypeVar -from typing_extensions import ParamSpec, Self +from typing import IO, Any, ClassVar, Final, Literal, TypeVar +from typing_extensions import ParamSpec, Self, TypeAlias __all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"] +if sys.version_info >= (3, 14): + __all__ += ["set_default_backend", "get_default_backend"] _T = TypeVar("_T") _P = ParamSpec("_P") +_Mode: TypeAlias = Literal["inline", "cli"] line_prefix: str # undocumented @@ -21,7 +25,16 @@ def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[s def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: ... def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ... def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... -def set_trace(*, header: str | None = None) -> None: ... + +if sys.version_info >= (3, 14): + def set_default_backend(backend: _Backend) -> None: ... + def get_default_backend() -> _Backend: ... + def set_trace(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: ... + async def set_trace_async(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: ... + +else: + def set_trace(*, header: str | None = None) -> None: ... + def post_mortem(t: TracebackType | None = None) -> None: ... def pm() -> None: ... 
@@ -47,15 +60,35 @@ class Pdb(Bdb, Cmd): curindex: int curframe: FrameType | None curframe_locals: Mapping[str, Any] - def __init__( - self, - completekey: str = "tab", - stdin: IO[str] | None = None, - stdout: IO[str] | None = None, - skip: Iterable[str] | None = None, - nosigint: bool = False, - readrc: bool = True, - ) -> None: ... + if sys.version_info >= (3, 14): + mode: _Mode | None + colorize: bool + def __init__( + self, + completekey: str = "tab", + stdin: IO[str] | None = None, + stdout: IO[str] | None = None, + skip: Iterable[str] | None = None, + nosigint: bool = False, + readrc: bool = True, + mode: _Mode | None = None, + backend: _Backend | None = None, + colorize: bool = False, + ) -> None: ... + else: + def __init__( + self, + completekey: str = "tab", + stdin: IO[str] | None = None, + stdout: IO[str] | None = None, + skip: Iterable[str] | None = None, + nosigint: bool = False, + readrc: bool = True, + ) -> None: ... + if sys.version_info >= (3, 14): + def set_trace(self, frame: FrameType | None = None, *, commands: Iterable[str] | None = None) -> None: ... + async def set_trace_async(self, frame: FrameType | None = None, *, commands: Iterable[str] | None = None) -> None: ... + def forget(self) -> None: ... def setup(self, f: FrameType | None, tb: TracebackType | None) -> None: ... if sys.version_info < (3, 11): @@ -75,14 +108,25 @@ class Pdb(Bdb, Cmd): def handle_command_def(self, line: str) -> bool: ... def defaultFile(self) -> str: ... def lineinfo(self, identifier: str) -> tuple[None, None, None] | tuple[str, str, int]: ... - def checkline(self, filename: str, lineno: int) -> int: ... + if sys.version_info >= (3, 14): + def checkline(self, filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> int: ... + else: + def checkline(self, filename: str, lineno: int) -> int: ... + def _getval(self, arg: str) -> object: ... - def print_stack_trace(self) -> None: ... + if sys.version_info >= (3, 14): + def print_stack_trace(self, count: int | None = None) -> None: ... + else: + def print_stack_trace(self) -> None: ... + def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = "\n-> ") -> None: ... def lookupmodule(self, filename: str) -> str | None: ... if sys.version_info < (3, 11): def _runscript(self, filename: str) -> None: ... + if sys.version_info >= (3, 14): + def complete_multiline_names(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + if sys.version_info >= (3, 13): def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... 
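Editor's note: the patch above only updates the stub signatures; as a quick sanity check of the typed surface it adds, here is a minimal usage sketch of the 3.14 additions it covers (Bdb's `backend` argument and trace helpers, pdb's default-backend helpers, and the new `commands`/`mode`/`colorize` parameters). This is not part of the patch. It assumes a Python 3.14 interpreter; the chosen backend values and the seeded debugger commands are illustrative only.

    import bdb
    import pdb
    import sys

    if sys.version_info >= (3, 14):
        # Module-level helpers typed as new in 3.14: pick the default tracing backend.
        pdb.set_default_backend("monitoring")   # accepts "settrace" or "monitoring"
        assert pdb.get_default_backend() == "monitoring"

        # Bdb now accepts a backend argument and exposes start/stop trace helpers.
        tracer = bdb.Bdb(skip=None, backend="monitoring")
        tracer.start_trace()
        tracer.stop_trace()

        # Pdb grew mode/backend/colorize keyword arguments, and set_trace() takes
        # an iterable of commands to run once the debugger gets control.
        debugger = pdb.Pdb(mode="inline", backend="settrace", colorize=True)
        pdb.set_trace(commands=["ll", "continue"])

The sketch exercises only signatures visible in the stub diff above; runtime behaviour (e.g. what the "monitoring" backend does under the hood) is outside the scope of this patch.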
From 29604c12b10aed0d33dcf0a0e530d5dc3210a5cf Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 13 May 2025 05:43:01 -0400 Subject: [PATCH 358/388] networkx: consistent `Unknown | None = None` (#14027) --- .../networkx/networkx/algorithms/boundary.pyi | 14 ++--- .../algorithms/centrality/flow_matrix.pyi | 4 +- .../algorithms/coloring/greedy_coloring.pyi | 2 +- .../algorithms/isomorphism/ismags.pyi | 9 +-- .../algorithms/isomorphism/isomorphvf2.pyi | 4 +- .../isomorphism/temporalisomorphvf2.pyi | 4 +- .../algorithms/isomorphism/vf2userfunc.pyi | 4 +- .../algorithms/lowest_common_ancestors.pyi | 2 +- .../networkx/algorithms/planarity.pyi | 2 +- .../networkx/algorithms/tree/branchings.pyi | 2 +- .../networkx/networkx/algorithms/vitality.pyi | 6 +- stubs/networkx/networkx/classes/function.pyi | 4 +- stubs/networkx/networkx/classes/graph.pyi | 2 +- .../networkx/networkx/classes/multigraph.pyi | 10 ++-- .../networkx/networkx/classes/reportviews.pyi | 18 ++---- stubs/networkx/networkx/convert.pyi | 2 +- stubs/networkx/networkx/drawing/layout.pyi | 51 +++++----------- stubs/networkx/networkx/drawing/nx_agraph.pyi | 2 +- stubs/networkx/networkx/drawing/nx_latex.pyi | 6 +- stubs/networkx/networkx/drawing/nx_pydot.pyi | 6 +- stubs/networkx/networkx/drawing/nx_pylab.pyi | 60 +++++++++---------- .../networkx/networkx/generators/classic.pyi | 34 +++++------ .../networkx/networkx/generators/cographs.pyi | 4 +- .../networkx/generators/community.pyi | 22 +++---- .../networkx/generators/degree_seq.pyi | 18 +++--- .../networkx/networkx/generators/directed.pyi | 16 +++-- .../networkx/generators/duplication.pyi | 6 +- stubs/networkx/networkx/generators/ego.pyi | 4 +- .../networkx/generators/expanders.pyi | 8 +-- .../networkx/generators/geometric.pyi | 48 ++------------- .../networkx/generators/harary_graph.pyi | 6 +- .../generators/internet_as_graphs.pyi | 2 +- .../networkx/generators/intersection.pyi | 8 +-- .../networkx/generators/joint_degree_seq.pyi | 6 +- .../networkx/networkx/generators/lattice.pyi | 12 +--- stubs/networkx/networkx/generators/line.pyi | 4 +- .../networkx/generators/random_graphs.pyi | 36 ++++++----- stubs/networkx/networkx/generators/small.pyi | 44 +++++++------- .../generators/spectral_graph_forge.pyi | 4 +- stubs/networkx/networkx/lazy_imports.pyi | 3 +- .../networkx/linalg/algebraicconnectivity.pyi | 32 ++-------- stubs/networkx/networkx/linalg/attrmatrix.pyi | 21 +------ .../networkx/linalg/bethehessianmatrix.pyi | 2 +- .../networkx/networkx/linalg/graphmatrix.pyi | 12 +--- .../networkx/linalg/laplacianmatrix.pyi | 14 +---- .../networkx/linalg/modularitymatrix.pyi | 4 +- stubs/networkx/networkx/linalg/spectrum.pyi | 4 +- stubs/networkx/networkx/readwrite/adjlist.pyi | 17 +----- .../networkx/networkx/readwrite/edgelist.pyi | 24 ++------ stubs/networkx/networkx/readwrite/gexf.pyi | 10 ++-- stubs/networkx/networkx/readwrite/gml.pyi | 8 +-- stubs/networkx/networkx/readwrite/graph6.pyi | 6 +- stubs/networkx/networkx/readwrite/graphml.pyi | 26 ++++---- .../readwrite/json_graph/node_link.pyi | 4 +- .../networkx/readwrite/multiline_adjlist.pyi | 17 +----- stubs/networkx/networkx/readwrite/sparse6.pyi | 6 +- stubs/networkx/networkx/readwrite/text.pyi | 13 ++-- stubs/networkx/networkx/relabel.pyi | 3 +- stubs/networkx/networkx/utils/configs.pyi | 4 +- stubs/networkx/networkx/utils/heaps.pyi | 6 +- .../networkx/networkx/utils/mapped_queue.pyi | 6 +- stubs/networkx/networkx/utils/misc.pyi | 11 ++-- .../networkx/utils/random_sequence.pyi | 14 ++--- stubs/networkx/networkx/utils/rcm.pyi | 4 +- 
stubs/networkx/networkx/utils/union_find.pyi | 2 +- 65 files changed, 269 insertions(+), 500 deletions(-) diff --git a/stubs/networkx/networkx/algorithms/boundary.pyi b/stubs/networkx/networkx/algorithms/boundary.pyi index 08fb9203269f..7cbec735e1b5 100644 --- a/stubs/networkx/networkx/algorithms/boundary.pyi +++ b/stubs/networkx/networkx/algorithms/boundary.pyi @@ -15,7 +15,7 @@ def edge_boundary( nbunch2: Iterable[Incomplete] | None = None, data=False, keys: bool = False, - default: Incomplete | None = None, + default=None, ) -> Generator[tuple[_Node, _Node], None, None]: ... @overload def edge_boundary( @@ -24,7 +24,7 @@ def edge_boundary( nbunch2: Iterable[Incomplete] | None = None, data=False, keys: bool = False, - default: Incomplete | None = None, + default=None, ) -> Generator[tuple[_Node, _Node, dict[str, Incomplete]], None, None]: ... @overload def edge_boundary( @@ -33,7 +33,7 @@ def edge_boundary( nbunch2: Iterable[Incomplete] | None = None, data=False, keys: bool = False, - default: Incomplete | None = None, + default=None, ) -> Generator[tuple[_Node, _Node, dict[str, Incomplete]], None, None]: ... @overload def edge_boundary( @@ -60,7 +60,7 @@ def edge_boundary( nbunch2: Iterable[Incomplete] | None = None, data=False, keys: bool = False, - default: Incomplete | None = None, + default=None, ) -> Generator[tuple[_Node, _Node, int], None, None]: ... @overload def edge_boundary( @@ -69,7 +69,7 @@ def edge_boundary( nbunch2: Iterable[Incomplete] | None = None, data=False, keys: bool = False, - default: Incomplete | None = None, + default=None, ) -> Generator[tuple[_Node, _Node, int], None, None]: ... @overload def edge_boundary( @@ -78,7 +78,7 @@ def edge_boundary( nbunch2: Iterable[Incomplete] | None = None, data=False, keys: bool = False, - default: Incomplete | None = None, + default=None, ) -> Generator[tuple[_Node, _Node, int, dict[str, Incomplete]], None, None]: ... @overload def edge_boundary( @@ -87,7 +87,7 @@ def edge_boundary( nbunch2: Iterable[Incomplete] | None = None, data=False, keys: bool = False, - default: Incomplete | None = None, + default=None, ) -> Generator[tuple[_Node, _Node, int, dict[str, Incomplete]], None, None]: ... @overload def edge_boundary( diff --git a/stubs/networkx/networkx/algorithms/centrality/flow_matrix.pyi b/stubs/networkx/networkx/algorithms/centrality/flow_matrix.pyi index 7b7f43908ffb..b2cc13b1419e 100644 --- a/stubs/networkx/networkx/algorithms/centrality/flow_matrix.pyi +++ b/stubs/networkx/networkx/algorithms/centrality/flow_matrix.pyi @@ -4,7 +4,7 @@ from collections.abc import Generator from networkx.utils.backends import _dispatchable @_dispatchable -def flow_matrix_row(G, weight: Incomplete | None = None, dtype=..., solver: str = "lu") -> Generator[Incomplete, None, None]: ... +def flow_matrix_row(G, weight=None, dtype=..., solver: str = "lu") -> Generator[Incomplete, None, None]: ... class InverseLaplacian: dtype: Incomplete @@ -13,7 +13,7 @@ class InverseLaplacian: C: Incomplete L1: Incomplete - def __init__(self, L, width: Incomplete | None = None, dtype: Incomplete | None = None) -> None: ... + def __init__(self, L, width=None, dtype=None) -> None: ... def init_solver(self, L) -> None: ... def solve(self, r) -> None: ... def solve_inverse(self, r) -> None: ... 
diff --git a/stubs/networkx/networkx/algorithms/coloring/greedy_coloring.pyi b/stubs/networkx/networkx/algorithms/coloring/greedy_coloring.pyi index 0ce06b531e84..9b7943f13bf2 100644 --- a/stubs/networkx/networkx/algorithms/coloring/greedy_coloring.pyi +++ b/stubs/networkx/networkx/algorithms/coloring/greedy_coloring.pyi @@ -19,7 +19,7 @@ __all__ = [ @_dispatchable def strategy_largest_first(G, colors): ... @_dispatchable -def strategy_random_sequential(G, colors, seed: Incomplete | None = None): ... +def strategy_random_sequential(G, colors, seed=None): ... @_dispatchable def strategy_smallest_last(G, colors): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi b/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi index e0dae00bd5fe..1a3866cdc974 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi @@ -9,14 +9,7 @@ class ISMAGS: node_equality: Incomplete edge_equality: Incomplete - def __init__( - self, - graph, - subgraph, - node_match: Incomplete | None = None, - edge_match: Incomplete | None = None, - cache: Incomplete | None = None, - ) -> None: ... + def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None) -> None: ... def find_isomorphisms(self, symmetry: bool = True) -> Generator[Incomplete, Incomplete, Incomplete]: ... def largest_common_subgraph(self, symmetry: bool = True) -> Generator[Incomplete, Incomplete, None]: ... def analyze_symmetry(self, graph, node_partitions, edge_colors): ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi b/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi index dd57778fcc7a..59aa12d827f2 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi @@ -54,7 +54,7 @@ class GMState: G2_node: Incomplete depth: Incomplete - def __init__(self, GM, G1_node: Incomplete | None = None, G2_node: Incomplete | None = None) -> None: ... + def __init__(self, GM, G1_node=None, G2_node=None) -> None: ... def restore(self) -> None: ... class DiGMState: @@ -63,5 +63,5 @@ class DiGMState: G2_node: Incomplete depth: Incomplete - def __init__(self, GM, G1_node: Incomplete | None = None, G2_node: Incomplete | None = None) -> None: ... + def __init__(self, GM, G1_node=None, G2_node=None) -> None: ... def restore(self) -> None: ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/temporalisomorphvf2.pyi b/stubs/networkx/networkx/algorithms/isomorphism/temporalisomorphvf2.pyi index 060f3d193541..7ab32a2abcb9 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/temporalisomorphvf2.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/temporalisomorphvf2.pyi @@ -23,8 +23,8 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher): def one_hop(self, Gx, Gx_node, core_x, pred, succ): ... def two_hop_pred(self, Gx, Gx_node, core_x, pred): ... def two_hop_succ(self, Gx, Gx_node, core_x, succ): ... - def preds(self, Gx, core_x, v, Gx_node: Incomplete | None = None): ... - def succs(self, Gx, core_x, v, Gx_node: Incomplete | None = None): ... + def preds(self, Gx, core_x, v, Gx_node=None): ... + def succs(self, Gx, core_x, v, Gx_node=None): ... def test_one(self, pred_dates, succ_dates): ... def test_two(self, pred_dates, succ_dates): ... def semantic_feasibility(self, G1_node, G2_node): ... 
diff --git a/stubs/networkx/networkx/algorithms/isomorphism/vf2userfunc.pyi b/stubs/networkx/networkx/algorithms/isomorphism/vf2userfunc.pyi index 3800601bc5e9..475020aa5b05 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/vf2userfunc.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/vf2userfunc.pyi @@ -10,7 +10,7 @@ class GraphMatcher(vf2.GraphMatcher): G1_adj: Incomplete G2_adj: Incomplete - def __init__(self, G1, G2, node_match: Incomplete | None = None, edge_match: Incomplete | None = None) -> None: ... + def __init__(self, G1, G2, node_match=None, edge_match=None) -> None: ... semantic_feasibility: Incomplete class DiGraphMatcher(vf2.DiGraphMatcher): @@ -19,7 +19,7 @@ class DiGraphMatcher(vf2.DiGraphMatcher): G1_adj: Incomplete G2_adj: Incomplete - def __init__(self, G1, G2, node_match: Incomplete | None = None, edge_match: Incomplete | None = None) -> None: ... + def __init__(self, G1, G2, node_match=None, edge_match=None) -> None: ... def semantic_feasibility(self, G1_node, G2_node): ... class MultiGraphMatcher(GraphMatcher): ... diff --git a/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi b/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi index 480a56cfdf5e..ab7a8e5472cd 100644 --- a/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi +++ b/stubs/networkx/networkx/algorithms/lowest_common_ancestors.pyi @@ -10,7 +10,7 @@ __all__ = ["all_pairs_lowest_common_ancestor", "tree_all_pairs_lowest_common_anc @_dispatchable def all_pairs_lowest_common_ancestor(G: DiGraph[_Node], pairs=None): ... @_dispatchable -def lowest_common_ancestor(G: DiGraph[_Node], node1, node2, default: Incomplete | None = None): ... +def lowest_common_ancestor(G: DiGraph[_Node], node1, node2, default=None): ... @_dispatchable def tree_all_pairs_lowest_common_ancestor( G: DiGraph[_Node], root: _Node | None = None, pairs=None diff --git a/stubs/networkx/networkx/algorithms/planarity.pyi b/stubs/networkx/networkx/algorithms/planarity.pyi index 43d10d50a84e..4aae6dfb9391 100644 --- a/stubs/networkx/networkx/algorithms/planarity.pyi +++ b/stubs/networkx/networkx/algorithms/planarity.pyi @@ -16,7 +16,7 @@ class Interval: low: Incomplete high: Incomplete - def __init__(self, low: Incomplete | None = None, high: Incomplete | None = None) -> None: ... + def __init__(self, low=None, high=None) -> None: ... def empty(self): ... def copy(self): ... def conflicting(self, b, planarity_state): ... diff --git a/stubs/networkx/networkx/algorithms/tree/branchings.pyi b/stubs/networkx/networkx/algorithms/tree/branchings.pyi index bc07c60e06eb..4232605cfb6f 100644 --- a/stubs/networkx/networkx/algorithms/tree/branchings.pyi +++ b/stubs/networkx/networkx/algorithms/tree/branchings.pyi @@ -57,7 +57,7 @@ class ArborescenceIterator: partition_key: str init_partition: Incomplete - def __init__(self, G, weight: str = "weight", minimum: bool = True, init_partition: Incomplete | None = None) -> None: ... + def __init__(self, G, weight: str = "weight", minimum: bool = True, init_partition=None) -> None: ... partition_queue: Incomplete def __iter__(self) -> Iterator[Incomplete]: ... 
diff --git a/stubs/networkx/networkx/algorithms/vitality.pyi b/stubs/networkx/networkx/algorithms/vitality.pyi index a9b84a11c10e..1d5af2c9935a 100644 --- a/stubs/networkx/networkx/algorithms/vitality.pyi +++ b/stubs/networkx/networkx/algorithms/vitality.pyi @@ -1,11 +1,7 @@ -from _typeshed import Incomplete - from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["closeness_vitality"] @_dispatchable -def closeness_vitality( - G: Graph[_Node], node: Incomplete | None = None, weight: str | None = None, wiener_index: float | None = None -): ... +def closeness_vitality(G: Graph[_Node], node=None, weight: str | None = None, wiener_index: float | None = None): ... diff --git a/stubs/networkx/networkx/classes/function.pyi b/stubs/networkx/networkx/classes/function.pyi index aa2d8623aec2..4af2327955a1 100644 --- a/stubs/networkx/networkx/classes/function.pyi +++ b/stubs/networkx/networkx/classes/function.pyi @@ -53,8 +53,8 @@ __all__ = [ _U = TypeVar("_U") def nodes(G): ... -def edges(G, nbunch: Incomplete | None = None): ... -def degree(G, nbunch: Incomplete | None = None, weight: Incomplete | None = None): ... +def edges(G, nbunch=None): ... +def degree(G, nbunch=None, weight=None): ... def neighbors(G, n): ... def number_of_nodes(G): ... def number_of_edges(G): ... diff --git a/stubs/networkx/networkx/classes/graph.pyi b/stubs/networkx/networkx/classes/graph.pyi index 0679630a9c07..557448a229ab 100644 --- a/stubs/networkx/networkx/classes/graph.pyi +++ b/stubs/networkx/networkx/classes/graph.pyi @@ -79,7 +79,7 @@ class Graph(Collection[_Node]): def neighbors(self, n: _Node) -> Iterator[_Node]: ... @cached_property def edges(self) -> OutEdgeView[_Node]: ... - def get_edge_data(self, u: _Node, v: _Node, default: Incomplete | None = None) -> Mapping[str, Incomplete]: ... + def get_edge_data(self, u: _Node, v: _Node, default=None) -> Mapping[str, Incomplete]: ... def adjacency(self) -> Iterator[tuple[_Node, Mapping[_Node, Mapping[str, Incomplete]]]]: ... @cached_property def degree(self) -> DiDegreeView[_Node]: ... diff --git a/stubs/networkx/networkx/classes/multigraph.pyi b/stubs/networkx/networkx/classes/multigraph.pyi index 1fce3b56522e..e5628285ac6e 100644 --- a/stubs/networkx/networkx/classes/multigraph.pyi +++ b/stubs/networkx/networkx/classes/multigraph.pyi @@ -14,15 +14,15 @@ __all__ = ["MultiGraph"] class MultiGraph(Graph[_Node]): edge_key_dict_factory: ClassVar[_MapFactory] - def __init__(self, incoming_graph_data: Incomplete | None = None, multigraph_input: bool | None = None, **attr) -> None: ... + def __init__(self, incoming_graph_data=None, multigraph_input: bool | None = None, **attr) -> None: ... @cached_property def adj(self) -> MultiAdjacencyView[_Node, _Node, dict[str, Incomplete]]: ... def new_edge_key(self, u: _Node, v: _Node) -> int: ... - def add_edge(self, u_for_edge, v_for_edge, key: Incomplete | None = None, **attr): ... # type: ignore[override] # Has an additional `key` keyword argument - def remove_edge(self, u, v, key: Incomplete | None = None): ... - def has_edge(self, u, v, key: Incomplete | None = None): ... + def add_edge(self, u_for_edge, v_for_edge, key=None, **attr): ... # type: ignore[override] # Has an additional `key` keyword argument + def remove_edge(self, u, v, key=None): ... + def has_edge(self, u, v, key=None): ... 
def get_edge_data( # type: ignore[override] # Has an additional `key` keyword argument - self, u, v, key: Incomplete | None = None, default: Incomplete | None = None + self, u, v, key=None, default=None ): ... def copy(self, as_view: bool = False) -> MultiGraph[_Node]: ... def to_directed(self, as_view: bool = False) -> MultiDiGraph[_Node]: ... diff --git a/stubs/networkx/networkx/classes/reportviews.pyi b/stubs/networkx/networkx/classes/reportviews.pyi index 0337ba24c45d..ff9078493a19 100644 --- a/stubs/networkx/networkx/classes/reportviews.pyi +++ b/stubs/networkx/networkx/classes/reportviews.pyi @@ -41,17 +41,13 @@ class NodeView(Mapping[_Node, dict[str, Any]], AbstractSet[_Node]): def __getitem__(self, n: _Node) -> dict[str, Any]: ... def __contains__(self, n: object) -> bool: ... @overload - def __call__(self, data: Literal[False] = False, default: Incomplete | None = None) -> Iterator[_Node]: ... + def __call__(self, data: Literal[False] = False, default=None) -> Iterator[_Node]: ... @overload - def __call__( - self, data: Literal[True] | str, default: Incomplete | None = None - ) -> Iterator[tuple[_Node, dict[str, Any]]]: ... - def data(self, data: bool | str = True, default: Incomplete | None = None) -> NodeDataView[_Node]: ... + def __call__(self, data: Literal[True] | str, default=None) -> Iterator[tuple[_Node, dict[str, Any]]]: ... + def data(self, data: bool | str = True, default=None) -> NodeDataView[_Node]: ... class NodeDataView(AbstractSet[_Node]): - def __init__( - self, nodedict: Mapping[str, Incomplete], data: bool | str = False, default: Incomplete | None = None - ) -> None: ... + def __init__(self, nodedict: Mapping[str, Incomplete], data: bool | str = False, default=None) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[tuple[_Node, Incomplete]]: ... # type: ignore[override] def __contains__(self, n: object) -> bool: ... @@ -74,9 +70,7 @@ class OutMultiDegreeView(DiDegreeView[_Node]): ... class EdgeViewABC(ABC): ... class OutEdgeDataView(EdgeViewABC, Generic[_Node, _D]): - def __init__( - self, viewer, nbunch: _NBunch[_Node] = None, data: bool = False, *, default: Incomplete | None = None - ) -> None: ... + def __init__(self, viewer, nbunch: _NBunch[_Node] = None, data: bool = False, *, default=None) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_D]: ... def __contains__(self, e: _Edge[_Node]) -> bool: ... @@ -87,7 +81,7 @@ class InEdgeDataView(OutEdgeDataView[_Node, _D]): ... class OutMultiEdgeDataView(OutEdgeDataView[_Node, _D]): keys: bool def __init__( - self, viewer, nbunch: _NBunch[_Node] = None, data: bool = False, *, default: Incomplete | None = None, keys: bool = False + self, viewer, nbunch: _NBunch[_Node] = None, data: bool = False, *, default=None, keys: bool = False ) -> None: ... class MultiEdgeDataView(OutEdgeDataView[_Node, _D]): ... diff --git a/stubs/networkx/networkx/convert.pyi b/stubs/networkx/networkx/convert.pyi index a85a117a939c..0f25eabffa87 100644 --- a/stubs/networkx/networkx/convert.pyi +++ b/stubs/networkx/networkx/convert.pyi @@ -20,7 +20,7 @@ def to_networkx_graph( @_dispatchable def to_dict_of_lists(G: Graph[_Node], nodelist: Collection[_Node] | None = None) -> dict[_Node, list[_Node]]: ... @_dispatchable -def from_dict_of_lists(d: dict[_Node, Iterable[_Node]], create_using: Incomplete | None = None) -> Graph[_Node]: ... +def from_dict_of_lists(d: dict[_Node, Iterable[_Node]], create_using=None) -> Graph[_Node]: ... 
def to_dict_of_dicts( G: Graph[_Node], nodelist: Collection[_Node] | None = None, edge_data=None ) -> dict[Incomplete, Incomplete]: ... diff --git a/stubs/networkx/networkx/drawing/layout.pyi b/stubs/networkx/networkx/drawing/layout.pyi index fae316ed4d09..56bafec5db6a 100644 --- a/stubs/networkx/networkx/drawing/layout.pyi +++ b/stubs/networkx/networkx/drawing/layout.pyi @@ -21,55 +21,34 @@ __all__ = [ "arf_layout", ] -def random_layout(G, center: Incomplete | None = None, dim: int = 2, seed: Incomplete | None = None): ... -def circular_layout(G, scale: float = 1, center: Incomplete | None = None, dim: int = 2): ... -def shell_layout( - G, - nlist: Incomplete | None = None, - rotate: Incomplete | None = None, - scale: float = 1, - center: Incomplete | None = None, - dim: int = 2, -): ... -def bipartite_layout( - G, nodes, align: str = "vertical", scale: float = 1, center: Incomplete | None = None, aspect_ratio: float = ... -): ... +def random_layout(G, center=None, dim: int = 2, seed=None): ... +def circular_layout(G, scale: float = 1, center=None, dim: int = 2): ... +def shell_layout(G, nlist=None, rotate=None, scale: float = 1, center=None, dim: int = 2): ... +def bipartite_layout(G, nodes, align: str = "vertical", scale: float = 1, center=None, aspect_ratio: float = ...): ... def spring_layout( G, - k: Incomplete | None = None, - pos: Incomplete | None = None, - fixed: Incomplete | None = None, + k=None, + pos=None, + fixed=None, iterations: int = 50, threshold: float = 0.0001, weight: str = "weight", scale: float = 1, - center: Incomplete | None = None, + center=None, dim: int = 2, - seed: Incomplete | None = None, + seed=None, ): ... fruchterman_reingold_layout = spring_layout -def kamada_kawai_layout( - G, - dist: Incomplete | None = None, - pos: Incomplete | None = None, - weight: str = "weight", - scale: float = 1, - center: Incomplete | None = None, - dim: int = 2, -): ... -def spectral_layout(G, weight: str = "weight", scale: float = 1, center: Incomplete | None = None, dim: int = 2): ... -def planar_layout(G, scale: float = 1, center: Incomplete | None = None, dim: int = 2): ... -def spiral_layout( - G, scale: float = 1, center: Incomplete | None = None, dim: int = 2, resolution: float = 0.35, equidistant: bool = False -): ... -def multipartite_layout( - G, subset_key: str = "subset", align: str = "vertical", scale: float = 1, center: Incomplete | None = None -): ... +def kamada_kawai_layout(G, dist=None, pos=None, weight: str = "weight", scale: float = 1, center=None, dim: int = 2): ... +def spectral_layout(G, weight: str = "weight", scale: float = 1, center=None, dim: int = 2): ... +def planar_layout(G, scale: float = 1, center=None, dim: int = 2): ... +def spiral_layout(G, scale: float = 1, center=None, dim: int = 2, resolution: float = 0.35, equidistant: bool = False): ... +def multipartite_layout(G, subset_key: str = "subset", align: str = "vertical", scale: float = 1, center=None): ... 
def arf_layout( G, - pos: Incomplete | None = None, + pos=None, scaling: float = 1, a: float = 1.1, etol: float = 1e-06, diff --git a/stubs/networkx/networkx/drawing/nx_agraph.pyi b/stubs/networkx/networkx/drawing/nx_agraph.pyi index 0c744f10e408..4082bd6153ec 100644 --- a/stubs/networkx/networkx/drawing/nx_agraph.pyi +++ b/stubs/networkx/networkx/drawing/nx_agraph.pyi @@ -12,7 +12,7 @@ _AGraph: TypeAlias = Incomplete __all__ = ["from_agraph", "to_agraph", "write_dot", "read_dot", "graphviz_layout", "pygraphviz_layout", "view_pygraphviz"] @_dispatchable -def from_agraph(A, create_using: Incomplete | None = None) -> Graph[Incomplete]: ... +def from_agraph(A, create_using=None) -> Graph[Incomplete]: ... def to_agraph(N: Graph[Hashable]) -> _AGraph: ... def write_dot(G: Graph[Hashable], path: str | TextIOBase) -> None: ... @_dispatchable diff --git a/stubs/networkx/networkx/drawing/nx_latex.pyi b/stubs/networkx/networkx/drawing/nx_latex.pyi index a7ca26c18821..1dcf1df0f7a8 100644 --- a/stubs/networkx/networkx/drawing/nx_latex.pyi +++ b/stubs/networkx/networkx/drawing/nx_latex.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - __all__ = ["to_latex_raw", "to_latex", "write_latex"] def to_latex_raw( @@ -27,8 +25,8 @@ def to_latex( edge_label_options: str = "edge_label_options", caption: str = "", latex_label: str = "", - sub_captions: Incomplete | None = None, - sub_labels: Incomplete | None = None, + sub_captions=None, + sub_labels=None, n_rows: int = 1, as_document: bool = True, document_wrapper: str = ..., diff --git a/stubs/networkx/networkx/drawing/nx_pydot.pyi b/stubs/networkx/networkx/drawing/nx_pydot.pyi index dbb039f03b56..1a84f8d510f0 100644 --- a/stubs/networkx/networkx/drawing/nx_pydot.pyi +++ b/stubs/networkx/networkx/drawing/nx_pydot.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["write_dot", "read_dot", "graphviz_layout", "pydot_layout", "to_pydot", "from_pydot"] @@ -10,5 +8,5 @@ def read_dot(path): ... @_dispatchable def from_pydot(P): ... def to_pydot(N): ... -def graphviz_layout(G, prog: str = "neato", root: Incomplete | None = None): ... -def pydot_layout(G, prog: str = "neato", root: Incomplete | None = None): ... +def graphviz_layout(G, prog: str = "neato", root=None): ... +def pydot_layout(G, prog: str = "neato", root=None): ... diff --git a/stubs/networkx/networkx/drawing/nx_pylab.pyi b/stubs/networkx/networkx/drawing/nx_pylab.pyi index 9ed051b6707b..042f1e1dd15a 100644 --- a/stubs/networkx/networkx/drawing/nx_pylab.pyi +++ b/stubs/networkx/networkx/drawing/nx_pylab.pyi @@ -18,10 +18,8 @@ __all__ = [ "draw_forceatlas2", ] -def draw(G, pos: Incomplete | None = None, ax: Incomplete | None = None, **kwds) -> None: ... -def draw_networkx( - G, pos: Incomplete | None = None, arrows: Incomplete | None = None, with_labels: bool = True, **kwds -) -> None: ... +def draw(G, pos=None, ax=None, **kwds) -> None: ... +def draw_networkx(G, pos=None, arrows=None, with_labels: bool = True, **kwds) -> None: ... 
def draw_networkx_nodes( G, pos, @@ -29,33 +27,33 @@ def draw_networkx_nodes( node_size: Incomplete | int = 300, node_color: str = "#1f78b4", node_shape: str = "o", - alpha: Incomplete | None = None, - cmap: Incomplete | None = None, - vmin: Incomplete | None = None, - vmax: Incomplete | None = None, - ax: Incomplete | None = None, - linewidths: Incomplete | None = None, - edgecolors: Incomplete | None = None, - label: Incomplete | None = None, - margins: Incomplete | None = None, + alpha=None, + cmap=None, + vmin=None, + vmax=None, + ax=None, + linewidths=None, + edgecolors=None, + label=None, + margins=None, hide_ticks: bool = True, ): ... def draw_networkx_edges( G, pos, - edgelist: Incomplete | None = None, + edgelist=None, width: float = 1.0, edge_color: str = "k", style: str = "solid", - alpha: Incomplete | None = None, - arrowstyle: Incomplete | None = None, + alpha=None, + arrowstyle=None, arrowsize: int = 10, - edge_cmap: Incomplete | None = None, - edge_vmin: Incomplete | None = None, - edge_vmax: Incomplete | None = None, - ax: Incomplete | None = None, - arrows: Incomplete | None = None, - label: Incomplete | None = None, + edge_cmap=None, + edge_vmin=None, + edge_vmax=None, + ax=None, + arrows=None, + label=None, node_size: Incomplete | int = 300, nodelist: list[Incomplete] | None = None, node_shape: str = "o", @@ -67,33 +65,33 @@ def draw_networkx_edges( def draw_networkx_labels( G, pos, - labels: Incomplete | None = None, + labels=None, font_size: int = 12, font_color: str = "k", font_family: str = "sans-serif", font_weight: str = "normal", - alpha: Incomplete | None = None, - bbox: Incomplete | None = None, + alpha=None, + bbox=None, horizontalalignment: str = "center", verticalalignment: str = "center", - ax: Incomplete | None = None, + ax=None, clip_on: bool = True, hide_ticks: bool = True, ): ... def draw_networkx_edge_labels( G, pos, - edge_labels: Incomplete | None = None, + edge_labels=None, label_pos: float = 0.5, font_size: int = 10, font_color: str = "k", font_family: str = "sans-serif", font_weight: str = "normal", - alpha: Incomplete | None = None, - bbox: Incomplete | None = None, + alpha=None, + bbox=None, horizontalalignment: str = "center", verticalalignment: str = "center", - ax: Incomplete | None = None, + ax=None, rotate: bool = True, clip_on: bool = True, node_size: int = 300, @@ -106,6 +104,6 @@ def draw_kamada_kawai(G, **kwargs) -> None: ... def draw_random(G, **kwargs) -> None: ... def draw_spectral(G, **kwargs) -> None: ... def draw_spring(G, **kwargs) -> None: ... -def draw_shell(G, nlist: Incomplete | None = None, **kwargs) -> None: ... +def draw_shell(G, nlist=None, **kwargs) -> None: ... def draw_planar(G, **kwargs) -> None: ... def draw_forceatlas2(G, **kwargs) -> None: ... diff --git a/stubs/networkx/networkx/generators/classic.pyi b/stubs/networkx/networkx/generators/classic.pyi index 99b8a6d8413b..5c46ad3a2add 100644 --- a/stubs/networkx/networkx/generators/classic.pyi +++ b/stubs/networkx/networkx/generators/classic.pyi @@ -28,44 +28,44 @@ __all__ = [ ] @_dispatchable -def full_rary_tree(r, n, create_using: Incomplete | None = None): ... +def full_rary_tree(r, n, create_using=None): ... @_dispatchable def kneser_graph(n, k) -> Graph[Incomplete]: ... @_dispatchable -def balanced_tree(r, h, create_using: Incomplete | None = None): ... +def balanced_tree(r, h, create_using=None): ... @_dispatchable -def barbell_graph(m1, m2, create_using: Incomplete | None = None): ... +def barbell_graph(m1, m2, create_using=None): ... 
@_dispatchable -def binomial_tree(n, create_using: Incomplete | None = None): ... +def binomial_tree(n, create_using=None): ... @_dispatchable -def complete_graph(n, create_using: Incomplete | None = None): ... +def complete_graph(n, create_using=None): ... @_dispatchable -def circular_ladder_graph(n, create_using: Incomplete | None = None): ... +def circular_ladder_graph(n, create_using=None): ... @_dispatchable -def circulant_graph(n, offsets, create_using: Incomplete | None = None): ... +def circulant_graph(n, offsets, create_using=None): ... @_dispatchable -def cycle_graph(n, create_using: Incomplete | None = None): ... +def cycle_graph(n, create_using=None): ... @_dispatchable -def dorogovtsev_goltsev_mendes_graph(n, create_using: Incomplete | None = None): ... +def dorogovtsev_goltsev_mendes_graph(n, create_using=None): ... @_dispatchable -def empty_graph(n: Incomplete | int = 0, create_using: Incomplete | None = None, default=...): ... +def empty_graph(n: Incomplete | int = 0, create_using=None, default=...): ... @_dispatchable -def ladder_graph(n, create_using: Incomplete | None = None): ... +def ladder_graph(n, create_using=None): ... @_dispatchable -def lollipop_graph(m, n, create_using: Incomplete | None = None): ... +def lollipop_graph(m, n, create_using=None): ... @_dispatchable -def null_graph(create_using: Incomplete | None = None): ... +def null_graph(create_using=None): ... @_dispatchable -def path_graph(n, create_using: Incomplete | None = None): ... +def path_graph(n, create_using=None): ... @_dispatchable -def star_graph(n, create_using: Incomplete | None = None): ... +def star_graph(n, create_using=None): ... @_dispatchable def tadpole_graph(m, n, create_using=None) -> Graph[Incomplete] | Incomplete: ... @_dispatchable -def trivial_graph(create_using: Incomplete | None = None): ... +def trivial_graph(create_using=None): ... @_dispatchable def turan_graph(n, r): ... @_dispatchable -def wheel_graph(n, create_using: Incomplete | None = None): ... +def wheel_graph(n, create_using=None): ... @_dispatchable def complete_multipartite_graph(*subset_sizes): ... diff --git a/stubs/networkx/networkx/generators/cographs.pyi b/stubs/networkx/networkx/generators/cographs.pyi index a3081cae6274..ba2edca7a2b6 100644 --- a/stubs/networkx/networkx/generators/cographs.pyi +++ b/stubs/networkx/networkx/generators/cographs.pyi @@ -1,8 +1,6 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["random_cograph"] @_dispatchable -def random_cograph(n, seed: Incomplete | None = None): ... +def random_cograph(n, seed=None): ... diff --git a/stubs/networkx/networkx/generators/community.pyi b/stubs/networkx/networkx/generators/community.pyi index d95c6eeee30b..706b03dbd544 100644 --- a/stubs/networkx/networkx/generators/community.pyi +++ b/stubs/networkx/networkx/generators/community.pyi @@ -21,13 +21,13 @@ def caveman_graph(l, k): ... @_dispatchable def connected_caveman_graph(l, k): ... @_dispatchable -def relaxed_caveman_graph(l, k, p, seed: Incomplete | None = None): ... +def relaxed_caveman_graph(l, k, p, seed=None): ... @_dispatchable -def random_partition_graph(sizes, p_in, p_out, seed: Incomplete | None = None, directed: bool = False): ... +def random_partition_graph(sizes, p_in, p_out, seed=None, directed: bool = False): ... @_dispatchable -def planted_partition_graph(l, k, p_in, p_out, seed: Incomplete | None = None, directed: bool = False): ... +def planted_partition_graph(l, k, p_in, p_out, seed=None, directed: bool = False): ... 
@_dispatchable -def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed: bool = False, seed: Incomplete | None = None): ... +def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed: bool = False, seed=None): ... @_dispatchable def ring_of_cliques(num_cliques, clique_size): ... @_dispatchable @@ -37,7 +37,7 @@ def stochastic_block_model( sizes, p, nodelist: Collection[Incomplete] | None = None, - seed: Incomplete | None = None, + seed=None, directed: bool = False, selfloops: bool = False, sparse: bool = True, @@ -48,12 +48,12 @@ def LFR_benchmark_graph( tau1, tau2, mu, - average_degree: Incomplete | None = None, - min_degree: Incomplete | None = None, - max_degree: Incomplete | None = None, - min_community: Incomplete | None = None, - max_community: Incomplete | None = None, + average_degree=None, + min_degree=None, + max_degree=None, + min_community=None, + max_community=None, tol: float = 1e-07, max_iters: int = 500, - seed: Incomplete | None = None, + seed=None, ): ... diff --git a/stubs/networkx/networkx/generators/degree_seq.pyi b/stubs/networkx/networkx/generators/degree_seq.pyi index b24d74eea48e..325b6d911e6a 100644 --- a/stubs/networkx/networkx/generators/degree_seq.pyi +++ b/stubs/networkx/networkx/generators/degree_seq.pyi @@ -13,21 +13,19 @@ __all__ = [ ] @_dispatchable -def configuration_model(deg_sequence, create_using: Incomplete | None = None, seed: Incomplete | None = None): ... +def configuration_model(deg_sequence, create_using=None, seed=None): ... @_dispatchable -def directed_configuration_model( - in_degree_sequence, out_degree_sequence, create_using: Incomplete | None = None, seed: Incomplete | None = None -): ... +def directed_configuration_model(in_degree_sequence, out_degree_sequence, create_using=None, seed=None): ... @_dispatchable -def expected_degree_graph(w, seed: Incomplete | None = None, selfloops: bool = True): ... +def expected_degree_graph(w, seed=None, selfloops: bool = True): ... @_dispatchable -def havel_hakimi_graph(deg_sequence, create_using: Incomplete | None = None): ... +def havel_hakimi_graph(deg_sequence, create_using=None): ... @_dispatchable -def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using: Incomplete | None = None): ... +def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using=None): ... @_dispatchable -def degree_sequence_tree(deg_sequence, create_using: Incomplete | None = None): ... +def degree_sequence_tree(deg_sequence, create_using=None): ... @_dispatchable -def random_degree_sequence_graph(sequence, seed: Incomplete | None = None, tries: int = 10): ... +def random_degree_sequence_graph(sequence, seed=None, tries: int = 10): ... class DegreeSequenceRandomGraph: rng: Incomplete @@ -38,7 +36,7 @@ class DegreeSequenceRandomGraph: remaining_degree: Incomplete graph: Incomplete def generate(self): ... - def update_remaining(self, u, v, aux_graph: Incomplete | None = None) -> None: ... + def update_remaining(self, u, v, aux_graph=None) -> None: ... def p(self, u, v): ... def q(self, u, v): ... def suitable_edge(self): ... 
diff --git a/stubs/networkx/networkx/generators/directed.pyi b/stubs/networkx/networkx/generators/directed.pyi index bb8f174dcde3..295a3cba39e3 100644 --- a/stubs/networkx/networkx/generators/directed.pyi +++ b/stubs/networkx/networkx/generators/directed.pyi @@ -1,15 +1,13 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["gn_graph", "gnc_graph", "gnr_graph", "random_k_out_graph", "scale_free_graph"] @_dispatchable -def gn_graph(n, kernel: Incomplete | None = None, create_using: Incomplete | None = None, seed: Incomplete | None = None): ... +def gn_graph(n, kernel=None, create_using=None, seed=None): ... @_dispatchable -def gnr_graph(n, p, create_using: Incomplete | None = None, seed: Incomplete | None = None): ... +def gnr_graph(n, p, create_using=None, seed=None): ... @_dispatchable -def gnc_graph(n, create_using: Incomplete | None = None, seed: Incomplete | None = None): ... +def gnc_graph(n, create_using=None, seed=None): ... @_dispatchable def scale_free_graph( n, @@ -18,9 +16,9 @@ def scale_free_graph( gamma: float = 0.05, delta_in: float = 0.2, delta_out: float = 0, - create_using: Incomplete | None = None, - seed: Incomplete | None = None, - initial_graph: Incomplete | None = None, + create_using=None, + seed=None, + initial_graph=None, ): ... @_dispatchable -def random_k_out_graph(n, k, alpha, self_loops: bool = True, seed: Incomplete | None = None): ... +def random_k_out_graph(n, k, alpha, self_loops: bool = True, seed=None): ... diff --git a/stubs/networkx/networkx/generators/duplication.pyi b/stubs/networkx/networkx/generators/duplication.pyi index 4b62c92c75e2..7371b6dc5ffd 100644 --- a/stubs/networkx/networkx/generators/duplication.pyi +++ b/stubs/networkx/networkx/generators/duplication.pyi @@ -1,10 +1,8 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["partial_duplication_graph", "duplication_divergence_graph"] @_dispatchable -def partial_duplication_graph(N, n, p, q, seed: Incomplete | None = None): ... +def partial_duplication_graph(N, n, p, q, seed=None): ... @_dispatchable -def duplication_divergence_graph(n, p, seed: Incomplete | None = None): ... +def duplication_divergence_graph(n, p, seed=None): ... diff --git a/stubs/networkx/networkx/generators/ego.pyi b/stubs/networkx/networkx/generators/ego.pyi index 83725a552c50..188fa2374932 100644 --- a/stubs/networkx/networkx/generators/ego.pyi +++ b/stubs/networkx/networkx/generators/ego.pyi @@ -1,8 +1,6 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["ego_graph"] @_dispatchable -def ego_graph(G, n, radius: float = 1, center: bool = True, undirected: bool = False, distance: Incomplete | None = None): ... +def ego_graph(G, n, radius: float = 1, center: bool = True, undirected: bool = False, distance=None): ... diff --git a/stubs/networkx/networkx/generators/expanders.pyi b/stubs/networkx/networkx/generators/expanders.pyi index f59596b797b2..6970592a3ffb 100644 --- a/stubs/networkx/networkx/generators/expanders.pyi +++ b/stubs/networkx/networkx/generators/expanders.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = [ @@ -12,11 +10,11 @@ __all__ = [ ] @_dispatchable -def margulis_gabber_galil_graph(n, create_using: Incomplete | None = None): ... +def margulis_gabber_galil_graph(n, create_using=None): ... @_dispatchable -def chordal_cycle_graph(p, create_using: Incomplete | None = None): ... 
+def chordal_cycle_graph(p, create_using=None): ... @_dispatchable -def paley_graph(p, create_using: Incomplete | None = None): ... +def paley_graph(p, create_using=None): ... @_dispatchable def maybe_regular_expander(n, d, *, create_using=None, max_tries=100, seed=None): ... @_dispatchable diff --git a/stubs/networkx/networkx/generators/geometric.pyi b/stubs/networkx/networkx/generators/geometric.pyi index abaaba251721..2147ac1b0104 100644 --- a/stubs/networkx/networkx/generators/geometric.pyi +++ b/stubs/networkx/networkx/generators/geometric.pyi @@ -17,55 +17,19 @@ __all__ = [ @_dispatchable def geometric_edges(G, radius, p: float = 2): ... @_dispatchable -def random_geometric_graph( - n, radius, dim: int = 2, pos: Incomplete | None = None, p: float = 2, seed: Incomplete | None = None -): ... +def random_geometric_graph(n, radius, dim: int = 2, pos=None, p: float = 2, seed=None): ... @_dispatchable -def soft_random_geometric_graph( - n, - radius, - dim: int = 2, - pos: Incomplete | None = None, - p: float = 2, - p_dist: Incomplete | None = None, - seed: Incomplete | None = None, -): ... +def soft_random_geometric_graph(n, radius, dim: int = 2, pos=None, p: float = 2, p_dist=None, seed=None): ... @_dispatchable -def geographical_threshold_graph( - n, - theta, - dim: int = 2, - pos: Incomplete | None = None, - weight: Incomplete | None = None, - metric: Incomplete | None = None, - p_dist: Incomplete | None = None, - seed: Incomplete | None = None, -): ... +def geographical_threshold_graph(n, theta, dim: int = 2, pos=None, weight=None, metric=None, p_dist=None, seed=None): ... @_dispatchable -def waxman_graph( - n, - beta: float = 0.4, - alpha: float = 0.1, - L: Incomplete | None = None, - domain=(0, 0, 1, 1), - metric: Incomplete | None = None, - seed: Incomplete | None = None, -): ... +def waxman_graph(n, beta: float = 0.4, alpha: float = 0.1, L=None, domain=(0, 0, 1, 1), metric=None, seed=None): ... # docstring marks p as int, but it still works with floats. So I think it's better for consistency @_dispatchable -def navigable_small_world_graph(n, p: float = 1, q: int = 1, r: float = 2, dim: int = 2, seed: Incomplete | None = None): ... +def navigable_small_world_graph(n, p: float = 1, q: int = 1, r: float = 2, dim: int = 2, seed=None): ... @_dispatchable -def thresholded_random_geometric_graph( - n, - radius, - theta, - dim: int = 2, - pos: Incomplete | None = None, - weight: Incomplete | None = None, - p: float = 2, - seed: Incomplete | None = None, -): ... +def thresholded_random_geometric_graph(n, radius, theta, dim: int = 2, pos=None, weight=None, p: float = 2, seed=None): ... @_dispatchable def geometric_soft_configuration_graph( *, beta, n=None, gamma=None, mean_degree=None, kappas=None, seed=None diff --git a/stubs/networkx/networkx/generators/harary_graph.pyi b/stubs/networkx/networkx/generators/harary_graph.pyi index 341308b8992d..2f709f2c94bd 100644 --- a/stubs/networkx/networkx/generators/harary_graph.pyi +++ b/stubs/networkx/networkx/generators/harary_graph.pyi @@ -1,10 +1,8 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["hnm_harary_graph", "hkn_harary_graph"] @_dispatchable -def hnm_harary_graph(n, m, create_using: Incomplete | None = None): ... +def hnm_harary_graph(n, m, create_using=None): ... @_dispatchable -def hkn_harary_graph(k, n, create_using: Incomplete | None = None): ... +def hkn_harary_graph(k, n, create_using=None): ... 
diff --git a/stubs/networkx/networkx/generators/internet_as_graphs.pyi b/stubs/networkx/networkx/generators/internet_as_graphs.pyi index 8c9542f45480..f951ee8347c5 100644 --- a/stubs/networkx/networkx/generators/internet_as_graphs.pyi +++ b/stubs/networkx/networkx/generators/internet_as_graphs.pyi @@ -38,4 +38,4 @@ class AS_graph_generator: def generate(self): ... @_dispatchable -def random_internet_as_graph(n, seed: Incomplete | None = None): ... +def random_internet_as_graph(n, seed=None): ... diff --git a/stubs/networkx/networkx/generators/intersection.pyi b/stubs/networkx/networkx/generators/intersection.pyi index 73a5a7528c22..271deaec7bc4 100644 --- a/stubs/networkx/networkx/generators/intersection.pyi +++ b/stubs/networkx/networkx/generators/intersection.pyi @@ -1,12 +1,10 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["uniform_random_intersection_graph", "k_random_intersection_graph", "general_random_intersection_graph"] @_dispatchable -def uniform_random_intersection_graph(n, m, p, seed: Incomplete | None = None): ... +def uniform_random_intersection_graph(n, m, p, seed=None): ... @_dispatchable -def k_random_intersection_graph(n, m, k, seed: Incomplete | None = None): ... +def k_random_intersection_graph(n, m, k, seed=None): ... @_dispatchable -def general_random_intersection_graph(n, m, p, seed: Incomplete | None = None): ... +def general_random_intersection_graph(n, m, p, seed=None): ... diff --git a/stubs/networkx/networkx/generators/joint_degree_seq.pyi b/stubs/networkx/networkx/generators/joint_degree_seq.pyi index b9f976ac343b..98fea472a969 100644 --- a/stubs/networkx/networkx/generators/joint_degree_seq.pyi +++ b/stubs/networkx/networkx/generators/joint_degree_seq.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["is_valid_joint_degree", "is_valid_directed_joint_degree", "joint_degree_graph", "directed_joint_degree_graph"] @@ -7,8 +5,8 @@ __all__ = ["is_valid_joint_degree", "is_valid_directed_joint_degree", "joint_deg @_dispatchable def is_valid_joint_degree(joint_degrees): ... @_dispatchable -def joint_degree_graph(joint_degrees, seed: Incomplete | None = None): ... +def joint_degree_graph(joint_degrees, seed=None): ... @_dispatchable def is_valid_directed_joint_degree(in_degrees, out_degrees, nkk): ... @_dispatchable -def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed: Incomplete | None = None): ... +def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): ... diff --git a/stubs/networkx/networkx/generators/lattice.pyi b/stubs/networkx/networkx/generators/lattice.pyi index 29d8e5fa972f..e82eb72ae6da 100644 --- a/stubs/networkx/networkx/generators/lattice.pyi +++ b/stubs/networkx/networkx/generators/lattice.pyi @@ -1,20 +1,14 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["grid_2d_graph", "grid_graph", "hypercube_graph", "triangular_lattice_graph", "hexagonal_lattice_graph"] @_dispatchable -def grid_2d_graph(m, n, periodic: bool = False, create_using: Incomplete | None = None): ... +def grid_2d_graph(m, n, periodic: bool = False, create_using=None): ... @_dispatchable def grid_graph(dim, periodic: bool = False): ... @_dispatchable def hypercube_graph(n): ... @_dispatchable -def triangular_lattice_graph( - m, n, periodic: bool = False, with_positions: bool = True, create_using: Incomplete | None = None -): ... 
+def triangular_lattice_graph(m, n, periodic: bool = False, with_positions: bool = True, create_using=None): ... @_dispatchable -def hexagonal_lattice_graph( - m, n, periodic: bool = False, with_positions: bool = True, create_using: Incomplete | None = None -): ... +def hexagonal_lattice_graph(m, n, periodic: bool = False, with_positions: bool = True, create_using=None): ... diff --git a/stubs/networkx/networkx/generators/line.pyi b/stubs/networkx/networkx/generators/line.pyi index 01a36dc8312f..84e0bf1d21cd 100644 --- a/stubs/networkx/networkx/generators/line.pyi +++ b/stubs/networkx/networkx/generators/line.pyi @@ -1,10 +1,8 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["line_graph", "inverse_line_graph"] @_dispatchable -def line_graph(G, create_using: Incomplete | None = None): ... +def line_graph(G, create_using=None): ... @_dispatchable def inverse_line_graph(G): ... diff --git a/stubs/networkx/networkx/generators/random_graphs.pyi b/stubs/networkx/networkx/generators/random_graphs.pyi index a0e78626e796..958a5028e064 100644 --- a/stubs/networkx/networkx/generators/random_graphs.pyi +++ b/stubs/networkx/networkx/generators/random_graphs.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = [ @@ -25,40 +23,40 @@ __all__ = [ ] @_dispatchable -def fast_gnp_random_graph(n, p, seed: Incomplete | None = None, directed: bool = False): ... +def fast_gnp_random_graph(n, p, seed=None, directed: bool = False): ... @_dispatchable -def gnp_random_graph(n, p, seed: Incomplete | None = None, directed: bool = False): ... +def gnp_random_graph(n, p, seed=None, directed: bool = False): ... binomial_graph = gnp_random_graph erdos_renyi_graph = gnp_random_graph @_dispatchable -def dense_gnm_random_graph(n, m, seed: Incomplete | None = None): ... +def dense_gnm_random_graph(n, m, seed=None): ... @_dispatchable -def gnm_random_graph(n, m, seed: Incomplete | None = None, directed: bool = False): ... +def gnm_random_graph(n, m, seed=None, directed: bool = False): ... @_dispatchable -def newman_watts_strogatz_graph(n, k, p, seed: Incomplete | None = None): ... +def newman_watts_strogatz_graph(n, k, p, seed=None): ... @_dispatchable -def watts_strogatz_graph(n, k, p, seed: Incomplete | None = None): ... +def watts_strogatz_graph(n, k, p, seed=None): ... @_dispatchable -def connected_watts_strogatz_graph(n, k, p, tries: int = 100, seed: Incomplete | None = None): ... +def connected_watts_strogatz_graph(n, k, p, tries: int = 100, seed=None): ... @_dispatchable -def random_regular_graph(d, n, seed: Incomplete | None = None): ... +def random_regular_graph(d, n, seed=None): ... @_dispatchable -def barabasi_albert_graph(n, m, seed: Incomplete | None = None, initial_graph: Incomplete | None = None): ... +def barabasi_albert_graph(n, m, seed=None, initial_graph=None): ... @_dispatchable -def dual_barabasi_albert_graph(n, m1, m2, p, seed: Incomplete | None = None, initial_graph: Incomplete | None = None): ... +def dual_barabasi_albert_graph(n, m1, m2, p, seed=None, initial_graph=None): ... @_dispatchable -def extended_barabasi_albert_graph(n, m, p, q, seed: Incomplete | None = None): ... +def extended_barabasi_albert_graph(n, m, p, q, seed=None): ... @_dispatchable -def powerlaw_cluster_graph(n, m, p, seed: Incomplete | None = None): ... +def powerlaw_cluster_graph(n, m, p, seed=None): ... @_dispatchable -def random_lobster(n, p1, p2, seed: Incomplete | None = None): ... 
+def random_lobster(n, p1, p2, seed=None): ... @_dispatchable -def random_shell_graph(constructor, seed: Incomplete | None = None): ... +def random_shell_graph(constructor, seed=None): ... @_dispatchable -def random_powerlaw_tree(n, gamma: float = 3, seed: Incomplete | None = None, tries: int = 100): ... +def random_powerlaw_tree(n, gamma: float = 3, seed=None, tries: int = 100): ... @_dispatchable -def random_powerlaw_tree_sequence(n, gamma: float = 3, seed: Incomplete | None = None, tries: int = 100): ... +def random_powerlaw_tree_sequence(n, gamma: float = 3, seed=None, tries: int = 100): ... @_dispatchable -def random_kernel_graph(n, kernel_integral, kernel_root: Incomplete | None = None, seed: Incomplete | None = None): ... +def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None): ... diff --git a/stubs/networkx/networkx/generators/small.pyi b/stubs/networkx/networkx/generators/small.pyi index 97c1057d8a02..4d5f7dc952a9 100644 --- a/stubs/networkx/networkx/generators/small.pyi +++ b/stubs/networkx/networkx/generators/small.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = [ @@ -29,48 +27,48 @@ __all__ = [ ] @_dispatchable -def LCF_graph(n, shift_list, repeats, create_using: Incomplete | None = None): ... +def LCF_graph(n, shift_list, repeats, create_using=None): ... @_dispatchable -def bull_graph(create_using: Incomplete | None = None): ... +def bull_graph(create_using=None): ... @_dispatchable -def chvatal_graph(create_using: Incomplete | None = None): ... +def chvatal_graph(create_using=None): ... @_dispatchable -def cubical_graph(create_using: Incomplete | None = None): ... +def cubical_graph(create_using=None): ... @_dispatchable -def desargues_graph(create_using: Incomplete | None = None): ... +def desargues_graph(create_using=None): ... @_dispatchable -def diamond_graph(create_using: Incomplete | None = None): ... +def diamond_graph(create_using=None): ... @_dispatchable -def dodecahedral_graph(create_using: Incomplete | None = None): ... +def dodecahedral_graph(create_using=None): ... @_dispatchable -def frucht_graph(create_using: Incomplete | None = None): ... +def frucht_graph(create_using=None): ... @_dispatchable -def heawood_graph(create_using: Incomplete | None = None): ... +def heawood_graph(create_using=None): ... @_dispatchable def hoffman_singleton_graph(): ... @_dispatchable -def house_graph(create_using: Incomplete | None = None): ... +def house_graph(create_using=None): ... @_dispatchable -def house_x_graph(create_using: Incomplete | None = None): ... +def house_x_graph(create_using=None): ... @_dispatchable -def icosahedral_graph(create_using: Incomplete | None = None): ... +def icosahedral_graph(create_using=None): ... @_dispatchable -def krackhardt_kite_graph(create_using: Incomplete | None = None): ... +def krackhardt_kite_graph(create_using=None): ... @_dispatchable -def moebius_kantor_graph(create_using: Incomplete | None = None): ... +def moebius_kantor_graph(create_using=None): ... @_dispatchable -def octahedral_graph(create_using: Incomplete | None = None): ... +def octahedral_graph(create_using=None): ... @_dispatchable def pappus_graph(): ... @_dispatchable -def petersen_graph(create_using: Incomplete | None = None): ... +def petersen_graph(create_using=None): ... @_dispatchable -def sedgewick_maze_graph(create_using: Incomplete | None = None): ... +def sedgewick_maze_graph(create_using=None): ... 
@_dispatchable -def tetrahedral_graph(create_using: Incomplete | None = None): ... +def tetrahedral_graph(create_using=None): ... @_dispatchable -def truncated_cube_graph(create_using: Incomplete | None = None): ... +def truncated_cube_graph(create_using=None): ... @_dispatchable -def truncated_tetrahedron_graph(create_using: Incomplete | None = None): ... +def truncated_tetrahedron_graph(create_using=None): ... @_dispatchable -def tutte_graph(create_using: Incomplete | None = None): ... +def tutte_graph(create_using=None): ... diff --git a/stubs/networkx/networkx/generators/spectral_graph_forge.pyi b/stubs/networkx/networkx/generators/spectral_graph_forge.pyi index 6fc6902a354f..a4af66065a7b 100644 --- a/stubs/networkx/networkx/generators/spectral_graph_forge.pyi +++ b/stubs/networkx/networkx/generators/spectral_graph_forge.pyi @@ -1,8 +1,6 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["spectral_graph_forge"] @_dispatchable -def spectral_graph_forge(G, alpha, transformation: str = "identity", seed: Incomplete | None = None): ... +def spectral_graph_forge(G, alpha, transformation: str = "identity", seed=None): ... diff --git a/stubs/networkx/networkx/lazy_imports.pyi b/stubs/networkx/networkx/lazy_imports.pyi index b932e90af2c2..c5bc64e495c0 100644 --- a/stubs/networkx/networkx/lazy_imports.pyi +++ b/stubs/networkx/networkx/lazy_imports.pyi @@ -1,9 +1,8 @@ import types -from _typeshed import Incomplete __all__ = ["attach", "_lazy_import"] -def attach(module_name, submodules: Incomplete | None = None, submod_attrs: Incomplete | None = None): ... +def attach(module_name, submodules=None, submod_attrs=None): ... class DelayedImportErrorModule(types.ModuleType): def __init__(self, frame_data, *args, **kwargs) -> None: ... diff --git a/stubs/networkx/networkx/linalg/algebraicconnectivity.pyi b/stubs/networkx/networkx/linalg/algebraicconnectivity.pyi index fa12fa8c1acd..6e74030449e9 100644 --- a/stubs/networkx/networkx/linalg/algebraicconnectivity.pyi +++ b/stubs/networkx/networkx/linalg/algebraicconnectivity.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["algebraic_connectivity", "fiedler_vector", "spectral_ordering", "spectral_bisection"] @@ -10,41 +8,21 @@ class _PCGSolver: class _LUSolver: def __init__(self, A) -> None: ... - def solve(self, B, tol: Incomplete | None = None): ... + def solve(self, B, tol=None): ... @_dispatchable def algebraic_connectivity( - G, - weight: str = "weight", - normalized: bool = False, - tol: float = 1e-08, - method: str = "tracemin_pcg", - seed: Incomplete | None = None, + G, weight: str = "weight", normalized: bool = False, tol: float = 1e-08, method: str = "tracemin_pcg", seed=None ): ... @_dispatchable def fiedler_vector( - G, - weight: str = "weight", - normalized: bool = False, - tol: float = 1e-08, - method: str = "tracemin_pcg", - seed: Incomplete | None = None, + G, weight: str = "weight", normalized: bool = False, tol: float = 1e-08, method: str = "tracemin_pcg", seed=None ): ... @_dispatchable def spectral_ordering( - G, - weight: str = "weight", - normalized: bool = False, - tol: float = 1e-08, - method: str = "tracemin_pcg", - seed: Incomplete | None = None, + G, weight: str = "weight", normalized: bool = False, tol: float = 1e-08, method: str = "tracemin_pcg", seed=None ): ... 
@_dispatchable def spectral_bisection( - G, - weight: str = "weight", - normalized: bool = False, - tol: float = 1e-08, - method: str = "tracemin_pcg", - seed: Incomplete | None = None, + G, weight: str = "weight", normalized: bool = False, tol: float = 1e-08, method: str = "tracemin_pcg", seed=None ): ... diff --git a/stubs/networkx/networkx/linalg/attrmatrix.pyi b/stubs/networkx/networkx/linalg/attrmatrix.pyi index 3e645842ec26..01fba7355c3b 100644 --- a/stubs/networkx/networkx/linalg/attrmatrix.pyi +++ b/stubs/networkx/networkx/linalg/attrmatrix.pyi @@ -1,25 +1,8 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["attr_matrix", "attr_sparse_matrix"] @_dispatchable -def attr_matrix( - G, - edge_attr: Incomplete | None = None, - node_attr: Incomplete | None = None, - normalized: bool = False, - rc_order: Incomplete | None = None, - dtype: Incomplete | None = None, - order: Incomplete | None = None, -): ... +def attr_matrix(G, edge_attr=None, node_attr=None, normalized: bool = False, rc_order=None, dtype=None, order=None): ... @_dispatchable -def attr_sparse_matrix( - G, - edge_attr: Incomplete | None = None, - node_attr: Incomplete | None = None, - normalized: bool = False, - rc_order: Incomplete | None = None, - dtype: Incomplete | None = None, -): ... +def attr_sparse_matrix(G, edge_attr=None, node_attr=None, normalized: bool = False, rc_order=None, dtype=None): ... diff --git a/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi b/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi index 2e98b1cdb995..b2713db482ab 100644 --- a/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi +++ b/stubs/networkx/networkx/linalg/bethehessianmatrix.pyi @@ -6,4 +6,4 @@ from networkx.utils.backends import _dispatchable __all__ = ["bethe_hessian_matrix"] @_dispatchable -def bethe_hessian_matrix(G, r: Incomplete | None = None, nodelist: Collection[Incomplete] | None = None): ... +def bethe_hessian_matrix(G, r=None, nodelist: Collection[Incomplete] | None = None): ... diff --git a/stubs/networkx/networkx/linalg/graphmatrix.pyi b/stubs/networkx/networkx/linalg/graphmatrix.pyi index 97fb7371cb81..d4277651bd65 100644 --- a/stubs/networkx/networkx/linalg/graphmatrix.pyi +++ b/stubs/networkx/networkx/linalg/graphmatrix.pyi @@ -6,14 +6,6 @@ from networkx.utils.backends import _dispatchable __all__ = ["incidence_matrix", "adjacency_matrix"] @_dispatchable -def incidence_matrix( - G, - nodelist: Collection[Incomplete] | None = None, - edgelist: Incomplete | None = None, - oriented: bool = False, - weight: Incomplete | None = None, -): ... +def incidence_matrix(G, nodelist: Collection[Incomplete] | None = None, edgelist=None, oriented: bool = False, weight=None): ... @_dispatchable -def adjacency_matrix( - G, nodelist: Collection[Incomplete] | None = None, dtype: Incomplete | None = None, weight: str = "weight" -): ... +def adjacency_matrix(G, nodelist: Collection[Incomplete] | None = None, dtype=None, weight: str = "weight"): ... diff --git a/stubs/networkx/networkx/linalg/laplacianmatrix.pyi b/stubs/networkx/networkx/linalg/laplacianmatrix.pyi index ebbfd847905e..26eccdf28fe9 100644 --- a/stubs/networkx/networkx/linalg/laplacianmatrix.pyi +++ b/stubs/networkx/networkx/linalg/laplacianmatrix.pyi @@ -16,20 +16,12 @@ def laplacian_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: @_dispatchable def normalized_laplacian_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: str = "weight"): ... 
@_dispatchable -def total_spanning_tree_weight(G, weight: Incomplete | None = None): ... +def total_spanning_tree_weight(G, weight=None): ... @_dispatchable def directed_laplacian_matrix( - G, - nodelist: Collection[Incomplete] | None = None, - weight: str = "weight", - walk_type: Incomplete | None = None, - alpha: float = 0.95, + G, nodelist: Collection[Incomplete] | None = None, weight: str = "weight", walk_type=None, alpha: float = 0.95 ): ... @_dispatchable def directed_combinatorial_laplacian_matrix( - G, - nodelist: Collection[Incomplete] | None = None, - weight: str = "weight", - walk_type: Incomplete | None = None, - alpha: float = 0.95, + G, nodelist: Collection[Incomplete] | None = None, weight: str = "weight", walk_type=None, alpha: float = 0.95 ): ... diff --git a/stubs/networkx/networkx/linalg/modularitymatrix.pyi b/stubs/networkx/networkx/linalg/modularitymatrix.pyi index 03b0e1ef1193..c1f9be8d5e11 100644 --- a/stubs/networkx/networkx/linalg/modularitymatrix.pyi +++ b/stubs/networkx/networkx/linalg/modularitymatrix.pyi @@ -6,6 +6,6 @@ from networkx.utils.backends import _dispatchable __all__ = ["modularity_matrix", "directed_modularity_matrix"] @_dispatchable -def modularity_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: Incomplete | None = None): ... +def modularity_matrix(G, nodelist: Collection[Incomplete] | None = None, weight=None): ... @_dispatchable -def directed_modularity_matrix(G, nodelist: Collection[Incomplete] | None = None, weight: Incomplete | None = None): ... +def directed_modularity_matrix(G, nodelist: Collection[Incomplete] | None = None, weight=None): ... diff --git a/stubs/networkx/networkx/linalg/spectrum.pyi b/stubs/networkx/networkx/linalg/spectrum.pyi index 3051a272c3ec..c30aa5061aa8 100644 --- a/stubs/networkx/networkx/linalg/spectrum.pyi +++ b/stubs/networkx/networkx/linalg/spectrum.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = [ @@ -19,4 +17,4 @@ def adjacency_spectrum(G, weight: str = "weight"): ... @_dispatchable def modularity_spectrum(G): ... @_dispatchable -def bethe_hessian_spectrum(G, r: Incomplete | None = None): ... +def bethe_hessian_spectrum(G, r=None): ... diff --git a/stubs/networkx/networkx/readwrite/adjlist.pyi b/stubs/networkx/networkx/readwrite/adjlist.pyi index b9cbfef32166..2f482e88bef1 100644 --- a/stubs/networkx/networkx/readwrite/adjlist.pyi +++ b/stubs/networkx/networkx/readwrite/adjlist.pyi @@ -8,19 +8,6 @@ __all__ = ["generate_adjlist", "write_adjlist", "parse_adjlist", "read_adjlist"] def generate_adjlist(G, delimiter: str = " ") -> Generator[Incomplete, None, None]: ... def write_adjlist(G, path, comments: str = "#", delimiter: str = " ", encoding: str = "utf-8") -> None: ... @_dispatchable -def parse_adjlist( - lines, - comments: str = "#", - delimiter: Incomplete | None = None, - create_using: Incomplete | None = None, - nodetype: Incomplete | None = None, -): ... +def parse_adjlist(lines, comments: str = "#", delimiter=None, create_using=None, nodetype=None): ... @_dispatchable -def read_adjlist( - path, - comments: str = "#", - delimiter: Incomplete | None = None, - create_using: Incomplete | None = None, - nodetype: Incomplete | None = None, - encoding: str = "utf-8", -): ... +def read_adjlist(path, comments: str = "#", delimiter=None, create_using=None, nodetype=None, encoding: str = "utf-8"): ... 
diff --git a/stubs/networkx/networkx/readwrite/edgelist.pyi b/stubs/networkx/networkx/readwrite/edgelist.pyi index ddb2b6d889a9..fb7d6d743adf 100644 --- a/stubs/networkx/networkx/readwrite/edgelist.pyi +++ b/stubs/networkx/networkx/readwrite/edgelist.pyi @@ -15,32 +15,20 @@ __all__ = [ def generate_edgelist(G, delimiter: str = " ", data: bool = True) -> Generator[Incomplete, None, None]: ... def write_edgelist(G, path, comments: str = "#", delimiter: str = " ", data: bool = True, encoding: str = "utf-8") -> None: ... @_dispatchable -def parse_edgelist( - lines, - comments: str = "#", - delimiter: Incomplete | None = None, - create_using: Incomplete | None = None, - nodetype: Incomplete | None = None, - data: bool = True, -): ... +def parse_edgelist(lines, comments: str = "#", delimiter=None, create_using=None, nodetype=None, data: bool = True): ... @_dispatchable def read_edgelist( path, comments: str = "#", - delimiter: Incomplete | None = None, - create_using: Incomplete | None = None, - nodetype: Incomplete | None = None, + delimiter=None, + create_using=None, + nodetype=None, data: bool = True, - edgetype: Incomplete | None = None, + edgetype=None, encoding: str = "utf-8", ): ... def write_weighted_edgelist(G, path, comments: str = "#", delimiter: str = " ", encoding: str = "utf-8") -> None: ... @_dispatchable def read_weighted_edgelist( - path, - comments: str = "#", - delimiter: Incomplete | None = None, - create_using: Incomplete | None = None, - nodetype: Incomplete | None = None, - encoding: str = "utf-8", + path, comments: str = "#", delimiter=None, create_using=None, nodetype=None, encoding: str = "utf-8" ): ... diff --git a/stubs/networkx/networkx/readwrite/gexf.pyi b/stubs/networkx/networkx/readwrite/gexf.pyi index 8de89fcd392f..f7b39bd1cd2e 100644 --- a/stubs/networkx/networkx/readwrite/gexf.pyi +++ b/stubs/networkx/networkx/readwrite/gexf.pyi @@ -10,7 +10,7 @@ def generate_gexf( G, encoding: str = "utf-8", prettyprint: bool = True, version: str = "1.2draft" ) -> Generator[Incomplete, Incomplete, None]: ... @_dispatchable -def read_gexf(path, node_type: Incomplete | None = None, relabel: bool = False, version: str = "1.2draft"): ... +def read_gexf(path, node_type=None, relabel: bool = False, version: str = "1.2draft"): ... class GEXF: versions: Incomplete @@ -34,9 +34,7 @@ class GEXFWriter(GEXF): attr_id: Incomplete all_edge_ids: Incomplete attr: Incomplete - def __init__( - self, graph: Incomplete | None = None, encoding: str = "utf-8", prettyprint: bool = True, version: str = "1.2draft" - ) -> None: ... + def __init__(self, graph=None, encoding: str = "utf-8", prettyprint: bool = True, version: str = "1.2draft") -> None: ... graph_element: Incomplete def add_graph(self, G) -> None: ... def add_nodes(self, G, graph_element) -> None: ... @@ -54,12 +52,12 @@ class GEXFWriter(GEXF): class GEXFReader(GEXF): node_type: Incomplete simple_graph: bool - def __init__(self, node_type: Incomplete | None = None, version: str = "1.2draft") -> None: ... + def __init__(self, node_type=None, version: str = "1.2draft") -> None: ... xml: Incomplete def __call__(self, stream): ... timeformat: Incomplete def make_graph(self, graph_xml): ... - def add_node(self, G, node_xml, node_attr, node_pid: Incomplete | None = None) -> None: ... + def add_node(self, G, node_xml, node_attr, node_pid=None) -> None: ... def add_start_end(self, data, xml): ... def add_viz(self, data, node_xml): ... def add_parents(self, data, node_xml): ... 
diff --git a/stubs/networkx/networkx/readwrite/gml.pyi b/stubs/networkx/networkx/readwrite/gml.pyi index 63fae3c7b3e1..53be3c51c240 100644 --- a/stubs/networkx/networkx/readwrite/gml.pyi +++ b/stubs/networkx/networkx/readwrite/gml.pyi @@ -10,9 +10,9 @@ _T = TypeVar("_T") __all__ = ["read_gml", "parse_gml", "generate_gml", "write_gml"] @_dispatchable -def read_gml(path, label: str = "label", destringizer: Incomplete | None = None): ... +def read_gml(path, label: str = "label", destringizer=None): ... @_dispatchable -def parse_gml(lines, label: str = "label", destringizer: Incomplete | None = None): ... +def parse_gml(lines, label: str = "label", destringizer=None): ... class Pattern(Enum): KEYS = 0 @@ -29,5 +29,5 @@ class Token(NamedTuple, Generic[_T]): line: int position: int -def generate_gml(G, stringizer: Incomplete | None = None) -> Generator[Incomplete, Incomplete, None]: ... -def write_gml(G, path, stringizer: Incomplete | None = None) -> None: ... +def generate_gml(G, stringizer=None) -> Generator[Incomplete, Incomplete, None]: ... +def write_gml(G, path, stringizer=None) -> None: ... diff --git a/stubs/networkx/networkx/readwrite/graph6.pyi b/stubs/networkx/networkx/readwrite/graph6.pyi index 092bffdf2d77..748be2976c4d 100644 --- a/stubs/networkx/networkx/readwrite/graph6.pyi +++ b/stubs/networkx/networkx/readwrite/graph6.pyi @@ -1,12 +1,10 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["from_graph6_bytes", "read_graph6", "to_graph6_bytes", "write_graph6"] @_dispatchable def from_graph6_bytes(bytes_in): ... -def to_graph6_bytes(G, nodes: Incomplete | None = None, header: bool = True): ... +def to_graph6_bytes(G, nodes=None, header: bool = True): ... @_dispatchable def read_graph6(path): ... -def write_graph6(G, path, nodes: Incomplete | None = None, header: bool = True): ... +def write_graph6(G, path, nodes=None, header: bool = True): ... diff --git a/stubs/networkx/networkx/readwrite/graphml.pyi b/stubs/networkx/networkx/readwrite/graphml.pyi index 2ef976b5118c..60cc90a5cea5 100644 --- a/stubs/networkx/networkx/readwrite/graphml.pyi +++ b/stubs/networkx/networkx/readwrite/graphml.pyi @@ -22,7 +22,7 @@ def write_graphml_xml( prettyprint: bool = True, infer_numeric_types: bool = False, named_key_ids: bool = False, - edge_id_from_attribute: Incomplete | None = None, + edge_id_from_attribute=None, ) -> None: ... def write_graphml_lxml( G, @@ -31,14 +31,10 @@ def write_graphml_lxml( prettyprint: bool = True, infer_numeric_types: bool = False, named_key_ids: bool = False, - edge_id_from_attribute: Incomplete | None = None, + edge_id_from_attribute=None, ): ... def generate_graphml( - G, - encoding: str = "utf-8", - prettyprint: bool = True, - named_key_ids: bool = False, - edge_id_from_attribute: Incomplete | None = None, + G, encoding: str = "utf-8", prettyprint: bool = True, named_key_ids: bool = False, edge_id_from_attribute=None ) -> Generator[Incomplete, Incomplete, None]: ... @_dispatchable def read_graphml(path, node_type=..., edge_key_type=..., force_multigraph: bool = False): ... @@ -69,16 +65,16 @@ class GraphMLWriter(GraphML): attribute_types: Incomplete def __init__( self, - graph: Incomplete | None = None, + graph=None, encoding: str = "utf-8", prettyprint: bool = True, infer_numeric_types: bool = False, named_key_ids: bool = False, - edge_id_from_attribute: Incomplete | None = None, + edge_id_from_attribute=None, ) -> None: ... def attr_type(self, name, scope, value): ... 
def get_key(self, name, attr_type, scope, default): ... - def add_data(self, name, element_type, value, scope: str = "all", default: Incomplete | None = None): ... + def add_data(self, name, element_type, value, scope: str = "all", default=None): ... def add_attributes(self, scope, xml_obj, data, default) -> None: ... def add_nodes(self, G, graph_element) -> None: ... def add_edges(self, G, graph_element) -> None: ... @@ -104,16 +100,16 @@ class GraphMLWriterLxml(GraphMLWriter): def __init__( self, path, - graph: Incomplete | None = None, + graph=None, encoding: str = "utf-8", prettyprint: bool = True, infer_numeric_types: bool = False, named_key_ids: bool = False, - edge_id_from_attribute: Incomplete | None = None, + edge_id_from_attribute=None, ) -> None: ... def add_graph_element(self, G) -> None: ... def add_attributes(self, scope, xml_obj, data, default) -> None: ... - def dump(self, stream: Incomplete | None = None) -> None: ... + def dump(self, stream=None) -> None: ... write_graphml = write_graphml_lxml @@ -124,8 +120,8 @@ class GraphMLReader(GraphML): edge_ids: Incomplete def __init__(self, node_type=..., edge_key_type=..., force_multigraph: bool = False) -> None: ... xml: Incomplete - def __call__(self, path: Incomplete | None = None, string: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... - def make_graph(self, graph_xml, graphml_keys, defaults, G: Incomplete | None = None): ... + def __call__(self, path=None, string=None) -> Generator[Incomplete, None, None]: ... + def make_graph(self, graph_xml, graphml_keys, defaults, G=None): ... def add_node(self, G, node_xml, graphml_keys, defaults) -> None: ... def add_edge(self, G, edge_element, graphml_keys) -> None: ... def decode_data_elements(self, graphml_keys, obj_xml): ... diff --git a/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi b/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi index 2f15c525d5bb..9bec26badcb4 100644 --- a/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi +++ b/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["node_link_data", "node_link_graph"] @@ -20,7 +18,7 @@ def node_link_graph( data, directed: bool = False, multigraph: bool = True, - attrs: Incomplete | None = None, + attrs=None, *, source: str = "source", target: str = "target", diff --git a/stubs/networkx/networkx/readwrite/multiline_adjlist.pyi b/stubs/networkx/networkx/readwrite/multiline_adjlist.pyi index a83096ae39d3..a75f916bb626 100644 --- a/stubs/networkx/networkx/readwrite/multiline_adjlist.pyi +++ b/stubs/networkx/networkx/readwrite/multiline_adjlist.pyi @@ -8,21 +8,8 @@ __all__ = ["generate_multiline_adjlist", "write_multiline_adjlist", "parse_multi def generate_multiline_adjlist(G, delimiter: str = " ") -> Generator[Incomplete, None, None]: ... def write_multiline_adjlist(G, path, delimiter: str = " ", comments: str = "#", encoding: str = "utf-8") -> None: ... @_dispatchable -def parse_multiline_adjlist( - lines, - comments: str = "#", - delimiter: Incomplete | None = None, - create_using: Incomplete | None = None, - nodetype: Incomplete | None = None, - edgetype: Incomplete | None = None, -): ... +def parse_multiline_adjlist(lines, comments: str = "#", delimiter=None, create_using=None, nodetype=None, edgetype=None): ... 
@_dispatchable def read_multiline_adjlist( - path, - comments: str = "#", - delimiter: Incomplete | None = None, - create_using: Incomplete | None = None, - nodetype: Incomplete | None = None, - edgetype: Incomplete | None = None, - encoding: str = "utf-8", + path, comments: str = "#", delimiter=None, create_using=None, nodetype=None, edgetype=None, encoding: str = "utf-8" ): ... diff --git a/stubs/networkx/networkx/readwrite/sparse6.pyi b/stubs/networkx/networkx/readwrite/sparse6.pyi index 10a140c0afca..b1e6e56a50d3 100644 --- a/stubs/networkx/networkx/readwrite/sparse6.pyi +++ b/stubs/networkx/networkx/readwrite/sparse6.pyi @@ -1,12 +1,10 @@ -from _typeshed import Incomplete - from networkx.utils.backends import _dispatchable __all__ = ["from_sparse6_bytes", "read_sparse6", "to_sparse6_bytes", "write_sparse6"] @_dispatchable def from_sparse6_bytes(string): ... -def to_sparse6_bytes(G, nodes: Incomplete | None = None, header: bool = True): ... +def to_sparse6_bytes(G, nodes=None, header: bool = True): ... @_dispatchable def read_sparse6(path): ... -def write_sparse6(G, path, nodes: Incomplete | None = None, header: bool = True) -> None: ... +def write_sparse6(G, path, nodes=None, header: bool = True) -> None: ... diff --git a/stubs/networkx/networkx/readwrite/text.pyi b/stubs/networkx/networkx/readwrite/text.pyi index 8a2bf32fb8f0..3a1fe2e87e48 100644 --- a/stubs/networkx/networkx/readwrite/text.pyi +++ b/stubs/networkx/networkx/readwrite/text.pyi @@ -49,19 +49,14 @@ class UtfUndirectedGlyphs(UtfBaseGlyphs): vertical_edge: ClassVar[str] def generate_network_text( - graph, - with_labels: bool = True, - sources: Incomplete | None = None, - max_depth: Incomplete | None = None, - ascii_only: bool = False, - vertical_chains: bool = False, + graph, with_labels: bool = True, sources=None, max_depth=None, ascii_only: bool = False, vertical_chains: bool = False ) -> Generator[Incomplete, None, Incomplete]: ... def write_network_text( graph, - path: Incomplete | None = None, + path=None, with_labels: bool = True, - sources: Incomplete | None = None, - max_depth: Incomplete | None = None, + sources=None, + max_depth=None, ascii_only: bool = False, end: str = "\n", vertical_chains=False, diff --git a/stubs/networkx/networkx/relabel.pyi b/stubs/networkx/networkx/relabel.pyi index 4951676ca0e1..71e03625f022 100644 --- a/stubs/networkx/networkx/relabel.pyi +++ b/stubs/networkx/networkx/relabel.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Hashable, Mapping from typing import Literal, TypeVar, overload @@ -26,5 +25,5 @@ def convert_node_labels_to_integers( G: Graph[Hashable], first_label: int = 0, ordering: Literal["default", "sorted", "increasing degree", "decreasing degree"] = "default", - label_attribute: Incomplete | None = None, + label_attribute=None, ) -> Graph[int]: ... diff --git a/stubs/networkx/networkx/utils/configs.pyi b/stubs/networkx/networkx/utils/configs.pyi index 1d165b9fb1b5..780ccf7233a2 100644 --- a/stubs/networkx/networkx/utils/configs.pyi +++ b/stubs/networkx/networkx/utils/configs.pyi @@ -24,7 +24,7 @@ if sys.version_info >= (3, 10): def __getitem__(self, key: str): ... def __setitem__(self, key: str, value) -> None: ... def __delitem__(self, key: str) -> None: ... - def get(self, key: str, default: Incomplete | None = None): ... + def get(self, key: str, default=None): ... def items(self) -> ItemsView[str, Incomplete]: ... def keys(self) -> KeysView[str]: ... def values(self) -> ValuesView[Incomplete]: ... 
@@ -50,7 +50,7 @@ else: def __getitem__(self, key: str): ... def __setitem__(self, key: str, value) -> None: ... def __delitem__(self, key: str) -> None: ... - def get(self, key: str, default: Incomplete | None = None): ... + def get(self, key: str, default=None): ... def items(self) -> ItemsView[str, Incomplete]: ... def keys(self) -> KeysView[str]: ... def values(self) -> ValuesView[Incomplete]: ... diff --git a/stubs/networkx/networkx/utils/heaps.pyi b/stubs/networkx/networkx/utils/heaps.pyi index 43cd69b1b358..95862f2da880 100644 --- a/stubs/networkx/networkx/utils/heaps.pyi +++ b/stubs/networkx/networkx/utils/heaps.pyi @@ -11,7 +11,7 @@ class MinHeap: def __init__(self) -> None: ... def min(self) -> None: ... def pop(self) -> None: ... - def get(self, key, default: Incomplete | None = None) -> None: ... + def get(self, key, default=None) -> None: ... def insert(self, key, value, allow_increase: bool = False) -> None: ... def __nonzero__(self): ... def __bool__(self) -> bool: ... @@ -29,12 +29,12 @@ class PairingHeap(MinHeap): def __init__(self) -> None: ... def min(self): ... def pop(self): ... - def get(self, key, default: Incomplete | None = None): ... + def get(self, key, default=None): ... def insert(self, key, value, allow_increase: bool = False): ... class BinaryHeap(MinHeap): def __init__(self) -> None: ... def min(self): ... def pop(self): ... - def get(self, key, default: Incomplete | None = None): ... + def get(self, key, default=None): ... def insert(self, key, value, allow_increase: bool = False): ... diff --git a/stubs/networkx/networkx/utils/mapped_queue.pyi b/stubs/networkx/networkx/utils/mapped_queue.pyi index e5eb305abb25..86e8c0e0ecde 100644 --- a/stubs/networkx/networkx/utils/mapped_queue.pyi +++ b/stubs/networkx/networkx/utils/mapped_queue.pyi @@ -17,9 +17,9 @@ class _HeapElement: class MappedQueue: heap: Incomplete position: Incomplete - def __init__(self, data: Incomplete | None = None) -> None: ... + def __init__(self, data=None) -> None: ... def __len__(self) -> int: ... - def push(self, elt, priority: Incomplete | None = None): ... + def push(self, elt, priority=None): ... def pop(self): ... - def update(self, elt, new, priority: Incomplete | None = None) -> None: ... + def update(self, elt, new, priority=None) -> None: ... def remove(self, elt) -> None: ... diff --git a/stubs/networkx/networkx/utils/misc.pyi b/stubs/networkx/networkx/utils/misc.pyi index 261e030b1df2..62fb3c6b3b94 100644 --- a/stubs/networkx/networkx/utils/misc.pyi +++ b/stubs/networkx/networkx/utils/misc.pyi @@ -1,5 +1,4 @@ import random -from _typeshed import Incomplete from types import ModuleType from typing_extensions import TypeAlias @@ -27,23 +26,23 @@ _RandomNumberGenerator: TypeAlias = ( ) _RandomState: TypeAlias = int | _RandomNumberGenerator | None -def flatten(obj, result: Incomplete | None = None): ... +def flatten(obj, result=None): ... def make_list_of_ints(sequence): ... -def dict_to_numpy_array(d, mapping: Incomplete | None = None): ... +def dict_to_numpy_array(d, mapping=None): ... def arbitrary_element(iterable): ... def pairwise(iterable, cyclic: bool = False): ... def groups(many_to_one): ... -def create_random_state(random_state: Incomplete | None = None): ... +def create_random_state(random_state=None): ... class PythonRandomViaNumpyBits(random.Random): def __init__(self, rng: numpy.random.Generator | None = None) -> None: ... def getrandbits(self, k: int) -> int: ... class PythonRandomInterface: - def __init__(self, rng: Incomplete | None = None) -> None: ... 
+ def __init__(self, rng=None) -> None: ... def random(self): ... def uniform(self, a, b): ... - def randrange(self, a, b: Incomplete | None = None): ... + def randrange(self, a, b=None): ... def choice(self, seq): ... def gauss(self, mu, sigma): ... def shuffle(self, seq): ... diff --git a/stubs/networkx/networkx/utils/random_sequence.pyi b/stubs/networkx/networkx/utils/random_sequence.pyi index 3d89df4281c0..101004bba6ba 100644 --- a/stubs/networkx/networkx/utils/random_sequence.pyi +++ b/stubs/networkx/networkx/utils/random_sequence.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - __all__ = [ "powerlaw_sequence", "zipf_rv", @@ -9,11 +7,9 @@ __all__ = [ "weighted_choice", ] -def powerlaw_sequence(n, exponent: float = 2.0, seed: Incomplete | None = None): ... -def zipf_rv(alpha, xmin: int = 1, seed: Incomplete | None = None): ... +def powerlaw_sequence(n, exponent: float = 2.0, seed=None): ... +def zipf_rv(alpha, xmin: int = 1, seed=None): ... def cumulative_distribution(distribution): ... -def discrete_sequence( - n, distribution: Incomplete | None = None, cdistribution: Incomplete | None = None, seed: Incomplete | None = None -): ... -def random_weighted_sample(mapping, k, seed: Incomplete | None = None): ... -def weighted_choice(mapping, seed: Incomplete | None = None): ... +def discrete_sequence(n, distribution=None, cdistribution=None, seed=None): ... +def random_weighted_sample(mapping, k, seed=None): ... +def weighted_choice(mapping, seed=None): ... diff --git a/stubs/networkx/networkx/utils/rcm.pyi b/stubs/networkx/networkx/utils/rcm.pyi index cfe7c2debc0b..cf8ffb8abf2b 100644 --- a/stubs/networkx/networkx/utils/rcm.pyi +++ b/stubs/networkx/networkx/utils/rcm.pyi @@ -3,5 +3,5 @@ from collections.abc import Generator __all__ = ["cuthill_mckee_ordering", "reverse_cuthill_mckee_ordering"] -def cuthill_mckee_ordering(G, heuristic: Incomplete | None = None) -> Generator[Incomplete, Incomplete, None]: ... -def reverse_cuthill_mckee_ordering(G, heuristic: Incomplete | None = None): ... +def cuthill_mckee_ordering(G, heuristic=None) -> Generator[Incomplete, Incomplete, None]: ... +def reverse_cuthill_mckee_ordering(G, heuristic=None): ... diff --git a/stubs/networkx/networkx/utils/union_find.pyi b/stubs/networkx/networkx/utils/union_find.pyi index fe50ede3c4df..97df662a3984 100644 --- a/stubs/networkx/networkx/utils/union_find.pyi +++ b/stubs/networkx/networkx/utils/union_find.pyi @@ -4,7 +4,7 @@ from collections.abc import Generator, Iterator class UnionFind: parents: Incomplete weights: Incomplete - def __init__(self, elements: Incomplete | None = None) -> None: ... + def __init__(self, elements=None) -> None: ... def __getitem__(self, object): ... def __iter__(self) -> Iterator[Incomplete]: ... def to_sets(self) -> Generator[Incomplete, Incomplete, None]: ... 
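Note on the networkx hunks above: they all apply one mechanical pattern. Optional parameters previously annotated as `Incomplete | None = None` are rewritten as bare `=None` defaults, and the now-unused `from _typeshed import Incomplete` imports are dropped wherever nothing else in the file needs them. A minimal before/after sketch of that convention, using hypothetical function names that are not part of the patch:

    # sketch.pyi -- illustrative stub fragment, not taken from the patch
    from _typeshed import Incomplete

    # Before: the optional parameter spells out the union with Incomplete.
    def example_old(n, seed: Incomplete | None = None): ...

    # After: the annotation is dropped; in a stub, an unannotated parameter
    # with a None default still reads as "type unknown, optional", so the
    # shorter spelling carries the same information with less noise.
    def example_new(n, seed=None): ...
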
From 1e78265586507b8f93610eaf798f95a9979877fa Mon Sep 17 00:00:00 2001 From: David Salvisberg Date: Tue, 13 May 2025 12:23:36 +0200 Subject: [PATCH 359/388] Fix regression in definition of `bleach.sanitizer._Filter` (#14041) --- stubs/bleach/bleach/sanitizer.pyi | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/stubs/bleach/bleach/sanitizer.pyi b/stubs/bleach/bleach/sanitizer.pyi index f13e8637867b..a86be65feae6 100644 --- a/stubs/bleach/bleach/sanitizer.pyi +++ b/stubs/bleach/bleach/sanitizer.pyi @@ -1,6 +1,6 @@ from collections.abc import Callable, Container, Iterable, Iterator from re import Pattern -from typing import Final, Protocol +from typing import Final, Protocol, type_check_only from typing_extensions import TypeAlias from html5lib.filters.base import Filter @@ -22,9 +22,13 @@ INVISIBLE_REPLACEMENT_CHAR: Final = "?" class NoCssSanitizerWarning(UserWarning): ... -# A html5lib Filter class -class _Filter(Protocol): - def __call__(self, *, source: BleachSanitizerFilter) -> BleachSanitizerFilter: ... +@type_check_only +class _FilterConstructor(Protocol): + def __call__(self, *, source: BleachSanitizerFilter) -> Filter: ... + +# _FilterConstructor used to be called _Filter +# this alias is obsolete and can potentially be removed in the future +_Filter: TypeAlias = _FilterConstructor # noqa: Y047 _AttributeFilter: TypeAlias = Callable[[str, str, str], bool] _AttributeDict: TypeAlias = dict[str, list[str] | _AttributeFilter] | dict[str, list[str]] | dict[str, _AttributeFilter] @@ -48,7 +52,7 @@ class Cleaner: protocols: Iterable[str] = ..., strip: bool = False, strip_comments: bool = True, - filters: Iterable[_Filter] | None = None, + filters: Iterable[_FilterConstructor] | None = None, css_sanitizer: CSSSanitizer | None = None, ) -> None: ... def clean(self, text: str) -> str: ... 
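For context on the bleach change above: `_FilterConstructor` describes html5lib-style filter classes, which are called with a keyword-only `source` argument and return a `Filter`; a `html5lib.filters.base.Filter` subclass typically satisfies it, and the old `_Filter` name is kept as an alias for backwards compatibility. A small sketch of a conforming filter follows; the class and its behaviour are illustrative only, not part of bleach:

    # Hypothetical html5lib filter that satisfies _FilterConstructor.
    from html5lib.filters.base import Filter

    class UppercaseTextFilter(Filter):
        # Yields the token stream unchanged except that text tokens are
        # upper-cased. Filter.__init__ stores `source`, so calling
        # UppercaseTextFilter(source=...) returns a Filter, as the
        # protocol requires.
        def __iter__(self):
            for token in super().__iter__():
                if token.get("type") in ("Characters", "SpaceCharacters"):
                    token = {**token, "data": token["data"].upper()}
                yield token

    # Usage with the Cleaner signature from this patch (sketch only):
    # from bleach.sanitizer import Cleaner
    # cleaner = Cleaner(filters=[UppercaseTextFilter])
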
From 032dd6f1e014ea1866a9fabddc6436f824772364 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 13 May 2025 10:28:14 +0000 Subject: [PATCH 360/388] Improve `oauthlib.openid.connect.core` (#13966) --- .../connect/core/endpoints/pre_configured.pyi | 69 ++++++++++++------- .../connect/core/endpoints/userinfo.pyi | 24 ++++--- .../openid/connect/core/exceptions.pyi | 6 +- .../core/grant_types/authorization_code.pyi | 25 +++++-- .../openid/connect/core/grant_types/base.pyi | 14 ++-- .../connect/core/grant_types/dispatchers.pyi | 37 +++++----- .../connect/core/grant_types/hybrid.pyi | 30 +++++--- .../connect/core/grant_types/implicit.pyi | 27 ++++++-- .../core/grant_types/refresh_token.pyi | 22 ++++-- .../openid/connect/core/request_validator.pyi | 28 +++++--- .../oauthlib/openid/connect/core/tokens.pyi | 28 ++++---- 11 files changed, 201 insertions(+), 109 deletions(-) diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi index 7ad48ddb26d4..60af36016533 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi @@ -1,36 +1,53 @@ -from _typeshed import Incomplete +from collections.abc import Callable from typing import Any +from oauthlib.common import Request from oauthlib.oauth2.rfc6749.endpoints import ( - AuthorizationEndpoint as AuthorizationEndpoint, - IntrospectEndpoint as IntrospectEndpoint, - ResourceEndpoint as ResourceEndpoint, - RevocationEndpoint as RevocationEndpoint, - TokenEndpoint as TokenEndpoint, + AuthorizationEndpoint, + IntrospectEndpoint, + ResourceEndpoint, + RevocationEndpoint, + TokenEndpoint, ) +from oauthlib.oauth2.rfc6749.grant_types import ( + AuthorizationCodeGrant as OAuth2AuthorizationCodeGrant, + ClientCredentialsGrant, + ImplicitGrant as OAuth2ImplicitGrant, + RefreshTokenGrant, + ResourceOwnerPasswordCredentialsGrant, +) +from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator +from oauthlib.oauth2.rfc6749.tokens import BearerToken -from .userinfo import UserInfoEndpoint as UserInfoEndpoint +from ..grant_types import AuthorizationCodeGrant, HybridGrant, ImplicitGrant +from ..grant_types.dispatchers import ( + AuthorizationCodeGrantDispatcher, + AuthorizationTokenGrantDispatcher, + ImplicitTokenGrantDispatcher, +) +from ..tokens import JWTToken +from .userinfo import UserInfoEndpoint class Server(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, ResourceEndpoint, RevocationEndpoint, UserInfoEndpoint): - auth_grant: Any - implicit_grant: Any - password_grant: Any - credentials_grant: Any - refresh_grant: Any - openid_connect_auth: Any - openid_connect_implicit: Any - openid_connect_hybrid: Any - bearer: Any - jwt: Any - auth_grant_choice: Any - implicit_grant_choice: Any - token_grant_choice: Any + auth_grant: OAuth2AuthorizationCodeGrant + implicit_grant: OAuth2ImplicitGrant + password_grant: ResourceOwnerPasswordCredentialsGrant + credentials_grant: ClientCredentialsGrant + refresh_grant: RefreshTokenGrant + openid_connect_auth: AuthorizationCodeGrant + openid_connect_implicit: ImplicitGrant + openid_connect_hybrid: HybridGrant + bearer: BearerToken + jwt: JWTToken + auth_grant_choice: AuthorizationCodeGrantDispatcher + implicit_grant_choice: ImplicitTokenGrantDispatcher + token_grant_choice: AuthorizationTokenGrantDispatcher def __init__( self, - request_validator, - token_expires_in: Incomplete | 
None = None, - token_generator: Incomplete | None = None, - refresh_token_generator: Incomplete | None = None, - *args, - **kwargs, + request_validator: OAuth2RequestValidator, + token_expires_in: int | Callable[[Request], int] | None = None, + token_generator: Callable[[Request], str] | None = None, + refresh_token_generator: Callable[[Request], str] | None = None, + *args: Any, # actually, these are not used + **kwargs: Any, # actually, these are not used ) -> None: ... diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi index 1b22e896f817..13a8d4826a71 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi @@ -1,16 +1,22 @@ -from _typeshed import Incomplete +from collections.abc import Mapping from logging import Logger -from typing import Any -from oauthlib.oauth2.rfc6749.endpoints.base import BaseEndpoint as BaseEndpoint +from oauthlib.common import Request, _HTTPMethod +from oauthlib.oauth2.rfc6749.endpoints.base import BaseEndpoint +from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator +from oauthlib.oauth2.rfc6749.tokens import BearerToken log: Logger class UserInfoEndpoint(BaseEndpoint): - bearer: Any - request_validator: Any - def __init__(self, request_validator) -> None: ... + bearer: BearerToken + request_validator: OAuth2RequestValidator + def __init__(self, request_validator: OAuth2RequestValidator) -> None: ... def create_userinfo_response( - self, uri, http_method: str = "GET", body: Incomplete | None = None, headers: Incomplete | None = None - ): ... - def validate_userinfo_request(self, request) -> None: ... + self, + uri: str, + http_method: _HTTPMethod = "GET", + body: str | dict[str, str] | list[tuple[str, str]] | None = None, + headers: Mapping[str, str] | None = None, + ) -> tuple[dict[str, str], str, int]: ... + def validate_userinfo_request(self, request: Request) -> None: ... diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/exceptions.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/exceptions.pyi index ceb668657a43..b552251df96a 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/exceptions.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/exceptions.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - -from oauthlib.oauth2.rfc6749.errors import FatalClientError as FatalClientError, OAuth2Error as OAuth2Error +from oauthlib.oauth2.rfc6749.errors import FatalClientError, OAuth2Error class FatalOpenIDClientError(FatalClientError): ... class OpenIDClientError(OAuth2Error): ... @@ -50,4 +48,4 @@ class InsufficientScopeError(OAuth2Error): status_code: int description: str -def raise_from_error(error, params: Incomplete | None = None) -> None: ... +def raise_from_error(error: object, params: dict[str, str] | None = None) -> None: ... 
diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi index a244bccd5eb4..ff419e356d09 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi @@ -1,12 +1,25 @@ -from _typeshed import Incomplete +from collections.abc import Iterable from logging import Logger -from typing import Any -from .base import GrantTypeBase as GrantTypeBase +from oauthlib.common import Request +from oauthlib.oauth2.rfc6749.grant_types.authorization_code import AuthorizationCodeGrant as OAuth2AuthorizationCodeGrant +from oauthlib.oauth2.rfc6749.grant_types.base import _AuthValidator, _TokenValidator +from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator + +from .base import GrantTypeBase log: Logger class AuthorizationCodeGrant(GrantTypeBase): - proxy_target: Any - def __init__(self, request_validator: Incomplete | None = None, **kwargs) -> None: ... - def add_id_token(self, token, token_handler, request): ... # type: ignore[override] + proxy_target: OAuth2AuthorizationCodeGrant + def __init__( + self, + request_validator: OAuth2RequestValidator | None = None, + *, + post_auth: Iterable[_AuthValidator] | None = None, + post_token: Iterable[_TokenValidator] | None = None, + pre_auth: Iterable[_AuthValidator] | None = None, + pre_token: Iterable[_TokenValidator] | None = None, + **kwargs, + ) -> None: ... + def add_id_token(self, token, token_handler, request: Request): ... # type: ignore[override] diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi index 0e049a5bcb0a..24bb8d409449 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi @@ -1,14 +1,20 @@ +from _hashlib import HASH from _typeshed import Incomplete +from collections.abc import Callable from logging import Logger +from oauthlib.common import Request + log: Logger class GrantTypeBase: def __getattr__(self, attr: str): ... def __setattr__(self, attr: str, value) -> None: ... - def validate_authorization_request(self, request): ... - def id_token_hash(self, value, hashfunc=...): ... - def add_id_token(self, token, token_handler, request, nonce: Incomplete | None = None): ... - def openid_authorization_validator(self, request): ... + def validate_authorization_request(self, request: Request): ... + def id_token_hash( + self, value: str, hashfunc: Callable[..., HASH] = ... # Arguments: ReadableBuffer (string) and bool (usedforsecurity) + ) -> str: ... + def add_id_token(self, token, token_handler, request: Request, nonce: Incomplete | None = None): ... + def openid_authorization_validator(self, request: Request): ... 
OpenIDConnectBase = GrantTypeBase diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi index bfc918c0b74b..4dcf5a690771 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi @@ -1,32 +1,37 @@ from _typeshed import Incomplete from logging import Logger -from typing import Any + +from oauthlib.common import Request +from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator log: Logger class Dispatcher: - default_grant: Any - oidc_grant: Any + default_grant: Incomplete | None + oidc_grant: Incomplete | None class AuthorizationCodeGrantDispatcher(Dispatcher): - default_grant: Any - oidc_grant: Any + default_grant: Incomplete | None + oidc_grant: Incomplete | None def __init__(self, default_grant: Incomplete | None = None, oidc_grant: Incomplete | None = None) -> None: ... - def create_authorization_response(self, request, token_handler): ... - def validate_authorization_request(self, request): ... + def create_authorization_response(self, request: Request, token_handler): ... + def validate_authorization_request(self, request: Request): ... class ImplicitTokenGrantDispatcher(Dispatcher): - default_grant: Any - oidc_grant: Any + default_grant: Incomplete | None + oidc_grant: Incomplete | None def __init__(self, default_grant: Incomplete | None = None, oidc_grant: Incomplete | None = None) -> None: ... - def create_authorization_response(self, request, token_handler): ... - def validate_authorization_request(self, request): ... + def create_authorization_response(self, request: Request, token_handler): ... + def validate_authorization_request(self, request: Request): ... class AuthorizationTokenGrantDispatcher(Dispatcher): - default_grant: Any - oidc_grant: Any - request_validator: Any + default_grant: Incomplete | None + oidc_grant: Incomplete | None + request_validator: OAuth2RequestValidator def __init__( - self, request_validator, default_grant: Incomplete | None = None, oidc_grant: Incomplete | None = None + self, + request_validator: OAuth2RequestValidator, + default_grant: Incomplete | None = None, + oidc_grant: Incomplete | None = None, ) -> None: ... - def create_token_response(self, request, token_handler): ... + def create_token_response(self, request: Request, token_handler): ... 
diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi index 9c909a884835..58532a495671 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi @@ -1,17 +1,29 @@ -from _typeshed import Incomplete +from collections.abc import Iterable from logging import Logger -from typing import Any +from oauthlib.common import Request from oauthlib.oauth2.rfc6749.errors import InvalidRequestError as InvalidRequestError +from oauthlib.oauth2.rfc6749.grant_types.authorization_code import AuthorizationCodeGrant as OAuth2AuthorizationCodeGrant +from oauthlib.oauth2.rfc6749.grant_types.base import _AuthValidator, _TokenValidator +from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator -from ..request_validator import RequestValidator as RequestValidator -from .base import GrantTypeBase as GrantTypeBase +from ..request_validator import RequestValidator +from .base import GrantTypeBase log: Logger class HybridGrant(GrantTypeBase): - request_validator: Any - proxy_target: Any - def __init__(self, request_validator: Incomplete | None = None, **kwargs) -> None: ... - def add_id_token(self, token, token_handler, request): ... # type: ignore[override] - def openid_authorization_validator(self, request): ... + request_validator: OAuth2RequestValidator | RequestValidator + proxy_target: OAuth2AuthorizationCodeGrant + def __init__( + self, + request_validator: OAuth2RequestValidator | RequestValidator | None = None, + *, + post_auth: Iterable[_AuthValidator] | None = None, + post_token: Iterable[_TokenValidator] | None = None, + pre_auth: Iterable[_AuthValidator] | None = None, + pre_token: Iterable[_TokenValidator] | None = None, + **kwargs, + ) -> None: ... + def add_id_token(self, token, token_handler, request: Request): ... # type: ignore[override] + def openid_authorization_validator(self, request: Request): ... diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi index 138ab6e228ee..366627c0b008 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi @@ -1,13 +1,26 @@ -from _typeshed import Incomplete +from collections.abc import Iterable from logging import Logger -from typing import Any -from .base import GrantTypeBase as GrantTypeBase +from oauthlib.common import Request +from oauthlib.oauth2.rfc6749.grant_types.base import _AuthValidator, _TokenValidator +from oauthlib.oauth2.rfc6749.grant_types.implicit import ImplicitGrant as OAuth2ImplicitGrant +from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator + +from .base import GrantTypeBase log: Logger class ImplicitGrant(GrantTypeBase): - proxy_target: Any - def __init__(self, request_validator: Incomplete | None = None, **kwargs) -> None: ... - def add_id_token(self, token, token_handler, request): ... # type: ignore[override] - def openid_authorization_validator(self, request): ... 
+ proxy_target: OAuth2ImplicitGrant + def __init__( + self, + request_validator: OAuth2RequestValidator | None = None, + *, + post_auth: Iterable[_AuthValidator] | None = None, + post_token: Iterable[_TokenValidator] | None = None, + pre_auth: Iterable[_AuthValidator] | None = None, + pre_token: Iterable[_TokenValidator] | None = None, + **kwargs, + ) -> None: ... + def add_id_token(self, token, token_handler, request: Request): ... # type: ignore[override] + def openid_authorization_validator(self, request: Request): ... diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/refresh_token.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/refresh_token.pyi index cf126c21c85e..b9b953ebe478 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/refresh_token.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/refresh_token.pyi @@ -1,11 +1,25 @@ -from _typeshed import Incomplete +from collections.abc import Iterable from logging import Logger +from oauthlib.common import Request +from oauthlib.oauth2.rfc6749.grant_types.base import _AuthValidator, _TokenValidator +from oauthlib.oauth2.rfc6749.grant_types.refresh_token import RefreshTokenGrant as OAuth2RefreshTokenGrant +from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator + from .base import GrantTypeBase log: Logger class RefreshTokenGrant(GrantTypeBase): - proxy_target: Incomplete - def __init__(self, request_validator: Incomplete | None = None, **kwargs) -> None: ... - def add_id_token(self, token, token_handler, request): ... # type: ignore[override] + proxy_target: OAuth2RefreshTokenGrant + def __init__( + self, + request_validator: OAuth2RequestValidator | None = None, + *, + post_auth: Iterable[_AuthValidator] | None = None, + post_token: Iterable[_TokenValidator] | None = None, + pre_auth: Iterable[_AuthValidator] | None = None, + pre_token: Iterable[_TokenValidator] | None = None, + **kwargs, + ) -> None: ... + def add_id_token(self, token, token_handler, request: Request): ... # type: ignore[override] diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi index 4e29f272031d..ca6de4bf5752 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi @@ -1,18 +1,24 @@ +from _typeshed import Incomplete +from collections.abc import Callable from logging import Logger +from oauthlib.common import Request from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator log: Logger class RequestValidator(OAuth2RequestValidator): - def get_authorization_code_scopes(self, client_id, code, redirect_uri, request) -> None: ... - def get_authorization_code_nonce(self, client_id, code, redirect_uri, request) -> None: ... - def get_jwt_bearer_token(self, token, token_handler, request) -> None: ... - def get_id_token(self, token, token_handler, request) -> None: ... - def finalize_id_token(self, id_token, token, token_handler, request) -> None: ... - def validate_jwt_bearer_token(self, token, scopes, request) -> None: ... - def validate_id_token(self, token, scopes, request) -> None: ... - def validate_silent_authorization(self, request) -> None: ... - def validate_silent_login(self, request) -> None: ... - def validate_user_match(self, id_token_hint, scopes, claims, request) -> None: ... - def get_userinfo_claims(self, request) -> None: ... 
+ def get_authorization_code_scopes(self, client_id: str, code: str, redirect_uri: str, request) -> list[str]: ... + def get_authorization_code_nonce(self, client_id: str, code: str, redirect_uri: str, request) -> str: ... + def get_jwt_bearer_token(self, token: dict[str, Incomplete], token_handler, request: Request) -> str: ... + def get_id_token(self, token: dict[str, Incomplete], token_handler, request: Request) -> str: ... + def finalize_id_token( + self, id_token: dict[str, Incomplete], token: dict[str, Incomplete], token_handler: Callable[..., str], request: Request + ) -> str: ... + def validate_jwt_bearer_token(self, token: str, scopes, request: Request) -> bool: ... + def validate_id_token(self, token: str, scopes, request: Request) -> bool: ... + def validate_silent_authorization(self, request: Request) -> bool: ... + def validate_silent_login(self, request: Request) -> bool: ... + def validate_user_match(self, id_token_hint: str, scopes, claims: dict[str, Incomplete], request: Request) -> bool: ... + def get_userinfo_claims(self, request: Request) -> dict[str, Incomplete] | str: ... + def refresh_id_token(self, request: Request) -> bool: ... diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/tokens.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/tokens.pyi index 4bff4f27f0eb..081db3dec53a 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/tokens.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/tokens.pyi @@ -1,20 +1,22 @@ -from _typeshed import Incomplete -from typing import Any +from collections.abc import Callable +from oauthlib.common import Request from oauthlib.oauth2.rfc6749.tokens import TokenBase as TokenBase +from .request_validator import RequestValidator + class JWTToken(TokenBase): - request_validator: Any - token_generator: Any - refresh_token_generator: Any - expires_in: Any + request_validator: RequestValidator + token_generator: Callable[[Request], str] | Callable[[Request, bool], str] + refresh_token_generator: Callable[[Request], str] | Callable[[Request, bool], str] + expires_in: int | Callable[[Request], int] def __init__( self, - request_validator: Incomplete | None = None, - token_generator: Incomplete | None = None, - expires_in: Incomplete | None = None, - refresh_token_generator: Incomplete | None = None, + request_validator: RequestValidator | None = None, + token_generator: Callable[[Request], str] | None = None, + expires_in: int | Callable[[Request], int] | None = None, + refresh_token_generator: Callable[[Request], str] | None = None, ) -> None: ... - def create_token(self, request, refresh_token: bool = False): ... - def validate_request(self, request): ... - def estimate_type(self, request): ... + def create_token(self, request: Request, refresh_token: bool = False): ... + def validate_request(self, request: Request): ... + def estimate_type(self, request: Request): ... 
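
To make the JWTToken annotations above concrete, here is a minimal construction sketch. It is not part of the patch; issue_jwt and the lifetime are invented, and a real deployment would subclass RequestValidator and sign real JWTs.

    from oauthlib.common import Request
    from oauthlib.openid.connect.core.request_validator import RequestValidator
    from oauthlib.openid.connect.core.tokens import JWTToken

    def issue_jwt(request: Request) -> str:
        # Stand-in generator; real code would build and sign a JWT from the request.
        return "header.payload.signature"

    token_backend = JWTToken(
        request_validator=RequestValidator(),  # in practice a project-specific subclass
        token_generator=issue_jwt,
        expires_in=3600,                       # the stub also allows a callable taking the Request
    )
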
From 7176c7ee7a2425aa926fd78260e7b7220f5985a8 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 13 May 2025 10:45:07 +0000 Subject: [PATCH 361/388] Bump icalendar to 6.2.* (#13963) --- stubs/icalendar/@tests/stubtest_allowlist.txt | 1 + stubs/icalendar/METADATA.toml | 2 +- stubs/icalendar/icalendar/__init__.pyi | 38 +++- stubs/icalendar/icalendar/alarms.pyi | 20 +- stubs/icalendar/icalendar/attr.pyi | 24 +++ stubs/icalendar/icalendar/cal.pyi | 176 +++++++++++++++++- stubs/icalendar/icalendar/enums.pyi | 44 +++++ stubs/icalendar/icalendar/error.pyi | 15 ++ stubs/icalendar/icalendar/param.pyi | 62 ++++++ stubs/icalendar/icalendar/parser.pyi | 22 ++- stubs/icalendar/icalendar/prop.pyi | 28 ++- 11 files changed, 393 insertions(+), 39 deletions(-) create mode 100644 stubs/icalendar/icalendar/attr.pyi create mode 100644 stubs/icalendar/icalendar/enums.pyi create mode 100644 stubs/icalendar/icalendar/error.pyi create mode 100644 stubs/icalendar/icalendar/param.pyi diff --git a/stubs/icalendar/@tests/stubtest_allowlist.txt b/stubs/icalendar/@tests/stubtest_allowlist.txt index b03841be6f7f..517487d97d7d 100644 --- a/stubs/icalendar/@tests/stubtest_allowlist.txt +++ b/stubs/icalendar/@tests/stubtest_allowlist.txt @@ -12,6 +12,7 @@ icalendar.cal.Component.set_inline # Stubtest gets confused by multiple inheritance. icalendar.prop.vSkip.__new__ +icalendar.vSkip.__new__ # Stubtest incorrectly reports that stub argument "cls" should be # positional or keyword. diff --git a/stubs/icalendar/METADATA.toml b/stubs/icalendar/METADATA.toml index 2a7b8178f541..4dca3baa6c54 100644 --- a/stubs/icalendar/METADATA.toml +++ b/stubs/icalendar/METADATA.toml @@ -1,4 +1,4 @@ -version = "6.1.3" +version = "6.2.*" upstream_repository = "https://github.com/collective/icalendar" requires = ["types-python-dateutil", "types-pytz"] diff --git a/stubs/icalendar/icalendar/__init__.pyi b/stubs/icalendar/icalendar/__init__.pyi index 0e862945c3e2..e793464c5b2e 100644 --- a/stubs/icalendar/icalendar/__init__.pyi +++ b/stubs/icalendar/icalendar/__init__.pyi @@ -1,12 +1,5 @@ from . 
import version as version_mod -from .alarms import ( - Alarms as Alarms, - AlarmTime as AlarmTime, - ComponentEndMissing as ComponentEndMissing, - ComponentStartMissing as ComponentStartMissing, - IncompleteAlarmInformation as IncompleteAlarmInformation, - LocalTimezoneMissing as LocalTimezoneMissing, -) +from .alarms import Alarms as Alarms, AlarmTime as AlarmTime from .cal import ( Alarm as Alarm, Calendar as Calendar, @@ -14,14 +7,29 @@ from .cal import ( ComponentFactory as ComponentFactory, Event as Event, FreeBusy as FreeBusy, - IncompleteComponent as IncompleteComponent, - InvalidCalendar as InvalidCalendar, Journal as Journal, Timezone as Timezone, TimezoneDaylight as TimezoneDaylight, TimezoneStandard as TimezoneStandard, Todo as Todo, ) +from .enums import ( + CUTYPE as CUTYPE, + FBTYPE as FBTYPE, + PARTSTAT as PARTSTAT, + RANGE as RANGE, + RELATED as RELATED, + RELTYPE as RELTYPE, + ROLE as ROLE, +) +from .error import ( + ComponentEndMissing as ComponentEndMissing, + ComponentStartMissing as ComponentStartMissing, + IncompleteAlarmInformation as IncompleteAlarmInformation, + IncompleteComponent as IncompleteComponent, + InvalidCalendar as InvalidCalendar, + LocalTimezoneMissing as LocalTimezoneMissing, +) from .parser import Parameters as Parameters, q_join as q_join, q_split as q_split from .prop import ( TypesFactory as TypesFactory, @@ -39,6 +47,7 @@ from .prop import ( vMonth as vMonth, vPeriod as vPeriod, vRecur as vRecur, + vSkip as vSkip, vText as vText, vTime as vTime, vUri as vUri, @@ -96,6 +105,15 @@ __all__ = [ "ComponentStartMissing", "IncompleteAlarmInformation", "LocalTimezoneMissing", + "CUTYPE", + "FBTYPE", + "PARTSTAT", + "RANGE", + "vSkip", + "RELATED", + "vSkip", + "RELTYPE", + "ROLE", ] __version__ = version_mod.__version__ diff --git a/stubs/icalendar/icalendar/alarms.pyi b/stubs/icalendar/icalendar/alarms.pyi index 52eab13aaebf..d62f5641d024 100644 --- a/stubs/icalendar/icalendar/alarms.pyi +++ b/stubs/icalendar/icalendar/alarms.pyi @@ -2,23 +2,17 @@ import datetime from typing_extensions import TypeAlias from .cal import Alarm, Event, Todo +from .error import ( + ComponentEndMissing as ComponentEndMissing, + ComponentStartMissing as ComponentStartMissing, + IncompleteAlarmInformation as IncompleteAlarmInformation, + LocalTimezoneMissing as LocalTimezoneMissing, +) -__all__ = [ - "Alarms", - "AlarmTime", - "IncompleteAlarmInformation", - "ComponentEndMissing", - "ComponentStartMissing", - "LocalTimezoneMissing", -] +__all__ = ["Alarms", "AlarmTime", "IncompleteAlarmInformation", "ComponentEndMissing", "ComponentStartMissing"] Parent: TypeAlias = Event | Todo -class IncompleteAlarmInformation(ValueError): ... -class ComponentStartMissing(IncompleteAlarmInformation): ... -class ComponentEndMissing(IncompleteAlarmInformation): ... -class LocalTimezoneMissing(IncompleteAlarmInformation): ... - class AlarmTime: def __init__( self, diff --git a/stubs/icalendar/icalendar/attr.pyi b/stubs/icalendar/icalendar/attr.pyi new file mode 100644 index 000000000000..963a54752d3f --- /dev/null +++ b/stubs/icalendar/icalendar/attr.pyi @@ -0,0 +1,24 @@ +rdates_property: property +exdates_property: property +rrules_property: property + +def multi_language_text_property(main_prop: str, compatibility_prop: str, doc: str) -> property: ... +def single_int_property(prop: str, default: int, doc: str) -> property: ... +def single_utc_property(name: str, docs: str) -> property: ... +def single_string_property(name: str, docs: str, other_name: str | None = None) -> property: ... 
+ +color_property: property +sequence_property: property +categories_property: property + +__all__ = [ + "single_utc_property", + "color_property", + "multi_language_text_property", + "single_int_property", + "sequence_property", + "categories_property", + "rdates_property", + "exdates_property", + "rrules_property", +] diff --git a/stubs/icalendar/icalendar/cal.pyi b/stubs/icalendar/icalendar/cal.pyi index dfd5161666bb..23b4942e03da 100644 --- a/stubs/icalendar/icalendar/cal.pyi +++ b/stubs/icalendar/icalendar/cal.pyi @@ -6,8 +6,9 @@ from typing_extensions import Self from .alarms import Alarms from .caselessdict import CaselessDict +from .error import IncompleteComponent as IncompleteComponent from .parser import Contentline, Contentlines -from .prop import TypesFactory +from .prop import TypesFactory, vRecur from .timezone.tzp import TZP __all__ = [ @@ -26,7 +27,6 @@ __all__ = [ "component_factory", "get_example", "IncompleteComponent", - "InvalidCalendar", ] def get_example(component_directory: str, example_name: str) -> bytes: ... @@ -36,11 +36,6 @@ class ComponentFactory(CaselessDict[Incomplete]): INLINE: CaselessDict[int] -class InvalidCalendar(ValueError): ... -class IncompleteComponent(ValueError): ... - -def create_utc_property(name: str, docs: str) -> property: ... - class Component(CaselessDict[Incomplete]): name: ClassVar[str | None] required: ClassVar[tuple[str, ...]] @@ -84,10 +79,14 @@ class Component(CaselessDict[Incomplete]): def DTSTAMP(self) -> datetime.datetime | None: ... @DTSTAMP.setter def DTSTAMP(self, value: datetime.datetime) -> None: ... + @DTSTAMP.deleter + def DTSTAMP(self) -> None: ... @property def LAST_MODIFIED(self) -> datetime.datetime | None: ... @LAST_MODIFIED.setter def LAST_MODIFIED(self, value: datetime.datetime) -> None: ... + @LAST_MODIFIED.deleter + def LAST_MODIFIED(self) -> None: ... def is_thunderbird(self) -> bool: ... # type_def is a TypeForm @@ -105,14 +104,20 @@ class Event(Component): def DTSTART(self) -> datetime.date | datetime.datetime | None: ... @DTSTART.setter def DTSTART(self, value: datetime.date | datetime.datetime | None) -> None: ... + @DTSTART.deleter + def DTSTART(self) -> None: ... @property def DTEND(self) -> datetime.date | datetime.datetime | None: ... @DTEND.setter def DTEND(self, value: datetime.date | datetime.datetime | None) -> None: ... + @DTEND.deleter + def DTEND(self) -> None: ... @property def DURATION(self) -> datetime.timedelta | None: ... @DURATION.setter def DURATION(self, value: datetime.timedelta | None) -> None: ... + @DURATION.deleter + def DURATION(self) -> None: ... @property def duration(self) -> datetime.timedelta: ... @property @@ -127,10 +132,40 @@ class Event(Component): def X_MOZ_SNOOZE_TIME(self) -> datetime.datetime | None: ... @X_MOZ_SNOOZE_TIME.setter def X_MOZ_SNOOZE_TIME(self, value: datetime.datetime) -> None: ... + @X_MOZ_SNOOZE_TIME.deleter + def X_MOZ_SNOOZE_TIME(self) -> None: ... @property def X_MOZ_LASTACK(self) -> datetime.datetime | None: ... @X_MOZ_LASTACK.setter def X_MOZ_LASTACK(self, value: datetime.datetime) -> None: ... + @X_MOZ_LASTACK.deleter + def X_MOZ_LASTACK(self) -> None: ... + @property + def color(self) -> str: ... + @color.setter + def color(self, value: str) -> None: ... + @color.deleter + def color(self) -> None: ... + @property + def sequence(self) -> int: ... + @sequence.setter + def sequence(self, value: int) -> None: ... + @sequence.deleter + def sequence(self) -> None: ... + @property + def categories(self) -> list[str]: ... 
+ @categories.setter + def categories(self, cats: list[str]) -> None: ... + @categories.deleter + def categories(self) -> None: ... + @property + def rdates( + self, + ) -> list[tuple[datetime.date, None] | tuple[datetime.datetime, None] | tuple[datetime.datetime, datetime.datetime]]: ... + @property + def exdates(self) -> list[datetime.date | datetime.datetime]: ... + @property + def rrules(self) -> list[vRecur]: ... class Todo(Component): name: ClassVar[Literal["VTODO"]] @@ -138,14 +173,20 @@ class Todo(Component): def DTSTART(self) -> datetime.datetime | datetime.date | None: ... @DTSTART.setter def DTSTART(self, value: datetime.datetime | datetime.date | None) -> None: ... + @DTSTART.deleter + def DTSTART(self) -> None: ... @property def DUE(self) -> datetime.datetime | datetime.date | None: ... @DUE.setter def DUE(self, value: datetime.datetime | datetime.date | None) -> None: ... + @DUE.deleter + def DUE(self) -> None: ... @property def DURATION(self) -> datetime.timedelta | None: ... @DURATION.setter def DURATION(self, value: datetime.timedelta | None) -> None: ... + @DURATION.deleter + def DURATION(self) -> None: ... @property def start(self) -> datetime.datetime | datetime.date: ... @start.setter @@ -160,12 +201,42 @@ class Todo(Component): def X_MOZ_SNOOZE_TIME(self) -> datetime.datetime | None: ... @X_MOZ_SNOOZE_TIME.setter def X_MOZ_SNOOZE_TIME(self, value: datetime.datetime) -> None: ... + @X_MOZ_SNOOZE_TIME.deleter + def X_MOZ_SNOOZE_TIME(self) -> None: ... @property def X_MOZ_LASTACK(self) -> datetime.datetime | None: ... @X_MOZ_LASTACK.setter def X_MOZ_LASTACK(self, value: datetime.datetime) -> None: ... + @X_MOZ_LASTACK.deleter + def X_MOZ_LASTACK(self) -> None: ... @property def alarms(self) -> Alarms: ... + @property + def color(self) -> str: ... + @color.setter + def color(self, value: str) -> None: ... + @color.deleter + def color(self) -> None: ... + @property + def sequence(self) -> int: ... + @sequence.setter + def sequence(self, value: int) -> None: ... + @sequence.deleter + def sequence(self) -> None: ... + @property + def categories(self) -> list[str]: ... + @categories.setter + def categories(self, cats: list[str]) -> None: ... + @categories.deleter + def categories(self) -> None: ... + @property + def rdates( + self, + ) -> list[tuple[datetime.date, None] | tuple[datetime.datetime, None] | tuple[datetime.datetime, datetime.datetime]]: ... + @property + def exdates(self) -> list[datetime.date | datetime.datetime]: ... + @property + def rrules(self) -> list[vRecur]: ... class Journal(Component): name: ClassVar[Literal["VJOURNAL"]] @@ -173,6 +244,8 @@ class Journal(Component): def DTSTART(self) -> datetime.date | datetime.datetime | None: ... @DTSTART.setter def DTSTART(self, value: datetime.date | datetime.datetime | None) -> None: ... + @DTSTART.deleter + def DTSTART(self) -> None: ... @property def start(self) -> datetime.date | datetime.datetime: ... @start.setter @@ -180,11 +253,38 @@ class Journal(Component): end = start @property def duration(self) -> datetime.timedelta: ... + @property + def color(self) -> str: ... + @color.setter + def color(self, value: str) -> None: ... + @color.deleter + def color(self) -> None: ... + @property + def sequence(self) -> int: ... + @sequence.setter + def sequence(self, value: int) -> None: ... + @sequence.deleter + def sequence(self) -> None: ... + @property + def categories(self) -> list[str]: ... + @categories.setter + def categories(self, cats: list[str]) -> None: ... 
+ @categories.deleter + def categories(self) -> None: ... + @property + def rdates( + self, + ) -> list[tuple[datetime.date, None] | tuple[datetime.datetime, None] | tuple[datetime.datetime, datetime.datetime]]: ... + @property + def exdates(self) -> list[datetime.date | datetime.datetime]: ... + @property + def rrules(self) -> list[vRecur]: ... class FreeBusy(Component): name: ClassVar[Literal["VFREEBUSY"]] class Timezone(Component): + subcomponents: list[TimezoneStandard | TimezoneDaylight] name: ClassVar[Literal["VTIMEZONE"]] @classmethod def example(cls, name: str = "pacific_fiji") -> Calendar: ... @@ -209,14 +309,28 @@ class TimezoneStandard(Component): def DTSTART(self) -> datetime.date | datetime.datetime | None: ... @DTSTART.setter def DTSTART(self, value: datetime.date | datetime.datetime | None) -> None: ... + @DTSTART.deleter + def DTSTART(self) -> None: ... @property def TZOFFSETTO(self) -> datetime.timedelta | None: ... @TZOFFSETTO.setter def TZOFFSETTO(self, value: datetime.timedelta | None) -> None: ... + @TZOFFSETTO.deleter + def TZOFFSETTO(self) -> None: ... @property def TZOFFSETFROM(self) -> datetime.timedelta | None: ... @TZOFFSETFROM.setter def TZOFFSETFROM(self, value: datetime.timedelta | None) -> None: ... + @TZOFFSETFROM.deleter + def TZOFFSETFROM(self) -> None: ... + @property + def rdates( + self, + ) -> list[tuple[datetime.date, None] | tuple[datetime.datetime, None] | tuple[datetime.datetime, datetime.datetime]]: ... + @property + def exdates(self) -> list[datetime.date | datetime.datetime]: ... + @property + def rrules(self) -> list[vRecur]: ... class TimezoneDaylight(Component): name: ClassVar[Literal["DAYLIGHT"]] @@ -224,14 +338,28 @@ class TimezoneDaylight(Component): def DTSTART(self) -> datetime.date | datetime.datetime | None: ... @DTSTART.setter def DTSTART(self, value: datetime.date | datetime.datetime | None) -> None: ... + @DTSTART.deleter + def DTSTART(self) -> None: ... @property def TZOFFSETTO(self) -> datetime.timedelta | None: ... @TZOFFSETTO.setter def TZOFFSETTO(self, value: datetime.timedelta | None) -> None: ... + @TZOFFSETTO.deleter + def TZOFFSETTO(self) -> None: ... @property def TZOFFSETFROM(self) -> datetime.timedelta | None: ... @TZOFFSETFROM.setter def TZOFFSETFROM(self, value: datetime.timedelta | None) -> None: ... + @TZOFFSETFROM.deleter + def TZOFFSETFROM(self) -> None: ... + @property + def rdates( + self, + ) -> list[tuple[datetime.date, None] | tuple[datetime.datetime, None] | tuple[datetime.datetime, datetime.datetime]]: ... + @property + def exdates(self) -> list[datetime.date | datetime.datetime]: ... + @property + def rrules(self) -> list[vRecur]: ... class Alarm(Component): name: ClassVar[Literal["VALARM"]] @@ -239,18 +367,26 @@ class Alarm(Component): def REPEAT(self) -> int: ... @REPEAT.setter def REPEAT(self, value: int) -> None: ... + @REPEAT.deleter + def REPEAT(self) -> None: ... @property def DURATION(self) -> datetime.timedelta | None: ... @DURATION.setter def DURATION(self, value: datetime.timedelta | None) -> None: ... + @DURATION.deleter + def DURATION(self) -> None: ... @property def ACKNOWLEDGED(self) -> datetime.datetime | None: ... @ACKNOWLEDGED.setter def ACKNOWLEDGED(self, value: datetime.datetime | None) -> None: ... + @ACKNOWLEDGED.deleter + def ACKNOWLEDGED(self) -> None: ... @property def TRIGGER(self) -> datetime.timedelta | datetime.datetime | None: ... @TRIGGER.setter def TRIGGER(self, value: datetime.timedelta | datetime.datetime | None) -> None: ... 
+ @TRIGGER.deleter + def TRIGGER(self) -> None: ... @property def TRIGGER_RELATED(self) -> Literal["START", "END"]: ... @TRIGGER_RELATED.setter @@ -269,6 +405,8 @@ class Calendar(Component): @classmethod def example(cls, name: str = "example") -> Calendar: ... @property + def freebusy(self) -> list[FreeBusy]: ... + @property def events(self) -> list[Event]: ... @property def todos(self) -> list[Todo]: ... @@ -277,6 +415,30 @@ class Calendar(Component): @property def timezones(self) -> list[Timezone]: ... def add_missing_timezones(self, first_date: datetime.date = ..., last_date: datetime.date = ...) -> None: ... + @property + def calendar_name(self) -> str | None: ... + @calendar_name.setter + def calendar_name(self, value: str) -> None: ... + @calendar_name.deleter + def calendar_name(self) -> None: ... + @property + def description(self) -> str | None: ... + @description.setter + def description(self, value: str) -> None: ... + @description.deleter + def description(self) -> None: ... + @property + def color(self) -> str: ... + @color.setter + def color(self, value: str) -> None: ... + @color.deleter + def color(self) -> None: ... + @property + def categories(self) -> list[str]: ... + @categories.setter + def categories(self, cats: list[str]) -> None: ... + @categories.deleter + def categories(self) -> None: ... types_factory: Final[TypesFactory] component_factory: Final[ComponentFactory] diff --git a/stubs/icalendar/icalendar/enums.pyi b/stubs/icalendar/icalendar/enums.pyi new file mode 100644 index 000000000000..cfe73902b5c0 --- /dev/null +++ b/stubs/icalendar/icalendar/enums.pyi @@ -0,0 +1,44 @@ +from enum import Enum + +class PARTSTAT(Enum): + NEEDS_ACTION = "NEEDS-ACTION" + ACCEPTED = "ACCEPTED" + DECLINED = "DECLINED" + TENTATIVE = "TENTATIVE" + DELEGATED = "DELEGATED" + COMPLETED = "COMPLETED" + IN_PROCESS = "IN-PROCESS" + +class FBTYPE(Enum): + FREE = "FREE" + BUSY = "BUSY" + BUSY_UNAVAILABLE = "BUSY-UNAVAILABLE" + BUSY_TENTATIVE = "BUSY-TENTATIVE" + +class CUTYPE(Enum): + INDIVIDUAL = "INDIVIDUAL" + GROUP = "GROUP" + RESOURCE = "RESOURCE" + ROOM = "ROOM" + UNKNOWN = "UNKNOWN" + +class RELTYPE(Enum): + PARENT = "PARENT" + CHILD = "CHILD" + SIBLING = "SIBLING" + +class RANGE(Enum): + THISANDFUTURE = "THISANDFUTURE" + THISANDPRIOR = "THISANDPRIOR" # deprecated + +class RELATED(Enum): + START = "START" + END = "END" + +class ROLE(Enum): + CHAIR = "CHAIR" + REQ_PARTICIPANT = "REQ-PARTICIPANT" + OPT_PARTICIPANT = "OPT-PARTICIPANT" + NON_PARTICIPANT = "NON-PARTICIPANT" + +__all__ = ["PARTSTAT", "FBTYPE", "CUTYPE", "RANGE", "RELATED", "ROLE", "RELTYPE"] diff --git a/stubs/icalendar/icalendar/error.pyi b/stubs/icalendar/icalendar/error.pyi new file mode 100644 index 000000000000..042db5aa1d07 --- /dev/null +++ b/stubs/icalendar/icalendar/error.pyi @@ -0,0 +1,15 @@ +class InvalidCalendar(ValueError): ... +class IncompleteComponent(ValueError): ... +class IncompleteAlarmInformation(ValueError): ... +class LocalTimezoneMissing(IncompleteAlarmInformation): ... +class ComponentEndMissing(IncompleteAlarmInformation): ... +class ComponentStartMissing(IncompleteAlarmInformation): ... 
+ +__all__ = [ + "InvalidCalendar", + "IncompleteComponent", + "IncompleteAlarmInformation", + "LocalTimezoneMissing", + "ComponentEndMissing", + "ComponentStartMissing", +] diff --git a/stubs/icalendar/icalendar/param.pyi b/stubs/icalendar/icalendar/param.pyi new file mode 100644 index 000000000000..9620a3a6580b --- /dev/null +++ b/stubs/icalendar/icalendar/param.pyi @@ -0,0 +1,62 @@ +from collections.abc import Callable +from typing import TypeVar + +from .parser import Parameters + +class IcalendarProperty: + params: Parameters + +_T = TypeVar("_T") + +def string_parameter( + name: str, + doc: str, + default: Callable[..., str | None] = ..., + convert: Callable[[str], _T] | None = None, + convert_to: Callable[[_T], str] | None = None, +) -> property: ... + +ALTREP: property +CN: property +CUTYPE: property + +def quoted_list_parameter(name: str, doc: str) -> property: ... + +DELEGATED_FROM: property +DELEGATED_TO: property +DIR: property +FBTYPE: property +LANGUAGE: property +MEMBER: property +PARTSTAT: property +RANGE: property +RELATED: property +ROLE: property + +def boolean_parameter(name: str, default: bool, doc: str) -> property: ... + +RSVP: property +SENT_BY: property +TZID: property +RELTYPE: property + +__all__ = [ + "string_parameter", + "quoted_list_parameter", + "ALTREP", + "CN", + "CUTYPE", + "DELEGATED_FROM", + "DELEGATED_TO", + "DIR", + "FBTYPE", + "LANGUAGE", + "MEMBER", + "PARTSTAT", + "RANGE", + "RELATED", + "ROLE", + "RSVP", + "SENT_BY", + "TZID", +] diff --git a/stubs/icalendar/icalendar/parser.pyi b/stubs/icalendar/icalendar/parser.pyi index 1ea43a8101f7..787cafef7bd2 100644 --- a/stubs/icalendar/icalendar/parser.pyi +++ b/stubs/icalendar/icalendar/parser.pyi @@ -2,10 +2,11 @@ from _collections_abc import dict_keys from _typeshed import Incomplete from collections.abc import Iterable from re import Pattern -from typing import AnyStr, Final, overload +from typing import AnyStr, ClassVar, Final, overload from typing_extensions import Self from .caselessdict import CaselessDict +from .parser_tools import ICAL_TYPE __all__ = [ "Contentline", @@ -24,6 +25,8 @@ __all__ = [ "param_value", "q_join", "q_split", + "rfc_6868_escape", + "rfc_6868_unescape", "uFOLD", "unescape_char", "unescape_list_or_string", @@ -35,7 +38,7 @@ __all__ = [ def escape_char(text: str) -> str: ... def unescape_char(text: AnyStr) -> AnyStr: ... def foldline(line: str, limit: int = 75, fold_sep: str = "\r\n ") -> str: ... -def param_value(value: str | list[str] | tuple[str, ...] | Incomplete) -> str: ... +def param_value(value: str | list[str] | tuple[str, ...] | Incomplete, always_quote: bool = False) -> str: ... NAME: Final[Pattern[str]] UNSAFE_CHAR: Final[Pattern[str]] @@ -49,11 +52,12 @@ def validate_param_value(value: str, quoted: bool = True) -> None: ... QUOTABLE: Final[Pattern[str]] -def dquote(val: str) -> str: ... +def dquote(val: str, always_quote: bool = False) -> str: ... def q_split(st: str, sep: str = ",", maxsplit: int = -1) -> list[str]: ... -def q_join(lst: Iterable[str], sep: str = ",") -> str: ... +def q_join(lst: Iterable[str], sep: str = ",", always_quote: bool = False) -> str: ... class Parameters(CaselessDict[str]): + always_quoted: ClassVar[tuple[str, ...]] def params(self) -> dict_keys[str, str]: ... def to_ical(self, sorted: bool = True) -> bytes: ... @classmethod @@ -61,6 +65,14 @@ class Parameters(CaselessDict[str]): def escape_string(val: str) -> str: ... def unescape_string(val: str) -> str: ... 
+ +RFC_6868_UNESCAPE_REGEX: Final[Pattern[str]] + +def rfc_6868_unescape(param_value: str) -> str: ... + +RFC_6868_ESCAPE_REGEX: Final[Pattern[str]] + +def rfc_6868_escape(param_value: str) -> str: ... @overload def unescape_list_or_string(val: list[str]) -> list[str]: ... @overload @@ -70,7 +82,7 @@ class Contentline(str): strict: bool def __new__(cls, value: str | bytes, strict: bool = False, encoding: str = ...) -> Self: ... @classmethod - def from_parts(cls, name: str, params: Parameters, values, sorted: bool = True) -> Self: ... + def from_parts(cls, name: ICAL_TYPE, params: Parameters, values, sorted: bool = True) -> Self: ... def parts(self) -> tuple[str, Parameters, str]: ... @classmethod def from_ical(cls, ical: str | bytes, strict: bool = False) -> Self: ... diff --git a/stubs/icalendar/icalendar/prop.pyi b/stubs/icalendar/icalendar/prop.pyi index ae5a7ec6942e..366d10ef3830 100644 --- a/stubs/icalendar/icalendar/prop.pyi +++ b/stubs/icalendar/icalendar/prop.pyi @@ -16,6 +16,7 @@ __all__ = [ "TimeBase", "TypesFactory", "WEEKDAY_RULE", + "tzid_from_dt", "vBinary", "vBoolean", "vCalAddress", @@ -33,14 +34,13 @@ __all__ = [ "vMonth", "vPeriod", "vRecur", - "vSkip", "vText", "vTime", "vUTCOffset", "vUri", "vWeekday", - "tzid_from_dt", "tzid_from_tzinfo", + "vSkip", ] _PropType: TypeAlias = type[Any] # any of the v* classes in this file @@ -77,6 +77,9 @@ class vText(str): def to_ical(self) -> bytes: ... @classmethod def from_ical(cls, ical: ICAL_TYPE) -> Self: ... + ALTREP: property + LANGUAGE: property + RELTYPE: property class vCalAddress(str): params: Parameters @@ -90,6 +93,18 @@ class vCalAddress(str): def name(self) -> str: ... @name.setter def name(self, value: str) -> None: ... + @name.deleter + def name(self) -> None: ... + CN: property + CUTYPE: property + DELEGATED_FROM: property + DELEGATED_TO: property + DIR: property + LANGUAGE: property + PARTSTAT: property + ROLE: property + RSVP: property + SENT_BY: property class vFloat(float): params: Parameters @@ -123,10 +138,16 @@ class vCategory: @staticmethod def from_ical(ical: ICAL_TYPE) -> str: ... def __eq__(self, other: object) -> bool: ... + RANGE: property + RELATED: property + TZID: property class TimeBase: def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... + RANGE: property + RELATED: property + TZID: property class vDDDTypes(TimeBase): params: Parameters @@ -186,6 +207,7 @@ class vPeriod(TimeBase): def from_ical(ical: str, timezone: datetime.timezone | str | None = None) -> tuple[Any, Any]: ... @property def dt(self) -> _PeriodTuple: ... + FBTYPE: property class vWeekday(str): week_days: Final[CaselessDict[int]] @@ -221,7 +243,7 @@ class vSkip(vText, Enum): FORWARD = "FORWARD" BACKWARD = "BACKWARD" - def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... + def __reduce_ex__(self, _p: Unused) -> tuple[Self, tuple[str]]: ... # The type of the values depend on the key. Each key maps to a v* class, and # the allowed types are the types that the corresponding v* class can parse. 
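
As a sanity check on the 6.2 additions above (calendar_name, description, color, sequence, categories and the new enums), here is a hedged sketch; the calendar data is invented and the snippet assumes icalendar 6.2 behaves at runtime as these stubs describe.

    from icalendar import Calendar, Event, PARTSTAT

    cal = Calendar()
    cal.calendar_name = "Team calendar"     # new str property on Calendar
    cal.description = "Planning events"

    event = Event()
    event.color = "tomato"                  # CSS colour name, per the color property
    event.sequence = 2
    event.categories = ["work", "planning"]
    cal.add_component(event)

    print(PARTSTAT.ACCEPTED.value)          # "ACCEPTED"
    print(cal.to_ical().decode())           # serialised iCalendar text
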
From 47093e222f6c1a01b801929d09d7de81f698ceaf Mon Sep 17 00:00:00 2001 From: sobolevn Date: Tue, 13 May 2025 14:29:49 +0300 Subject: [PATCH 362/388] Update `_ctypes` to 3.14 (#14043) --- stdlib/@tests/stubtest_allowlists/py314.txt | 6 ------ stdlib/_ctypes.pyi | 17 +++++++++++------ stdlib/ctypes/__init__.pyi | 21 ++++++++++++++++----- 3 files changed, 27 insertions(+), 17 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 9697fef404e9..9e4eb2142fb6 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -5,9 +5,6 @@ _asyncio.all_tasks _asyncio.future_add_to_awaited_by _asyncio.future_discard_from_awaited_by -_ctypes.POINTER -_ctypes.byref -_ctypes.pointer _heapq.heapify_max _heapq.heappop_max _heapq.heappush_max @@ -54,10 +51,7 @@ compression.gzip.GzipFile.readinto1 compression.gzip.GzipFile.readinto1 compression.gzip.compress compression.zstd -ctypes.POINTER -ctypes.byref ctypes.memoryview_at -ctypes.pointer ctypes.py_object.__class_getitem__ ctypes.util.dllist ctypes.wintypes.HCONV diff --git a/stdlib/_ctypes.pyi b/stdlib/_ctypes.pyi index 4cbb030bb136..944685646c36 100644 --- a/stdlib/_ctypes.pyi +++ b/stdlib/_ctypes.pyi @@ -131,18 +131,23 @@ class _Pointer(_PointerLike, _CData, Generic[_CT], metaclass=_PyCPointerType): def __getitem__(self, key: slice, /) -> list[Any]: ... def __setitem__(self, key: int, value: Any, /) -> None: ... -@overload -def POINTER(type: None, /) -> type[c_void_p]: ... -@overload -def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ... -def pointer(obj: _CT, /) -> _Pointer[_CT]: ... +if sys.version_info < (3, 14): + @overload + def POINTER(type: None, /) -> type[c_void_p]: ... + @overload + def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ... + def pointer(obj: _CT, /) -> _Pointer[_CT]: ... # This class is not exposed. It calls itself _ctypes.CArgObject. @final @type_check_only class _CArgObject: ... -def byref(obj: _CData | _CDataType, offset: int = ...) -> _CArgObject: ... +if sys.version_info >= (3, 14): + def byref(obj: _CData | _CDataType, offset: int = 0, /) -> _CArgObject: ... + +else: + def byref(obj: _CData | _CDataType, offset: int = 0) -> _CArgObject: ... 
_ECT: TypeAlias = Callable[[_CData | _CDataType | None, CFuncPtr, tuple[_CData | _CDataType, ...]], _CDataType] _PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any] diff --git a/stdlib/ctypes/__init__.pyi b/stdlib/ctypes/__init__.pyi index a7e19483301c..200c6d6f1fcb 100644 --- a/stdlib/ctypes/__init__.pyi +++ b/stdlib/ctypes/__init__.pyi @@ -1,6 +1,5 @@ import sys from _ctypes import ( - POINTER as POINTER, RTLD_GLOBAL as RTLD_GLOBAL, RTLD_LOCAL as RTLD_LOCAL, Array as Array, @@ -19,7 +18,6 @@ from _ctypes import ( alignment as alignment, byref as byref, get_errno as get_errno, - pointer as pointer, resize as resize, set_errno as set_errno, sizeof as sizeof, @@ -27,7 +25,7 @@ from _ctypes import ( from _typeshed import StrPath from ctypes._endian import BigEndianStructure as BigEndianStructure, LittleEndianStructure as LittleEndianStructure from types import GenericAlias -from typing import Any, ClassVar, Generic, Literal, TypeVar, type_check_only +from typing import Any, ClassVar, Generic, Literal, TypeVar, overload, type_check_only from typing_extensions import Self, TypeAlias, deprecated if sys.platform == "win32": @@ -36,9 +34,22 @@ if sys.platform == "win32": if sys.version_info >= (3, 11): from ctypes._endian import BigEndianUnion as BigEndianUnion, LittleEndianUnion as LittleEndianUnion +_CT = TypeVar("_CT", bound=_CData) _T = TypeVar("_T", default=Any) _DLLT = TypeVar("_DLLT", bound=CDLL) -_CT = TypeVar("_CT", bound=_CData) + +if sys.version_info >= (3, 14): + @overload + @deprecated("ctypes.POINTER with string") + def POINTER(obj: str) -> type[Any]: ... + @overload + def POINTER(obj: None) -> type[c_void_p]: ... + @overload + def POINTER(obj: type[_CT]) -> type[_Pointer[_CT]]: ... + def pointer(obj: _CT) -> _Pointer[_CT]: ... + +else: + from _ctypes import POINTER as POINTER, pointer as pointer DEFAULT_MODE: int @@ -148,7 +159,7 @@ c_buffer = create_string_buffer def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: ... @deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.15") -def SetPointerType(pointer: type[_Pointer[Any]], cls: Any) -> None: ... # noqa: F811 +def SetPointerType(pointer: type[_Pointer[Any]], cls: Any) -> None: ... def ARRAY(typ: _CT, len: int) -> Array[_CT]: ... 
# Soft Deprecated, no plans to remove if sys.platform == "win32": From bfc7ab3ad49fc36087ca486430bbb4421079396e Mon Sep 17 00:00:00 2001 From: sobolevn Date: Tue, 13 May 2025 14:56:11 +0300 Subject: [PATCH 363/388] Add `string.templatelib` in 3.14 (#14044) --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/VERSIONS | 1 + stdlib/{string.pyi => string/__init__.pyi} | 0 stdlib/string/templatelib.pyi | 28 +++++++++++++++++++++ 4 files changed, 29 insertions(+), 1 deletion(-) rename stdlib/{string.pyi => string/__init__.pyi} (100%) create mode 100644 stdlib/string/templatelib.pyi diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 9e4eb2142fb6..055cae41beeb 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -111,7 +111,6 @@ sre_compile.CH_NEGATE sre_constants.CH_NEGATE sre_parse.CH_NEGATE string.Template.flags -string.templatelib sys.is_remote_debug_enabled sys.remote_exec tarfile.TarFile.zstopen diff --git a/stdlib/VERSIONS b/stdlib/VERSIONS index d13340ab345d..1ecd8af64559 100644 --- a/stdlib/VERSIONS +++ b/stdlib/VERSIONS @@ -283,6 +283,7 @@ ssl: 3.0- stat: 3.0- statistics: 3.4- string: 3.0- +string.templatelib: 3.14- stringprep: 3.0- struct: 3.0- subprocess: 3.0- diff --git a/stdlib/string.pyi b/stdlib/string/__init__.pyi similarity index 100% rename from stdlib/string.pyi rename to stdlib/string/__init__.pyi diff --git a/stdlib/string/templatelib.pyi b/stdlib/string/templatelib.pyi new file mode 100644 index 000000000000..01b95377a49c --- /dev/null +++ b/stdlib/string/templatelib.pyi @@ -0,0 +1,28 @@ +from collections.abc import Iterator +from typing import Any, Literal, final + +__all__ = ["Interpolation", "Template"] + +@final +class Template: # TODO: consider making `Template` generic on `TypeVarTuple` + strings: tuple[str, ...] + interpolations: tuple[Interpolation, ...] + + def __new__(cls, *args: str | Interpolation) -> Template: ... + def __iter__(self) -> Iterator[str | Interpolation]: ... + def __add__(self, other: Template | str) -> Template: ... + @property + def values(self) -> tuple[Any, ...]: ... # Tuple of interpolation values, which can have any type + +@final +class Interpolation: + value: Any # TODO: consider making `Interpolation` generic in runtime + expression: str + conversion: Literal["a", "r", "s"] | None + format_spec: str + + __match_args__ = ("value", "expression", "conversion", "format_spec") + + def __new__( + cls, value: Any, expression: str, conversion: Literal["a", "r", "s"] | None = None, format_spec: str = "" + ) -> Interpolation: ... 
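
To show the shape of the new API, a small hedged example that builds a Template by hand from the constructors stubbed above; in real 3.14 code these objects usually come from PEP 750 t-string literals rather than manual construction, and the values here are invented.

    # Python 3.14+ only.
    from string.templatelib import Interpolation, Template

    who = "world"
    tmpl = Template("Hello ", Interpolation(who, "who"), "!")

    print(tmpl.strings)   # static segments, e.g. ('Hello ', '!')
    print(tmpl.values)    # interpolated values, e.g. ('world',)
    for part in tmpl:     # iteration yields str and Interpolation parts in order
        if isinstance(part, Interpolation):
            print("interpolation:", part.expression, "=", part.value)
        else:
            print("static:", repr(part))
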
From 40cc923b4b29b5ed7c5a0b7dc6af32fd702db1dd Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 13 May 2025 11:57:14 +0000 Subject: [PATCH 364/388] Improve `oauthlib.oauth2.rfc6749` (#13965) --- .../oauthlib/oauth2/rfc6749/clients/base.pyi | 4 +- .../rfc6749/endpoints/authorization.pyi | 6 +- .../oauth2/rfc6749/endpoints/introspect.pyi | 16 ++-- .../oauth2/rfc6749/endpoints/metadata.pyi | 27 +++--- .../rfc6749/endpoints/pre_configured.pyi | 94 ++++++++++--------- .../oauth2/rfc6749/endpoints/resource.pyi | 10 +- .../oauth2/rfc6749/endpoints/revocation.pyi | 21 +++-- .../oauth2/rfc6749/endpoints/token.pyi | 11 ++- .../oauthlib/oauth2/rfc6749/errors.pyi | 18 ++-- .../oauthlib/oauth2/rfc6749/tokens.pyi | 4 +- .../connect/core/endpoints/pre_configured.pyi | 5 +- 11 files changed, 114 insertions(+), 102 deletions(-) diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi index 94624d1e207e..2d3a88dbd3e0 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi @@ -99,8 +99,8 @@ class Client: ) -> tuple[str, dict[str, str], str]: ... def prepare_token_revocation_request( self, - revocation_url, - token, + revocation_url: str, + token: str, token_type_hint: Literal["access_token", "refresh_token"] | None = "access_token", body: str = "", callback: Callable[[Incomplete], Incomplete] | None = None, diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi index f99aec2fa801..cf7ead300b4a 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi @@ -8,11 +8,11 @@ from .base import BaseEndpoint log: Logger class AuthorizationEndpoint(BaseEndpoint): - def __init__(self, default_response_type, default_token_type, response_types: dict[str, Incomplete]) -> None: ... + def __init__(self, default_response_type: str, default_token_type: str, response_types: dict[str, Incomplete]) -> None: ... @property - def response_types(self): ... + def response_types(self) -> dict[str, Incomplete]: ... @property - def default_response_type(self): ... + def default_response_type(self) -> str: ... @property def default_response_type_handler(self): ... @property diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi index 56a65c6b3bcd..49ebf9ae0ff2 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi @@ -1,20 +1,20 @@ -from _typeshed import Incomplete from logging import Logger -from typing import Any +from typing import Literal from oauthlib.common import Request, _HTTPMethod +from ..request_validator import RequestValidator from .base import BaseEndpoint log: Logger class IntrospectEndpoint(BaseEndpoint): - valid_token_types: Any - valid_request_methods: Any - request_validator: Any - supported_token_types: Any - def __init__(self, request_validator, supported_token_types: Incomplete | None = None) -> None: ... + valid_token_types: tuple[Literal["access_token"], Literal["refresh_token"]] + valid_request_methods: tuple[Literal["POST"]] + request_validator: RequestValidator + supported_token_types: tuple[str, ...] + def __init__(self, request_validator: RequestValidator, supported_token_types: tuple[str, ...] 
| None = None) -> None: ... def create_introspect_response( self, uri: str, http_method: _HTTPMethod = "POST", body: str | None = None, headers: dict[str, str] | None = None - ): ... + ) -> tuple[dict[str, str], str, int]: ... def validate_introspect_request(self, request: Request) -> None: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi index c758bd01008e..fcd53ad14f0d 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi @@ -1,24 +1,27 @@ +from _typeshed import Incomplete +from collections.abc import Iterable from logging import Logger -from typing import Any from .base import BaseEndpoint log: Logger class MetadataEndpoint(BaseEndpoint): - raise_errors: Any - endpoints: Any - initial_claims: Any - claims: Any - def __init__(self, endpoints, claims={}, raise_errors: bool = True) -> None: ... + raise_errors: bool + endpoints: Iterable[BaseEndpoint] + initial_claims: dict[str, Incomplete] + claims: dict[str, Incomplete] + def __init__( + self, endpoints: Iterable[BaseEndpoint], claims: dict[str, Incomplete] = {}, raise_errors: bool = True + ) -> None: ... def create_metadata_response( self, uri: str, http_method: str = "GET", body: str | None = None, headers: dict[str, str] | None = None - ): ... + ) -> tuple[dict[str, str], str, int]: ... def validate_metadata( self, array, key, is_required: bool = False, is_list: bool = False, is_url: bool = False, is_issuer: bool = False ) -> None: ... - def validate_metadata_token(self, claims, endpoint) -> None: ... - def validate_metadata_authorization(self, claims, endpoint): ... - def validate_metadata_revocation(self, claims, endpoint) -> None: ... - def validate_metadata_introspection(self, claims, endpoint) -> None: ... - def validate_metadata_server(self): ... + def validate_metadata_token(self, claims, endpoint: BaseEndpoint) -> None: ... + def validate_metadata_authorization(self, claims, endpoint: BaseEndpoint): ... + def validate_metadata_revocation(self, claims, endpoint: BaseEndpoint) -> None: ... + def validate_metadata_introspection(self, claims, endpoint: BaseEndpoint) -> None: ... + def validate_metadata_server(self) -> dict[str, Incomplete]: ... 
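
A hedged sketch of driving MetadataEndpoint with the signatures above; the issuer and endpoint URLs are invented, the claims shown are only what this particular server composition appears to need, and a real validator subclass would replace the bare RequestValidator.

    from oauthlib.oauth2 import BackendApplicationServer, MetadataEndpoint, RequestValidator

    server = BackendApplicationServer(RequestValidator())   # client-credentials-only server
    claims = {
        "issuer": "https://auth.example.com",
        "token_endpoint": "https://auth.example.com/token",
        "revocation_endpoint": "https://auth.example.com/revoke",
        "introspection_endpoint": "https://auth.example.com/introspect",
    }
    metadata = MetadataEndpoint([server], claims=claims)
    headers, body, status = metadata.create_metadata_response(
        "https://auth.example.com/.well-known/oauth-authorization-server"
    )
    # body is a JSON document advertising the issuer, endpoints and supported grant types.
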
diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi index c6f6b442fa3f..581c565c82e5 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi @@ -1,6 +1,17 @@ -from _typeshed import Incomplete -from typing import Any +from _typeshed import Unused +from collections.abc import Callable +from oauthlib.common import Request + +from ..grant_types import ( + AuthorizationCodeGrant, + ClientCredentialsGrant, + ImplicitGrant, + RefreshTokenGrant, + ResourceOwnerPasswordCredentialsGrant, +) +from ..request_validator import RequestValidator +from ..tokens import BearerToken from .authorization import AuthorizationEndpoint from .introspect import IntrospectEndpoint from .resource import ResourceEndpoint @@ -8,68 +19,63 @@ from .revocation import RevocationEndpoint from .token import TokenEndpoint class Server(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, ResourceEndpoint, RevocationEndpoint): - auth_grant: Any - implicit_grant: Any - password_grant: Any - credentials_grant: Any - refresh_grant: Any - bearer: Any + auth_grant: AuthorizationCodeGrant + implicit_grant: ImplicitGrant + password_grant: ResourceOwnerPasswordCredentialsGrant + credentials_grant: ClientCredentialsGrant + refresh_grant: RefreshTokenGrant + bearer: BearerToken def __init__( self, - request_validator, - token_expires_in: Incomplete | None = None, - token_generator: Incomplete | None = None, - refresh_token_generator: Incomplete | None = None, - *args, - **kwargs, + request_validator: RequestValidator, + token_expires_in: int | Callable[[Request], int] | None = None, + token_generator: Callable[[Request], str] | None = None, + refresh_token_generator: Callable[[Request], str] | None = None, + *args: Unused, ) -> None: ... class WebApplicationServer(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, ResourceEndpoint, RevocationEndpoint): - auth_grant: Any - refresh_grant: Any - bearer: Any + auth_grant: AuthorizationCodeGrant + refresh_grant: RefreshTokenGrant + bearer: BearerToken def __init__( self, - request_validator, - token_generator: Incomplete | None = None, - token_expires_in: Incomplete | None = None, - refresh_token_generator: Incomplete | None = None, - **kwargs, + request_validator: RequestValidator, + token_generator: Callable[[Request], str] | None = None, + token_expires_in: int | Callable[[Request], int] | None = None, + refresh_token_generator: Callable[[Request], str] | None = None, ) -> None: ... class MobileApplicationServer(AuthorizationEndpoint, IntrospectEndpoint, ResourceEndpoint, RevocationEndpoint): - implicit_grant: Any - bearer: Any + implicit_grant: ImplicitGrant + bearer: BearerToken def __init__( self, - request_validator, - token_generator: Incomplete | None = None, - token_expires_in: Incomplete | None = None, - refresh_token_generator: Incomplete | None = None, - **kwargs, + request_validator: RequestValidator, + token_generator: Callable[[Request], str] | None = None, + token_expires_in: int | Callable[[Request], int] | None = None, + refresh_token_generator: Callable[[Request], str] | None = None, ) -> None: ... 
class LegacyApplicationServer(TokenEndpoint, IntrospectEndpoint, ResourceEndpoint, RevocationEndpoint): - password_grant: Any - refresh_grant: Any - bearer: Any + password_grant: ResourceOwnerPasswordCredentialsGrant + refresh_grant: RefreshTokenGrant + bearer: BearerToken def __init__( self, - request_validator, - token_generator: Incomplete | None = None, - token_expires_in: Incomplete | None = None, - refresh_token_generator: Incomplete | None = None, - **kwargs, + request_validator: RequestValidator, + token_generator: Callable[[Request], str] | None = None, + token_expires_in: int | Callable[[Request], int] | None = None, + refresh_token_generator: Callable[[Request], str] | None = None, ) -> None: ... class BackendApplicationServer(TokenEndpoint, IntrospectEndpoint, ResourceEndpoint, RevocationEndpoint): - credentials_grant: Any - bearer: Any + credentials_grant: ClientCredentialsGrant + bearer: BearerToken def __init__( self, - request_validator, - token_generator: Incomplete | None = None, - token_expires_in: Incomplete | None = None, - refresh_token_generator: Incomplete | None = None, - **kwargs, + request_validator: RequestValidator, + token_generator: Callable[[Request], str] | None = None, + token_expires_in: int | Callable[[Request], int] | None = None, + refresh_token_generator: Callable[[Request], str] | None = None, ) -> None: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi index 34c2f39eb08b..9259ac8d4920 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi @@ -8,19 +8,19 @@ from .base import BaseEndpoint log: Logger class ResourceEndpoint(BaseEndpoint): - def __init__(self, default_token, token_types) -> None: ... + def __init__(self, default_token: str, token_types: dict[str, Incomplete]) -> None: ... @property - def default_token(self): ... + def default_token(self) -> str: ... @property def default_token_type_handler(self): ... @property - def tokens(self): ... + def tokens(self) -> dict[str, Incomplete]: ... def verify_request( self, - uri, + uri: str, http_method: _HTTPMethod = "GET", body: str | None = None, headers: dict[str, str] | None = None, scopes: Incomplete | None = None, - ): ... + ) -> tuple[bool, Request]: ... def find_token_type(self, request: Request): ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi index 6000e99e125a..aae37277292d 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi @@ -1,23 +1,26 @@ -from _typeshed import Incomplete from logging import Logger -from typing import Any +from typing import Literal from oauthlib.common import Request, _HTTPMethod +from ..request_validator import RequestValidator from .base import BaseEndpoint log: Logger class RevocationEndpoint(BaseEndpoint): - valid_token_types: Any - valid_request_methods: Any - request_validator: Any - supported_token_types: Any - enable_jsonp: Any + valid_token_types: tuple[Literal["access_token"], Literal["refresh_token"]] + valid_request_methods: tuple[Literal["POST"]] + request_validator: RequestValidator + supported_token_types: tuple[str, ...] 
+ enable_jsonp: bool def __init__( - self, request_validator, supported_token_types: Incomplete | None = None, enable_jsonp: bool = False + self, + request_validator: RequestValidator, + supported_token_types: tuple[str, ...] | None = None, + enable_jsonp: bool = False, ) -> None: ... def create_revocation_response( self, uri: str, http_method: _HTTPMethod = "POST", body: str | None = None, headers: dict[str, str] | None = None - ): ... + ) -> tuple[dict[str, str], str, int]: ... def validate_revocation_request(self, request: Request) -> None: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi index ffce66d48ae2..9b90fe906b6c 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete from logging import Logger +from typing import Literal from oauthlib.common import Request, _HTTPMethod @@ -8,16 +9,16 @@ from .base import BaseEndpoint log: Logger class TokenEndpoint(BaseEndpoint): - valid_request_methods: tuple[str] - def __init__(self, default_grant_type, default_token_type, grant_types) -> None: ... + valid_request_methods: tuple[Literal["POST"]] + def __init__(self, default_grant_type: str, default_token_type: str, grant_types: dict[str, Incomplete]) -> None: ... @property - def grant_types(self): ... + def grant_types(self) -> dict[str, Incomplete]: ... @property - def default_grant_type(self): ... + def default_grant_type(self) -> str: ... @property def default_grant_type_handler(self): ... @property - def default_token_type(self): ... + def default_token_type(self) -> str: ... def create_token_response( self, uri: str, diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi index a10c600c239e..de88e00f0619 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi @@ -8,24 +8,24 @@ class OAuth2Error(Exception): status_code: int description: str uri: str | None - state: Any - redirect_uri: Any - client_id: Any + state: str | None + redirect_uri: str | None + client_id: str | None scopes: Any - response_type: Any - response_mode: Any - grant_type: Any + response_type: str | None + response_mode: str | None + grant_type: str | None def __init__( self, description: str | None = None, uri: str | None = None, - state: Incomplete | None = None, + state: str | None = None, status_code: int | None = None, request: Request | None = None, ) -> None: ... def in_uri(self, uri: str) -> str: ... @property - def twotuples(self) -> list[tuple[str, Incomplete | str | None]]: ... + def twotuples(self) -> list[tuple[str, str | None]]: ... @property def urlencoded(self) -> str: ... @property @@ -142,7 +142,7 @@ class CustomOAuth2Error(OAuth2Error): error: str, description: str | None = None, uri: str | None = None, - state: Incomplete | None = None, + state: str | None = None, status_code: int | None = None, request: Request | None = None, ) -> None: ... 
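
For context on the narrowed error attributes above, a short hedged example of how an OAuth2Error subclass exposes its payload; the description, state and callback URL are invented.

    from oauthlib.oauth2.rfc6749.errors import InvalidRequestError

    err = InvalidRequestError(description="missing client_id", state="af0ifjsldkj")
    print(err.twotuples)    # [('error', 'invalid_request'), ('error_description', ...), ('state', ...)]
    print(err.urlencoded)   # the same pairs as a query string
    print(err.in_uri("https://client.example.com/cb"))  # error appended to the redirect URI
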
diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi index e11777274fcb..a0cdc5bdfcf8 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi @@ -54,12 +54,12 @@ class BearerToken(TokenBase): request_validator: RequestValidator | None token_generator: Callable[[Request], str] refresh_token_generator: Callable[[Request], str] - expires_in: int + expires_in: int | Callable[[Request], int] def __init__( self, request_validator: RequestValidator | None = None, token_generator: Callable[[Request], str] | None = None, - expires_in: int | None = None, + expires_in: int | Callable[[Request], int] | None = None, refresh_token_generator: Callable[[Request], str] | None = None, ) -> None: ... def create_token(self, request: Request, refresh_token: bool = False, **kwargs) -> OAuth2Token: ... diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi index 60af36016533..0b6aae6a772e 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi @@ -1,5 +1,5 @@ +from _typeshed import Unused from collections.abc import Callable -from typing import Any from oauthlib.common import Request from oauthlib.oauth2.rfc6749.endpoints import ( @@ -48,6 +48,5 @@ class Server(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, ResourceE token_expires_in: int | Callable[[Request], int] | None = None, token_generator: Callable[[Request], str] | None = None, refresh_token_generator: Callable[[Request], str] | None = None, - *args: Any, # actually, these are not used - **kwargs: Any, # actually, these are not used + *args: Unused, ) -> None: ... From 0a1df7a804a84454812bbc240cf17dc929d421db Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Tue, 13 May 2025 14:16:35 +0200 Subject: [PATCH 365/388] [ctypes] Fix argument name (#14048) --- stdlib/ctypes/__init__.pyi | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stdlib/ctypes/__init__.pyi b/stdlib/ctypes/__init__.pyi index 200c6d6f1fcb..68b75b86def1 100644 --- a/stdlib/ctypes/__init__.pyi +++ b/stdlib/ctypes/__init__.pyi @@ -41,11 +41,11 @@ _DLLT = TypeVar("_DLLT", bound=CDLL) if sys.version_info >= (3, 14): @overload @deprecated("ctypes.POINTER with string") - def POINTER(obj: str) -> type[Any]: ... + def POINTER(cls: str) -> type[Any]: ... @overload - def POINTER(obj: None) -> type[c_void_p]: ... + def POINTER(cls: None) -> type[c_void_p]: ... @overload - def POINTER(obj: type[_CT]) -> type[_Pointer[_CT]]: ... + def POINTER(cls: type[_CT]) -> type[_Pointer[_CT]]: ... def pointer(obj: _CT) -> _Pointer[_CT]: ... 
else: From 1269757bd4f9af0a06d491b6b4b5b05305763a84 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 13 May 2025 12:16:57 +0000 Subject: [PATCH 366/388] Bump `fnmatch` to 3.14 (#14046) --- stdlib/@tests/stubtest_allowlists/py314.txt | 2 -- stdlib/fnmatch.pyi | 6 ++++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 055cae41beeb..f94c84c1ce70 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -67,8 +67,6 @@ enum.Enum.__signature__ enum.EnumMeta.__signature__ enum.EnumType.__signature__ faulthandler.dump_c_stack -fnmatch.__all__ -fnmatch.filterfalse fractions.Fraction.__pow__ fractions.Fraction.__rpow__ fractions.Fraction.from_number diff --git a/stdlib/fnmatch.pyi b/stdlib/fnmatch.pyi index 7051c999c430..345c4576497d 100644 --- a/stdlib/fnmatch.pyi +++ b/stdlib/fnmatch.pyi @@ -1,9 +1,15 @@ +import sys from collections.abc import Iterable from typing import AnyStr __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] +if sys.version_info >= (3, 14): + __all__ += ["filterfalse"] def fnmatch(name: AnyStr, pat: AnyStr) -> bool: ... def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: ... def filter(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... def translate(pat: str) -> str: ... + +if sys.version_info >= (3, 14): + def filterfalse(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... From d803d96799d1b4c40ac58de19185311e91b35de1 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 13 May 2025 12:29:35 +0000 Subject: [PATCH 367/388] Update `{code,codop}.compile_command` in 3.14 (#14049) --- stdlib/@tests/stubtest_allowlists/py314.txt | 2 -- stdlib/code.pyi | 4 +--- stdlib/codeop.pyi | 6 +++++- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index f94c84c1ce70..fd106a659486 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -43,8 +43,6 @@ builtins.int.__round__ builtins.memoryview.__class_getitem__ builtins.staticmethod.__annotate__ builtins.staticmethod.__class_getitem__ -code.compile_command -codeop.compile_command compression.gzip.GzipFile.readinto compression.gzip.GzipFile.readinto compression.gzip.GzipFile.readinto1 diff --git a/stdlib/code.pyi b/stdlib/code.pyi index 16721927c236..0b13c8a5016d 100644 --- a/stdlib/code.pyi +++ b/stdlib/code.pyi @@ -1,5 +1,5 @@ import sys -from codeop import CommandCompiler +from codeop import CommandCompiler, compile_command as compile_command from collections.abc import Callable from types import CodeType from typing import Any @@ -52,5 +52,3 @@ else: local: dict[str, Any] | None = None, exitmsg: str | None = None, ) -> None: ... - -def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... diff --git a/stdlib/codeop.pyi b/stdlib/codeop.pyi index cfe52e9b35de..8e311343eb89 100644 --- a/stdlib/codeop.pyi +++ b/stdlib/codeop.pyi @@ -3,7 +3,11 @@ from types import CodeType __all__ = ["compile_command", "Compile", "CommandCompiler"] -def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... +if sys.version_info >= (3, 14): + def compile_command(source: str, filename: str = "", symbol: str = "single", flags: int = 0) -> CodeType | None: ... 
+ +else: + def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... class Compile: flags: int From a3a7c9e1f8ffd4ce54a9b73ecadd078fa57f2f56 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 13 May 2025 16:50:32 +0200 Subject: [PATCH 368/388] Add additional type ignores for mypy (#14050) --- stdlib/tempfile.pyi | 2 +- stdlib/tkinter/ttk.pyi | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/stdlib/tempfile.pyi b/stdlib/tempfile.pyi index c4861f7c6f39..ea6e057e410d 100644 --- a/stdlib/tempfile.pyi +++ b/stdlib/tempfile.pyi @@ -384,7 +384,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def write(self: SpooledTemporaryFile[bytes], s: ReadableBuffer) -> int: ... @overload def write(self, s: AnyStr) -> int: ... - @overload + @overload # type: ignore[override] def writelines(self: SpooledTemporaryFile[str], iterable: Iterable[str]) -> None: ... @overload def writelines(self: SpooledTemporaryFile[bytes], iterable: Iterable[ReadableBuffer]) -> None: ... diff --git a/stdlib/tkinter/ttk.pyi b/stdlib/tkinter/ttk.pyi index 5328e461ebdc..ab3c010938be 100644 --- a/stdlib/tkinter/ttk.pyi +++ b/stdlib/tkinter/ttk.pyi @@ -562,7 +562,7 @@ class Notebook(Widget): compound: tkinter._Compound = ..., underline: int = ..., ) -> None: ... - def forget(self, tab_id) -> None: ... + def forget(self, tab_id) -> None: ... # type: ignore[override] def hide(self, tab_id) -> None: ... def identify(self, x: int, y: int) -> str: ... def index(self, tab_id): ... From ed71d4046e9c8a5ddc25d67f5636527f6331f25d Mon Sep 17 00:00:00 2001 From: sobolevn Date: Tue, 13 May 2025 18:12:57 +0300 Subject: [PATCH 369/388] Add new `ast` node from 3.14 (#14034) --- stdlib/@tests/stubtest_allowlists/py314.txt | 2 -- stdlib/ast.pyi | 32 +++++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index fd106a659486..516ef9a31ac3 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -15,8 +15,6 @@ _socket.IP_RECVTTL _socket.if_indextoname _thread.RLock.locked _thread.set_name -ast.Interpolation -ast.TemplateStr asyncio.__all__ asyncio._AbstractEventLoopPolicy asyncio._DefaultEventLoopPolicy diff --git a/stdlib/ast.pyi b/stdlib/ast.pyi index 64dc5f2af95b..f26ec4d1a08b 100644 --- a/stdlib/ast.pyi +++ b/stdlib/ast.pyi @@ -1,3 +1,4 @@ +import builtins import os import sys import typing_extensions @@ -1063,6 +1064,37 @@ class JoinedStr(expr): if sys.version_info >= (3, 14): def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... +if sys.version_info >= (3, 14): + class TemplateStr(expr): + __match_args__ = ("values",) + values: list[expr] + def __init__(self, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + + class Interpolation(expr): + __match_args__ = ("value", "str", "conversion", "format_spec") + value: expr + str: builtins.str + conversion: int + format_spec: builtins.str | None = None + def __init__( + self, + value: expr = ..., + str: builtins.str = ..., + conversion: int = ..., + format_spec: builtins.str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... 
+ def __replace__( + self, + *, + value: expr = ..., + str: builtins.str = ..., + conversion: int = ..., + format_spec: builtins.str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + class Constant(expr): if sys.version_info >= (3, 10): __match_args__ = ("value", "kind") From 2c457a3f35266e0921f8dd4fa3d1420aa36e24c5 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Tue, 13 May 2025 15:15:11 +0000 Subject: [PATCH 370/388] Add `dis.Instruction.make` for 3.14 (#14039) --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/dis.pyi | 15 +++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 516ef9a31ac3..eac9334bc1dd 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -58,7 +58,6 @@ ctypes.wintypes.HDROP ctypes.wintypes.HFILE ctypes.wintypes.HRESULT ctypes.wintypes.HSZ -dis.Instruction.make enum.Enum.__signature__ enum.EnumMeta.__signature__ enum.EnumType.__signature__ diff --git a/stdlib/dis.pyi b/stdlib/dis.pyi index afd5e00ca40c..86b6d01e3120 100644 --- a/stdlib/dis.pyi +++ b/stdlib/dis.pyi @@ -106,6 +106,21 @@ class Instruction(_Instruction): def jump_target(self) -> int: ... @property def is_jump_target(self) -> bool: ... + if sys.version_info >= (3, 14): + @staticmethod + def make( + opname: str, + arg: int | None, + argval: Any, + argrepr: str, + offset: int, + start_offset: int, + starts_line: bool, + line_number: int | None, + label: int | None = None, + positions: Positions | None = None, + cache_info: list[tuple[str, int, Any]] | None = None, + ) -> Instruction: ... class Bytecode: codeobj: types.CodeType From 2bab3aed0eea56409fcc310a3f73cf8a18264194 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 13 May 2025 12:51:46 -0400 Subject: [PATCH 371/388] Support `requires_python` in `runtests.py` (#14051) --- lib/ts_utils/metadata.py | 4 ++-- tests/runtests.py | 19 +++++++++++++------ 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/lib/ts_utils/metadata.py b/lib/ts_utils/metadata.py index 2cf093ffc4a4..da1a15af0f30 100644 --- a/lib/ts_utils/metadata.py +++ b/lib/ts_utils/metadata.py @@ -47,7 +47,7 @@ def _is_nested_dict(obj: object) -> TypeGuard[dict[str, dict[str, Any]]]: @functools.cache -def _get_oldest_supported_python() -> str: +def get_oldest_supported_python() -> str: with PYPROJECT_PATH.open("rb") as config: val = tomli.load(config)["tool"]["typeshed"]["oldest_supported_python"] assert type(val) is str @@ -276,7 +276,7 @@ def read_metadata(distribution: str) -> StubMetadata: partial_stub: object = data.get("partial_stub", True) assert type(partial_stub) is bool requires_python_str: object = data.get("requires_python") - oldest_supported_python = _get_oldest_supported_python() + oldest_supported_python = get_oldest_supported_python() oldest_supported_python_specifier = Specifier(f">={oldest_supported_python}") if requires_python_str is None: requires_python = oldest_supported_python_specifier diff --git a/tests/runtests.py b/tests/runtests.py index 64346f4e58fd..efffc84df747 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -9,6 +9,7 @@ from importlib.util import find_spec from pathlib import Path +from ts_utils.metadata import get_oldest_supported_python, read_metadata from ts_utils.paths import TEST_CASES_DIR, test_cases_path from ts_utils.utils import colored @@ -19,9 +20,6 @@ _SUCCESS = colored("Success", "green") _SKIPPED = colored("Skipped", "yellow") 
_FAILED = colored("Failed", "red") -# We're using the oldest fully supported version because it's the most likely to produce errors -# due to unsupported syntax, feature, or bug in a tool. -_PYTHON_VERSION = "3.9" def _parse_jsonc(json_text: str) -> str: @@ -52,15 +50,16 @@ def main() -> None: ) parser.add_argument( "--python-version", - default=_PYTHON_VERSION, + default=None, choices=("3.9", "3.10", "3.11", "3.12", "3.13", "3.14"), - help="Target Python version for the test (default: %(default)s).", + # We're using the oldest fully supported version because it's the most likely to produce errors + # due to unsupported syntax, feature, or bug in a tool. + help="Target Python version for the test (defaults to oldest supported Python version).", ) parser.add_argument("path", help="Path of the stub to test in format /, from the root of the project.") args = parser.parse_args() path = Path(args.path) run_stubtest: bool = args.run_stubtest - python_version: str = args.python_version if len(path.parts) != 2: parser.error("'path' argument should be in format /.") @@ -69,6 +68,14 @@ def main() -> None: parser.error("Only the 'stdlib' and 'stubs' folders are supported.") if not path.exists(): parser.error(f"{path=} does not exist.") + + if args.python_version: + python_version: str = args.python_version + elif folder in "stubs": + python_version = read_metadata(stub).requires_python.version + else: + python_version = get_oldest_supported_python() + stubtest_result: subprocess.CompletedProcess[bytes] | None = None pytype_result: subprocess.CompletedProcess[bytes] | None = None From 2edd6d2c7464c7837e640274a2993c98ca10c86b Mon Sep 17 00:00:00 2001 From: Azraei Yusof Date: Wed, 14 May 2025 03:00:25 +0800 Subject: [PATCH 372/388] jwcrypto: Fix export_to_pem password argument (#14037) --- stubs/jwcrypto/jwcrypto/jwk.pyi | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/stubs/jwcrypto/jwcrypto/jwk.pyi b/stubs/jwcrypto/jwcrypto/jwk.pyi index 9ec8c3866a41..721b4e892e9b 100644 --- a/stubs/jwcrypto/jwcrypto/jwk.pyi +++ b/stubs/jwcrypto/jwcrypto/jwk.pyi @@ -1,3 +1,4 @@ +from _typeshed import Unused from collections.abc import Callable, Sequence from enum import Enum from typing import Any, Literal, NamedTuple, TypeVar, overload @@ -201,7 +202,10 @@ class JWK(dict[str, Any]): ), ) -> None: ... def import_from_pem(self, data: bytes, password: bytes | None = None, kid: str | None = None) -> None: ... - def export_to_pem(self, private_key: bool = False, password: bool = False) -> bytes: ... + @overload + def export_to_pem(self, private_key: Literal[False] = False, password: Unused = False) -> bytes: ... + @overload + def export_to_pem(self, private_key: Literal[True], password: bytes | None) -> bytes: ... @classmethod def from_pyca( cls, From 561cc7f6f2a5c718551e8584d43790d22024bfd0 Mon Sep 17 00:00:00 2001 From: Akuli Date: Tue, 13 May 2025 23:50:13 +0300 Subject: [PATCH 373/388] Make `tkinter.Event` equivalent to `tkinter.Event[tkinter.Misc]` (#14053) --- stdlib/tkinter/__init__.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stdlib/tkinter/__init__.pyi b/stdlib/tkinter/__init__.pyi index e2b4eca1e62a..e23ab07f123d 100644 --- a/stdlib/tkinter/__init__.pyi +++ b/stdlib/tkinter/__init__.pyi @@ -286,7 +286,7 @@ else: _W = TypeVar("_W", bound=Misc) # Events considered covariant because you should never assign to event.widget. 
-_W_co = TypeVar("_W_co", covariant=True, bound=Misc) +_W_co = TypeVar("_W_co", covariant=True, bound=Misc, default=Misc) class Event(Generic[_W_co]): serial: int From 616a9b389228ae890c097c809b9f87c7ca80051c Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Wed, 14 May 2025 06:34:53 +0000 Subject: [PATCH 374/388] Bump `os` and `posix` to 3.14 (#14060) --- stdlib/@tests/stubtest_allowlists/darwin-py314.txt | 1 - stdlib/@tests/stubtest_allowlists/linux-py314.txt | 5 ----- stdlib/@tests/stubtest_allowlists/py314.txt | 2 -- stdlib/os/__init__.pyi | 11 +++++++++++ stdlib/posix.pyi | 6 ++++++ 5 files changed, 17 insertions(+), 8 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/darwin-py314.txt b/stdlib/@tests/stubtest_allowlists/darwin-py314.txt index 31c60195d9b9..128e49ac2604 100644 --- a/stdlib/@tests/stubtest_allowlists/darwin-py314.txt +++ b/stdlib/@tests/stubtest_allowlists/darwin-py314.txt @@ -13,7 +13,6 @@ ctypes.c_float_complex._type_ ctypes.c_longdouble_complex._type_ multiprocessing.popen_fork.Popen.interrupt multiprocessing.reduction.ACKNOWLEDGE -posix.readinto # ======= diff --git a/stdlib/@tests/stubtest_allowlists/linux-py314.txt b/stdlib/@tests/stubtest_allowlists/linux-py314.txt index c6e1bc3bda5f..091e7257d0b8 100644 --- a/stdlib/@tests/stubtest_allowlists/linux-py314.txt +++ b/stdlib/@tests/stubtest_allowlists/linux-py314.txt @@ -21,11 +21,6 @@ ctypes.c_longdouble_complex._type_ errno.EHWPOISON multiprocessing.popen_fork.Popen.interrupt multiprocessing.reduction.ACKNOWLEDGE -os.SCHED_DEADLINE -os.SCHED_NORMAL -posix.SCHED_DEADLINE -posix.SCHED_NORMAL -posix.readinto select.EPOLLWAKEUP socket.CAN_RAW_ERR_FILTER socket.IPV6_RECVERR diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index eac9334bc1dd..5115fe7a8b57 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -90,8 +90,6 @@ multiprocessing.managers._BaseDictProxy.__ror__ multiprocessing.managers._BaseDictProxy.fromkeys multiprocessing.process.BaseProcess.interrupt multiprocessing.synchronize.SemLock.locked -os.__all__ -os.readinto pkgutil.__all__ pkgutil.find_loader pkgutil.get_loader diff --git a/stdlib/os/__init__.pyi b/stdlib/os/__init__.pyi index d0ef614abbce..5286c76d1b06 100644 --- a/stdlib/os/__init__.pyi +++ b/stdlib/os/__init__.pyi @@ -160,6 +160,8 @@ __all__ = [ "walk", "write", ] +if sys.version_info >= (3, 14): + __all__ += ["readinto"] if sys.platform == "darwin" and sys.version_info >= (3, 12): __all__ += ["PRIO_DARWIN_BG", "PRIO_DARWIN_NONUI", "PRIO_DARWIN_PROCESS", "PRIO_DARWIN_THREAD"] if sys.platform == "darwin" and sys.version_info >= (3, 10): @@ -208,6 +210,8 @@ if sys.platform == "linux": "removexattr", "setxattr", ] +if sys.platform == "linux" and sys.version_info >= (3, 14): + __all__ += ["SCHED_DEADLINE", "SCHED_NORMAL"] if sys.platform == "linux" and sys.version_info >= (3, 13): __all__ += [ "POSIX_SPAWN_CLOSEFROM", @@ -570,6 +574,10 @@ if sys.platform == "linux": SCHED_IDLE: int SCHED_RESET_ON_FORK: int +if sys.version_info >= (3, 14) and sys.platform == "linux": + SCHED_DEADLINE: int + SCHED_NORMAL: int + if sys.platform != "win32": RTLD_LAZY: int RTLD_NOW: int @@ -1149,6 +1157,9 @@ if sys.platform != "win32": def readv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], /) -> int: ... def writev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], /) -> int: ... +if sys.version_info >= (3, 14): + def readinto(fd: int, buffer: ReadableBuffer, /) -> int: ... 
+ @final class terminal_size(structseq[int], tuple[int, int]): if sys.version_info >= (3, 10): diff --git a/stdlib/posix.pyi b/stdlib/posix.pyi index 88f4135af2a7..6d0d76ab8217 100644 --- a/stdlib/posix.pyi +++ b/stdlib/posix.pyi @@ -250,6 +250,12 @@ if sys.platform != "win32": timerfd_settime_ns as timerfd_settime_ns, ) + if sys.version_info >= (3, 14): + from os import readinto as readinto + + if sys.version_info >= (3, 14) and sys.platform == "linux": + from os import SCHED_DEADLINE as SCHED_DEADLINE, SCHED_NORMAL as SCHED_NORMAL + if sys.platform != "linux": from os import O_EXLOCK as O_EXLOCK, O_SHLOCK as O_SHLOCK, chflags as chflags, lchflags as lchflags, lchmod as lchmod From 62e0ad55acb481a873373f4ff953b3205eb66f90 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Wed, 14 May 2025 10:21:46 +0000 Subject: [PATCH 375/388] Bump `urllib.request` to 3.14 (#14062) --- stdlib/@tests/stubtest_allowlists/py314.txt | 5 - stdlib/urllib/request.pyi | 194 ++++++++++---------- stubs/six/six/moves/urllib/request.pyi | 7 +- 3 files changed, 106 insertions(+), 100 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 5115fe7a8b57..93c3fd384361 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -117,11 +117,6 @@ turtle.poly turtle.save types.CodeType.co_branches types.FrameType.f_generator -urllib.request.__all__ -urllib.request.FancyURLopener -urllib.request.URLopener -urllib.request.pathname2url -urllib.request.url2pathname xml.parsers.expat.errors.XML_ERROR_NOT_STARTED xml.sax.__all__ xml.sax.InputSource diff --git a/stdlib/urllib/request.pyi b/stdlib/urllib/request.pyi index 1f453fd1e1d6..d8fc5e0d8f48 100644 --- a/stdlib/urllib/request.pyi +++ b/stdlib/urllib/request.pyi @@ -7,7 +7,7 @@ from http.client import HTTPConnection, HTTPMessage, HTTPResponse from http.cookiejar import CookieJar from re import Pattern from typing import IO, Any, ClassVar, NoReturn, Protocol, TypeVar, overload -from typing_extensions import TypeAlias +from typing_extensions import TypeAlias, deprecated from urllib.error import HTTPError as HTTPError from urllib.response import addclosehook, addinfourl @@ -43,10 +43,10 @@ __all__ = [ "getproxies", "urlretrieve", "urlcleanup", - "URLopener", - "FancyURLopener", "HTTPSHandler", ] +if sys.version_info < (3, 14): + __all__ += ["URLopener", "FancyURLopener"] _T = TypeVar("_T") _UrlopenRet: TypeAlias = Any @@ -72,11 +72,16 @@ else: def install_opener(opener: OpenerDirector) -> None: ... def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: ... -if sys.platform == "win32": - from nturl2path import pathname2url as pathname2url, url2pathname as url2pathname +if sys.version_info >= (3, 14): + def url2pathname(url: str, *, require_scheme: bool = False, resolve_host: bool = False) -> str: ... + def pathname2url(pathname: str, *, add_scheme: bool = False) -> str: ... + else: - def url2pathname(pathname: str) -> str: ... - def pathname2url(pathname: str) -> str: ... + if sys.platform == "win32": + from nturl2path import pathname2url as pathname2url, url2pathname as url2pathname + else: + def url2pathname(pathname: str) -> str: ... + def pathname2url(pathname: str) -> str: ... def getproxies() -> dict[str, str]: ... def getproxies_environment() -> dict[str, str]: ... @@ -318,91 +323,94 @@ def urlretrieve( ) -> tuple[str, HTTPMessage]: ... def urlcleanup() -> None: ... 
-class URLopener: - version: ClassVar[str] - def __init__(self, proxies: dict[str, str] | None = None, **x509: str) -> None: ... - def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... - def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... - def retrieve( - self, - url: str, - filename: str | None = None, - reporthook: Callable[[int, int, int], object] | None = None, - data: ReadableBuffer | None = None, - ) -> tuple[str, Message | None]: ... - def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented - def cleanup(self) -> None: ... # undocumented - def close(self) -> None: ... # undocumented - def http_error( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = None - ) -> _UrlopenRet: ... # undocumented - def http_error_default( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage - ) -> _UrlopenRet: ... # undocumented - def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: ... # undocumented - def open_file(self, url: str) -> addinfourl: ... # undocumented - def open_ftp(self, url: str) -> addinfourl: ... # undocumented - def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented - def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented - def open_local_file(self, url: str) -> addinfourl: ... # undocumented - def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: ... # undocumented - def __del__(self) -> None: ... - -class FancyURLopener(URLopener): - def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... - def get_user_passwd(self, host: str, realm: str, clear_cache: int = 0) -> tuple[str, str]: ... # undocumented - def http_error_301( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: ... # undocumented - def http_error_302( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: ... # undocumented - def http_error_303( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: ... # undocumented - def http_error_307( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: ... # undocumented - if sys.version_info >= (3, 11): - def http_error_308( +if sys.version_info < (3, 14): + @deprecated("Deprecated since Python 3.3; Removed in 3.14; Use newer urlopen functions and methods.") + class URLopener: + version: ClassVar[str] + def __init__(self, proxies: dict[str, str] | None = None, **x509: str) -> None: ... + def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def retrieve( + self, + url: str, + filename: str | None = None, + reporthook: Callable[[int, int, int], object] | None = None, + data: ReadableBuffer | None = None, + ) -> tuple[str, Message | None]: ... + def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented + def cleanup(self) -> None: ... # undocumented + def close(self) -> None: ... 
# undocumented + def http_error( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = None + ) -> _UrlopenRet: ... # undocumented + def http_error_default( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage + ) -> _UrlopenRet: ... # undocumented + def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: ... # undocumented + def open_file(self, url: str) -> addinfourl: ... # undocumented + def open_ftp(self, url: str) -> addinfourl: ... # undocumented + def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_local_file(self, url: str) -> addinfourl: ... # undocumented + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: ... # undocumented + def __del__(self) -> None: ... + + @deprecated("Deprecated since Python 3.3; Removed in 3.14; Use newer urlopen functions and methods.") + class FancyURLopener(URLopener): + def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... + def get_user_passwd(self, host: str, realm: str, clear_cache: int = 0) -> tuple[str, str]: ... # undocumented + def http_error_301( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented - - def http_error_401( - self, - url: str, - fp: IO[bytes], - errcode: int, - errmsg: str, - headers: HTTPMessage, - data: ReadableBuffer | None = None, - retry: bool = False, - ) -> _UrlopenRet | None: ... # undocumented - def http_error_407( - self, - url: str, - fp: IO[bytes], - errcode: int, - errmsg: str, - headers: HTTPMessage, - data: ReadableBuffer | None = None, - retry: bool = False, - ) -> _UrlopenRet | None: ... # undocumented - def http_error_default( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage - ) -> addinfourl: ... # undocumented - def redirect_internal( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None - ) -> _UrlopenRet | None: ... # undocumented - def retry_http_basic_auth( - self, url: str, realm: str, data: ReadableBuffer | None = None - ) -> _UrlopenRet | None: ... # undocumented - def retry_https_basic_auth( - self, url: str, realm: str, data: ReadableBuffer | None = None - ) -> _UrlopenRet | None: ... # undocumented - def retry_proxy_http_basic_auth( - self, url: str, realm: str, data: ReadableBuffer | None = None - ) -> _UrlopenRet | None: ... # undocumented - def retry_proxy_https_basic_auth( - self, url: str, realm: str, data: ReadableBuffer | None = None - ) -> _UrlopenRet | None: ... # undocumented + def http_error_302( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + def http_error_303( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + def http_error_307( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... 
# undocumented + if sys.version_info >= (3, 11): + def http_error_308( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + + def http_error_401( + self, + url: str, + fp: IO[bytes], + errcode: int, + errmsg: str, + headers: HTTPMessage, + data: ReadableBuffer | None = None, + retry: bool = False, + ) -> _UrlopenRet | None: ... # undocumented + def http_error_407( + self, + url: str, + fp: IO[bytes], + errcode: int, + errmsg: str, + headers: HTTPMessage, + data: ReadableBuffer | None = None, + retry: bool = False, + ) -> _UrlopenRet | None: ... # undocumented + def http_error_default( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage + ) -> addinfourl: ... # undocumented + def redirect_internal( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None + ) -> _UrlopenRet | None: ... # undocumented + def retry_http_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_https_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_proxy_http_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_proxy_https_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented diff --git a/stubs/six/six/moves/urllib/request.pyi b/stubs/six/six/moves/urllib/request.pyi index 9b670b4d98b0..69d402c61659 100644 --- a/stubs/six/six/moves/urllib/request.pyi +++ b/stubs/six/six/moves/urllib/request.pyi @@ -1,3 +1,5 @@ +import sys + # Stubs for six.moves.urllib.request # # Note: Commented out items means they weren't implemented at the time. 
@@ -8,7 +10,6 @@ from urllib.request import ( AbstractDigestAuthHandler as AbstractDigestAuthHandler, BaseHandler as BaseHandler, CacheFTPHandler as CacheFTPHandler, - FancyURLopener as FancyURLopener, FileHandler as FileHandler, FTPHandler as FTPHandler, HTTPBasicAuthHandler as HTTPBasicAuthHandler, @@ -27,7 +28,6 @@ from urllib.request import ( ProxyHandler as ProxyHandler, Request as Request, UnknownHandler as UnknownHandler, - URLopener as URLopener, build_opener as build_opener, getproxies as getproxies, install_opener as install_opener, @@ -39,3 +39,6 @@ from urllib.request import ( urlopen as urlopen, urlretrieve as urlretrieve, ) + +if sys.version_info < (3, 14): + from urllib.request import FancyURLopener as FancyURLopener, URLopener as URLopener From c8823f14e2788945a7916f3a30ebc9a60056a927 Mon Sep 17 00:00:00 2001 From: sobolevn Date: Wed, 14 May 2025 13:23:38 +0300 Subject: [PATCH 376/388] Update `_socket` and `socket` for 3.14 (#14055) --- .../stubtest_allowlists/linux-py314.txt | 14 -------- stdlib/@tests/stubtest_allowlists/py314.txt | 5 --- .../stubtest_allowlists/win32-py314.txt | 18 ---------- stdlib/_socket.pyi | 29 +++++++++++++++- stdlib/socket.pyi | 33 +++++++++++++++++++ 5 files changed, 61 insertions(+), 38 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/linux-py314.txt b/stdlib/@tests/stubtest_allowlists/linux-py314.txt index 091e7257d0b8..8611de3f9b29 100644 --- a/stdlib/@tests/stubtest_allowlists/linux-py314.txt +++ b/stdlib/@tests/stubtest_allowlists/linux-py314.txt @@ -4,13 +4,6 @@ _curses.assume_default_colors _posixsubprocess.fork_exec -_socket.CAN_RAW_ERR_FILTER -_socket.IPV6_RECVERR -_socket.IP_FREEBIND -_socket.IP_RECVERR -_socket.IP_RECVORIGDSTADDR -_socket.SO_ORIGINAL_DST -_socket.VMADDR_CID_LOCAL asyncio.tools asyncio.unix_events.__all__ asyncio.unix_events.DefaultEventLoopPolicy @@ -22,10 +15,3 @@ errno.EHWPOISON multiprocessing.popen_fork.Popen.interrupt multiprocessing.reduction.ACKNOWLEDGE select.EPOLLWAKEUP -socket.CAN_RAW_ERR_FILTER -socket.IPV6_RECVERR -socket.IP_FREEBIND -socket.IP_RECVERR -socket.IP_RECVORIGDSTADDR -socket.SO_ORIGINAL_DST -socket.VMADDR_CID_LOCAL diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index 93c3fd384361..476713f109dd 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -11,8 +11,6 @@ _heapq.heappush_max _heapq.heappushpop_max _heapq.heapreplace_max _imp.pyc_magic_number_token -_socket.IP_RECVTTL -_socket.if_indextoname _thread.RLock.locked _thread.set_name asyncio.__all__ @@ -95,9 +93,6 @@ pkgutil.find_loader pkgutil.get_loader pyexpat.errors.XML_ERROR_NOT_STARTED shutil.__all__ -socket.__all__ -socket.IP_RECVTTL -socket.if_indextoname sre_compile.CH_NEGATE sre_constants.CH_NEGATE sre_parse.CH_NEGATE diff --git a/stdlib/@tests/stubtest_allowlists/win32-py314.txt b/stdlib/@tests/stubtest_allowlists/win32-py314.txt index cf30f5056c0d..2da8686533fb 100644 --- a/stdlib/@tests/stubtest_allowlists/win32-py314.txt +++ b/stdlib/@tests/stubtest_allowlists/win32-py314.txt @@ -2,15 +2,6 @@ # TODO: New errors in Python 3.14 that need to be fixed or moved below # ==================================================================== -_socket.IPV6_RECVERR -_socket.IP_RECVERR -_socket.SOL_RFCOMM -_socket.SO_ORIGINAL_DST -_socket.SO_BTH_ENCRYPT -_socket.SO_BTH_MTU -_socket.SO_BTH_MTU_MAX -_socket.SO_BTH_MTU_MIN -_socket.TCP_QUICKACK _winapi.COPY_FILE_DIRECTORY 
asyncio.WindowsProactorEventLoopPolicy.get_child_watcher asyncio.WindowsProactorEventLoopPolicy.set_child_watcher @@ -31,15 +22,6 @@ encodings.win32_code_page_search_function nt.readinto pathlib.Path.group pathlib.Path.owner -socket.IPV6_RECVERR -socket.IP_RECVERR -socket.SOL_RFCOMM -socket.SO_ORIGINAL_DST -socket.SO_BTH_ENCRYPT -socket.SO_BTH_MTU -socket.SO_BTH_MTU_MAX -socket.SO_BTH_MTU_MIN -socket.TCP_QUICKACK winsound.MB_ICONERROR winsound.MB_ICONINFORMATION winsound.MB_ICONSTOP diff --git a/stdlib/_socket.pyi b/stdlib/_socket.pyi index 5399f4edf010..06a8a2ba5fa0 100644 --- a/stdlib/_socket.pyi +++ b/stdlib/_socket.pyi @@ -229,6 +229,28 @@ if sys.platform != "win32": IP_RECVOPTS: int IP_RECVRETOPTS: int IP_RETOPTS: int +if sys.version_info >= (3, 14): + IP_RECVTTL: int + + if sys.platform == "win32" or sys.platform == "linux": + IPV6_RECVERR: int + IP_RECVERR: int + SO_ORIGINAL_DST: int + + if sys.platform == "win32": + SOL_RFCOMM: int + SO_BTH_ENCRYPT: int + SO_BTH_MTU: int + SO_BTH_MTU_MAX: int + SO_BTH_MTU_MIN: int + TCP_QUICKACK: int + + if sys.platform == "linux": + CAN_RAW_ERR_FILTER: int + IP_FREEBIND: int + IP_RECVORIGDSTADDR: int + VMADDR_CID_LOCAL: int + if sys.platform != "win32" and sys.platform != "darwin": IP_TRANSPARENT: int if sys.platform != "win32" and sys.platform != "darwin" and sys.version_info >= (3, 11): @@ -829,6 +851,11 @@ if sys.platform != "win32": def if_nameindex() -> list[tuple[int, str]]: ... def if_nametoindex(oname: str, /) -> int: ... -def if_indextoname(index: int, /) -> str: ... + +if sys.version_info >= (3, 14): + def if_indextoname(if_index: int, /) -> str: ... + +else: + def if_indextoname(index: int, /) -> str: ... CAPI: CapsuleType diff --git a/stdlib/socket.pyi b/stdlib/socket.pyi index ff89dcc72209..1ee006235ee6 100644 --- a/stdlib/socket.pyi +++ b/stdlib/socket.pyi @@ -1023,6 +1023,39 @@ if sys.platform != "linux": __all__ += ["IPPROTO_GGP", "IPPROTO_IPV4", "IPPROTO_MAX", "IPPROTO_ND", "IP_RECVDSTADDR", "SO_USELOOPBACK"] +if sys.version_info >= (3, 14): + from _socket import IP_RECVTTL as IP_RECVTTL + + __all__ += ["IP_RECVTTL"] + + if sys.platform == "win32" or sys.platform == "linux": + from _socket import IP_RECVERR as IP_RECVERR, IPV6_RECVERR as IPV6_RECVERR, SO_ORIGINAL_DST as SO_ORIGINAL_DST + + __all__ += ["IP_RECVERR", "IPV6_RECVERR", "SO_ORIGINAL_DST"] + + if sys.platform == "win32": + from _socket import ( + SO_BTH_ENCRYPT as SO_BTH_ENCRYPT, + SO_BTH_MTU as SO_BTH_MTU, + SO_BTH_MTU_MAX as SO_BTH_MTU_MAX, + SO_BTH_MTU_MIN as SO_BTH_MTU_MIN, + SOL_RFCOMM as SOL_RFCOMM, + TCP_QUICKACK as TCP_QUICKACK, + ) + + __all__ += ["SOL_RFCOMM", "SO_BTH_ENCRYPT", "SO_BTH_MTU", "SO_BTH_MTU_MAX", "SO_BTH_MTU_MIN", "TCP_QUICKACK"] + + if sys.platform == "linux": + from _socket import ( + CAN_RAW_ERR_FILTER as CAN_RAW_ERR_FILTER, + IP_FREEBIND as IP_FREEBIND, + IP_RECVORIGDSTADDR as IP_RECVORIGDSTADDR, + SO_ORIGINAL_DST as SO_ORIGINAL_DST, + VMADDR_CID_LOCAL as VMADDR_CID_LOCAL, + ) + + __all__ += ["CAN_RAW_ERR_FILTER", "IP_FREEBIND", "IP_RECVORIGDSTADDR", "VMADDR_CID_LOCAL"] + # Re-exported from errno EBADF: int EAGAIN: int From 3e51fc9cf70e0a7b15cbf233aaac8334e6c633af Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Wed, 14 May 2025 10:24:09 +0000 Subject: [PATCH 377/388] Remove deprecated get_loader and find_loader from pkgutil (#14061) --- stdlib/@tests/stubtest_allowlists/py314.txt | 3 --- stdlib/pkgutil.pyi | 14 ++++++++------ 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt 
b/stdlib/@tests/stubtest_allowlists/py314.txt index 476713f109dd..a63235b2da6d 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -88,9 +88,6 @@ multiprocessing.managers._BaseDictProxy.__ror__ multiprocessing.managers._BaseDictProxy.fromkeys multiprocessing.process.BaseProcess.interrupt multiprocessing.synchronize.SemLock.locked -pkgutil.__all__ -pkgutil.find_loader -pkgutil.get_loader pyexpat.errors.XML_ERROR_NOT_STARTED shutil.__all__ sre_compile.CH_NEGATE diff --git a/stdlib/pkgutil.pyi b/stdlib/pkgutil.pyi index d60e9bad53ae..e764d08e79f8 100644 --- a/stdlib/pkgutil.pyi +++ b/stdlib/pkgutil.pyi @@ -8,8 +8,6 @@ from typing_extensions import deprecated __all__ = [ "get_importer", "iter_importers", - "get_loader", - "find_loader", "walk_packages", "iter_modules", "get_data", @@ -17,6 +15,8 @@ __all__ = [ "extend_path", "ModuleInfo", ] +if sys.version_info < (3, 14): + __all__ += ["get_loader", "find_loader"] if sys.version_info < (3, 12): __all__ += ["ImpImporter", "ImpLoader"] @@ -36,11 +36,13 @@ if sys.version_info < (3, 12): class ImpLoader: def __init__(self, fullname: str, file: IO[str], filename: StrOrBytesPath, etc: tuple[str, str, int]) -> None: ... -@deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.") -def find_loader(fullname: str) -> LoaderProtocol | None: ... +if sys.version_info < (3, 14): + @deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.") + def find_loader(fullname: str) -> LoaderProtocol | None: ... + @deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.") + def get_loader(module_or_name: str) -> LoaderProtocol | None: ... + def get_importer(path_item: StrOrBytesPath) -> PathEntryFinderProtocol | None: ... -@deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.") -def get_loader(module_or_name: str) -> LoaderProtocol | None: ... def iter_importers(fullname: str = "") -> Iterator[MetaPathFinderProtocol | PathEntryFinderProtocol]: ... def iter_modules(path: Iterable[StrOrBytesPath] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ... def read_code(stream: SupportsRead[bytes]) -> Any: ... 
# undocumented From 99a5843afffb4c5be9d19db38b3ec656477c28cf Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Wed, 14 May 2025 10:25:22 +0000 Subject: [PATCH 378/388] Update `marshal.dump(s)` for 3.14 (#14059) --- stdlib/@tests/stubtest_allowlists/py314.txt | 2 -- stdlib/marshal.pyi | 20 ++++++++++++++------ 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index a63235b2da6d..fd7a51734eb3 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -74,8 +74,6 @@ importlib.machinery.AppleFrameworkLoader importlib.util.__all__ importlib.util.Loader logging.handlers.SysLogHandler.__init__ -marshal.dump -marshal.dumps multiprocessing.forkserver.main multiprocessing.managers.BaseListProxy.clear multiprocessing.managers.BaseListProxy.copy diff --git a/stdlib/marshal.pyi b/stdlib/marshal.pyi index 6ab202637dda..46c421e4ce30 100644 --- a/stdlib/marshal.pyi +++ b/stdlib/marshal.pyi @@ -2,10 +2,10 @@ import builtins import sys import types from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite -from typing import Any +from typing import Any, Final from typing_extensions import TypeAlias -version: int +version: Final[int] _Marshallable: TypeAlias = ( # handled in w_object() in marshal.c @@ -28,14 +28,22 @@ _Marshallable: TypeAlias = ( | ReadableBuffer ) -if sys.version_info >= (3, 13): +if sys.version_info >= (3, 14): + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 5, /, *, allow_code: bool = True) -> None: ... + def dumps(value: _Marshallable, version: int = 5, /, *, allow_code: bool = True) -> bytes: ... + +elif sys.version_info >= (3, 13): def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /, *, allow_code: bool = True) -> None: ... - def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: ... def dumps(value: _Marshallable, version: int = 4, /, *, allow_code: bool = True) -> bytes: ... - def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: ... else: def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: ... - def load(file: SupportsRead[bytes], /) -> Any: ... def dumps(value: _Marshallable, version: int = 4, /) -> bytes: ... + +if sys.version_info >= (3, 13): + def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: ... + def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: ... + +else: + def load(file: SupportsRead[bytes], /) -> Any: ... def loads(bytes: ReadableBuffer, /) -> Any: ... 
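As context for the marshal hunk above: Python 3.13 added the keyword-only allow_code flag to dump/dumps/load/loads, and 3.14 only bumps the default serialization version from 4 to 5. A minimal sketch of the keyword in use (illustrative only, not part of the patch; needs Python 3.13 or newer):

import marshal

payload = {"answer": 42, "items": [1, 2, 3]}
# Serialize plain data with code objects disallowed; the version is the
# positional-only second argument modelled by the stub.
blob = marshal.dumps(payload, 4, allow_code=False)
assert marshal.loads(blob, allow_code=False) == payload
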
From 946249293bd3a979713ad7353c05d025c7c5e378 Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Wed, 14 May 2025 11:16:44 +0000 Subject: [PATCH 379/388] Update `logging.handlers.SysLogHandler` for 3.14 (#14058) * Update `logging.handlers.SysLogHandler` for 3.14 * Fix method name --------- Co-authored-by: Sebastian Rittau --- stdlib/@tests/stubtest_allowlists/py314.txt | 1 - stdlib/logging/handlers.pyi | 16 +++++++++++++--- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/stdlib/@tests/stubtest_allowlists/py314.txt b/stdlib/@tests/stubtest_allowlists/py314.txt index fd7a51734eb3..922c0189915f 100644 --- a/stdlib/@tests/stubtest_allowlists/py314.txt +++ b/stdlib/@tests/stubtest_allowlists/py314.txt @@ -73,7 +73,6 @@ importlib.machinery.__all__ importlib.machinery.AppleFrameworkLoader importlib.util.__all__ importlib.util.Loader -logging.handlers.SysLogHandler.__init__ multiprocessing.forkserver.main multiprocessing.managers.BaseListProxy.clear multiprocessing.managers.BaseListProxy.copy diff --git a/stdlib/logging/handlers.pyi b/stdlib/logging/handlers.pyi index b58999e9d995..9636b81dc4f3 100644 --- a/stdlib/logging/handlers.pyi +++ b/stdlib/logging/handlers.pyi @@ -144,9 +144,19 @@ class SysLogHandler(Handler): priority_names: ClassVar[dict[str, int]] # undocumented facility_names: ClassVar[dict[str, int]] # undocumented priority_map: ClassVar[dict[str, str]] # undocumented - def __init__( - self, address: tuple[str, int] | str = ("localhost", 514), facility: str | int = 1, socktype: SocketKind | None = None - ) -> None: ... + if sys.version_info >= (3, 14): + timeout: float | None + def __init__( + self, + address: tuple[str, int] | str = ("localhost", 514), + facility: str | int = 1, + socktype: SocketKind | None = None, + timeout: float | None = None, + ) -> None: ... + else: + def __init__( + self, address: tuple[str, int] | str = ("localhost", 514), facility: str | int = 1, socktype: SocketKind | None = None + ) -> None: ... if sys.version_info >= (3, 11): def createSocket(self) -> None: ... 
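For context on the handler change above: the new timeout argument in 3.14 bounds the handler's socket operations, which matters mainly for TCP, where connecting to an unreachable syslog daemon could otherwise block indefinitely. A rough usage sketch on Python 3.14+; the address, socket type and timeout value are arbitrary examples, not recommendations:

import logging
import logging.handlers
import socket

# UDP target, so nothing needs to be listening on localhost:514.
handler = logging.handlers.SysLogHandler(
    address=("localhost", 514),
    socktype=socket.SOCK_DGRAM,
    timeout=2.0,  # accepted since 3.14, per the stub change above
)
log = logging.getLogger("syslog-demo")
log.addHandler(handler)
log.warning("hello from the timeout example")
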
From d20db118c177a0b4c36c2f51fbb5ba3c69df3de7 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 14 May 2025 08:10:29 -0400 Subject: [PATCH 380/388] networkx: type `is_*` and `has_*` functions (#14054) --- .../networkx/algorithms/asteroidal.pyi | 2 +- .../networkx/algorithms/bipartite/basic.pyi | 4 ++-- .../algorithms/coloring/equitable_coloring.pyi | 8 ++++++++ .../algorithms/community/community_utils.pyi | 4 +++- .../algorithms/components/attracting.pyi | 5 ++++- .../algorithms/components/biconnected.pyi | 2 +- .../algorithms/components/connected.pyi | 2 +- .../algorithms/components/semiconnected.pyi | 2 +- .../connectivity/edge_augmentation.pyi | 4 ++-- .../networkx/networkx/algorithms/covering.pyi | 4 ++-- .../networkx/algorithms/d_separation.pyi | 12 ++++++++++-- .../networkx/algorithms/distance_regular.pyi | 4 ++-- .../networkx/algorithms/dominating.pyi | 3 +-- stubs/networkx/networkx/algorithms/euler.pyi | 6 +++--- .../networkx/algorithms/flow/utils.pyi | 2 +- .../networkx/networkx/algorithms/graphical.pyi | 13 +++++++------ stubs/networkx/networkx/algorithms/hybrid.pyi | 2 +- stubs/networkx/networkx/algorithms/isolate.pyi | 2 +- .../networkx/algorithms/isomorphism/ismags.pyi | 2 +- .../algorithms/isomorphism/isomorphvf2.pyi | 2 +- .../networkx/networkx/algorithms/matching.pyi | 10 +++++++--- stubs/networkx/networkx/algorithms/regular.pyi | 4 ++-- .../networkx/algorithms/simple_paths.pyi | 4 ++-- .../networkx/networkx/algorithms/threshold.pyi | 5 ++++- .../networkx/algorithms/tournament.pyi | 6 +++--- .../networkx/algorithms/tree/recognition.pyi | 8 ++++---- stubs/networkx/networkx/algorithms/triads.pyi | 2 +- stubs/networkx/networkx/classes/function.pyi | 6 +++--- stubs/networkx/networkx/classes/multigraph.pyi | 2 +- .../networkx/networkx/generators/expanders.pyi | 3 ++- .../networkx/generators/joint_degree_seq.pyi | 18 ++++++++++++++---- 31 files changed, 96 insertions(+), 57 deletions(-) diff --git a/stubs/networkx/networkx/algorithms/asteroidal.pyi b/stubs/networkx/networkx/algorithms/asteroidal.pyi index eaa859a3bc22..8e3f726b7839 100644 --- a/stubs/networkx/networkx/algorithms/asteroidal.pyi +++ b/stubs/networkx/networkx/algorithms/asteroidal.pyi @@ -6,4 +6,4 @@ __all__ = ["is_at_free", "find_asteroidal_triple"] @_dispatchable def find_asteroidal_triple(G: Graph[_Node]): ... @_dispatchable -def is_at_free(G: Graph[_Node]): ... +def is_at_free(G: Graph[_Node]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/bipartite/basic.pyi b/stubs/networkx/networkx/algorithms/bipartite/basic.pyi index 280d44b12d91..fcf13edb9dac 100644 --- a/stubs/networkx/networkx/algorithms/bipartite/basic.pyi +++ b/stubs/networkx/networkx/algorithms/bipartite/basic.pyi @@ -9,9 +9,9 @@ __all__ = ["is_bipartite", "is_bipartite_node_set", "color", "sets", "density", @_dispatchable def color(G: Graph[_Node]): ... @_dispatchable -def is_bipartite(G: Graph[_Node]): ... +def is_bipartite(G: Graph[_Node]) -> bool: ... @_dispatchable -def is_bipartite_node_set(G: Graph[_Node], nodes): ... +def is_bipartite_node_set(G: Graph[_Node], nodes: Iterable[Incomplete]) -> bool: ... @_dispatchable def sets(G: Graph[_Node], top_nodes: Iterable[Incomplete] | None = None): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi b/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi index 1e768dd502b5..57557b5987de 100644 --- a/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi +++ b/stubs/networkx/networkx/algorithms/coloring/equitable_coloring.pyi @@ -1,7 +1,15 @@ +from _typeshed import Incomplete, SupportsGetItem +from collections.abc import Mapping +from typing import SupportsIndex + from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["equitable_color"] +@_dispatchable +def is_coloring(G: Graph[_Node], coloring: SupportsGetItem[Incomplete, Incomplete]) -> bool: ... +@_dispatchable +def is_equitable(G: Graph[_Node], coloring: Mapping[Incomplete, Incomplete], num_colors: SupportsIndex | None = None) -> bool: ... @_dispatchable def equitable_color(G: Graph[_Node], num_colors): ... diff --git a/stubs/networkx/networkx/algorithms/community/community_utils.pyi b/stubs/networkx/networkx/algorithms/community/community_utils.pyi index 032161edf31c..b51294b995dd 100644 --- a/stubs/networkx/networkx/algorithms/community/community_utils.pyi +++ b/stubs/networkx/networkx/algorithms/community/community_utils.pyi @@ -1,7 +1,9 @@ +from collections.abc import Container, Iterable + from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["is_partition"] @_dispatchable -def is_partition(G: Graph[_Node], communities): ... +def is_partition(G: Graph[_Node], communities: Iterable[Container[_Node]]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/components/attracting.pyi b/stubs/networkx/networkx/algorithms/components/attracting.pyi index 88649cf94a16..655b7d32f7b3 100644 --- a/stubs/networkx/networkx/algorithms/components/attracting.pyi +++ b/stubs/networkx/networkx/algorithms/components/attracting.pyi @@ -1,6 +1,9 @@ from _typeshed import Incomplete from collections.abc import Generator +from networkx.classes.digraph import DiGraph +from networkx.classes.graph import _Node +from networkx.classes.multidigraph import MultiDiGraph from networkx.utils.backends import _dispatchable __all__ = ["number_attracting_components", "attracting_components", "is_attracting_component"] @@ -10,4 +13,4 @@ def attracting_components(G) -> Generator[Incomplete, None, None]: ... @_dispatchable def number_attracting_components(G): ... @_dispatchable -def is_attracting_component(G): ... +def is_attracting_component(G: DiGraph[_Node] | MultiDiGraph[_Node]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/components/biconnected.pyi b/stubs/networkx/networkx/algorithms/components/biconnected.pyi index b7056599a5a7..80d10295496d 100644 --- a/stubs/networkx/networkx/algorithms/components/biconnected.pyi +++ b/stubs/networkx/networkx/algorithms/components/biconnected.pyi @@ -7,7 +7,7 @@ from networkx.utils.backends import _dispatchable __all__ = ["biconnected_components", "biconnected_component_edges", "is_biconnected", "articulation_points"] @_dispatchable -def is_biconnected(G: Graph[_Node]): ... +def is_biconnected(G: Graph[_Node]) -> bool: ... @_dispatchable def biconnected_component_edges(G: Graph[_Node]) -> Generator[Incomplete, Incomplete, None]: ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/components/connected.pyi b/stubs/networkx/networkx/algorithms/components/connected.pyi index c13a09e93da7..09256c0b6256 100644 --- a/stubs/networkx/networkx/algorithms/components/connected.pyi +++ b/stubs/networkx/networkx/algorithms/components/connected.pyi @@ -11,6 +11,6 @@ def connected_components(G: Graph[_Node]) -> Generator[Incomplete, None, None]: @_dispatchable def number_connected_components(G: Graph[_Node]): ... @_dispatchable -def is_connected(G: Graph[_Node]): ... +def is_connected(G: Graph[_Node]) -> bool: ... @_dispatchable def node_connected_component(G: Graph[_Node], n: _Node): ... diff --git a/stubs/networkx/networkx/algorithms/components/semiconnected.pyi b/stubs/networkx/networkx/algorithms/components/semiconnected.pyi index 9a02ac4e7156..17fc43bac7cf 100644 --- a/stubs/networkx/networkx/algorithms/components/semiconnected.pyi +++ b/stubs/networkx/networkx/algorithms/components/semiconnected.pyi @@ -4,4 +4,4 @@ from networkx.utils.backends import _dispatchable __all__ = ["is_semiconnected"] @_dispatchable -def is_semiconnected(G: Graph[_Node]): ... +def is_semiconnected(G: Graph[_Node]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi b/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi index 31ef0154fd5e..7be4e048f1ba 100644 --- a/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi +++ b/stubs/networkx/networkx/algorithms/connectivity/edge_augmentation.pyi @@ -7,9 +7,9 @@ from networkx.utils.backends import _dispatchable __all__ = ["k_edge_augmentation", "is_k_edge_connected", "is_locally_k_edge_connected"] @_dispatchable -def is_k_edge_connected(G: Graph[_Node], k: int): ... +def is_k_edge_connected(G: Graph[_Node], k: int) -> bool: ... @_dispatchable -def is_locally_k_edge_connected(G: Graph[_Node], s: _Node, t: _Node, k: int): ... +def is_locally_k_edge_connected(G: Graph[_Node], s: _Node, t: _Node, k: int) -> bool: ... @_dispatchable def k_edge_augmentation( G: Graph[_Node], diff --git a/stubs/networkx/networkx/algorithms/covering.pyi b/stubs/networkx/networkx/algorithms/covering.pyi index 3dd7daadc967..2bbc194af9d7 100644 --- a/stubs/networkx/networkx/algorithms/covering.pyi +++ b/stubs/networkx/networkx/algorithms/covering.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from collections.abc import Callable +from collections.abc import Callable, Iterable from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -9,4 +9,4 @@ __all__ = ["min_edge_cover", "is_edge_cover"] @_dispatchable def min_edge_cover(G: Graph[_Node], matching_algorithm: Callable[..., Incomplete] | None = None): ... @_dispatchable -def is_edge_cover(G: Graph[_Node], cover: set[Incomplete]): ... +def is_edge_cover(G: Graph[_Node], cover: Iterable[Iterable[Incomplete]]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/d_separation.pyi b/stubs/networkx/networkx/algorithms/d_separation.pyi index ad2bf15dd428..b3de331d8c77 100644 --- a/stubs/networkx/networkx/algorithms/d_separation.pyi +++ b/stubs/networkx/networkx/algorithms/d_separation.pyi @@ -7,7 +7,7 @@ from networkx.utils.backends import _dispatchable __all__ = ["is_d_separator", "is_minimal_d_separator", "find_minimal_d_separator", "d_separated", "minimal_d_separator"] @_dispatchable -def is_d_separator(G, x, y, z) -> bool: ... 
+def is_d_separator(G: DiGraph[_Node], x: _Node | set[_Node], y: _Node | set[_Node], z: _Node | set[_Node]) -> bool: ... @_dispatchable def find_minimal_d_separator(G, x, y, *, included=None, restricted=None) -> set[Incomplete] | None: ... @_dispatchable @@ -15,4 +15,12 @@ def d_separated(G, x, y, z): ... @_dispatchable def minimal_d_separator(G, u, v): ... @_dispatchable -def is_minimal_d_separator(G: DiGraph[_Node], x, y, z, *, included=None, restricted=None): ... +def is_minimal_d_separator( + G: DiGraph[_Node], + x: _Node | set[_Node], + y: _Node | set[_Node], + z: _Node | set[_Node], + *, + included: _Node | set[_Node] | None = None, + restricted: _Node | set[_Node] | None = None, +) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/distance_regular.pyi b/stubs/networkx/networkx/algorithms/distance_regular.pyi index 616db02e900e..4fd942c5f6d9 100644 --- a/stubs/networkx/networkx/algorithms/distance_regular.pyi +++ b/stubs/networkx/networkx/algorithms/distance_regular.pyi @@ -4,10 +4,10 @@ from networkx.utils.backends import _dispatchable __all__ = ["is_distance_regular", "is_strongly_regular", "intersection_array", "global_parameters"] @_dispatchable -def is_distance_regular(G: Graph[_Node]): ... +def is_distance_regular(G: Graph[_Node]) -> bool: ... @_dispatchable def global_parameters(b, c): ... @_dispatchable def intersection_array(G: Graph[_Node]): ... @_dispatchable -def is_strongly_regular(G: Graph[_Node]): ... +def is_strongly_regular(G: Graph[_Node]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/dominating.pyi b/stubs/networkx/networkx/algorithms/dominating.pyi index a1058271ad9b..4b3983e58a54 100644 --- a/stubs/networkx/networkx/algorithms/dominating.pyi +++ b/stubs/networkx/networkx/algorithms/dominating.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Iterable from networkx.classes.graph import Graph, _Node @@ -9,4 +8,4 @@ __all__ = ["dominating_set", "is_dominating_set"] @_dispatchable def dominating_set(G: Graph[_Node], start_with: _Node | None = None): ... @_dispatchable -def is_dominating_set(G: Graph[_Node], nbunch: Iterable[Incomplete]): ... +def is_dominating_set(G: Graph[_Node], nbunch: Iterable[_Node]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/euler.pyi b/stubs/networkx/networkx/algorithms/euler.pyi index 80f0d6e024e2..278aaec7b9fa 100644 --- a/stubs/networkx/networkx/algorithms/euler.pyi +++ b/stubs/networkx/networkx/algorithms/euler.pyi @@ -7,15 +7,15 @@ from networkx.utils.backends import _dispatchable __all__ = ["is_eulerian", "eulerian_circuit", "eulerize", "is_semieulerian", "has_eulerian_path", "eulerian_path"] @_dispatchable -def is_eulerian(G: Graph[_Node]): ... +def is_eulerian(G: Graph[_Node]) -> bool: ... @_dispatchable -def is_semieulerian(G): ... +def is_semieulerian(G: Graph[_Node]) -> bool: ... @_dispatchable def eulerian_circuit( G: Graph[_Node], source: _Node | None = None, keys: bool = False ) -> Generator[Incomplete, Incomplete, None]: ... @_dispatchable -def has_eulerian_path(G: Graph[_Node], source: _Node | None = None): ... +def has_eulerian_path(G: Graph[_Node], source: _Node | None = None) -> bool: ... @_dispatchable def eulerian_path(G: Graph[_Node], source=None, keys: bool = False) -> Generator[Incomplete, Incomplete, None]: ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/flow/utils.pyi b/stubs/networkx/networkx/algorithms/flow/utils.pyi index 0ae3a6bba8cc..cd5bdaf4bfa2 100644 --- a/stubs/networkx/networkx/algorithms/flow/utils.pyi +++ b/stubs/networkx/networkx/algorithms/flow/utils.pyi @@ -18,7 +18,7 @@ class Level: class GlobalRelabelThreshold: def __init__(self, n, m, freq) -> None: ... def add_work(self, work) -> None: ... - def is_reached(self): ... + def is_reached(self) -> bool: ... def clear_work(self) -> None: ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/graphical.pyi b/stubs/networkx/networkx/algorithms/graphical.pyi index 54936da7634a..95054c768cbc 100644 --- a/stubs/networkx/networkx/algorithms/graphical.pyi +++ b/stubs/networkx/networkx/algorithms/graphical.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete from collections.abc import Iterable +from typing import Literal from networkx.utils.backends import _dispatchable @@ -13,14 +14,14 @@ __all__ = [ ] @_dispatchable -def is_graphical(sequence: Iterable[Incomplete], method="eg"): ... +def is_graphical(sequence: Iterable[Incomplete], method: Literal["eg", "hh"] = "eg") -> bool: ... @_dispatchable -def is_valid_degree_sequence_havel_hakimi(deg_sequence: Iterable[Incomplete]): ... +def is_valid_degree_sequence_havel_hakimi(deg_sequence: Iterable[Incomplete]) -> bool: ... @_dispatchable -def is_valid_degree_sequence_erdos_gallai(deg_sequence: Iterable[Incomplete]): ... +def is_valid_degree_sequence_erdos_gallai(deg_sequence: Iterable[Incomplete]) -> bool: ... @_dispatchable -def is_multigraphical(sequence: Iterable[Incomplete]): ... +def is_multigraphical(sequence: Iterable[Incomplete]) -> bool: ... @_dispatchable -def is_pseudographical(sequence: Iterable[Incomplete]): ... +def is_pseudographical(sequence: Iterable[Incomplete]) -> bool: ... @_dispatchable -def is_digraphical(in_sequence: Iterable[Incomplete], out_sequence: Iterable[Incomplete]): ... +def is_digraphical(in_sequence: Iterable[Incomplete], out_sequence: Iterable[Incomplete]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/hybrid.pyi b/stubs/networkx/networkx/algorithms/hybrid.pyi index 5e1326905d26..84b18c083a60 100644 --- a/stubs/networkx/networkx/algorithms/hybrid.pyi +++ b/stubs/networkx/networkx/algorithms/hybrid.pyi @@ -6,4 +6,4 @@ __all__ = ["kl_connected_subgraph", "is_kl_connected"] @_dispatchable def kl_connected_subgraph(G: Graph[_Node], k: int, l: int, low_memory: bool = False, same_as_graph: bool = False): ... @_dispatchable -def is_kl_connected(G: Graph[_Node], k: int, l: int, low_memory: bool = False): ... +def is_kl_connected(G: Graph[_Node], k: int, l: int, low_memory: bool = False) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/isolate.pyi b/stubs/networkx/networkx/algorithms/isolate.pyi index 4a5715d228f4..afcd43f5f879 100644 --- a/stubs/networkx/networkx/algorithms/isolate.pyi +++ b/stubs/networkx/networkx/algorithms/isolate.pyi @@ -4,7 +4,7 @@ from networkx.utils.backends import _dispatchable __all__ = ["is_isolate", "isolates", "number_of_isolates"] @_dispatchable -def is_isolate(G: Graph[_Node], n: _Node): ... +def is_isolate(G: Graph[_Node], n: _Node) -> bool: ... @_dispatchable def isolates(G: Graph[_Node]): ... 
@_dispatchable diff --git a/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi b/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi index 1a3866cdc974..a16ea3e8ba11 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi @@ -13,7 +13,7 @@ class ISMAGS: def find_isomorphisms(self, symmetry: bool = True) -> Generator[Incomplete, Incomplete, Incomplete]: ... def largest_common_subgraph(self, symmetry: bool = True) -> Generator[Incomplete, Incomplete, None]: ... def analyze_symmetry(self, graph, node_partitions, edge_colors): ... - def is_isomorphic(self, symmetry: bool = False): ... + def is_isomorphic(self, symmetry: bool = False) -> bool: ... def subgraph_is_isomorphic(self, symmetry: bool = False): ... def isomorphisms_iter(self, symmetry: bool = True) -> Generator[Incomplete, Incomplete, None]: ... def subgraph_isomorphisms_iter(self, symmetry: bool = True): ... diff --git a/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi b/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi index 59aa12d827f2..47097ed0c177 100644 --- a/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi +++ b/stubs/networkx/networkx/algorithms/isomorphism/isomorphvf2.pyi @@ -23,7 +23,7 @@ class GraphMatcher: mapping: Incomplete def initialize(self) -> None: ... - def is_isomorphic(self): ... + def is_isomorphic(self) -> bool: ... def isomorphisms_iter(self) -> Generator[Incomplete, Incomplete, None]: ... def match(self) -> Generator[Incomplete, Incomplete, None]: ... def semantic_feasibility(self, G1_node, G2_node): ... diff --git a/stubs/networkx/networkx/algorithms/matching.pyi b/stubs/networkx/networkx/algorithms/matching.pyi index 4753abee8876..ee66595d0542 100644 --- a/stubs/networkx/networkx/algorithms/matching.pyi +++ b/stubs/networkx/networkx/algorithms/matching.pyi @@ -1,3 +1,6 @@ +from _typeshed import Incomplete +from collections.abc import Iterable, Mapping + from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -12,12 +15,13 @@ __all__ = [ @_dispatchable def maximal_matching(G: Graph[_Node]): ... +def matching_dict_to_set(matching: Mapping[Incomplete, Incomplete]) -> set[Incomplete]: ... @_dispatchable -def is_matching(G: Graph[_Node], matching): ... +def is_matching(G: Graph[_Node], matching: dict[Incomplete, Incomplete] | Iterable[Iterable[Incomplete]]) -> bool: ... @_dispatchable -def is_maximal_matching(G: Graph[_Node], matching): ... +def is_maximal_matching(G: Graph[_Node], matching: dict[Incomplete, Incomplete] | Iterable[Iterable[Incomplete]]) -> bool: ... @_dispatchable -def is_perfect_matching(G: Graph[_Node], matching): ... +def is_perfect_matching(G: Graph[_Node], matching: dict[Incomplete, Incomplete] | Iterable[Iterable[Incomplete]]) -> bool: ... @_dispatchable def min_weight_matching(G: Graph[_Node], weight: str | None = "weight"): ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/regular.pyi b/stubs/networkx/networkx/algorithms/regular.pyi index 04d966f9416f..7505f6d5a521 100644 --- a/stubs/networkx/networkx/algorithms/regular.pyi +++ b/stubs/networkx/networkx/algorithms/regular.pyi @@ -4,8 +4,8 @@ from networkx.utils.backends import _dispatchable __all__ = ["is_regular", "is_k_regular", "k_factor"] @_dispatchable -def is_regular(G: Graph[_Node]): ... +def is_regular(G: Graph[_Node]) -> bool: ... @_dispatchable -def is_k_regular(G: Graph[_Node], k): ... 
+def is_k_regular(G: Graph[_Node], k) -> bool: ... @_dispatchable def k_factor(G: Graph[_Node], k, matching_weight: str | None = "weight"): ... diff --git a/stubs/networkx/networkx/algorithms/simple_paths.pyi b/stubs/networkx/networkx/algorithms/simple_paths.pyi index a33ebdbb3bdc..2aa418a8a859 100644 --- a/stubs/networkx/networkx/algorithms/simple_paths.pyi +++ b/stubs/networkx/networkx/algorithms/simple_paths.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete, SupportsGetItem -from collections.abc import Callable, Generator, Iterable +from collections.abc import Callable, Collection, Generator from typing import Any from networkx.classes.graph import Graph, _Node @@ -8,7 +8,7 @@ from networkx.utils.backends import _dispatchable __all__ = ["all_simple_paths", "is_simple_path", "shortest_simple_paths", "all_simple_edge_paths"] @_dispatchable -def is_simple_path(G: Graph[_Node], nodes: Iterable[Incomplete]): ... +def is_simple_path(G: Graph[_Node], nodes: Collection[Incomplete]) -> bool: ... @_dispatchable def all_simple_paths(G: Graph[_Node], source: _Node, target, cutoff: int | None = None) -> Generator[list[_Node], None, None]: ... @_dispatchable diff --git a/stubs/networkx/networkx/algorithms/threshold.pyi b/stubs/networkx/networkx/algorithms/threshold.pyi index d92695ec45d9..83b4357827b7 100644 --- a/stubs/networkx/networkx/algorithms/threshold.pyi +++ b/stubs/networkx/networkx/algorithms/threshold.pyi @@ -1,9 +1,12 @@ +from collections.abc import Sequence + from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["is_threshold_graph", "find_threshold_graph"] @_dispatchable -def is_threshold_graph(G: Graph[_Node]): ... +def is_threshold_graph(G: Graph[_Node]) -> bool: ... +def is_threshold_sequence(degree_sequence: Sequence[list[int]]) -> bool: ... @_dispatchable def find_threshold_graph(G: Graph[_Node], create_using: Graph[_Node] | None = None): ... diff --git a/stubs/networkx/networkx/algorithms/tournament.pyi b/stubs/networkx/networkx/algorithms/tournament.pyi index f80e6e5b9834..f2de11bbc2af 100644 --- a/stubs/networkx/networkx/algorithms/tournament.pyi +++ b/stubs/networkx/networkx/algorithms/tournament.pyi @@ -5,7 +5,7 @@ from numpy.random import RandomState __all__ = ["hamiltonian_path", "is_reachable", "is_strongly_connected", "is_tournament", "random_tournament", "score_sequence"] @_dispatchable -def is_tournament(G: Graph[_Node]): ... +def is_tournament(G: Graph[_Node]) -> bool: ... @_dispatchable def hamiltonian_path(G: Graph[_Node]): ... @_dispatchable @@ -13,6 +13,6 @@ def random_tournament(n: int, seed: int | RandomState | None = None): ... @_dispatchable def score_sequence(G: Graph[_Node]): ... @_dispatchable -def is_reachable(G: Graph[_Node], s: _Node, t: _Node): ... +def is_reachable(G: Graph[_Node], s: _Node, t: _Node) -> bool: ... @_dispatchable -def is_strongly_connected(G: Graph[_Node]): ... +def is_strongly_connected(G: Graph[_Node]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/tree/recognition.pyi b/stubs/networkx/networkx/algorithms/tree/recognition.pyi index efacca7c55ba..14b1b4ae6147 100644 --- a/stubs/networkx/networkx/algorithms/tree/recognition.pyi +++ b/stubs/networkx/networkx/algorithms/tree/recognition.pyi @@ -5,10 +5,10 @@ from networkx.utils.backends import _dispatchable __all__ = ["is_arborescence", "is_branching", "is_forest", "is_tree"] @_dispatchable -def is_arborescence(G: Graph[_Node]): ... +def is_arborescence(G: Graph[_Node]) -> bool: ... 
@_dispatchable -def is_branching(G: DiGraph[_Node]): ... +def is_branching(G: DiGraph[_Node]) -> bool: ... @_dispatchable -def is_forest(G: Graph[_Node]): ... +def is_forest(G: Graph[_Node]) -> bool: ... @_dispatchable -def is_tree(G: Graph[_Node]): ... +def is_tree(G: Graph[_Node]) -> bool: ... diff --git a/stubs/networkx/networkx/algorithms/triads.pyi b/stubs/networkx/networkx/algorithms/triads.pyi index 8c0c61034b7c..d1fe5a3a4859 100644 --- a/stubs/networkx/networkx/algorithms/triads.pyi +++ b/stubs/networkx/networkx/algorithms/triads.pyi @@ -11,7 +11,7 @@ __all__ = ["triadic_census", "is_triad", "all_triplets", "all_triads", "triads_b @_dispatchable def triadic_census(G: DiGraph[_Node], nodelist: Collection[_Node] | None = None): ... @_dispatchable -def is_triad(G: Graph[_Node]): ... +def is_triad(G: Graph[_Node]) -> bool: ... @_dispatchable def all_triplets(G: DiGraph[_Node]): ... @_dispatchable diff --git a/stubs/networkx/networkx/classes/function.pyi b/stubs/networkx/networkx/classes/function.pyi index 4af2327955a1..1eb41e0a4bb9 100644 --- a/stubs/networkx/networkx/classes/function.pyi +++ b/stubs/networkx/networkx/classes/function.pyi @@ -67,7 +67,7 @@ def is_directed(G: DiGraph[Hashable]) -> Literal[True]: ... # type: ignore[misc @overload def is_directed(G: Graph[Hashable]) -> Literal[False]: ... def freeze(G): ... -def is_frozen(G): ... +def is_frozen(G: Graph[Incomplete]) -> bool: ... def add_star(G_to_add_to, nodes_for_star, **attr) -> None: ... def add_path(G_to_add_to, nodes_for_path, **attr) -> None: ... def add_cycle(G_to_add_to, nodes_for_cycle, **attr) -> None: ... @@ -104,7 +104,7 @@ def non_edges(graph: Graph[_Node]) -> Generator[tuple[_Node, _Node], None, None] def common_neighbors(G: Graph[_Node], u: _Node, v: _Node) -> Generator[_Node, None, None]: ... def is_weighted(G: Graph[_Node], edge: tuple[_Node, _Node] | None = None, weight: str = "weight") -> bool: ... @_dispatchable -def is_negatively_weighted(G: Graph[_Node], edge: tuple[_Node, _Node] | None = None, weight: str = "weight"): ... +def is_negatively_weighted(G: Graph[_Node], edge: tuple[_Node, _Node] | None = None, weight: str = "weight") -> bool: ... def is_empty(G: Graph[Hashable]) -> bool: ... def nodes_with_selfloops(G: Graph[_Node]) -> Generator[_Node, None, None]: ... @overload @@ -136,5 +136,5 @@ def selfloop_edges( G: Graph[_Node], data: str, keys: Literal[True], default: _U | None = None ) -> Generator[tuple[_Node, _Node, int, _U], None, None]: ... def number_of_selfloops(G: Graph[Hashable]) -> int: ... -def is_path(G, path) -> bool: ... +def is_path(G: Graph[_Node], path: Iterable[Incomplete]) -> bool: ... def path_weight(G, path, weight) -> int: ... diff --git a/stubs/networkx/networkx/classes/multigraph.pyi b/stubs/networkx/networkx/classes/multigraph.pyi index e5628285ac6e..58406cbc3c4b 100644 --- a/stubs/networkx/networkx/classes/multigraph.pyi +++ b/stubs/networkx/networkx/classes/multigraph.pyi @@ -20,7 +20,7 @@ class MultiGraph(Graph[_Node]): def new_edge_key(self, u: _Node, v: _Node) -> int: ... def add_edge(self, u_for_edge, v_for_edge, key=None, **attr): ... # type: ignore[override] # Has an additional `key` keyword argument def remove_edge(self, u, v, key=None): ... - def has_edge(self, u, v, key=None): ... + def has_edge(self, u: _Node, v: _Node, key=None) -> bool: ... def get_edge_data( # type: ignore[override] # Has an additional `key` keyword argument self, u, v, key=None, default=None ): ... 
diff --git a/stubs/networkx/networkx/generators/expanders.pyi b/stubs/networkx/networkx/generators/expanders.pyi index 6970592a3ffb..5786272dced3 100644 --- a/stubs/networkx/networkx/generators/expanders.pyi +++ b/stubs/networkx/networkx/generators/expanders.pyi @@ -1,3 +1,4 @@ +from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = [ @@ -18,6 +19,6 @@ def paley_graph(p, create_using=None): ... @_dispatchable def maybe_regular_expander(n, d, *, create_using=None, max_tries=100, seed=None): ... @_dispatchable -def is_regular_expander(G, *, epsilon=0) -> bool: ... +def is_regular_expander(G: Graph[_Node], *, epsilon: float = 0) -> bool: ... @_dispatchable def random_regular_expander_graph(n, d, *, epsilon=0, create_using=None, max_tries=100, seed=None): ... diff --git a/stubs/networkx/networkx/generators/joint_degree_seq.pyi b/stubs/networkx/networkx/generators/joint_degree_seq.pyi index 98fea472a969..2407905149d3 100644 --- a/stubs/networkx/networkx/generators/joint_degree_seq.pyi +++ b/stubs/networkx/networkx/generators/joint_degree_seq.pyi @@ -1,12 +1,22 @@ +from collections.abc import Mapping, Sequence + from networkx.utils.backends import _dispatchable +from numpy.random import RandomState __all__ = ["is_valid_joint_degree", "is_valid_directed_joint_degree", "joint_degree_graph", "directed_joint_degree_graph"] @_dispatchable -def is_valid_joint_degree(joint_degrees): ... +def is_valid_joint_degree(joint_degrees: Mapping[int, Mapping[int, int]]) -> bool: ... @_dispatchable -def joint_degree_graph(joint_degrees, seed=None): ... +def joint_degree_graph(joint_degrees: Mapping[int, Mapping[int, int]], seed: int | RandomState | None = None): ... @_dispatchable -def is_valid_directed_joint_degree(in_degrees, out_degrees, nkk): ... +def is_valid_directed_joint_degree( + in_degrees: Sequence[int], out_degrees: Sequence[int], nkk: Mapping[int, Mapping[int, int]] +) -> bool: ... @_dispatchable -def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): ... +def directed_joint_degree_graph( + in_degrees: Sequence[int], + out_degrees: Sequence[int], + nkk: Mapping[int, Mapping[int, int]], + seed: int | RandomState | None = None, +): ... 
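Not part of the patch series itself: a minimal sketch of what the `-> bool` and `Literal["eg", "hh"]` annotations above buy a caller, assuming networkx and these updated stubs are installed. The graph and degree sequence are made up for illustration.

    import networkx as nx

    G = nx.path_graph(4)  # 0-1-2-3

    # With the stubs above these expressions now type-check as `bool`
    # rather than `Any`, so they flow into boolean contexts cleanly.
    connected: bool = nx.is_connected(G)
    graphical: bool = nx.is_graphical([2, 2, 1, 1], method="hh")  # method is Literal["eg", "hh"]

    if connected and graphical:
        print("connected graph with a graphical degree sequence")
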
From d60dac1a8052da5abfdff976431aa05e270d4be9 Mon Sep 17 00:00:00 2001 From: wyattscarpenter Date: Wed, 14 May 2025 05:36:17 -0700 Subject: [PATCH 381/388] Update nx_pylab.pyi: allow node_color to be a list of strings as well (#14057) --- stubs/networkx/networkx/drawing/nx_pylab.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stubs/networkx/networkx/drawing/nx_pylab.pyi b/stubs/networkx/networkx/drawing/nx_pylab.pyi index 042f1e1dd15a..acd59457dead 100644 --- a/stubs/networkx/networkx/drawing/nx_pylab.pyi +++ b/stubs/networkx/networkx/drawing/nx_pylab.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from collections.abc import Collection +from collections.abc import Collection, Sequence __all__ = [ "draw", @@ -25,7 +25,7 @@ def draw_networkx_nodes( pos, nodelist: Collection[Incomplete] | None = None, node_size: Incomplete | int = 300, - node_color: str = "#1f78b4", + node_color: str | Sequence[str] = "#1f78b4", node_shape: str = "o", alpha=None, cmap=None, From fab2cde1342fed2ea94695293f41df0636274e79 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Wed, 14 May 2025 15:38:10 +0200 Subject: [PATCH 382/388] [requests] Add a _JSON type alias (#14064) --- stubs/requests/requests/api.pyi | 19 +++++++++---------- stubs/requests/requests/models.pyi | 8 +++++--- stubs/requests/requests/sessions.pyi | 26 +++++++++++++------------- 3 files changed, 27 insertions(+), 26 deletions(-) diff --git a/stubs/requests/requests/api.pyi b/stubs/requests/requests/api.pyi index 02ecab01d4c8..49e0ce247432 100644 --- a/stubs/requests/requests/api.pyi +++ b/stubs/requests/requests/api.pyi @@ -1,9 +1,8 @@ -from _typeshed import Incomplete from collections.abc import Mapping from http.cookiejar import CookieJar from typing_extensions import TypeAlias -from .models import Response +from .models import _JSON, Response from .sessions import _Auth, _Cert, _Data, _Files, _HooksInput, _Params, _TextMapping, _Timeout, _Verify _HeadersMapping: TypeAlias = Mapping[str, str | bytes | None] @@ -25,7 +24,7 @@ def request( stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def get( url: str | bytes, @@ -43,7 +42,7 @@ def get( stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def options( url: str | bytes, @@ -61,7 +60,7 @@ def options( stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def head( url: str | bytes, @@ -79,12 +78,12 @@ def head( stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def post( url: str | bytes, data: _Data | None = None, - json: Incomplete | None = None, + json: _JSON | None = None, *, params: _Params | None = ..., headers: _HeadersMapping | None = ..., @@ -115,7 +114,7 @@ def put( stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def patch( url: str | bytes, @@ -133,7 +132,7 @@ def patch( stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... 
def delete( url: str | bytes, @@ -151,5 +150,5 @@ def delete( stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... diff --git a/stubs/requests/requests/models.pyi b/stubs/requests/requests/models.pyi index d95a7382e530..2747c9799eed 100644 --- a/stubs/requests/requests/models.pyi +++ b/stubs/requests/requests/models.pyi @@ -3,7 +3,7 @@ from _typeshed import Incomplete, MaybeNone, Unused from collections.abc import Callable, Iterator from json import JSONDecoder from typing import Any -from typing_extensions import Self +from typing_extensions import Self, TypeAlias from urllib3 import exceptions as urllib3_exceptions, fields, filepost, util from urllib3.response import HTTPResponse @@ -13,6 +13,8 @@ from .adapters import HTTPAdapter from .cookies import RequestsCookieJar from .structures import CaseInsensitiveDict as CaseInsensitiveDict +_JSON: TypeAlias = Any # any object that can be serialized to JSON + default_hooks = hooks.default_hooks HTTPBasicAuth = auth.HTTPBasicAuth cookiejar_from_dict = cookies.cookiejar_from_dict @@ -63,7 +65,7 @@ class Request(RequestHooksMixin): headers: Incomplete files: Incomplete data: Incomplete - json: Incomplete + json: _JSON | None params: Incomplete auth: Incomplete cookies: Incomplete @@ -78,7 +80,7 @@ class Request(RequestHooksMixin): auth=None, cookies=None, hooks=None, - json=None, + json: _JSON | None = None, ) -> None: ... def prepare(self) -> PreparedRequest: ... diff --git a/stubs/requests/requests/sessions.pyi b/stubs/requests/requests/sessions.pyi index 61f68b914bcf..ec2211c6bc4e 100644 --- a/stubs/requests/requests/sessions.pyi +++ b/stubs/requests/requests/sessions.pyi @@ -1,10 +1,10 @@ -from _typeshed import Incomplete, SupportsItems, SupportsRead, Unused +from _typeshed import SupportsItems, SupportsRead, Unused from collections.abc import Callable, Iterable, Mapping, MutableMapping from typing import Any, TypedDict from typing_extensions import Self, TypeAlias from . import adapters, auth as _auth, compat, cookies, exceptions, hooks, models, status_codes, utils -from .models import Response +from .models import _JSON, Response from .structures import CaseInsensitiveDict as CaseInsensitiveDict _BaseAdapter: TypeAlias = adapters.BaseAdapter @@ -44,10 +44,10 @@ class SessionRedirectMixin: resp, req, stream: bool = False, - timeout: Incomplete | None = None, + timeout=None, verify: bool = True, - cert: Incomplete | None = None, - proxies: Incomplete | None = None, + cert=None, + proxies=None, yield_requests: bool = False, **adapter_kwargs, ): ... @@ -151,7 +151,7 @@ class Session(SessionRedirectMixin): stream: bool | None = None, verify: _Verify | None = None, cert: _Cert | None = None, - json: Incomplete | None = None, + json: _JSON | None = None, ) -> Response: ... def get( self, @@ -170,7 +170,7 @@ class Session(SessionRedirectMixin): stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def options( self, @@ -189,7 +189,7 @@ class Session(SessionRedirectMixin): stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... 
def head( self, @@ -208,13 +208,13 @@ class Session(SessionRedirectMixin): stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def post( self, url: str | bytes, data: _Data | None = None, - json: Incomplete | None = None, + json: _JSON | None = None, *, params: _Params | None = ..., headers: _HeadersUpdateMapping | None = ..., @@ -246,7 +246,7 @@ class Session(SessionRedirectMixin): stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def patch( self, @@ -265,7 +265,7 @@ class Session(SessionRedirectMixin): stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def delete( self, @@ -284,7 +284,7 @@ class Session(SessionRedirectMixin): stream: bool | None = ..., verify: _Verify | None = ..., cert: _Cert | None = ..., - json: Incomplete | None = ..., + json: _JSON | None = None, ) -> Response: ... def send( self, From bf4d881e7305edef90974ea64e8e1b08e3a5133d Mon Sep 17 00:00:00 2001 From: Semyon Moroz Date: Wed, 14 May 2025 13:40:39 +0000 Subject: [PATCH 383/388] Deprecate `nturl2path` (#14065) --- stdlib/nturl2path.pyi | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/stdlib/nturl2path.pyi b/stdlib/nturl2path.pyi index b8ad8d682155..c38a359469d2 100644 --- a/stdlib/nturl2path.pyi +++ b/stdlib/nturl2path.pyi @@ -1,2 +1,12 @@ -def url2pathname(url: str) -> str: ... -def pathname2url(p: str) -> str: ... +import sys +from typing_extensions import deprecated + +if sys.version_info >= (3, 14): + @deprecated("nturl2path module was deprecated since Python 3.14") + def url2pathname(url: str) -> str: ... + @deprecated("nturl2path module was deprecated since Python 3.14") + def pathname2url(p: str) -> str: ... + +else: + def url2pathname(url: str) -> str: ... + def pathname2url(p: str) -> str: ... From 0a0d2c5907fc9caff9b2d6fa45995b8238ace607 Mon Sep 17 00:00:00 2001 From: Collin Anderson Date: Wed, 14 May 2025 15:11:50 -0400 Subject: [PATCH 384/388] reportlab: canvas.getPageNumber() returns int (#14066) --- stubs/reportlab/reportlab/pdfgen/canvas.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/reportlab/reportlab/pdfgen/canvas.pyi b/stubs/reportlab/reportlab/pdfgen/canvas.pyi index 7240c215996c..214fe6383b93 100644 --- a/stubs/reportlab/reportlab/pdfgen/canvas.pyi +++ b/stubs/reportlab/reportlab/pdfgen/canvas.pyi @@ -186,7 +186,7 @@ class Canvas(_PDFColorSetter): kind: str = "URI", **kw, ) -> None: ... - def getPageNumber(self): ... + def getPageNumber(self) -> int: ... def save(self) -> None: ... def getpdfdata(self): ... def setPageSize(self, size: tuple[float, float]) -> None: ... 
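Not part of the patches above: the `_JSON` alias in requests is `Any` underneath, so call sites such as `requests.post(url, json={"key": "value"})` keep type-checking exactly as before; the alias only documents intent. The nturl2path change is more visible to users. A rough sketch of how the versioned `@deprecated` marker surfaces, assuming a type checker that implements PEP 702 (e.g. pyright, or mypy with the `deprecated` error code enabled):

    import nturl2path

    # Under the Python 3.14 branch of the stub above, a PEP 702-aware
    # checker reports this call as deprecated; at runtime it still works.
    path = nturl2path.url2pathname("///C:/tmp/file.txt")
    print(path)  # C:\tmp\file.txt
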
From fe9f461fa8f00401f64b2ceb2daa3b91d912e1e3 Mon Sep 17 00:00:00 2001 From: David Salvisberg Date: Thu, 15 May 2025 12:35:12 +0200 Subject: [PATCH 385/388] gevent: Remove remaining uses of `Incomplete` (#14072) --- stubs/gevent/gevent/_util.pyi | 5 ++--- stubs/gevent/gevent/os.pyi | 13 +++++++++---- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/stubs/gevent/gevent/_util.pyi b/stubs/gevent/gevent/_util.pyi index f4528329998a..77c8cbd3a1f0 100644 --- a/stubs/gevent/gevent/_util.pyi +++ b/stubs/gevent/gevent/_util.pyi @@ -1,5 +1,4 @@ -from _typeshed import Incomplete -from collections.abc import Callable, MutableMapping, Sequence +from collections.abc import Callable, Iterable, MutableMapping, Sequence from types import ModuleType from typing import Any, Generic, TypeVar, overload from typing_extensions import Self @@ -13,7 +12,7 @@ def update_wrapper(wrapper: _T, wrapped: object, assigned: Sequence[str] = ..., def copy_globals( source: ModuleType, globs: MutableMapping[str, Any], - only_names: Incomplete | None = None, + only_names: Iterable[str] | None = None, ignore_missing_names: bool = False, names_to_ignore: Sequence[str] = ..., dunder_names_to_keep: Sequence[str] = ..., diff --git a/stubs/gevent/gevent/os.pyi b/stubs/gevent/gevent/os.pyi index b2c6c4145702..279312a03bd2 100644 --- a/stubs/gevent/gevent/os.pyi +++ b/stubs/gevent/gevent/os.pyi @@ -1,9 +1,11 @@ import os import sys -from _typeshed import FileDescriptor, Incomplete, ReadableBuffer +from _typeshed import FileDescriptor, ReadableBuffer from collections.abc import Callable from typing import Literal +from gevent._types import _ChildWatcher, _Loop + def tp_read(fd: FileDescriptor, n: int) -> bytes: ... def tp_write(fd: FileDescriptor, buf: ReadableBuffer) -> int: ... @@ -17,11 +19,14 @@ if sys.platform != "win32": def forkpty_gevent() -> tuple[int, int]: ... waitpid = os.waitpid def fork_and_watch( - callback: Incomplete | None = None, loop: Incomplete | None = None, ref: bool = False, fork: Callable[[], int] = ... + callback: Callable[[_ChildWatcher], object] | None = None, + loop: _Loop | None = None, + ref: bool = False, + fork: Callable[[], int] = ..., ) -> int: ... def forkpty_and_watch( - callback: Incomplete | None = None, - loop: Incomplete | None = None, + callback: Callable[[_ChildWatcher], object] | None = None, + loop: _Loop | None = None, ref: bool = False, forkpty: Callable[[], tuple[int, int]] = ..., ) -> tuple[int, int]: ... From e2bd1ff95144c6754de591a8acf56f230b107449 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 15 May 2025 10:29:04 -0400 Subject: [PATCH 386/388] Bump setuptools to 80.7.* (#14069) --------- Co-authored-by: Sebastian Rittau --- stubs/setuptools/METADATA.toml | 3 +-- stubs/setuptools/setuptools/__init__.pyi | 1 - .../setuptools/command/bdist_egg.pyi | 2 +- .../setuptools/command/build_ext.pyi | 1 - stubs/setuptools/setuptools/dist.pyi | 3 ++- stubs/setuptools/setuptools/installer.pyi | 18 ++++++++++++++++-- 6 files changed, 20 insertions(+), 8 deletions(-) diff --git a/stubs/setuptools/METADATA.toml b/stubs/setuptools/METADATA.toml index 3000300b6f35..cda6196e2c96 100644 --- a/stubs/setuptools/METADATA.toml +++ b/stubs/setuptools/METADATA.toml @@ -1,10 +1,9 @@ -version = "80.4.*" +version = "80.7.*" upstream_repository = "https://github.com/pypa/setuptools" extra_description = """\ Given that `pkg_resources` is typed since `setuptools >= 71.1`, \ it is no longer included with `types-setuptools`. 
""" -requires = ["setuptools"] # For pkg_resources [tool.stubtest] # darwin is equivalent to linux for OS-specific methods diff --git a/stubs/setuptools/setuptools/__init__.pyi b/stubs/setuptools/setuptools/__init__.pyi index 815e1336ce50..d8d0b22c3048 100644 --- a/stubs/setuptools/setuptools/__init__.pyi +++ b/stubs/setuptools/setuptools/__init__.pyi @@ -109,7 +109,6 @@ class Command(_Command): distribution: Distribution # Any: Dynamic command subclass attributes def __init__(self, dist: Distribution, **kw: Any) -> None: ... - def ensure_string_list(self, option: str) -> None: ... # Note: Commands that setuptools doesn't re-expose are considered deprecated (they must be imported from distutils directly) # So we're not listing them here. This list comes directly from the setuptools/command folder. Minus the test command. @overload # type: ignore[override] diff --git a/stubs/setuptools/setuptools/command/bdist_egg.pyi b/stubs/setuptools/setuptools/command/bdist_egg.pyi index 762976ad5949..264052cda2af 100644 --- a/stubs/setuptools/setuptools/command/bdist_egg.pyi +++ b/stubs/setuptools/setuptools/command/bdist_egg.pyi @@ -17,7 +17,7 @@ class bdist_egg(Command): user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] bdist_dir: Incomplete - plat_name: Incomplete + plat_name: str keep_temp: bool dist_dir: Incomplete skip_build: bool diff --git a/stubs/setuptools/setuptools/command/build_ext.pyi b/stubs/setuptools/setuptools/command/build_ext.pyi index 6f4e74424a27..85c477b0366e 100644 --- a/stubs/setuptools/setuptools/command/build_ext.pyi +++ b/stubs/setuptools/setuptools/command/build_ext.pyi @@ -9,7 +9,6 @@ have_rtld: bool use_stubs: bool libtype: str -def if_dl(s): ... def get_abi3_suffix(): ... class build_ext(_build_ext): diff --git a/stubs/setuptools/setuptools/dist.pyi b/stubs/setuptools/setuptools/dist.pyi index 4cdbdec2a494..fb04a7b41ce9 100644 --- a/stubs/setuptools/setuptools/dist.pyi +++ b/stubs/setuptools/setuptools/dist.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete, StrPath from collections.abc import Iterable, Iterator, MutableMapping +from importlib import metadata from typing import Literal, TypeVar, overload from . import Command, SetuptoolsDeprecationWarning @@ -39,7 +40,7 @@ class Distribution(_Distribution): setup_requires: list[str] def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: ... def parse_config_files(self, filenames: Iterable[StrPath] | None = None, ignore_option_errors: bool = False) -> None: ... - def fetch_build_eggs(self, requires: str | Iterable[str]): ... + def fetch_build_eggs(self, requires: str | Iterable[str]) -> list[metadata.Distribution]: ... def get_egg_cache_dir(self) -> str: ... def fetch_build_egg(self, req): ... # NOTE: Commands that setuptools doesn't re-expose are considered deprecated (they must be imported from distutils directly) diff --git a/stubs/setuptools/setuptools/installer.pyi b/stubs/setuptools/setuptools/installer.pyi index 0d3eaf68c0c4..b53a7f6fcc2e 100644 --- a/stubs/setuptools/setuptools/installer.pyi +++ b/stubs/setuptools/setuptools/installer.pyi @@ -1,2 +1,16 @@ -def fetch_build_egg(dist, req): ... -def strip_marker(req): ... +from importlib import metadata +from typing import Any +from typing_extensions import deprecated + +@deprecated( + """ + `setuptools.installer` and `fetch_build_eggs` are deprecated. + Requirements should be satisfied by a PEP 517 installer. + If you are using pip, you can try `pip install --use-pep517`. 
+ """ +) +def fetch_build_egg(dist, req) -> metadata.Distribution | metadata.PathDistribution: ... + +# Returns packaging.requirements.Requirement +# But since this module is deprecated, we avoid declaring a dependency on packaging +def strip_marker(req) -> Any: ... From 7913fb8fdff939295bbf1b1cfc66ffe71a2c0080 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 15 May 2025 18:47:56 +0200 Subject: [PATCH 387/388] Update reportlab to 4.4.1 (#14073) --- stubs/reportlab/@tests/stubtest_allowlist.txt | 5 +++++ stubs/reportlab/METADATA.toml | 2 +- stubs/reportlab/reportlab/graphics/charts/doughnut.pyi | 3 ++- stubs/reportlab/reportlab/graphics/charts/piecharts.pyi | 3 ++- stubs/reportlab/reportlab/pdfbase/ttfonts.pyi | 4 ++-- 5 files changed, 12 insertions(+), 5 deletions(-) diff --git a/stubs/reportlab/@tests/stubtest_allowlist.txt b/stubs/reportlab/@tests/stubtest_allowlist.txt index c96ce50c36d6..745bf361913c 100644 --- a/stubs/reportlab/@tests/stubtest_allowlist.txt +++ b/stubs/reportlab/@tests/stubtest_allowlist.txt @@ -6,6 +6,11 @@ reportlab.rl_config.__all__ reportlab.graphics.barcode.eanbc.__all__ reportlab.graphics.barcode.ecc200datamatrix.__all__ +# shapeFragWord has two incompatible definitions, depending on whether +# uharfbuzz is installed or not. We use the version where uharfbuzz is +# installed. +reportlab.pdfbase.ttfonts.shapeFragWord + # Error: is inconsistent # ====================== # The drawOn method violates LSP all over the place and it's usually diff --git a/stubs/reportlab/METADATA.toml b/stubs/reportlab/METADATA.toml index d1a58be6ea2c..e05df1b21d12 100644 --- a/stubs/reportlab/METADATA.toml +++ b/stubs/reportlab/METADATA.toml @@ -1,4 +1,4 @@ -version = "4.4.*" +version = "~= 4.4.1" # GitHub mirror of https://hg.reportlab.com/hg-public/reportlab/file upstream_repository = "https://github.com/MrBitBucket/reportlab-mirror" diff --git a/stubs/reportlab/reportlab/graphics/charts/doughnut.pyi b/stubs/reportlab/reportlab/graphics/charts/doughnut.pyi index 84204a325ad0..a25351583a30 100644 --- a/stubs/reportlab/reportlab/graphics/charts/doughnut.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/doughnut.pyi @@ -22,7 +22,8 @@ class Doughnut(AbstractPieChart): sideLabels: int innerRadiusFraction: Incomplete slices: Incomplete - def __init__(self) -> None: ... + angleRange: int + def __init__(self, *, angleRange: int = 360, **kwds) -> None: ... def demo(self): ... def normalizeData(self, data: Incomplete | None = None): ... def makeSectors(self): ... diff --git a/stubs/reportlab/reportlab/graphics/charts/piecharts.pyi b/stubs/reportlab/reportlab/graphics/charts/piecharts.pyi index 577c3279b8e5..c2be4eacf55a 100644 --- a/stubs/reportlab/reportlab/graphics/charts/piecharts.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/piecharts.pyi @@ -80,7 +80,8 @@ class Pie(AbstractPieChart): sideLabels: int sideLabelsOffset: float slices: Incomplete - def __init__(self, **kwd) -> None: ... + angleRange: int + def __init__(self, *, angleRange: int = 360, **kwds) -> None: ... def demo(self): ... centerx: Incomplete centery: Incomplete diff --git a/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi b/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi index 7a77835e46d6..c3f4d20dcc7c 100644 --- a/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi +++ b/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi @@ -180,7 +180,7 @@ class ShapedStr(str): def __new__(cls, s, shapeData: ShapeData | None = None) -> Self: ... def __radd__(self, other) -> Self: ... 
-def shapeStr(s: str, fontName: str, fontSize: float): ... +def shapeStr(s: str, fontName: str, fontSize: float, force: bool = False): ... def freshTTFont(ttfn, ttfpath, **kwds) -> TTFont: ... def makeShapedFragWord(w, K: list[Incomplete] = [], V: list[Incomplete] = []) -> type[ShapedFragWord]: ... -def shapeFragWord(w, features: Incomplete | None = None): ... +def shapeFragWord(w, features=..., force: bool = False): ... From 0ed49d83df07be55db83b4e3081453a1ec0b5d50 Mon Sep 17 00:00:00 2001 From: Sebastian Rittau Date: Thu, 15 May 2025 21:37:43 +0200 Subject: [PATCH 388/388] Replace `Incomplete | None = None` in third party stubs (#14063) --- pyrightconfig.stricter.json | 6 +- .../integrations/base_client/async_app.pyi | 14 +- .../integrations/base_client/async_openid.pyi | 4 +- .../base_client/framework_integration.pyi | 4 +- .../integrations/base_client/registry.pyi | 4 +- .../integrations/base_client/sync_app.pyi | 70 +++---- .../integrations/base_client/sync_openid.pyi | 4 +- .../authlib/jose/drafts/_jwe_algorithms.pyi | 8 +- stubs/Authlib/authlib/jose/jwk.pyi | 6 +- stubs/Authlib/authlib/jose/rfc7515/jws.pyi | 8 +- stubs/Authlib/authlib/jose/rfc7516/jwe.pyi | 14 +- stubs/Authlib/authlib/jose/rfc7516/models.pyi | 8 +- .../jose/rfc7517/_cryptography_key.pyi | 6 +- .../authlib/jose/rfc7517/asymmetric_key.pyi | 16 +- .../Authlib/authlib/jose/rfc7517/base_key.pyi | 2 +- stubs/Authlib/authlib/jose/rfc7517/jwk.pyi | 2 +- stubs/Authlib/authlib/jose/rfc7518/ec_key.pyi | 2 +- .../Authlib/authlib/jose/rfc7518/jwe_algs.pyi | 12 +- .../Authlib/authlib/jose/rfc7518/oct_key.pyi | 6 +- .../Authlib/authlib/jose/rfc7518/rsa_key.pyi | 4 +- stubs/Authlib/authlib/jose/rfc7519/claims.pyi | 4 +- stubs/Authlib/authlib/jose/rfc7519/jwt.pyi | 11 +- .../Authlib/authlib/jose/rfc8037/okp_key.pyi | 4 +- stubs/Authlib/authlib/oauth1/client.pyi | 18 +- .../oauth1/rfc5849/authorization_server.pyi | 4 +- .../authlib/oauth1/rfc5849/client_auth.pyi | 14 +- .../Authlib/authlib/oauth1/rfc5849/errors.pyi | 6 +- .../authlib/oauth1/rfc5849/parameters.pyi | 4 +- .../authlib/oauth1/rfc5849/signature.pyi | 6 +- .../authlib/oauth1/rfc5849/wrapper.pyi | 2 +- stubs/Authlib/authlib/oauth2/auth.pyi | 4 +- stubs/Authlib/authlib/oauth2/base.pyi | 14 +- stubs/Authlib/authlib/oauth2/client.pyi | 70 ++----- .../oauth2/rfc6749/authorization_server.pyi | 13 +- .../Authlib/authlib/oauth2/rfc6749/errors.pyi | 2 +- .../authlib/oauth2/rfc6749/grants/base.pyi | 2 +- .../authlib/oauth2/rfc6749/parameters.pyi | 18 +- .../authlib/oauth2/rfc6749/requests.pyi | 2 +- .../oauth2/rfc6749/resource_protector.pyi | 2 +- .../Authlib/authlib/oauth2/rfc6750/errors.pyi | 10 +- .../authlib/oauth2/rfc6750/parameters.pyi | 6 +- .../Authlib/authlib/oauth2/rfc6750/token.pyi | 5 +- .../authlib/oauth2/rfc7009/parameters.pyi | 6 +- .../Authlib/authlib/oauth2/rfc7521/client.pyi | 8 +- .../authlib/oauth2/rfc7523/assertion.pyi | 20 +- stubs/Authlib/authlib/oauth2/rfc7523/auth.pyi | 8 +- .../authlib/oauth2/rfc7523/jwt_bearer.pyi | 11 +- .../Authlib/authlib/oauth2/rfc7523/token.pyi | 23 +-- .../authlib/oauth2/rfc7523/validator.pyi | 4 +- .../authlib/oauth2/rfc7591/endpoint.pyi | 2 +- .../authlib/oauth2/rfc7592/endpoint.pyi | 2 +- .../Authlib/authlib/oauth2/rfc9068/claims.pyi | 4 +- .../authlib/oauth2/rfc9068/introspection.pyi | 2 +- .../authlib/oauth2/rfc9068/revocation.pyi | 2 +- .../Authlib/authlib/oauth2/rfc9068/token.pyi | 8 +- .../oauth2/rfc9068/token_validator.pyi | 10 +- stubs/Authlib/authlib/oidc/core/claims.pyi | 4 +- .../authlib/oidc/core/grants/implicit.pyi | 2 
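Not part of the patches above, and `packaging` is not a dependency they introduce: just an illustration of what the reportlab METADATA.toml pin change (from `4.4.*` to `~= 4.4.1`) accepts, using PEP 440 compatible-release semantics.

    from packaging.specifiers import SpecifierSet

    old_pin = SpecifierSet("== 4.4.*")   # roughly what version = "4.4.*" meant
    new_pin = SpecifierSet("~= 4.4.1")   # the new compatible-release pin

    for candidate in ("4.4.0", "4.4.2", "4.5.0"):
        print(candidate, candidate in old_pin, candidate in new_pin)
    # 4.4.0 True False   -> the new pin requires at least 4.4.1
    # 4.4.2 True True
    # 4.5.0 False False
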
+- .../flask_socketio/namespace.pyi | 13 +- stubs/PyMySQL/pymysql/connections.pyi | 52 +++--- stubs/PyYAML/yaml/cyaml.pyi | 18 +- stubs/Pygments/pygments/cmdline.pyi | 3 +- stubs/Pygments/pygments/filter.pyi | 3 +- stubs/Pygments/pygments/formatters/html.pyi | 7 +- .../pygments/formatters/terminal256.pyi | 10 +- stubs/Pygments/pygments/lexer.pyi | 10 +- stubs/Pygments/pygments/util.pyi | 9 +- stubs/aiofiles/aiofiles/tempfile/__init__.pyi | 43 +++-- .../aiofiles/aiofiles/threadpool/__init__.pyi | 11 +- .../antlr4/ParserRuleContext.pyi | 2 +- .../antlr4/atn/ATNConfigSet.pyi | 2 +- .../antlr4/atn/ATNDeserializer.pyi | 2 +- .../antlr4/error/Errors.pyi | 14 +- .../antlr4/xpath/XPathLexer.pyi | 2 +- stubs/auth0-python/auth0/exceptions.pyi | 2 +- .../aws_xray_sdk/core/async_context.pyi | 6 +- .../aws_xray_sdk/core/async_recorder.pyi | 7 +- .../aws_xray_sdk/core/lambda_launcher.pyi | 3 +- .../aws_xray_sdk/core/models/entity.pyi | 5 +- .../core/models/facade_segment.pyi | 3 +- .../aws_xray_sdk/core/models/subsegment.pyi | 3 +- .../aws_xray_sdk/core/plugins/ec2_plugin.pyi | 5 +- .../core/sampling/local/sampler.pyi | 3 +- .../aws_xray_sdk/core/sampling/sampler.pyi | 5 +- .../core/sampling/sampling_rule.pyi | 13 +- .../aws_xray_sdk/core/utils/stacktrace.pyi | 4 +- stubs/beautifulsoup4/bs4/__init__.pyi | 32 +--- stubs/beautifulsoup4/bs4/builder/__init__.pyi | 7 +- .../bs4/builder/_htmlparser.pyi | 9 +- stubs/beautifulsoup4/bs4/builder/_lxml.pyi | 9 +- stubs/beautifulsoup4/bs4/element.pyi | 12 +- stubs/boltons/boltons/cacheutils.pyi | 6 +- stubs/boltons/boltons/funcutils.pyi | 25 +-- stubs/boltons/boltons/ioutils.pyi | 12 +- stubs/boltons/boltons/iterutils.pyi | 52 +++--- stubs/boltons/boltons/tableutils.pyi | 12 +- stubs/boltons/boltons/urlutils.pyi | 14 +- .../braintree/braintree/braintree_gateway.pyi | 2 +- stubs/braintree/braintree/client_token.pyi | 4 +- .../braintree/client_token_gateway.pyi | 2 +- stubs/braintree/braintree/configuration.pyi | 8 +- .../braintree/exchange_rate_quote_gateway.pyi | 2 +- .../merchant_account/merchant_account.pyi | 3 +- stubs/braintree/braintree/payment_method.pyi | 2 +- .../braintree/payment_method_gateway.pyi | 4 +- stubs/braintree/braintree/paypal_account.pyi | 4 +- .../braintree/paypal_account_gateway.pyi | 2 +- stubs/braintree/braintree/plan.pyi | 6 +- stubs/braintree/braintree/plan_gateway.pyi | 4 +- .../braintree/settlement_batch_summary.pyi | 4 +- .../settlement_batch_summary_gateway.pyi | 2 +- stubs/braintree/braintree/subscription.pyi | 6 +- .../braintree/subscription_gateway.pyi | 6 +- stubs/braintree/braintree/transaction.pyi | 14 +- .../braintree/transaction_gateway.pyi | 10 +- .../braintree/util/graphql_client.pyi | 2 +- stubs/caldav/caldav/objects.pyi | 38 ++-- stubs/cffi/cffi/api.pyi | 4 +- stubs/cffi/cffi/backend_ctypes.pyi | 2 +- stubs/cffi/cffi/cparser.pyi | 4 +- stubs/cffi/cffi/ffiplatform.pyi | 4 +- stubs/cffi/cffi/model.pyi | 6 +- stubs/cffi/cffi/recompiler.pyi | 2 +- stubs/cffi/cffi/vengine_cpy.pyi | 2 +- stubs/cffi/cffi/verifier.pyi | 2 +- stubs/corus/corus/io.pyi | 2 +- stubs/corus/corus/readme.pyi | 4 +- stubs/corus/corus/sources/lenta.pyi | 2 +- stubs/corus/corus/sources/meta.pyi | 17 +- stubs/corus/corus/sources/morphoru.pyi | 2 +- stubs/corus/corus/sources/taiga/arzamas.pyi | 4 +- stubs/corus/corus/sources/taiga/common.pyi | 32 ++-- stubs/corus/corus/sources/taiga/fontanka.pyi | 4 +- stubs/corus/corus/sources/taiga/interfax.pyi | 4 +- stubs/corus/corus/sources/taiga/kp.pyi | 4 +- stubs/corus/corus/sources/taiga/lenta.pyi | 4 +- 
stubs/corus/corus/sources/taiga/magazines.pyi | 4 +- stubs/corus/corus/sources/taiga/nplus1.pyi | 4 +- stubs/corus/corus/sources/taiga/proza.pyi | 6 +- stubs/corus/corus/sources/taiga/subtitles.pyi | 2 +- stubs/corus/corus/third/WikiExtractor.pyi | 14 +- .../dateparser/freshness_date_parser.pyi | 2 +- .../dateparser/languages/dictionary.pyi | 4 +- .../dateparser/search/detection.pyi | 7 +- stubs/dateparser/dateparser/search/search.pyi | 13 +- .../dateparser/dateparser/utils/__init__.pyi | 3 +- stubs/defusedxml/defusedxml/ElementTree.pyi | 3 +- stubs/defusedxml/defusedxml/minidom.pyi | 9 +- stubs/docker/docker/api/config.pyi | 6 +- stubs/docker/docker/api/container.pyi | 24 +-- stubs/docker/docker/api/exec_api.pyi | 10 +- stubs/docker/docker/api/image.pyi | 41 ++--- stubs/docker/docker/api/network.pyi | 17 +- stubs/docker/docker/api/plugin.pyi | 4 +- stubs/docker/docker/api/service.pyi | 44 +++-- stubs/docker/docker/api/swarm.pyi | 25 +-- stubs/docker/docker/auth.pyi | 13 +- stubs/docker/docker/credentials/store.pyi | 2 +- stubs/docker/docker/models/containers.pyi | 28 +-- stubs/docker/docker/models/plugins.pyi | 4 +- stubs/docker/docker/models/services.pyi | 6 +- stubs/docker/docker/transport/npipeconn.pyi | 2 +- stubs/docker/docker/transport/npipesocket.pyi | 6 +- stubs/docker/docker/transport/sshconn.pyi | 8 +- stubs/docker/docker/transport/unixconn.pyi | 2 +- stubs/docker/docker/types/networks.pyi | 2 +- stubs/docker/docker/utils/build.pyi | 8 +- stubs/docutils/docutils/core.pyi | 174 ++++++++---------- stubs/docutils/docutils/frontend.pyi | 86 ++------- stubs/docutils/docutils/io.pyi | 10 +- .../docutils/docutils/parsers/rst/states.pyi | 8 +- stubs/fpdf2/fpdf/annotations.pyi | 2 +- stubs/fpdf2/fpdf/drawing.pyi | 40 ++-- stubs/fpdf2/fpdf/fonts.pyi | 10 +- stubs/fpdf2/fpdf/fpdf.pyi | 22 +-- stubs/fpdf2/fpdf/image_parsing.pyi | 3 +- stubs/fpdf2/fpdf/output.pyi | 23 +-- stubs/fpdf2/fpdf/pattern.pyi | 2 +- stubs/fpdf2/fpdf/prefs.pyi | 3 +- stubs/fpdf2/fpdf/sign.pyi | 8 +- stubs/fpdf2/fpdf/svg.pyi | 6 +- stubs/fpdf2/fpdf/syntax.pyi | 12 +- stubs/fpdf2/fpdf/table.pyi | 18 +- stubs/fpdf2/fpdf/template.pyi | 9 +- stubs/fpdf2/fpdf/text_region.pyi | 24 +-- stubs/gdb/gdb/disassembler.pyi | 2 +- stubs/html5lib/html5lib/_ihatexml.pyi | 3 +- stubs/html5lib/html5lib/_inputstream.pyi | 4 +- stubs/html5lib/html5lib/_tokenizer.pyi | 5 +- stubs/html5lib/html5lib/_trie/_base.pyi | 3 +- stubs/html5lib/html5lib/_trie/py.pyi | 4 +- stubs/html5lib/html5lib/_utils.pyi | 3 +- stubs/html5lib/html5lib/html5parser.pyi | 8 +- stubs/html5lib/html5lib/serializer.pyi | 4 +- .../html5lib/treebuilders/__init__.pyi | 3 +- stubs/html5lib/html5lib/treebuilders/base.pyi | 11 +- .../html5lib/treebuilders/etree_lxml.pyi | 5 +- .../html5lib/treewalkers/__init__.pyi | 4 +- stubs/html5lib/html5lib/treewalkers/base.pyi | 3 +- stubs/httplib2/httplib2/__init__.pyi | 56 ++---- stubs/httplib2/httplib2/socks.pyi | 21 +-- stubs/hvac/hvac/api/auth_methods/approle.pyi | 49 ++--- stubs/hvac/hvac/api/auth_methods/aws.pyi | 95 +++++----- stubs/hvac/hvac/api/auth_methods/azure.pyi | 40 ++-- stubs/hvac/hvac/api/auth_methods/gcp.pyi | 35 ++-- stubs/hvac/hvac/api/auth_methods/github.pyi | 15 +- stubs/hvac/hvac/api/auth_methods/jwt.pyi | 82 ++++----- .../hvac/hvac/api/auth_methods/kubernetes.pyi | 20 +- stubs/hvac/hvac/api/auth_methods/ldap.pyi | 78 ++++---- .../hvac/hvac/api/auth_methods/legacy_mfa.pyi | 4 +- stubs/hvac/hvac/api/auth_methods/oidc.pyi | 42 ++--- stubs/hvac/hvac/api/auth_methods/okta.pyi | 17 +- 
stubs/hvac/hvac/api/auth_methods/radius.pyi | 13 +- stubs/hvac/hvac/api/auth_methods/token.pyi | 62 +++---- stubs/hvac/hvac/api/auth_methods/userpass.pyi | 6 +- .../api/secrets_engines/active_directory.pyi | 20 +- stubs/hvac/hvac/api/secrets_engines/aws.pyi | 31 +--- stubs/hvac/hvac/api/secrets_engines/azure.pyi | 14 +- .../hvac/hvac/api/secrets_engines/consul.pyi | 14 +- .../hvac/api/secrets_engines/database.pyi | 18 +- stubs/hvac/hvac/api/secrets_engines/gcp.pyi | 35 +--- .../hvac/api/secrets_engines/identity.pyi | 108 +++-------- stubs/hvac/hvac/api/secrets_engines/kv_v1.pyi | 4 +- stubs/hvac/hvac/api/secrets_engines/kv_v2.pyi | 24 +-- stubs/hvac/hvac/api/secrets_engines/pki.pyi | 34 +--- stubs/hvac/hvac/api/secrets_engines/ssh.pyi | 12 +- .../hvac/api/secrets_engines/transform.pyi | 56 ++---- .../hvac/hvac/api/secrets_engines/transit.pyi | 117 +++++------- stubs/hvac/hvac/api/system_backend/audit.pyi | 11 +- stubs/hvac/hvac/api/system_backend/auth.pyi | 25 +-- .../hvac/api/system_backend/capabilities.pyi | 4 +- stubs/hvac/hvac/api/system_backend/health.pyi | 16 +- stubs/hvac/hvac/api/system_backend/init.pyi | 18 +- stubs/hvac/hvac/api/system_backend/key.pyi | 10 +- stubs/hvac/hvac/api/system_backend/lease.pyi | 4 +- stubs/hvac/hvac/api/system_backend/mount.pyi | 32 ++-- stubs/hvac/hvac/api/system_backend/quota.pyi | 12 +- stubs/hvac/hvac/api/system_backend/raft.pyi | 8 +- stubs/hvac/hvac/api/system_backend/seal.pyi | 4 +- .../hvac/hvac/api/system_backend/wrapping.pyi | 2 +- stubs/jsonschema/jsonschema/exceptions.pyi | 2 +- stubs/jsonschema/jsonschema/validators.pyi | 25 +-- stubs/jwcrypto/jwcrypto/jws.pyi | 10 +- stubs/jwcrypto/jwcrypto/jwt.pyi | 13 +- stubs/ldap3/ldap3/abstract/attrDef.pyi | 17 +- stubs/ldap3/ldap3/abstract/cursor.pyi | 40 ++-- stubs/ldap3/ldap3/abstract/entry.pyi | 20 +- stubs/ldap3/ldap3/abstract/objectDef.pyi | 12 +- stubs/ldap3/ldap3/core/connection.pyi | 68 +++---- stubs/ldap3/ldap3/core/exceptions.pyi | 21 +-- stubs/ldap3/ldap3/core/pooling.pyi | 8 +- stubs/ldap3/ldap3/core/server.pyi | 32 +--- stubs/ldap3/ldap3/core/tls.pyi | 23 ++- stubs/ldap3/ldap3/extend/__init__.pyi | 52 ++---- .../ldap3/extend/microsoft/modifyPassword.pyi | 4 +- .../extend/microsoft/persistentSearch.pyi | 5 +- .../ldap3/extend/microsoft/unlockAccount.pyi | 4 +- .../ldap3/extend/novell/endTransaction.pyi | 2 +- .../ldap3/extend/novell/listReplicas.pyi | 4 +- .../novell/nmasGetUniversalPassword.pyi | 4 +- .../novell/nmasSetUniversalPassword.pyi | 4 +- .../extend/novell/partition_entry_count.pyi | 4 +- .../ldap3/ldap3/extend/novell/replicaInfo.pyi | 4 +- .../ldap3/extend/novell/startTransaction.pyi | 2 +- stubs/ldap3/ldap3/extend/operation.pyi | 4 +- .../ldap3/extend/standard/PagedSearch.pyi | 10 +- .../extend/standard/PersistentSearch.pyi | 5 +- .../ldap3/extend/standard/modifyPassword.pyi | 11 +- stubs/ldap3/ldap3/operation/add.pyi | 6 +- stubs/ldap3/ldap3/operation/bind.pyi | 16 +- stubs/ldap3/ldap3/operation/compare.pyi | 12 +- stubs/ldap3/ldap3/operation/modify.pyi | 5 +- stubs/ldap3/ldap3/operation/modifyDn.pyi | 4 +- stubs/ldap3/ldap3/operation/search.pyi | 13 +- stubs/ldap3/ldap3/protocol/convert.pyi | 6 +- stubs/ldap3/ldap3/protocol/rfc2696.pyi | 3 +- stubs/ldap3/ldap3/protocol/rfc2849.pyi | 13 +- stubs/ldap3/ldap3/protocol/rfc4512.pyi | 151 +++++++-------- stubs/ldap3/ldap3/strategy/base.pyi | 7 +- stubs/ldap3/ldap3/strategy/ldifProducer.pyi | 3 +- stubs/ldap3/ldap3/strategy/mockAsync.pyi | 3 +- stubs/ldap3/ldap3/strategy/mockBase.pyi | 5 +- 
stubs/ldap3/ldap3/strategy/restartable.pyi | 3 +- stubs/ldap3/ldap3/strategy/reusable.pyi | 5 +- stubs/ldap3/ldap3/utils/ciDict.pyi | 5 +- stubs/ldap3/ldap3/utils/conv.pyi | 4 +- stubs/ldap3/ldap3/utils/hashed.pyi | 3 +- stubs/m3u8/m3u8/__init__.pyi | 3 +- stubs/m3u8/m3u8/model.pyi | 10 +- stubs/mock/mock/mock.pyi | 59 ++---- stubs/mysqlclient/MySQLdb/cursors.pyi | 6 +- stubs/oauthlib/oauthlib/common.pyi | 2 +- .../oauthlib/oauth1/rfc5849/__init__.pyi | 27 +-- .../oauth1/rfc5849/endpoints/access_token.pyi | 10 +- .../rfc5849/endpoints/authorization.pyi | 14 +- .../oauth1/rfc5849/endpoints/base.pyi | 3 +- .../rfc5849/endpoints/request_token.pyi | 10 +- .../oauth1/rfc5849/endpoints/resource.pyi | 10 +- .../rfc5849/endpoints/signature_only.pyi | 5 +- .../oauthlib/oauth1/rfc5849/errors.pyi | 9 +- .../oauthlib/oauth1/rfc5849/signature.pyi | 10 +- .../rfc6749/endpoints/authorization.pyi | 2 +- .../oauth2/rfc6749/endpoints/resource.pyi | 2 +- .../oauth2/rfc6749/endpoints/token.pyi | 6 +- .../openid/connect/core/grant_types/base.pyi | 3 +- .../connect/core/grant_types/dispatchers.pyi | 11 +- stubs/openpyxl/openpyxl/cell/_writer.pyi | 4 +- stubs/openpyxl/openpyxl/chart/area_chart.pyi | 2 +- stubs/openpyxl/openpyxl/chart/axis.pyi | 10 +- stubs/openpyxl/openpyxl/chart/bar_chart.pyi | 7 +- stubs/openpyxl/openpyxl/chart/chartspace.pyi | 4 +- stubs/openpyxl/openpyxl/chart/data_source.pyi | 2 +- stubs/openpyxl/openpyxl/chart/line_chart.pyi | 2 +- stubs/openpyxl/openpyxl/chart/pie_chart.pyi | 2 +- stubs/openpyxl/openpyxl/chart/shapes.pyi | 2 +- stubs/openpyxl/openpyxl/chart/text.pyi | 4 +- stubs/openpyxl/openpyxl/chartsheet/custom.pyi | 4 +- .../openpyxl/chartsheet/protection.pyi | 6 +- .../openpyxl/openpyxl/chartsheet/publish.pyi | 2 +- .../openpyxl/openpyxl/chartsheet/relation.pyi | 2 +- stubs/openpyxl/openpyxl/chartsheet/views.pyi | 2 +- .../openpyxl/comments/comment_sheet.pyi | 6 +- stubs/openpyxl/openpyxl/drawing/colors.pyi | 2 +- stubs/openpyxl/openpyxl/drawing/fill.pyi | 8 +- stubs/openpyxl/openpyxl/drawing/geometry.pyi | 8 +- stubs/openpyxl/openpyxl/drawing/graphic.pyi | 3 +- stubs/openpyxl/openpyxl/drawing/line.pyi | 2 +- stubs/openpyxl/openpyxl/drawing/picture.pyi | 6 +- .../openpyxl/openpyxl/drawing/properties.pyi | 8 +- .../openpyxl/drawing/spreadsheet_drawing.pyi | 2 +- stubs/openpyxl/openpyxl/drawing/text.pyi | 14 +- stubs/openpyxl/openpyxl/formatting/rule.pyi | 61 ++---- stubs/openpyxl/openpyxl/formula/translate.pyi | 2 +- stubs/openpyxl/openpyxl/packaging/core.pyi | 2 +- .../openpyxl/openpyxl/packaging/workbook.pyi | 8 +- stubs/openpyxl/openpyxl/pivot/cache.pyi | 16 +- stubs/openpyxl/openpyxl/pivot/fields.pyi | 28 +-- stubs/openpyxl/openpyxl/pivot/record.pyi | 12 +- stubs/openpyxl/openpyxl/pivot/table.pyi | 10 +- stubs/openpyxl/openpyxl/styles/alignment.pyi | 14 +- stubs/openpyxl/openpyxl/styles/borders.pyi | 7 +- .../openpyxl/openpyxl/styles/named_styles.pyi | 2 +- .../openpyxl/workbook/defined_name.pyi | 2 +- .../workbook/external_link/external.pyi | 6 +- .../openpyxl/openpyxl/workbook/properties.pyi | 2 +- .../openpyxl/openpyxl/workbook/protection.pyi | 18 +- stubs/openpyxl/openpyxl/workbook/views.pyi | 2 +- .../openpyxl/openpyxl/worksheet/controls.pyi | 2 +- .../openpyxl/worksheet/datavalidation.pyi | 2 +- .../openpyxl/worksheet/dimensions.pyi | 14 +- stubs/openpyxl/openpyxl/worksheet/drawing.pyi | 2 +- stubs/openpyxl/openpyxl/worksheet/errors.pyi | 2 +- stubs/openpyxl/openpyxl/worksheet/filters.pyi | 12 +- stubs/openpyxl/openpyxl/worksheet/formula.pyi | 6 +- 
.../openpyxl/openpyxl/worksheet/hyperlink.pyi | 2 +- stubs/openpyxl/openpyxl/worksheet/merge.pyi | 2 +- stubs/openpyxl/openpyxl/worksheet/page.pyi | 8 +- .../openpyxl/openpyxl/worksheet/pagebreak.pyi | 2 +- .../openpyxl/worksheet/protection.pyi | 6 +- stubs/openpyxl/openpyxl/worksheet/related.pyi | 2 +- stubs/openpyxl/openpyxl/worksheet/table.pyi | 8 +- stubs/openpyxl/openpyxl/worksheet/views.pyi | 2 +- stubs/opentracing/opentracing/span.pyi | 3 +- stubs/paramiko/paramiko/_winapi.pyi | 2 +- stubs/passlib/passlib/context.pyi | 32 ++-- stubs/passlib/passlib/crypto/_md4.pyi | 4 +- stubs/passlib/passlib/crypto/digest.pyi | 5 +- stubs/passlib/passlib/exc.pyi | 19 +- stubs/passlib/passlib/ext/django/utils.pyi | 9 +- stubs/passlib/passlib/handlers/argon2.pyi | 27 +-- stubs/passlib/passlib/handlers/bcrypt.pyi | 5 +- stubs/passlib/passlib/handlers/digests.pyi | 7 +- stubs/passlib/passlib/handlers/misc.pyi | 7 +- stubs/passlib/passlib/handlers/scram.pyi | 4 +- stubs/passlib/passlib/handlers/scrypt.pyi | 5 +- stubs/passlib/passlib/handlers/windows.pyi | 3 +- stubs/passlib/passlib/utils/decor.pyi | 19 +- stubs/passlib/passlib/utils/handlers.pyi | 53 ++---- stubs/passlib/passlib/utils/pbkdf2.pyi | 6 +- stubs/peewee/peewee.pyi | 42 ++--- stubs/pexpect/pexpect/FSM.pyi | 12 +- stubs/pexpect/pexpect/replwrap.pyi | 2 +- stubs/pika/pika/adapters/base_connection.pyi | 4 +- .../pika/adapters/blocking_connection.pyi | 53 ++---- .../pika/pika/adapters/select_connection.pyi | 14 +- .../pika/adapters/utils/io_services_utils.pyi | 5 +- .../pika/adapters/utils/nbio_interface.pyi | 5 +- .../utils/selector_ioloop_adapter.pyi | 2 +- stubs/pika/pika/callback.pyi | 4 +- stubs/pika/pika/connection.pyi | 2 +- stubs/pika/pika/spec.pyi | 99 +++------- stubs/protobuf/google/protobuf/descriptor.pyi | 21 +-- .../google/protobuf/descriptor_pool.pyi | 6 +- .../protobuf/internal/well_known_types.pyi | 2 +- stubs/psutil/psutil/__init__.pyi | 4 +- stubs/psutil/psutil/_common.pyi | 14 +- stubs/psutil/psutil/_psposix.pyi | 13 +- stubs/psutil/psutil/_pswindows.pyi | 4 +- stubs/psycopg2/psycopg2/_psycopg.pyi | 5 +- stubs/psycopg2/psycopg2/extras.pyi | 51 +++-- stubs/psycopg2/psycopg2/pool.pyi | 4 +- stubs/pyasn1/pyasn1/codec/ber/decoder.pyi | 8 +- stubs/pyasn1/pyasn1/codec/ber/encoder.pyi | 4 +- stubs/pyasn1/pyasn1/codec/streaming.pyi | 2 +- stubs/python-dateutil/dateutil/rrule.pyi | 2 +- stubs/python-dateutil/dateutil/tz/tz.pyi | 3 +- .../dateutil/zoneinfo/__init__.pyi | 2 +- .../dateutil/zoneinfo/rebuild.pyi | 8 +- .../jose/backends/cryptography_backend.pyi | 7 +- stubs/pytz/pytz/lazy.pyi | 5 +- stubs/pywin32/win32/lib/win32evtlogutil.pyi | 7 +- .../win32comext/axscript/client/framework.pyi | 8 +- .../reportlab/graphics/barcode/lto.pyi | 8 +- .../reportlab/graphics/barcode/qr.pyi | 2 +- .../reportlab/graphics/charts/axes.pyi | 16 +- .../reportlab/graphics/charts/barcharts.pyi | 4 +- .../reportlab/graphics/charts/dotbox.pyi | 2 +- .../reportlab/graphics/charts/doughnut.pyi | 2 +- .../reportlab/graphics/charts/legends.pyi | 2 +- .../reportlab/graphics/charts/linecharts.pyi | 2 +- .../reportlab/graphics/charts/lineplots.pyi | 6 +- .../reportlab/graphics/charts/piecharts.pyi | 4 +- .../reportlab/graphics/charts/slidebox.pyi | 2 +- .../reportlab/graphics/charts/spider.pyi | 2 +- .../reportlab/graphics/charts/utils.pyi | 10 +- .../reportlab/reportlab/graphics/renderPM.pyi | 27 +-- .../reportlab/reportlab/graphics/renderPS.pyi | 12 +- .../reportlab/graphics/renderSVG.pyi | 48 ++--- .../reportlab/graphics/renderbase.pyi | 3 +- 
.../reportlab/reportlab/graphics/svgpath.pyi | 2 +- stubs/reportlab/reportlab/graphics/utils.pyi | 4 +- .../reportlab/graphics/widgetbase.pyi | 6 +- .../reportlab/graphics/widgets/grids.pyi | 2 +- stubs/reportlab/reportlab/lib/PyFontify.pyi | 2 +- stubs/reportlab/reportlab/lib/attrmap.pyi | 20 +- stubs/reportlab/reportlab/lib/fontfinder.pyi | 12 +- stubs/reportlab/reportlab/lib/formatters.pyi | 9 +- stubs/reportlab/reportlab/lib/normalDate.pyi | 2 +- stubs/reportlab/reportlab/lib/pdfencrypt.pyi | 22 +-- .../reportlab/reportlab/lib/rl_safe_eval.pyi | 41 +---- stubs/reportlab/reportlab/lib/rparsexml.pyi | 2 +- stubs/reportlab/reportlab/lib/sequencer.pyi | 14 +- stubs/reportlab/reportlab/lib/testutils.pyi | 10 +- stubs/reportlab/reportlab/lib/utils.pyi | 16 +- stubs/reportlab/reportlab/lib/validators.pyi | 10 +- .../reportlab/reportlab/pdfbase/acroform.pyi | 80 ++++---- .../reportlab/reportlab/pdfbase/cidfonts.pyi | 4 +- stubs/reportlab/reportlab/pdfbase/pdfdoc.pyi | 51 ++--- .../reportlab/pdfbase/pdfmetrics.pyi | 14 +- .../reportlab/reportlab/pdfbase/pdfutils.pyi | 7 +- stubs/reportlab/reportlab/pdfbase/ttfonts.pyi | 2 +- stubs/reportlab/reportlab/pdfgen/canvas.pyi | 117 ++++-------- .../reportlab/reportlab/pdfgen/pathobject.pyi | 3 +- .../reportlab/reportlab/pdfgen/pdfimages.pyi | 6 +- .../reportlab/platypus/doctemplate.pyi | 38 ++-- .../reportlab/reportlab/platypus/figures.pyi | 28 +-- .../reportlab/platypus/flowables.pyi | 14 +- stubs/reportlab/reportlab/platypus/frames.pyi | 5 +- stubs/reportlab/reportlab/platypus/tables.pyi | 2 +- .../requests_oauthlib/oauth1_auth.pyi | 19 +- .../requests_oauthlib/oauth1_session.pyi | 22 +-- .../requests_oauthlib/oauth2_auth.pyi | 6 +- .../requests_oauthlib/oauth2_session.pyi | 54 +++--- stubs/seaborn/seaborn/matrix.pyi | 8 +- .../setuptools/setuptools/_distutils/dist.pyi | 4 +- stubs/setuptools/setuptools/archive_util.pyi | 2 +- .../setuptools/command/build_ext.pyi | 18 +- stubs/setuptools/setuptools/depends.pyi | 21 +-- stubs/tensorflow/tensorflow/__init__.pyi | 4 +- stubs/tensorflow/tensorflow/data/__init__.pyi | 2 +- stubs/tensorflow/tensorflow/keras/losses.pyi | 66 ++----- stubs/tensorflow/tensorflow/keras/models.pyi | 10 +- .../tensorflow/saved_model/__init__.pyi | 4 +- stubs/tensorflow/tensorflow/summary.pyi | 5 +- .../tensorflow/tensorflow/train/__init__.pyi | 3 +- stubs/tqdm/tqdm/contrib/__init__.pyi | 2 +- stubs/tqdm/tqdm/keras.pyi | 12 +- stubs/tqdm/tqdm/std.pyi | 2 +- stubs/tqdm/tqdm/tk.pyi | 2 +- stubs/vobject/vobject/base.pyi | 41 ++--- stubs/vobject/vobject/hcalendar.pyi | 4 +- stubs/vobject/vobject/icalendar.pyi | 6 +- stubs/vobject/vobject/vcard.pyi | 2 +- .../workalendar/africa/south_africa.pyi | 3 +- stubs/workalendar/workalendar/asia/china.pyi | 20 +- stubs/workalendar/workalendar/core.pyi | 39 +--- .../workalendar/workalendar/europe/russia.pyi | 2 +- stubs/workalendar/workalendar/registry.pyi | 2 +- 488 files changed, 2259 insertions(+), 4044 deletions(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index b24a9abf2af3..7448dd51a48c 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -24,7 +24,7 @@ "stdlib/tkinter/scrolledtext.pyi", "stdlib/tkinter/tix.pyi", "stdlib/tkinter/ttk.pyi", - "stubs/aiofiles/aiofiles/tempfile/temptypes.pyi", + "stubs/aiofiles", "stubs/antlr4-python3-runtime", "stubs/auth0-python", "stubs/Authlib", @@ -62,6 +62,7 @@ "stubs/ldap3", "stubs/m3u8", "stubs/Markdown", + "stubs/mock/mock/mock.pyi", "stubs/mysqlclient", "stubs/netaddr/netaddr/core.pyi", 
"stubs/netaddr/netaddr/ip/__init__.pyi", @@ -69,6 +70,8 @@ "stubs/networkx", "stubs/oauthlib", "stubs/openpyxl", + "stubs/opentracing/opentracing/span.pyi", + "stubs/paramiko/paramiko/_winapi.pyi", "stubs/parsimonious/parsimonious/nodes.pyi", "stubs/passlib", "stubs/peewee", @@ -86,6 +89,7 @@ "stubs/python-dateutil", "stubs/python-http-client", "stubs/python-jose", + "stubs/pytz/pytz/lazy.pyi", "stubs/pywin32", "stubs/pyxdg", "stubs/PyYAML", diff --git a/stubs/Authlib/authlib/integrations/base_client/async_app.pyi b/stubs/Authlib/authlib/integrations/base_client/async_app.pyi index e32e154bc1be..422ba2755dc0 100644 --- a/stubs/Authlib/authlib/integrations/base_client/async_app.pyi +++ b/stubs/Authlib/authlib/integrations/base_client/async_app.pyi @@ -1,16 +1,14 @@ -from _typeshed import Incomplete - from authlib.integrations.base_client.sync_app import OAuth1Base, OAuth2Base __all__ = ["AsyncOAuth1Mixin", "AsyncOAuth2Mixin"] class AsyncOAuth1Mixin(OAuth1Base): - async def request(self, method, url, token: Incomplete | None = None, **kwargs): ... - async def create_authorization_url(self, redirect_uri: Incomplete | None = None, **kwargs): ... - async def fetch_access_token(self, request_token: Incomplete | None = None, **kwargs): ... + async def request(self, method, url, token=None, **kwargs): ... + async def create_authorization_url(self, redirect_uri=None, **kwargs): ... + async def fetch_access_token(self, request_token=None, **kwargs): ... class AsyncOAuth2Mixin(OAuth2Base): async def load_server_metadata(self): ... - async def request(self, method, url, token: Incomplete | None = None, **kwargs): ... - async def create_authorization_url(self, redirect_uri: Incomplete | None = None, **kwargs): ... - async def fetch_access_token(self, redirect_uri: Incomplete | None = None, **kwargs): ... + async def request(self, method, url, token=None, **kwargs): ... + async def create_authorization_url(self, redirect_uri=None, **kwargs): ... + async def fetch_access_token(self, redirect_uri=None, **kwargs): ... diff --git a/stubs/Authlib/authlib/integrations/base_client/async_openid.pyi b/stubs/Authlib/authlib/integrations/base_client/async_openid.pyi index 1a18fe689f9d..24a8c634ad98 100644 --- a/stubs/Authlib/authlib/integrations/base_client/async_openid.pyi +++ b/stubs/Authlib/authlib/integrations/base_client/async_openid.pyi @@ -1,8 +1,6 @@ -from _typeshed import Incomplete - __all__ = ["AsyncOpenIDMixin"] class AsyncOpenIDMixin: async def fetch_jwk_set(self, force: bool = False): ... async def userinfo(self, **kwargs): ... - async def parse_id_token(self, token, nonce, claims_options: Incomplete | None = None): ... + async def parse_id_token(self, token, nonce, claims_options=None): ... diff --git a/stubs/Authlib/authlib/integrations/base_client/framework_integration.pyi b/stubs/Authlib/authlib/integrations/base_client/framework_integration.pyi index dbf68a324fda..c6252514276e 100644 --- a/stubs/Authlib/authlib/integrations/base_client/framework_integration.pyi +++ b/stubs/Authlib/authlib/integrations/base_client/framework_integration.pyi @@ -4,10 +4,10 @@ class FrameworkIntegration: expires_in: int name: Incomplete cache: Incomplete - def __init__(self, name, cache: Incomplete | None = None) -> None: ... + def __init__(self, name, cache=None) -> None: ... def get_state_data(self, session, state): ... def set_state_data(self, session, state, data): ... def clear_state_data(self, session, state): ... 
- def update_token(self, token, refresh_token: Incomplete | None = None, access_token: Incomplete | None = None) -> None: ... + def update_token(self, token, refresh_token=None, access_token=None) -> None: ... @staticmethod def load_config(oauth, name, params) -> None: ... diff --git a/stubs/Authlib/authlib/integrations/base_client/registry.pyi b/stubs/Authlib/authlib/integrations/base_client/registry.pyi index b9cd8de7ce6e..24c1f415c99a 100644 --- a/stubs/Authlib/authlib/integrations/base_client/registry.pyi +++ b/stubs/Authlib/authlib/integrations/base_client/registry.pyi @@ -11,9 +11,7 @@ class BaseOAuth: cache: Incomplete fetch_token: Incomplete update_token: Incomplete - def __init__( - self, cache: Incomplete | None = None, fetch_token: Incomplete | None = None, update_token: Incomplete | None = None - ) -> None: ... + def __init__(self, cache=None, fetch_token=None, update_token=None) -> None: ... def create_client(self, name): ... def register(self, name, overwrite: bool = False, **kwargs): ... def generate_client_kwargs(self, name, overwrite, **kwargs): ... diff --git a/stubs/Authlib/authlib/integrations/base_client/sync_app.pyi b/stubs/Authlib/authlib/integrations/base_client/sync_app.pyi index 2497632afb47..748cd7b0fc38 100644 --- a/stubs/Authlib/authlib/integrations/base_client/sync_app.pyi +++ b/stubs/Authlib/authlib/integrations/base_client/sync_app.pyi @@ -3,7 +3,7 @@ from _typeshed import Incomplete class BaseApp: client_cls: Incomplete OAUTH_APP_CONFIG: Incomplete - def request(self, method, url, token: Incomplete | None = None, **kwargs): ... + def request(self, method, url, token=None, **kwargs): ... def get(self, url, **kwargs): ... def post(self, url, **kwargs): ... def patch(self, url, **kwargs): ... @@ -29,26 +29,26 @@ class OAuth1Base: def __init__( self, framework, - name: Incomplete | None = None, - fetch_token: Incomplete | None = None, - client_id: Incomplete | None = None, - client_secret: Incomplete | None = None, - request_token_url: Incomplete | None = None, - request_token_params: Incomplete | None = None, - access_token_url: Incomplete | None = None, - access_token_params: Incomplete | None = None, - authorize_url: Incomplete | None = None, - authorize_params: Incomplete | None = None, - api_base_url: Incomplete | None = None, - client_kwargs: Incomplete | None = None, - user_agent: Incomplete | None = None, + name=None, + fetch_token=None, + client_id=None, + client_secret=None, + request_token_url=None, + request_token_params=None, + access_token_url=None, + access_token_params=None, + authorize_url=None, + authorize_params=None, + api_base_url=None, + client_kwargs=None, + user_agent=None, **kwargs, ) -> None: ... class OAuth1Mixin(_RequestMixin, OAuth1Base): - def request(self, method, url, token: Incomplete | None = None, **kwargs): ... - def create_authorization_url(self, redirect_uri: Incomplete | None = None, **kwargs): ... - def fetch_access_token(self, request_token: Incomplete | None = None, **kwargs): ... + def request(self, method, url, token=None, **kwargs): ... + def create_authorization_url(self, redirect_uri=None, **kwargs): ... + def fetch_access_token(self, request_token=None, **kwargs): ... 
class OAuth2Base: client_cls: Incomplete @@ -68,26 +68,26 @@ class OAuth2Base: def __init__( self, framework, - name: Incomplete | None = None, - fetch_token: Incomplete | None = None, - update_token: Incomplete | None = None, - client_id: Incomplete | None = None, - client_secret: Incomplete | None = None, - access_token_url: Incomplete | None = None, - access_token_params: Incomplete | None = None, - authorize_url: Incomplete | None = None, - authorize_params: Incomplete | None = None, - api_base_url: Incomplete | None = None, - client_kwargs: Incomplete | None = None, - server_metadata_url: Incomplete | None = None, - compliance_fix: Incomplete | None = None, - client_auth_methods: Incomplete | None = None, - user_agent: Incomplete | None = None, + name=None, + fetch_token=None, + update_token=None, + client_id=None, + client_secret=None, + access_token_url=None, + access_token_params=None, + authorize_url=None, + authorize_params=None, + api_base_url=None, + client_kwargs=None, + server_metadata_url=None, + compliance_fix=None, + client_auth_methods=None, + user_agent=None, **kwargs, ) -> None: ... class OAuth2Mixin(_RequestMixin, OAuth2Base): - def request(self, method, url, token: Incomplete | None = None, **kwargs): ... + def request(self, method, url, token=None, **kwargs): ... def load_server_metadata(self): ... - def create_authorization_url(self, redirect_uri: Incomplete | None = None, **kwargs): ... - def fetch_access_token(self, redirect_uri: Incomplete | None = None, **kwargs): ... + def create_authorization_url(self, redirect_uri=None, **kwargs): ... + def fetch_access_token(self, redirect_uri=None, **kwargs): ... diff --git a/stubs/Authlib/authlib/integrations/base_client/sync_openid.pyi b/stubs/Authlib/authlib/integrations/base_client/sync_openid.pyi index 8fa2426a112d..374b3fb9eb63 100644 --- a/stubs/Authlib/authlib/integrations/base_client/sync_openid.pyi +++ b/stubs/Authlib/authlib/integrations/base_client/sync_openid.pyi @@ -1,7 +1,5 @@ -from _typeshed import Incomplete - class OpenIDMixin: def fetch_jwk_set(self, force: bool = False): ... def userinfo(self, **kwargs): ... - def parse_id_token(self, token, nonce, claims_options: Incomplete | None = None, leeway: int = 120): ... + def parse_id_token(self, token, nonce, claims_options=None, leeway: int = 120): ... def create_load_key(self): ... diff --git a/stubs/Authlib/authlib/jose/drafts/_jwe_algorithms.pyi b/stubs/Authlib/authlib/jose/drafts/_jwe_algorithms.pyi index df0f388935d9..3d95fa080026 100644 --- a/stubs/Authlib/authlib/jose/drafts/_jwe_algorithms.pyi +++ b/stubs/Authlib/authlib/jose/drafts/_jwe_algorithms.pyi @@ -9,7 +9,7 @@ class ECDH1PUAlgorithm(JWEAlgorithmWithTagAwareKeyAgreement): description: str key_size: Incomplete aeskw: Incomplete - def __init__(self, key_size: Incomplete | None = None) -> None: ... + def __init__(self, key_size=None) -> None: ... def prepare_key(self, raw_data): ... def generate_preset(self, enc_alg, key): ... def compute_shared_key(self, shared_key_e, shared_key_s): ... @@ -17,10 +17,10 @@ class ECDH1PUAlgorithm(JWEAlgorithmWithTagAwareKeyAgreement): def compute_derived_key(self, shared_key, fixed_info, bit_size): ... def deliver_at_sender(self, sender_static_key, sender_ephemeral_key, recipient_pubkey, headers, bit_size, tag): ... def deliver_at_recipient(self, recipient_key, sender_static_pubkey, sender_ephemeral_pubkey, headers, bit_size, tag): ... - def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset: Incomplete | None = None): ... 
+ def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset=None): ... def agree_upon_key_and_wrap_cek(self, enc_alg, headers, key, sender_key, epk, cek, tag): ... - def wrap(self, enc_alg, headers, key, sender_key, preset: Incomplete | None = None): ... - def unwrap(self, enc_alg, ek, headers, key, sender_key, tag: Incomplete | None = None): ... + def wrap(self, enc_alg, headers, key, sender_key, preset=None): ... + def unwrap(self, enc_alg, ek, headers, key, sender_key, tag=None): ... JWE_DRAFT_ALG_ALGORITHMS: Incomplete diff --git a/stubs/Authlib/authlib/jose/jwk.pyi b/stubs/Authlib/authlib/jose/jwk.pyi index 0ca658ddc97a..79246560f402 100644 --- a/stubs/Authlib/authlib/jose/jwk.pyi +++ b/stubs/Authlib/authlib/jose/jwk.pyi @@ -1,4 +1,2 @@ -from _typeshed import Incomplete - -def loads(obj, kid: Incomplete | None = None): ... -def dumps(key, kty: Incomplete | None = None, **params): ... +def loads(obj, kid=None): ... +def dumps(key, kty=None, **params): ... diff --git a/stubs/Authlib/authlib/jose/rfc7515/jws.pyi b/stubs/Authlib/authlib/jose/rfc7515/jws.pyi index 2fcd024394a0..fbb1083981aa 100644 --- a/stubs/Authlib/authlib/jose/rfc7515/jws.pyi +++ b/stubs/Authlib/authlib/jose/rfc7515/jws.pyi @@ -3,12 +3,12 @@ from _typeshed import Incomplete class JsonWebSignature: REGISTERED_HEADER_PARAMETER_NAMES: Incomplete ALGORITHMS_REGISTRY: Incomplete - def __init__(self, algorithms: Incomplete | None = None, private_headers: Incomplete | None = None) -> None: ... + def __init__(self, algorithms=None, private_headers=None) -> None: ... @classmethod def register_algorithm(cls, algorithm) -> None: ... def serialize_compact(self, protected, payload, key): ... - def deserialize_compact(self, s, key, decode: Incomplete | None = None): ... + def deserialize_compact(self, s, key, decode=None): ... def serialize_json(self, header_obj, payload, key): ... - def deserialize_json(self, obj, key, decode: Incomplete | None = None): ... + def deserialize_json(self, obj, key, decode=None): ... def serialize(self, header, payload, key): ... - def deserialize(self, s, key, decode: Incomplete | None = None): ... + def deserialize(self, s, key, decode=None): ... diff --git a/stubs/Authlib/authlib/jose/rfc7516/jwe.pyi b/stubs/Authlib/authlib/jose/rfc7516/jwe.pyi index 87de596064f1..34e57122b87b 100644 --- a/stubs/Authlib/authlib/jose/rfc7516/jwe.pyi +++ b/stubs/Authlib/authlib/jose/rfc7516/jwe.pyi @@ -5,15 +5,15 @@ class JsonWebEncryption: ALG_REGISTRY: Incomplete ENC_REGISTRY: Incomplete ZIP_REGISTRY: Incomplete - def __init__(self, algorithms: Incomplete | None = None, private_headers: Incomplete | None = None) -> None: ... + def __init__(self, algorithms=None, private_headers=None) -> None: ... @classmethod def register_algorithm(cls, algorithm) -> None: ... - def serialize_compact(self, protected, payload, key, sender_key: Incomplete | None = None): ... - def serialize_json(self, header_obj, payload, keys, sender_key: Incomplete | None = None): ... - def serialize(self, header, payload, key, sender_key: Incomplete | None = None): ... - def deserialize_compact(self, s, key, decode: Incomplete | None = None, sender_key: Incomplete | None = None): ... - def deserialize_json(self, obj, key, decode: Incomplete | None = None, sender_key: Incomplete | None = None): ... - def deserialize(self, obj, key, decode: Incomplete | None = None, sender_key: Incomplete | None = None): ... + def serialize_compact(self, protected, payload, key, sender_key=None): ... 
+ def serialize_json(self, header_obj, payload, keys, sender_key=None): ... + def serialize(self, header, payload, key, sender_key=None): ... + def deserialize_compact(self, s, key, decode=None, sender_key=None): ... + def deserialize_json(self, obj, key, decode=None, sender_key=None): ... + def deserialize(self, obj, key, decode=None, sender_key=None): ... @staticmethod def parse_json(obj): ... def get_header_alg(self, header): ... diff --git a/stubs/Authlib/authlib/jose/rfc7516/models.pyi b/stubs/Authlib/authlib/jose/rfc7516/models.pyi index 77879db640b5..794b2aafbd19 100644 --- a/stubs/Authlib/authlib/jose/rfc7516/models.pyi +++ b/stubs/Authlib/authlib/jose/rfc7516/models.pyi @@ -11,14 +11,14 @@ class JWEAlgorithmBase(metaclass=ABCMeta): def generate_preset(self, enc_alg, key) -> None: ... class JWEAlgorithm(JWEAlgorithmBase, metaclass=ABCMeta): - def wrap(self, enc_alg, headers, key, preset: Incomplete | None = None) -> None: ... + def wrap(self, enc_alg, headers, key, preset=None) -> None: ... def unwrap(self, enc_alg, ek, headers, key) -> None: ... class JWEAlgorithmWithTagAwareKeyAgreement(JWEAlgorithmBase, metaclass=ABCMeta): - def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset: Incomplete | None = None) -> None: ... + def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset=None) -> None: ... def agree_upon_key_and_wrap_cek(self, enc_alg, headers, key, sender_key, epk, cek, tag) -> None: ... - def wrap(self, enc_alg, headers, key, sender_key, preset: Incomplete | None = None) -> None: ... - def unwrap(self, enc_alg, ek, headers, key, sender_key, tag: Incomplete | None = None) -> None: ... + def wrap(self, enc_alg, headers, key, sender_key, preset=None) -> None: ... + def unwrap(self, enc_alg, ek, headers, key, sender_key, tag=None) -> None: ... class JWEEncAlgorithm: name: Incomplete diff --git a/stubs/Authlib/authlib/jose/rfc7517/_cryptography_key.pyi b/stubs/Authlib/authlib/jose/rfc7517/_cryptography_key.pyi index efb320af7c3c..9c2d57f6ca43 100644 --- a/stubs/Authlib/authlib/jose/rfc7517/_cryptography_key.pyi +++ b/stubs/Authlib/authlib/jose/rfc7517/_cryptography_key.pyi @@ -1,5 +1 @@ -from _typeshed import Incomplete - -def load_pem_key( - raw, ssh_type: Incomplete | None = None, key_type: Incomplete | None = None, password: Incomplete | None = None -): ... +def load_pem_key(raw, ssh_type=None, key_type=None, password=None): ... diff --git a/stubs/Authlib/authlib/jose/rfc7517/asymmetric_key.pyi b/stubs/Authlib/authlib/jose/rfc7517/asymmetric_key.pyi index ee40b4ee58ec..a2749aadf95e 100644 --- a/stubs/Authlib/authlib/jose/rfc7517/asymmetric_key.pyi +++ b/stubs/Authlib/authlib/jose/rfc7517/asymmetric_key.pyi @@ -10,9 +10,7 @@ class AsymmetricKey(Key): SSH_PUBLIC_PREFIX: bytes private_key: Incomplete public_key: Incomplete - def __init__( - self, private_key: Incomplete | None = None, public_key: Incomplete | None = None, options: Incomplete | None = None - ) -> None: ... + def __init__(self, private_key=None, public_key=None, options=None) -> None: ... @property def public_only(self): ... def get_op_key(self, operation): ... @@ -26,14 +24,14 @@ class AsymmetricKey(Key): def load_public_key(self) -> None: ... def as_dict(self, is_private: bool = False, **params): ... def as_key(self, is_private: bool = False): ... - def as_bytes(self, encoding: Incomplete | None = None, is_private: bool = False, password: Incomplete | None = None): ... - def as_pem(self, is_private: bool = False, password: Incomplete | None = None): ... 
- def as_der(self, is_private: bool = False, password: Incomplete | None = None): ... + def as_bytes(self, encoding=None, is_private: bool = False, password=None): ... + def as_pem(self, is_private: bool = False, password=None): ... + def as_der(self, is_private: bool = False, password=None): ... @classmethod - def import_dict_key(cls, raw, options: Incomplete | None = None): ... + def import_dict_key(cls, raw, options=None): ... @classmethod - def import_key(cls, raw, options: Incomplete | None = None): ... + def import_key(cls, raw, options=None): ... @classmethod def validate_raw_key(cls, key): ... @classmethod - def generate_key(cls, crv_or_size, options: Incomplete | None = None, is_private: bool = False) -> AsymmetricKey: ... + def generate_key(cls, crv_or_size, options=None, is_private: bool = False) -> AsymmetricKey: ... diff --git a/stubs/Authlib/authlib/jose/rfc7517/base_key.pyi b/stubs/Authlib/authlib/jose/rfc7517/base_key.pyi index 001944c64841..379b24806eda 100644 --- a/stubs/Authlib/authlib/jose/rfc7517/base_key.pyi +++ b/stubs/Authlib/authlib/jose/rfc7517/base_key.pyi @@ -7,7 +7,7 @@ class Key: PUBLIC_KEY_OPS: Incomplete REQUIRED_JSON_FIELDS: Incomplete options: Incomplete - def __init__(self, options: Incomplete | None = None) -> None: ... + def __init__(self, options=None) -> None: ... @property def tokens(self): ... @property diff --git a/stubs/Authlib/authlib/jose/rfc7517/jwk.pyi b/stubs/Authlib/authlib/jose/rfc7517/jwk.pyi index c386e2d8d856..365e438e3e29 100644 --- a/stubs/Authlib/authlib/jose/rfc7517/jwk.pyi +++ b/stubs/Authlib/authlib/jose/rfc7517/jwk.pyi @@ -6,7 +6,7 @@ from authlib.jose.rfc7517 import Key, KeySet class JsonWebKey: JWK_KEY_CLS: Incomplete @classmethod - def generate_key(cls, kty, crv_or_size, options: Incomplete | None = None, is_private: bool = False): ... + def generate_key(cls, kty, crv_or_size, options=None, is_private: bool = False): ... @classmethod def import_key(cls, raw: Mapping[str, object], options: Mapping[str, object] | None = None) -> Key: ... @classmethod diff --git a/stubs/Authlib/authlib/jose/rfc7518/ec_key.pyi b/stubs/Authlib/authlib/jose/rfc7518/ec_key.pyi index dbdf15be2993..c1aa6adb9da6 100644 --- a/stubs/Authlib/authlib/jose/rfc7518/ec_key.pyi +++ b/stubs/Authlib/authlib/jose/rfc7518/ec_key.pyi @@ -21,4 +21,4 @@ class ECKey(AsymmetricKey): def dumps_private_key(self): ... def dumps_public_key(self): ... @classmethod - def generate_key(cls, crv: str = "P-256", options: Incomplete | None = None, is_private: bool = False) -> ECKey: ... + def generate_key(cls, crv: str = "P-256", options=None, is_private: bool = False) -> ECKey: ... diff --git a/stubs/Authlib/authlib/jose/rfc7518/jwe_algs.pyi b/stubs/Authlib/authlib/jose/rfc7518/jwe_algs.pyi index ff4fb5cb27a3..b046d71bd697 100644 --- a/stubs/Authlib/authlib/jose/rfc7518/jwe_algs.pyi +++ b/stubs/Authlib/authlib/jose/rfc7518/jwe_algs.pyi @@ -7,7 +7,7 @@ class DirectAlgorithm(JWEAlgorithm): description: str def prepare_key(self, raw_data): ... def generate_preset(self, enc_alg, key): ... - def wrap(self, enc_alg, headers, key, preset: Incomplete | None = None): ... + def wrap(self, enc_alg, headers, key, preset=None): ... def unwrap(self, enc_alg, ek, headers, key): ... class RSAAlgorithm(JWEAlgorithm): @@ -18,7 +18,7 @@ class RSAAlgorithm(JWEAlgorithm): def __init__(self, name, description, pad_fn) -> None: ... def prepare_key(self, raw_data): ... def generate_preset(self, enc_alg, key): ... - def wrap(self, enc_alg, headers, key, preset: Incomplete | None = None): ... 
+ def wrap(self, enc_alg, headers, key, preset=None): ... def unwrap(self, enc_alg, ek, headers, key): ... class AESAlgorithm(JWEAlgorithm): @@ -29,7 +29,7 @@ class AESAlgorithm(JWEAlgorithm): def prepare_key(self, raw_data): ... def generate_preset(self, enc_alg, key): ... def wrap_cek(self, cek, key): ... - def wrap(self, enc_alg, headers, key, preset: Incomplete | None = None): ... + def wrap(self, enc_alg, headers, key, preset=None): ... def unwrap(self, enc_alg, ek, headers, key): ... class AESGCMAlgorithm(JWEAlgorithm): @@ -40,7 +40,7 @@ class AESGCMAlgorithm(JWEAlgorithm): def __init__(self, key_size) -> None: ... def prepare_key(self, raw_data): ... def generate_preset(self, enc_alg, key): ... - def wrap(self, enc_alg, headers, key, preset: Incomplete | None = None): ... + def wrap(self, enc_alg, headers, key, preset=None): ... def unwrap(self, enc_alg, ek, headers, key): ... class ECDHESAlgorithm(JWEAlgorithm): @@ -50,13 +50,13 @@ class ECDHESAlgorithm(JWEAlgorithm): description: str key_size: Incomplete aeskw: Incomplete - def __init__(self, key_size: Incomplete | None = None) -> None: ... + def __init__(self, key_size=None) -> None: ... def prepare_key(self, raw_data): ... def generate_preset(self, enc_alg, key): ... def compute_fixed_info(self, headers, bit_size): ... def compute_derived_key(self, shared_key, fixed_info, bit_size): ... def deliver(self, key, pubkey, headers, bit_size): ... - def wrap(self, enc_alg, headers, key, preset: Incomplete | None = None): ... + def wrap(self, enc_alg, headers, key, preset=None): ... def unwrap(self, enc_alg, ek, headers, key): ... def u32be_len_input(s, base64: bool = False): ... diff --git a/stubs/Authlib/authlib/jose/rfc7518/oct_key.pyi b/stubs/Authlib/authlib/jose/rfc7518/oct_key.pyi index 0a7674138585..1211c58280ae 100644 --- a/stubs/Authlib/authlib/jose/rfc7518/oct_key.pyi +++ b/stubs/Authlib/authlib/jose/rfc7518/oct_key.pyi @@ -8,7 +8,7 @@ class OctKey(Key): kty: str REQUIRED_JSON_FIELDS: Incomplete raw_key: Incomplete - def __init__(self, raw_key: Incomplete | None = None, options: Incomplete | None = None) -> None: ... + def __init__(self, raw_key=None, options=None) -> None: ... @property def public_only(self): ... def get_op_key(self, operation): ... @@ -18,6 +18,6 @@ class OctKey(Key): @classmethod def validate_raw_key(cls, key): ... @classmethod - def import_key(cls, raw, options: Incomplete | None = None): ... + def import_key(cls, raw, options=None): ... @classmethod - def generate_key(cls, key_size: int = 256, options: Incomplete | None = None, is_private: bool = True): ... + def generate_key(cls, key_size: int = 256, options=None, is_private: bool = True): ... diff --git a/stubs/Authlib/authlib/jose/rfc7518/rsa_key.pyi b/stubs/Authlib/authlib/jose/rfc7518/rsa_key.pyi index b2c78670128b..136f1dc00e53 100644 --- a/stubs/Authlib/authlib/jose/rfc7518/rsa_key.pyi +++ b/stubs/Authlib/authlib/jose/rfc7518/rsa_key.pyi @@ -16,8 +16,8 @@ class RSAKey(AsymmetricKey): def load_private_key(self): ... def load_public_key(self): ... @classmethod - def generate_key(cls, key_size: int = 2048, options: Incomplete | None = None, is_private: bool = False) -> RSAKey: ... + def generate_key(cls, key_size: int = 2048, options=None, is_private: bool = False) -> RSAKey: ... @classmethod - def import_dict_key(cls, raw, options: Incomplete | None = None): ... + def import_dict_key(cls, raw, options=None): ... def has_all_prime_factors(obj): ... 
diff --git a/stubs/Authlib/authlib/jose/rfc7519/claims.pyi b/stubs/Authlib/authlib/jose/rfc7519/claims.pyi index fc77d9392b4b..060f627c483c 100644 --- a/stubs/Authlib/authlib/jose/rfc7519/claims.pyi +++ b/stubs/Authlib/authlib/jose/rfc7519/claims.pyi @@ -6,13 +6,13 @@ class BaseClaims(dict[str, Any]): # dict values are key-dependent header: Incomplete options: Incomplete params: Incomplete - def __init__(self, payload, header, options: Incomplete | None = None, params: Incomplete | None = None) -> None: ... + def __init__(self, payload, header, options=None, params=None) -> None: ... # TODO: Adds an attribute for each key in REGISTERED_CLAIMS def __getattr__(self, key: str): ... def get_registered_claims(self) -> dict[str, Incomplete]: ... class JWTClaims(BaseClaims): - def validate(self, now: Incomplete | None = None, leeway: int = 0) -> None: ... + def validate(self, now=None, leeway: int = 0) -> None: ... def validate_iss(self) -> None: ... def validate_sub(self) -> None: ... def validate_aud(self) -> None: ... diff --git a/stubs/Authlib/authlib/jose/rfc7519/jwt.pyi b/stubs/Authlib/authlib/jose/rfc7519/jwt.pyi index 270722ede34f..0494dd418094 100644 --- a/stubs/Authlib/authlib/jose/rfc7519/jwt.pyi +++ b/stubs/Authlib/authlib/jose/rfc7519/jwt.pyi @@ -3,17 +3,10 @@ from _typeshed import Incomplete class JsonWebToken: SENSITIVE_NAMES: Incomplete SENSITIVE_VALUES: Incomplete - def __init__(self, algorithms, private_headers: Incomplete | None = None) -> None: ... + def __init__(self, algorithms, private_headers=None) -> None: ... def check_sensitive_data(self, payload) -> None: ... def encode(self, header, payload, key, check: bool = True): ... - def decode( - self, - s, - key, - claims_cls: Incomplete | None = None, - claims_options: Incomplete | None = None, - claims_params: Incomplete | None = None, - ): ... + def decode(self, s, key, claims_cls=None, claims_options=None, claims_params=None): ... def decode_payload(bytes_payload): ... def prepare_raw_key(raw): ... diff --git a/stubs/Authlib/authlib/jose/rfc8037/okp_key.pyi b/stubs/Authlib/authlib/jose/rfc8037/okp_key.pyi index 1b938de4ed8b..aea552297898 100644 --- a/stubs/Authlib/authlib/jose/rfc8037/okp_key.pyi +++ b/stubs/Authlib/authlib/jose/rfc8037/okp_key.pyi @@ -19,6 +19,6 @@ class OKPKey(AsymmetricKey): def load_private_key(self): ... def load_public_key(self): ... def dumps_private_key(self): ... - def dumps_public_key(self, public_key: Incomplete | None = None): ... + def dumps_public_key(self, public_key=None): ... @classmethod - def generate_key(cls, crv: str = "Ed25519", options: Incomplete | None = None, is_private: bool = False) -> OKPKey: ... + def generate_key(cls, crv: str = "Ed25519", options=None, is_private: bool = False) -> OKPKey: ... 
diff --git a/stubs/Authlib/authlib/oauth1/client.pyi b/stubs/Authlib/authlib/oauth1/client.pyi index a7823d5211e9..32ac22ba735c 100644 --- a/stubs/Authlib/authlib/oauth1/client.pyi +++ b/stubs/Authlib/authlib/oauth1/client.pyi @@ -10,16 +10,16 @@ class OAuth1Client: self, session, client_id, - client_secret: Incomplete | None = None, - token: Incomplete | None = None, - token_secret: Incomplete | None = None, - redirect_uri: Incomplete | None = None, - rsa_key: Incomplete | None = None, - verifier: Incomplete | None = None, + client_secret=None, + token=None, + token_secret=None, + redirect_uri=None, + rsa_key=None, + verifier=None, signature_method="HMAC-SHA1", signature_type="HEADER", force_include_body: bool = False, - realm: Incomplete | None = None, + realm=None, **kwargs, ) -> None: ... @property @@ -30,9 +30,9 @@ class OAuth1Client: def token(self): ... @token.setter def token(self, token) -> None: ... - def create_authorization_url(self, url, request_token: Incomplete | None = None, **kwargs): ... + def create_authorization_url(self, url, request_token=None, **kwargs): ... def fetch_request_token(self, url, **kwargs): ... - def fetch_access_token(self, url, verifier: Incomplete | None = None, **kwargs): ... + def fetch_access_token(self, url, verifier=None, **kwargs): ... def parse_authorization_response(self, url): ... def parse_response_token(self, status_code, text): ... @staticmethod diff --git a/stubs/Authlib/authlib/oauth1/rfc5849/authorization_server.pyi b/stubs/Authlib/authlib/oauth1/rfc5849/authorization_server.pyi index 62073bb460f4..894cd4646f82 100644 --- a/stubs/Authlib/authlib/oauth1/rfc5849/authorization_server.pyi +++ b/stubs/Authlib/authlib/oauth1/rfc5849/authorization_server.pyi @@ -9,9 +9,9 @@ class AuthorizationServer(BaseServer): def handle_response(self, status_code, payload, headers) -> None: ... def handle_error_response(self, error): ... def validate_temporary_credentials_request(self, request): ... - def create_temporary_credentials_response(self, request: Incomplete | None = None): ... + def create_temporary_credentials_response(self, request=None): ... def validate_authorization_request(self, request): ... - def create_authorization_response(self, request, grant_user: Incomplete | None = None): ... + def create_authorization_response(self, request, grant_user=None): ... def validate_token_request(self, request): ... def create_token_response(self, request): ... def create_temporary_credential(self, request) -> None: ... diff --git a/stubs/Authlib/authlib/oauth1/rfc5849/client_auth.pyi b/stubs/Authlib/authlib/oauth1/rfc5849/client_auth.pyi index 05e033dbb42c..5a4b54f1efdc 100644 --- a/stubs/Authlib/authlib/oauth1/rfc5849/client_auth.pyi +++ b/stubs/Authlib/authlib/oauth1/rfc5849/client_auth.pyi @@ -21,15 +21,15 @@ class ClientAuth: def __init__( self, client_id, - client_secret: Incomplete | None = None, - token: Incomplete | None = None, - token_secret: Incomplete | None = None, - redirect_uri: Incomplete | None = None, - rsa_key: Incomplete | None = None, - verifier: Incomplete | None = None, + client_secret=None, + token=None, + token_secret=None, + redirect_uri=None, + rsa_key=None, + verifier=None, signature_method="HMAC-SHA1", signature_type="HEADER", - realm: Incomplete | None = None, + realm=None, force_include_body: bool = False, ) -> None: ... def get_oauth_signature(self, method, uri, headers, body): ... 
diff --git a/stubs/Authlib/authlib/oauth1/rfc5849/errors.pyi b/stubs/Authlib/authlib/oauth1/rfc5849/errors.pyi index 9c5f35d53b35..7853be8199c0 100644 --- a/stubs/Authlib/authlib/oauth1/rfc5849/errors.pyi +++ b/stubs/Authlib/authlib/oauth1/rfc5849/errors.pyi @@ -1,11 +1,7 @@ -from _typeshed import Incomplete - from authlib.common.errors import AuthlibHTTPError class OAuth1Error(AuthlibHTTPError): - def __init__( - self, description: Incomplete | None = None, uri: Incomplete | None = None, status_code: Incomplete | None = None - ) -> None: ... + def __init__(self, description=None, uri=None, status_code=None) -> None: ... def get_headers(self): ... class InsecureTransportError(OAuth1Error): diff --git a/stubs/Authlib/authlib/oauth1/rfc5849/parameters.pyi b/stubs/Authlib/authlib/oauth1/rfc5849/parameters.pyi index 98bd2f2ba389..266c3f7fb910 100644 --- a/stubs/Authlib/authlib/oauth1/rfc5849/parameters.pyi +++ b/stubs/Authlib/authlib/oauth1/rfc5849/parameters.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - -def prepare_headers(oauth_params, headers: Incomplete | None = None, realm: Incomplete | None = None): ... +def prepare_headers(oauth_params, headers=None, realm=None): ... def prepare_form_encoded_body(oauth_params, body): ... def prepare_request_uri_query(oauth_params, uri): ... diff --git a/stubs/Authlib/authlib/oauth1/rfc5849/signature.pyi b/stubs/Authlib/authlib/oauth1/rfc5849/signature.pyi index 8279b465dc09..5e459220c58b 100644 --- a/stubs/Authlib/authlib/oauth1/rfc5849/signature.pyi +++ b/stubs/Authlib/authlib/oauth1/rfc5849/signature.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - SIGNATURE_HMAC_SHA1: str SIGNATURE_RSA_SHA1: str SIGNATURE_PLAINTEXT: str @@ -7,8 +5,8 @@ SIGNATURE_TYPE_HEADER: str SIGNATURE_TYPE_QUERY: str SIGNATURE_TYPE_BODY: str -def construct_base_string(method, uri, params, host: Incomplete | None = None): ... -def normalize_base_string_uri(uri, host: Incomplete | None = None): ... +def construct_base_string(method, uri, params, host=None): ... +def normalize_base_string_uri(uri, host=None): ... def normalize_parameters(params): ... def generate_signature_base_string(request): ... def hmac_sha1_signature(base_string, client_secret, token_secret): ... diff --git a/stubs/Authlib/authlib/oauth1/rfc5849/wrapper.pyi b/stubs/Authlib/authlib/oauth1/rfc5849/wrapper.pyi index 21696ccac9b9..ed0c1c936359 100644 --- a/stubs/Authlib/authlib/oauth1/rfc5849/wrapper.pyi +++ b/stubs/Authlib/authlib/oauth1/rfc5849/wrapper.pyi @@ -12,7 +12,7 @@ class OAuth1Request: query_params: Incomplete body_params: Incomplete params: Incomplete - def __init__(self, method, uri, body: Incomplete | None = None, headers: Incomplete | None = None) -> None: ... + def __init__(self, method, uri, body=None, headers=None) -> None: ... @property def client_id(self): ... @property diff --git a/stubs/Authlib/authlib/oauth2/auth.pyi b/stubs/Authlib/authlib/oauth2/auth.pyi index c06c35cafdbc..1e4cc2a8b2ae 100644 --- a/stubs/Authlib/authlib/oauth2/auth.pyi +++ b/stubs/Authlib/authlib/oauth2/auth.pyi @@ -9,7 +9,7 @@ class ClientAuth: client_id: Incomplete client_secret: Incomplete auth_method: Incomplete - def __init__(self, client_id, client_secret, auth_method: Incomplete | None = None) -> None: ... + def __init__(self, client_id, client_secret, auth_method=None) -> None: ... def prepare(self, method, uri, headers, body): ... 
class TokenAuth: @@ -19,6 +19,6 @@ class TokenAuth: token_placement: Incomplete client: Incomplete hooks: Incomplete - def __init__(self, token, token_placement: str = "header", client: Incomplete | None = None) -> None: ... + def __init__(self, token, token_placement: str = "header", client=None) -> None: ... def set_token(self, token) -> None: ... def prepare(self, uri, headers, body): ... diff --git a/stubs/Authlib/authlib/oauth2/base.pyi b/stubs/Authlib/authlib/oauth2/base.pyi index 4359ffb53120..fdf2f1f22504 100644 --- a/stubs/Authlib/authlib/oauth2/base.pyi +++ b/stubs/Authlib/authlib/oauth2/base.pyi @@ -8,13 +8,13 @@ class OAuth2Error(AuthlibHTTPError): redirect_fragment: Incomplete def __init__( self, - description: Incomplete | None = None, - uri: Incomplete | None = None, - status_code: Incomplete | None = None, - state: Incomplete | None = None, - redirect_uri: Incomplete | None = None, + description=None, + uri=None, + status_code=None, + state=None, + redirect_uri=None, redirect_fragment: bool = False, - error: Incomplete | None = None, + error=None, ) -> None: ... def get_body(self): ... - def __call__(self, uri: Incomplete | None = None): ... + def __call__(self, uri=None): ... diff --git a/stubs/Authlib/authlib/oauth2/client.pyi b/stubs/Authlib/authlib/oauth2/client.pyi index 4656ed8f2b28..b9c81298a0f9 100644 --- a/stubs/Authlib/authlib/oauth2/client.pyi +++ b/stubs/Authlib/authlib/oauth2/client.pyi @@ -27,17 +27,17 @@ class OAuth2Client: def __init__( self, session, - client_id: Incomplete | None = None, - client_secret: Incomplete | None = None, - token_endpoint_auth_method: Incomplete | None = None, - revocation_endpoint_auth_method: Incomplete | None = None, - scope: Incomplete | None = None, - state: Incomplete | None = None, - redirect_uri: Incomplete | None = None, - code_challenge_method: Incomplete | None = None, - token: Incomplete | None = None, + client_id=None, + client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, + state=None, + redirect_uri=None, + code_challenge_method=None, + token=None, token_placement: str = "header", - update_token: Incomplete | None = None, + update_token=None, leeway: int = 60, **metadata, ) -> None: ... @@ -47,50 +47,14 @@ class OAuth2Client: def token(self): ... @token.setter def token(self, token) -> None: ... - def create_authorization_url( - self, url, state: Incomplete | None = None, code_verifier: Incomplete | None = None, **kwargs - ): ... + def create_authorization_url(self, url, state=None, code_verifier=None, **kwargs): ... def fetch_token( - self, - url: Incomplete | None = None, - body: str = "", - method: str = "POST", - headers: Incomplete | None = None, - auth: Incomplete | None = None, - grant_type: Incomplete | None = None, - state: Incomplete | None = None, - **kwargs, - ): ... - def token_from_fragment(self, authorization_response, state: Incomplete | None = None): ... - def refresh_token( - self, - url: Incomplete | None = None, - refresh_token: Incomplete | None = None, - body: str = "", - auth: Incomplete | None = None, - headers: Incomplete | None = None, - **kwargs, - ): ... - def ensure_active_token(self, token: Incomplete | None = None): ... - def revoke_token( - self, - url, - token: Incomplete | None = None, - token_type_hint: Incomplete | None = None, - body: Incomplete | None = None, - auth: Incomplete | None = None, - headers: Incomplete | None = None, - **kwargs, - ): ... 
- def introspect_token( - self, - url, - token: Incomplete | None = None, - token_type_hint: Incomplete | None = None, - body: Incomplete | None = None, - auth: Incomplete | None = None, - headers: Incomplete | None = None, - **kwargs, + self, url=None, body: str = "", method: str = "POST", headers=None, auth=None, grant_type=None, state=None, **kwargs ): ... + def token_from_fragment(self, authorization_response, state=None): ... + def refresh_token(self, url=None, refresh_token=None, body: str = "", auth=None, headers=None, **kwargs): ... + def ensure_active_token(self, token=None): ... + def revoke_token(self, url, token=None, token_type_hint=None, body=None, auth=None, headers=None, **kwargs): ... + def introspect_token(self, url, token=None, token_type_hint=None, body=None, auth=None, headers=None, **kwargs): ... def register_compliance_hook(self, hook_type, hook) -> None: ... def parse_response_token(self, resp): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc6749/authorization_server.pyi b/stubs/Authlib/authlib/oauth2/rfc6749/authorization_server.pyi index 69c1d6ee50dd..396cb7e89b36 100644 --- a/stubs/Authlib/authlib/oauth2/rfc6749/authorization_server.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc6749/authorization_server.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Callable, Collection, Mapping from typing_extensions import TypeAlias @@ -17,7 +16,7 @@ class AuthorizationServer: self, grant_type: str, client: ClientMixin, - user: Incomplete | None = None, + user=None, scope: str | None = None, expires_in: int | None = None, include_refresh_token: bool = True, @@ -36,11 +35,9 @@ class AuthorizationServer: ) -> None: ... def register_endpoint(self, endpoint) -> None: ... def get_authorization_grant(self, request: OAuth2Request) -> BaseGrant: ... - def get_consent_grant(self, request: Incomplete | None = None, end_user: Incomplete | None = None): ... + def get_consent_grant(self, request=None, end_user=None): ... def get_token_grant(self, request: OAuth2Request) -> BaseGrant: ... - def create_endpoint_response(self, name, request: Incomplete | None = None): ... - def create_authorization_response( - self, request: Incomplete | None = None, grant_user: Incomplete | None = None - ) -> object: ... - def create_token_response(self, request: Incomplete | None = None) -> _ServerResponse: ... + def create_endpoint_response(self, name, request=None): ... + def create_authorization_response(self, request=None, grant_user=None) -> object: ... + def create_token_response(self, request=None) -> _ServerResponse: ... def handle_error_response(self, request: OAuth2Request, error: OAuth2Error) -> object: ... diff --git a/stubs/Authlib/authlib/oauth2/rfc6749/errors.pyi b/stubs/Authlib/authlib/oauth2/rfc6749/errors.pyi index c5be5e1368dc..5244780497f4 100644 --- a/stubs/Authlib/authlib/oauth2/rfc6749/errors.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc6749/errors.pyi @@ -65,7 +65,7 @@ class ForbiddenError(OAuth2Error): status_code: int auth_type: Incomplete realm: Incomplete - def __init__(self, auth_type: Incomplete | None = None, realm: Incomplete | None = None) -> None: ... + def __init__(self, auth_type=None, realm=None) -> None: ... def get_headers(self): ... 
class MissingAuthorizationError(ForbiddenError): diff --git a/stubs/Authlib/authlib/oauth2/rfc6749/grants/base.pyi b/stubs/Authlib/authlib/oauth2/rfc6749/grants/base.pyi index 5aea41fe8096..65f2dcc92645 100644 --- a/stubs/Authlib/authlib/oauth2/rfc6749/grants/base.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc6749/grants/base.pyi @@ -20,7 +20,7 @@ class BaseGrant: def client(self): ... def generate_token( self, - user: Incomplete | None = None, + user=None, scope: str | None = None, grant_type: str | None = None, expires_in: int | None = None, diff --git a/stubs/Authlib/authlib/oauth2/rfc6749/parameters.pyi b/stubs/Authlib/authlib/oauth2/rfc6749/parameters.pyi index 2ee2fd5361ea..1a69dd8247de 100644 --- a/stubs/Authlib/authlib/oauth2/rfc6749/parameters.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc6749/parameters.pyi @@ -1,14 +1,4 @@ -from _typeshed import Incomplete - -def prepare_grant_uri( - uri, - client_id, - response_type, - redirect_uri: Incomplete | None = None, - scope: Incomplete | None = None, - state: Incomplete | None = None, - **kwargs, -): ... -def prepare_token_request(grant_type, body: str = "", redirect_uri: Incomplete | None = None, **kwargs): ... -def parse_authorization_code_response(uri, state: Incomplete | None = None): ... -def parse_implicit_response(uri, state: Incomplete | None = None): ... +def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None, scope=None, state=None, **kwargs): ... +def prepare_token_request(grant_type, body: str = "", redirect_uri=None, **kwargs): ... +def parse_authorization_code_response(uri, state=None): ... +def parse_implicit_response(uri, state=None): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc6749/requests.pyi b/stubs/Authlib/authlib/oauth2/rfc6749/requests.pyi index 505c61e06802..6b9056ef2eb9 100644 --- a/stubs/Authlib/authlib/oauth2/rfc6749/requests.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc6749/requests.pyi @@ -43,6 +43,6 @@ class JsonRequest: uri: Incomplete body: Incomplete headers: Incomplete - def __init__(self, method, uri, body: Incomplete | None = None, headers: Incomplete | None = None) -> None: ... + def __init__(self, method, uri, body=None, headers=None) -> None: ... @property def data(self): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc6749/resource_protector.pyi b/stubs/Authlib/authlib/oauth2/rfc6749/resource_protector.pyi index 27ab2765730a..db0f51966333 100644 --- a/stubs/Authlib/authlib/oauth2/rfc6749/resource_protector.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc6749/resource_protector.pyi @@ -4,7 +4,7 @@ class TokenValidator: TOKEN_TYPE: str realm: Incomplete extra_attributes: Incomplete - def __init__(self, realm: Incomplete | None = None, **extra_attributes) -> None: ... + def __init__(self, realm=None, **extra_attributes) -> None: ... @staticmethod def scope_insufficient(token_scopes, required_scopes): ... def authenticate_token(self, token_string) -> None: ... diff --git a/stubs/Authlib/authlib/oauth2/rfc6750/errors.pyi b/stubs/Authlib/authlib/oauth2/rfc6750/errors.pyi index 2c4fdca5115a..5144b65718d8 100644 --- a/stubs/Authlib/authlib/oauth2/rfc6750/errors.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc6750/errors.pyi @@ -10,15 +10,7 @@ class InvalidTokenError(OAuth2Error): status_code: int realm: Incomplete extra_attributes: Incomplete - def __init__( - self, - description: Incomplete | None = None, - uri: Incomplete | None = None, - status_code: Incomplete | None = None, - state: Incomplete | None = None, - realm: Incomplete | None = None, - **extra_attributes, - ) -> None: ... 
+ def __init__(self, description=None, uri=None, status_code=None, state=None, realm=None, **extra_attributes) -> None: ... def get_headers(self): ... class InsufficientScopeError(OAuth2Error): diff --git a/stubs/Authlib/authlib/oauth2/rfc6750/parameters.pyi b/stubs/Authlib/authlib/oauth2/rfc6750/parameters.pyi index de547a8a9797..3eb07c4cc26d 100644 --- a/stubs/Authlib/authlib/oauth2/rfc6750/parameters.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc6750/parameters.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - def add_to_uri(token, uri): ... -def add_to_headers(token, headers: Incomplete | None = None): ... -def add_to_body(token, body: Incomplete | None = None): ... +def add_to_headers(token, headers=None): ... +def add_to_body(token, body=None): ... def add_bearer_token(token, uri, headers, body, placement: str = "header"): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc6750/token.pyi b/stubs/Authlib/authlib/oauth2/rfc6750/token.pyi index 44955fba002a..63a388bdb65d 100644 --- a/stubs/Authlib/authlib/oauth2/rfc6750/token.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc6750/token.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Callable from typing import Protocol @@ -25,7 +24,7 @@ class BearerTokenGenerator: self, grant_type: str, client: ClientMixin, - user: Incomplete | None = None, + user=None, scope: str | None = None, expires_in: int | None = None, include_refresh_token: bool = True, @@ -34,7 +33,7 @@ class BearerTokenGenerator: self, grant_type: str, client: ClientMixin, - user: Incomplete | None = None, + user=None, scope: str | None = None, expires_in: int | None = None, include_refresh_token: bool = True, diff --git a/stubs/Authlib/authlib/oauth2/rfc7009/parameters.pyi b/stubs/Authlib/authlib/oauth2/rfc7009/parameters.pyi index 75b11c378ffa..4b3804813015 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7009/parameters.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7009/parameters.pyi @@ -1,5 +1 @@ -from _typeshed import Incomplete - -def prepare_revoke_token_request( - token, token_type_hint: Incomplete | None = None, body: Incomplete | None = None, headers: Incomplete | None = None -): ... +def prepare_revoke_token_request(token, token_type_hint=None, body=None, headers=None): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7521/client.pyi b/stubs/Authlib/authlib/oauth2/rfc7521/client.pyi index e2be7fece77f..3aa0c2097a8b 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7521/client.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7521/client.pyi @@ -23,11 +23,11 @@ class AssertionClient: token_endpoint, issuer, subject, - audience: Incomplete | None = None, - grant_type: Incomplete | None = None, - claims: Incomplete | None = None, + audience=None, + grant_type=None, + claims=None, token_placement: str = "header", - scope: Incomplete | None = None, + scope=None, leeway: int = 60, **kwargs, ) -> None: ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7523/assertion.pyi b/stubs/Authlib/authlib/oauth2/rfc7523/assertion.pyi index 059422d8da34..01ddea88ac92 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7523/assertion.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7523/assertion.pyi @@ -1,19 +1,5 @@ -from _typeshed import Incomplete - def sign_jwt_bearer_assertion( - key, - issuer, - audience, - subject: Incomplete | None = None, - issued_at: Incomplete | None = None, - expires_at: Incomplete | None = None, - claims: Incomplete | None = None, - header: Incomplete | None = None, - **kwargs, -): ... 
-def client_secret_jwt_sign( - client_secret, client_id, token_endpoint, alg: str = "HS256", claims: Incomplete | None = None, **kwargs -): ... -def private_key_jwt_sign( - private_key, client_id, token_endpoint, alg: str = "RS256", claims: Incomplete | None = None, **kwargs + key, issuer, audience, subject=None, issued_at=None, expires_at=None, claims=None, header=None, **kwargs ): ... +def client_secret_jwt_sign(client_secret, client_id, token_endpoint, alg: str = "HS256", claims=None, **kwargs): ... +def private_key_jwt_sign(private_key, client_id, token_endpoint, alg: str = "RS256", claims=None, **kwargs): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7523/auth.pyi b/stubs/Authlib/authlib/oauth2/rfc7523/auth.pyi index d346c96f2210..0ca742f08e3c 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7523/auth.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7523/auth.pyi @@ -6,13 +6,7 @@ class ClientSecretJWT: token_endpoint: Incomplete claims: Incomplete headers: Incomplete - def __init__( - self, - token_endpoint: Incomplete | None = None, - claims: Incomplete | None = None, - headers: Incomplete | None = None, - alg: Incomplete | None = None, - ) -> None: ... + def __init__(self, token_endpoint=None, claims=None, headers=None, alg=None) -> None: ... def sign(self, auth, token_endpoint): ... def __call__(self, auth, method, uri, headers, body): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7523/jwt_bearer.pyi b/stubs/Authlib/authlib/oauth2/rfc7523/jwt_bearer.pyi index b78a2f639904..fde65f14309d 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7523/jwt_bearer.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7523/jwt_bearer.pyi @@ -8,16 +8,7 @@ class JWTBearerGrant(BaseGrant, TokenEndpointMixin): GRANT_TYPE = JWT_BEARER_GRANT_TYPE CLAIMS_OPTIONS: Incomplete @staticmethod - def sign( - key, - issuer, - audience, - subject: Incomplete | None = None, - issued_at: Incomplete | None = None, - expires_at: Incomplete | None = None, - claims: Incomplete | None = None, - **kwargs, - ): ... + def sign(key, issuer, audience, subject=None, issued_at=None, expires_at=None, claims=None, **kwargs): ... def process_assertion_claims(self, assertion): ... def resolve_public_key(self, headers, payload): ... def validate_token_request(self) -> None: ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7523/token.pyi b/stubs/Authlib/authlib/oauth2/rfc7523/token.pyi index d505d608fb82..e8612390e482 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7523/token.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7523/token.pyi @@ -5,26 +5,11 @@ class JWTBearerTokenGenerator: secret_key: Incomplete issuer: Incomplete alg: Incomplete - def __init__(self, secret_key, issuer: Incomplete | None = None, alg: str = "RS256") -> None: ... + def __init__(self, secret_key, issuer=None, alg: str = "RS256") -> None: ... @staticmethod def get_allowed_scope(client, scope): ... @staticmethod def get_sub_value(user): ... - def get_token_data(self, grant_type, client, expires_in, user: Incomplete | None = None, scope: Incomplete | None = None): ... - def generate( - self, - grant_type, - client, - user: Incomplete | None = None, - scope: Incomplete | None = None, - expires_in: Incomplete | None = None, - ): ... - def __call__( - self, - grant_type, - client, - user: Incomplete | None = None, - scope: Incomplete | None = None, - expires_in: Incomplete | None = None, - include_refresh_token: bool = True, - ): ... + def get_token_data(self, grant_type, client, expires_in, user=None, scope=None): ... 
+ def generate(self, grant_type, client, user=None, scope=None, expires_in=None): ... + def __call__(self, grant_type, client, user=None, scope=None, expires_in=None, include_refresh_token: bool = True): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7523/validator.pyi b/stubs/Authlib/authlib/oauth2/rfc7523/validator.pyi index 561b7219b59e..e9f1d0c7f535 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7523/validator.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7523/validator.pyi @@ -18,7 +18,5 @@ class JWTBearerTokenValidator(BearerTokenValidator): token_cls = JWTBearerToken public_key: Incomplete claims_options: Incomplete - def __init__( - self, public_key, issuer: Incomplete | None = None, realm: Incomplete | None = None, **extra_attributes - ) -> None: ... + def __init__(self, public_key, issuer=None, realm=None, **extra_attributes) -> None: ... def authenticate_token(self, token_string): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi b/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi index c0af6f0c57ea..1ff8927c09e9 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi @@ -6,7 +6,7 @@ class ClientRegistrationEndpoint: software_statement_alg_values_supported: Incomplete server: Incomplete claims_classes: list[type[Incomplete]] - def __init__(self, server: Incomplete | None = None, claims_classes: list[type[Incomplete]] | None = None) -> None: ... + def __init__(self, server=None, claims_classes: list[type[Incomplete]] | None = None) -> None: ... def __call__(self, request) -> dict[Incomplete, Incomplete]: ... def create_registration_response(self, request): ... def extract_client_metadata(self, request): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi b/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi index 9c393a221416..4f0b0e527669 100644 --- a/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi @@ -5,7 +5,7 @@ class ClientConfigurationEndpoint: ENDPOINT_NAME: Final = "client_configuration" server: Incomplete claims_classes: list[type[Incomplete]] - def __init__(self, server: Incomplete | None = None, claims_classes: list[type[Incomplete]] | None = None) -> None: ... + def __init__(self, server=None, claims_classes: list[type[Incomplete]] | None = None) -> None: ... def __call__(self, request): ... def create_configuration_response(self, request): ... def create_endpoint_request(self, request): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc9068/claims.pyi b/stubs/Authlib/authlib/oauth2/rfc9068/claims.pyi index 6351408041fa..b845e2d7129c 100644 --- a/stubs/Authlib/authlib/oauth2/rfc9068/claims.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc9068/claims.pyi @@ -1,9 +1,7 @@ -from _typeshed import Incomplete - from authlib.jose import JWTClaims class JWTAccessTokenClaims(JWTClaims): - def validate(self, now: Incomplete | None = None, leeway: int = 0, **kwargs) -> None: ... + def validate(self, now=None, leeway: int = 0, **kwargs) -> None: ... def validate_typ(self) -> None: ... def validate_client_id(self): ... def validate_auth_time(self) -> None: ... 
diff --git a/stubs/Authlib/authlib/oauth2/rfc9068/introspection.pyi b/stubs/Authlib/authlib/oauth2/rfc9068/introspection.pyi index 01b9140d6a18..3e36571ed83a 100644 --- a/stubs/Authlib/authlib/oauth2/rfc9068/introspection.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc9068/introspection.pyi @@ -5,7 +5,7 @@ from authlib.oauth2.rfc7662 import IntrospectionEndpoint class JWTIntrospectionEndpoint(IntrospectionEndpoint): ENDPOINT_NAME: str issuer: Incomplete - def __init__(self, issuer, server: Incomplete | None = None, *args, **kwargs) -> None: ... + def __init__(self, issuer, server=None, *args, **kwargs) -> None: ... def create_endpoint_response(self, request): ... def authenticate_token(self, request, client): ... def create_introspection_payload(self, token): ... diff --git a/stubs/Authlib/authlib/oauth2/rfc9068/revocation.pyi b/stubs/Authlib/authlib/oauth2/rfc9068/revocation.pyi index 263a06c72d2e..df8ac21b5f12 100644 --- a/stubs/Authlib/authlib/oauth2/rfc9068/revocation.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc9068/revocation.pyi @@ -4,6 +4,6 @@ from authlib.oauth2.rfc7009 import RevocationEndpoint class JWTRevocationEndpoint(RevocationEndpoint): issuer: Incomplete - def __init__(self, issuer, server: Incomplete | None = None, *args, **kwargs) -> None: ... + def __init__(self, issuer, server=None, *args, **kwargs) -> None: ... def authenticate_token(self, request, client) -> None: ... def get_jwks(self) -> None: ... diff --git a/stubs/Authlib/authlib/oauth2/rfc9068/token.pyi b/stubs/Authlib/authlib/oauth2/rfc9068/token.pyi index 84e438df0b66..959143518793 100644 --- a/stubs/Authlib/authlib/oauth2/rfc9068/token.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc9068/token.pyi @@ -5,13 +5,7 @@ from authlib.oauth2.rfc6750 import BearerTokenGenerator class JWTBearerTokenGenerator(BearerTokenGenerator): issuer: Incomplete alg: Incomplete - def __init__( - self, - issuer, - alg: str = "RS256", - refresh_token_generator: Incomplete | None = None, - expires_generator: Incomplete | None = None, - ) -> None: ... + def __init__(self, issuer, alg: str = "RS256", refresh_token_generator=None, expires_generator=None) -> None: ... def get_jwks(self) -> None: ... def get_extra_claims(self, client, grant_type, user, scope): ... def get_audiences(self, client, user, scope) -> str | list[str]: ... diff --git a/stubs/Authlib/authlib/oauth2/rfc9068/token_validator.pyi b/stubs/Authlib/authlib/oauth2/rfc9068/token_validator.pyi index 6b5a58f9ca81..70650d744335 100644 --- a/stubs/Authlib/authlib/oauth2/rfc9068/token_validator.pyi +++ b/stubs/Authlib/authlib/oauth2/rfc9068/token_validator.pyi @@ -9,12 +9,4 @@ class JWTBearerTokenValidator(BearerTokenValidator): def get_jwks(self) -> None: ... def validate_iss(self, claims, iss: str) -> bool: ... def authenticate_token(self, token_string): ... - def validate_token( - self, - token, - scopes, - request, - groups: Incomplete | None = None, - roles: Incomplete | None = None, - entitlements: Incomplete | None = None, - ) -> None: ... + def validate_token(self, token, scopes, request, groups=None, roles=None, entitlements=None) -> None: ... 
diff --git a/stubs/Authlib/authlib/oidc/core/claims.pyi b/stubs/Authlib/authlib/oidc/core/claims.pyi index 96342e7ff69e..1fbd121ddd8f 100644 --- a/stubs/Authlib/authlib/oidc/core/claims.pyi +++ b/stubs/Authlib/authlib/oidc/core/claims.pyi @@ -6,7 +6,7 @@ __all__ = ["IDToken", "CodeIDToken", "ImplicitIDToken", "HybridIDToken", "UserIn class IDToken(JWTClaims): ESSENTIAL_CLAIMS: Incomplete - def validate(self, now: Incomplete | None = None, leeway: int = 0) -> None: ... + def validate(self, now=None, leeway: int = 0) -> None: ... def validate_auth_time(self) -> None: ... def validate_nonce(self) -> None: ... def validate_acr(self): ... @@ -24,7 +24,7 @@ class ImplicitIDToken(IDToken): class HybridIDToken(ImplicitIDToken): RESPONSE_TYPES: Incomplete - def validate(self, now: Incomplete | None = None, leeway: int = 0) -> None: ... + def validate(self, now=None, leeway: int = 0) -> None: ... def validate_c_hash(self) -> None: ... class UserInfo(dict[str, object]): diff --git a/stubs/Authlib/authlib/oidc/core/grants/implicit.pyi b/stubs/Authlib/authlib/oidc/core/grants/implicit.pyi index cc0e4dec23d4..e487fc127ea6 100644 --- a/stubs/Authlib/authlib/oidc/core/grants/implicit.pyi +++ b/stubs/Authlib/authlib/oidc/core/grants/implicit.pyi @@ -13,4 +13,4 @@ class OpenIDImplicitGrant(ImplicitGrant): def validate_consent_request(self) -> None: ... def create_authorization_response(self, redirect_uri, grant_user): ... def create_granted_params(self, grant_user): ... - def process_implicit_token(self, token, code: Incomplete | None = None): ... + def process_implicit_token(self, token, code=None): ... diff --git a/stubs/Flask-SocketIO/flask_socketio/namespace.pyi b/stubs/Flask-SocketIO/flask_socketio/namespace.pyi index 349f6fadb096..50ef75dceba2 100644 --- a/stubs/Flask-SocketIO/flask_socketio/namespace.pyi +++ b/stubs/Flask-SocketIO/flask_socketio/namespace.pyi @@ -14,7 +14,7 @@ class _Namespace(_BaseNamespace, Protocol): def emit( self, event: str, - data: Incomplete | None = None, + data=None, to=None, room: str | None = None, skip_sid=None, @@ -33,14 +33,7 @@ class _Namespace(_BaseNamespace, Protocol): ignore_queue: bool = False, ) -> None: ... def call( - self, - event: str, - data: Incomplete | None = None, - to=None, - sid=None, - namespace: str | None = None, - timeout=None, - ignore_queue: bool = False, + self, event: str, data=None, to=None, sid=None, namespace: str | None = None, timeout=None, ignore_queue: bool = False ): ... def enter_room(self, sid, room: str, namespace: str | None = None): ... def leave_room(self, sid, room: str, namespace: str | None = None): ... 
@@ -57,7 +50,7 @@ class Namespace(_Namespace): def emit( # type: ignore[override] self, event: str, - data: Incomplete | None = None, + data=None, room: str | None = None, include_self: bool = True, namespace: str | None = None, diff --git a/stubs/PyMySQL/pymysql/connections.pyi b/stubs/PyMySQL/pymysql/connections.pyi index 967f9f9499bc..63acfc6e5c99 100644 --- a/stubs/PyMySQL/pymysql/connections.pyi +++ b/stubs/PyMySQL/pymysql/connections.pyi @@ -86,20 +86,20 @@ class Connection(Generic[_C]): self: Connection[Cursor], # different between overloads *, host: str | None = None, - user: Incomplete | None = None, + user=None, password: str = "", - database: Incomplete | None = None, + database=None, port: int = 0, - unix_socket: Incomplete | None = None, + unix_socket=None, charset: str = "", collation: str | None = None, - sql_mode: Incomplete | None = None, - read_default_file: Incomplete | None = None, + sql_mode=None, + read_default_file=None, conv=None, use_unicode: bool | None = True, client_flag: int = 0, cursorclass: None = None, # different between overloads - init_command: Incomplete | None = None, + init_command=None, connect_timeout: int | None = 10, ssl: Mapping[Any, Any] | None = None, ssl_ca=None, @@ -109,21 +109,21 @@ class Connection(Generic[_C]): ssl_key_password: _PasswordType | None = None, ssl_verify_cert=None, ssl_verify_identity=None, - read_default_group: Incomplete | None = None, - compress: Incomplete | None = None, - named_pipe: Incomplete | None = None, + read_default_group=None, + compress=None, + named_pipe=None, autocommit: bool | None = False, - db: Incomplete | None = None, - passwd: Incomplete | None = None, + db=None, + passwd=None, local_infile: Incomplete | None = False, max_allowed_packet: int = 16777216, defer_connect: bool | None = False, auth_plugin_map: Mapping[Any, Any] | None = None, read_timeout: float | None = None, write_timeout: float | None = None, - bind_address: Incomplete | None = None, + bind_address=None, binary_prefix: bool | None = False, - program_name: Incomplete | None = None, + program_name=None, server_public_key: bytes | None = None, ) -> None: ... 
@overload @@ -132,20 +132,20 @@ class Connection(Generic[_C]): self: Connection[_C], # pyright: ignore[reportInvalidTypeVarUse] #11780 *, host: str | None = None, - user: Incomplete | None = None, + user=None, password: str = "", - database: Incomplete | None = None, + database=None, port: int = 0, - unix_socket: Incomplete | None = None, + unix_socket=None, charset: str = "", collation: str | None = None, - sql_mode: Incomplete | None = None, - read_default_file: Incomplete | None = None, + sql_mode=None, + read_default_file=None, conv=None, use_unicode: bool | None = True, client_flag: int = 0, cursorclass: type[_C] = ..., # different between overloads - init_command: Incomplete | None = None, + init_command=None, connect_timeout: int | None = 10, ssl: Mapping[Any, Any] | None = None, ssl_ca=None, @@ -154,21 +154,21 @@ class Connection(Generic[_C]): ssl_key=None, ssl_verify_cert=None, ssl_verify_identity=None, - read_default_group: Incomplete | None = None, - compress: Incomplete | None = None, - named_pipe: Incomplete | None = None, + read_default_group=None, + compress=None, + named_pipe=None, autocommit: bool | None = False, - db: Incomplete | None = None, - passwd: Incomplete | None = None, + db=None, + passwd=None, local_infile: Incomplete | None = False, max_allowed_packet: int = 16777216, defer_connect: bool | None = False, auth_plugin_map: Mapping[Any, Any] | None = None, read_timeout: float | None = None, write_timeout: float | None = None, - bind_address: Incomplete | None = None, + bind_address=None, binary_prefix: bool | None = False, - program_name: Incomplete | None = None, + program_name=None, server_public_key: bytes | None = None, ) -> None: ... socket: Any diff --git a/stubs/PyYAML/yaml/cyaml.pyi b/stubs/PyYAML/yaml/cyaml.pyi index 85526cc44723..f3642b6fa27e 100644 --- a/stubs/PyYAML/yaml/cyaml.pyi +++ b/stubs/PyYAML/yaml/cyaml.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, SupportsRead +from _typeshed import SupportsRead from collections.abc import Mapping, Sequence from typing import IO, Any from typing_extensions import TypeAlias @@ -34,14 +34,14 @@ class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): stream: IO[Any], default_style: str | None = None, default_flow_style: bool | None = False, - canonical: Incomplete | None = None, + canonical=None, indent: int | None = None, width: int | None = None, - allow_unicode: Incomplete | None = None, + allow_unicode=None, line_break: str | None = None, encoding: str | None = None, - explicit_start: Incomplete | None = None, - explicit_end: Incomplete | None = None, + explicit_start=None, + explicit_end=None, version: Sequence[int] | None = None, tags: Mapping[str, str] | None = None, sort_keys: bool = True, @@ -53,14 +53,14 @@ class CDumper(CEmitter, SafeRepresenter, Resolver): stream: IO[Any], default_style: str | None = None, default_flow_style: bool = False, - canonical: Incomplete | None = None, + canonical=None, indent: int | None = None, width: int | None = None, - allow_unicode: Incomplete | None = None, + allow_unicode=None, line_break: str | None = None, encoding: str | None = None, - explicit_start: Incomplete | None = None, - explicit_end: Incomplete | None = None, + explicit_start=None, + explicit_end=None, version: Sequence[int] | None = None, tags: Mapping[str, str] | None = None, sort_keys: bool = True, diff --git a/stubs/Pygments/pygments/cmdline.pyi b/stubs/Pygments/pygments/cmdline.pyi index d835e626a408..446595b76ffb 100644 --- a/stubs/Pygments/pygments/cmdline.pyi +++ 
b/stubs/Pygments/pygments/cmdline.pyi @@ -1,9 +1,8 @@ import argparse -from _typeshed import Incomplete def main_inner(parser, argns): ... class HelpFormatter(argparse.HelpFormatter): - def __init__(self, prog, indent_increment: int = 2, max_help_position: int = 16, width: Incomplete | None = None) -> None: ... + def __init__(self, prog, indent_increment: int = 2, max_help_position: int = 16, width=None) -> None: ... def main(args=...): ... diff --git a/stubs/Pygments/pygments/filter.pyi b/stubs/Pygments/pygments/filter.pyi index b3e9709d1adc..d11b994f87fa 100644 --- a/stubs/Pygments/pygments/filter.pyi +++ b/stubs/Pygments/pygments/filter.pyi @@ -1,11 +1,10 @@ -from _typeshed import Incomplete from collections.abc import Iterable, Iterator from typing import Any from pygments.lexer import Lexer from pygments.token import _TokenType -def apply_filters(stream, filters, lexer: Incomplete | None = None): ... +def apply_filters(stream, filters, lexer=None): ... def simplefilter(f): ... class Filter: diff --git a/stubs/Pygments/pygments/formatters/html.pyi b/stubs/Pygments/pygments/formatters/html.pyi index aba09dad6d64..234ff59e1dd1 100644 --- a/stubs/Pygments/pygments/formatters/html.pyi +++ b/stubs/Pygments/pygments/formatters/html.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, TypeVar from pygments.formatter import Formatter @@ -33,9 +32,9 @@ class HtmlFormatter(Formatter[_T]): linespans: Any anchorlinenos: Any hl_lines: Any - def get_style_defs(self, arg: Incomplete | None = None): ... - def get_token_style_defs(self, arg: Incomplete | None = None): ... - def get_background_style_defs(self, arg: Incomplete | None = None): ... + def get_style_defs(self, arg=None): ... + def get_token_style_defs(self, arg=None): ... + def get_background_style_defs(self, arg=None): ... def get_linenos_style_defs(self): ... def get_css_prefix(self, arg): ... def wrap(self, source): ... diff --git a/stubs/Pygments/pygments/formatters/terminal256.pyi b/stubs/Pygments/pygments/formatters/terminal256.pyi index 362c09d907fc..e494e05467c8 100644 --- a/stubs/Pygments/pygments/formatters/terminal256.pyi +++ b/stubs/Pygments/pygments/formatters/terminal256.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, TypeVar from pygments.formatter import Formatter @@ -11,14 +10,7 @@ class EscapeSequence: bold: Any underline: Any italic: Any - def __init__( - self, - fg: Incomplete | None = None, - bg: Incomplete | None = None, - bold: bool = False, - underline: bool = False, - italic: bool = False, - ) -> None: ... + def __init__(self, fg=None, bg=None, bold: bool = False, underline: bool = False, italic: bool = False) -> None: ... def escape(self, attrs): ... def color_string(self): ... def true_color_string(self): ... diff --git a/stubs/Pygments/pygments/lexer.pyi b/stubs/Pygments/pygments/lexer.pyi index 595e1818fad3..9d760b95b206 100644 --- a/stubs/Pygments/pygments/lexer.pyi +++ b/stubs/Pygments/pygments/lexer.pyi @@ -49,9 +49,9 @@ class combined(tuple[Incomplete, ...]): class _PseudoMatch: def __init__(self, start, text) -> None: ... - def start(self, arg: Incomplete | None = None): ... - def end(self, arg: Incomplete | None = None): ... - def group(self, arg: Incomplete | None = None): ... + def start(self, arg=None): ... + def end(self, arg=None): ... + def group(self, arg=None): ... def groups(self): ... def groupdict(self): ... @@ -75,7 +75,7 @@ class words(Future): def get(self): ... 
class RegexLexerMeta(LexerMeta): - def process_tokendef(cls, name, tokendefs: Incomplete | None = None): ... + def process_tokendef(cls, name, tokendefs=None): ... def get_tokendefs(cls): ... def __call__(cls, *args, **kwds): ... @@ -89,7 +89,7 @@ class LexerContext: pos: Incomplete end: Incomplete stack: Incomplete - def __init__(self, text, pos, stack: Incomplete | None = None, end: Incomplete | None = None) -> None: ... + def __init__(self, text, pos, stack=None, end=None) -> None: ... class ExtendedRegexLexer(RegexLexer): def get_tokens_unprocessed( # type: ignore[override] diff --git a/stubs/Pygments/pygments/util.pyi b/stubs/Pygments/pygments/util.pyi index 160045340566..963a810038a6 100644 --- a/stubs/Pygments/pygments/util.pyi +++ b/stubs/Pygments/pygments/util.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from io import TextIOWrapper from typing import Any @@ -10,10 +9,10 @@ xml_decl_re: Any class ClassNotFound(ValueError): ... class OptionError(Exception): ... -def get_choice_opt(options, optname, allowed, default: Incomplete | None = None, normcase: bool = False): ... -def get_bool_opt(options, optname, default: Incomplete | None = None): ... -def get_int_opt(options, optname, default: Incomplete | None = None): ... -def get_list_opt(options, optname, default: Incomplete | None = None): ... +def get_choice_opt(options, optname, allowed, default=None, normcase: bool = False): ... +def get_bool_opt(options, optname, default=None): ... +def get_int_opt(options, optname, default=None): ... +def get_list_opt(options, optname, default=None): ... def docstring_headline(obj): ... def make_analysator(f): ... def shebang_matches(text, regex): ... diff --git a/stubs/aiofiles/aiofiles/tempfile/__init__.pyi b/stubs/aiofiles/aiofiles/tempfile/__init__.pyi index 1e3bbbb33933..87692d844b39 100644 --- a/stubs/aiofiles/aiofiles/tempfile/__init__.pyi +++ b/stubs/aiofiles/aiofiles/tempfile/__init__.pyi @@ -1,7 +1,6 @@ import sys from _typeshed import ( BytesPath, - Incomplete, OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, @@ -29,7 +28,7 @@ def TemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncTextIOWrapper]: ... # Unbuffered binary: returns a FileIO @@ -43,7 +42,7 @@ def TemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncFileIO]: ... # Buffered binary reading/updating: AsyncBufferedReader @@ -57,7 +56,7 @@ def TemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedReader]: ... # Buffered binary writing: AsyncBufferedIOBase @@ -71,7 +70,7 @@ def TemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedIOBase]: ... # 3.12 added `delete_on_close` @@ -89,7 +88,7 @@ if sys.version_info >= (3, 12): delete: bool = True, delete_on_close: bool = True, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncTextIOWrapper]: ... 
# Unbuffered binary: returns a FileIO @@ -105,7 +104,7 @@ if sys.version_info >= (3, 12): delete: bool = True, delete_on_close: bool = True, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncFileIO]: ... # Buffered binary reading/updating: AsyncBufferedReader @@ -121,7 +120,7 @@ if sys.version_info >= (3, 12): delete: bool = True, delete_on_close: bool = True, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedReader]: ... # Buffered binary writing: AsyncBufferedIOBase @@ -137,7 +136,7 @@ if sys.version_info >= (3, 12): delete: bool = True, delete_on_close: bool = True, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedIOBase]: ... else: @@ -153,7 +152,7 @@ else: dir: StrOrBytesPath | None = None, delete: bool = True, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncTextIOWrapper]: ... # Unbuffered binary: returns a FileIO @@ -168,7 +167,7 @@ else: dir: StrOrBytesPath | None = None, delete: bool = True, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncFileIO]: ... # Buffered binary reading/updating: AsyncBufferedReader @@ -183,7 +182,7 @@ else: dir: StrOrBytesPath | None = None, delete: bool = True, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedReader]: ... # Buffered binary writing: AsyncBufferedIOBase @@ -198,7 +197,7 @@ else: dir: StrOrBytesPath | None = None, delete: bool = True, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedIOBase]: ... # Text mode: always returns AsyncTextIOWrapper @@ -214,7 +213,7 @@ def SpooledTemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncTextIOWrapper]: ... @overload def SpooledTemporaryFile( @@ -227,7 +226,7 @@ def SpooledTemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncTextIOWrapper]: ... # Unbuffered binary: returns a FileIO @@ -243,7 +242,7 @@ def SpooledTemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncFileIO]: ... @overload def SpooledTemporaryFile( @@ -256,7 +255,7 @@ def SpooledTemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncFileIO]: ... # Buffered binary reading/updating: AsyncBufferedReader @@ -271,7 +270,7 @@ def SpooledTemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedReader]: ... 
# Buffered binary writing: AsyncBufferedIOBase @@ -287,7 +286,7 @@ def SpooledTemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedIOBase]: ... @overload def SpooledTemporaryFile( @@ -300,7 +299,7 @@ def SpooledTemporaryFile( prefix: AnyStr | None = None, dir: StrOrBytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedIOBase]: ... @overload def TemporaryDirectory( @@ -308,7 +307,7 @@ def TemporaryDirectory( prefix: str | None = None, dir: StrPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManagerTempDir: ... @overload def TemporaryDirectory( @@ -316,7 +315,7 @@ def TemporaryDirectory( prefix: bytes | None = None, dir: BytesPath | None = None, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManagerTempDir: ... class AiofilesContextManagerTempDir(AiofilesContextManager[AsyncTemporaryDirectory]): diff --git a/stubs/aiofiles/aiofiles/threadpool/__init__.pyi b/stubs/aiofiles/aiofiles/threadpool/__init__.pyi index cf4814ced833..4a3635a1c69c 100644 --- a/stubs/aiofiles/aiofiles/threadpool/__init__.pyi +++ b/stubs/aiofiles/aiofiles/threadpool/__init__.pyi @@ -1,6 +1,5 @@ from _typeshed import ( FileDescriptorOrPath, - Incomplete, OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, @@ -31,7 +30,7 @@ def open( opener: _Opener | None = None, *, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncTextIOWrapper]: ... # Unbuffered binary: returns a FileIO @@ -47,7 +46,7 @@ def open( opener: _Opener | None = None, *, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncFileIO]: ... # Buffered binary reading/updating: AsyncBufferedReader @@ -63,7 +62,7 @@ def open( opener: _Opener | None = None, *, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedReader]: ... # Buffered binary writing: AsyncBufferedIOBase @@ -79,7 +78,7 @@ def open( opener: _Opener | None = None, *, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[AsyncBufferedIOBase]: ... # Buffering cannot be determined: fall back to _UnknownAsyncBinaryIO @@ -95,7 +94,7 @@ def open( opener: _Opener | None = None, *, loop: AbstractEventLoop | None = None, - executor: Incomplete | None = None, + executor=None, ) -> AiofilesContextManager[_UnknownAsyncBinaryIO]: ... stdin: AsyncTextIndirectIOWrapper diff --git a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi index 2a78146a481a..b687a84d2713 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi @@ -28,7 +28,7 @@ class ParserRuleContext(RuleContext): def addTokenNode(self, token: Token): ... def addErrorNode(self, badToken: Token): ... def getChild(self, i: int, ttype: type | None = None): ... - def getChildren(self, predicate: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... 
+ def getChildren(self, predicate=None) -> Generator[Incomplete, None, None]: ... def getToken(self, ttype: int, i: int): ... def getTokens(self, ttype: int): ... def getTypedRuleContext(self, ctxType: type, i: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi index 3e20e2da2e62..0c7ac52b273d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi @@ -24,7 +24,7 @@ class ATNConfigSet: cachedHashCode: int def __init__(self, fullCtx: bool = True) -> None: ... def __iter__(self): ... - def add(self, config: ATNConfig, mergeCache: Incomplete | None = None): ... + def add(self, config: ATNConfig, mergeCache=None): ... def getOrAdd(self, config: ATNConfig): ... def getStates(self): ... def getPredicates(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index fbb06ad67f97..8c2a67e5a627 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -30,7 +30,7 @@ class ATNDeserializer: def stateIsEndStateFor(self, state: ATNState, idx: int): ... def markPrecedenceDecisions(self, atn: ATN): ... def verifyATN(self, atn: ATN): ... - def checkCondition(self, condition: bool, message: Incomplete | None = None): ... + def checkCondition(self, condition: bool, message=None): ... def readInt(self): ... edgeFactories: Incomplete def edgeFactory(self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete]): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi index 1c2a6812247e..1a4fe8f7d38f 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi @@ -21,11 +21,7 @@ class RecognitionException(Exception): offendingToken: Incomplete offendingState: int def __init__( - self, - message: str | None = None, - recognizer: Recognizer | None = None, - input: InputStream | None = None, - ctx: Incomplete | None = None, + self, message: str | None = None, recognizer: Recognizer | None = None, input: InputStream | None = None, ctx=None ) -> None: ... def getExpectedTokens(self): ... @@ -42,10 +38,10 @@ class NoViableAltException(RecognitionException): def __init__( self, recognizer, - input: Incomplete | None = None, - startToken: Incomplete | None = None, - offendingToken: Incomplete | None = None, - deadEndConfigs: Incomplete | None = None, + input=None, + startToken=None, + offendingToken=None, + deadEndConfigs=None, ctx: ParserRuleContext | None = None, ) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi index a6b67a6d5329..5aac6033dbf9 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi @@ -22,7 +22,7 @@ class XPathLexer(Lexer): symbolicNames: Incomplete ruleNames: Incomplete grammarFileName: str - def __init__(self, input: Incomplete | None = None, output: TextIO = ...) -> None: ... + def __init__(self, input=None, output: TextIO = ...) -> None: ... def action(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ... type: Incomplete def ID_action(self, localctx: RuleContext, actionIndex: int): ... 
diff --git a/stubs/auth0-python/auth0/exceptions.pyi b/stubs/auth0-python/auth0/exceptions.pyi index 22a62ca6d727..608d6bb013f7 100644 --- a/stubs/auth0-python/auth0/exceptions.pyi +++ b/stubs/auth0-python/auth0/exceptions.pyi @@ -5,7 +5,7 @@ class Auth0Error(Exception): error_code: str message: str content: Incomplete | None - def __init__(self, status_code: int, error_code: str, message: str, content: Incomplete | None = None) -> None: ... + def __init__(self, status_code: int, error_code: str, message: str, content=None) -> None: ... class RateLimitError(Auth0Error): reset_at: int diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi index d0fd6c1f8bd9..7579d0896224 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi @@ -1,13 +1,11 @@ -from _typeshed import Incomplete - from .context import Context as _Context class AsyncContext(_Context): - def __init__(self, *args, loop: Incomplete | None = None, use_task_factory: bool = True, **kwargs) -> None: ... + def __init__(self, *args, loop=None, use_task_factory: bool = True, **kwargs) -> None: ... def clear_trace_entities(self) -> None: ... class TaskLocalStorage: - def __init__(self, loop: Incomplete | None = None) -> None: ... + def __init__(self, loop=None) -> None: ... def __setattr__(self, name: str, value) -> None: ... def __getattribute__(self, item: str): ... def clear(self) -> None: ... diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi index 41138e427427..4d4bdb8be332 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from types import TracebackType from .models.segment import SegmentContextManager @@ -19,7 +18,7 @@ class AsyncSubsegmentContextManager(SubsegmentContextManager): ) -> None: ... class AsyncAWSXRayRecorder(AWSXRayRecorder): - def capture_async(self, name: Incomplete | None = None): ... - def in_segment_async(self, name: Incomplete | None = None, **segment_kwargs): ... - def in_subsegment_async(self, name: Incomplete | None = None, **subsegment_kwargs): ... + def capture_async(self, name=None): ... + def in_segment_async(self, name=None, **segment_kwargs): ... + def in_subsegment_async(self, name=None, **subsegment_kwargs): ... async def record_subsegment_async(self, wrapped, instance, args, kwargs, name, namespace, meta_processor): ... diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi index 2ff1a6d6c6ba..9d0cbf5e3f17 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from logging import Logger from .context import Context @@ -14,7 +13,7 @@ def check_in_lambda(): ... class LambdaContext(Context): def __init__(self) -> None: ... def put_segment(self, segment) -> None: ... - def end_segment(self, end_time: Incomplete | None = None) -> None: ... + def end_segment(self, end_time=None) -> None: ... def put_subsegment(self, subsegment) -> None: ... def get_trace_entity(self): ... 
@property diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi index 91260a83d9b7..20be6fe08682 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from logging import Logger from traceback import StackSummary from typing import Any @@ -20,8 +19,8 @@ class Entity: cause: Any subsegments: Any end_time: Any - def __init__(self, name, entity_id: Incomplete | None = None) -> None: ... - def close(self, end_time: Incomplete | None = None) -> None: ... + def __init__(self, name, entity_id=None) -> None: ... + def close(self, end_time=None) -> None: ... def add_subsegment(self, subsegment) -> None: ... def remove_subsegment(self, subsegment) -> None: ... def put_http_meta(self, key, value) -> None: ... diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi index 44623545f089..96d406a6a9d7 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from .segment import Segment @@ -8,7 +7,7 @@ MUTATION_UNSUPPORTED_MESSAGE: str class FacadeSegment(Segment): initializing: Any def __init__(self, name, entityid, traceid, sampled) -> None: ... - def close(self, end_time: Incomplete | None = None) -> None: ... + def close(self, end_time=None) -> None: ... def put_http_meta(self, key, value) -> None: ... def put_annotation(self, key, value) -> None: ... def put_metadata(self, key, value, namespace: str = "default") -> None: ... diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi index 634e32b12a84..aefece2fd34b 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi @@ -1,5 +1,4 @@ import time -from _typeshed import Incomplete from types import TracebackType from typing import Any @@ -18,7 +17,7 @@ class SubsegmentContextManager: subsegment_kwargs: dict[str, Any] | None recorder: AWSXRayRecorder subsegment: Subsegment - def __init__(self, recorder: AWSXRayRecorder, name: Incomplete | None = None, **subsegment_kwargs) -> None: ... + def __init__(self, recorder: AWSXRayRecorder, name=None, **subsegment_kwargs) -> None: ... def __call__(self, wrapped, instance, args: list[Any], kwargs: dict[str, Any]): ... def __enter__(self) -> Subsegment | None: ... def __exit__( diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi index 1d6578904503..3d3e5ea52cc3 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any log: Any @@ -8,6 +7,6 @@ IMDS_URL: str def initialize() -> None: ... def get_token(): ... -def get_metadata(token: Incomplete | None = None): ... +def get_metadata(token=None): ... def parse_metadata_json(json_str): ... -def do_request(url, headers: Incomplete | None = None, method: str = "GET"): ... +def do_request(url, headers=None, method: str = "GET"): ... 
diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampler.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampler.pyi index b681bba3524f..fef2a8ebb405 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampler.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampler.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from ...exceptions.exceptions import InvalidSamplingManifestError as InvalidSamplingManifestError @@ -9,5 +8,5 @@ SUPPORTED_RULE_VERSION: Any class LocalSampler: def __init__(self, rules=...) -> None: ... - def should_trace(self, sampling_req: Incomplete | None = None): ... + def should_trace(self, sampling_req=None): ... def load_local_rules(self, rules) -> None: ... diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi index e9dd290957d0..e3ff1cb5e23a 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from .connector import ServiceConnector as ServiceConnector @@ -13,9 +12,9 @@ log: Any class DefaultSampler: def __init__(self) -> None: ... def start(self) -> None: ... - def should_trace(self, sampling_req: Incomplete | None = None): ... + def should_trace(self, sampling_req=None): ... def load_local_rules(self, rules) -> None: ... - def load_settings(self, daemon_config, context, origin: Incomplete | None = None) -> None: ... + def load_settings(self, daemon_config, context, origin=None) -> None: ... @property def xray_client(self): ... @xray_client.setter diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi index 17b6ee99d6d3..1ea487cfb0c7 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi @@ -1,17 +1,6 @@ -from _typeshed import Incomplete - class SamplingRule: def __init__( - self, - name, - priority, - rate, - reservoir_size, - host: Incomplete | None = None, - method: Incomplete | None = None, - path: Incomplete | None = None, - service: Incomplete | None = None, - service_type: Incomplete | None = None, + self, name, priority, rate, reservoir_size, host=None, method=None, path=None, service=None, service_type=None ) -> None: ... def match(self, sampling_req): ... def is_default(self): ... diff --git a/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/stacktrace.pyi b/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/stacktrace.pyi index 6ebf8da33e5e..f7df10b9d7a3 100644 --- a/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/stacktrace.pyi +++ b/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/stacktrace.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def get_stacktrace(limit: Incomplete | None = None): ... +def get_stacktrace(limit=None): ... diff --git a/stubs/beautifulsoup4/bs4/__init__.pyi b/stubs/beautifulsoup4/bs4/__init__.pyi index f3eadd12e906..2bbbfc78da19 100644 --- a/stubs/beautifulsoup4/bs4/__init__.pyi +++ b/stubs/beautifulsoup4/bs4/__init__.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, SupportsRead +from _typeshed import SupportsRead from collections.abc import Iterator, Sequence from typing import Any from typing_extensions import Self @@ -55,35 +55,19 @@ class BeautifulSoup(Tag): preserve_whitespace_tag_stack: Any string_container_stack: Any def reset(self) -> None: ... 
- def new_tag( - self, - name, - namespace: Incomplete | None = None, - nsprefix: Incomplete | None = None, - attrs={}, - sourceline: Incomplete | None = None, - sourcepos: Incomplete | None = None, - **kwattrs, - ) -> Tag: ... - def string_container(self, base_class: Incomplete | None = None): ... - def new_string(self, s, subclass: Incomplete | None = None): ... + def new_tag(self, name, namespace=None, nsprefix=None, attrs={}, sourceline=None, sourcepos=None, **kwattrs) -> Tag: ... + def string_container(self, base_class=None): ... + def new_string(self, s, subclass=None): ... def insert_before(self, *args) -> None: ... def insert_after(self, *args) -> None: ... def popTag(self): ... def pushTag(self, tag) -> None: ... - def endData(self, containerClass: Incomplete | None = None) -> None: ... - def object_was_parsed(self, o, parent: Incomplete | None = None, most_recent_element: Incomplete | None = None) -> None: ... + def endData(self, containerClass=None) -> None: ... + def object_was_parsed(self, o, parent=None, most_recent_element=None) -> None: ... def handle_starttag( - self, - name, - namespace, - nsprefix, - attrs, - sourceline: Incomplete | None = None, - sourcepos: Incomplete | None = None, - namespaces: dict[str, str] | None = None, + self, name, namespace, nsprefix, attrs, sourceline=None, sourcepos=None, namespaces: dict[str, str] | None = None ): ... - def handle_endtag(self, name, nsprefix: Incomplete | None = None) -> None: ... + def handle_endtag(self, name, nsprefix=None) -> None: ... def handle_data(self, data) -> None: ... def decode( # type: ignore[override] self, diff --git a/stubs/beautifulsoup4/bs4/builder/__init__.pyi b/stubs/beautifulsoup4/bs4/builder/__init__.pyi index 126205fa2d9f..0399fb520883 100644 --- a/stubs/beautifulsoup4/bs4/builder/__init__.pyi +++ b/stubs/beautifulsoup4/bs4/builder/__init__.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any class TreeBuilderRegistry: @@ -33,11 +32,7 @@ class TreeBuilder: def can_be_empty_element(self, tag_name): ... def feed(self, markup) -> None: ... def prepare_markup( - self, - markup, - user_specified_encoding: Incomplete | None = None, - document_declared_encoding: Incomplete | None = None, - exclude_encodings: Incomplete | None = None, + self, markup, user_specified_encoding=None, document_declared_encoding=None, exclude_encodings=None ) -> None: ... def test_fragment_to_document(self, fragment): ... def set_up_substitutions(self, tag): ... diff --git a/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi b/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi index c4677b05a63c..fc912e5eeccd 100644 --- a/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi +++ b/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from html.parser import HTMLParser from typing import Any @@ -28,12 +27,8 @@ class HTMLParserTreeBuilder(HTMLTreeBuilder): features: Any TRACKS_LINE_NUMBERS: bool parser_args: Any - def __init__(self, parser_args: Incomplete | None = None, parser_kwargs: Incomplete | None = None, **kwargs) -> None: ... + def __init__(self, parser_args=None, parser_kwargs=None, **kwargs) -> None: ... def prepare_markup( - self, - markup, - user_specified_encoding: Incomplete | None = None, - document_declared_encoding: Incomplete | None = None, - exclude_encodings: Incomplete | None = None, + self, markup, user_specified_encoding=None, document_declared_encoding=None, exclude_encodings=None ) -> None: ... def feed(self, markup) -> None: ... 
diff --git a/stubs/beautifulsoup4/bs4/builder/_lxml.pyi b/stubs/beautifulsoup4/bs4/builder/_lxml.pyi index 63b2a2f4bd7e..8490889295aa 100644 --- a/stubs/beautifulsoup4/bs4/builder/_lxml.pyi +++ b/stubs/beautifulsoup4/bs4/builder/_lxml.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from bs4.builder import HTMLTreeBuilder, TreeBuilder @@ -19,13 +18,9 @@ class LXMLTreeBuilderForXML(TreeBuilder): empty_element_tags: Any soup: Any nsmaps: Any - def __init__(self, parser: Incomplete | None = None, empty_element_tags: Incomplete | None = None, **kwargs) -> None: ... + def __init__(self, parser=None, empty_element_tags=None, **kwargs) -> None: ... def prepare_markup( # type: ignore[override] # the order of the parameters is different - self, - markup, - user_specified_encoding: Incomplete | None = None, - exclude_encodings: Incomplete | None = None, - document_declared_encoding: Incomplete | None = None, + self, markup, user_specified_encoding=None, exclude_encodings=None, document_declared_encoding=None ) -> None: ... parser: Any def feed(self, markup) -> None: ... diff --git a/stubs/beautifulsoup4/bs4/element.pyi b/stubs/beautifulsoup4/bs4/element.pyi index cf5d39a4d4ef..47eb80b5b853 100644 --- a/stubs/beautifulsoup4/bs4/element.pyi +++ b/stubs/beautifulsoup4/bs4/element.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, ReadableBuffer +from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable, Iterator from re import Pattern from typing import Any, Literal, TypeVar, overload @@ -359,16 +359,10 @@ class Tag(PageElement): @property def descendants(self) -> Iterable[PageElement]: ... def select_one( - self, selector: str, namespaces: Incomplete | None = None, *, flags: int = ..., custom: dict[str, str] | None = ... + self, selector: str, namespaces=None, *, flags: int = ..., custom: dict[str, str] | None = ... ) -> Tag | None: ... def select( - self, - selector: str, - namespaces: Incomplete | None = None, - limit: int | None = None, - *, - flags: int = ..., - custom: dict[str, str] | None = ..., + self, selector: str, namespaces=None, limit: int | None = None, *, flags: int = ..., custom: dict[str, str] | None = ... ) -> ResultSet[Tag]: ... def childGenerator(self) -> Iterable[PageElement]: ... def recursiveChildGenerator(self) -> Iterable[PageElement]: ... diff --git a/stubs/boltons/boltons/cacheutils.pyi b/stubs/boltons/boltons/cacheutils.pyi index fe00700c3299..751aaec3b1e8 100644 --- a/stubs/boltons/boltons/cacheutils.pyi +++ b/stubs/boltons/boltons/cacheutils.pyi @@ -20,9 +20,7 @@ class LRI(dict[_KT, _VT]): soft_miss_count: int max_size: int on_miss: Callable[[_KT], _VT] | None - def __init__( - self, max_size: int = 128, values: Incomplete | None = None, on_miss: Callable[[_KT], _VT] | None = None - ) -> None: ... + def __init__(self, max_size: int = 128, values=None, on_miss: Callable[[_KT], _VT] | None = None) -> None: ... def __setitem__(self, key: _KT, value: _VT) -> None: ... def __getitem__(self, key: _KT) -> _VT: ... @overload @@ -85,7 +83,7 @@ class CachedMethod: typed: bool = False, key: Callable[..., Incomplete] | None = None, ): ... - def __get__(self, obj, objtype: Incomplete | None = None): ... + def __get__(self, obj, objtype=None): ... def __call__(self, *args, **kwargs): ... 
def cached( diff --git a/stubs/boltons/boltons/funcutils.pyi b/stubs/boltons/boltons/funcutils.pyi index b44cccf3b94b..0102bc154b33 100644 --- a/stubs/boltons/boltons/funcutils.pyi +++ b/stubs/boltons/boltons/funcutils.pyi @@ -19,7 +19,7 @@ def inspect_formatargspec( formatreturns=..., formatannotation=..., ): ... -def get_module_callables(mod, ignore: Incomplete | None = None): ... +def get_module_callables(mod, ignore=None): ... def mro_items(type_obj): ... def dir_dict(obj, raise_exc: bool = False): ... def copy_function(orig, copy_dict: bool = True): ... @@ -37,22 +37,11 @@ class CachedInstancePartial(functools.partial[Incomplete]): partial = CachedInstancePartial -def format_invocation(name: str = "", args=(), kwargs: Incomplete | None = None, **kw): ... -def format_exp_repr( - obj, pos_names, req_names: Incomplete | None = None, opt_names: Incomplete | None = None, opt_key: Incomplete | None = None -): ... -def format_nonexp_repr( - obj, req_names: Incomplete | None = None, opt_names: Incomplete | None = None, opt_key: Incomplete | None = None -): ... -def wraps(func, injected: Incomplete | None = None, expected: Incomplete | None = None, **kw): ... -def update_wrapper( - wrapper, - func, - injected: Incomplete | None = None, - expected: Incomplete | None = None, - build_from: Incomplete | None = None, - **kw, -): ... +def format_invocation(name: str = "", args=(), kwargs=None, **kw): ... +def format_exp_repr(obj, pos_names, req_names=None, opt_names=None, opt_key=None): ... +def format_nonexp_repr(obj, req_names=None, opt_names=None, opt_key=None): ... +def wraps(func, injected=None, expected=None, **kw): ... +def update_wrapper(wrapper, func, injected=None, expected=None, build_from=None, **kw): ... class FunctionBuilder: name: Incomplete @@ -61,7 +50,7 @@ class FunctionBuilder: def get_invocation_str(self): ... @classmethod def from_func(cls, func): ... - def get_func(self, execdict: Incomplete | None = None, add_source: bool = True, with_dict: bool = True): ... + def get_func(self, execdict=None, add_source: bool = True, with_dict: bool = True): ... def get_defaults_dict(self): ... def get_arg_names(self, only_required: bool = False): ... defaults: Incomplete diff --git a/stubs/boltons/boltons/ioutils.pyi b/stubs/boltons/boltons/ioutils.pyi index a7e325a1aa23..90bb47dad2e1 100644 --- a/stubs/boltons/boltons/ioutils.pyi +++ b/stubs/boltons/boltons/ioutils.pyi @@ -7,7 +7,7 @@ EINVAL: Incomplete class SpooledIOBase(metaclass=abc.ABCMeta): __metaclass__: Incomplete - def __init__(self, max_size: int = 5000000, dir: Incomplete | None = None) -> None: ... + def __init__(self, max_size: int = 5000000, dir=None) -> None: ... @abstractmethod def read(self, n: int = -1): ... @abstractmethod @@ -15,7 +15,7 @@ class SpooledIOBase(metaclass=abc.ABCMeta): @abstractmethod def seek(self, pos, mode: int = 0): ... @abstractmethod - def readline(self, length: Incomplete | None = None): ... + def readline(self, length=None): ... @abstractmethod def readlines(self, sizehint: int = 0): ... def writelines(self, lines) -> None: ... @@ -41,7 +41,7 @@ class SpooledIOBase(metaclass=abc.ABCMeta): @property def buf(self): ... def fileno(self): ... - def truncate(self, size: Incomplete | None = None): ... + def truncate(self, size=None): ... def getvalue(self): ... def seekable(self): ... def readable(self): ... @@ -61,7 +61,7 @@ class SpooledBytesIO(SpooledIOBase): def read(self, n: int = -1): ... def write(self, s) -> None: ... def seek(self, pos, mode: int = 0): ... 
- def readline(self, length: Incomplete | None = None): ... + def readline(self, length=None): ... def readlines(self, sizehint: int = 0): ... def rollover(self) -> None: ... @property @@ -75,7 +75,7 @@ class SpooledStringIO(SpooledIOBase): def read(self, n: int = -1): ... def write(self, s) -> None: ... def seek(self, pos, mode: int = 0): ... - def readline(self, length: Incomplete | None = None): ... + def readline(self, length=None): ... def readlines(self, sizehint: int = 0): ... @property def buffer(self): ... @@ -88,5 +88,5 @@ def is_text_fileobj(fileobj) -> bool: ... class MultiFileReader: def __init__(self, *fileobjs) -> None: ... - def read(self, amt: Incomplete | None = None): ... + def read(self, amt=None): ... def seek(self, offset, whence=0) -> None: ... diff --git a/stubs/boltons/boltons/iterutils.pyi b/stubs/boltons/boltons/iterutils.pyi index 5e4ad8292868..ba1b7dd0293c 100644 --- a/stubs/boltons/boltons/iterutils.pyi +++ b/stubs/boltons/boltons/iterutils.pyi @@ -4,17 +4,15 @@ from collections.abc import Generator def is_iterable(obj) -> bool: ... def is_scalar(obj) -> bool: ... def is_collection(obj) -> bool: ... -def split(src, sep: Incomplete | None = None, maxsplit: Incomplete | None = None): ... -def split_iter( - src, sep: Incomplete | None = None, maxsplit: Incomplete | None = None -) -> Generator[Incomplete, None, Incomplete]: ... -def lstrip(iterable, strip_value: Incomplete | None = None): ... -def lstrip_iter(iterable, strip_value: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... -def rstrip(iterable, strip_value: Incomplete | None = None): ... -def rstrip_iter(iterable, strip_value: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... -def strip(iterable, strip_value: Incomplete | None = None): ... -def strip_iter(iterable, strip_value: Incomplete | None = None): ... -def chunked(src, size, count: Incomplete | None = None, **kw): ... +def split(src, sep=None, maxsplit=None): ... +def split_iter(src, sep=None, maxsplit=None) -> Generator[Incomplete, None, Incomplete]: ... +def lstrip(iterable, strip_value=None): ... +def lstrip_iter(iterable, strip_value=None) -> Generator[Incomplete, None, None]: ... +def rstrip(iterable, strip_value=None): ... +def rstrip_iter(iterable, strip_value=None) -> Generator[Incomplete, None, None]: ... +def strip(iterable, strip_value=None): ... +def strip_iter(iterable, strip_value=None): ... +def chunked(src, size, count=None, **kw): ... def chunked_iter(src, size, **kw) -> Generator[Incomplete, None, Incomplete]: ... def chunk_ranges( input_size: int, chunk_size: int, input_offset: int = 0, overlap_size: int = 0, align: bool = False @@ -23,19 +21,17 @@ def pairwise(src, end=...): ... def pairwise_iter(src, end=...): ... def windowed(src, size, fill=...): ... def windowed_iter(src, size, fill=...): ... -def xfrange(stop, start: Incomplete | None = None, step: float = 1.0) -> Generator[Incomplete, None, None]: ... -def frange(stop, start: Incomplete | None = None, step: float = 1.0): ... -def backoff(start, stop, count: Incomplete | None = None, factor: float = 2.0, jitter: bool = False): ... -def backoff_iter( - start, stop, count: Incomplete | None = None, factor: float = 2.0, jitter: bool = False -) -> Generator[Incomplete, None, None]: ... -def bucketize(src, key=..., value_transform: Incomplete | None = None, key_filter: Incomplete | None = None): ... +def xfrange(stop, start=None, step: float = 1.0) -> Generator[Incomplete, None, None]: ... +def frange(stop, start=None, step: float = 1.0): ... 
+def backoff(start, stop, count=None, factor: float = 2.0, jitter: bool = False): ... +def backoff_iter(start, stop, count=None, factor: float = 2.0, jitter: bool = False) -> Generator[Incomplete, None, None]: ... +def bucketize(src, key=..., value_transform=None, key_filter=None): ... def partition(src, key=...): ... -def unique(src, key: Incomplete | None = None): ... -def unique_iter(src, key: Incomplete | None = None) -> Generator[Incomplete, None, Incomplete]: ... -def redundant(src, key: Incomplete | None = None, groups: bool = False): ... -def one(src, default: Incomplete | None = None, key: Incomplete | None = None): ... -def first(iterable, default: Incomplete | None = None, key: Incomplete | None = None): ... +def unique(src, key=None): ... +def unique_iter(src, key=None) -> Generator[Incomplete, None, Incomplete]: ... +def redundant(src, key=None, groups: bool = False): ... +def one(src, default=None, key=None): ... +def first(iterable, default=None, key=None): ... def flatten_iter(iterable) -> Generator[Incomplete, None, None]: ... def flatten(iterable): ... def same(iterable, ref=...): ... @@ -73,11 +69,5 @@ class SequentialGUIDerator(GUIDerator): guid_iter: Incomplete seq_guid_iter: Incomplete -def soft_sorted( - iterable, - first: Incomplete | None = None, - last: Incomplete | None = None, - key: Incomplete | None = None, - reverse: bool = False, -): ... -def untyped_sorted(iterable, key: Incomplete | None = None, reverse: bool = False): ... +def soft_sorted(iterable, first=None, last=None, key=None, reverse: bool = False): ... +def untyped_sorted(iterable, key=None, reverse: bool = False): ... diff --git a/stubs/boltons/boltons/tableutils.pyi b/stubs/boltons/boltons/tableutils.pyi index f59606c0391d..19c2ee650767 100644 --- a/stubs/boltons/boltons/tableutils.pyi +++ b/stubs/boltons/boltons/tableutils.pyi @@ -38,21 +38,21 @@ class NamedTupleInputType(InputType): class Table: headers: Incomplete metadata: Incomplete - def __init__(self, data: Incomplete | None = None, headers=..., metadata: Incomplete | None = None) -> None: ... + def __init__(self, data=None, headers=..., metadata=None) -> None: ... def extend(self, data) -> None: ... @classmethod - def from_dict(cls, data, headers=..., max_depth: int = 1, metadata: Incomplete | None = None): ... + def from_dict(cls, data, headers=..., max_depth: int = 1, metadata=None): ... @classmethod - def from_list(cls, data, headers=..., max_depth: int = 1, metadata: Incomplete | None = None): ... + def from_list(cls, data, headers=..., max_depth: int = 1, metadata=None): ... @classmethod - def from_object(cls, data, headers=..., max_depth: int = 1, metadata: Incomplete | None = None): ... + def from_object(cls, data, headers=..., max_depth: int = 1, metadata=None): ... @classmethod def from_data(cls, data, headers=..., max_depth: int = 1, **kwargs): ... def __len__(self): ... def __getitem__(self, idx): ... def to_html( self, - orientation: Incomplete | None = None, + orientation=None, wrapped: bool = True, with_headers: bool = True, with_newlines: bool = True, @@ -60,6 +60,6 @@ class Table: max_depth: int = 1, ): ... def get_cell_html(self, value): ... - def to_text(self, with_headers: bool = True, maxlen: Incomplete | None = None): ... + def to_text(self, with_headers: bool = True, maxlen=None): ... 
__all__ = ["Table"] diff --git a/stubs/boltons/boltons/urlutils.pyi b/stubs/boltons/boltons/urlutils.pyi index fb4645226dcf..3947a0444bda 100644 --- a/stubs/boltons/boltons/urlutils.pyi +++ b/stubs/boltons/boltons/urlutils.pyi @@ -17,14 +17,14 @@ def quote_fragment_part(text, full_quote: bool = True): ... def quote_userinfo_part(text, full_quote: bool = True): ... def unquote(string, encoding: str = "utf-8", errors: str = "replace"): ... def unquote_to_bytes(string): ... -def register_scheme(text, uses_netloc: Incomplete | None = None, default_port: Incomplete | None = None) -> None: ... +def register_scheme(text, uses_netloc=None, default_port=None) -> None: ... def resolve_path_parts(path_parts): ... class cachedproperty: __doc__: Incomplete func: Incomplete def __init__(self, func) -> None: ... - def __get__(self, obj, objtype: Incomplete | None = None): ... + def __get__(self, obj, objtype=None): ... class URL: scheme: Incomplete @@ -38,15 +38,7 @@ class URL: def __init__(self, url: str = "") -> None: ... @classmethod def from_parts( - cls, - scheme: Incomplete | None = None, - host: Incomplete | None = None, - path_parts=(), - query_params=(), - fragment: str = "", - port: Incomplete | None = None, - username: Incomplete | None = None, - password: Incomplete | None = None, + cls, scheme=None, host=None, path_parts=(), query_params=(), fragment: str = "", port=None, username=None, password=None ): ... query_params: Incomplete qp: Incomplete diff --git a/stubs/braintree/braintree/braintree_gateway.pyi b/stubs/braintree/braintree/braintree_gateway.pyi index 5afb253f8639..c80300a7ec4c 100644 --- a/stubs/braintree/braintree/braintree_gateway.pyi +++ b/stubs/braintree/braintree/braintree_gateway.pyi @@ -63,4 +63,4 @@ class BraintreeGateway: verification: CreditCardVerificationGateway webhook_notification: WebhookNotificationGateway webhook_testing: WebhookTestingGateway - def __init__(self, config: Incomplete | None = None, **kwargs) -> None: ... + def __init__(self, config=None, **kwargs) -> None: ... diff --git a/stubs/braintree/braintree/client_token.pyi b/stubs/braintree/braintree/client_token.pyi index 214c90c4be3d..1e2f68f19a0b 100644 --- a/stubs/braintree/braintree/client_token.pyi +++ b/stubs/braintree/braintree/client_token.pyi @@ -1,7 +1,5 @@ -from _typeshed import Incomplete - class ClientToken: @staticmethod - def generate(params: Incomplete | None = None, gateway: Incomplete | None = None): ... + def generate(params=None, gateway=None): ... @staticmethod def generate_signature() -> list[str | dict[str, list[str]]]: ... diff --git a/stubs/braintree/braintree/client_token_gateway.pyi b/stubs/braintree/braintree/client_token_gateway.pyi index 6b0e8bda26f3..1f53d277caba 100644 --- a/stubs/braintree/braintree/client_token_gateway.pyi +++ b/stubs/braintree/braintree/client_token_gateway.pyi @@ -4,4 +4,4 @@ class ClientTokenGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def generate(self, params: Incomplete | None = None): ... + def generate(self, params=None): ... diff --git a/stubs/braintree/braintree/configuration.pyi b/stubs/braintree/braintree/configuration.pyi index 30af8103bc43..b480ffaa6781 100644 --- a/stubs/braintree/braintree/configuration.pyi +++ b/stubs/braintree/braintree/configuration.pyi @@ -12,7 +12,7 @@ class Configuration: public_key: str, private_key: str, *, - http_strategy: Incomplete | None = None, + http_strategy=None, timeout: int = 60, wrap_http_exceptions: bool = False, ) -> None: ... 
@@ -23,7 +23,7 @@ class Configuration: public_key: str, private_key: str, *, - http_strategy: Incomplete | None = None, + http_strategy=None, timeout: int = 60, wrap_http_exceptions: bool = False, ) -> Configuration: ... @@ -46,7 +46,7 @@ class Configuration: wrap_http_exceptions: bool def __init__( self, - environment: Incomplete | None = None, + environment=None, merchant_id: str | None = None, public_key: str | None = None, private_key: str | None = None, @@ -56,7 +56,7 @@ class Configuration: *args, timeout: int = 60, wrap_http_exceptions: bool = False, - http_strategy: Incomplete | None = None, + http_strategy=None, ) -> None: ... def base_merchant_path(self) -> str: ... def base_url(self) -> str: ... diff --git a/stubs/braintree/braintree/exchange_rate_quote_gateway.pyi b/stubs/braintree/braintree/exchange_rate_quote_gateway.pyi index 98449f0872c1..ee551915d1e8 100644 --- a/stubs/braintree/braintree/exchange_rate_quote_gateway.pyi +++ b/stubs/braintree/braintree/exchange_rate_quote_gateway.pyi @@ -8,6 +8,6 @@ class ExchangeRateQuoteGateway: gateway: Incomplete config: Incomplete graphql_client: Incomplete - def __init__(self, gateway, graphql_client: Incomplete | None = None) -> None: ... + def __init__(self, gateway, graphql_client=None) -> None: ... exchange_rate_quote_payload: ExchangeRateQuotePayload def generate(self, request) -> SuccessfulResult | ErrorResult | None: ... diff --git a/stubs/braintree/braintree/merchant_account/merchant_account.pyi b/stubs/braintree/braintree/merchant_account/merchant_account.pyi index dac21af07dcb..2fbe0ac80ed9 100644 --- a/stubs/braintree/braintree/merchant_account/merchant_account.pyi +++ b/stubs/braintree/braintree/merchant_account/merchant_account.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Final from braintree.resource import Resource @@ -18,7 +17,7 @@ class MerchantAccount(Resource): master_merchant_account: MerchantAccount def __init__(self, gateway, attributes) -> None: ... @staticmethod - def create(params: Incomplete | None = None): ... + def create(params=None): ... @staticmethod def update(id, attributes): ... @staticmethod diff --git a/stubs/braintree/braintree/payment_method.pyi b/stubs/braintree/braintree/payment_method.pyi index b87060d671d1..9d87458d2c30 100644 --- a/stubs/braintree/braintree/payment_method.pyi +++ b/stubs/braintree/braintree/payment_method.pyi @@ -12,7 +12,7 @@ class PaymentMethod(Resource): @staticmethod def update(payment_method_token: str, params) -> SuccessfulResult | ErrorResult: ... @staticmethod - def delete(payment_method_token: str, options: Incomplete | None = None) -> SuccessfulResult: ... + def delete(payment_method_token: str, options=None) -> SuccessfulResult: ... @staticmethod def create_signature() -> ( list[ diff --git a/stubs/braintree/braintree/payment_method_gateway.pyi b/stubs/braintree/braintree/payment_method_gateway.pyi index 7d8ab1876484..73393a851e7c 100644 --- a/stubs/braintree/braintree/payment_method_gateway.pyi +++ b/stubs/braintree/braintree/payment_method_gateway.pyi @@ -11,7 +11,7 @@ class PaymentMethodGateway: def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult: ... def find(self, payment_method_token: str) -> Resource: ... def update(self, payment_method_token: str, params) -> SuccessfulResult | ErrorResult: ... - def delete(self, payment_method_token: str, options: Incomplete | None = None) -> SuccessfulResult: ... + def delete(self, payment_method_token: str, options=None) -> SuccessfulResult: ... 
options: dict[str, Incomplete] - def grant(self, payment_method_token: str, options: Incomplete | None = None) -> SuccessfulResult | ErrorResult: ... + def grant(self, payment_method_token: str, options=None) -> SuccessfulResult | ErrorResult: ... def revoke(self, payment_method_token: str) -> SuccessfulResult | ErrorResult: ... diff --git a/stubs/braintree/braintree/paypal_account.pyi b/stubs/braintree/braintree/paypal_account.pyi index c24ebba39820..984bb08de2d8 100644 --- a/stubs/braintree/braintree/paypal_account.pyi +++ b/stubs/braintree/braintree/paypal_account.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from braintree.error_result import ErrorResult from braintree.resource import Resource from braintree.subscription import Subscription @@ -11,7 +9,7 @@ class PayPalAccount(Resource): @staticmethod def delete(paypal_account_token: str) -> SuccessfulResult: ... @staticmethod - def update(paypal_account_token: str, params: Incomplete | None = None) -> SuccessfulResult | ErrorResult | None: ... + def update(paypal_account_token: str, params=None) -> SuccessfulResult | ErrorResult | None: ... @staticmethod def signature() -> list[str | dict[str, list[str]]]: ... subscriptions: list[Subscription] diff --git a/stubs/braintree/braintree/paypal_account_gateway.pyi b/stubs/braintree/braintree/paypal_account_gateway.pyi index 471601f27e67..873cca8b10d1 100644 --- a/stubs/braintree/braintree/paypal_account_gateway.pyi +++ b/stubs/braintree/braintree/paypal_account_gateway.pyi @@ -10,4 +10,4 @@ class PayPalAccountGateway: def __init__(self, gateway) -> None: ... def find(self, paypal_account_token: str) -> PayPalAccount | None: ... def delete(self, paypal_account_token: str) -> SuccessfulResult: ... - def update(self, paypal_account_token: str, params: Incomplete | None = None) -> SuccessfulResult | ErrorResult | None: ... + def update(self, paypal_account_token: str, params=None) -> SuccessfulResult | ErrorResult | None: ... diff --git a/stubs/braintree/braintree/plan.pyi b/stubs/braintree/braintree/plan.pyi index 4320bf8af9d9..47de7b28c147 100644 --- a/stubs/braintree/braintree/plan.pyi +++ b/stubs/braintree/braintree/plan.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from braintree.add_on import AddOn from braintree.discount import Discount from braintree.resource import Resource @@ -11,11 +9,11 @@ class Plan(Resource): @staticmethod def all(): ... @staticmethod - def create(params: Incomplete | None = None): ... + def create(params=None): ... @staticmethod def find(subscription_id): ... @staticmethod - def update(subscription_id, params: Incomplete | None = None): ... + def update(subscription_id, params=None): ... @staticmethod def create_signature(): ... @staticmethod diff --git a/stubs/braintree/braintree/plan_gateway.pyi b/stubs/braintree/braintree/plan_gateway.pyi index 0c021ab7fb67..8d4416170c5d 100644 --- a/stubs/braintree/braintree/plan_gateway.pyi +++ b/stubs/braintree/braintree/plan_gateway.pyi @@ -5,6 +5,6 @@ class PlanGateway: config: Incomplete def __init__(self, gateway) -> None: ... def all(self): ... - def create(self, params: Incomplete | None = None): ... + def create(self, params=None): ... def find(self, plan_id): ... - def update(self, plan_id, params: Incomplete | None = None): ... + def update(self, plan_id, params=None): ... 
diff --git a/stubs/braintree/braintree/settlement_batch_summary.pyi b/stubs/braintree/braintree/settlement_batch_summary.pyi index e8e46e8be9ee..0e48629a28d1 100644 --- a/stubs/braintree/braintree/settlement_batch_summary.pyi +++ b/stubs/braintree/braintree/settlement_batch_summary.pyi @@ -1,7 +1,5 @@ -from _typeshed import Incomplete - from braintree.resource import Resource class SettlementBatchSummary(Resource): @staticmethod - def generate(settlement_date, group_by_custom_field: Incomplete | None = None): ... + def generate(settlement_date, group_by_custom_field=None): ... diff --git a/stubs/braintree/braintree/settlement_batch_summary_gateway.pyi b/stubs/braintree/braintree/settlement_batch_summary_gateway.pyi index 9ee140a02e51..09c324ea0d3e 100644 --- a/stubs/braintree/braintree/settlement_batch_summary_gateway.pyi +++ b/stubs/braintree/braintree/settlement_batch_summary_gateway.pyi @@ -4,4 +4,4 @@ class SettlementBatchSummaryGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def generate(self, settlement_date, group_by_custom_field: Incomplete | None = None): ... + def generate(self, settlement_date, group_by_custom_field=None): ... diff --git a/stubs/braintree/braintree/subscription.pyi b/stubs/braintree/braintree/subscription.pyi index 6bb0ff0af6d0..98acf134f39d 100644 --- a/stubs/braintree/braintree/subscription.pyi +++ b/stubs/braintree/braintree/subscription.pyi @@ -27,15 +27,15 @@ class Subscription(Resource): Pending: Final = "Pending" @staticmethod - def create(params: Incomplete | None = None): ... + def create(params=None): ... @staticmethod def create_signature(): ... @staticmethod def find(subscription_id): ... @staticmethod - def retry_charge(subscription_id, amount: Incomplete | None = None, submit_for_settlement: bool = False): ... + def retry_charge(subscription_id, amount=None, submit_for_settlement: bool = False): ... @staticmethod - def update(subscription_id, params: Incomplete | None = None): ... + def update(subscription_id, params=None): ... @staticmethod def cancel(subscription_id): ... @staticmethod diff --git a/stubs/braintree/braintree/subscription_gateway.pyi b/stubs/braintree/braintree/subscription_gateway.pyi index e77c4df80a8a..37dfbae24b93 100644 --- a/stubs/braintree/braintree/subscription_gateway.pyi +++ b/stubs/braintree/braintree/subscription_gateway.pyi @@ -5,8 +5,8 @@ class SubscriptionGateway: config: Incomplete def __init__(self, gateway) -> None: ... def cancel(self, subscription_id): ... - def create(self, params: Incomplete | None = None): ... + def create(self, params=None): ... def find(self, subscription_id): ... - def retry_charge(self, subscription_id, amount: Incomplete | None = None, submit_for_settlement: bool = False): ... + def retry_charge(self, subscription_id, amount=None, submit_for_settlement: bool = False): ... def search(self, *query): ... - def update(self, subscription_id, params: Incomplete | None = None): ... + def update(self, subscription_id, params=None): ... diff --git a/stubs/braintree/braintree/transaction.pyi b/stubs/braintree/braintree/transaction.pyi index e2dc4045b9d4..9fe08991b64a 100644 --- a/stubs/braintree/braintree/transaction.pyi +++ b/stubs/braintree/braintree/transaction.pyi @@ -99,19 +99,19 @@ class Transaction(Resource): @staticmethod def clone_transaction(transaction_id, params): ... @staticmethod - def credit(params: Incomplete | None = None): ... + def credit(params=None): ... @staticmethod def find(transaction_id): ... 
@staticmethod - def refund(transaction_id, amount_or_options: Incomplete | None = None): ... + def refund(transaction_id, amount_or_options=None): ... @staticmethod - def sale(params: Incomplete | None = None): ... + def sale(params=None): ... @staticmethod def search(*query): ... @staticmethod - def submit_for_settlement(transaction_id, amount: Incomplete | None = None, params: Incomplete | None = None): ... + def submit_for_settlement(transaction_id, amount=None, params=None): ... @staticmethod - def update_details(transaction_id, params: Incomplete | None = None): ... + def update_details(transaction_id, params=None): ... @staticmethod def void(transaction_id): ... @staticmethod @@ -127,13 +127,13 @@ class Transaction(Resource): @staticmethod def package_tracking_signature(): ... @staticmethod - def package_tracking(transaction_id, params: Incomplete | None = None): ... + def package_tracking(transaction_id, params=None): ... @staticmethod def update_details_signature(): ... @staticmethod def refund_signature(): ... @staticmethod - def submit_for_partial_settlement(transaction_id, amount, params: Incomplete | None = None): ... + def submit_for_partial_settlement(transaction_id, amount, params=None): ... amount: Decimal tax_amount: Decimal | None discount_amount: Decimal | None diff --git a/stubs/braintree/braintree/transaction_gateway.pyi b/stubs/braintree/braintree/transaction_gateway.pyi index a0b2cb8cbaf0..a6ad5f3f6c97 100644 --- a/stubs/braintree/braintree/transaction_gateway.pyi +++ b/stubs/braintree/braintree/transaction_gateway.pyi @@ -10,11 +10,11 @@ class TransactionGateway: def create(self, params): ... def credit(self, params): ... def find(self, transaction_id): ... - def refund(self, transaction_id, amount_or_options: Incomplete | None = None): ... + def refund(self, transaction_id, amount_or_options=None): ... def sale(self, params): ... def search(self, *query): ... - def submit_for_settlement(self, transaction_id, amount: Incomplete | None = None, params: Incomplete | None = None): ... - def update_details(self, transaction_id, params: Incomplete | None = None): ... - def submit_for_partial_settlement(self, transaction_id, amount, params: Incomplete | None = None): ... - def package_tracking(self, transaction_id, params: Incomplete | None = None): ... + def submit_for_settlement(self, transaction_id, amount=None, params=None): ... + def update_details(self, transaction_id, params=None): ... + def submit_for_partial_settlement(self, transaction_id, amount, params=None): ... + def package_tracking(self, transaction_id, params=None): ... def void(self, transaction_id): ... diff --git a/stubs/braintree/braintree/util/graphql_client.pyi b/stubs/braintree/braintree/util/graphql_client.pyi index e893ae0d1e8f..35c5d3def76c 100644 --- a/stubs/braintree/braintree/util/graphql_client.pyi +++ b/stubs/braintree/braintree/util/graphql_client.pyi @@ -27,7 +27,7 @@ class GraphQLClient(Http): def raise_exception_for_graphql_error(response: _Response) -> None: ... graphql_headers: dict[str, str] def __init__(self, config: Configuration | None = None, environment: Environment | None = None) -> None: ... - def query(self, definition, variables: Incomplete | None = None, operation_name: Incomplete | None = None): ... + def query(self, definition, variables=None, operation_name=None): ... @staticmethod def get_validation_errors(response) -> _ValidationErrors | None: ... 
@staticmethod diff --git a/stubs/caldav/caldav/objects.pyi b/stubs/caldav/caldav/objects.pyi index bcdcaff036fc..329ee2ce973a 100644 --- a/stubs/caldav/caldav/objects.pyi +++ b/stubs/caldav/caldav/objects.pyi @@ -39,17 +39,15 @@ class DAVObject: def canonical_url(self) -> str: ... def children(self, type: str | None = None) -> list[tuple[URL, Incomplete, Incomplete]]: ... def get_property(self, prop, use_cached: bool = False, **passthrough) -> Incomplete | None: ... - def get_properties( - self, props: Incomplete | None = None, depth: int = 0, parse_response_xml: bool = True, parse_props: bool = True - ): ... - def set_properties(self, props: Incomplete | None = None) -> Self: ... + def get_properties(self, props=None, depth: int = 0, parse_response_xml: bool = True, parse_props: bool = True): ... + def set_properties(self, props=None) -> Self: ... def save(self) -> Self: ... def delete(self) -> None: ... class CalendarSet(DAVObject): def calendars(self) -> list[Calendar]: ... def make_calendar( - self, name: str | None = None, cal_id: str | None = None, supported_calendar_component_set: Incomplete | None = None + self, name: str | None = None, cal_id: str | None = None, supported_calendar_component_set=None ) -> Calendar: ... def calendar(self, name: str | None = None, cal_id: str | None = None) -> Calendar: ... @@ -57,7 +55,7 @@ class Principal(DAVObject): def __init__(self, client: DAVClient | None = None, url: str | ParseResult | SplitResult | URL | None = None) -> None: ... def calendars(self) -> list[Calendar]: ... def make_calendar( - self, name: str | None = None, cal_id: str | None = None, supported_calendar_component_set: Incomplete | None = None + self, name: str | None = None, cal_id: str | None = None, supported_calendar_component_set=None ) -> Calendar: ... def calendar(self, name: str | None = None, cal_id: str | None = None, cal_url: str | None = None) -> Calendar: ... def get_vcal_address(self) -> _VCalAddress: ... @@ -175,16 +173,14 @@ class Calendar(DAVObject): def todos( self, sort_keys: Iterable[str] = ("due", "priority"), include_completed: bool = False, sort_key: str | None = None ) -> list[Todo]: ... - def event_by_url(self, href, data: Incomplete | None = None) -> Event: ... + def event_by_url(self, href, data=None) -> Event: ... def object_by_uid(self, uid: str, comp_filter: CompFilter | None = None, comp_class: _CompClass | None = None) -> Event: ... def todo_by_uid(self, uid: str) -> CalendarObjectResource: ... def event_by_uid(self, uid: str) -> CalendarObjectResource: ... def journal_by_uid(self, uid: str) -> CalendarObjectResource: ... event = event_by_uid def events(self) -> list[Event]: ... - def objects_by_sync_token( - self, sync_token: Incomplete | None = None, load_objects: bool = False - ) -> SynchronizableCalendarObjectCollection: ... + def objects_by_sync_token(self, sync_token=None, load_objects: bool = False) -> SynchronizableCalendarObjectCollection: ... objects = objects_by_sync_token def journals(self) -> list[Journal]: ... @@ -215,10 +211,10 @@ class CalendarObjectResource(DAVObject): self, client: DAVClient | None = None, url: str | ParseResult | SplitResult | URL | None = None, - data: Incomplete | None = None, - parent: Incomplete | None = None, - id: Incomplete | None = None, - props: Incomplete | None = None, + data=None, + parent=None, + id=None, + props=None, ) -> None: ... def add_organizer(self) -> None: ... def split_expanded(self) -> list[Self]: ... 
@@ -232,12 +228,12 @@ class CalendarObjectResource(DAVObject): ) -> defaultdict[str, set[str]]: ... def add_attendee(self, attendee, no_default_parameters: bool = False, **parameters) -> None: ... def is_invite_request(self) -> bool: ... - def accept_invite(self, calendar: Incomplete | None = None) -> None: ... - def decline_invite(self, calendar: Incomplete | None = None) -> None: ... - def tentatively_accept_invite(self, calendar: Incomplete | None = None) -> None: ... - def copy(self, keep_uid: bool = False, new_parent: Incomplete | None = None) -> Self: ... + def accept_invite(self, calendar=None) -> None: ... + def decline_invite(self, calendar=None) -> None: ... + def tentatively_accept_invite(self, calendar=None) -> None: ... + def copy(self, keep_uid: bool = False, new_parent=None) -> Self: ... def load(self, only_if_unloaded: bool = False) -> Self: ... - def change_attendee_status(self, attendee: Incomplete | None = None, **kwargs) -> None: ... + def change_attendee_status(self, attendee=None, **kwargs) -> None: ... def save( self, no_overwrite: bool = False, @@ -256,9 +252,7 @@ class Event(CalendarObjectResource): ... class Journal(CalendarObjectResource): ... class FreeBusy(CalendarObjectResource): - def __init__( - self, parent, data, url: str | ParseResult | SplitResult | URL | None = None, id: Incomplete | None = None - ) -> None: ... + def __init__(self, parent, data, url: str | ParseResult | SplitResult | URL | None = None, id=None) -> None: ... class Todo(CalendarObjectResource): def complete( diff --git a/stubs/cffi/cffi/api.pyi b/stubs/cffi/cffi/api.pyi index 5caf266535c0..b3967e1612fe 100644 --- a/stubs/cffi/cffi/api.pyi +++ b/stubs/cffi/cffi/api.pyi @@ -1,6 +1,6 @@ import sys import types -from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer +from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Callable, Hashable from typing import Any, Literal, TypeVar, overload from typing_extensions import TypeAlias @@ -36,7 +36,7 @@ class FFI: def sizeof(self, cdecl: str | CData) -> int: ... def alignof(self, cdecl: str | CData) -> int: ... def offsetof(self, cdecl: str | CData, *fields_or_indexes: str | int) -> int: ... - def new(self, cdecl: str | CType, init: Incomplete | None = None) -> CData: ... + def new(self, cdecl: str | CType, init=None) -> CData: ... def new_allocator( self, alloc: Callable[[int], CData] | None = None, diff --git a/stubs/cffi/cffi/backend_ctypes.pyi b/stubs/cffi/cffi/backend_ctypes.pyi index 800ddb7e7011..7faef0c73d59 100644 --- a/stubs/cffi/cffi/backend_ctypes.pyi +++ b/stubs/cffi/cffi/backend_ctypes.pyi @@ -69,7 +69,7 @@ class CTypesBackend: typeof: Incomplete def getcname(self, BType, replace_with): ... def typeoffsetof(self, BType, fieldname, num: int = 0): ... - def rawaddressof(self, BTypePtr, cdata, offset: Incomplete | None = None): ... + def rawaddressof(self, BTypePtr, cdata, offset=None): ... class CTypesLibrary: backend: Incomplete diff --git a/stubs/cffi/cffi/cparser.pyi b/stubs/cffi/cffi/cparser.pyi index 63946020c53e..a42b2ddc0406 100644 --- a/stubs/cffi/cffi/cparser.pyi +++ b/stubs/cffi/cffi/cparser.pyi @@ -6,9 +6,7 @@ CDEF_SOURCE_STRING: str class Parser: def __init__(self) -> None: ... def convert_pycparser_error(self, e, csource) -> None: ... - def parse( - self, csource, override: bool = False, packed: bool = False, pack: Incomplete | None = None, dllexport: bool = False - ) -> None: ... 
+ def parse(self, csource, override: bool = False, packed: bool = False, pack=None, dllexport: bool = False) -> None: ... def parse_type(self, cdecl): ... def parse_type_and_quals(self, cdecl): ... def include(self, other) -> None: ... diff --git a/stubs/cffi/cffi/ffiplatform.pyi b/stubs/cffi/cffi/ffiplatform.pyi index bb0e71bcb34c..f7fe46729b32 100644 --- a/stubs/cffi/cffi/ffiplatform.pyi +++ b/stubs/cffi/cffi/ffiplatform.pyi @@ -1,10 +1,10 @@ -from _typeshed import Incomplete, StrOrBytesPath +from _typeshed import StrOrBytesPath from typing import Any, Final LIST_OF_FILE_NAMES: Final[list[str]] def get_extension(srcfilename, modname, sources=(), **kwds): ... -def compile(tmpdir, ext, compiler_verbose: int = 0, debug: Incomplete | None = None): ... +def compile(tmpdir, ext, compiler_verbose: int = 0, debug=None): ... def maybe_relative_path(path: StrOrBytesPath) -> StrOrBytesPath | str: ... int_or_long = int diff --git a/stubs/cffi/cffi/model.pyi b/stubs/cffi/cffi/model.pyi index 3c462dfa8661..5a4e7cc82f19 100644 --- a/stubs/cffi/cffi/model.pyi +++ b/stubs/cffi/cffi/model.pyi @@ -65,7 +65,7 @@ class BaseFunctionType(BaseType): ellipsis: Incomplete abi: Incomplete c_name_with_marker: str - def __init__(self, args, result, ellipsis, abi: Incomplete | None = None) -> None: ... + def __init__(self, args, result, ellipsis, abi=None) -> None: ... class RawFunctionType(BaseFunctionType): is_raw_function: bool @@ -123,7 +123,7 @@ class StructOrUnion(StructOrUnionOrEnum): fldtypes: Incomplete fldbitsize: Incomplete fldquals: Incomplete - def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals: Incomplete | None = None) -> None: ... + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None) -> None: ... def anonymous_struct_fields(self) -> Generator[StructOrUnion, None, None]: ... def enumfields(self, expand_anonymous_struct_union: bool = True) -> Generator[Incomplete, None, None]: ... def force_flatten(self) -> None: ... @@ -146,7 +146,7 @@ class EnumType(StructOrUnionOrEnum): enumerators: Incomplete enumvalues: Incomplete baseinttype: Incomplete - def __init__(self, name, enumerators, enumvalues, baseinttype: Incomplete | None = None) -> None: ... + def __init__(self, name, enumerators, enumvalues, baseinttype=None) -> None: ... forcename: str | None def force_the_name(self, forcename: str | None) -> None: ... def check_not_partial(self) -> None: ... diff --git a/stubs/cffi/cffi/recompiler.pyi b/stubs/cffi/cffi/recompiler.pyi index d4c3d03bf48d..729975b862db 100644 --- a/stubs/cffi/cffi/recompiler.pyi +++ b/stubs/cffi/cffi/recompiler.pyi @@ -86,7 +86,7 @@ def recompile( preamble: str | None, tmpdir: str = ".", call_c_compiler: bool = True, - c_file: Incomplete | None = None, + c_file=None, source_extension: str = ".c", extradir: StrPath | None = None, compiler_verbose: int = 1, diff --git a/stubs/cffi/cffi/vengine_cpy.pyi b/stubs/cffi/cffi/vengine_cpy.pyi index 0ff656759f2d..e9cd0afa70c0 100644 --- a/stubs/cffi/cffi/vengine_cpy.pyi +++ b/stubs/cffi/cffi/vengine_cpy.pyi @@ -8,6 +8,6 @@ class VCPythonEngine: def find_module(self, module_name, path, so_suffixes): ... def collect_types(self) -> None: ... def write_source_to_f(self) -> None: ... - def load_library(self, flags: Incomplete | None = None): ... + def load_library(self, flags=None): ... 
cffimod_header: str diff --git a/stubs/cffi/cffi/verifier.pyi b/stubs/cffi/cffi/verifier.pyi index a56e9579e413..6901a0a3ba70 100644 --- a/stubs/cffi/cffi/verifier.pyi +++ b/stubs/cffi/cffi/verifier.pyi @@ -29,7 +29,7 @@ class Verifier: relative_to: os.PathLike[AnyStr] | None = None, **kwds: list[str] | tuple[str], ) -> None: ... - def write_source(self, file: Incomplete | None = None) -> None: ... + def write_source(self, file=None) -> None: ... def compile_module(self) -> None: ... def load_library(self): ... def get_module_name(self) -> str: ... diff --git a/stubs/corus/corus/io.pyi b/stubs/corus/corus/io.pyi index 0dfb4f6ea63f..8795b3bce8c4 100644 --- a/stubs/corus/corus/io.pyi +++ b/stubs/corus/corus/io.pyi @@ -14,7 +14,7 @@ def load_xz_lines(path): ... def list_zip(path): ... def load_zip_lines(path, name, encoding: str = "utf8") -> Generator[Incomplete]: ... def load_zip_texts(path, names, encoding: str = "utf8") -> Generator[Incomplete]: ... -def parse_csv(lines, delimiter: str = ",", max_field: Incomplete | None = None): ... +def parse_csv(lines, delimiter: str = ",", max_field=None): ... def parse_tsv(lines): ... def skip_header(rows): ... def dict_csv(rows) -> Generator[Incomplete]: ... diff --git a/stubs/corus/corus/readme.pyi b/stubs/corus/corus/readme.pyi index dd02343ee0ee..ae101bfe7c8a 100644 --- a/stubs/corus/corus/readme.pyi +++ b/stubs/corus/corus/readme.pyi @@ -11,7 +11,7 @@ def is_command(step, commands=("wget", "unzip", "unrar", "rm", "mv", "tar")): .. def format_bytes(value): ... def format_count(value): ... def unfold_metas(items) -> Generator[Incomplete]: ... -def format_metas_(metas, nbviewer: Incomplete | None = None) -> Generator[Incomplete]: ... -def format_metas(metas, url: Incomplete | None = None): ... +def format_metas_(metas, nbviewer=None) -> Generator[Incomplete]: ... +def format_metas(metas, url=None): ... def show_html(html) -> None: ... def patch_readme(html, path) -> None: ... diff --git a/stubs/corus/corus/sources/lenta.pyi b/stubs/corus/corus/sources/lenta.pyi index 58aff5b5df83..70301bbd4fb4 100644 --- a/stubs/corus/corus/sources/lenta.pyi +++ b/stubs/corus/corus/sources/lenta.pyi @@ -11,7 +11,7 @@ class LentaRecord(Record): topic: Incomplete tags: Incomplete date: Incomplete - def __init__(self, url, title, text, topic, tags, date: Incomplete | None = None) -> None: ... + def __init__(self, url, title, text, topic, tags, date=None) -> None: ... def parse_lenta(lines) -> Generator[Incomplete]: ... def parse_lenta2(lines) -> Generator[Incomplete]: ... diff --git a/stubs/corus/corus/sources/meta.pyi b/stubs/corus/corus/sources/meta.pyi index c8b50c267f27..e343c67b16b7 100644 --- a/stubs/corus/corus/sources/meta.pyi +++ b/stubs/corus/corus/sources/meta.pyi @@ -11,16 +11,7 @@ class Meta(Record): instruction: Incomplete tags: Incomplete functions: Incomplete - def __init__( - self, - title, - url: Incomplete | None = None, - description: Incomplete | None = None, - stats: Incomplete | None = None, - instruction=(), - tags=(), - functions=(), - ) -> None: ... + def __init__(self, title, url=None, description=None, stats=None, instruction=(), tags=(), functions=()) -> None: ... class Group(Record): __attributes__: Incomplete @@ -29,9 +20,7 @@ class Group(Record): description: Incomplete instruction: Incomplete metas: Incomplete - def __init__( - self, title, url: Incomplete | None = None, description: Incomplete | None = None, instruction=(), metas=() - ) -> None: ... 
+ def __init__(self, title, url=None, description=None, instruction=(), metas=()) -> None: ... def is_group(item): ... @@ -39,7 +28,7 @@ class Stats(Record): __attributes__: Incomplete bytes: Incomplete count: Incomplete - def __init__(self, bytes: Incomplete | None = None, count: Incomplete | None = None) -> None: ... + def __init__(self, bytes=None, count=None) -> None: ... NER: str NEWS: str diff --git a/stubs/corus/corus/sources/morphoru.pyi b/stubs/corus/corus/sources/morphoru.pyi index 3010d097b255..447fb4c4a5e0 100644 --- a/stubs/corus/corus/sources/morphoru.pyi +++ b/stubs/corus/corus/sources/morphoru.pyi @@ -17,7 +17,7 @@ class MorphoToken(Record): pos: Incomplete feats: Incomplete feats2: Incomplete - def __init__(self, text, lemma, pos, feats, feats2: Incomplete | None = None) -> None: ... + def __init__(self, text, lemma, pos, feats, feats2=None) -> None: ... def load_morphoru_gicrya(path): ... def load_morphoru_rnc(path): ... diff --git a/stubs/corus/corus/sources/taiga/arzamas.pyi b/stubs/corus/corus/sources/taiga/arzamas.pyi index 2b653b0c3678..f8b5500537c6 100644 --- a/stubs/corus/corus/sources/taiga/arzamas.pyi +++ b/stubs/corus/corus/sources/taiga/arzamas.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - __all__ = ["load_taiga_arzamas_metas", "load_taiga_arzamas"] def load_taiga_arzamas_metas(path, offset: int = 0, count: int = 1): ... -def load_taiga_arzamas(path, metas: Incomplete | None = None, offset: int = 144896, count: int = 311): ... +def load_taiga_arzamas(path, metas=None, offset: int = 144896, count: int = 311): ... diff --git a/stubs/corus/corus/sources/taiga/common.pyi b/stubs/corus/corus/sources/taiga/common.pyi index 34fd4cc4ab9c..cedb4a68ec9f 100644 --- a/stubs/corus/corus/sources/taiga/common.pyi +++ b/stubs/corus/corus/sources/taiga/common.pyi @@ -25,15 +25,7 @@ class Author(Record): profession: Incomplete about: Incomplete url: Incomplete - def __init__( - self, - name, - readers: Incomplete | None = None, - texts: Incomplete | None = None, - profession: Incomplete | None = None, - about: Incomplete | None = None, - url: Incomplete | None = None, - ) -> None: ... + def __init__(self, name, readers=None, texts=None, profession=None, about=None, url=None) -> None: ... class Meta(Record): __attributes__: Incomplete @@ -51,16 +43,16 @@ class Meta(Record): def __init__( self, id, - timestamp: Incomplete | None = None, - tags: Incomplete | None = None, - themes: Incomplete | None = None, - rubric: Incomplete | None = None, - genre: Incomplete | None = None, - topic: Incomplete | None = None, - author: Incomplete | None = None, - lang: Incomplete | None = None, - title: Incomplete | None = None, - url: Incomplete | None = None, + timestamp=None, + tags=None, + themes=None, + rubric=None, + genre=None, + topic=None, + author=None, + lang=None, + title=None, + url=None, ) -> None: ... def load_tar(path, offset: int = 0) -> Generator[Incomplete]: ... @@ -73,5 +65,5 @@ def load_texts(path, pattern, offset, count, parse_id, load, encoding: str = "ut def parse_filename_id(path): ... def load_tar_texts(path, pattern, offset, count, parse_id=...): ... def load_zip_texts(path, pattern, offset, count, parse_id=...): ... -def merge_metas(records, metas: Incomplete | None = None) -> Generator[Incomplete]: ... +def merge_metas(records, metas=None) -> Generator[Incomplete]: ... def patch_month(date, months): ... 
diff --git a/stubs/corus/corus/sources/taiga/fontanka.pyi b/stubs/corus/corus/sources/taiga/fontanka.pyi index 0b04f867fe5a..1870bd9270c8 100644 --- a/stubs/corus/corus/sources/taiga/fontanka.pyi +++ b/stubs/corus/corus/sources/taiga/fontanka.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - __all__ = ["load_taiga_fontanka_metas", "load_taiga_fontanka"] def load_taiga_fontanka_metas(path, offset: int = 0, count=13): ... -def load_taiga_fontanka(path, metas: Incomplete | None = None, offset: int = 306359296, count: int = 342683): ... +def load_taiga_fontanka(path, metas=None, offset: int = 306359296, count: int = 342683): ... diff --git a/stubs/corus/corus/sources/taiga/interfax.pyi b/stubs/corus/corus/sources/taiga/interfax.pyi index 9957c1c760a4..6425b7aab3d3 100644 --- a/stubs/corus/corus/sources/taiga/interfax.pyi +++ b/stubs/corus/corus/sources/taiga/interfax.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - __all__ = ["load_taiga_interfax_metas", "load_taiga_interfax"] def load_taiga_interfax_metas(path, offset: int = 0, count: int = 1): ... -def load_taiga_interfax(path, metas: Incomplete | None = None, offset: int = 11447296, count: int = 46429): ... +def load_taiga_interfax(path, metas=None, offset: int = 11447296, count: int = 46429): ... diff --git a/stubs/corus/corus/sources/taiga/kp.pyi b/stubs/corus/corus/sources/taiga/kp.pyi index ed3d7f7024bc..6301a4c6ef63 100644 --- a/stubs/corus/corus/sources/taiga/kp.pyi +++ b/stubs/corus/corus/sources/taiga/kp.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - __all__ = ["load_taiga_kp_metas", "load_taiga_kp"] def load_taiga_kp_metas(path, offset: int = 0, count: int = 1): ... -def load_taiga_kp(path, metas: Incomplete | None = None, offset: int = 13042176, count: int = 45503): ... +def load_taiga_kp(path, metas=None, offset: int = 13042176, count: int = 45503): ... diff --git a/stubs/corus/corus/sources/taiga/lenta.pyi b/stubs/corus/corus/sources/taiga/lenta.pyi index a46dc6187c6c..96b280c96bba 100644 --- a/stubs/corus/corus/sources/taiga/lenta.pyi +++ b/stubs/corus/corus/sources/taiga/lenta.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - __all__ = ["load_taiga_lenta_metas", "load_taiga_lenta"] def load_taiga_lenta_metas(path, offset: int = 0, count: int = 1): ... -def load_taiga_lenta(path, metas: Incomplete | None = None, offset: int = 12800000, count: int = 36446): ... +def load_taiga_lenta(path, metas=None, offset: int = 12800000, count: int = 36446): ... diff --git a/stubs/corus/corus/sources/taiga/magazines.pyi b/stubs/corus/corus/sources/taiga/magazines.pyi index b536568f19ed..ee36599ae8b3 100644 --- a/stubs/corus/corus/sources/taiga/magazines.pyi +++ b/stubs/corus/corus/sources/taiga/magazines.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - __all__ = ["load_taiga_magazines_metas", "load_taiga_magazines"] def load_taiga_magazines_metas(path, offset: int = 0, count: int = 36): ... -def load_taiga_magazines(path, metas: Incomplete | None = None, offset: int = 7292416, count: int = 39890): ... +def load_taiga_magazines(path, metas=None, offset: int = 7292416, count: int = 39890): ... diff --git a/stubs/corus/corus/sources/taiga/nplus1.pyi b/stubs/corus/corus/sources/taiga/nplus1.pyi index b83240f2a600..7e89a05dd643 100644 --- a/stubs/corus/corus/sources/taiga/nplus1.pyi +++ b/stubs/corus/corus/sources/taiga/nplus1.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - __all__ = ["load_taiga_nplus1_metas", "load_taiga_nplus1"] def load_taiga_nplus1_metas(path, offset: int = 0, count: int = 1): ... 
-def load_taiga_nplus1(path, metas: Incomplete | None = None, offset: int = 1919488, count: int = 7696): ... +def load_taiga_nplus1(path, metas=None, offset: int = 1919488, count: int = 7696): ... diff --git a/stubs/corus/corus/sources/taiga/proza.pyi b/stubs/corus/corus/sources/taiga/proza.pyi index 2e584686b345..3389ea294cf6 100644 --- a/stubs/corus/corus/sources/taiga/proza.pyi +++ b/stubs/corus/corus/sources/taiga/proza.pyi @@ -1,8 +1,6 @@ -from _typeshed import Incomplete - __all__ = ["load_taiga_proza_metas", "load_taiga_proza", "load_taiga_stihi_metas", "load_taiga_stihi"] def load_taiga_proza_metas(path, offset: int = 0, count=13): ... def load_taiga_stihi_metas(path, offset: int = 0, count=3): ... -def load_taiga_proza(path, metas: Incomplete | None = None, offset: int = ..., count: int = ...): ... -def load_taiga_stihi(path, metas: Incomplete | None = None, offset: int = ..., count: int = ...): ... +def load_taiga_proza(path, metas=None, offset: int = ..., count: int = ...): ... +def load_taiga_stihi(path, metas=None, offset: int = ..., count: int = ...): ... diff --git a/stubs/corus/corus/sources/taiga/subtitles.pyi b/stubs/corus/corus/sources/taiga/subtitles.pyi index b2066c0d5ab9..356c8dcd6c32 100644 --- a/stubs/corus/corus/sources/taiga/subtitles.pyi +++ b/stubs/corus/corus/sources/taiga/subtitles.pyi @@ -3,4 +3,4 @@ from collections.abc import Generator def parse_metas(items) -> Generator[Incomplete]: ... def load_taiga_subtitles_metas(path, offset: int = 0, count: int = 1): ... -def load_taiga_subtitles(path, metas: Incomplete | None = None, offset: int = 2113024, count: int = 19011): ... +def load_taiga_subtitles(path, metas=None, offset: int = 2113024, count: int = 19011): ... diff --git a/stubs/corus/corus/third/WikiExtractor.pyi b/stubs/corus/corus/third/WikiExtractor.pyi index bda32a5d62a1..78053703104c 100644 --- a/stubs/corus/corus/third/WikiExtractor.pyi +++ b/stubs/corus/corus/third/WikiExtractor.pyi @@ -73,7 +73,7 @@ class Frame: args: Incomplete prev: Incomplete depth: Incomplete - def __init__(self, title: str = "", args=[], prev: Incomplete | None = None) -> None: ... + def __init__(self, title: str = "", args=[], prev=None) -> None: ... def push(self, title, args): ... def pop(self): ... @@ -147,9 +147,9 @@ class Infix: ROUND: Incomplete def sharp_expr(extr, expr): ... -def sharp_if(extr, testValue, valueIfTrue, valueIfFalse: Incomplete | None = None, *args): ... -def sharp_ifeq(extr, lvalue, rvalue, valueIfTrue, valueIfFalse: Incomplete | None = None, *args): ... -def sharp_iferror(extr, test, then: str = "", Else: Incomplete | None = None, *args): ... +def sharp_if(extr, testValue, valueIfTrue, valueIfFalse=None, *args): ... +def sharp_ifeq(extr, lvalue, rvalue, valueIfTrue, valueIfFalse=None, *args): ... +def sharp_iferror(extr, test, then: str = "", Else=None, *args): ... def sharp_switch(extr, primary, *params): ... def sharp_invoke(module, function, args): ... @@ -210,16 +210,14 @@ tagRE: Incomplete keyRE: Incomplete catRE: Incomplete -def load_templates(file, output_file: Incomplete | None = None) -> None: ... +def load_templates(file, output_file=None) -> None: ... def pages_from(input) -> Generator[Incomplete]: ... def process_dump(input_file, template_file, out_file, file_size, file_compress, process_count) -> None: ... def extract_process(opts, i, jobs_queue, output_queue) -> None: ... 
report_period: int -def reduce_process( - opts, output_queue, spool_length, out_file: Incomplete | None = None, file_size: int = 0, file_compress: bool = True -) -> None: ... +def reduce_process(opts, output_queue, spool_length, out_file=None, file_size: int = 0, file_compress: bool = True) -> None: ... minFileSize: Incomplete diff --git a/stubs/dateparser/dateparser/freshness_date_parser.pyi b/stubs/dateparser/dateparser/freshness_date_parser.pyi index 93df25f3bafe..f33996d9e23e 100644 --- a/stubs/dateparser/dateparser/freshness_date_parser.pyi +++ b/stubs/dateparser/dateparser/freshness_date_parser.pyi @@ -11,6 +11,6 @@ class FreshnessDateDataParser: def get_local_tz(self) -> ZoneInfo: ... def parse(self, date_string: str, settings) -> tuple[Incomplete | None, str | None]: ... def get_kwargs(self, date_string: str) -> dict[str, float]: ... - def get_date_data(self, date_string: str, settings: Incomplete | None = None) -> DateData: ... + def get_date_data(self, date_string: str, settings=None) -> DateData: ... freshness_date_parser: FreshnessDateDataParser diff --git a/stubs/dateparser/dateparser/languages/dictionary.pyi b/stubs/dateparser/dateparser/languages/dictionary.pyi index 74211e657a23..57c520d5fa0f 100644 --- a/stubs/dateparser/dateparser/languages/dictionary.pyi +++ b/stubs/dateparser/dateparser/languages/dictionary.pyi @@ -14,7 +14,7 @@ class UnknownTokenError(Exception): ... class Dictionary: info: Any - def __init__(self, locale_info: dict[str, Incomplete], settings: Incomplete | None = None) -> None: ... + def __init__(self, locale_info: dict[str, Incomplete], settings=None) -> None: ... def __contains__(self, key): ... def __getitem__(self, key): ... def __iter__(self) -> Any: ... @@ -25,4 +25,4 @@ class Dictionary: def split(self, string: str, keep_formatting: bool = False) -> list[str]: ... class NormalizedDictionary(Dictionary): - def __init__(self, locale_info: dict[str, Incomplete], settings: Incomplete | None = None) -> None: ... + def __init__(self, locale_info: dict[str, Incomplete], settings=None) -> None: ... diff --git a/stubs/dateparser/dateparser/search/detection.pyi b/stubs/dateparser/dateparser/search/detection.pyi index 499ca50b96e8..5e741cf04db6 100644 --- a/stubs/dateparser/dateparser/search/detection.pyi +++ b/stubs/dateparser/dateparser/search/detection.pyi @@ -1,18 +1,17 @@ -from _typeshed import Incomplete from typing import Any class BaseLanguageDetector: languages: Any def __init__(self, languages) -> None: ... - def iterate_applicable_languages(self, date_string, modify: bool = False, settings: Incomplete | None = None) -> None: ... + def iterate_applicable_languages(self, date_string, modify: bool = False, settings=None) -> None: ... class AutoDetectLanguage(BaseLanguageDetector): language_pool: Any allow_redetection: Any def __init__(self, languages, allow_redetection: bool = False) -> None: ... languages: Any - def iterate_applicable_languages(self, date_string, modify: bool = False, settings: Incomplete | None = None) -> None: ... + def iterate_applicable_languages(self, date_string, modify: bool = False, settings=None) -> None: ... class ExactLanguages(BaseLanguageDetector): def __init__(self, languages) -> None: ... - def iterate_applicable_languages(self, date_string, modify: bool = False, settings: Incomplete | None = None) -> None: ... + def iterate_applicable_languages(self, date_string, modify: bool = False, settings=None) -> None: ... 
diff --git a/stubs/dateparser/dateparser/search/search.pyi b/stubs/dateparser/dateparser/search/search.pyi index 7c96046a44e2..aeb8b863bc75 100644 --- a/stubs/dateparser/dateparser/search/search.pyi +++ b/stubs/dateparser/dateparser/search/search.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from ..date import _DetectLanguagesFunction @@ -29,16 +28,8 @@ class DateSearchWithDetection: def __init__(self) -> None: ... language_detector: Any def detect_language( - self, - text, - languages, - settings: Incomplete | None = None, - detect_languages_function: _DetectLanguagesFunction | None = None, + self, text, languages, settings=None, detect_languages_function: _DetectLanguagesFunction | None = None ): ... def search_dates( - self, - text, - languages: Incomplete | None = None, - settings: Incomplete | None = None, - detect_languages_function: _DetectLanguagesFunction | None = None, + self, text, languages=None, settings=None, detect_languages_function: _DetectLanguagesFunction | None = None ): ... diff --git a/stubs/dateparser/dateparser/utils/__init__.pyi b/stubs/dateparser/dateparser/utils/__init__.pyi index 573e5f1d1957..35555d510786 100644 --- a/stubs/dateparser/dateparser/utils/__init__.pyi +++ b/stubs/dateparser/dateparser/utils/__init__.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections import OrderedDict from collections.abc import Mapping from logging import Logger @@ -18,7 +17,7 @@ def apply_timezone_from_settings(date_obj, settings): ... def get_last_day_of_month(year, month): ... def get_previous_leap_year(year): ... def get_next_leap_year(year): ... -def set_correct_day_from_settings(date_obj, settings, current_day: Incomplete | None = None): ... +def set_correct_day_from_settings(date_obj, settings, current_day=None): ... def set_correct_month_from_settings(date_obj, settings, current_month=None): ... def registry(cls): ... def get_logger() -> Logger: ... diff --git a/stubs/defusedxml/defusedxml/ElementTree.pyi b/stubs/defusedxml/defusedxml/ElementTree.pyi index a041469d7c94..6328863d59b0 100644 --- a/stubs/defusedxml/defusedxml/ElementTree.pyi +++ b/stubs/defusedxml/defusedxml/ElementTree.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Iterator, Sequence from typing import Any from xml.etree.ElementTree import Element, ElementTree, ParseError as ParseError, XMLParser as _XMLParser, tostring as tostring @@ -10,7 +9,7 @@ class DefusedXMLParser(_XMLParser): def __init__( self, html=..., - target: Incomplete | None = None, + target=None, encoding: str | None = None, forbid_dtd: bool = False, forbid_entities: bool = True, diff --git a/stubs/defusedxml/defusedxml/minidom.pyi b/stubs/defusedxml/defusedxml/minidom.pyi index 974d1a93aa7d..312cbe8c518e 100644 --- a/stubs/defusedxml/defusedxml/minidom.pyi +++ b/stubs/defusedxml/defusedxml/minidom.pyi @@ -1,20 +1,15 @@ -from _typeshed import Incomplete from xml.dom.minidom import Document __origin__: str def parse( file, - parser: Incomplete | None = None, + parser=None, bufsize: int | None = None, forbid_dtd: bool = False, forbid_entities: bool = True, forbid_external: bool = True, ) -> Document: ... def parseString( - string: str, - parser: Incomplete | None = None, - forbid_dtd: bool = False, - forbid_entities: bool = True, - forbid_external: bool = True, + string: str, parser=None, forbid_dtd: bool = False, forbid_entities: bool = True, forbid_external: bool = True ) -> Document: ... 
diff --git a/stubs/docker/docker/api/config.pyi b/stubs/docker/docker/api/config.pyi index 688fd463ffdd..209a0762c53c 100644 --- a/stubs/docker/docker/api/config.pyi +++ b/stubs/docker/docker/api/config.pyi @@ -1,7 +1,5 @@ -from _typeshed import Incomplete - class ConfigApiMixin: - def create_config(self, name, data, labels: Incomplete | None = None, templating: Incomplete | None = None): ... + def create_config(self, name, data, labels=None, templating=None): ... def inspect_config(self, id): ... def remove_config(self, id): ... - def configs(self, filters: Incomplete | None = None): ... + def configs(self, filters=None): ... diff --git a/stubs/docker/docker/api/container.pyi b/stubs/docker/docker/api/container.pyi index d2a58261853f..a44bd63fb4ee 100644 --- a/stubs/docker/docker/api/container.pyi +++ b/stubs/docker/docker/api/container.pyi @@ -28,17 +28,17 @@ class ContainerApiMixin: logs: bool = False, demux: bool = False, ): ... - def attach_socket(self, container: _Container, params: Incomplete | None = None, ws: bool = False): ... + def attach_socket(self, container: _Container, params=None, ws: bool = False): ... def commit( self, container: _Container, repository: str | None = None, tag: str | None = None, - message: Incomplete | None = None, - author: Incomplete | None = None, + message=None, + author=None, pause: bool = True, - changes: Incomplete | None = None, - conf: Incomplete | None = None, + changes=None, + conf=None, ): ... def containers( self, @@ -50,7 +50,7 @@ class ContainerApiMixin: before: str | None = None, limit: int = -1, size: bool = False, - filters: Incomplete | None = None, + filters=None, ): ... def create_container( self, @@ -71,19 +71,19 @@ class ContainerApiMixin: entrypoint: str | list[str] | None = None, working_dir: str | None = None, domainname: str | None = None, - host_config: Incomplete | None = None, + host_config=None, mac_address: str | None = None, labels: dict[str, str] | list[str] | None = None, stop_signal: str | None = None, - networking_config: Incomplete | None = None, - healthcheck: Incomplete | None = None, + networking_config=None, + healthcheck=None, stop_timeout: int | None = None, runtime: str | None = None, use_config_proxy: bool = True, platform: str | None = None, ): ... def create_container_config(self, *args, **kwargs) -> ContainerConfig: ... - def create_container_from_config(self, config, name: Incomplete | None = None, platform: Incomplete | None = None): ... + def create_container_from_config(self, config, name=None, platform=None): ... def create_host_config(self, *args, **kwargs) -> HostConfig: ... def create_networking_config(self, *args, **kwargs) -> NetworkingConfig: ... def create_endpoint_config(self, *args, **kwargs) -> EndpointConfig: ... @@ -137,7 +137,7 @@ class ContainerApiMixin: def pause(self, container: _Container) -> None: ... def port(self, container: _Container, private_port: int): ... def put_archive(self, container: _Container, path: str, data) -> bool: ... - def prune_containers(self, filters: Incomplete | None = None): ... + def prune_containers(self, filters=None): ... def remove_container(self, container: _Container, v: bool = False, link: bool = False, force: bool = False) -> None: ... def rename(self, container: _Container, name: str) -> None: ... def resize(self, container: _Container, height: int, width: int) -> None: ... 
@@ -160,7 +160,7 @@ class ContainerApiMixin: mem_reservation: float | str | None = None, memswap_limit: int | str | None = None, kernel_memory: int | str | None = None, - restart_policy: Incomplete | None = None, + restart_policy=None, ): ... def wait( self, diff --git a/stubs/docker/docker/api/exec_api.pyi b/stubs/docker/docker/api/exec_api.pyi index 0f578d2d5977..60a3e4682c35 100644 --- a/stubs/docker/docker/api/exec_api.pyi +++ b/stubs/docker/docker/api/exec_api.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - class ExecApiMixin: def exec_create( self, @@ -11,12 +9,12 @@ class ExecApiMixin: tty: bool = False, privileged: bool = False, user: str = "", - environment: Incomplete | None = None, - workdir: Incomplete | None = None, - detach_keys: Incomplete | None = None, + environment=None, + workdir=None, + detach_keys=None, ): ... def exec_inspect(self, exec_id): ... - def exec_resize(self, exec_id, height: Incomplete | None = None, width: Incomplete | None = None) -> None: ... + def exec_resize(self, exec_id, height=None, width=None) -> None: ... def exec_start( self, exec_id, detach: bool = False, tty: bool = False, stream: bool = False, socket: bool = False, demux: bool = False ): ... diff --git a/stubs/docker/docker/api/image.pyi b/stubs/docker/docker/api/image.pyi index f03dc7cab47a..92d765fa41a8 100644 --- a/stubs/docker/docker/api/image.pyi +++ b/stubs/docker/docker/api/image.pyi @@ -6,35 +6,25 @@ log: Incomplete class ImageApiMixin: def get_image(self, image: str, chunk_size: int | None = 2097152): ... def history(self, image): ... - def images(self, name: str | None = None, quiet: bool = False, all: bool = False, filters: Incomplete | None = None): ... + def images(self, name: str | None = None, quiet: bool = False, all: bool = False, filters=None): ... def import_image( self, - src: Incomplete | None = None, + src=None, repository: str | None = None, tag: str | None = None, image: str | None = None, - changes: Incomplete | None = None, + changes=None, stream_src: bool = False, ): ... - def import_image_from_data( - self, data, repository: str | None = None, tag: str | None = None, changes: Incomplete | None = None - ): ... - def import_image_from_file( - self, filename: str, repository: str | None = None, tag: str | None = None, changes: Incomplete | None = None - ): ... - def import_image_from_stream( - self, stream, repository: str | None = None, tag: str | None = None, changes: Incomplete | None = None - ): ... - def import_image_from_url( - self, url, repository: str | None = None, tag: str | None = None, changes: Incomplete | None = None - ): ... - def import_image_from_image( - self, image, repository: str | None = None, tag: str | None = None, changes: Incomplete | None = None - ): ... + def import_image_from_data(self, data, repository: str | None = None, tag: str | None = None, changes=None): ... + def import_image_from_file(self, filename: str, repository: str | None = None, tag: str | None = None, changes=None): ... + def import_image_from_stream(self, stream, repository: str | None = None, tag: str | None = None, changes=None): ... + def import_image_from_url(self, url, repository: str | None = None, tag: str | None = None, changes=None): ... + def import_image_from_image(self, image, repository: str | None = None, tag: str | None = None, changes=None): ... def inspect_image(self, image): ... - def inspect_distribution(self, image, auth_config: Incomplete | None = None): ... - def load_image(self, data, quiet: Incomplete | None = None): ... 
- def prune_images(self, filters: Incomplete | None = None): ... + def inspect_distribution(self, image, auth_config=None): ... + def load_image(self, data, quiet=None): ... + def prune_images(self, filters=None): ... def pull( self, repository: str, @@ -45,14 +35,7 @@ class ImageApiMixin: platform: str | None = None, all_tags: bool = False, ): ... - def push( - self, - repository: str, - tag: str | None = None, - stream: bool = False, - auth_config: Incomplete | None = None, - decode: bool = False, - ): ... + def push(self, repository: str, tag: str | None = None, stream: bool = False, auth_config=None, decode: bool = False): ... def remove_image(self, image: str, force: bool = False, noprune: bool = False): ... def search(self, term: str, limit: int | None = None): ... def tag(self, image, repository, tag: str | None = None, force: bool = False): ... diff --git a/stubs/docker/docker/api/network.pyi b/stubs/docker/docker/api/network.pyi index 7f20d8707256..1441ff496c6d 100644 --- a/stubs/docker/docker/api/network.pyi +++ b/stubs/docker/docker/api/network.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Iterable from typing import Any, Literal, TypedDict, type_check_only from typing_extensions import TypeAlias @@ -17,7 +16,7 @@ _Network: TypeAlias = _HasId | _HasID | str _Container: TypeAlias = _HasId | _HasID | str class NetworkApiMixin: - def networks(self, names: Incomplete | None = None, ids: Incomplete | None = None, filters: Incomplete | None = None): ... + def networks(self, names=None, ids=None, filters=None): ... def create_network( self, name: str, @@ -32,7 +31,7 @@ class NetworkApiMixin: scope: Literal["local", "global", "swarm"] | None = None, ingress: bool | None = None, ) -> dict[str, str]: ... - def prune_networks(self, filters: Incomplete | None = None): ... + def prune_networks(self, filters=None): ... def remove_network(self, net_id: _Network) -> None: ... def inspect_network( self, net_id: _Network, verbose: bool | None = None, scope: Literal["local", "global", "swarm"] | None = None @@ -41,12 +40,12 @@ class NetworkApiMixin: self, container: _Container, net_id: str, - ipv4_address: Incomplete | None = None, - ipv6_address: Incomplete | None = None, - aliases: Incomplete | None = None, + ipv4_address=None, + ipv6_address=None, + aliases=None, links: dict[str, str] | dict[str, None] | dict[str, str | None] | Iterable[tuple[str, str | None]] | None = None, - link_local_ips: Incomplete | None = None, - driver_opt: Incomplete | None = None, - mac_address: Incomplete | None = None, + link_local_ips=None, + driver_opt=None, + mac_address=None, ) -> None: ... def disconnect_container_from_network(self, container: _Container, net_id: str, force: bool = False) -> None: ... diff --git a/stubs/docker/docker/api/plugin.pyi b/stubs/docker/docker/api/plugin.pyi index d75818ae14ad..2b8e21968b88 100644 --- a/stubs/docker/docker/api/plugin.pyi +++ b/stubs/docker/docker/api/plugin.pyi @@ -1,12 +1,10 @@ -from _typeshed import Incomplete - class PluginApiMixin: def configure_plugin(self, name, options): ... def create_plugin(self, name, plugin_data_dir, gzip: bool = False): ... def disable_plugin(self, name, force: bool = False): ... def enable_plugin(self, name, timeout: int = 0): ... def inspect_plugin(self, name): ... - def pull_plugin(self, remote, privileges, name: Incomplete | None = None): ... + def pull_plugin(self, remote, privileges, name=None): ... def plugins(self): ... def plugin_privileges(self, name): ... def push_plugin(self, name): ... 
diff --git a/stubs/docker/docker/api/service.pyi b/stubs/docker/docker/api/service.pyi index 8484722ae722..cf86afc7ba6f 100644 --- a/stubs/docker/docker/api/service.pyi +++ b/stubs/docker/docker/api/service.pyi @@ -1,22 +1,20 @@ -from _typeshed import Incomplete - class ServiceApiMixin: def create_service( self, task_template, - name: Incomplete | None = None, - labels: Incomplete | None = None, - mode: Incomplete | None = None, - update_config: Incomplete | None = None, - networks: Incomplete | None = None, - endpoint_config: Incomplete | None = None, - endpoint_spec: Incomplete | None = None, - rollback_config: Incomplete | None = None, + name=None, + labels=None, + mode=None, + update_config=None, + networks=None, + endpoint_config=None, + endpoint_spec=None, + rollback_config=None, ): ... - def inspect_service(self, service, insert_defaults: Incomplete | None = None): ... + def inspect_service(self, service, insert_defaults=None): ... def inspect_task(self, task): ... def remove_service(self, service): ... - def services(self, filters: Incomplete | None = None, status: Incomplete | None = None): ... + def services(self, filters=None, status=None): ... def service_logs( self, service, @@ -27,21 +25,21 @@ class ServiceApiMixin: since: int = 0, timestamps: bool = False, tail: str = "all", - is_tty: Incomplete | None = None, + is_tty=None, ): ... - def tasks(self, filters: Incomplete | None = None): ... + def tasks(self, filters=None): ... def update_service( self, service, version, - task_template: Incomplete | None = None, - name: Incomplete | None = None, - labels: Incomplete | None = None, - mode: Incomplete | None = None, - update_config: Incomplete | None = None, - networks: Incomplete | None = None, - endpoint_config: Incomplete | None = None, - endpoint_spec: Incomplete | None = None, + task_template=None, + name=None, + labels=None, + mode=None, + update_config=None, + networks=None, + endpoint_config=None, + endpoint_spec=None, fetch_current_spec: bool = False, - rollback_config: Incomplete | None = None, + rollback_config=None, ): ... diff --git a/stubs/docker/docker/api/swarm.pyi b/stubs/docker/docker/api/swarm.pyi index 55111d09ae15..b95dc627c5cc 100644 --- a/stubs/docker/docker/api/swarm.pyi +++ b/stubs/docker/docker/api/swarm.pyi @@ -7,34 +7,29 @@ class SwarmApiMixin: def get_unlock_key(self): ... def init_swarm( self, - advertise_addr: Incomplete | None = None, + advertise_addr=None, listen_addr: str = "0.0.0.0:2377", force_new_cluster: bool = False, - swarm_spec: Incomplete | None = None, - default_addr_pool: Incomplete | None = None, - subnet_size: Incomplete | None = None, - data_path_addr: Incomplete | None = None, - data_path_port: Incomplete | None = None, + swarm_spec=None, + default_addr_pool=None, + subnet_size=None, + data_path_addr=None, + data_path_port=None, ): ... def inspect_swarm(self): ... def inspect_node(self, node_id): ... def join_swarm( - self, - remote_addrs, - join_token, - listen_addr: str = "0.0.0.0:2377", - advertise_addr: Incomplete | None = None, - data_path_addr: Incomplete | None = None, + self, remote_addrs, join_token, listen_addr: str = "0.0.0.0:2377", advertise_addr=None, data_path_addr=None ): ... def leave_swarm(self, force: bool = False): ... - def nodes(self, filters: Incomplete | None = None): ... + def nodes(self, filters=None): ... def remove_node(self, node_id, force: bool = False): ... def unlock_swarm(self, key): ... - def update_node(self, node_id, version, node_spec: Incomplete | None = None): ... 
+ def update_node(self, node_id, version, node_spec=None): ... def update_swarm( self, version, - swarm_spec: Incomplete | None = None, + swarm_spec=None, rotate_worker_token: bool = False, rotate_manager_token: bool = False, rotate_manager_unlock_key: bool = False, diff --git a/stubs/docker/docker/auth.pyi b/stubs/docker/docker/auth.pyi index c5edb95d7dc5..48d3c5c2cbd6 100644 --- a/stubs/docker/docker/auth.pyi +++ b/stubs/docker/docker/auth.pyi @@ -16,17 +16,14 @@ def split_repo_name(repo_name: str) -> tuple[str, str]: ... def get_credential_store(authconfig: AuthConfig | MutableMapping[str, Incomplete], registry: str | None): ... class AuthConfig(dict[str, Incomplete]): - def __init__(self, dct: MutableMapping[str, Incomplete], credstore_env: Incomplete | None = None) -> None: ... + def __init__(self, dct: MutableMapping[str, Incomplete], credstore_env=None) -> None: ... @classmethod def parse_auth( cls, entries: Mapping[str, dict[Incomplete, Incomplete]], raise_on_error: bool = False ) -> dict[str, Incomplete]: ... @classmethod def load_config( - cls, - config_path: FileDescriptorOrPath | None, - config_dict: dict[str, Incomplete] | None, - credstore_env: Incomplete | None = None, + cls, config_path: FileDescriptorOrPath | None, config_dict: dict[str, Incomplete] | None, credstore_env=None ) -> Self: ... @property def auths(self) -> dict[str, Incomplete]: ... @@ -41,13 +38,11 @@ class AuthConfig(dict[str, Incomplete]): def get_all_credentials(self): ... def add_auth(self, reg: str, data) -> None: ... -def resolve_authconfig(authconfig, registry: str | None = None, credstore_env: Incomplete | None = None): ... +def resolve_authconfig(authconfig, registry: str | None = None, credstore_env=None): ... def convert_to_hostname(url: str) -> str: ... def decode_auth(auth: str | ReadableBuffer) -> tuple[str, str]: ... def encode_header(auth) -> bytes: ... def parse_auth(entries: Mapping[str, dict[Incomplete, Incomplete]], raise_on_error: bool = False): ... def load_config( - config_path: FileDescriptorOrPath | None = None, - config_dict: dict[str, Incomplete] | None = None, - credstore_env: Incomplete | None = None, + config_path: FileDescriptorOrPath | None = None, config_dict: dict[str, Incomplete] | None = None, credstore_env=None ) -> AuthConfig: ... diff --git a/stubs/docker/docker/credentials/store.pyi b/stubs/docker/docker/credentials/store.pyi index c98be5bd8c21..c4e708853d4d 100644 --- a/stubs/docker/docker/credentials/store.pyi +++ b/stubs/docker/docker/credentials/store.pyi @@ -4,7 +4,7 @@ class Store: program: Incomplete exe: Incomplete environment: Incomplete - def __init__(self, program, environment: Incomplete | None = None) -> None: ... + def __init__(self, program, environment=None) -> None: ... def get(self, server): ... def store(self, server, username, secret): ... def erase(self, server) -> None: ... diff --git a/stubs/docker/docker/models/containers.pyi b/stubs/docker/docker/models/containers.pyi index 3224f5844cfd..6f5fe645befe 100644 --- a/stubs/docker/docker/models/containers.pyi +++ b/stubs/docker/docker/models/containers.pyi @@ -46,15 +46,15 @@ class Container(Model): detach: bool = False, stream: bool = False, socket: bool = False, - environment: Incomplete | None = None, - workdir: Incomplete | None = None, + environment=None, + workdir=None, demux: bool = False, ) -> ExecResult: ... def export(self, chunk_size: int | None = 2097152) -> str: ... 
def get_archive( self, path: str, chunk_size: int | None = 2097152, encode_stream: bool = False ) -> tuple[Incomplete, Incomplete]: ... - def kill(self, signal: Incomplete | None = None): ... + def kill(self, signal=None): ... @overload def logs( self, @@ -141,9 +141,9 @@ class ContainerCollection(Collection[Container]): detach: Literal[False] = False, device_cgroup_rules: list[Incomplete] | None = None, device_read_bps: list[Incomplete] | None = None, - device_read_iops: Incomplete | None = None, - device_write_bps: Incomplete | None = None, - device_write_iops: Incomplete | None = None, + device_read_iops=None, + device_write_bps=None, + device_write_iops=None, devices: list[str] | None = None, device_requests: list[DeviceRequest] | None = None, dns: list[Incomplete] | None = None, @@ -236,9 +236,9 @@ class ContainerCollection(Collection[Container]): detach: Literal[True], device_cgroup_rules: list[Incomplete] | None = None, device_read_bps: list[Incomplete] | None = None, - device_read_iops: Incomplete | None = None, - device_write_bps: Incomplete | None = None, - device_write_iops: Incomplete | None = None, + device_read_iops=None, + device_write_bps=None, + device_write_iops=None, devices: list[str] | None = None, device_requests: list[DeviceRequest] | None = None, dns: list[Incomplete] | None = None, @@ -327,9 +327,9 @@ class ContainerCollection(Collection[Container]): detach: Literal[True], device_cgroup_rules: list[Incomplete] | None = None, device_read_bps: list[Incomplete] | None = None, - device_read_iops: Incomplete | None = None, - device_write_bps: Incomplete | None = None, - device_write_iops: Incomplete | None = None, + device_read_iops=None, + device_write_bps=None, + device_write_iops=None, devices: list[str] | None = None, device_requests: list[DeviceRequest] | None = None, dns: list[Incomplete] | None = None, @@ -399,13 +399,13 @@ class ContainerCollection(Collection[Container]): self, all: bool = False, before: str | None = None, - filters: Incomplete | None = None, + filters=None, limit: int = -1, since: str | None = None, sparse: bool = False, ignore_removed: bool = False, ): ... - def prune(self, filters: Incomplete | None = None): ... + def prune(self, filters=None): ... RUN_CREATE_KWARGS: list[str] RUN_HOST_CONFIG_KWARGS: list[str] diff --git a/stubs/docker/docker/models/plugins.pyi b/stubs/docker/docker/models/plugins.pyi index aeb85eef7f00..92b7da67e199 100644 --- a/stubs/docker/docker/models/plugins.pyi +++ b/stubs/docker/docker/models/plugins.pyi @@ -15,11 +15,11 @@ class Plugin(Model): def enable(self, timeout: int = 0) -> None: ... def push(self): ... def remove(self, force: bool = False): ... - def upgrade(self, remote: Incomplete | None = None) -> Generator[Incomplete, Incomplete, None]: ... + def upgrade(self, remote=None) -> Generator[Incomplete, Incomplete, None]: ... class PluginCollection(Collection[Plugin]): model: type[Plugin] def create(self, name, plugin_data_dir, gzip: bool = False): ... # type:ignore[override] def get(self, name): ... - def install(self, remote_name, local_name: Incomplete | None = None): ... + def install(self, remote_name, local_name=None): ... def list(self): ... diff --git a/stubs/docker/docker/models/services.pyi b/stubs/docker/docker/models/services.pyi index fe22b9bba0ae..c20b9e4cc13d 100644 --- a/stubs/docker/docker/models/services.pyi +++ b/stubs/docker/docker/models/services.pyi @@ -9,7 +9,7 @@ class Service(Model): @property def version(self): ... def remove(self): ... 
- def tasks(self, filters: Incomplete | None = None): ... + def tasks(self, filters=None): ... def update(self, **kwargs): ... def logs(self, **kwargs): ... def scale(self, replicas): ... @@ -17,8 +17,8 @@ class Service(Model): class ServiceCollection(Collection[Service]): model: type[Service] - def create(self, image, command: Incomplete | None = None, **kwargs): ... # type:ignore[override] - def get(self, service_id, insert_defaults: Incomplete | None = None): ... + def create(self, image, command=None, **kwargs): ... # type:ignore[override] + def get(self, service_id, insert_defaults=None): ... def list(self, **kwargs): ... CONTAINER_SPEC_KWARGS: Incomplete diff --git a/stubs/docker/docker/transport/npipeconn.pyi b/stubs/docker/docker/transport/npipeconn.pyi index ab6300e403e1..1b4bd8758dda 100644 --- a/stubs/docker/docker/transport/npipeconn.pyi +++ b/stubs/docker/docker/transport/npipeconn.pyi @@ -25,5 +25,5 @@ class NpipeHTTPAdapter(BaseHTTPAdapter): max_pool_size: Incomplete pools: Incomplete def __init__(self, base_url, timeout: int = 60, pool_connections=..., max_pool_size=...) -> None: ... - def get_connection(self, url, proxies: Incomplete | None = None): ... + def get_connection(self, url, proxies=None): ... def request_url(self, request, proxies): ... diff --git a/stubs/docker/docker/transport/npipesocket.pyi b/stubs/docker/docker/transport/npipesocket.pyi index 8227daae24bb..01fc54e37368 100644 --- a/stubs/docker/docker/transport/npipesocket.pyi +++ b/stubs/docker/docker/transport/npipesocket.pyi @@ -9,7 +9,7 @@ MAXIMUM_RETRY_COUNT: int def check_closed(f): ... class NpipeSocket: - def __init__(self, handle: Incomplete | None = None) -> None: ... + def __init__(self, handle=None) -> None: ... def accept(self) -> None: ... def bind(self, address) -> None: ... def close(self) -> None: ... @@ -20,10 +20,10 @@ class NpipeSocket: def dup(self): ... def getpeername(self): ... def getsockname(self): ... - def getsockopt(self, level, optname, buflen: Incomplete | None = None) -> None: ... + def getsockopt(self, level, optname, buflen=None) -> None: ... def ioctl(self, control, option) -> None: ... def listen(self, backlog) -> None: ... - def makefile(self, mode: Incomplete | None = None, bufsize: Incomplete | None = None): ... + def makefile(self, mode=None, bufsize=None): ... def recv(self, bufsize, flags: int = 0): ... def recvfrom(self, bufsize, flags: int = 0): ... def recvfrom_into(self, buf, nbytes: int = 0, flags: int = 0): ... diff --git a/stubs/docker/docker/transport/sshconn.pyi b/stubs/docker/docker/transport/sshconn.pyi index 42d703e778a5..467bc42f45f9 100644 --- a/stubs/docker/docker/transport/sshconn.pyi +++ b/stubs/docker/docker/transport/sshconn.pyi @@ -24,7 +24,7 @@ class SSHConnection(urllib3.connection.HTTPConnection): ssh_transport: Incomplete timeout: Incomplete ssh_host: Incomplete - def __init__(self, ssh_transport: Incomplete | None = None, timeout: int = 60, host: Incomplete | None = None) -> None: ... + def __init__(self, ssh_transport=None, timeout: int = 60, host=None) -> None: ... sock: Incomplete def connect(self) -> None: ... @@ -33,9 +33,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): ssh_transport: Incomplete timeout: Incomplete ssh_host: Incomplete - def __init__( - self, ssh_client: Incomplete | None = None, timeout: int = 60, maxsize: int = 10, host: Incomplete | None = None - ) -> None: ... + def __init__(self, ssh_client=None, timeout: int = 60, maxsize: int = 10, host=None) -> None: ... 
class SSHHTTPAdapter(BaseHTTPAdapter): __attrs__: Incomplete @@ -45,5 +43,5 @@ class SSHHTTPAdapter(BaseHTTPAdapter): max_pool_size: Incomplete pools: Incomplete def __init__(self, base_url, timeout: int = 60, pool_connections=..., max_pool_size=..., shell_out: bool = False) -> None: ... - def get_connection(self, url, proxies: Incomplete | None = None): ... + def get_connection(self, url, proxies=None): ... def close(self) -> None: ... diff --git a/stubs/docker/docker/transport/unixconn.pyi b/stubs/docker/docker/transport/unixconn.pyi index 47042570087a..7ca8d338df57 100644 --- a/stubs/docker/docker/transport/unixconn.pyi +++ b/stubs/docker/docker/transport/unixconn.pyi @@ -27,5 +27,5 @@ class UnixHTTPAdapter(BaseHTTPAdapter): max_pool_size: Incomplete pools: Incomplete def __init__(self, socket_url, timeout: int = 60, pool_connections=25, max_pool_size=10) -> None: ... - def get_connection(self, url, proxies: Incomplete | None = None): ... + def get_connection(self, url, proxies=None): ... def request_url(self, request, proxies): ... diff --git a/stubs/docker/docker/types/networks.pyi b/stubs/docker/docker/types/networks.pyi index fb64d7d9fe61..b3caf0209499 100644 --- a/stubs/docker/docker/types/networks.pyi +++ b/stubs/docker/docker/types/networks.pyi @@ -10,7 +10,7 @@ class EndpointConfig(dict[str, Incomplete]): ipv4_address: str | None = None, ipv6_address: str | None = None, link_local_ips: list[str] | None = None, - driver_opt: Incomplete | None = None, + driver_opt=None, mac_address: str | None = None, ) -> None: ... diff --git a/stubs/docker/docker/utils/build.pyi b/stubs/docker/docker/utils/build.pyi index 57100aec43b6..ec168530da14 100644 --- a/stubs/docker/docker/utils/build.pyi +++ b/stubs/docker/docker/utils/build.pyi @@ -1,5 +1,5 @@ import io -from _typeshed import Incomplete, StrOrBytesPath, StrPath +from _typeshed import StrOrBytesPath, StrPath from collections.abc import Generator, Iterable, MutableSequence from os import PathLike from tarfile import _Fileobj @@ -16,11 +16,7 @@ def tar( def exclude_paths(root: StrPath, patterns: MutableSequence[str], dockerfile: str | None = None) -> set[str]: ... def build_file_list(root: str) -> list[str]: ... def create_archive( - root: str, - files: Iterable[str] | None = None, - fileobj: _Fileobj | None = None, - gzip: bool = False, - extra_files: Incomplete | None = None, + root: str, files: Iterable[str] | None = None, fileobj: _Fileobj | None = None, gzip: bool = False, extra_files=None ) -> _TemporaryFileWrapper[bytes] | _Fileobj: ... def mkbuildcontext(dockerfile: io.IOBase | StrOrBytesPath) -> _TemporaryFileWrapper[bytes]: ... def split_path(p: str) -> list[str]: ... diff --git a/stubs/docutils/docutils/core.pyi b/stubs/docutils/docutils/core.pyi index 5e22026bed1e..bf9297b95bd7 100644 --- a/stubs/docutils/docutils/core.pyi +++ b/stubs/docutils/docutils/core.pyi @@ -16,56 +16,36 @@ class Publisher: settings: Incomplete def __init__( self, - reader: Incomplete | None = None, - parser: Incomplete | None = None, - writer: Incomplete | None = None, - source: Incomplete | None = None, + reader=None, + parser=None, + writer=None, + source=None, source_class=..., - destination: Incomplete | None = None, + destination=None, destination_class=..., - settings: Incomplete | None = None, + settings=None, ) -> None: ... def set_reader(self, reader_name, parser, parser_name) -> None: ... def set_writer(self, writer_name) -> None: ... def set_components(self, reader_name, parser_name, writer_name) -> None: ... 
- def setup_option_parser( - self, - usage: Incomplete | None = None, - description: Incomplete | None = None, - settings_spec: Incomplete | None = None, - config_section: Incomplete | None = None, - **defaults, - ): ... - def get_settings( - self, - usage: Incomplete | None = None, - description: Incomplete | None = None, - settings_spec: Incomplete | None = None, - config_section: Incomplete | None = None, - **defaults, - ): ... + def setup_option_parser(self, usage=None, description=None, settings_spec=None, config_section=None, **defaults): ... + def get_settings(self, usage=None, description=None, settings_spec=None, config_section=None, **defaults): ... def process_programmatic_settings(self, settings_spec, settings_overrides, config_section) -> None: ... def process_command_line( - self, - argv: Incomplete | None = None, - usage: Incomplete | None = None, - description: Incomplete | None = None, - settings_spec: Incomplete | None = None, - config_section: Incomplete | None = None, - **defaults, + self, argv=None, usage=None, description=None, settings_spec=None, config_section=None, **defaults ) -> None: ... - def set_io(self, source_path: Incomplete | None = None, destination_path: Incomplete | None = None) -> None: ... - def set_source(self, source: Incomplete | None = None, source_path: Incomplete | None = None) -> None: ... - def set_destination(self, destination: Incomplete | None = None, destination_path: Incomplete | None = None) -> None: ... + def set_io(self, source_path=None, destination_path=None) -> None: ... + def set_source(self, source=None, source_path=None) -> None: ... + def set_destination(self, destination=None, destination_path=None) -> None: ... def apply_transforms(self) -> None: ... def publish( self, - argv: Incomplete | None = None, - usage: Incomplete | None = None, - description: Incomplete | None = None, - settings_spec: Incomplete | None = None, - settings_overrides: Incomplete | None = None, - config_section: Incomplete | None = None, + argv=None, + usage=None, + description=None, + settings_spec=None, + settings_overrides=None, + config_section=None, enable_exit_status: bool = False, ): ... def debugging_dumps(self) -> None: ... @@ -78,112 +58,112 @@ default_usage: str default_description: str def publish_cmdline( - reader: Incomplete | None = None, + reader=None, reader_name: str = "standalone", - parser: Incomplete | None = None, + parser=None, parser_name: str = "restructuredtext", - writer: Incomplete | None = None, + writer=None, writer_name: str = "pseudoxml", - settings: Incomplete | None = None, - settings_spec: Incomplete | None = None, - settings_overrides: Incomplete | None = None, - config_section: Incomplete | None = None, + settings=None, + settings_spec=None, + settings_overrides=None, + config_section=None, enable_exit_status: bool = True, - argv: Incomplete | None = None, + argv=None, usage=..., description=..., ): ... 
def publish_file( - source: Incomplete | None = None, - source_path: Incomplete | None = None, - destination: Incomplete | None = None, - destination_path: Incomplete | None = None, - reader: Incomplete | None = None, + source=None, + source_path=None, + destination=None, + destination_path=None, + reader=None, reader_name: str = "standalone", - parser: Incomplete | None = None, + parser=None, parser_name: str = "restructuredtext", - writer: Incomplete | None = None, + writer=None, writer_name: str = "pseudoxml", - settings: Incomplete | None = None, - settings_spec: Incomplete | None = None, - settings_overrides: Incomplete | None = None, - config_section: Incomplete | None = None, + settings=None, + settings_spec=None, + settings_overrides=None, + config_section=None, enable_exit_status: bool = False, ): ... def publish_string( source, - source_path: Incomplete | None = None, - destination_path: Incomplete | None = None, - reader: Incomplete | None = None, + source_path=None, + destination_path=None, + reader=None, reader_name: str = "standalone", - parser: Incomplete | None = None, + parser=None, parser_name: str = "restructuredtext", - writer: Incomplete | None = None, + writer=None, writer_name: str = "pseudoxml", - settings: Incomplete | None = None, - settings_spec: Incomplete | None = None, - settings_overrides: Incomplete | None = None, - config_section: Incomplete | None = None, + settings=None, + settings_spec=None, + settings_overrides=None, + config_section=None, enable_exit_status: bool = False, ): ... def publish_parts( source, - source_path: Incomplete | None = None, + source_path=None, source_class=..., - destination_path: Incomplete | None = None, - reader: Incomplete | None = None, + destination_path=None, + reader=None, reader_name: str = "standalone", - parser: Incomplete | None = None, + parser=None, parser_name: str = "restructuredtext", - writer: Incomplete | None = None, + writer=None, writer_name: str = "pseudoxml", - settings: Incomplete | None = None, - settings_spec: Incomplete | None = None, - settings_overrides: Incomplete | None = None, - config_section: Incomplete | None = None, + settings=None, + settings_spec=None, + settings_overrides=None, + config_section=None, enable_exit_status: bool = False, ) -> _WriterParts: ... def publish_doctree( source, - source_path: Incomplete | None = None, + source_path=None, source_class=..., - reader: Incomplete | None = None, + reader=None, reader_name: str = "standalone", - parser: Incomplete | None = None, + parser=None, parser_name: str = "restructuredtext", - settings: Incomplete | None = None, - settings_spec: Incomplete | None = None, - settings_overrides: Incomplete | None = None, - config_section: Incomplete | None = None, + settings=None, + settings_spec=None, + settings_overrides=None, + config_section=None, enable_exit_status: bool = False, ): ... def publish_from_doctree( document, - destination_path: Incomplete | None = None, - writer: Incomplete | None = None, + destination_path=None, + writer=None, writer_name: str = "pseudoxml", - settings: Incomplete | None = None, - settings_spec: Incomplete | None = None, - settings_overrides: Incomplete | None = None, - config_section: Incomplete | None = None, + settings=None, + settings_spec=None, + settings_overrides=None, + config_section=None, enable_exit_status: bool = False, ): ... 
def publish_cmdline_to_binary( - reader: Incomplete | None = None, + reader=None, reader_name: str = "standalone", - parser: Incomplete | None = None, + parser=None, parser_name: str = "restructuredtext", - writer: Incomplete | None = None, + writer=None, writer_name: str = "pseudoxml", - settings: Incomplete | None = None, - settings_spec: Incomplete | None = None, - settings_overrides: Incomplete | None = None, - config_section: Incomplete | None = None, + settings=None, + settings_spec=None, + settings_overrides=None, + config_section=None, enable_exit_status: bool = True, - argv: Incomplete | None = None, + argv=None, usage=..., description=..., - destination: Incomplete | None = None, + destination=None, destination_class=..., ): ... def publish_programmatically( diff --git a/stubs/docutils/docutils/frontend.pyi b/stubs/docutils/docutils/frontend.pyi index fadb85ee5673..3b24a75a1442 100644 --- a/stubs/docutils/docutils/frontend.pyi +++ b/stubs/docutils/docutils/frontend.pyi @@ -11,87 +11,27 @@ __docformat__: str def store_multiple(option, opt, value, parser, *args, **kwargs) -> None: ... def read_config_file(option, opt, value, parser) -> None: ... -def validate_encoding( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, -): ... -def validate_encoding_error_handler( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, -): ... -def validate_encoding_and_error_handler( - setting, value, option_parser, config_parser: Incomplete | None = None, config_section: Incomplete | None = None -): ... -def validate_boolean( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, -) -> bool: ... -def validate_nonnegative_int( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, -) -> int: ... -def validate_threshold( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, -) -> int: ... +def validate_encoding(setting, value=None, option_parser=None, config_parser=None, config_section=None): ... +def validate_encoding_error_handler(setting, value=None, option_parser=None, config_parser=None, config_section=None): ... +def validate_encoding_and_error_handler(setting, value, option_parser, config_parser=None, config_section=None): ... +def validate_boolean(setting, value=None, option_parser=None, config_parser=None, config_section=None) -> bool: ... +def validate_nonnegative_int(setting, value=None, option_parser=None, config_parser=None, config_section=None) -> int: ... +def validate_threshold(setting, value=None, option_parser=None, config_parser=None, config_section=None) -> int: ... def validate_colon_separated_string_list( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, + setting, value=None, option_parser=None, config_parser=None, config_section=None ) -> list[str]: ... 
def validate_comma_separated_list( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, + setting, value=None, option_parser=None, config_parser=None, config_section=None ) -> list[str]: ... -def validate_url_trailing_slash( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, -) -> str: ... +def validate_url_trailing_slash(setting, value=None, option_parser=None, config_parser=None, config_section=None) -> str: ... def validate_dependency_file( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, + setting, value=None, option_parser=None, config_parser=None, config_section=None ) -> DependencyList: ... -def validate_strip_class( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, -): ... +def validate_strip_class(setting, value=None, option_parser=None, config_parser=None, config_section=None): ... def validate_smartquotes_locales( - setting, - value: Incomplete | None = None, - option_parser: Incomplete | None = None, - config_parser: Incomplete | None = None, - config_section: Incomplete | None = None, + setting, value=None, option_parser=None, config_parser=None, config_section=None ) -> list[tuple[str, str]]: ... -def make_paths_absolute(pathdict, keys, base_path: Incomplete | None = None) -> None: ... +def make_paths_absolute(pathdict, keys, base_path=None) -> None: ... def make_one_path_absolute(base_path, path) -> str: ... def filter_settings_spec(settings_spec, *exclude, **replace) -> tuple[Any, ...]: ... diff --git a/stubs/docutils/docutils/io.pyi b/stubs/docutils/docutils/io.pyi index 9796329c7819..532c8bec15e6 100644 --- a/stubs/docutils/docutils/io.pyi +++ b/stubs/docutils/docutils/io.pyi @@ -44,11 +44,7 @@ class Output(TransformSpec): component_type: ClassVar[str] default_destination_path: ClassVar[str | None] def __init__( - self, - destination: Incomplete | None = None, - destination_path: Incomplete | None = None, - encoding: str | None = None, - error_handler: str = "strict", + self, destination=None, destination_path=None, encoding: str | None = None, error_handler: str = "strict" ) -> None: ... def write(self, data: str) -> Any: ... # returns bytes or str def encode(self, data: str) -> Any: ... # returns bytes or str @@ -68,8 +64,8 @@ class ErrorOutput: class FileInput(Input[IO[str]]): def __init__( self, - source: Incomplete | None = None, - source_path: Incomplete | None = None, + source=None, + source_path=None, encoding: str | None = None, error_handler: str = "strict", autoclose: bool = True, diff --git a/stubs/docutils/docutils/parsers/rst/states.pyi b/stubs/docutils/docutils/parsers/rst/states.pyi index 33d80a454093..ae8059b595db 100644 --- a/stubs/docutils/docutils/parsers/rst/states.pyi +++ b/stubs/docutils/docutils/parsers/rst/states.pyi @@ -33,7 +33,7 @@ class RSTState(StateWS[list[str]]): node, match_titles: bool = False, state_machine_class: type[StateMachine[list[str]]] | None = None, - state_machine_kwargs: Incomplete | None = None, + state_machine_kwargs=None, ): ... 
def nested_list_parse( self, @@ -42,11 +42,11 @@ class RSTState(StateWS[list[str]]): node, initial_state, blank_finish, - blank_finish_state: Incomplete | None = None, + blank_finish_state=None, extra_settings={}, match_titles: bool = False, - state_machine_class: Incomplete | None = None, - state_machine_kwargs: Incomplete | None = None, + state_machine_class=None, + state_machine_kwargs=None, ): ... def section(self, title: str, source, style, lineno: int, messages) -> None: ... def check_subsection(self, source, style, lineno: int): ... diff --git a/stubs/fpdf2/fpdf/annotations.pyi b/stubs/fpdf2/fpdf/annotations.pyi index 4d1c3e42d8d8..351f46fa9edd 100644 --- a/stubs/fpdf2/fpdf/annotations.pyi +++ b/stubs/fpdf2/fpdf/annotations.pyi @@ -48,7 +48,7 @@ class AnnotationMixin: ink_list: tuple[int, ...] = (), file_spec: str | None = None, field_type: str | None = None, - value: Incomplete | None = None, + value=None, default_appearance: str | None = None, ) -> None: ... diff --git a/stubs/fpdf2/fpdf/drawing.pyi b/stubs/fpdf2/fpdf/drawing.pyi index 648523c5c152..1d0fbefb7245 100644 --- a/stubs/fpdf2/fpdf/drawing.pyi +++ b/stubs/fpdf2/fpdf/drawing.pyi @@ -72,8 +72,8 @@ class DeviceCMYK(_DeviceCMYKBase): def colors(self) -> tuple[Number, Number, Number, Number]: ... def serialize(self) -> str: ... -def rgb8(r, g, b, a: Incomplete | None = None) -> DeviceRGB: ... -def gray8(g, a: Incomplete | None = None) -> DeviceGray: ... +def rgb8(r, g, b, a=None) -> DeviceRGB: ... +def gray8(g, a=None) -> DeviceGray: ... @overload def convert_to_device_color(r: DeviceCMYK) -> DeviceCMYK: ... @overload @@ -86,7 +86,7 @@ def convert_to_device_color(r: str) -> DeviceRGB: ... def convert_to_device_color(r: int, g: Literal[-1] = -1, b: Literal[-1] = -1) -> DeviceGray: ... @overload def convert_to_device_color(r: Sequence[int] | int, g: int, b: int) -> DeviceGray | DeviceRGB: ... -def cmyk8(c, m, y, k, a: Incomplete | None = None) -> DeviceCMYK: ... +def cmyk8(c, m, y, k, a=None) -> DeviceCMYK: ... def color_from_hex_string(hexstr) -> DeviceRGB: ... def color_from_rgb_string(rgbstr) -> DeviceRGB: ... @@ -118,18 +118,18 @@ class Transform(NamedTuple): @classmethod def translation(cls, x, y): ... @classmethod - def scaling(cls, x, y: Incomplete | None = None): ... + def scaling(cls, x, y=None): ... @classmethod def rotation(cls, theta): ... @classmethod def rotation_d(cls, theta_d): ... @classmethod - def shearing(cls, x, y: Incomplete | None = None): ... + def shearing(cls, x, y=None): ... def translate(self, x, y): ... - def scale(self, x, y: Incomplete | None = None): ... + def scale(self, x, y=None): ... def rotate(self, theta): ... def rotate_d(self, theta_d): ... - def shear(self, x, y: Incomplete | None = None): ... + def shear(self, x, y=None): ... def about(self, x, y): ... def __mul__(self, other): ... def __rmul__(self, other): ... @@ -379,17 +379,13 @@ class PaintedPath: def arc_to(self, rx, ry, rotation, large_arc, positive_sweep, x, y) -> Self: ... def arc_relative(self, rx, ry, rotation, large_arc, positive_sweep, dx, dy) -> Self: ... def close(self) -> None: ... - def render( - self, gsd_registry, style, last_item, initial_point, debug_stream: Incomplete | None = None, pfx: Incomplete | None = None - ): ... + def render(self, gsd_registry, style, last_item, initial_point, debug_stream=None, pfx=None): ... def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... 
class ClippingPath(PaintedPath): paint_rule: Incomplete def __init__(self, x: int = 0, y: int = 0) -> None: ... - def render( - self, gsd_registry, style, last_item, initial_point, debug_stream: Incomplete | None = None, pfx: Incomplete | None = None - ): ... + def render(self, gsd_registry, style, last_item, initial_point, debug_stream=None, pfx=None): ... def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... class GraphicsContext: @@ -409,24 +405,10 @@ class GraphicsContext: def remove_last_item(self) -> None: ... def merge(self, other_context) -> None: ... def build_render_list( - self, - gsd_registry, - style, - last_item, - initial_point, - debug_stream: Incomplete | None = None, - pfx: Incomplete | None = None, - _push_stack: bool = True, + self, gsd_registry, style, last_item, initial_point, debug_stream=None, pfx=None, _push_stack: bool = True ): ... def render( - self, - gsd_registry, - style: DrawingContext, - last_item, - initial_point, - debug_stream: Incomplete | None = None, - pfx: Incomplete | None = None, - _push_stack: bool = True, + self, gsd_registry, style: DrawingContext, last_item, initial_point, debug_stream=None, pfx=None, _push_stack: bool = True ): ... def render_debug( self, gsd_registry, style: DrawingContext, last_item, initial_point, debug_stream, pfx, _push_stack: bool = True diff --git a/stubs/fpdf2/fpdf/fonts.pyi b/stubs/fpdf2/fpdf/fonts.pyi index adb41f2412ea..ee0502b493dc 100644 --- a/stubs/fpdf2/fpdf/fonts.pyi +++ b/stubs/fpdf2/fpdf/fonts.pyi @@ -29,7 +29,7 @@ class FontFace: def __init__( self, family: str | None = None, - emphasis: Incomplete | None = None, + emphasis=None, size_pt: int | None = None, color: int | tuple[Number, Number, Number] | DeviceGray | DeviceRGB | None = None, fill_color: int | tuple[Number, Number, Number] | DeviceGray | DeviceRGB | None = None, @@ -149,13 +149,7 @@ class SubsetMap: def items(self) -> Generator[Incomplete, None, None]: ... def pick(self, unicode: int): ... def pick_glyph(self, glyph): ... - def get_glyph( - self, - glyph: Incomplete | None = None, - unicode: Incomplete | None = None, - glyph_name: Incomplete | None = None, - glyph_width: Incomplete | None = None, - ) -> Glyph: ... + def get_glyph(self, glyph=None, unicode=None, glyph_name=None, glyph_width=None) -> Glyph: ... def get_all_glyph_names(self): ... CORE_FONTS: dict[str, str] diff --git a/stubs/fpdf2/fpdf/fpdf.pyi b/stubs/fpdf2/fpdf/fpdf.pyi index 2d5bd0f8ebd5..67832cb7fbf2 100644 --- a/stubs/fpdf2/fpdf/fpdf.pyi +++ b/stubs/fpdf2/fpdf/fpdf.pyi @@ -298,11 +298,11 @@ class FPDF(GraphicsStateMixin): def get_string_width(self, s: str, normalized: bool = False, markdown: bool = False) -> float: ... def set_line_width(self, width: float) -> None: ... def set_page_background(self, background) -> None: ... - def drawing_context(self, debug_stream: Incomplete | None = None) -> _GeneratorContextManager[DrawingContext]: ... + def drawing_context(self, debug_stream=None) -> _GeneratorContextManager[DrawingContext]: ... def new_path( - self, x: float = 0, y: float = 0, paint_rule: PathPaintRule = ..., debug_stream: Incomplete | None = None + self, x: float = 0, y: float = 0, paint_rule: PathPaintRule = ..., debug_stream=None ) -> _GeneratorContextManager[PaintedPath]: ... - def draw_path(self, path: PaintedPath, debug_stream: Incomplete | None = None) -> None: ... + def draw_path(self, path: PaintedPath, debug_stream=None) -> None: ... def set_dash_pattern(self, dash: float = 0, gap: float = 0, phase: float = 0) -> None: ... 
def line(self, x1: float, y1: float, x2: float, y2: float) -> None: ... def polyline( @@ -505,14 +505,14 @@ class FPDF(GraphicsStateMixin): def local_context( self, *, - font_family: Incomplete | None = None, - font_style: Incomplete | None = None, - font_size_pt: Incomplete | None = None, - line_width: Incomplete | None = None, - draw_color: Incomplete | None = None, - fill_color: Incomplete | None = None, - text_color: Incomplete | None = None, - dash_pattern: Incomplete | None = None, + font_family=None, + font_style=None, + font_size_pt=None, + line_width=None, + draw_color=None, + fill_color=None, + text_color=None, + dash_pattern=None, font_size=..., # semi-deprecated, prefer font_size_pt char_vpos=..., char_spacing=..., diff --git a/stubs/fpdf2/fpdf/image_parsing.pyi b/stubs/fpdf2/fpdf/image_parsing.pyi index 000d9f725696..67cbe33b7530 100644 --- a/stubs/fpdf2/fpdf/image_parsing.pyi +++ b/stubs/fpdf2/fpdf/image_parsing.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Iterable from dataclasses import dataclass from io import BytesIO @@ -40,7 +39,7 @@ def get_svg_info(filename: str, img: BytesIO, image_cache: ImageCache) -> tuple[ # Returned dict could be typed as a TypedDict. def get_img_info( - filename, img: BytesIO | Image.Image | None = None, image_filter: _ImageFilter = "AUTO", dims: Incomplete | None = None + filename, img: BytesIO | Image.Image | None = None, image_filter: _ImageFilter = "AUTO", dims=None ) -> dict[str, Any]: ... class temp_attr: diff --git a/stubs/fpdf2/fpdf/output.pyi b/stubs/fpdf2/fpdf/output.pyi index 630472ca0abd..fb963c770e04 100644 --- a/stubs/fpdf2/fpdf/output.pyi +++ b/stubs/fpdf2/fpdf/output.pyi @@ -34,14 +34,7 @@ class PDFFont(PDFObject): c_i_d_system_info: Incomplete | None font_descriptor: Incomplete | None c_i_d_to_g_i_d_map: Incomplete | None - def __init__( - self, - subtype: str, - base_font: str, - encoding: str | None = None, - d_w: Incomplete | None = None, - w: Incomplete | None = None, - ) -> None: ... + def __init__(self, subtype: str, base_font: str, encoding: str | None = None, d_w=None, w=None) -> None: ... class CIDSystemInfo(PDFObject): registry: PDFString @@ -88,13 +81,7 @@ class PDFCatalog(PDFObject): outlines: Incomplete | None output_intents: Incomplete | None struct_tree_root: Incomplete | None - def __init__( - self, - lang: str | None = None, - page_layout: Incomplete | None = None, - page_mode: Incomplete | None = None, - viewer_preferences: Incomplete | None = None, - ) -> None: ... + def __init__(self, lang: str | None = None, page_layout=None, page_mode=None, viewer_preferences=None) -> None: ... class PDFResources(PDFObject): proc_set: Incomplete @@ -134,8 +121,8 @@ class PDFXObject(PDFContentStream): color_space, bits_per_component, img_filter: str | None = None, - decode: Incomplete | None = None, - decode_parms: Incomplete | None = None, + decode=None, + decode_parms=None, ) -> None: ... class PDFICCProfile(PDFContentStream): @@ -214,7 +201,7 @@ class OutputIntentDictionary: dest_output_profile: PDFICCProfile | None = None, info: str | None = None, ) -> None: ... - def serialize(self, _security_handler: StandardSecurityHandler | None = None, _obj_id: Incomplete | None = None): ... + def serialize(self, _security_handler: StandardSecurityHandler | None = None, _obj_id=None): ... 
class ResourceCatalog: resources: defaultdict[PDFResourceType, dict[Incomplete, Incomplete]] diff --git a/stubs/fpdf2/fpdf/pattern.pyi b/stubs/fpdf2/fpdf/pattern.pyi index c58a3ea4e82c..6d0d043764c6 100644 --- a/stubs/fpdf2/fpdf/pattern.pyi +++ b/stubs/fpdf2/fpdf/pattern.pyi @@ -78,7 +78,7 @@ class LinearGradient(Gradient): to_x: float, to_y: float, colors: list[Incomplete], - background: Incomplete | None = None, + background=None, extend_before: bool = False, extend_after: bool = False, bounds: list[int] | None = None, diff --git a/stubs/fpdf2/fpdf/prefs.pyi b/stubs/fpdf2/fpdf/prefs.pyi index f4d4899686e9..d7b1b55f442f 100644 --- a/stubs/fpdf2/fpdf/prefs.pyi +++ b/stubs/fpdf2/fpdf/prefs.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Literal from .enums import Duplex, PageBoundaries, PageMode, TextDirection @@ -26,7 +25,7 @@ class ViewerPreferences: view_clip: PageBoundaries | None = None, print_area: PageBoundaries | None = None, print_clip: PageBoundaries | None = None, - print_scaling: Incomplete | None = None, + print_scaling=None, ) -> None: ... @property def non_full_screen_page_mode(self) -> PageMode | None: ... diff --git a/stubs/fpdf2/fpdf/sign.pyi b/stubs/fpdf2/fpdf/sign.pyi index a34fbe831655..1176b1a099e1 100644 --- a/stubs/fpdf2/fpdf/sign.pyi +++ b/stubs/fpdf2/fpdf/sign.pyi @@ -10,13 +10,7 @@ class Signature: reason: Incomplete | None byte_range: str contents: str - def __init__( - self, - contact_info: Incomplete | None = None, - location: Incomplete | None = None, - m: Incomplete | None = None, - reason: Incomplete | None = None, - ) -> None: ... + def __init__(self, contact_info=None, location=None, m=None, reason=None) -> None: ... def serialize(self) -> str: ... def sign_content(signer, buffer, key, cert, extra_certs, hashalgo, sign_time): ... diff --git a/stubs/fpdf2/fpdf/svg.pyi b/stubs/fpdf2/fpdf/svg.pyi index c7d1a41f645e..3f98d58aafdf 100644 --- a/stubs/fpdf2/fpdf/svg.pyi +++ b/stubs/fpdf2/fpdf/svg.pyi @@ -110,12 +110,10 @@ class SVGObject: def transform_to_rect_viewport( self, scale, width, height, align_viewbox: bool = True, ignore_svg_top_attrs: bool = False ): ... - def draw_to_page( - self, pdf: FPDF, x: Incomplete | None = None, y: Incomplete | None = None, debug_stream: Incomplete | None = None - ) -> None: ... + def draw_to_page(self, pdf: FPDF, x=None, y=None, debug_stream=None) -> None: ... def handle_defs(self, defs) -> None: ... def build_xref(self, xref): ... - def build_group(self, group, pdf_group: Incomplete | None = None): ... + def build_group(self, group, pdf_group=None): ... def build_path(self, path): ... def build_shape(self, shape): ... def build_clipping_path(self, shape, clip_id): ... diff --git a/stubs/fpdf2/fpdf/syntax.pyi b/stubs/fpdf2/fpdf/syntax.pyi index 62daa150bafd..a6005e9e4c52 100644 --- a/stubs/fpdf2/fpdf/syntax.pyi +++ b/stubs/fpdf2/fpdf/syntax.pyi @@ -20,9 +20,7 @@ def create_dictionary_string( ) -> str: ... def create_list_string(list_): ... def iobj_ref(n): ... -def create_stream( - stream: str | bytes | bytearray, encryption_handler: StandardSecurityHandler | None = None, obj_id: Incomplete | None = None -): ... +def create_stream(stream: str | bytes | bytearray, encryption_handler: StandardSecurityHandler | None = None, obj_id=None): ... class Raw(str): ... @@ -37,7 +35,7 @@ class PDFObject: def id(self, n: int) -> None: ... @property def ref(self) -> str: ... - def serialize(self, obj_dict: Incomplete | None = None, _security_handler: StandardSecurityHandler | None = None) -> str: ... 
+ def serialize(self, obj_dict=None, _security_handler: StandardSecurityHandler | None = None) -> str: ... def content_stream(self) -> bytes: ... class PDFContentStream(PDFObject): @@ -78,9 +76,5 @@ class DestinationXYZ(Destination): def __init__(self, page: int, top: float, left: float = 0, zoom: float | Literal["null"] = "null") -> None: ... def serialize(self) -> str: ... def replace( - self, - page: Incomplete | None = None, - top: float | None = None, - left: float | None = None, - zoom: float | Literal["null"] | None = None, + self, page=None, top: float | None = None, left: float | None = None, zoom: float | Literal["null"] | None = None ) -> DestinationXYZ: ... diff --git a/stubs/fpdf2/fpdf/table.pyi b/stubs/fpdf2/fpdf/table.pyi index 28e0313bfabf..55ec102ed504 100644 --- a/stubs/fpdf2/fpdf/table.pyi +++ b/stubs/fpdf2/fpdf/table.pyi @@ -52,14 +52,10 @@ class Table: outer_border_width: float | None = None, num_heading_rows: int = 1, repeat_headings: TableHeadingsDisplay | int = 1, - min_row_height: Incomplete | None = None, + min_row_height=None, ) -> None: ... def row( - self, - cells: Iterable[str] = (), - style: FontFace | None = None, - v_align: VAlign | str | None = None, - min_height: Incomplete | None = None, + self, cells: Iterable[str] = (), style: FontFace | None = None, v_align: VAlign | str | None = None, min_height=None ) -> Row: ... def render(self) -> None: ... def get_cell_border(self, i: int, j: int, cell: Cell) -> str | Literal[0, 1]: ... @@ -70,11 +66,7 @@ class Row: v_align: VAlign | None min_height: Incomplete | None def __init__( - self, - table: Table, - style: FontFace | None = None, - v_align: VAlign | str | None = None, - min_height: Incomplete | None = None, + self, table: Table, style: FontFace | None = None, v_align: VAlign | str | None = None, min_height=None ) -> None: ... @property def cols_count(self) -> int: ... @@ -126,7 +118,7 @@ class Cell: link: str | int | None border: CellBordersLayout | None - def write(self, text, align: Incomplete | None = None): ... + def write(self, text, align=None): ... @dataclass(frozen=True) class RowLayoutInfo: @@ -144,4 +136,4 @@ class RowSpanLayoutInfo: def row_range(self) -> range: ... -def draw_box_borders(pdf: FPDF, x1, y1, x2, y2, border: str | Literal[0, 1], fill_color: Incomplete | None = None) -> None: ... +def draw_box_borders(pdf: FPDF, x1, y1, x2, y2, border: str | Literal[0, 1], fill_color=None) -> None: ... diff --git a/stubs/fpdf2/fpdf/template.pyi b/stubs/fpdf2/fpdf/template.pyi index e795850c5054..4c94913b3356 100644 --- a/stubs/fpdf2/fpdf/template.pyi +++ b/stubs/fpdf2/fpdf/template.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from os import PathLike from typing import Any @@ -11,7 +10,7 @@ class FlexTemplate: splitting_pdf: Any handlers: Any texts: Any - def __init__(self, pdf, elements: Incomplete | None = None) -> None: ... + def __init__(self, pdf, elements=None) -> None: ... elements: Any keys: Any def load_elements(self, elements) -> None: ... @@ -29,8 +28,8 @@ class FlexTemplate: class Template(FlexTemplate): def __init__( self, - infile: Incomplete | None = None, - elements: Incomplete | None = None, + infile=None, + elements=None, format: str = "A4", orientation: str = "portrait", unit: str = "mm", @@ -41,4 +40,4 @@ class Template(FlexTemplate): keywords: str = "", ) -> None: ... def add_page(self) -> None: ... - def render(self, outfile: Incomplete | None = None, dest: Incomplete | None = None) -> None: ... 
# type: ignore[override] + def render(self, outfile=None, dest=None) -> None: ... # type: ignore[override] diff --git a/stubs/fpdf2/fpdf/text_region.pyi b/stubs/fpdf2/fpdf/text_region.pyi index 76d7b4c69173..481a173b16c0 100644 --- a/stubs/fpdf2/fpdf/text_region.pyi +++ b/stubs/fpdf2/fpdf/text_region.pyi @@ -47,7 +47,7 @@ class Paragraph: self, region, text_align: _TextAlign | None = None, - line_height: Incomplete | None = None, + line_height=None, top_margin: float = 0, bottom_margin: float = 0, indent: float = 0, @@ -59,7 +59,7 @@ class Paragraph: ) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, traceback) -> None: ... - def write(self, text: str, link: Incomplete | None = None): ... + def write(self, text: str, link=None): ... def generate_bullet_frags_and_tl( self, bullet_string: str, bullet_r_margin: float ) -> tuple[tuple[Fragment, ...], TextLine] | None: ... @@ -93,9 +93,9 @@ class ImageParagraph: keep_aspect_ratio: bool = False, top_margin: float = 0, bottom_margin: float = 0, - link: Incomplete | None = None, - title: Incomplete | None = None, - alt_text: Incomplete | None = None, + link=None, + title=None, + alt_text=None, ) -> None: ... def build_line(self) -> Self: ... def render(self, col_left: float, col_width: float, max_height: float) -> VectorImageInfo | RasterImageInfo: ... @@ -117,18 +117,18 @@ class ParagraphCollectorMixin: print_sh: bool = False, skip_leading_spaces: bool = False, wrapmode: WrapMode | None = None, - img: Incomplete | None = None, + img=None, img_fill_width: bool = False, **kwargs, ) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, traceback) -> None: ... - def write(self, text: str, link: Incomplete | None = None): ... + def write(self, text: str, link=None): ... def ln(self, h: float | None = None) -> None: ... def paragraph( self, text_align: _TextAlign | None = None, - line_height: Incomplete | None = None, + line_height=None, skip_leading_spaces: bool = False, top_margin: int = 0, bottom_margin: int = 0, @@ -149,9 +149,9 @@ class ParagraphCollectorMixin: keep_aspect_ratio: bool = False, top_margin: float = 0, bottom_margin: float = 0, - link: Incomplete | None = None, - title: Incomplete | None = None, - alt_text: Incomplete | None = None, + link=None, + title=None, + alt_text=None, ) -> None: ... class TextRegion(ParagraphCollectorMixin): @@ -163,7 +163,7 @@ class TextRegion(ParagraphCollectorMixin): class TextColumnarMixin: l_margin: Incomplete r_margin: Incomplete - def __init__(self, pdf, *args, l_margin: Incomplete | None = None, r_margin: Incomplete | None = None, **kwargs) -> None: ... + def __init__(self, pdf, *args, l_margin=None, r_margin=None, **kwargs) -> None: ... class TextColumns(TextRegion, TextColumnarMixin): balance: Incomplete diff --git a/stubs/gdb/gdb/disassembler.pyi b/stubs/gdb/gdb/disassembler.pyi index d4fe1ae0d511..a1560c154600 100644 --- a/stubs/gdb/gdb/disassembler.pyi +++ b/stubs/gdb/gdb/disassembler.pyi @@ -48,7 +48,7 @@ STYLE_ADDRESS_OFFSET: Final = 7 STYLE_SYMBOL: Final = 8 STYLE_COMMENT_START: Final = 9 -def builtin_disassemble(INFO: DisassembleInfo, MEMORY_SOURCE: Incomplete | None = None) -> None: ... +def builtin_disassemble(INFO: DisassembleInfo, MEMORY_SOURCE=None) -> None: ... class maint_info_py_disassemblers_cmd(gdb.Command): def __init__(self) -> None: ... 
diff --git a/stubs/html5lib/html5lib/_ihatexml.pyi b/stubs/html5lib/html5lib/_ihatexml.pyi
index ea95b52c86e7..cd710c885815 100644
--- a/stubs/html5lib/html5lib/_ihatexml.pyi
+++ b/stubs/html5lib/html5lib/_ihatexml.pyi
@@ -1,4 +1,3 @@
-from _typeshed import Incomplete
 from typing import Any

 baseChar: str
@@ -44,7 +43,7 @@ class InfosetFilter:
         replaceFormFeedCharacters: bool = True,
         preventSingleQuotePubid: bool = False,
     ) -> None: ...
-    def coerceAttribute(self, name, namespace: Incomplete | None = None): ...
+    def coerceAttribute(self, name, namespace=None): ...
     def coerceElement(self, name): ...
     def coerceComment(self, data): ...
     def coerceCharacters(self, data): ...
diff --git a/stubs/html5lib/html5lib/_inputstream.pyi b/stubs/html5lib/html5lib/_inputstream.pyi
index bf0479f9a5be..10759fad6cd5 100644
--- a/stubs/html5lib/html5lib/_inputstream.pyi
+++ b/stubs/html5lib/html5lib/_inputstream.pyi
@@ -1,4 +1,4 @@
-from _typeshed import Incomplete, SupportsRead
+from _typeshed import SupportsRead
 from codecs import CodecInfo
 from typing import Any, Protocol, overload
 from typing_extensions import TypeAlias
@@ -62,7 +62,7 @@ class HTMLUnicodeInputStream:
     def openStream(self, source): ...
     def position(self) -> tuple[int, int]: ...
     def char(self): ...
-    def readChunk(self, chunkSize: Incomplete | None = None): ...
+    def readChunk(self, chunkSize=None): ...
     def characterErrorsUCS4(self, data) -> None: ...
     def characterErrorsUCS2(self, data) -> None: ...
     def charsUntil(self, characters, opposite: bool = False): ...
diff --git a/stubs/html5lib/html5lib/_tokenizer.pyi b/stubs/html5lib/html5lib/_tokenizer.pyi
index 72b701c37491..e999fbcde4cf 100644
--- a/stubs/html5lib/html5lib/_tokenizer.pyi
+++ b/stubs/html5lib/html5lib/_tokenizer.pyi
@@ -1,4 +1,3 @@
-from _typeshed import Incomplete
 from typing import Any

 from ._inputstream import _InputStream
@@ -14,11 +13,11 @@ class HTMLTokenizer:
     state: Any
     escape: bool
     currentToken: Any
-    def __init__(self, stream: _InputStream, parser: Incomplete | None = None, **kwargs) -> None: ...
+    def __init__(self, stream: _InputStream, parser=None, **kwargs) -> None: ...
     tokenQueue: Any
     def __iter__(self): ...
     def consumeNumberEntity(self, isHex): ...
-    def consumeEntity(self, allowedChar: Incomplete | None = None, fromAttribute: bool = False) -> None: ...
+    def consumeEntity(self, allowedChar=None, fromAttribute: bool = False) -> None: ...
     def processEntityInAttribute(self, allowedChar) -> None: ...
     def emitCurrentToken(self) -> None: ...
     def dataState(self): ...
diff --git a/stubs/html5lib/html5lib/_trie/_base.pyi b/stubs/html5lib/html5lib/_trie/_base.pyi
index 1d88b9a602d8..7016028a2417 100644
--- a/stubs/html5lib/html5lib/_trie/_base.pyi
+++ b/stubs/html5lib/html5lib/_trie/_base.pyi
@@ -1,10 +1,9 @@
-from _typeshed import Incomplete
 from abc import ABCMeta
 from collections.abc import Mapping
 from typing import Any

 class Trie(Mapping[Any, Any], metaclass=ABCMeta):
-    def keys(self, prefix: Incomplete | None = None): ...
+    def keys(self, prefix=None): ...
     def has_keys_with_prefix(self, prefix): ...
     def longest_prefix(self, prefix): ...
     def longest_prefix_item(self, prefix): ...
diff --git a/stubs/html5lib/html5lib/_trie/py.pyi b/stubs/html5lib/html5lib/_trie/py.pyi
index 03d9a4687b8a..4d3f7c7bb069 100644
--- a/stubs/html5lib/html5lib/_trie/py.pyi
+++ b/stubs/html5lib/html5lib/_trie/py.pyi
@@ -1,5 +1,3 @@
-from _typeshed import Incomplete
-
 from ._base import Trie as ABCTrie

 class Trie(ABCTrie):
@@ -8,5 +6,5 @@ class Trie(ABCTrie):
     def __len__(self) -> int: ...
def __iter__(self): ... def __getitem__(self, key): ... - def keys(self, prefix: Incomplete | None = None): ... + def keys(self, prefix=None): ... def has_keys_with_prefix(self, prefix): ... diff --git a/stubs/html5lib/html5lib/_utils.pyi b/stubs/html5lib/html5lib/_utils.pyi index 7401d338c649..7cc3edba3c64 100644 --- a/stubs/html5lib/html5lib/_utils.pyi +++ b/stubs/html5lib/html5lib/_utils.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Mapping from typing import Any @@ -8,7 +7,7 @@ class MethodDispatcher(dict[Any, Any]): default: Any def __init__(self, items=()) -> None: ... def __getitem__(self, key): ... - def __get__(self, instance, owner: Incomplete | None = None): ... + def __get__(self, instance, owner=None): ... class BoundMethodDispatcher(Mapping[Any, Any]): instance: Any diff --git a/stubs/html5lib/html5lib/html5parser.pyi b/stubs/html5lib/html5lib/html5parser.pyi index 3f2fa19db7a1..2e38eaea5688 100644 --- a/stubs/html5lib/html5lib/html5parser.pyi +++ b/stubs/html5lib/html5lib/html5parser.pyi @@ -21,9 +21,7 @@ class HTMLParser: tree: Any errors: list[Incomplete] phases: Any - def __init__( - self, tree: Incomplete | None = None, strict: bool = False, namespaceHTMLElements: bool = True, debug: bool = False - ) -> None: ... + def __init__(self, tree=None, strict: bool = False, namespaceHTMLElements: bool = True, debug: bool = False) -> None: ... firstStartTag: bool log: Any compatMode: str @@ -42,7 +40,7 @@ class HTMLParser: def mainLoop(self) -> None: ... def parse(self, stream: _InputStream, scripting: bool = ..., **kwargs): ... def parseFragment(self, stream: _InputStream, *args, **kwargs): ... - def parseError(self, errorcode: str = "XXX-undefined-error", datavars: Incomplete | None = None) -> None: ... + def parseError(self, errorcode: str = "XXX-undefined-error", datavars=None) -> None: ... def adjustMathMLAttributes(self, token) -> None: ... def adjustSVGAttributes(self, token) -> None: ... def adjustForeignAttributes(self, token) -> None: ... @@ -53,6 +51,6 @@ class HTMLParser: def getPhases(debug): ... def adjust_attributes(token, replacements) -> None: ... -def impliedTagToken(name, type: str = "EndTag", attributes: Incomplete | None = None, selfClosing: bool = False): ... +def impliedTagToken(name, type: str = "EndTag", attributes=None, selfClosing: bool = False): ... class ParseError(Exception): ... diff --git a/stubs/html5lib/html5lib/serializer.pyi b/stubs/html5lib/html5lib/serializer.pyi index 12ae9db51fa4..c17b2205ae43 100644 --- a/stubs/html5lib/html5lib/serializer.pyi +++ b/stubs/html5lib/html5lib/serializer.pyi @@ -32,8 +32,8 @@ class HTMLSerializer: def encode(self, string): ... def encodeStrict(self, string): ... encoding: Any - def serialize(self, treewalker, encoding: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... - def render(self, treewalker, encoding: Incomplete | None = None): ... + def serialize(self, treewalker, encoding=None) -> Generator[Incomplete, None, None]: ... + def render(self, treewalker, encoding=None): ... def serializeError(self, data: str = "XXX ERROR MESSAGE NEEDED") -> None: ... class SerializeError(Exception): ... 
diff --git a/stubs/html5lib/html5lib/treebuilders/__init__.pyi b/stubs/html5lib/html5lib/treebuilders/__init__.pyi index b8bc40a63840..703478287fe5 100644 --- a/stubs/html5lib/html5lib/treebuilders/__init__.pyi +++ b/stubs/html5lib/html5lib/treebuilders/__init__.pyi @@ -1,6 +1,5 @@ -from _typeshed import Incomplete from typing import Any treeBuilderCache: Any -def getTreeBuilder(treeType, implementation: Incomplete | None = None, **kwargs): ... +def getTreeBuilder(treeType, implementation=None, **kwargs): ... diff --git a/stubs/html5lib/html5lib/treebuilders/base.pyi b/stubs/html5lib/html5lib/treebuilders/base.pyi index eae192172c68..577b92327b9c 100644 --- a/stubs/html5lib/html5lib/treebuilders/base.pyi +++ b/stubs/html5lib/html5lib/treebuilders/base.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any Marker: Any @@ -12,7 +11,7 @@ class Node: childNodes: Any def __init__(self, name) -> None: ... def appendChild(self, node) -> None: ... - def insertText(self, data, insertBefore: Incomplete | None = None) -> None: ... + def insertText(self, data, insertBefore=None) -> None: ... def insertBefore(self, node, refNode) -> None: ... def removeChild(self, node) -> None: ... def reparentChildren(self, newParent) -> None: ... @@ -38,19 +37,19 @@ class TreeBuilder: insertFromTable: bool document: Any def reset(self) -> None: ... - def elementInScope(self, target, variant: Incomplete | None = None): ... + def elementInScope(self, target, variant=None): ... def reconstructActiveFormattingElements(self) -> None: ... def clearActiveFormattingElements(self) -> None: ... def elementInActiveFormattingElements(self, name): ... def insertRoot(self, token) -> None: ... def insertDoctype(self, token) -> None: ... - def insertComment(self, token, parent: Incomplete | None = None) -> None: ... + def insertComment(self, token, parent=None) -> None: ... def createElement(self, token): ... def insertElementNormal(self, token): ... def insertElementTable(self, token): ... - def insertText(self, data, parent: Incomplete | None = None) -> None: ... + def insertText(self, data, parent=None) -> None: ... def getTableMisnestedNodePosition(self): ... - def generateImpliedEndTags(self, exclude: Incomplete | None = None) -> None: ... + def generateImpliedEndTags(self, exclude=None) -> None: ... def getDocument(self): ... def getFragment(self): ... def testSerializer(self, node) -> None: ... diff --git a/stubs/html5lib/html5lib/treebuilders/etree_lxml.pyi b/stubs/html5lib/html5lib/treebuilders/etree_lxml.pyi index 018bf60a60ba..3bf5bea6481f 100644 --- a/stubs/html5lib/html5lib/treebuilders/etree_lxml.pyi +++ b/stubs/html5lib/html5lib/treebuilders/etree_lxml.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from . import base @@ -39,7 +38,7 @@ class TreeBuilder(base.TreeBuilder): def getDocument(self): ... def getFragment(self): ... def insertDoctype(self, token) -> None: ... - def insertCommentInitial(self, data, parent: Incomplete | None = None) -> None: ... - def insertCommentMain(self, data, parent: Incomplete | None = None) -> None: ... + def insertCommentInitial(self, data, parent=None) -> None: ... + def insertCommentMain(self, data, parent=None) -> None: ... document: Any def insertRoot(self, token) -> None: ... 
diff --git a/stubs/html5lib/html5lib/treewalkers/__init__.pyi b/stubs/html5lib/html5lib/treewalkers/__init__.pyi
index 8246ff05a289..62d6f2a9f5fc 100644
--- a/stubs/html5lib/html5lib/treewalkers/__init__.pyi
+++ b/stubs/html5lib/html5lib/treewalkers/__init__.pyi
@@ -1,4 +1,2 @@
-from _typeshed import Incomplete
-
-def getTreeWalker(treeType, implementation: Incomplete | None = None, **kwargs): ...
+def getTreeWalker(treeType, implementation=None, **kwargs): ...
 def pprint(walker): ...
diff --git a/stubs/html5lib/html5lib/treewalkers/base.pyi b/stubs/html5lib/html5lib/treewalkers/base.pyi
index 87fdc9447e73..3e6649735266 100644
--- a/stubs/html5lib/html5lib/treewalkers/base.pyi
+++ b/stubs/html5lib/html5lib/treewalkers/base.pyi
@@ -1,4 +1,3 @@
-from _typeshed import Incomplete
 from typing import Any
 
 DOCUMENT: Any
@@ -19,7 +18,7 @@ class TreeWalker:
     def endTag(self, namespace, name): ...
     def text(self, data) -> None: ...
     def comment(self, data): ...
-    def doctype(self, name, publicId: Incomplete | None = None, systemId: Incomplete | None = None): ...
+    def doctype(self, name, publicId=None, systemId=None): ...
     def entity(self, name): ...
     def unknown(self, nodeType): ...
 
diff --git a/stubs/httplib2/httplib2/__init__.pyi b/stubs/httplib2/httplib2/__init__.pyi
index 6e4b6dc7f320..1ae0fe340053 100644
--- a/stubs/httplib2/httplib2/__init__.pyi
+++ b/stubs/httplib2/httplib2/__init__.pyi
@@ -1,5 +1,4 @@
 import http.client
-from _typeshed import Incomplete
 from collections.abc import Generator
 from typing import Any, ClassVar
 from typing_extensions import Self
@@ -41,7 +40,7 @@ class DigestAuthentication(Authentication):
     challenge: Any
     A1: Any
     def __init__(self, credentials, host, request_uri, headers, response, content, http) -> None: ...
-    def request(self, method, request_uri, headers, content, cnonce: Incomplete | None = None): ...
+    def request(self, method, request_uri, headers, content, cnonce=None): ...
     def response(self, response, content): ...
 
 class HmacDigestAuthentication(Authentication):
@@ -87,14 +86,7 @@ class AllHosts: ...
 class ProxyInfo:
     bypass_hosts: Any
     def __init__(
-        self,
-        proxy_type,
-        proxy_host,
-        proxy_port,
-        proxy_rdns: bool = True,
-        proxy_user: Incomplete | None = None,
-        proxy_pass: Incomplete | None = None,
-        proxy_headers: Incomplete | None = None,
+        self, proxy_type, proxy_host, proxy_port, proxy_rdns: bool = True, proxy_user=None, proxy_pass=None, proxy_headers=None
     ) -> None: ...
     def astuple(self): ...
     def isgood(self): ...
@@ -103,9 +95,7 @@ class ProxyInfo:
 
 class HTTPConnectionWithTimeout(http.client.HTTPConnection):
     proxy_info: Any
-    def __init__(
-        self, host, port: Incomplete | None = None, timeout: Incomplete | None = None, proxy_info: Incomplete | None = None
-    ) -> None: ...
+    def __init__(self, host, port=None, timeout=None, proxy_info=None) -> None: ...
     sock: Any
     def connect(self) -> None: ...
 
@@ -119,16 +109,16 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
     def __init__(
         self,
         host,
-        port: Incomplete | None = None,
-        key_file: Incomplete | None = None,
-        cert_file: Incomplete | None = None,
-        timeout: Incomplete | None = None,
-        proxy_info: Incomplete | None = None,
-        ca_certs: Incomplete | None = None,
+        port=None,
+        key_file=None,
+        cert_file=None,
+        timeout=None,
+        proxy_info=None,
+        ca_certs=None,
         disable_ssl_certificate_validation: bool = False,
-        tls_maximum_version: Incomplete | None = None,
-        tls_minimum_version: Incomplete | None = None,
-        key_password: Incomplete | None = None,
+        tls_maximum_version=None,
+        tls_minimum_version=None,
+        key_password=None,
     ) -> None: ...
     sock: Any
     def connect(self) -> None: ...
@@ -155,27 +145,19 @@ class Http:
     forward_authorization_headers: bool
     def __init__(
         self,
-        cache: Incomplete | None = None,
-        timeout: Incomplete | None = None,
+        cache=None,
+        timeout=None,
         proxy_info=...,
-        ca_certs: Incomplete | None = None,
+        ca_certs=None,
         disable_ssl_certificate_validation: bool = False,
-        tls_maximum_version: Incomplete | None = None,
-        tls_minimum_version: Incomplete | None = None,
+        tls_maximum_version=None,
+        tls_minimum_version=None,
     ) -> None: ...
     def close(self) -> None: ...
     def add_credentials(self, name, password, domain: str = "") -> None: ...
-    def add_certificate(self, key, cert, domain, password: Incomplete | None = None) -> None: ...
+    def add_certificate(self, key, cert, domain, password=None) -> None: ...
     def clear_credentials(self) -> None: ...
-    def request(
-        self,
-        uri,
-        method: str = "GET",
-        body: Incomplete | None = None,
-        headers: Incomplete | None = None,
-        redirections=5,
-        connection_type: Incomplete | None = None,
-    ): ...
+    def request(self, uri, method: str = "GET", body=None, headers=None, redirections=5, connection_type=None): ...
 
 class Response(dict[str, Any]):
     fromcache: bool
diff --git a/stubs/httplib2/httplib2/socks.pyi b/stubs/httplib2/httplib2/socks.pyi
index 25d9b60d5cab..f5ef620f8d78 100644
--- a/stubs/httplib2/httplib2/socks.pyi
+++ b/stubs/httplib2/httplib2/socks.pyi
@@ -1,5 +1,4 @@
 import socket
-from _typeshed import Incomplete
 
 PROXY_TYPE_SOCKS4: int
 PROXY_TYPE_SOCKS5: int
@@ -13,28 +12,14 @@ class Socks5Error(ProxyError): ...
 class Socks4Error(ProxyError): ...
 class HTTPError(ProxyError): ...
 
-def setdefaultproxy(
-    proxytype: Incomplete | None = None,
-    addr: Incomplete | None = None,
-    port: Incomplete | None = None,
-    rdns: bool = True,
-    username: Incomplete | None = None,
-    password: Incomplete | None = None,
-) -> None: ...
+def setdefaultproxy(proxytype=None, addr=None, port=None, rdns: bool = True, username=None, password=None) -> None: ...
 def wrapmodule(module) -> None: ...
 
 class socksocket(socket.socket):
-    def __init__(self, family=..., type=..., proto: int = 0, _sock: Incomplete | None = None) -> None: ...
+    def __init__(self, family=..., type=..., proto: int = 0, _sock=None) -> None: ...
     def sendall(self, content, *args): ...
     def setproxy(
-        self,
-        proxytype: Incomplete | None = None,
-        addr: Incomplete | None = None,
-        port: Incomplete | None = None,
-        rdns: bool = True,
-        username: Incomplete | None = None,
-        password: Incomplete | None = None,
-        headers: Incomplete | None = None,
+        self, proxytype=None, addr=None, port=None, rdns: bool = True, username=None, password=None, headers=None
     ) -> None: ...
     def getproxysockname(self): ...
     def getproxypeername(self): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/approle.pyi b/stubs/hvac/hvac/api/auth_methods/approle.pyi
index 6a32d5167ccb..530e11f8bf0e 100644
--- a/stubs/hvac/hvac/api/auth_methods/approle.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/approle.pyi
@@ -1,25 +1,23 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 class AppRole(VaultApiBase):
     def create_or_update_approle(
         self,
         role_name,
-        bind_secret_id: Incomplete | None = None,
-        secret_id_bound_cidrs: Incomplete | None = None,
-        secret_id_num_uses: Incomplete | None = None,
-        secret_id_ttl: Incomplete | None = None,
-        enable_local_secret_ids: Incomplete | None = None,
-        token_ttl: Incomplete | None = None,
-        token_max_ttl: Incomplete | None = None,
-        token_policies: Incomplete | None = None,
-        token_bound_cidrs: Incomplete | None = None,
-        token_explicit_max_ttl: Incomplete | None = None,
-        token_no_default_policy: Incomplete | None = None,
-        token_num_uses: Incomplete | None = None,
-        token_period: Incomplete | None = None,
-        token_type: Incomplete | None = None,
+        bind_secret_id=None,
+        secret_id_bound_cidrs=None,
+        secret_id_num_uses=None,
+        secret_id_ttl=None,
+        enable_local_secret_ids=None,
+        token_ttl=None,
+        token_max_ttl=None,
+        token_policies=None,
+        token_bound_cidrs=None,
+        token_explicit_max_ttl=None,
+        token_no_default_policy=None,
+        token_num_uses=None,
+        token_period=None,
+        token_type=None,
         mount_point="approle",
     ): ...
     def list_roles(self, mount_point="approle"): ...
@@ -28,27 +26,14 @@ class AppRole(VaultApiBase):
     def read_role_id(self, role_name, mount_point="approle"): ...
     def update_role_id(self, role_name, role_id, mount_point="approle"): ...
     def generate_secret_id(
-        self,
-        role_name,
-        metadata: Incomplete | None = None,
-        cidr_list: Incomplete | None = None,
-        token_bound_cidrs: Incomplete | None = None,
-        mount_point="approle",
-        wrap_ttl: Incomplete | None = None,
+        self, role_name, metadata=None, cidr_list=None, token_bound_cidrs=None, mount_point="approle", wrap_ttl=None
     ): ...
     def create_custom_secret_id(
-        self,
-        role_name,
-        secret_id,
-        metadata: Incomplete | None = None,
-        cidr_list: Incomplete | None = None,
-        token_bound_cidrs: Incomplete | None = None,
-        mount_point="approle",
-        wrap_ttl: Incomplete | None = None,
+        self, role_name, secret_id, metadata=None, cidr_list=None, token_bound_cidrs=None, mount_point="approle", wrap_ttl=None
     ): ...
     def read_secret_id(self, role_name, secret_id, mount_point="approle"): ...
     def destroy_secret_id(self, role_name, secret_id, mount_point="approle"): ...
     def list_secret_id_accessors(self, role_name, mount_point="approle"): ...
     def read_secret_id_accessor(self, role_name, secret_id_accessor, mount_point="approle"): ...
     def destroy_secret_id_accessor(self, role_name, secret_id_accessor, mount_point="approle"): ...
-    def login(self, role_id, secret_id: Incomplete | None = None, use_token: bool = True, mount_point="approle"): ...
+    def login(self, role_id, secret_id=None, use_token: bool = True, mount_point="approle"): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/aws.pyi b/stubs/hvac/hvac/api/auth_methods/aws.pyi
index e89a9b1d008b..79dba1a592d6 100644
--- a/stubs/hvac/hvac/api/auth_methods/aws.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/aws.pyi
@@ -7,13 +7,13 @@ logger: Incomplete
 class Aws(VaultApiBase):
     def configure(
         self,
-        max_retries: Incomplete | None = None,
-        access_key: Incomplete | None = None,
-        secret_key: Incomplete | None = None,
-        endpoint: Incomplete | None = None,
-        iam_endpoint: Incomplete | None = None,
-        sts_endpoint: Incomplete | None = None,
-        iam_server_id_header_value: Incomplete | None = None,
+        max_retries=None,
+        access_key=None,
+        secret_key=None,
+        endpoint=None,
+        iam_endpoint=None,
+        sts_endpoint=None,
+        iam_server_id_header_value=None,
         mount_point: str = "aws",
         sts_region: str | None = None,
     ): ...
@@ -21,16 +21,14 @@ class Aws(VaultApiBase):
     def delete_config(self, mount_point: str = "aws"): ...
     def configure_identity_integration(
         self,
-        iam_alias: Incomplete | None = None,
-        ec2_alias: Incomplete | None = None,
+        iam_alias=None,
+        ec2_alias=None,
         mount_point: str = "aws",
         iam_metadata: str | list[str] | None = None,
         ec2_metadata: str | list[str] | None = None,
     ): ...
     def read_identity_integration(self, mount_point: str = "aws"): ...
-    def create_certificate_configuration(
-        self, cert_name, aws_public_cert, document_type: Incomplete | None = None, mount_point: str = "aws"
-    ): ...
+    def create_certificate_configuration(self, cert_name, aws_public_cert, document_type=None, mount_point: str = "aws"): ...
     def read_certificate_configuration(self, cert_name, mount_point: str = "aws"): ...
     def delete_certificate_configuration(self, cert_name, mount_point: str = "aws"): ...
     def list_certificate_configurations(self, mount_point: str = "aws"): ...
@@ -38,39 +36,35 @@ class Aws(VaultApiBase):
     def read_sts_role(self, account_id, mount_point: str = "aws"): ...
     def list_sts_roles(self, mount_point: str = "aws"): ...
     def delete_sts_role(self, account_id, mount_point: str = "aws"): ...
-    def configure_identity_whitelist_tidy(
-        self, safety_buffer: Incomplete | None = None, disable_periodic_tidy: Incomplete | None = None, mount_point: str = "aws"
-    ): ...
+    def configure_identity_whitelist_tidy(self, safety_buffer=None, disable_periodic_tidy=None, mount_point: str = "aws"): ...
     def read_identity_whitelist_tidy(self, mount_point: str = "aws"): ...
     def delete_identity_whitelist_tidy(self, mount_point: str = "aws"): ...
-    def configure_role_tag_blacklist_tidy(
-        self, safety_buffer: Incomplete | None = None, disable_periodic_tidy: Incomplete | None = None, mount_point: str = "aws"
-    ): ...
+    def configure_role_tag_blacklist_tidy(self, safety_buffer=None, disable_periodic_tidy=None, mount_point: str = "aws"): ...
     def read_role_tag_blacklist_tidy(self, mount_point: str = "aws"): ...
     def delete_role_tag_blacklist_tidy(self, mount_point: str = "aws"): ...
     def create_role(
         self,
         role,
-        auth_type: Incomplete | None = None,
-        bound_ami_id: Incomplete | None = None,
-        bound_account_id: Incomplete | None = None,
-        bound_region: Incomplete | None = None,
-        bound_vpc_id: Incomplete | None = None,
-        bound_subnet_id: Incomplete | None = None,
-        bound_iam_role_arn: Incomplete | None = None,
-        bound_iam_instance_profile_arn: Incomplete | None = None,
-        bound_ec2_instance_id: Incomplete | None = None,
-        role_tag: Incomplete | None = None,
-        bound_iam_principal_arn: Incomplete | None = None,
-        inferred_entity_type: Incomplete | None = None,
-        inferred_aws_region: Incomplete | None = None,
-        resolve_aws_unique_ids: Incomplete | None = None,
-        ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        period: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        allow_instance_migration: Incomplete | None = None,
-        disallow_reauthentication: Incomplete | None = None,
+        auth_type=None,
+        bound_ami_id=None,
+        bound_account_id=None,
+        bound_region=None,
+        bound_vpc_id=None,
+        bound_subnet_id=None,
+        bound_iam_role_arn=None,
+        bound_iam_instance_profile_arn=None,
+        bound_ec2_instance_id=None,
+        role_tag=None,
+        bound_iam_principal_arn=None,
+        inferred_entity_type=None,
+        inferred_aws_region=None,
+        resolve_aws_unique_ids=None,
+        ttl=None,
+        max_ttl=None,
+        period=None,
+        policies=None,
+        allow_instance_migration=None,
+        disallow_reauthentication=None,
         mount_point: str = "aws",
     ): ...
     def read_role(self, role, mount_point: str = "aws"): ...
@@ -79,32 +73,25 @@ class Aws(VaultApiBase):
     def create_role_tags(
         self,
         role,
-        policies: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        instance_id: Incomplete | None = None,
-        allow_instance_migration: Incomplete | None = None,
-        disallow_reauthentication: Incomplete | None = None,
+        policies=None,
+        max_ttl=None,
+        instance_id=None,
+        allow_instance_migration=None,
+        disallow_reauthentication=None,
         mount_point: str = "aws",
     ): ...
     def iam_login(
         self,
         access_key,
         secret_key,
-        session_token: Incomplete | None = None,
-        header_value: Incomplete | None = None,
-        role: Incomplete | None = None,
+        session_token=None,
+        header_value=None,
+        role=None,
         use_token: bool = True,
         region: str = "us-east-1",
         mount_point: str = "aws",
     ): ...
-    def ec2_login(
-        self,
-        pkcs7,
-        nonce: Incomplete | None = None,
-        role: Incomplete | None = None,
-        use_token: bool = True,
-        mount_point: str = "aws",
-    ): ...
+    def ec2_login(self, pkcs7, nonce=None, role=None, use_token: bool = True, mount_point: str = "aws"): ...
     def place_role_tags_in_blacklist(self, role_tag, mount_point: str = "aws"): ...
     def read_role_tag_blacklist(self, role_tag, mount_point: str = "aws"): ...
     def list_blacklist_tags(self, mount_point: str = "aws"): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/azure.pyi b/stubs/hvac/hvac/api/auth_methods/azure.pyi
index cfa1e2447ef6..5198b3c2e7a1 100644
--- a/stubs/hvac/hvac/api/auth_methods/azure.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/azure.pyi
@@ -6,31 +6,23 @@ DEFAULT_MOUNT_POINT: str
 logger: Incomplete
 
 class Azure(VaultApiBase):
-    def configure(
-        self,
-        tenant_id,
-        resource,
-        environment: Incomplete | None = None,
-        client_id: Incomplete | None = None,
-        client_secret: Incomplete | None = None,
-        mount_point="azure",
-    ): ...
+    def configure(self, tenant_id, resource, environment=None, client_id=None, client_secret=None, mount_point="azure"): ...
     def read_config(self, mount_point="azure"): ...
     def delete_config(self, mount_point="azure"): ...
     def create_role(
         self,
         name,
-        policies: Incomplete | None = None,
-        ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        period: Incomplete | None = None,
-        bound_service_principal_ids: Incomplete | None = None,
-        bound_group_ids: Incomplete | None = None,
-        bound_locations: Incomplete | None = None,
-        bound_subscription_ids: Incomplete | None = None,
-        bound_resource_groups: Incomplete | None = None,
-        bound_scale_sets: Incomplete | None = None,
-        num_uses: Incomplete | None = None,
+        policies=None,
+        ttl=None,
+        max_ttl=None,
+        period=None,
+        bound_service_principal_ids=None,
+        bound_group_ids=None,
+        bound_locations=None,
+        bound_subscription_ids=None,
+        bound_resource_groups=None,
+        bound_scale_sets=None,
+        num_uses=None,
         mount_point="azure",
     ): ...
     def read_role(self, name, mount_point="azure"): ...
@@ -40,10 +32,10 @@ class Azure(VaultApiBase):
         self,
         role,
         jwt,
-        subscription_id: Incomplete | None = None,
-        resource_group_name: Incomplete | None = None,
-        vm_name: Incomplete | None = None,
-        vmss_name: Incomplete | None = None,
+        subscription_id=None,
+        resource_group_name=None,
+        vm_name=None,
+        vmss_name=None,
         use_token: bool = True,
         mount_point="azure",
     ): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/gcp.pyi b/stubs/hvac/hvac/api/auth_methods/gcp.pyi
index 1c619a52f324..8a38915c3637 100644
--- a/stubs/hvac/hvac/api/auth_methods/gcp.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/gcp.pyi
@@ -7,10 +7,7 @@ logger: Incomplete
 
 class Gcp(VaultApiBase):
     def configure(
-        self,
-        credentials: Incomplete | None = None,
-        google_certs_endpoint="https://www.googleapis.com/oauth2/v3/certs",
-        mount_point="gcp",
+        self, credentials=None, google_certs_endpoint="https://www.googleapis.com/oauth2/v3/certs", mount_point="gcp"
     ): ...
     def read_config(self, mount_point="gcp"): ...
     def delete_config(self, mount_point="gcp"): ...
@@ -19,25 +16,21 @@ class Gcp(VaultApiBase):
         name,
         role_type,
         project_id,
-        ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        period: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        bound_service_accounts: Incomplete | None = None,
-        max_jwt_exp: Incomplete | None = None,
-        allow_gce_inference: Incomplete | None = None,
-        bound_zones: Incomplete | None = None,
-        bound_regions: Incomplete | None = None,
-        bound_instance_groups: Incomplete | None = None,
-        bound_labels: Incomplete | None = None,
+        ttl=None,
+        max_ttl=None,
+        period=None,
+        policies=None,
+        bound_service_accounts=None,
+        max_jwt_exp=None,
+        allow_gce_inference=None,
+        bound_zones=None,
+        bound_regions=None,
+        bound_instance_groups=None,
+        bound_labels=None,
         mount_point="gcp",
     ): ...
-    def edit_service_accounts_on_iam_role(
-        self, name, add: Incomplete | None = None, remove: Incomplete | None = None, mount_point="gcp"
-    ): ...
-    def edit_labels_on_gce_role(
-        self, name, add: Incomplete | None = None, remove: Incomplete | None = None, mount_point="gcp"
-    ): ...
+    def edit_service_accounts_on_iam_role(self, name, add=None, remove=None, mount_point="gcp"): ...
+    def edit_labels_on_gce_role(self, name, add=None, remove=None, mount_point="gcp"): ...
     def read_role(self, name, mount_point="gcp"): ...
     def list_roles(self, mount_point="gcp"): ...
     def delete_role(self, role, mount_point="gcp"): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/github.pyi b/stubs/hvac/hvac/api/auth_methods/github.pyi
index 4f1ba8eae6eb..3005c03d599a 100644
--- a/stubs/hvac/hvac/api/auth_methods/github.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/github.pyi
@@ -1,21 +1,12 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
 
 class Github(VaultApiBase):
-    def configure(
-        self,
-        organization,
-        base_url: Incomplete | None = None,
-        ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        mount_point="github",
-    ): ...
+    def configure(self, organization, base_url=None, ttl=None, max_ttl=None, mount_point="github"): ...
     def read_configuration(self, mount_point="github"): ...
-    def map_team(self, team_name, policies: Incomplete | None = None, mount_point="github"): ...
+    def map_team(self, team_name, policies=None, mount_point="github"): ...
     def read_team_mapping(self, team_name, mount_point="github"): ...
-    def map_user(self, user_name, policies: Incomplete | None = None, mount_point="github"): ...
+    def map_user(self, user_name, policies=None, mount_point="github"): ...
     def read_user_mapping(self, user_name, mount_point="github"): ...
     def login(self, token, use_token: bool = True, mount_point="github"): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/jwt.pyi b/stubs/hvac/hvac/api/auth_methods/jwt.pyi
index 98829d4d969c..9db9396fe388 100644
--- a/stubs/hvac/hvac/api/auth_methods/jwt.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/jwt.pyi
@@ -1,5 +1,3 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 class JWT(VaultApiBase):
@@ -7,55 +5,55 @@ class JWT(VaultApiBase):
     def resolve_path(self, path): ...
     def configure(
         self,
-        oidc_discovery_url: Incomplete | None = None,
-        oidc_discovery_ca_pem: Incomplete | None = None,
-        oidc_client_id: Incomplete | None = None,
-        oidc_client_secret: Incomplete | None = None,
-        oidc_response_mode: Incomplete | None = None,
-        oidc_response_types: Incomplete | None = None,
-        jwks_url: Incomplete | None = None,
-        jwks_ca_pem: Incomplete | None = None,
-        jwt_validation_pubkeys: Incomplete | None = None,
-        bound_issuer: Incomplete | None = None,
-        jwt_supported_algs: Incomplete | None = None,
-        default_role: Incomplete | None = None,
-        provider_config: Incomplete | None = None,
+        oidc_discovery_url=None,
+        oidc_discovery_ca_pem=None,
+        oidc_client_id=None,
+        oidc_client_secret=None,
+        oidc_response_mode=None,
+        oidc_response_types=None,
+        jwks_url=None,
+        jwks_ca_pem=None,
+        jwt_validation_pubkeys=None,
+        bound_issuer=None,
+        jwt_supported_algs=None,
+        default_role=None,
+        provider_config=None,
         path: str | None = None,
         namespace_in_state: bool | None = None,
     ): ...
-    def read_config(self, path: Incomplete | None = None): ...
+    def read_config(self, path=None): ...
     def create_role(
         self,
         name,
         user_claim,
         allowed_redirect_uris,
         role_type: str = "jwt",
-        bound_audiences: Incomplete | None = None,
-        clock_skew_leeway: Incomplete | None = None,
-        expiration_leeway: Incomplete | None = None,
-        not_before_leeway: Incomplete | None = None,
-        bound_subject: Incomplete | None = None,
-        bound_claims: Incomplete | None = None,
-        groups_claim: Incomplete | None = None,
-        claim_mappings: Incomplete | None = None,
-        oidc_scopes: Incomplete | None = None,
+        bound_audiences=None,
+        clock_skew_leeway=None,
+        expiration_leeway=None,
+        not_before_leeway=None,
+        bound_subject=None,
+        bound_claims=None,
+        groups_claim=None,
+        claim_mappings=None,
+        oidc_scopes=None,
         bound_claims_type: str = "string",
         verbose_oidc_logging: bool = False,
-        token_ttl: Incomplete | None = None,
-        token_max_ttl: Incomplete | None = None,
-        token_policies: Incomplete | None = None,
-        token_bound_cidrs: Incomplete | None = None,
-        token_explicit_max_ttl: Incomplete | None = None,
-        token_no_default_policy: Incomplete | None = None,
-        token_num_uses: Incomplete | None = None,
-        token_period: Incomplete | None = None,
-        token_type: Incomplete | None = None,
-        path: Incomplete | None = None,
-        user_claim_json_pointer: Incomplete | None = None,
+        token_ttl=None,
+        token_max_ttl=None,
+        token_policies=None,
+        token_bound_cidrs=None,
+        token_explicit_max_ttl=None,
+        token_no_default_policy=None,
+        token_num_uses=None,
+        token_period=None,
+        token_type=None,
+        path=None,
+        user_claim_json_pointer=None,
     ): ...
-    def read_role(self, name, path: Incomplete | None = None): ...
-    def list_roles(self, path: Incomplete | None = None): ...
-    def delete_role(self, name, path: Incomplete | None = None): ...
-    def oidc_authorization_url_request(self, role, redirect_uri, path: Incomplete | None = None): ...
-    def oidc_callback(self, state, nonce, code, path: Incomplete | None = None): ...
-    def jwt_login(self, role, jwt, use_token: bool = True, path: Incomplete | None = None): ...
+    def read_role(self, name, path=None): ...
+    def list_roles(self, path=None): ...
+    def delete_role(self, name, path=None): ...
+    def oidc_authorization_url_request(self, role, redirect_uri, path=None): ...
+    def oidc_callback(self, state, nonce, code, path=None): ...
+    def jwt_login(self, role, jwt, use_token: bool = True, path=None): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/kubernetes.pyi b/stubs/hvac/hvac/api/auth_methods/kubernetes.pyi
index f77e4c5d7082..490238b0200d 100644
--- a/stubs/hvac/hvac/api/auth_methods/kubernetes.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/kubernetes.pyi
@@ -1,5 +1,3 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
@@ -8,10 +6,10 @@ class Kubernetes(VaultApiBase):
     def configure(
         self,
         kubernetes_host,
-        kubernetes_ca_cert: Incomplete | None = None,
-        token_reviewer_jwt: Incomplete | None = None,
-        pem_keys: Incomplete | None = None,
-        issuer: Incomplete | None = None,
+        kubernetes_ca_cert=None,
+        token_reviewer_jwt=None,
+        pem_keys=None,
+        issuer=None,
         mount_point="kubernetes",
         disable_local_ca_jwt: bool = False,
     ): ...
@@ -21,13 +19,13 @@ class Kubernetes(VaultApiBase):
         name,
         bound_service_account_names,
         bound_service_account_namespaces,
-        ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        period: Incomplete | None = None,
-        policies: Incomplete | None = None,
+        ttl=None,
+        max_ttl=None,
+        period=None,
+        policies=None,
         token_type: str = "",
         mount_point="kubernetes",
-        alias_name_source: Incomplete | None = None,
+        alias_name_source=None,
     ): ...
     def read_role(self, name, mount_point="kubernetes"): ...
     def list_roles(self, mount_point="kubernetes"): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/ldap.pyi b/stubs/hvac/hvac/api/auth_methods/ldap.pyi
index 1a940a8a59fb..f2d19fc58cb6 100644
--- a/stubs/hvac/hvac/api/auth_methods/ldap.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/ldap.pyi
@@ -1,5 +1,3 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
@@ -7,53 +5,51 @@ DEFAULT_MOUNT_POINT: str
 class Ldap(VaultApiBase):
     def configure(
         self,
-        userdn: Incomplete | None = None,
-        groupdn: Incomplete | None = None,
-        url: Incomplete | None = None,
-        case_sensitive_names: Incomplete | None = None,
-        starttls: Incomplete | None = None,
-        tls_min_version: Incomplete | None = None,
-        tls_max_version: Incomplete | None = None,
-        insecure_tls: Incomplete | None = None,
-        certificate: Incomplete | None = None,
-        binddn: Incomplete | None = None,
-        bindpass: Incomplete | None = None,
-        userattr: Incomplete | None = None,
-        discoverdn: Incomplete | None = None,
+        userdn=None,
+        groupdn=None,
+        url=None,
+        case_sensitive_names=None,
+        starttls=None,
+        tls_min_version=None,
+        tls_max_version=None,
+        insecure_tls=None,
+        certificate=None,
+        binddn=None,
+        bindpass=None,
+        userattr=None,
+        discoverdn=None,
         deny_null_bind: bool = True,
-        upndomain: Incomplete | None = None,
-        groupfilter: Incomplete | None = None,
-        groupattr: Incomplete | None = None,
-        use_token_groups: Incomplete | None = None,
-        token_ttl: Incomplete | None = None,
-        token_max_ttl: Incomplete | None = None,
+        upndomain=None,
+        groupfilter=None,
+        groupattr=None,
+        use_token_groups=None,
+        token_ttl=None,
+        token_max_ttl=None,
         mount_point="ldap",
         *,
-        anonymous_group_search: Incomplete | None = None,
-        client_tls_cert: Incomplete | None = None,
-        client_tls_key: Incomplete | None = None,
-        connection_timeout: Incomplete | None = None,
-        dereference_aliases: Incomplete | None = None,
-        max_page_size: Incomplete | None = None,
-        request_timeout: Incomplete | None = None,
-        token_bound_cidrs: Incomplete | None = None,
-        token_explicit_max_ttl: Incomplete | None = None,
-        token_no_default_policy: Incomplete | None = None,
-        token_num_uses: Incomplete | None = None,
-        token_period: Incomplete | None = None,
-        token_policies: Incomplete | None = None,
-        token_type: Incomplete | None = None,
-        userfilter: Incomplete | None = None,
-        username_as_alias: Incomplete | None = None,
+        anonymous_group_search=None,
+        client_tls_cert=None,
+        client_tls_key=None,
+        connection_timeout=None,
+        dereference_aliases=None,
+        max_page_size=None,
+        request_timeout=None,
+        token_bound_cidrs=None,
+        token_explicit_max_ttl=None,
+        token_no_default_policy=None,
+        token_num_uses=None,
+        token_period=None,
+        token_policies=None,
+        token_type=None,
+        userfilter=None,
+        username_as_alias=None,
     ): ...
     def read_configuration(self, mount_point="ldap"): ...
-    def create_or_update_group(self, name, policies: Incomplete | None = None, mount_point="ldap"): ...
+    def create_or_update_group(self, name, policies=None, mount_point="ldap"): ...
     def list_groups(self, mount_point="ldap"): ...
     def read_group(self, name, mount_point="ldap"): ...
     def delete_group(self, name, mount_point="ldap"): ...
-    def create_or_update_user(
-        self, username, policies: Incomplete | None = None, groups: Incomplete | None = None, mount_point="ldap"
-    ): ...
+    def create_or_update_user(self, username, policies=None, groups=None, mount_point="ldap"): ...
     def list_users(self, mount_point="ldap"): ...
     def read_user(self, username, mount_point="ldap"): ...
     def delete_user(self, username, mount_point="ldap"): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/legacy_mfa.pyi b/stubs/hvac/hvac/api/auth_methods/legacy_mfa.pyi
index 37e8bcb404ac..42598cb7fd93 100644
--- a/stubs/hvac/hvac/api/auth_methods/legacy_mfa.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/legacy_mfa.pyi
@@ -9,7 +9,5 @@ class LegacyMfa(VaultApiBase):
     def configure(self, mount_point, mfa_type: str = "duo", force: bool = False): ...
     def read_configuration(self, mount_point): ...
     def configure_duo_access(self, mount_point, host, integration_key, secret_key): ...
-    def configure_duo_behavior(
-        self, mount_point, push_info: Incomplete | None = None, user_agent: Incomplete | None = None, username_format: str = "%s"
-    ): ...
+    def configure_duo_behavior(self, mount_point, push_info=None, user_agent=None, username_format: str = "%s"): ...
     def read_duo_behavior_configuration(self, mount_point): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/oidc.pyi b/stubs/hvac/hvac/api/auth_methods/oidc.pyi
index 4dbe6b14917a..c53113665a6e 100644
--- a/stubs/hvac/hvac/api/auth_methods/oidc.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/oidc.pyi
@@ -1,5 +1,3 @@
-from _typeshed import Incomplete
-
 from hvac.api.auth_methods.jwt import JWT
 
 class OIDC(JWT):
@@ -10,26 +8,26 @@ class OIDC(JWT):
         user_claim,
         allowed_redirect_uris,
         role_type: str = "oidc",
-        bound_audiences: Incomplete | None = None,
-        clock_skew_leeway: Incomplete | None = None,
-        expiration_leeway: Incomplete | None = None,
-        not_before_leeway: Incomplete | None = None,
-        bound_subject: Incomplete | None = None,
-        bound_claims: Incomplete | None = None,
-        groups_claim: Incomplete | None = None,
-        claim_mappings: Incomplete | None = None,
-        oidc_scopes: Incomplete | None = None,
+        bound_audiences=None,
+        clock_skew_leeway=None,
+        expiration_leeway=None,
+        not_before_leeway=None,
+        bound_subject=None,
+        bound_claims=None,
+        groups_claim=None,
+        claim_mappings=None,
+        oidc_scopes=None,
         bound_claims_type: str = "string",
         verbose_oidc_logging: bool = False,
-        token_ttl: Incomplete | None = None,
-        token_max_ttl: Incomplete | None = None,
-        token_policies: Incomplete | None = None,
-        token_bound_cidrs: Incomplete | None = None,
-        token_explicit_max_ttl: Incomplete | None = None,
-        token_no_default_policy: Incomplete | None = None,
-        token_num_uses: Incomplete | None = None,
-        token_period: Incomplete | None = None,
-        token_type: Incomplete | None = None,
-        path: Incomplete | None = None,
-        user_claim_json_pointer: Incomplete | None = None,
+        token_ttl=None,
+        token_max_ttl=None,
+        token_policies=None,
+        token_bound_cidrs=None,
+        token_explicit_max_ttl=None,
+        token_no_default_policy=None,
+        token_num_uses=None,
+        token_period=None,
+        token_type=None,
+        path=None,
+        user_claim_json_pointer=None,
     ) -> None: ...
diff --git a/stubs/hvac/hvac/api/auth_methods/okta.pyi b/stubs/hvac/hvac/api/auth_methods/okta.pyi
index c47f7ffaaaf7..d48dfbf6de1f 100644
--- a/stubs/hvac/hvac/api/auth_methods/okta.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/okta.pyi
@@ -1,29 +1,18 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
 
 class Okta(VaultApiBase):
     def configure(
-        self,
-        org_name,
-        api_token: Incomplete | None = None,
-        base_url: Incomplete | None = None,
-        ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        bypass_okta_mfa: Incomplete | None = None,
-        mount_point="okta",
+        self, org_name, api_token=None, base_url=None, ttl=None, max_ttl=None, bypass_okta_mfa=None, mount_point="okta"
     ): ...
     def read_config(self, mount_point="okta"): ...
     def list_users(self, mount_point="okta"): ...
-    def register_user(
-        self, username, groups: Incomplete | None = None, policies: Incomplete | None = None, mount_point="okta"
-    ): ...
+    def register_user(self, username, groups=None, policies=None, mount_point="okta"): ...
     def read_user(self, username, mount_point="okta"): ...
     def delete_user(self, username, mount_point="okta"): ...
     def list_groups(self, mount_point="okta"): ...
-    def register_group(self, name, policies: Incomplete | None = None, mount_point="okta"): ...
+    def register_group(self, name, policies=None, mount_point="okta"): ...
     def read_group(self, name, mount_point="okta"): ...
     def delete_group(self, name, mount_point="okta"): ...
     def login(self, username, password, use_token: bool = True, mount_point="okta"): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/radius.pyi b/stubs/hvac/hvac/api/auth_methods/radius.pyi
index bbfaff2fee4d..1db73c5bef9f 100644
--- a/stubs/hvac/hvac/api/auth_methods/radius.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/radius.pyi
@@ -1,22 +1,13 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
 
 class Radius(VaultApiBase):
     def configure(
-        self,
-        host,
-        secret,
-        port: Incomplete | None = None,
-        unregistered_user_policies: Incomplete | None = None,
-        dial_timeout: Incomplete | None = None,
-        nas_port: Incomplete | None = None,
-        mount_point="radius",
+        self, host, secret, port=None, unregistered_user_policies=None, dial_timeout=None, nas_port=None, mount_point="radius"
     ): ...
     def read_configuration(self, mount_point="radius"): ...
-    def register_user(self, username, policies: Incomplete | None = None, mount_point="radius"): ...
+    def register_user(self, username, policies=None, mount_point="radius"): ...
     def list_users(self, mount_point="radius"): ...
     def read_user(self, username, mount_point="radius"): ...
     def delete_user(self, username, mount_point="radius"): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/token.pyi b/stubs/hvac/hvac/api/auth_methods/token.pyi
index 9d630231cfc6..5500be2fcab1 100644
--- a/stubs/hvac/hvac/api/auth_methods/token.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/token.pyi
@@ -1,5 +1,3 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
@@ -7,50 +5,48 @@ DEFAULT_MOUNT_POINT: str
 class Token(VaultApiBase):
     def create(
         self,
-        id: Incomplete | None = None,
-        role_name: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        meta: Incomplete | None = None,
+        id=None,
+        role_name=None,
+        policies=None,
+        meta=None,
         no_parent: bool = False,
         no_default_policy: bool = False,
         renewable: bool = True,
-        ttl: Incomplete | None = None,
-        type: Incomplete | None = None,
-        explicit_max_ttl: Incomplete | None = None,
+        ttl=None,
+        type=None,
+        explicit_max_ttl=None,
         display_name: str = "token",
         num_uses: int = 0,
-        period: Incomplete | None = None,
-        entity_alias: Incomplete | None = None,
-        wrap_ttl: Incomplete | None = None,
+        period=None,
+        entity_alias=None,
+        wrap_ttl=None,
         mount_point="token",
     ): ...
     def create_orphan(
         self,
-        id: Incomplete | None = None,
-        role_name: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        meta: Incomplete | None = None,
+        id=None,
+        role_name=None,
+        policies=None,
+        meta=None,
         no_default_policy: bool = False,
         renewable: bool = True,
-        ttl: Incomplete | None = None,
-        type: Incomplete | None = None,
-        explicit_max_ttl: Incomplete | None = None,
+        ttl=None,
+        type=None,
+        explicit_max_ttl=None,
         display_name: str = "token",
         num_uses: int = 0,
-        period: Incomplete | None = None,
-        entity_alias: Incomplete | None = None,
-        wrap_ttl: Incomplete | None = None,
+        period=None,
+        entity_alias=None,
+        wrap_ttl=None,
         mount_point="token",
     ): ...
     def list_accessors(self, mount_point="token"): ...
     def lookup(self, token, mount_point="token"): ...
     def lookup_self(self, mount_point="token"): ...
     def lookup_accessor(self, accessor, mount_point="token"): ...
-    def renew(self, token, increment: Incomplete | None = None, wrap_ttl: Incomplete | None = None, mount_point="token"): ...
-    def renew_self(self, increment: Incomplete | None = None, wrap_ttl: Incomplete | None = None, mount_point="token"): ...
-    def renew_accessor(
-        self, accessor, increment: Incomplete | None = None, wrap_ttl: Incomplete | None = None, mount_point="token"
-    ): ...
+    def renew(self, token, increment=None, wrap_ttl=None, mount_point="token"): ...
+    def renew_self(self, increment=None, wrap_ttl=None, mount_point="token"): ...
+    def renew_accessor(self, accessor, increment=None, wrap_ttl=None, mount_point="token"): ...
     def revoke(self, token, mount_point="token"): ...
     def revoke_self(self, mount_point="token"): ...
     def revoke_accessor(self, accessor, mount_point="token"): ...
@@ -60,15 +56,15 @@ class Token(VaultApiBase):
     def create_or_update_role(
         self,
         role_name,
-        allowed_policies: Incomplete | None = None,
-        disallowed_policies: Incomplete | None = None,
+        allowed_policies=None,
+        disallowed_policies=None,
         orphan: bool = False,
         renewable: bool = True,
-        path_suffix: Incomplete | None = None,
-        allowed_entity_aliases: Incomplete | None = None,
+        path_suffix=None,
+        allowed_entity_aliases=None,
         mount_point="token",
-        token_period: Incomplete | None = None,
-        token_explicit_max_ttl: Incomplete | None = None,
+        token_period=None,
+        token_explicit_max_ttl=None,
     ): ...
     def delete_role(self, role_name, mount_point="token"): ...
     def tidy(self, mount_point="token"): ...
diff --git a/stubs/hvac/hvac/api/auth_methods/userpass.pyi b/stubs/hvac/hvac/api/auth_methods/userpass.pyi
index 60677915cb61..d8991c0b9f2f 100644
--- a/stubs/hvac/hvac/api/auth_methods/userpass.pyi
+++ b/stubs/hvac/hvac/api/auth_methods/userpass.pyi
@@ -1,13 +1,9 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
 
 class Userpass(VaultApiBase):
-    def create_or_update_user(
-        self, username, password: Incomplete | None = None, policies: Incomplete | None = None, mount_point="userpass", **kwargs
-    ): ...
+    def create_or_update_user(self, username, password=None, policies=None, mount_point="userpass", **kwargs): ...
     def list_user(self, mount_point="userpass"): ...
     def read_user(self, username, mount_point="userpass"): ...
     def delete_user(self, username, mount_point="userpass"): ...
diff --git a/stubs/hvac/hvac/api/secrets_engines/active_directory.pyi b/stubs/hvac/hvac/api/secrets_engines/active_directory.pyi
index 1f9553368fd9..f7d527c75232 100644
--- a/stubs/hvac/hvac/api/secrets_engines/active_directory.pyi
+++ b/stubs/hvac/hvac/api/secrets_engines/active_directory.pyi
@@ -1,5 +1,3 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
@@ -7,21 +5,19 @@ DEFAULT_MOUNT_POINT: str
 class ActiveDirectory(VaultApiBase):
     def configure(
         self,
-        binddn: Incomplete | None = None,
-        bindpass: Incomplete | None = None,
-        url: Incomplete | None = None,
-        userdn: Incomplete | None = None,
-        upndomain: Incomplete | None = None,
-        ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
+        binddn=None,
+        bindpass=None,
+        url=None,
+        userdn=None,
+        upndomain=None,
+        ttl=None,
+        max_ttl=None,
         mount_point="ad",
         *args,
         **kwargs,
     ): ...
     def read_config(self, mount_point="ad"): ...
-    def create_or_update_role(
-        self, name, service_account_name: Incomplete | None = None, ttl: Incomplete | None = None, mount_point="ad"
-    ): ...
+    def create_or_update_role(self, name, service_account_name=None, ttl=None, mount_point="ad"): ...
     def read_role(self, name, mount_point="ad"): ...
     def list_roles(self, mount_point="ad"): ...
     def delete_role(self, name, mount_point="ad"): ...
diff --git a/stubs/hvac/hvac/api/secrets_engines/aws.pyi b/stubs/hvac/hvac/api/secrets_engines/aws.pyi
index dd2dde1d8e3f..373d55a4e0e5 100644
--- a/stubs/hvac/hvac/api/secrets_engines/aws.pyi
+++ b/stubs/hvac/hvac/api/secrets_engines/aws.pyi
@@ -1,17 +1,8 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 class Aws(VaultApiBase):
     def configure_root_iam_credentials(
-        self,
-        access_key,
-        secret_key,
-        region: Incomplete | None = None,
-        iam_endpoint: Incomplete | None = None,
-        sts_endpoint: Incomplete | None = None,
-        max_retries: Incomplete | None = None,
-        mount_point="aws",
+        self, access_key, secret_key, region=None, iam_endpoint=None, sts_endpoint=None, max_retries=None, mount_point="aws"
     ): ...
     def rotate_root_iam_credentials(self, mount_point="aws"): ...
     def configure_lease(self, lease, lease_max, mount_point="aws"): ...
@@ -20,24 +11,18 @@ class Aws(VaultApiBase):
         self,
         name,
         credential_type,
-        policy_document: Incomplete | None = None,
-        default_sts_ttl: Incomplete | None = None,
-        max_sts_ttl: Incomplete | None = None,
-        role_arns: Incomplete | None = None,
-        policy_arns: Incomplete | None = None,
+        policy_document=None,
+        default_sts_ttl=None,
+        max_sts_ttl=None,
+        role_arns=None,
+        policy_arns=None,
         legacy_params: bool = False,
-        iam_tags: Incomplete | None = None,
+        iam_tags=None,
         mount_point="aws",
     ): ...
     def read_role(self, name, mount_point="aws"): ...
     def list_roles(self, mount_point="aws"): ...
     def delete_role(self, name, mount_point="aws"): ...
     def generate_credentials(
-        self,
-        name,
-        role_arn: Incomplete | None = None,
-        ttl: Incomplete | None = None,
-        endpoint: str = "creds",
-        mount_point="aws",
-        role_session_name: Incomplete | None = None,
+        self, name, role_arn=None, ttl=None, endpoint: str = "creds", mount_point="aws", role_session_name=None
     ): ...
diff --git a/stubs/hvac/hvac/api/secrets_engines/azure.pyi b/stubs/hvac/hvac/api/secrets_engines/azure.pyi
index ba49483b2a5f..5ba4e4eac2dd 100644
--- a/stubs/hvac/hvac/api/secrets_engines/azure.pyi
+++ b/stubs/hvac/hvac/api/secrets_engines/azure.pyi
@@ -1,23 +1,13 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
 
 class Azure(VaultApiBase):
     def configure(
-        self,
-        subscription_id,
-        tenant_id,
-        client_id: Incomplete | None = None,
-        client_secret: Incomplete | None = None,
-        environment: Incomplete | None = None,
-        mount_point="azure",
+        self, subscription_id, tenant_id, client_id=None, client_secret=None, environment=None, mount_point="azure"
     ): ...
     def read_config(self, mount_point="azure"): ...
     def delete_config(self, mount_point="azure"): ...
-    def create_or_update_role(
-        self, name, azure_roles, ttl: Incomplete | None = None, max_ttl: Incomplete | None = None, mount_point="azure"
-    ): ...
+    def create_or_update_role(self, name, azure_roles, ttl=None, max_ttl=None, mount_point="azure"): ...
     def list_roles(self, mount_point="azure"): ...
     def generate_credentials(self, name, mount_point="azure"): ...
diff --git a/stubs/hvac/hvac/api/secrets_engines/consul.pyi b/stubs/hvac/hvac/api/secrets_engines/consul.pyi
index b51d841e7a08..0a27a6da6b70 100644
--- a/stubs/hvac/hvac/api/secrets_engines/consul.pyi
+++ b/stubs/hvac/hvac/api/secrets_engines/consul.pyi
@@ -1,21 +1,11 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
 
 class Consul(VaultApiBase):
-    def configure_access(self, address, token, scheme: Incomplete | None = None, mount_point="consul"): ...
+    def configure_access(self, address, token, scheme=None, mount_point="consul"): ...
     def create_or_update_role(
-        self,
-        name,
-        policy: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        token_type: Incomplete | None = None,
-        local: Incomplete | None = None,
-        ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        mount_point="consul",
+        self, name, policy=None, policies=None, token_type=None, local=None, ttl=None, max_ttl=None, mount_point="consul"
     ): ...
     def read_role(self, name, mount_point="consul"): ...
     def list_roles(self, mount_point="consul"): ...
diff --git a/stubs/hvac/hvac/api/secrets_engines/database.pyi b/stubs/hvac/hvac/api/secrets_engines/database.pyi
index 6f016ec1eedc..ff88fe47d9e6 100644
--- a/stubs/hvac/hvac/api/secrets_engines/database.pyi
+++ b/stubs/hvac/hvac/api/secrets_engines/database.pyi
@@ -1,5 +1,3 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
@@ -9,9 +7,9 @@ class Database(VaultApiBase):
         self,
         name,
         plugin_name,
-        verify_connection: Incomplete | None = None,
-        allowed_roles: Incomplete | None = None,
-        root_rotation_statements: Incomplete | None = None,
+        verify_connection=None,
+        allowed_roles=None,
+        root_rotation_statements=None,
         mount_point="database",
         *args,
         **kwargs,
@@ -26,11 +24,11 @@ class Database(VaultApiBase):
         name,
         db_name,
         creation_statements,
-        default_ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        revocation_statements: Incomplete | None = None,
-        rollback_statements: Incomplete | None = None,
-        renew_statements: Incomplete | None = None,
+        default_ttl=None,
+        max_ttl=None,
+        revocation_statements=None,
+        rollback_statements=None,
+        renew_statements=None,
         mount_point="database",
     ): ...
     def create_static_role(
diff --git a/stubs/hvac/hvac/api/secrets_engines/gcp.pyi b/stubs/hvac/hvac/api/secrets_engines/gcp.pyi
index 0c027af5b223..8ccc89bbf6e3 100644
--- a/stubs/hvac/hvac/api/secrets_engines/gcp.pyi
+++ b/stubs/hvac/hvac/api/secrets_engines/gcp.pyi
@@ -1,28 +1,12 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
 
 class Gcp(VaultApiBase):
-    def configure(
-        self,
-        credentials: Incomplete | None = None,
-        ttl: Incomplete | None = None,
-        max_ttl: Incomplete | None = None,
-        mount_point="gcp",
-    ): ...
+    def configure(self, credentials=None, ttl=None, max_ttl=None, mount_point="gcp"): ...
     def rotate_root_credentials(self, mount_point="gcp"): ...
     def read_config(self, mount_point="gcp"): ...
-    def create_or_update_roleset(
-        self,
-        name,
-        project,
-        bindings,
-        secret_type: Incomplete | None = None,
-        token_scopes: Incomplete | None = None,
-        mount_point="gcp",
-    ): ...
+    def create_or_update_roleset(self, name, project, bindings, secret_type=None, token_scopes=None, mount_point="gcp"): ...
     def rotate_roleset_account(self, name, mount_point="gcp"): ...
     def rotate_roleset_account_key(self, name, mount_point="gcp"): ...
     def read_roleset(self, name, mount_point="gcp"): ...
@@ -38,13 +22,7 @@ class Gcp(VaultApiBase):
         mount_point="gcp",
     ): ...
     def create_or_update_static_account(
-        self,
-        name,
-        service_account_email,
-        bindings: Incomplete | None = None,
-        secret_type: Incomplete | None = None,
-        token_scopes: Incomplete | None = None,
-        mount_point="gcp",
+        self, name, service_account_email, bindings=None, secret_type=None, token_scopes=None, mount_point="gcp"
     ): ...
     def rotate_static_account_key(self, name, mount_point="gcp"): ...
     def read_static_account(self, name, mount_point="gcp"): ...
@@ -60,12 +38,7 @@ class Gcp(VaultApiBase):
         mount_point="gcp",
     ): ...
     def create_or_update_impersonated_account(
-        self,
-        name,
-        service_account_email,
-        token_scopes: Incomplete | None = None,
-        ttl: Incomplete | None = None,
-        mount_point="gcp",
+        self, name, service_account_email, token_scopes=None, ttl=None, mount_point="gcp"
     ): ...
     def read_impersonated_account(self, name, mount_point="gcp"): ...
     def list_impersonated_accounts(self, mount_point="gcp"): ...
diff --git a/stubs/hvac/hvac/api/secrets_engines/identity.pyi b/stubs/hvac/hvac/api/secrets_engines/identity.pyi
index d848c279494d..acafad3d33c9 100644
--- a/stubs/hvac/hvac/api/secrets_engines/identity.pyi
+++ b/stubs/hvac/hvac/api/secrets_engines/identity.pyi
@@ -6,48 +6,22 @@ logger: Incomplete
 
 class Identity(VaultApiBase):
     def create_or_update_entity(
-        self,
-        name,
-        entity_id: Incomplete | None = None,
-        metadata: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        disabled: Incomplete | None = None,
-        mount_point: str = "identity",
+        self, name, entity_id=None, metadata=None, policies=None, disabled=None, mount_point: str = "identity"
     ): ...
     def create_or_update_entity_by_name(
-        self,
-        name,
-        metadata: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        disabled: Incomplete | None = None,
-        mount_point: str = "identity",
+        self, name, metadata=None, policies=None, disabled=None, mount_point: str = "identity"
     ): ...
     def read_entity(self, entity_id, mount_point: str = "identity"): ...
     def read_entity_by_name(self, name, mount_point: str = "identity"): ...
-    def update_entity(
-        self,
-        entity_id,
-        name: Incomplete | None = None,
-        metadata: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        disabled: Incomplete | None = None,
-        mount_point: str = "identity",
-    ): ...
+    def update_entity(self, entity_id, name=None, metadata=None, policies=None, disabled=None, mount_point: str = "identity"): ...
     def delete_entity(self, entity_id, mount_point: str = "identity"): ...
     def delete_entity_by_name(self, name, mount_point: str = "identity"): ...
     def list_entities(self, method: str = "LIST", mount_point: str = "identity"): ...
     def list_entities_by_name(self, method: str = "LIST", mount_point: str = "identity"): ...
     def merge_entities(
-        self,
-        from_entity_ids,
-        to_entity_id,
-        force: Incomplete | None = None,
-        mount_point: str = "identity",
-        conflicting_alias_ids_to_keep: Incomplete | None = None,
-    ): ...
-    def create_or_update_entity_alias(
-        self, name, canonical_id, mount_accessor, alias_id: Incomplete | None = None, mount_point: str = "identity"
+        self, from_entity_ids, to_entity_id, force=None, mount_point: str = "identity", conflicting_alias_ids_to_keep=None
     ): ...
+    def create_or_update_entity_alias(self, name, canonical_id, mount_accessor, alias_id=None, mount_point: str = "identity"): ...
     def read_entity_alias(self, alias_id, mount_point: str = "identity"): ...
     def update_entity_alias(self, alias_id, name, canonical_id, mount_accessor, mount_point: str = "identity"): ...
     def list_entity_aliases(self, method: str = "LIST", mount_point: str = "identity"): ...
@@ -57,12 +31,12 @@ class Identity(VaultApiBase):
     def create_or_update_group(
         self,
         name,
-        group_id: Incomplete | None = None,
+        group_id=None,
         group_type: str = "internal",
-        metadata: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        member_group_ids: Incomplete | None = None,
-        member_entity_ids: Incomplete | None = None,
+        metadata=None,
+        policies=None,
+        member_group_ids=None,
+        member_entity_ids=None,
         mount_point: str = "identity",
     ): ...
     def read_group(self, group_id, mount_point: str = "identity"): ...
@@ -71,10 +45,10 @@ class Identity(VaultApiBase):
         group_id,
         name,
         group_type: str = "internal",
-        metadata: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        member_group_ids: Incomplete | None = None,
-        member_entity_ids: Incomplete | None = None,
+        metadata=None,
+        policies=None,
+        member_group_ids=None,
+        member_entity_ids=None,
         mount_point: str = "identity",
     ): ...
     def delete_group(self, group_id, mount_point: str = "identity"): ...
@@ -84,59 +58,35 @@ class Identity(VaultApiBase):
         self,
         name,
         group_type: str = "internal",
-        metadata: Incomplete | None = None,
-        policies: Incomplete | None = None,
-        member_group_ids: Incomplete | None = None,
-        member_entity_ids: Incomplete | None = None,
+        metadata=None,
+        policies=None,
+        member_group_ids=None,
+        member_entity_ids=None,
         mount_point: str = "identity",
     ): ...
     def read_group_by_name(self, name, mount_point: str = "identity"): ...
     def delete_group_by_name(self, name, mount_point: str = "identity"): ...
     def create_or_update_group_alias(
-        self,
-        name,
-        alias_id: Incomplete | None = None,
-        mount_accessor: Incomplete | None = None,
-        canonical_id: Incomplete | None = None,
-        mount_point: str = "identity",
-    ): ...
-    def update_group_alias(
-        self,
-        entity_id,
-        name,
-        mount_accessor: Incomplete | None = None,
-        canonical_id: Incomplete | None = None,
-        mount_point="identity",
+        self, name, alias_id=None, mount_accessor=None, canonical_id=None, mount_point: str = "identity"
     ): ...
+    def update_group_alias(self, entity_id, name, mount_accessor=None, canonical_id=None, mount_point="identity"): ...
     def read_group_alias(self, alias_id, mount_point: str = "identity"): ...
     def delete_group_alias(self, entity_id, mount_point: str = "identity"): ...
     def list_group_aliases(self, method: str = "LIST", mount_point: str = "identity"): ...
     def lookup_entity(
-        self,
-        name: Incomplete | None = None,
-        entity_id: Incomplete | None = None,
-        alias_id: Incomplete | None = None,
-        alias_name: Incomplete | None = None,
-        alias_mount_accessor: Incomplete | None = None,
-        mount_point: str = "identity",
+        self, name=None, entity_id=None, alias_id=None, alias_name=None, alias_mount_accessor=None, mount_point: str = "identity"
     ): ...
     def lookup_group(
-        self,
-        name: Incomplete | None = None,
-        group_id: Incomplete | None = None,
-        alias_id: Incomplete | None = None,
-        alias_name: Incomplete | None = None,
-        alias_mount_accessor: Incomplete | None = None,
-        mount_point: str = "identity",
+        self, name=None, group_id=None, alias_id=None, alias_name=None, alias_mount_accessor=None, mount_point: str = "identity"
     ): ...
-    def configure_tokens_backend(self, issuer: Incomplete | None = None, mount_point: str = "identity"): ...
+    def configure_tokens_backend(self, issuer=None, mount_point: str = "identity"): ...
     def read_tokens_backend_configuration(self, mount_point: str = "identity"): ...
     def create_named_key(
         self,
         name,
         rotation_period: str = "24h",
         verification_ttl: str = "24h",
-        allowed_client_ids: Incomplete | None = None,
+        allowed_client_ids=None,
         algorithm: str = "RS256",
         mount_point: str = "identity",
     ): ...
@@ -145,18 +95,12 @@ class Identity(VaultApiBase):
     def list_named_keys(self, mount_point: str = "identity"): ...
     def rotate_named_key(self, name, verification_ttl, mount_point: str = "identity"): ...
     def create_or_update_role(
-        self,
-        name,
-        key,
-        template: Incomplete | None = None,
-        client_id: Incomplete | None = None,
-        ttl: str = "24h",
-        mount_point: str = "identity",
+        self, name, key, template=None, client_id=None, ttl: str = "24h", mount_point: str = "identity"
     ): ...
     def read_role(self, name, mount_point: str = "identity"): ...
     def delete_role(self, name, mount_point: str = "identity"): ...
     def list_roles(self, mount_point: str = "identity"): ...
     def generate_signed_id_token(self, name, mount_point: str = "identity"): ...
-    def introspect_signed_id_token(self, token, client_id: Incomplete | None = None, mount_point: str = "identity"): ...
+    def introspect_signed_id_token(self, token, client_id=None, mount_point: str = "identity"): ...
     def read_well_known_configurations(self, mount_point: str = "identity"): ...
     def read_active_public_keys(self, mount_point: str = "identity"): ...
diff --git a/stubs/hvac/hvac/api/secrets_engines/kv_v1.pyi b/stubs/hvac/hvac/api/secrets_engines/kv_v1.pyi
index c64b3c75c5aa..ed8da7fdf350 100644
--- a/stubs/hvac/hvac/api/secrets_engines/kv_v1.pyi
+++ b/stubs/hvac/hvac/api/secrets_engines/kv_v1.pyi
@@ -1,5 +1,3 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
@@ -7,5 +5,5 @@ DEFAULT_MOUNT_POINT: str
 class KvV1(VaultApiBase):
     def read_secret(self, path, mount_point="secret"): ...
     def list_secrets(self, path, mount_point="secret"): ...
-    def create_or_update_secret(self, path, secret, method: Incomplete | None = None, mount_point="secret"): ...
+    def create_or_update_secret(self, path, secret, method=None, mount_point="secret"): ...
     def delete_secret(self, path, mount_point="secret"): ...
diff --git a/stubs/hvac/hvac/api/secrets_engines/kv_v2.pyi b/stubs/hvac/hvac/api/secrets_engines/kv_v2.pyi
index 6996f136e0fb..32c6a06a8014 100644
--- a/stubs/hvac/hvac/api/secrets_engines/kv_v2.pyi
+++ b/stubs/hvac/hvac/api/secrets_engines/kv_v2.pyi
@@ -1,23 +1,13 @@
-from _typeshed import Incomplete
-
 from hvac.api.vault_api_base import VaultApiBase
 
 DEFAULT_MOUNT_POINT: str
 
 class KvV2(VaultApiBase):
-    def configure(
-        self,
-        max_versions: int = 10,
-        cas_required: Incomplete | None = None,
-        delete_version_after: str = "0s",
-        mount_point="secret",
-    ): ...
+    def configure(self, max_versions: int = 10, cas_required=None, delete_version_after: str = "0s", mount_point="secret"): ...
     def read_configuration(self, mount_point="secret"): ...
-    def read_secret(self, path, mount_point="secret", raise_on_deleted_version: Incomplete | None = None): ...
-    def read_secret_version(
-        self, path, version: Incomplete | None = None, mount_point="secret", raise_on_deleted_version: Incomplete | None = None
-    ): ...
-    def create_or_update_secret(self, path, secret, cas: Incomplete | None = None, mount_point="secret"): ...
+    def read_secret(self, path, mount_point="secret", raise_on_deleted_version=None): ...
+    def read_secret_version(self, path, version=None, mount_point="secret", raise_on_deleted_version=None): ...
+    def create_or_update_secret(self, path, secret, cas=None, mount_point="secret"): ...
     def patch(self, path, secret, mount_point="secret"): ...
     def delete_latest_version_of_secret(self, path, mount_point="secret"): ...
     def delete_secret_versions(self, path, versions, mount_point="secret"): ...
@@ -28,10 +18,10 @@ class KvV2(VaultApiBase): def update_metadata( self, path, - max_versions: Incomplete | None = None, - cas_required: Incomplete | None = None, + max_versions=None, + cas_required=None, delete_version_after: str = "0s", mount_point="secret", - custom_metadata: Incomplete | None = None, + custom_metadata=None, ): ... def delete_metadata_and_all_versions(self, path, mount_point="secret"): ... diff --git a/stubs/hvac/hvac/api/secrets_engines/pki.pyi b/stubs/hvac/hvac/api/secrets_engines/pki.pyi index b8ee9bc1ccc5..f8ea43398709 100644 --- a/stubs/hvac/hvac/api/secrets_engines/pki.pyi +++ b/stubs/hvac/hvac/api/secrets_engines/pki.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from hvac.api.vault_api_base import VaultApiBase DEFAULT_MOUNT_POINT: str @@ -11,40 +9,28 @@ class Pki(VaultApiBase): def list_certificates(self, mount_point="pki"): ... def submit_ca_information(self, pem_bundle, mount_point="pki"): ... def read_crl_configuration(self, mount_point="pki"): ... - def set_crl_configuration( - self, - expiry: Incomplete | None = None, - disable: Incomplete | None = None, - extra_params: Incomplete | None = None, - mount_point="pki", - ): ... + def set_crl_configuration(self, expiry=None, disable=None, extra_params=None, mount_point="pki"): ... def read_urls(self, mount_point="pki"): ... def set_urls(self, params, mount_point="pki"): ... def read_crl(self, mount_point="pki"): ... def rotate_crl(self, mount_point="pki"): ... - def generate_intermediate( - self, type, common_name, extra_params: Incomplete | None = None, mount_point="pki", wrap_ttl: Incomplete | None = None - ): ... + def generate_intermediate(self, type, common_name, extra_params=None, mount_point="pki", wrap_ttl=None): ... def set_signed_intermediate(self, certificate, mount_point="pki"): ... - def generate_certificate( - self, name, common_name, extra_params: Incomplete | None = None, mount_point="pki", wrap_ttl: Incomplete | None = None - ): ... + def generate_certificate(self, name, common_name, extra_params=None, mount_point="pki", wrap_ttl=None): ... def revoke_certificate(self, serial_number, mount_point="pki"): ... - def create_or_update_role(self, name, extra_params: Incomplete | None = None, mount_point="pki"): ... + def create_or_update_role(self, name, extra_params=None, mount_point="pki"): ... def read_role(self, name, mount_point="pki"): ... def list_roles(self, mount_point="pki"): ... def delete_role(self, name, mount_point="pki"): ... - def generate_root( - self, type, common_name, extra_params: Incomplete | None = None, mount_point="pki", wrap_ttl: Incomplete | None = None - ): ... + def generate_root(self, type, common_name, extra_params=None, mount_point="pki", wrap_ttl=None): ... def delete_root(self, mount_point="pki"): ... - def sign_intermediate(self, csr, common_name, extra_params: Incomplete | None = None, mount_point="pki"): ... + def sign_intermediate(self, csr, common_name, extra_params=None, mount_point="pki"): ... def sign_self_issued(self, certificate, mount_point="pki"): ... - def sign_certificate(self, name, csr, common_name, extra_params: Incomplete | None = None, mount_point="pki"): ... - def sign_verbatim(self, csr, name: bool = False, extra_params: Incomplete | None = None, mount_point="pki"): ... - def tidy(self, extra_params: Incomplete | None = None, mount_point="pki"): ... + def sign_certificate(self, name, csr, common_name, extra_params=None, mount_point="pki"): ... + def sign_verbatim(self, csr, name: bool = False, extra_params=None, mount_point="pki"): ... 
+ def tidy(self, extra_params=None, mount_point="pki"): ... def read_issuer(self, issuer_ref, mount_point="pki"): ... def list_issuers(self, mount_point="pki"): ... - def update_issuer(self, issuer_ref, extra_params: Incomplete | None = None, mount_point="pki"): ... + def update_issuer(self, issuer_ref, extra_params=None, mount_point="pki"): ... def revoke_issuer(self, issuer_ref, mount_point="pki"): ... def delete_issuer(self, issuer_ref, mount_point="pki"): ... diff --git a/stubs/hvac/hvac/api/secrets_engines/ssh.pyi b/stubs/hvac/hvac/api/secrets_engines/ssh.pyi index 8697ec503cba..b7b7f3d3305d 100644 --- a/stubs/hvac/hvac/api/secrets_engines/ssh.pyi +++ b/stubs/hvac/hvac/api/secrets_engines/ssh.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from hvac.api.vault_api_base import VaultApiBase DEFAULT_MOUNT_POINT: str @@ -27,15 +25,15 @@ class Ssh(VaultApiBase): max_ttl: str = "", allowed_critical_options: str = "", allowed_extensions: str = "", - default_critical_options: Incomplete | None = None, - default_extensions: Incomplete | None = None, + default_critical_options=None, + default_extensions=None, allow_user_certificates: str = "", allow_host_certificates: bool = False, allow_bare_domains: bool = False, allow_subdomains: bool = False, allow_user_key_ids: bool = False, key_id_format: str = "", - allowed_user_key_lengths: Incomplete | None = None, + allowed_user_key_lengths=None, algorithm_signer: str = "", mount_point="ssh", ): ... @@ -67,7 +65,7 @@ class Ssh(VaultApiBase): valid_principals: str = "", cert_type: str = "user", key_id: str = "", - critical_options: Incomplete | None = None, - extensions: Incomplete | None = None, + critical_options=None, + extensions=None, mount_point: str = "ssh", ): ... diff --git a/stubs/hvac/hvac/api/secrets_engines/transform.pyi b/stubs/hvac/hvac/api/secrets_engines/transform.pyi index b1a89f8f9299..39aff82b3f50 100644 --- a/stubs/hvac/hvac/api/secrets_engines/transform.pyi +++ b/stubs/hvac/hvac/api/secrets_engines/transform.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from hvac.api.vault_api_base import VaultApiBase DEFAULT_MOUNT_POINT: str @@ -16,32 +14,22 @@ class Transform(VaultApiBase): template, tweak_source: str = "supplied", masking_character: str = "*", - allowed_roles: Incomplete | None = None, + allowed_roles=None, mount_point: str = "transform", ): ... def create_or_update_fpe_transformation( - self, - name, - template, - tweak_source: str = "supplied", - allowed_roles: Incomplete | None = None, - mount_point: str = "transform", + self, name, template, tweak_source: str = "supplied", allowed_roles=None, mount_point: str = "transform" ): ... def create_or_update_masking_transformation( - self, - name, - template, - masking_character: str = "*", - allowed_roles: Incomplete | None = None, - mount_point: str = "transform", + self, name, template, masking_character: str = "*", allowed_roles=None, mount_point: str = "transform" ): ... def create_or_update_tokenization_transformation( self, name, max_ttl: int = 0, mapping_mode: str = "default", - allowed_roles: Incomplete | None = None, - stores: Incomplete | None = None, + allowed_roles=None, + stores=None, mount_point: str = "transform", ): ... def read_transformation(self, name, mount_point: str = "transform"): ... 
@@ -60,10 +48,10 @@ class Transform(VaultApiBase): name, driver, connection_string, - username: Incomplete | None = None, - password: Incomplete | None = None, + username=None, + password=None, type: str = "sql", - supported_transformations: Incomplete | None = None, + supported_transformations=None, schema: str = "public", max_open_connections: int = 4, max_idle_connections: int = 4, @@ -71,32 +59,14 @@ class Transform(VaultApiBase): mount_point: str = "transform", ): ... def encode( - self, - role_name, - value: Incomplete | None = None, - transformation: Incomplete | None = None, - tweak: Incomplete | None = None, - batch_input: Incomplete | None = None, - mount_point: str = "transform", + self, role_name, value=None, transformation=None, tweak=None, batch_input=None, mount_point: str = "transform" ): ... def decode( - self, - role_name, - value: Incomplete | None = None, - transformation: Incomplete | None = None, - tweak: Incomplete | None = None, - batch_input: Incomplete | None = None, - mount_point: str = "transform", - ): ... - def validate_token( - self, role_name, value, transformation, batch_input: Incomplete | None = None, mount_point: str = "transform" - ): ... - def check_tokenization( - self, role_name, value, transformation, batch_input: Incomplete | None = None, mount_point: str = "transform" - ): ... - def retrieve_token_metadata( - self, role_name, value, transformation, batch_input: Incomplete | None = None, mount_point: str = "transform" + self, role_name, value=None, transformation=None, tweak=None, batch_input=None, mount_point: str = "transform" ): ... + def validate_token(self, role_name, value, transformation, batch_input=None, mount_point: str = "transform"): ... + def check_tokenization(self, role_name, value, transformation, batch_input=None, mount_point: str = "transform"): ... + def retrieve_token_metadata(self, role_name, value, transformation, batch_input=None, mount_point: str = "transform"): ... def snapshot_tokenization_state(self, name, limit: int = 1000, continuation: str = "", mount_point: str = "transform"): ... def restore_tokenization_state(self, name, values, mount_point: str = "transform"): ... def export_decoded_tokenization_state( diff --git a/stubs/hvac/hvac/api/secrets_engines/transit.pyi b/stubs/hvac/hvac/api/secrets_engines/transit.pyi index 495611b0e5c0..86a2335ec4d6 100644 --- a/stubs/hvac/hvac/api/secrets_engines/transit.pyi +++ b/stubs/hvac/hvac/api/secrets_engines/transit.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from hvac.api.vault_api_base import VaultApiBase DEFAULT_MOUNT_POINT: str @@ -8,13 +6,13 @@ class Transit(VaultApiBase): def create_key( self, name, - convergent_encryption: Incomplete | None = None, - derived: Incomplete | None = None, - exportable: Incomplete | None = None, - allow_plaintext_backup: Incomplete | None = None, - key_type: Incomplete | None = None, + convergent_encryption=None, + derived=None, + exportable=None, + allow_plaintext_backup=None, + key_type=None, mount_point="transit", - auto_rotate_period: Incomplete | None = None, + auto_rotate_period=None, ): ... def read_key(self, name, mount_point="transit"): ... def list_keys(self, mount_point="transit"): ... 
@@ -22,95 +20,74 @@ class Transit(VaultApiBase): def update_key_configuration( self, name, - min_decryption_version: Incomplete | None = None, - min_encryption_version: Incomplete | None = None, - deletion_allowed: Incomplete | None = None, - exportable: Incomplete | None = None, - allow_plaintext_backup: Incomplete | None = None, + min_decryption_version=None, + min_encryption_version=None, + deletion_allowed=None, + exportable=None, + allow_plaintext_backup=None, mount_point="transit", - auto_rotate_period: Incomplete | None = None, + auto_rotate_period=None, ): ... def rotate_key(self, name, mount_point="transit"): ... - def export_key(self, name, key_type, version: Incomplete | None = None, mount_point="transit"): ... + def export_key(self, name, key_type, version=None, mount_point="transit"): ... def encrypt_data( self, name, - plaintext: Incomplete | None = None, - context: Incomplete | None = None, - key_version: Incomplete | None = None, - nonce: Incomplete | None = None, - batch_input: Incomplete | None = None, - type: Incomplete | None = None, - convergent_encryption: Incomplete | None = None, + plaintext=None, + context=None, + key_version=None, + nonce=None, + batch_input=None, + type=None, + convergent_encryption=None, mount_point: str = "transit", associated_data: str | None = None, ): ... def decrypt_data( self, name, - ciphertext: Incomplete | None = None, - context: Incomplete | None = None, - nonce: Incomplete | None = None, - batch_input: Incomplete | None = None, + ciphertext=None, + context=None, + nonce=None, + batch_input=None, mount_point: str = "transit", associated_data: str | None = None, ): ... def rewrap_data( - self, - name, - ciphertext, - context: Incomplete | None = None, - key_version: Incomplete | None = None, - nonce: Incomplete | None = None, - batch_input: Incomplete | None = None, - mount_point="transit", - ): ... - def generate_data_key( - self, - name, - key_type, - context: Incomplete | None = None, - nonce: Incomplete | None = None, - bits: Incomplete | None = None, - mount_point="transit", - ): ... - def generate_random_bytes( - self, n_bytes: Incomplete | None = None, output_format: Incomplete | None = None, mount_point="transit" - ): ... - def hash_data( - self, hash_input, algorithm: Incomplete | None = None, output_format: Incomplete | None = None, mount_point="transit" - ): ... - def generate_hmac( - self, name, hash_input, key_version: Incomplete | None = None, algorithm: Incomplete | None = None, mount_point="transit" + self, name, ciphertext, context=None, key_version=None, nonce=None, batch_input=None, mount_point="transit" ): ... + def generate_data_key(self, name, key_type, context=None, nonce=None, bits=None, mount_point="transit"): ... + def generate_random_bytes(self, n_bytes=None, output_format=None, mount_point="transit"): ... + def hash_data(self, hash_input, algorithm=None, output_format=None, mount_point="transit"): ... + def generate_hmac(self, name, hash_input, key_version=None, algorithm=None, mount_point="transit"): ... 
def sign_data( self, name, - hash_input: Incomplete | None = None, - key_version: Incomplete | None = None, - hash_algorithm: Incomplete | None = None, - context: Incomplete | None = None, - prehashed: Incomplete | None = None, - signature_algorithm: Incomplete | None = None, - marshaling_algorithm: Incomplete | None = None, - salt_length: Incomplete | None = None, + hash_input=None, + key_version=None, + hash_algorithm=None, + context=None, + prehashed=None, + signature_algorithm=None, + marshaling_algorithm=None, + salt_length=None, mount_point="transit", - batch_input: Incomplete | None = None, + batch_input=None, ): ... def verify_signed_data( self, name, hash_input, - signature: Incomplete | None = None, - hmac: Incomplete | None = None, - hash_algorithm: Incomplete | None = None, - context: Incomplete | None = None, - prehashed: Incomplete | None = None, - signature_algorithm: Incomplete | None = None, - salt_length: Incomplete | None = None, - marshaling_algorithm: Incomplete | None = None, + signature=None, + hmac=None, + hash_algorithm=None, + context=None, + prehashed=None, + signature_algorithm=None, + salt_length=None, + marshaling_algorithm=None, mount_point="transit", ): ... def backup_key(self, name, mount_point="transit"): ... - def restore_key(self, backup, name: Incomplete | None = None, force: Incomplete | None = None, mount_point="transit"): ... + def restore_key(self, backup, name=None, force=None, mount_point="transit"): ... def trim_key(self, name, min_version, mount_point="transit"): ... diff --git a/stubs/hvac/hvac/api/system_backend/audit.pyi b/stubs/hvac/hvac/api/system_backend/audit.pyi index b1e5bc0d7670..99dea810178f 100644 --- a/stubs/hvac/hvac/api/system_backend/audit.pyi +++ b/stubs/hvac/hvac/api/system_backend/audit.pyi @@ -1,16 +1,7 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Audit(SystemBackendMixin): def list_enabled_audit_devices(self): ... - def enable_audit_device( - self, - device_type, - description: Incomplete | None = None, - options: Incomplete | None = None, - path: Incomplete | None = None, - local: Incomplete | None = None, - ): ... + def enable_audit_device(self, device_type, description=None, options=None, path=None, local=None): ... def disable_audit_device(self, path): ... def calculate_hash(self, path, input_to_hash): ... diff --git a/stubs/hvac/hvac/api/system_backend/auth.pyi b/stubs/hvac/hvac/api/system_backend/auth.pyi index 70f37609fb1c..1534a1c71499 100644 --- a/stubs/hvac/hvac/api/system_backend/auth.pyi +++ b/stubs/hvac/hvac/api/system_backend/auth.pyi @@ -1,30 +1,21 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Auth(SystemBackendMixin): def list_auth_methods(self): ... def enable_auth_method( - self, - method_type, - description: Incomplete | None = None, - config: Incomplete | None = None, - plugin_name: Incomplete | None = None, - local: bool = False, - path: Incomplete | None = None, - **kwargs, + self, method_type, description=None, config=None, plugin_name=None, local: bool = False, path=None, **kwargs ): ... def disable_auth_method(self, path): ... def read_auth_method_tuning(self, path): ... 
def tune_auth_method( self, path, - default_lease_ttl: Incomplete | None = None, - max_lease_ttl: Incomplete | None = None, - description: Incomplete | None = None, - audit_non_hmac_request_keys: Incomplete | None = None, - audit_non_hmac_response_keys: Incomplete | None = None, - listing_visibility: Incomplete | None = None, - passthrough_request_headers: Incomplete | None = None, + default_lease_ttl=None, + max_lease_ttl=None, + description=None, + audit_non_hmac_request_keys=None, + audit_non_hmac_response_keys=None, + listing_visibility=None, + passthrough_request_headers=None, **kwargs, ): ... diff --git a/stubs/hvac/hvac/api/system_backend/capabilities.pyi b/stubs/hvac/hvac/api/system_backend/capabilities.pyi index 7dab2917eff6..398f822a5870 100644 --- a/stubs/hvac/hvac/api/system_backend/capabilities.pyi +++ b/stubs/hvac/hvac/api/system_backend/capabilities.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Capabilities(SystemBackendMixin): - def get_capabilities(self, paths, token: Incomplete | None = None, accessor: Incomplete | None = None): ... + def get_capabilities(self, paths, token=None, accessor=None): ... diff --git a/stubs/hvac/hvac/api/system_backend/health.pyi b/stubs/hvac/hvac/api/system_backend/health.pyi index 10ff1162079c..9e252742786e 100644 --- a/stubs/hvac/hvac/api/system_backend/health.pyi +++ b/stubs/hvac/hvac/api/system_backend/health.pyi @@ -1,16 +1,14 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Health(SystemBackendMixin): def read_health_status( self, - standby_ok: Incomplete | None = None, - active_code: Incomplete | None = None, - standby_code: Incomplete | None = None, - dr_secondary_code: Incomplete | None = None, - performance_standby_code: Incomplete | None = None, - sealed_code: Incomplete | None = None, - uninit_code: Incomplete | None = None, + standby_ok=None, + active_code=None, + standby_code=None, + dr_secondary_code=None, + performance_standby_code=None, + sealed_code=None, + uninit_code=None, method: str = "HEAD", ): ... diff --git a/stubs/hvac/hvac/api/system_backend/init.pyi b/stubs/hvac/hvac/api/system_backend/init.pyi index c936f9a392b1..bd96164557d3 100644 --- a/stubs/hvac/hvac/api/system_backend/init.pyi +++ b/stubs/hvac/hvac/api/system_backend/init.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Init(SystemBackendMixin): @@ -7,12 +5,12 @@ class Init(SystemBackendMixin): def is_initialized(self): ... def initialize( self, - secret_shares: Incomplete | None = None, - secret_threshold: Incomplete | None = None, - pgp_keys: Incomplete | None = None, - root_token_pgp_key: Incomplete | None = None, - stored_shares: Incomplete | None = None, - recovery_shares: Incomplete | None = None, - recovery_threshold: Incomplete | None = None, - recovery_pgp_keys: Incomplete | None = None, + secret_shares=None, + secret_threshold=None, + pgp_keys=None, + root_token_pgp_key=None, + stored_shares=None, + recovery_shares=None, + recovery_threshold=None, + recovery_pgp_keys=None, ): ... 
diff --git a/stubs/hvac/hvac/api/system_backend/key.pyi b/stubs/hvac/hvac/api/system_backend/key.pyi index 49897874efa7..ada0db2177f4 100644 --- a/stubs/hvac/hvac/api/system_backend/key.pyi +++ b/stubs/hvac/hvac/api/system_backend/key.pyi @@ -1,10 +1,8 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Key(SystemBackendMixin): def read_root_generation_progress(self): ... - def start_root_token_generation(self, otp: Incomplete | None = None, pgp_key: Incomplete | None = None): ... + def start_root_token_generation(self, otp=None, pgp_key=None): ... def generate_root(self, key, nonce): ... def cancel_root_generation(self): ... def get_encryption_key_status(self): ... @@ -14,14 +12,14 @@ class Key(SystemBackendMixin): self, secret_shares: int = 5, secret_threshold: int = 3, - pgp_keys: Incomplete | None = None, + pgp_keys=None, backup: bool = False, require_verification: bool = False, recovery_key: bool = False, ): ... def cancel_rekey(self, recovery_key: bool = False): ... - def rekey(self, key, nonce: Incomplete | None = None, recovery_key: bool = False): ... - def rekey_multi(self, keys, nonce: Incomplete | None = None, recovery_key: bool = False): ... + def rekey(self, key, nonce=None, recovery_key: bool = False): ... + def rekey_multi(self, keys, nonce=None, recovery_key: bool = False): ... def read_backup_keys(self, recovery_key: bool = False): ... def cancel_rekey_verify(self): ... def rekey_verify(self, key, nonce): ... diff --git a/stubs/hvac/hvac/api/system_backend/lease.pyi b/stubs/hvac/hvac/api/system_backend/lease.pyi index 817ae2a42998..9fd68b3f5f9d 100644 --- a/stubs/hvac/hvac/api/system_backend/lease.pyi +++ b/stubs/hvac/hvac/api/system_backend/lease.pyi @@ -1,11 +1,9 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Lease(SystemBackendMixin): def read_lease(self, lease_id): ... def list_leases(self, prefix): ... - def renew_lease(self, lease_id, increment: Incomplete | None = None): ... + def renew_lease(self, lease_id, increment=None): ... def revoke_lease(self, lease_id): ... def revoke_prefix(self, prefix): ... def revoke_force(self, prefix): ... diff --git a/stubs/hvac/hvac/api/system_backend/mount.pyi b/stubs/hvac/hvac/api/system_backend/mount.pyi index 729b29cf8762..54e83c1af5d7 100644 --- a/stubs/hvac/hvac/api/system_backend/mount.pyi +++ b/stubs/hvac/hvac/api/system_backend/mount.pyi @@ -1,18 +1,16 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Mount(SystemBackendMixin): def list_mounted_secrets_engines(self): ... - def retrieve_mount_option(self, mount_point, option_name, default_value: Incomplete | None = None): ... + def retrieve_mount_option(self, mount_point, option_name, default_value=None): ... 
def enable_secrets_engine( self, backend_type, - path: Incomplete | None = None, - description: Incomplete | None = None, - config: Incomplete | None = None, - plugin_name: Incomplete | None = None, - options: Incomplete | None = None, + path=None, + description=None, + config=None, + plugin_name=None, + options=None, local: bool = False, seal_wrap: bool = False, **kwargs, @@ -22,15 +20,15 @@ class Mount(SystemBackendMixin): def tune_mount_configuration( self, path, - default_lease_ttl: Incomplete | None = None, - max_lease_ttl: Incomplete | None = None, - description: Incomplete | None = None, - audit_non_hmac_request_keys: Incomplete | None = None, - audit_non_hmac_response_keys: Incomplete | None = None, - listing_visibility: Incomplete | None = None, - passthrough_request_headers: Incomplete | None = None, - options: Incomplete | None = None, - force_no_cache: Incomplete | None = None, + default_lease_ttl=None, + max_lease_ttl=None, + description=None, + audit_non_hmac_request_keys=None, + audit_non_hmac_response_keys=None, + listing_visibility=None, + passthrough_request_headers=None, + options=None, + force_no_cache=None, **kwargs, ): ... def move_backend(self, from_path, to_path): ... diff --git a/stubs/hvac/hvac/api/system_backend/quota.pyi b/stubs/hvac/hvac/api/system_backend/quota.pyi index a197212eebb3..15385d68a492 100644 --- a/stubs/hvac/hvac/api/system_backend/quota.pyi +++ b/stubs/hvac/hvac/api/system_backend/quota.pyi @@ -1,19 +1,9 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Quota(SystemBackendMixin): def read_quota(self, name): ... def list_quotas(self): ... def create_or_update_quota( - self, - name, - rate, - path: Incomplete | None = None, - interval: Incomplete | None = None, - block_interval: Incomplete | None = None, - role: Incomplete | None = None, - rate_limit_type: Incomplete | None = None, - inheritable: Incomplete | None = None, + self, name, rate, path=None, interval=None, block_interval=None, role=None, rate_limit_type=None, inheritable=None ): ... def delete_quota(self, name): ... diff --git a/stubs/hvac/hvac/api/system_backend/raft.pyi b/stubs/hvac/hvac/api/system_backend/raft.pyi index ebd9051d18c3..94b531a53953 100644 --- a/stubs/hvac/hvac/api/system_backend/raft.pyi +++ b/stubs/hvac/hvac/api/system_backend/raft.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin @@ -6,12 +5,7 @@ from requests import Response class Raft(SystemBackendMixin): def join_raft_cluster( - self, - leader_api_addr, - retry: bool = False, - leader_ca_cert: Incomplete | None = None, - leader_client_cert: Incomplete | None = None, - leader_client_key: Incomplete | None = None, + self, leader_api_addr, retry: bool = False, leader_ca_cert=None, leader_client_cert=None, leader_client_key=None ): ... def read_raft_config(self): ... def remove_raft_node(self, server_id): ... diff --git a/stubs/hvac/hvac/api/system_backend/seal.pyi b/stubs/hvac/hvac/api/system_backend/seal.pyi index a0eb8ea7608b..9590f989df20 100644 --- a/stubs/hvac/hvac/api/system_backend/seal.pyi +++ b/stubs/hvac/hvac/api/system_backend/seal.pyi @@ -1,10 +1,8 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Seal(SystemBackendMixin): def is_sealed(self): ... def read_seal_status(self): ... def seal(self): ... 
- def submit_unseal_key(self, key: Incomplete | None = None, reset: bool = False, migrate: bool = False): ... + def submit_unseal_key(self, key=None, reset: bool = False, migrate: bool = False): ... def submit_unseal_keys(self, keys, migrate: bool = False): ... diff --git a/stubs/hvac/hvac/api/system_backend/wrapping.pyi b/stubs/hvac/hvac/api/system_backend/wrapping.pyi index 450f9c841e3d..37a386f4250f 100644 --- a/stubs/hvac/hvac/api/system_backend/wrapping.pyi +++ b/stubs/hvac/hvac/api/system_backend/wrapping.pyi @@ -3,5 +3,5 @@ from _typeshed import Incomplete from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin class Wrapping(SystemBackendMixin): - def unwrap(self, token: Incomplete | None = None): ... + def unwrap(self, token=None): ... def wrap(self, payload: dict[Incomplete, Incomplete] | None = None, ttl: int = 60): ... diff --git a/stubs/jsonschema/jsonschema/exceptions.pyi b/stubs/jsonschema/jsonschema/exceptions.pyi index b92391c306c9..e65c81fad046 100644 --- a/stubs/jsonschema/jsonschema/exceptions.pyi +++ b/stubs/jsonschema/jsonschema/exceptions.pyi @@ -71,7 +71,7 @@ class UnknownType(Exception): class FormatError(Exception): message: Incomplete cause: Incomplete - def __init__(self, message, cause: Incomplete | None = None) -> None: ... + def __init__(self, message, cause=None) -> None: ... class ErrorTree: errors: MutableMapping[str, ValidationError] diff --git a/stubs/jsonschema/jsonschema/validators.pyi b/stubs/jsonschema/jsonschema/validators.pyi index a6f39402fcdc..206455c8bd32 100644 --- a/stubs/jsonschema/jsonschema/validators.pyi +++ b/stubs/jsonschema/jsonschema/validators.pyi @@ -31,11 +31,11 @@ class _Validator: def __init__( self, schema: Schema, - resolver: Incomplete | None = None, + resolver=None, format_checker: FormatChecker | None = None, *, registry: SchemaRegistry = ..., - _resolver: Incomplete | None = None, + _resolver=None, ) -> None: ... @classmethod def check_schema(cls, schema: Schema, format_checker: FormatChecker | Unset = ...) -> None: ... @@ -44,12 +44,7 @@ class _Validator: def evolve(self, **changes) -> _Validator: ... def iter_errors(self, instance, _schema: Schema | None = ...) -> Generator[Incomplete, None, None]: ... def descend( - self, - instance, - schema: Schema, - path: Incomplete | None = ..., - schema_path: Incomplete | None = ..., - resolver: Incomplete | None = None, + self, instance, schema: Schema, path: Incomplete | None = ..., schema_path: Incomplete | None = ..., resolver=None ) -> Generator[Incomplete, None, None]: ... def validate(self, *args, **kwargs) -> None: ... def is_type(self, instance, type): ... @@ -59,19 +54,13 @@ def validates(version: str) -> Callable[..., Incomplete]: ... def create( meta_schema: Schema, validators: Mapping[str, _ValidatorCallback] | tuple[()] = (), - version: Incomplete | None = None, + version=None, type_checker: TypeChecker = ..., format_checker: FormatChecker = ..., id_of: Callable[[Schema], str] = ..., applicable_validators: Callable[[Schema], Iterable[tuple[str, _ValidatorCallback]]] = ..., ) -> type[_Validator]: ... -def extend( - validator, - validators=(), - version: Incomplete | None = None, - type_checker: Incomplete | None = None, - format_checker: Incomplete | None = None, -): ... +def extend(validator, validators=(), version=None, type_checker=None, format_checker=None): ... # At runtime these are fields that are assigned the return values of create() calls. class Draft3Validator(_Validator): ... 
@@ -95,8 +84,8 @@ class RefResolver: store: SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]] = ..., cache_remote: bool = True, handlers: SupportsKeysAndGetItem[str, _Handler] | Iterable[tuple[str, _Handler]] = (), - urljoin_cache: Incomplete | None = None, - remote_cache: Incomplete | None = None, + urljoin_cache=None, + remote_cache=None, ) -> None: ... @classmethod def from_schema(cls, schema: Schema, id_of=..., *args, **kwargs): ... diff --git a/stubs/jwcrypto/jwcrypto/jws.pyi b/stubs/jwcrypto/jwcrypto/jws.pyi index f5d3fab33a28..92c84ab787c7 100644 --- a/stubs/jwcrypto/jwcrypto/jws.pyi +++ b/stubs/jwcrypto/jwcrypto/jws.pyi @@ -41,18 +41,16 @@ class JWS: objects: Incomplete verifylog: list[str] | None header_registry: Incomplete - def __init__(self, payload: Incomplete | None = None, header_registry: Incomplete | None = None) -> None: ... + def __init__(self, payload=None, header_registry=None) -> None: ... @property def allowed_algs(self): ... @allowed_algs.setter def allowed_algs(self, algs) -> None: ... @property def is_valid(self): ... - def verify(self, key, alg: Incomplete | None = None, detached_payload: Incomplete | None = None) -> None: ... - def deserialize(self, raw_jws, key: Incomplete | None = None, alg: Incomplete | None = None) -> None: ... - def add_signature( - self, key, alg: Incomplete | None = None, protected: Incomplete | None = None, header: Incomplete | None = None - ) -> None: ... + def verify(self, key, alg=None, detached_payload=None) -> None: ... + def deserialize(self, raw_jws, key=None, alg=None) -> None: ... + def add_signature(self, key, alg=None, protected=None, header=None) -> None: ... def serialize(self, compact: bool = False) -> str: ... @property def payload(self): ... diff --git a/stubs/jwcrypto/jwcrypto/jwt.pyi b/stubs/jwcrypto/jwcrypto/jwt.pyi index eb3f062e2a85..bb2cce86e2f1 100644 --- a/stubs/jwcrypto/jwcrypto/jwt.pyi +++ b/stubs/jwcrypto/jwcrypto/jwt.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Mapping from typing import Any, SupportsInt from typing_extensions import deprecated @@ -37,12 +36,12 @@ class JWT: self, header: dict[str, Any] | str | None = None, claims: dict[str, Any] | str | None = None, - jwt: Incomplete | None = None, + jwt=None, key: JWK | JWKSet | None = None, - algs: Incomplete | None = None, - default_claims: Incomplete | None = None, - check_claims: Incomplete | None = None, - expected_type: Incomplete | None = None, + algs=None, + default_claims=None, + check_claims=None, + expected_type=None, ) -> None: ... @property def header(self) -> str: ... @@ -72,7 +71,7 @@ class JWT: def make_signed_token(self, key: JWK) -> None: ... def make_encrypted_token(self, key: JWK) -> None: ... def validate(self, key: JWK | JWKSet) -> None: ... - def deserialize(self, jwt, key: Incomplete | None = None) -> None: ... + def deserialize(self, jwt, key=None) -> None: ... def serialize(self, compact: bool = True) -> str: ... @classmethod def from_jose_token(cls, token): ... 
diff --git a/stubs/ldap3/ldap3/abstract/attrDef.pyi b/stubs/ldap3/ldap3/abstract/attrDef.pyi index d81a68f65f9a..60332975fc42 100644 --- a/stubs/ldap3/ldap3/abstract/attrDef.pyi +++ b/stubs/ldap3/ldap3/abstract/attrDef.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any class AttrDef: @@ -17,16 +16,16 @@ class AttrDef: def __init__( self, name, - key: Incomplete | None = None, - validate: Incomplete | None = None, - pre_query: Incomplete | None = None, - post_query: Incomplete | None = None, + key=None, + validate=None, + pre_query=None, + post_query=None, default=..., - dereference_dn: Incomplete | None = None, - description: Incomplete | None = None, + dereference_dn=None, + description=None, mandatory: bool = False, - single_value: Incomplete | None = None, - alias: Incomplete | None = None, + single_value=None, + alias=None, ) -> None: ... def __eq__(self, other): ... def __lt__(self, other): ... diff --git a/stubs/ldap3/ldap3/abstract/cursor.pyi b/stubs/ldap3/ldap3/abstract/cursor.pyi index 862475b51ab8..73c1eb2db8c1 100644 --- a/stubs/ldap3/ldap3/abstract/cursor.pyi +++ b/stubs/ldap3/ldap3/abstract/cursor.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, NamedTuple class Operation(NamedTuple): @@ -20,9 +19,9 @@ class Cursor: connection, object_def, get_operational_attributes: bool = False, - attributes: Incomplete | None = None, - controls: Incomplete | None = None, - auxiliary_class: Incomplete | None = None, + attributes=None, + controls=None, + auxiliary_class=None, ) -> None: ... def __iter__(self): ... def __getitem__(self, item): ... @@ -56,9 +55,9 @@ class Reader(Cursor): components_in_and: bool = True, sub_tree: bool = True, get_operational_attributes: bool = False, - attributes: Incomplete | None = None, - controls: Incomplete | None = None, - auxiliary_class: Incomplete | None = None, + attributes=None, + controls=None, + auxiliary_class=None, ) -> None: ... @property def query(self): ... @@ -72,36 +71,29 @@ class Reader(Cursor): execution_time: Any entries: Any def reset(self) -> None: ... - def search(self, attributes: Incomplete | None = None): ... - def search_object(self, entry_dn: Incomplete | None = None, attributes: Incomplete | None = None): ... - def search_level(self, attributes: Incomplete | None = None): ... - def search_subtree(self, attributes: Incomplete | None = None): ... - def search_paged( - self, paged_size, paged_criticality: bool = True, generator: bool = True, attributes: Incomplete | None = None - ): ... + def search(self, attributes=None): ... + def search_object(self, entry_dn=None, attributes=None): ... + def search_level(self, attributes=None): ... + def search_subtree(self, attributes=None): ... + def search_paged(self, paged_size, paged_criticality: bool = True, generator: bool = True, attributes=None): ... class Writer(Cursor): entry_class: Any attribute_class: Any entry_initial_status: Any @staticmethod - def from_cursor( - cursor, - connection: Incomplete | None = None, - object_def: Incomplete | None = None, - custom_validator: Incomplete | None = None, - ): ... + def from_cursor(cursor, connection=None, object_def=None, custom_validator=None): ... @staticmethod - def from_response(connection, object_def, response: Incomplete | None = None): ... + def from_response(connection, object_def, response=None): ... 
dereference_aliases: Any def __init__( self, connection, object_def, get_operational_attributes: bool = False, - attributes: Incomplete | None = None, - controls: Incomplete | None = None, - auxiliary_class: Incomplete | None = None, + attributes=None, + controls=None, + auxiliary_class=None, ) -> None: ... execution_time: Any def commit(self, refresh: bool = True): ... diff --git a/stubs/ldap3/ldap3/abstract/entry.pyi b/stubs/ldap3/ldap3/abstract/entry.pyi index ffc55feeb35b..2c16d59fb8ec 100644 --- a/stubs/ldap3/ldap3/abstract/entry.pyi +++ b/stubs/ldap3/ldap3/abstract/entry.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any class EntryState: @@ -50,26 +49,15 @@ class EntryBase: raw: bool = False, indent: int = 4, sort: bool = True, - stream: Incomplete | None = None, + stream=None, checked_attributes: bool = True, include_empty: bool = True, ): ... - def entry_to_ldif( - self, - all_base64: bool = False, - line_separator: Incomplete | None = None, - sort_order: Incomplete | None = None, - stream: Incomplete | None = None, - ): ... + def entry_to_ldif(self, all_base64: bool = False, line_separator=None, sort_order=None, stream=None): ... class Entry(EntryBase): def entry_writable( - self, - object_def: Incomplete | None = None, - writer_cursor: Incomplete | None = None, - attributes: Incomplete | None = None, - custom_validator: Incomplete | None = None, - auxiliary_class: Incomplete | None = None, + self, object_def=None, writer_cursor=None, attributes=None, custom_validator=None, auxiliary_class=None ): ... class WritableEntry(EntryBase): @@ -78,7 +66,7 @@ class WritableEntry(EntryBase): def __getattr__(self, item: str): ... @property def entry_virtual_attributes(self): ... - def entry_commit_changes(self, refresh: bool = True, controls: Incomplete | None = None, clear_history: bool = True): ... + def entry_commit_changes(self, refresh: bool = True, controls=None, clear_history: bool = True): ... def entry_discard_changes(self) -> None: ... def entry_delete(self) -> None: ... def entry_refresh(self, tries: int = 4, seconds: int = 2): ... diff --git a/stubs/ldap3/ldap3/abstract/objectDef.pyi b/stubs/ldap3/ldap3/abstract/objectDef.pyi index 8f3b42799f30..4aaefa21448b 100644 --- a/stubs/ldap3/ldap3/abstract/objectDef.pyi +++ b/stubs/ldap3/ldap3/abstract/objectDef.pyi @@ -1,13 +1,5 @@ -from _typeshed import Incomplete - class ObjectDef: - def __init__( - self, - object_class: Incomplete | None = None, - schema: Incomplete | None = None, - custom_validator: Incomplete | None = None, - auxiliary_class: Incomplete | None = None, - ) -> None: ... + def __init__(self, object_class=None, schema=None, custom_validator=None, auxiliary_class=None) -> None: ... def __getitem__(self, item): ... def __getattr__(self, item: str): ... def __setattr__(self, key: str, value) -> None: ... @@ -18,6 +10,6 @@ class ObjectDef: def __bool__(self) -> bool: ... def __contains__(self, item): ... def add_from_schema(self, attribute_name, mandatory: bool = False) -> None: ... - def add_attribute(self, definition: Incomplete | None = None) -> None: ... + def add_attribute(self, definition=None) -> None: ... def remove_attribute(self, item) -> None: ... def clear_attributes(self) -> None: ... 
diff --git a/stubs/ldap3/ldap3/core/connection.pyi b/stubs/ldap3/ldap3/core/connection.pyi index b961f60cd632..47df86aa6ad8 100644 --- a/stubs/ldap3/ldap3/core/connection.pyi +++ b/stubs/ldap3/ldap3/core/connection.pyi @@ -88,7 +88,7 @@ class Connection: auto_referrals: bool = True, auto_range: bool = True, sasl_mechanism: str | None = None, - sasl_credentials: Incomplete | None = None, + sasl_credentials=None, check_names: bool = True, collect_usage: bool = False, read_only: bool = False, @@ -97,17 +97,17 @@ class Connection: pool_name: str | None = None, pool_size: int | None = None, pool_lifetime: int | None = None, - cred_store: Incomplete | None = None, + cred_store=None, fast_decoder: bool = True, - receive_timeout: Incomplete | None = None, + receive_timeout=None, return_empty_attributes: bool = True, use_referral_cache: bool = False, auto_escape: bool = True, auto_encode: bool = True, - pool_keepalive: Incomplete | None = None, + pool_keepalive=None, source_address: str | None = None, source_port: int | None = None, - source_port_list: Incomplete | None = None, + source_port_list=None, ) -> None: ... def repr_with_sensitive_data_stripped(self): ... @property @@ -120,76 +120,58 @@ class Connection: def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> Literal[False] | None: ... - def bind(self, read_server_info: bool = True, controls: Incomplete | None = None): ... + def bind(self, read_server_info: bool = True, controls=None): ... def rebind( self, - user: Incomplete | None = None, - password: Incomplete | None = None, - authentication: Incomplete | None = None, - sasl_mechanism: Incomplete | None = None, - sasl_credentials: Incomplete | None = None, + user=None, + password=None, + authentication=None, + sasl_mechanism=None, + sasl_credentials=None, read_server_info: bool = True, - controls: Incomplete | None = None, + controls=None, ): ... - def unbind(self, controls: Incomplete | None = None): ... + def unbind(self, controls=None): ... def search( self, search_base: str, search_filter: str, search_scope: Literal["BASE", "LEVEL", "SUBTREE"] = "SUBTREE", dereference_aliases: Literal["NEVER", "SEARCH", "FINDING_BASE", "ALWAYS"] = "ALWAYS", - attributes: Incomplete | None = None, + attributes=None, size_limit: int = 0, time_limit: int = 0, types_only: bool = False, get_operational_attributes: bool = False, - controls: Incomplete | None = None, + controls=None, paged_size: int | None = None, paged_criticality: bool = False, paged_cookie: str | bytes | None = None, auto_escape: bool | None = None, ): ... - def compare(self, dn, attribute, value, controls: Incomplete | None = None): ... - def add( - self, dn, object_class: Incomplete | None = None, attributes: Incomplete | None = None, controls: Incomplete | None = None - ): ... - def delete(self, dn, controls: Incomplete | None = None): ... - def modify(self, dn, changes, controls: Incomplete | None = None): ... - def modify_dn( - self, - dn, - relative_dn, - delete_old_dn: bool = True, - new_superior: Incomplete | None = None, - controls: Incomplete | None = None, - ): ... - def abandon(self, message_id, controls: Incomplete | None = None): ... + def compare(self, dn, attribute, value, controls=None): ... + def add(self, dn, object_class=None, attributes=None, controls=None): ... + def delete(self, dn, controls=None): ... + def modify(self, dn, changes, controls=None): ... 
+ def modify_dn(self, dn, relative_dn, delete_old_dn: bool = True, new_superior=None, controls=None): ... + def abandon(self, message_id, controls=None): ... def extended( - self, - request_name, - request_value: Asn1Item | ReadableBuffer | None = None, - controls: Incomplete | None = None, - no_encode: bool | None = None, + self, request_name, request_value: Asn1Item | ReadableBuffer | None = None, controls=None, no_encode: bool | None = None ): ... def start_tls(self, read_server_info: bool = True): ... def do_sasl_bind(self, controls): ... def do_ntlm_bind(self, controls): ... def refresh_server_info(self) -> None: ... def response_to_ldif( - self, - search_result: Incomplete | None = None, - all_base64: bool = False, - line_separator: Incomplete | None = None, - sort_order: Incomplete | None = None, - stream: Incomplete | None = None, + self, search_result=None, all_base64: bool = False, line_separator=None, sort_order=None, stream=None ): ... def response_to_json( self, raw: bool = False, - search_result: Incomplete | None = None, + search_result=None, indent: int = 4, sort: bool = True, - stream: Incomplete | None = None, + stream=None, checked_attributes: bool = True, include_empty: bool = True, ): ... diff --git a/stubs/ldap3/ldap3/core/exceptions.pyi b/stubs/ldap3/ldap3/core/exceptions.pyi index 4fe7cfc3014e..518ad6b18378 100644 --- a/stubs/ldap3/ldap3/core/exceptions.pyi +++ b/stubs/ldap3/ldap3/core/exceptions.pyi @@ -1,35 +1,18 @@ import socket -from _typeshed import Incomplete from typing import Any from typing_extensions import Self class LDAPException(Exception): ... class LDAPOperationResult(LDAPException): - def __new__( - cls, - result: Incomplete | None = None, - description: Incomplete | None = None, - dn: Incomplete | None = None, - message: Incomplete | None = None, - response_type: Incomplete | None = None, - response: Incomplete | None = None, - ) -> Self: ... + def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None) -> Self: ... result: Any description: Any dn: Any message: Any type: Any response: Any - def __init__( - self, - result: Incomplete | None = None, - description: Incomplete | None = None, - dn: Incomplete | None = None, - message: Incomplete | None = None, - response_type: Incomplete | None = None, - response: Incomplete | None = None, - ) -> None: ... + def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None) -> None: ... class LDAPOperationsErrorResult(LDAPOperationResult): ... class LDAPProtocolErrorResult(LDAPOperationResult): ... diff --git a/stubs/ldap3/ldap3/core/pooling.pyi b/stubs/ldap3/ldap3/core/pooling.pyi index d178b0e7c3db..c35ddcad8006 100644 --- a/stubs/ldap3/ldap3/core/pooling.pyi +++ b/stubs/ldap3/ldap3/core/pooling.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any POOLING_STRATEGIES: Any @@ -31,12 +30,7 @@ class ServerPool: single: Any strategy: Any def __init__( - self, - servers: Incomplete | None = None, - pool_strategy="ROUND_ROBIN", - active: bool = True, - exhaust: bool = False, - single_state: bool = True, + self, servers=None, pool_strategy="ROUND_ROBIN", active: bool = True, exhaust: bool = False, single_state: bool = True ) -> None: ... def __len__(self) -> int: ... def __getitem__(self, item): ... 
diff --git a/stubs/ldap3/ldap3/core/server.pyi b/stubs/ldap3/ldap3/core/server.pyi index c56c6a4ff05d..d86112148ab3 100644 --- a/stubs/ldap3/ldap3/core/server.pyi +++ b/stubs/ldap3/ldap3/core/server.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, Literal unix_socket_available: bool @@ -23,43 +22,30 @@ class Server: host: str, port: int | None = None, use_ssl: bool = False, - allowed_referral_hosts: Incomplete | None = None, + allowed_referral_hosts=None, get_info: Literal["NO_INFO", "DSA", "SCHEMA", "ALL"] = "SCHEMA", - tls: Incomplete | None = None, - formatter: Incomplete | None = None, - connect_timeout: Incomplete | None = None, + tls=None, + formatter=None, + connect_timeout=None, mode: Literal["IP_SYSTEM_DEFAULT", "IP_V4_ONLY", "IP_V6_ONLY", "IP_V4_PREFERRED", "IP_V6_PREFERRED"] = "IP_V6_PREFERRED", - validator: Incomplete | None = None, + validator=None, ) -> None: ... @property def address_info(self): ... def update_availability(self, address, available) -> None: ... def reset_availability(self) -> None: ... - def check_availability( - self, - source_address: Incomplete | None = None, - source_port: Incomplete | None = None, - source_port_list: Incomplete | None = None, - ): ... + def check_availability(self, source_address=None, source_port=None, source_port_list=None): ... @staticmethod def next_message_id(): ... def get_info_from_server(self, connection) -> None: ... - def attach_dsa_info(self, dsa_info: Incomplete | None = None) -> None: ... - def attach_schema_info(self, dsa_schema: Incomplete | None = None) -> None: ... + def attach_dsa_info(self, dsa_info=None) -> None: ... + def attach_schema_info(self, dsa_schema=None) -> None: ... @property def info(self): ... @property def schema(self): ... @staticmethod - def from_definition( - host, - dsa_info, - dsa_schema, - port: Incomplete | None = None, - use_ssl: bool = False, - formatter: Incomplete | None = None, - validator: Incomplete | None = None, - ): ... + def from_definition(host, dsa_info, dsa_schema, port=None, use_ssl: bool = False, formatter=None, validator=None): ... def candidate_addresses(self): ... def has_control(self, control): ... def has_extension(self, extension): ... diff --git a/stubs/ldap3/ldap3/core/tls.pyi b/stubs/ldap3/ldap3/core/tls.pyi index 11184b4fcfbc..87d423da05d5 100644 --- a/stubs/ldap3/ldap3/core/tls.pyi +++ b/stubs/ldap3/ldap3/core/tls.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any use_ssl_context: bool @@ -18,18 +17,18 @@ class Tls: sni: Any def __init__( self, - local_private_key_file: Incomplete | None = None, - local_certificate_file: Incomplete | None = None, + local_private_key_file=None, + local_certificate_file=None, validate=..., - version: Incomplete | None = None, - ssl_options: Incomplete | None = None, - ca_certs_file: Incomplete | None = None, - valid_names: Incomplete | None = None, - ca_certs_path: Incomplete | None = None, - ca_certs_data: Incomplete | None = None, - local_private_key_password: Incomplete | None = None, - ciphers: Incomplete | None = None, - sni: Incomplete | None = None, + version=None, + ssl_options=None, + ca_certs_file=None, + valid_names=None, + ca_certs_path=None, + ca_certs_data=None, + local_private_key_password=None, + ciphers=None, + sni=None, ) -> None: ... def wrap_socket(self, connection, do_handshake: bool = False) -> None: ... def start_tls(self, connection): ... 
diff --git a/stubs/ldap3/ldap3/extend/__init__.pyi b/stubs/ldap3/ldap3/extend/__init__.pyi index 72ae2d21aecc..90d3746d5ac5 100644 --- a/stubs/ldap3/ldap3/extend/__init__.pyi +++ b/stubs/ldap3/ldap3/extend/__init__.pyi @@ -1,32 +1,23 @@ -from _typeshed import Incomplete from typing import Any class ExtendedOperationContainer: def __init__(self, connection) -> None: ... class StandardExtendedOperations(ExtendedOperationContainer): - def who_am_i(self, controls: Incomplete | None = None): ... - def modify_password( - self, - user: Incomplete | None = None, - old_password: Incomplete | None = None, - new_password: Incomplete | None = None, - hash_algorithm: Incomplete | None = None, - salt: Incomplete | None = None, - controls: Incomplete | None = None, - ): ... + def who_am_i(self, controls=None): ... + def modify_password(self, user=None, old_password=None, new_password=None, hash_algorithm=None, salt=None, controls=None): ... def paged_search( self, search_base, search_filter, search_scope="SUBTREE", dereference_aliases="ALWAYS", - attributes: Incomplete | None = None, + attributes=None, size_limit: int = 0, time_limit: int = 0, types_only: bool = False, get_operational_attributes: bool = False, - controls: Incomplete | None = None, + controls=None, paged_size: int = 100, paged_criticality: bool = False, generator: bool = True, @@ -40,7 +31,7 @@ class StandardExtendedOperations(ExtendedOperationContainer): attributes="*", size_limit: int = 0, time_limit: int = 0, - controls: Incomplete | None = None, + controls=None, changes_only: bool = True, show_additions: bool = True, show_deletions: bool = True, @@ -48,7 +39,7 @@ class StandardExtendedOperations(ExtendedOperationContainer): show_dn_modifications: bool = True, notifications: bool = True, streaming: bool = True, - callback: Incomplete | None = None, + callback=None, ): ... def funnel_search( self, @@ -59,20 +50,20 @@ class StandardExtendedOperations(ExtendedOperationContainer): attributes="*", size_limit: int = 0, time_limit: int = 0, - controls: Incomplete | None = None, + controls=None, streaming: bool = False, - callback: Incomplete | None = None, + callback=None, ): ... class NovellExtendedOperations(ExtendedOperationContainer): - def get_bind_dn(self, controls: Incomplete | None = None): ... - def get_universal_password(self, user, controls: Incomplete | None = None): ... - def set_universal_password(self, user, new_password: Incomplete | None = None, controls: Incomplete | None = None): ... - def list_replicas(self, server_dn, controls: Incomplete | None = None): ... - def partition_entry_count(self, partition_dn, controls: Incomplete | None = None): ... - def replica_info(self, server_dn, partition_dn, controls: Incomplete | None = None): ... - def start_transaction(self, controls: Incomplete | None = None): ... - def end_transaction(self, commit: bool = True, controls: Incomplete | None = None): ... + def get_bind_dn(self, controls=None): ... + def get_universal_password(self, user, controls=None): ... + def set_universal_password(self, user, new_password=None, controls=None): ... + def list_replicas(self, server_dn, controls=None): ... + def partition_entry_count(self, partition_dn, controls=None): ... + def replica_info(self, server_dn, partition_dn, controls=None): ... + def start_transaction(self, controls=None): ... + def end_transaction(self, commit: bool = True, controls=None): ... def add_members_to_groups(self, members, groups, fix: bool = True, transaction: bool = True): ... 
def remove_members_from_groups(self, members, groups, fix: bool = True, transaction: bool = True): ... def check_groups_memberships(self, members, groups, fix: bool = False, transaction: bool = True): ... @@ -83,7 +74,7 @@ class MicrosoftExtendedOperations(ExtendedOperationContainer): sync_base, sync_filter: str = "(objectclass=*)", attributes="*", - cookie: Incomplete | None = None, + cookie=None, object_security: bool = False, ancestors_first: bool = True, public_data_only: bool = False, @@ -91,17 +82,12 @@ class MicrosoftExtendedOperations(ExtendedOperationContainer): max_length: int = 2147483647, hex_guid: bool = False, ): ... - def modify_password(self, user, new_password, old_password: Incomplete | None = None, controls: Incomplete | None = None): ... + def modify_password(self, user, new_password, old_password=None, controls=None): ... def unlock_account(self, user): ... def add_members_to_groups(self, members, groups, fix: bool = True): ... def remove_members_from_groups(self, members, groups, fix: bool = True): ... def persistent_search( - self, - search_base: str = "", - search_scope="SUBTREE", - attributes="*", - streaming: bool = True, - callback: Incomplete | None = None, + self, search_base: str = "", search_scope="SUBTREE", attributes="*", streaming: bool = True, callback=None ): ... class ExtendedOperationsRoot(ExtendedOperationContainer): diff --git a/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi b/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi index 598d74963aa1..6ee9b965f9d2 100644 --- a/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi +++ b/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def ad_modify_password(connection, user_dn, new_password, old_password, controls: Incomplete | None = None): ... +def ad_modify_password(connection, user_dn, new_password, old_password, controls=None): ... diff --git a/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi b/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi index 64164d437780..2e8818c7e1cb 100644 --- a/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi +++ b/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any class ADPersistentSearch: @@ -12,5 +11,5 @@ class ADPersistentSearch: def __init__(self, connection, search_base, search_scope, attributes, streaming, callback) -> None: ... def start(self) -> None: ... def stop(self, unbind: bool = True) -> None: ... - def next(self, block: bool = False, timeout: Incomplete | None = None): ... - def funnel(self, block: bool = False, timeout: Incomplete | None = None) -> None: ... + def next(self, block: bool = False, timeout=None): ... + def funnel(self, block: bool = False, timeout=None) -> None: ... diff --git a/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi b/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi index b43e43e79c1e..dd96df4b40a5 100644 --- a/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi +++ b/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi @@ -1,3 +1 @@ -from _typeshed import Incomplete - -def ad_unlock_account(connection, user_dn, controls: Incomplete | None = None): ... +def ad_unlock_account(connection, user_dn, controls=None): ... 
diff --git a/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi b/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi index d54945837bfc..d771f87e9fc1 100644 --- a/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi +++ b/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi @@ -9,7 +9,7 @@ class EndTransaction(ExtendedOperation): request_value: EndGroupTypeRequestValue asn1_spec: EndGroupTypeResponseValue def config(self) -> None: ... - def __init__(self, connection, commit: bool = True, controls: Incomplete | None = None) -> None: ... + def __init__(self, connection, commit: bool = True, controls=None) -> None: ... def populate_result(self) -> None: ... response_value: Incomplete def set_response(self) -> None: ... diff --git a/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi b/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi index ce21c7f5cdd4..414761713115 100644 --- a/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi +++ b/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from ...extend.operation import ExtendedOperation from ...protocol.novell import ReplicaList from ...protocol.rfc4511 import LDAPDN @@ -11,5 +9,5 @@ class ListReplicas(ExtendedOperation): asn1_spec: ReplicaList response_attribute: str def config(self) -> None: ... - def __init__(self, connection, server_dn, controls: Incomplete | None = None) -> None: ... + def __init__(self, connection, server_dn, controls=None) -> None: ... def populate_result(self) -> None: ... diff --git a/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi b/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi index 1209787536db..f18b0ab3ac64 100644 --- a/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi +++ b/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from ...extend.operation import ExtendedOperation from ...protocol.novell import NmasGetUniversalPasswordRequestValue, NmasGetUniversalPasswordResponseValue @@ -10,5 +8,5 @@ class NmasGetUniversalPassword(ExtendedOperation): asn1_spec: NmasGetUniversalPasswordResponseValue response_attribute: str def config(self) -> None: ... - def __init__(self, connection, user, controls: Incomplete | None = None) -> None: ... + def __init__(self, connection, user, controls=None) -> None: ... def populate_result(self) -> None: ... diff --git a/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi b/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi index f62ae3c0b05c..762b20e7e2b5 100644 --- a/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi +++ b/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from ...extend.operation import ExtendedOperation from ...protocol.novell import NmasSetUniversalPasswordRequestValue, NmasSetUniversalPasswordResponseValue @@ -10,5 +8,5 @@ class NmasSetUniversalPassword(ExtendedOperation): asn1_spec: NmasSetUniversalPasswordResponseValue response_attribute: str def config(self) -> None: ... - def __init__(self, connection, user, new_password, controls: Incomplete | None = None) -> None: ... + def __init__(self, connection, user, new_password, controls=None) -> None: ... def populate_result(self) -> None: ... 
diff --git a/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi b/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi index e9ac9535238d..3231221f71ab 100644 --- a/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi +++ b/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from ...protocol.rfc4511 import LDAPDN from ..operation import ExtendedOperation @@ -9,5 +7,5 @@ class PartitionEntryCount(ExtendedOperation): request_value: LDAPDN response_attribute: str def config(self) -> None: ... - def __init__(self, connection, partition_dn, controls: Incomplete | None = None) -> None: ... + def __init__(self, connection, partition_dn, controls=None) -> None: ... def populate_result(self) -> None: ... diff --git a/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi b/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi index e690c60a9d59..5bc47e022a2a 100644 --- a/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi +++ b/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from ...protocol.novell import ReplicaInfoRequestValue from ..operation import ExtendedOperation @@ -9,5 +7,5 @@ class ReplicaInfo(ExtendedOperation): request_value: ReplicaInfoRequestValue response_attribute: str def config(self) -> None: ... - def __init__(self, connection, server_dn, partition_dn, controls: Incomplete | None = None) -> None: ... + def __init__(self, connection, server_dn, partition_dn, controls=None) -> None: ... def populate_result(self) -> None: ... diff --git a/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi b/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi index 3fa7312a2cbc..f5225664e7c3 100644 --- a/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi +++ b/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi @@ -9,7 +9,7 @@ class StartTransaction(ExtendedOperation): request_value: CreateGroupTypeRequestValue asn1_spec: CreateGroupTypeResponseValue def config(self) -> None: ... - def __init__(self, connection, controls: Incomplete | None = None) -> None: ... + def __init__(self, connection, controls=None) -> None: ... def populate_result(self) -> None: ... response_value: Incomplete def set_response(self) -> None: ... diff --git a/stubs/ldap3/ldap3/extend/operation.pyi b/stubs/ldap3/ldap3/extend/operation.pyi index 878cdfa36b1e..614f5c9842c7 100644 --- a/stubs/ldap3/ldap3/extend/operation.pyi +++ b/stubs/ldap3/ldap3/extend/operation.pyi @@ -13,9 +13,9 @@ class ExtendedOperation: response_value: Incomplete | None response_attribute: Incomplete | None controls: Incomplete - def __init__(self, connection, controls: Incomplete | None = None) -> None: ... + def __init__(self, connection, controls=None) -> None: ... def send(self): ... def populate_result(self) -> None: ... - def decode_response(self, response: Incomplete | None = None) -> None: ... + def decode_response(self, response=None) -> None: ... def set_response(self) -> None: ... def config(self) -> None: ... 
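The ExtendedOperation base class above is what the `Connection.extend` namespaces build on. A short usage sketch, assuming a reachable test server; the host, DNs and password are placeholders:

from ldap3 import Connection, Server

# Placeholder host and credentials; any directory that permits these
# operations will do.
server = Server("ldap.example.com")
conn = Connection(server, user="cn=admin,dc=example,dc=com", password="secret", auto_bind=True)

# Standard extended operations are exposed under conn.extend.standard;
# optional arguments such as controls default to None, as in the stubs.
print(conn.extend.standard.who_am_i())
conn.extend.standard.modify_password(user="cn=alice,dc=example,dc=com", new_password="n3w-pass")
conn.unbind()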
diff --git a/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi b/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi index 22d0a22f403c..d651eff6afda 100644 --- a/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi +++ b/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi @@ -1,17 +1,15 @@ -from _typeshed import Incomplete - def paged_search_generator( connection, search_base, search_filter, search_scope="SUBTREE", dereference_aliases="ALWAYS", - attributes: Incomplete | None = None, + attributes=None, size_limit: int = 0, time_limit: int = 0, types_only: bool = False, get_operational_attributes: bool = False, - controls: Incomplete | None = None, + controls=None, paged_size: int = 100, paged_criticality: bool = False, ) -> None: ... @@ -21,12 +19,12 @@ def paged_search_accumulator( search_filter, search_scope="SUBTREE", dereference_aliases="ALWAYS", - attributes: Incomplete | None = None, + attributes=None, size_limit: int = 0, time_limit: int = 0, types_only: bool = False, get_operational_attributes: bool = False, - controls: Incomplete | None = None, + controls=None, paged_size: int = 100, paged_criticality: bool = False, ): ... diff --git a/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi b/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi index 0b85f988dfd7..4ed285367246 100644 --- a/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi +++ b/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any class PersistentSearch: @@ -33,5 +32,5 @@ class PersistentSearch: ) -> None: ... def start(self) -> None: ... def stop(self, unbind: bool = True) -> None: ... - def next(self, block: bool = False, timeout: Incomplete | None = None): ... - def funnel(self, block: bool = False, timeout: Incomplete | None = None) -> None: ... + def next(self, block: bool = False, timeout=None): ... + def funnel(self, block: bool = False, timeout=None) -> None: ... diff --git a/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi b/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi index b609123ab966..1cd8156be3e8 100644 --- a/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi +++ b/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from ...extend.operation import ExtendedOperation from ...protocol.rfc3062 import PasswdModifyRequestValue, PasswdModifyResponseValue @@ -10,13 +8,6 @@ class ModifyPassword(ExtendedOperation): response_attribute: str def config(self) -> None: ... def __init__( - self, - connection, - user: Incomplete | None = None, - old_password: Incomplete | None = None, - new_password: Incomplete | None = None, - hash_algorithm: Incomplete | None = None, - salt: Incomplete | None = None, - controls: Incomplete | None = None, + self, connection, user=None, old_password=None, new_password=None, hash_algorithm=None, salt=None, controls=None ) -> None: ... def populate_result(self) -> None: ... diff --git a/stubs/ldap3/ldap3/operation/add.pyi b/stubs/ldap3/ldap3/operation/add.pyi index 2d10f9e12b4a..777b0e3c2f5f 100644 --- a/stubs/ldap3/ldap3/operation/add.pyi +++ b/stubs/ldap3/ldap3/operation/add.pyi @@ -1,7 +1,3 @@ -from _typeshed import Incomplete - -def add_operation( - dn, attributes, auto_encode, schema: Incomplete | None = None, validator: Incomplete | None = None, check_names: bool = False -): ... +def add_operation(dn, attributes, auto_encode, schema=None, validator=None, check_names: bool = False): ... def add_request_to_dict(request): ... 
def add_response_to_dict(response): ... diff --git a/stubs/ldap3/ldap3/operation/bind.pyi b/stubs/ldap3/ldap3/operation/bind.pyi index 3c2473d9eb51..9d8f99c13ecd 100644 --- a/stubs/ldap3/ldap3/operation/bind.pyi +++ b/stubs/ldap3/ldap3/operation/bind.pyi @@ -1,21 +1,9 @@ -from _typeshed import Incomplete - def bind_operation( - version, - authentication, - name: str = "", - password: Incomplete | None = None, - sasl_mechanism: Incomplete | None = None, - sasl_credentials: Incomplete | None = None, - auto_encode: bool = False, + version, authentication, name: str = "", password=None, sasl_mechanism=None, sasl_credentials=None, auto_encode: bool = False ): ... def bind_request_to_dict(request): ... def bind_response_operation( - result_code, - matched_dn: str = "", - diagnostic_message: str = "", - referral: Incomplete | None = None, - server_sasl_credentials: Incomplete | None = None, + result_code, matched_dn: str = "", diagnostic_message: str = "", referral=None, server_sasl_credentials=None ): ... def bind_response_to_dict(response): ... def sicily_bind_response_to_dict(response): ... diff --git a/stubs/ldap3/ldap3/operation/compare.pyi b/stubs/ldap3/ldap3/operation/compare.pyi index 88d507c4c31b..911781143cc4 100644 --- a/stubs/ldap3/ldap3/operation/compare.pyi +++ b/stubs/ldap3/ldap3/operation/compare.pyi @@ -1,13 +1,3 @@ -from _typeshed import Incomplete - -def compare_operation( - dn, - attribute, - value, - auto_encode, - schema: Incomplete | None = None, - validator: Incomplete | None = None, - check_names: bool = False, -): ... +def compare_operation(dn, attribute, value, auto_encode, schema=None, validator=None, check_names: bool = False): ... def compare_request_to_dict(request): ... def compare_response_to_dict(response): ... diff --git a/stubs/ldap3/ldap3/operation/modify.pyi b/stubs/ldap3/ldap3/operation/modify.pyi index 971f323f2264..53ff3564333c 100644 --- a/stubs/ldap3/ldap3/operation/modify.pyi +++ b/stubs/ldap3/ldap3/operation/modify.pyi @@ -1,10 +1,7 @@ -from _typeshed import Incomplete from typing import Any change_table: Any -def modify_operation( - dn, changes, auto_encode, schema: Incomplete | None = None, validator: Incomplete | None = None, check_names: bool = False -): ... +def modify_operation(dn, changes, auto_encode, schema=None, validator=None, check_names: bool = False): ... def modify_request_to_dict(request): ... def modify_response_to_dict(response): ... diff --git a/stubs/ldap3/ldap3/operation/modifyDn.pyi b/stubs/ldap3/ldap3/operation/modifyDn.pyi index dcd409b7edde..b69d03458aae 100644 --- a/stubs/ldap3/ldap3/operation/modifyDn.pyi +++ b/stubs/ldap3/ldap3/operation/modifyDn.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - -def modify_dn_operation(dn, new_relative_dn, delete_old_rdn: bool = True, new_superior: Incomplete | None = None): ... +def modify_dn_operation(dn, new_relative_dn, delete_old_rdn: bool = True, new_superior=None): ... def modify_dn_request_to_dict(request): ... def modify_dn_response_to_dict(response): ... diff --git a/stubs/ldap3/ldap3/operation/search.pyi b/stubs/ldap3/ldap3/operation/search.pyi index 19df68da5c1e..24577fea092c 100644 --- a/stubs/ldap3/ldap3/operation/search.pyi +++ b/stubs/ldap3/ldap3/operation/search.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any ROOT: int @@ -22,7 +21,7 @@ class FilterNode: parent: Any assertion: Any elements: Any - def __init__(self, tag: Incomplete | None = None, assertion: Incomplete | None = None) -> None: ... 
+ def __init__(self, tag=None, assertion=None) -> None: ... def __str__(self, pos: int = 0) -> str: ... def __repr__(self, pos: int = 0) -> str: ... def append(self, filter_node): ... @@ -42,8 +41,8 @@ def search_operation( types_only, auto_escape, auto_encode, - schema: Incomplete | None = None, - validator: Incomplete | None = None, + schema=None, + validator=None, check_names: bool = False, ): ... def decode_vals(vals): ... @@ -54,10 +53,8 @@ def decode_raw_vals(vals): ... def decode_raw_vals_fast(vals): ... def raw_attributes_to_dict(attribute_list): ... def raw_attributes_to_dict_fast(attribute_list): ... -def checked_attributes_to_dict(attribute_list, schema: Incomplete | None = None, custom_formatter: Incomplete | None = None): ... -def checked_attributes_to_dict_fast( - attribute_list, schema: Incomplete | None = None, custom_formatter: Incomplete | None = None -): ... +def checked_attributes_to_dict(attribute_list, schema=None, custom_formatter=None): ... +def checked_attributes_to_dict_fast(attribute_list, schema=None, custom_formatter=None): ... def matching_rule_assertion_to_string(matching_rule_assertion): ... def filter_to_string(filter_object): ... def search_request_to_dict(request): ... diff --git a/stubs/ldap3/ldap3/protocol/convert.pyi b/stubs/ldap3/ldap3/protocol/convert.pyi index e85da5f58302..10b06b4f6165 100644 --- a/stubs/ldap3/ldap3/protocol/convert.pyi +++ b/stubs/ldap3/ldap3/protocol/convert.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - def to_str_or_normalized_unicode(val): ... def attribute_to_dict(attribute): ... def attributes_to_dict(attributes): ... @@ -17,8 +15,6 @@ def substring_to_dict(substring): ... def prepare_changes_for_request(changes): ... def build_controls_list(controls): ... def validate_assertion_value(schema, name, value, auto_escape, auto_encode, validator, check_names): ... -def validate_attribute_value( - schema, name, value, auto_encode, validator: Incomplete | None = None, check_names: bool = False -): ... +def validate_attribute_value(schema, name, value, auto_encode, validator=None, check_names: bool = False): ... def prepare_filter_for_sending(raw_string): ... def prepare_for_sending(raw_string): ... diff --git a/stubs/ldap3/ldap3/protocol/rfc2696.pyi b/stubs/ldap3/ldap3/protocol/rfc2696.pyi index 7e72093e8b51..4df6aa5aed3e 100644 --- a/stubs/ldap3/ldap3/protocol/rfc2696.pyi +++ b/stubs/ldap3/ldap3/protocol/rfc2696.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Final from pyasn1.type.constraint import ConstraintsIntersection, ValueRangeConstraint @@ -17,4 +16,4 @@ class Cookie(OctetString): ... class RealSearchControlValue(Sequence): componentType: NamedTypes -def paged_search_control(criticality: bool = False, size: int = 10, cookie: Incomplete | None = None): ... +def paged_search_control(criticality: bool = False, size: int = 10, cookie=None): ... diff --git a/stubs/ldap3/ldap3/protocol/rfc2849.pyi b/stubs/ldap3/ldap3/protocol/rfc2849.pyi index 6bfed49cbc47..03de80172871 100644 --- a/stubs/ldap3/ldap3/protocol/rfc2849.pyi +++ b/stubs/ldap3/ldap3/protocol/rfc2849.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any conf_ldif_line_length: Any @@ -7,12 +6,12 @@ def safe_ldif_string(bytes_value): ... def add_controls(controls, all_base64): ... def add_attributes(attributes, all_base64): ... def sort_ldif_lines(lines, sort_order): ... -def search_response_to_ldif(entries, all_base64, sort_order: Incomplete | None = None): ... 
-def add_request_to_ldif(entry, all_base64, sort_order: Incomplete | None = None): ... -def delete_request_to_ldif(entry, all_base64, sort_order: Incomplete | None = None): ... -def modify_request_to_ldif(entry, all_base64, sort_order: Incomplete | None = None): ... -def modify_dn_request_to_ldif(entry, all_base64, sort_order: Incomplete | None = None): ... -def operation_to_ldif(operation_type, entries, all_base64: bool = False, sort_order: Incomplete | None = None): ... +def search_response_to_ldif(entries, all_base64, sort_order=None): ... +def add_request_to_ldif(entry, all_base64, sort_order=None): ... +def delete_request_to_ldif(entry, all_base64, sort_order=None): ... +def modify_request_to_ldif(entry, all_base64, sort_order=None): ... +def modify_dn_request_to_ldif(entry, all_base64, sort_order=None): ... +def operation_to_ldif(operation_type, entries, all_base64: bool = False, sort_order=None): ... def add_ldif_header(ldif_lines): ... def ldif_sort(line, sort_order): ... def decode_persistent_search_control(change): ... diff --git a/stubs/ldap3/ldap3/protocol/rfc4512.pyi b/stubs/ldap3/ldap3/protocol/rfc4512.pyi index 6f586820061d..b3c3d10f426a 100644 --- a/stubs/ldap3/ldap3/protocol/rfc4512.pyi +++ b/stubs/ldap3/ldap3/protocol/rfc4512.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any def constant_to_class_kind(value): ... @@ -13,9 +12,9 @@ class BaseServerInfo: raw: Any def __init__(self, raw_attributes) -> None: ... @classmethod - def from_json(cls, json_definition, schema: Incomplete | None = None, custom_formatter: Incomplete | None = None): ... + def from_json(cls, json_definition, schema=None, custom_formatter=None): ... @classmethod - def from_file(cls, target, schema: Incomplete | None = None, custom_formatter: Incomplete | None = None): ... + def from_file(cls, target, schema=None, custom_formatter=None): ... def to_file(self, target, indent: int = 4, sort: bool = True) -> None: ... def to_json(self, indent: int = 4, sort: bool = True): ... @@ -58,14 +57,7 @@ class BaseObjectInfo: experimental: Any raw_definition: Any def __init__( - self, - oid: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, - obsolete: bool = False, - extensions: Incomplete | None = None, - experimental: Incomplete | None = None, - definition: Incomplete | None = None, + self, oid=None, name=None, description=None, obsolete: bool = False, extensions=None, experimental=None, definition=None ) -> None: ... @property def oid_info(self): ... @@ -76,28 +68,28 @@ class MatchingRuleInfo(BaseObjectInfo): syntax: Any def __init__( self, - oid: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, + oid=None, + name=None, + description=None, obsolete: bool = False, - syntax: Incomplete | None = None, - extensions: Incomplete | None = None, - experimental: Incomplete | None = None, - definition: Incomplete | None = None, + syntax=None, + extensions=None, + experimental=None, + definition=None, ) -> None: ... 
class MatchingRuleUseInfo(BaseObjectInfo): apply_to: Any def __init__( self, - oid: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, + oid=None, + name=None, + description=None, obsolete: bool = False, - apply_to: Incomplete | None = None, - extensions: Incomplete | None = None, - experimental: Incomplete | None = None, - definition: Incomplete | None = None, + apply_to=None, + extensions=None, + experimental=None, + definition=None, ) -> None: ... class ObjectClassInfo(BaseObjectInfo): @@ -107,17 +99,17 @@ class ObjectClassInfo(BaseObjectInfo): may_contain: Any def __init__( self, - oid: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, + oid=None, + name=None, + description=None, obsolete: bool = False, - superior: Incomplete | None = None, - kind: Incomplete | None = None, - must_contain: Incomplete | None = None, - may_contain: Incomplete | None = None, - extensions: Incomplete | None = None, - experimental: Incomplete | None = None, - definition: Incomplete | None = None, + superior=None, + kind=None, + must_contain=None, + may_contain=None, + extensions=None, + experimental=None, + definition=None, ) -> None: ... class AttributeTypeInfo(BaseObjectInfo): @@ -135,34 +127,27 @@ class AttributeTypeInfo(BaseObjectInfo): optional_in: Any def __init__( self, - oid: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, + oid=None, + name=None, + description=None, obsolete: bool = False, - superior: Incomplete | None = None, - equality: Incomplete | None = None, - ordering: Incomplete | None = None, - substring: Incomplete | None = None, - syntax: Incomplete | None = None, - min_length: Incomplete | None = None, + superior=None, + equality=None, + ordering=None, + substring=None, + syntax=None, + min_length=None, single_value: bool = False, collective: bool = False, no_user_modification: bool = False, - usage: Incomplete | None = None, - extensions: Incomplete | None = None, - experimental: Incomplete | None = None, - definition: Incomplete | None = None, + usage=None, + extensions=None, + experimental=None, + definition=None, ) -> None: ... class LdapSyntaxInfo(BaseObjectInfo): - def __init__( - self, - oid: Incomplete | None = None, - description: Incomplete | None = None, - extensions: Incomplete | None = None, - experimental: Incomplete | None = None, - definition: Incomplete | None = None, - ) -> None: ... + def __init__(self, oid=None, description=None, extensions=None, experimental=None, definition=None) -> None: ... class DitContentRuleInfo(BaseObjectInfo): auxiliary_classes: Any @@ -171,17 +156,17 @@ class DitContentRuleInfo(BaseObjectInfo): not_contains: Any def __init__( self, - oid: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, + oid=None, + name=None, + description=None, obsolete: bool = False, - auxiliary_classes: Incomplete | None = None, - must_contain: Incomplete | None = None, - may_contain: Incomplete | None = None, - not_contains: Incomplete | None = None, - extensions: Incomplete | None = None, - experimental: Incomplete | None = None, - definition: Incomplete | None = None, + auxiliary_classes=None, + must_contain=None, + may_contain=None, + not_contains=None, + extensions=None, + experimental=None, + definition=None, ) -> None: ... 
class DitStructureRuleInfo(BaseObjectInfo): @@ -189,15 +174,15 @@ class DitStructureRuleInfo(BaseObjectInfo): name_form: Any def __init__( self, - oid: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, + oid=None, + name=None, + description=None, obsolete: bool = False, - name_form: Incomplete | None = None, - superior: Incomplete | None = None, - extensions: Incomplete | None = None, - experimental: Incomplete | None = None, - definition: Incomplete | None = None, + name_form=None, + superior=None, + extensions=None, + experimental=None, + definition=None, ) -> None: ... class NameFormInfo(BaseObjectInfo): @@ -206,14 +191,14 @@ class NameFormInfo(BaseObjectInfo): may_contain: Any def __init__( self, - oid: Incomplete | None = None, - name: Incomplete | None = None, - description: Incomplete | None = None, + oid=None, + name=None, + description=None, obsolete: bool = False, - object_class: Incomplete | None = None, - must_contain: Incomplete | None = None, - may_contain: Incomplete | None = None, - extensions: Incomplete | None = None, - experimental: Incomplete | None = None, - definition: Incomplete | None = None, + object_class=None, + must_contain=None, + may_contain=None, + extensions=None, + experimental=None, + definition=None, ) -> None: ... diff --git a/stubs/ldap3/ldap3/strategy/base.pyi b/stubs/ldap3/ldap3/strategy/base.pyi index f0437a94e41d..41fb3a05c1a6 100644 --- a/stubs/ldap3/ldap3/strategy/base.pyi +++ b/stubs/ldap3/ldap3/strategy/base.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any unix_socket_available: bool @@ -17,8 +16,8 @@ class BaseStrategy: def __init__(self, ldap_connection) -> None: ... def open(self, reset_usage: bool = True, read_server_info: bool = True) -> None: ... def close(self) -> None: ... - def send(self, message_type, request, controls: Incomplete | None = None): ... - def get_response(self, message_id, timeout: Incomplete | None = None, get_request: bool = False): ... + def send(self, message_type, request, controls=None): ... + def get_response(self, message_id, timeout=None, get_request: bool = False): ... @staticmethod def compute_ldap_message_size(data): ... def decode_response(self, ldap_message): ... @@ -28,7 +27,7 @@ class BaseStrategy: @staticmethod def decode_control_fast(control, from_server: bool = True): ... @staticmethod - def decode_request(message_type, component, controls: Incomplete | None = None): ... + def decode_request(message_type, component, controls=None): ... def valid_referral_list(self, referrals): ... def do_next_range_search(self, request, response, attr_name): ... def do_search_on_auto_range(self, request, response): ... diff --git a/stubs/ldap3/ldap3/strategy/ldifProducer.pyi b/stubs/ldap3/ldap3/strategy/ldifProducer.pyi index 5daee79b5fa9..0c740e77cc2a 100644 --- a/stubs/ldap3/ldap3/strategy/ldifProducer.pyi +++ b/stubs/ldap3/ldap3/strategy/ldifProducer.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from .base import BaseStrategy @@ -14,7 +13,7 @@ class LdifProducerStrategy(BaseStrategy): order: Any def __init__(self, ldap_connection) -> None: ... def receiving(self) -> None: ... - def send(self, message_type, request, controls: Incomplete | None = None): ... + def send(self, message_type, request, controls=None): ... def post_send_single_response(self, message_id): ... def post_send_search(self, message_id) -> None: ... def accumulate_stream(self, fragment) -> None: ... 
diff --git a/stubs/ldap3/ldap3/strategy/mockAsync.pyi b/stubs/ldap3/ldap3/strategy/mockAsync.pyi index 3b690c890982..619b8befa28e 100644 --- a/stubs/ldap3/ldap3/strategy/mockAsync.pyi +++ b/stubs/ldap3/ldap3/strategy/mockAsync.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from .asynchronous import AsyncStrategy @@ -9,4 +8,4 @@ class MockAsyncStrategy(MockBaseStrategy, AsyncStrategy): def post_send_search(self, payload): ... bound: Any def post_send_single_response(self, payload): ... - def get_response(self, message_id, timeout: Incomplete | None = None, get_request: bool = False): ... + def get_response(self, message_id, timeout=None, get_request: bool = False): ... diff --git a/stubs/ldap3/ldap3/strategy/mockBase.pyi b/stubs/ldap3/ldap3/strategy/mockBase.pyi index e5d9e213bcc4..8f828dcfc12b 100644 --- a/stubs/ldap3/ldap3/strategy/mockBase.pyi +++ b/stubs/ldap3/ldap3/strategy/mockBase.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any SEARCH_CONTROLS: Any @@ -13,7 +12,7 @@ class PagedSearchSet: sent: int done: bool def __init__(self, response, size, criticality) -> None: ... - def next(self, size: Incomplete | None = None): ... + def next(self, size=None): ... class MockBaseStrategy: entries: Any @@ -35,4 +34,4 @@ class MockBaseStrategy: def mock_extended(self, request_message, controls): ... def evaluate_filter_node(self, node, candidates): ... def equal(self, dn, attribute_type, value_to_check): ... - def send(self, message_type, request, controls: Incomplete | None = None): ... + def send(self, message_type, request, controls=None): ... diff --git a/stubs/ldap3/ldap3/strategy/restartable.pyi b/stubs/ldap3/ldap3/strategy/restartable.pyi index f7bce1d24bda..4e5c6dc3779f 100644 --- a/stubs/ldap3/ldap3/strategy/restartable.pyi +++ b/stubs/ldap3/ldap3/strategy/restartable.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from .sync import SyncStrategy @@ -13,7 +12,7 @@ class RestartableStrategy(SyncStrategy): exception_history: Any def __init__(self, ldap_connection) -> None: ... def open(self, reset_usage: bool = False, read_server_info: bool = True) -> None: ... - def send(self, message_type, request, controls: Incomplete | None = None): ... + def send(self, message_type, request, controls=None): ... def post_send_single_response(self, message_id): ... def post_send_search(self, message_id): ... def get_stream(self) -> None: ... diff --git a/stubs/ldap3/ldap3/strategy/reusable.pyi b/stubs/ldap3/ldap3/strategy/reusable.pyi index 10413bcff780..4624d7058fad 100644 --- a/stubs/ldap3/ldap3/strategy/reusable.pyi +++ b/stubs/ldap3/ldap3/strategy/reusable.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from threading import Thread from typing import Any @@ -69,8 +68,8 @@ class ReusableStrategy(BaseStrategy): def __init__(self, ldap_connection) -> None: ... def open(self, reset_usage: bool = True, read_server_info: bool = True) -> None: ... def terminate(self) -> None: ... - def send(self, message_type, request, controls: Incomplete | None = None): ... + def send(self, message_type, request, controls=None): ... def validate_bind(self, controls): ... - def get_response(self, counter, timeout: Incomplete | None = None, get_request: bool = False): ... + def get_response(self, counter, timeout=None, get_request: bool = False): ... def post_send_single_response(self, counter): ... def post_send_search(self, counter): ... 
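Several strategy classes above share the `get_response(message_id, timeout=None, get_request=False)` signature. A sketch of how that surfaces through the public API with the asynchronous strategy; the server name and base DN are placeholders and the exact return shape follows the ldap3 documentation:

from ldap3 import ASYNC, Connection, Server

server = Server("ldap.example.com")
conn = Connection(server, client_strategy=ASYNC, auto_bind=True)

# Asynchronous operations return a message id instead of a result; the
# response is collected later. Leaving timeout=None uses the strategy default.
msg_id = conn.search("dc=example,dc=com", "(objectclass=person)", attributes=["cn"])
responses, result = conn.get_response(msg_id)
for entry in responses:
    print(entry["dn"])
conn.unbind()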
diff --git a/stubs/ldap3/ldap3/utils/ciDict.pyi b/stubs/ldap3/ldap3/utils/ciDict.pyi index 333dac5983c1..ff9abffa557e 100644 --- a/stubs/ldap3/ldap3/utils/ciDict.pyi +++ b/stubs/ldap3/ldap3/utils/ciDict.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import MutableMapping from typing import TypeVar @@ -6,7 +5,7 @@ _KT = TypeVar("_KT") _VT = TypeVar("_VT") class CaseInsensitiveDict(MutableMapping[_KT, _VT]): - def __init__(self, other: Incomplete | None = None, **kwargs) -> None: ... + def __init__(self, other=None, **kwargs) -> None: ... def __contains__(self, item): ... def __delitem__(self, key) -> None: ... def __setitem__(self, key, item) -> None: ... @@ -20,7 +19,7 @@ class CaseInsensitiveDict(MutableMapping[_KT, _VT]): def copy(self): ... class CaseInsensitiveWithAliasDict(CaseInsensitiveDict[_KT, _VT]): - def __init__(self, other: Incomplete | None = None, **kwargs) -> None: ... + def __init__(self, other=None, **kwargs) -> None: ... def aliases(self): ... def __setitem__(self, key, value) -> None: ... def __delitem__(self, key) -> None: ... diff --git a/stubs/ldap3/ldap3/utils/conv.pyi b/stubs/ldap3/ldap3/utils/conv.pyi index 6837700a346c..a31439c08e46 100644 --- a/stubs/ldap3/ldap3/utils/conv.pyi +++ b/stubs/ldap3/ldap3/utils/conv.pyi @@ -1,9 +1,7 @@ -from _typeshed import Incomplete - def to_unicode(obj: float | bytes | str, encoding: str | None = None, from_server: bool = False) -> str: ... def to_raw(obj, encoding: str = "utf-8"): ... def escape_filter_chars(text: float | bytes | str, encoding: str | None = None) -> str: ... -def unescape_filter_chars(text, encoding: Incomplete | None = None): ... +def unescape_filter_chars(text, encoding=None): ... def escape_bytes(bytes_value: str | bytes) -> str: ... def prepare_for_stream(value): ... def json_encode_b64(obj): ... diff --git a/stubs/ldap3/ldap3/utils/hashed.pyi b/stubs/ldap3/ldap3/utils/hashed.pyi index e255563f99c4..c4aa9f8a3d49 100644 --- a/stubs/ldap3/ldap3/utils/hashed.pyi +++ b/stubs/ldap3/ldap3/utils/hashed.pyi @@ -1,7 +1,6 @@ -from _typeshed import Incomplete from typing import Any algorithms_table: Any salted_table: Any -def hashed(algorithm, value, salt: Incomplete | None = None, raw: bool = False, encoding: str = "utf-8"): ... +def hashed(algorithm, value, salt=None, raw: bool = False, encoding: str = "utf-8"): ... diff --git a/stubs/m3u8/m3u8/__init__.pyi b/stubs/m3u8/m3u8/__init__.pyi index c2d7c4548a93..888edc10743c 100644 --- a/stubs/m3u8/m3u8/__init__.pyi +++ b/stubs/m3u8/m3u8/__init__.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Callable, Mapping from typing import Any from typing_extensions import TypeAlias @@ -66,7 +65,7 @@ _CustomTagsParser: TypeAlias = Callable[[str, int, dict[str, Any], dict[str, Any def loads(content: str, uri: str | None = None, custom_tags_parser: _CustomTagsParser | None = None) -> M3U8: ... 
def load( uri: str, - timeout: Incomplete | None = None, + timeout=None, headers: Mapping[str, Any] = {}, custom_tags_parser: _CustomTagsParser | None = None, http_client: _HTTPClientProtocol = ..., diff --git a/stubs/m3u8/m3u8/model.pyi b/stubs/m3u8/m3u8/model.pyi index 17e7ef4941fe..d9b50c11f051 100644 --- a/stubs/m3u8/m3u8/model.pyi +++ b/stubs/m3u8/m3u8/model.pyi @@ -177,12 +177,12 @@ class PartialSegment(BasePathMixin): uri: str | None, duration: float | None, program_date_time: dt.datetime | None = None, - current_program_date_time: Incomplete | None = None, - byterange: Incomplete | None = None, - independent: Incomplete | None = None, - gap: Incomplete | None = None, + current_program_date_time=None, + byterange=None, + independent=None, + gap=None, dateranges: list[Mapping[str, Incomplete]] | None = None, - gap_tag: Incomplete | None = None, + gap_tag=None, ) -> None: ... def dumps(self, last_segment) -> str: ... diff --git a/stubs/mock/mock/mock.pyi b/stubs/mock/mock/mock.pyi index 132f1ffde4ee..a4ad35e575cb 100644 --- a/stubs/mock/mock/mock.pyi +++ b/stubs/mock/mock/mock.pyi @@ -47,24 +47,12 @@ DEFAULT: _SentinelObject class _Call(tuple[Any, ...]): def __new__( - cls, - value: Any = (), - name: Incomplete | None = "", - parent: Incomplete | None = None, - two: bool = False, - from_kall: bool = True, + cls, value: Any = (), name: Incomplete | None = "", parent=None, two: bool = False, from_kall: bool = True ) -> Self: ... name: Any parent: Any from_kall: Any - def __init__( - self, - value: Any = (), - name: Incomplete | None = None, - parent: Incomplete | None = None, - two: bool = False, - from_kall: bool = True, - ) -> None: ... + def __init__(self, value: Any = (), name=None, parent=None, two: bool = False, from_kall: bool = True) -> None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object, /) -> bool: ... def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... @@ -93,7 +81,7 @@ class NonCallableMock(Base, Any): name: str | None = None, spec_set: list[str] | object | type[object] | None = None, parent: NonCallableMock | None = None, - _spec_state: Incomplete | None = None, + _spec_state=None, _new_name: str = "", _new_parent: NonCallableMock | None = None, _spec_as_instance: bool = False, @@ -108,7 +96,7 @@ class NonCallableMock(Base, Any): name: str | None = None, spec_set: list[str] | object | type[object] | None = None, parent: NonCallableMock | None = None, - _spec_state: Incomplete | None = None, + _spec_state=None, _new_name: str = "", _new_parent: NonCallableMock | None = None, _spec_as_instance: bool = False, @@ -147,16 +135,16 @@ class CallableMixin(Base): side_effect: Any def __init__( self, - spec: Incomplete | None = None, - side_effect: Incomplete | None = None, + spec=None, + side_effect=None, return_value: Any = ..., - wraps: Incomplete | None = None, - name: Incomplete | None = None, - spec_set: Incomplete | None = None, - parent: Incomplete | None = None, - _spec_state: Incomplete | None = None, + wraps=None, + name=None, + spec_set=None, + parent=None, + _spec_state=None, _new_name: Any = "", - _new_parent: Incomplete | None = None, + _new_parent=None, **kwargs: Any, ) -> None: ... def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ... @@ -341,7 +329,7 @@ class MagicProxy(Base): parent: Any def __init__(self, name: str, parent: Any) -> None: ... def create_mock(self) -> Any: ... - def __get__(self, obj: Any, _type: Incomplete | None = None) -> Any: ... + def __get__(self, obj: Any, _type=None) -> Any: ... 
class _ANY: def __eq__(self, other: object) -> Literal[True]: ... @@ -350,14 +338,7 @@ class _ANY: ANY: Any def create_autospec( - spec: Any, - spec_set: Any = False, - instance: Any = False, - _parent: Incomplete | None = None, - _name: Incomplete | None = None, - *, - unsafe: bool = False, - **kwargs: Any, + spec: Any, spec_set: Any = False, instance: Any = False, _parent=None, _name=None, *, unsafe: bool = False, **kwargs: Any ) -> Any: ... class _SpecState: @@ -367,17 +348,9 @@ class _SpecState: parent: Any instance: Any name: Any - def __init__( - self, - spec: Any, - spec_set: Any = False, - parent: Incomplete | None = None, - name: Incomplete | None = None, - ids: Incomplete | None = None, - instance: Any = False, - ) -> None: ... + def __init__(self, spec: Any, spec_set: Any = False, parent=None, name=None, ids=None, instance: Any = False) -> None: ... -def mock_open(mock: Incomplete | None = None, read_data: Any = "") -> Any: ... +def mock_open(mock=None, read_data: Any = "") -> Any: ... class PropertyMock(Mock): def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... diff --git a/stubs/mysqlclient/MySQLdb/cursors.pyi b/stubs/mysqlclient/MySQLdb/cursors.pyi index f572c38e1ab3..e3111af67896 100644 --- a/stubs/mysqlclient/MySQLdb/cursors.pyi +++ b/stubs/mysqlclient/MySQLdb/cursors.pyi @@ -39,7 +39,7 @@ class BaseCursor: def nextset(self): ... def setinputsizes(self, *args) -> None: ... def setoutputsizes(self, *args) -> None: ... - def execute(self, query, args: Incomplete | None = None): ... + def execute(self, query, args=None): ... def mogrify(self, query: str | bytes, args: _Arguments | None = None) -> str: ... def executemany(self, query: LiteralString, args: Iterable[_Arguments]) -> int | None: ... def callproc(self, procname, args=()): ... @@ -48,7 +48,7 @@ class BaseCursor: class CursorStoreResultMixIn: rownumber: Incomplete def fetchone(self): ... - def fetchmany(self, size: Incomplete | None = None): ... + def fetchmany(self, size=None): ... def fetchall(self): ... def scroll(self, value, mode: str = "relative") -> None: ... def __iter__(self): ... @@ -56,7 +56,7 @@ class CursorStoreResultMixIn: class CursorUseResultMixIn: rownumber: Incomplete def fetchone(self): ... - def fetchmany(self, size: Incomplete | None = None): ... + def fetchmany(self, size=None): ... def fetchall(self): ... def __iter__(self): ... def next(self): ... diff --git a/stubs/oauthlib/oauthlib/common.pyi b/stubs/oauthlib/oauthlib/common.pyi index f691b0e496f3..9fd6b272e212 100644 --- a/stubs/oauthlib/oauthlib/common.pyi +++ b/stubs/oauthlib/oauthlib/common.pyi @@ -52,7 +52,7 @@ class CaseInsensitiveDict(dict[str, Incomplete]): def __contains__(self, k: object) -> bool: ... def __delitem__(self, k: str) -> None: ... def __getitem__(self, k: str): ... - def get(self, k: str, default: Incomplete | None = None) -> Incomplete | None: ... + def get(self, k: str, default=None) -> Incomplete | None: ... def __setitem__(self, k: str, v) -> None: ... def update(self, *args, **kwargs) -> None: ... 
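For the mock stubs, the same default simplification applies to `mock_open(mock=None, read_data=...)`. A small, runnable sketch using the standalone `mock` distribution these stubs describe; the file name is arbitrary and never touched on disk:

from mock import mock_open, patch

m = mock_open(read_data="alpha\nbeta\n")
with patch("builtins.open", m):
    with open("settings.txt") as fh:  # served from read_data, not the filesystem
        print(fh.read())

m.assert_called_once_with("settings.txt")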
diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi index a98b62b667fb..6730205943f7 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi @@ -41,26 +41,19 @@ class Client: self, client_key: str, client_secret: str | None = None, - resource_owner_key: Incomplete | None = None, - resource_owner_secret: Incomplete | None = None, - callback_uri: Incomplete | None = None, + resource_owner_key=None, + resource_owner_secret=None, + callback_uri=None, signature_method="HMAC-SHA1", signature_type="AUTH_HEADER", - rsa_key: Incomplete | None = None, - verifier: Incomplete | None = None, - realm: Incomplete | None = None, + rsa_key=None, + verifier=None, + realm=None, encoding: str = "utf-8", - decoding: Incomplete | None = None, - nonce: Incomplete | None = None, - timestamp: Incomplete | None = None, + decoding=None, + nonce=None, + timestamp=None, ): ... def get_oauth_signature(self, request): ... def get_oauth_params(self, request): ... - def sign( - self, - uri, - http_method: str = "GET", - body: str | None = None, - headers: dict[str, str] | None = None, - realm: Incomplete | None = None, - ): ... + def sign(self, uri, http_method: str = "GET", body: str | None = None, headers: dict[str, str] | None = None, realm=None): ... diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi index a9f541a2b282..1a05e8f7a9b2 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from logging import Logger from .base import BaseEndpoint as BaseEndpoint @@ -7,12 +6,5 @@ log: Logger class AccessTokenEndpoint(BaseEndpoint): def create_access_token(self, request, credentials): ... - def create_access_token_response( - self, - uri, - http_method: str = "GET", - body: Incomplete | None = None, - headers: Incomplete | None = None, - credentials: Incomplete | None = None, - ): ... + def create_access_token_response(self, uri, http_method: str = "GET", body=None, headers=None, credentials=None): ... def validate_access_token_request(self, request): ... diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/authorization.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/authorization.pyi index 24c7747e1115..478b0dc9ff20 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/authorization.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/authorization.pyi @@ -1,18 +1,8 @@ -from _typeshed import Incomplete - from .base import BaseEndpoint as BaseEndpoint class AuthorizationEndpoint(BaseEndpoint): def create_verifier(self, request, credentials): ... def create_authorization_response( - self, - uri, - http_method: str = "GET", - body: Incomplete | None = None, - headers: Incomplete | None = None, - realms: Incomplete | None = None, - credentials: Incomplete | None = None, - ): ... - def get_realms_and_credentials( - self, uri, http_method: str = "GET", body: Incomplete | None = None, headers: Incomplete | None = None + self, uri, http_method: str = "GET", body=None, headers=None, realms=None, credentials=None ): ... + def get_realms_and_credentials(self, uri, http_method: str = "GET", body=None, headers=None): ... 
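The `Client.sign()` signature above now takes `realm=None` as a bare default alongside the documented parameters. A minimal signing sketch with placeholder credentials; no network access is needed because the OAuth 1.0a signature is computed locally:

from oauthlib.oauth1 import Client

client = Client("placeholder-key", client_secret="placeholder-secret")
uri, headers, body = client.sign("https://api.example.com/resource?page=1")
print(headers["Authorization"])  # the generated OAuth 1.0a Authorization header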
diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/base.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/base.pyi index 941c42ac8033..6ba5cb7a6471 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/base.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/base.pyi @@ -1,7 +1,6 @@ -from _typeshed import Incomplete from typing import Any class BaseEndpoint: request_validator: Any token_generator: Any - def __init__(self, request_validator, token_generator: Incomplete | None = None) -> None: ... + def __init__(self, request_validator, token_generator=None) -> None: ... diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi index dcdd497ec614..9f8f06aca4d7 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from logging import Logger from .base import BaseEndpoint as BaseEndpoint @@ -7,12 +6,5 @@ log: Logger class RequestTokenEndpoint(BaseEndpoint): def create_request_token(self, request, credentials): ... - def create_request_token_response( - self, - uri, - http_method: str = "GET", - body: Incomplete | None = None, - headers: Incomplete | None = None, - credentials: Incomplete | None = None, - ): ... + def create_request_token_response(self, uri, http_method: str = "GET", body=None, headers=None, credentials=None): ... def validate_request_token_request(self, request): ... diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi index c59cab3e82f8..a262b1a70729 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from logging import Logger from .base import BaseEndpoint as BaseEndpoint @@ -6,11 +5,4 @@ from .base import BaseEndpoint as BaseEndpoint log: Logger class ResourceEndpoint(BaseEndpoint): - def validate_protected_resource_request( - self, - uri, - http_method: str = "GET", - body: Incomplete | None = None, - headers: Incomplete | None = None, - realms: Incomplete | None = None, - ): ... + def validate_protected_resource_request(self, uri, http_method: str = "GET", body=None, headers=None, realms=None): ... diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi index 38ac0d31be9d..8000469a86c3 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from logging import Logger from .base import BaseEndpoint as BaseEndpoint @@ -6,6 +5,4 @@ from .base import BaseEndpoint as BaseEndpoint log: Logger class SignatureOnlyEndpoint(BaseEndpoint): - def validate_request( - self, uri, http_method: str = "GET", body: Incomplete | None = None, headers: Incomplete | None = None - ): ... + def validate_request(self, uri, http_method: str = "GET", body=None, headers=None): ... 
diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/errors.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/errors.pyi index 4e05e4eca034..1337e5b76355 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/errors.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/errors.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any class OAuth1Error(Exception): @@ -6,13 +5,7 @@ class OAuth1Error(Exception): description: str uri: Any status_code: Any - def __init__( - self, - description: Incomplete | None = None, - uri: Incomplete | None = None, - status_code: int = 400, - request: Incomplete | None = None, - ) -> None: ... + def __init__(self, description=None, uri=None, status_code: int = 400, request=None) -> None: ... def in_uri(self, uri): ... @property def twotuples(self): ... diff --git a/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi b/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi index 9da09f5d07e3..c86e96d0f646 100644 --- a/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi +++ b/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, Unused +from _typeshed import Unused from collections.abc import Iterable from logging import Logger @@ -17,14 +17,10 @@ def collect_parameters( ) -> list[tuple[str, str]]: ... def normalize_parameters(params: dict[str, str]) -> str: ... def sign_hmac_sha1_with_client(sig_base_str: str, client): ... -def verify_hmac_sha1( - request: Request, client_secret: Incomplete | None = None, resource_owner_secret: Incomplete | None = None -) -> bool: ... +def verify_hmac_sha1(request: Request, client_secret=None, resource_owner_secret=None) -> bool: ... def sign_hmac_sha1(base_string: str | bytes, client_secret, resource_owner_secret): ... def sign_hmac_sha256_with_client(sig_base_str, client): ... -def verify_hmac_sha256( - request, client_secret: Incomplete | None = None, resource_owner_secret: Incomplete | None = None -) -> bool: ... +def verify_hmac_sha256(request, client_secret=None, resource_owner_secret=None) -> bool: ... def sign_hmac_sha256(base_string: str | bytes, client_secret, resource_owner_secret): ... def sign_hmac_sha512_with_client(sig_base_str: str, client): ... def verify_hmac_sha512(request, client_secret: str | None = None, resource_owner_secret: str | None = None) -> bool: ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi index cf7ead300b4a..f4604fd1fe0b 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi @@ -23,7 +23,7 @@ class AuthorizationEndpoint(BaseEndpoint): http_method: _HTTPMethod = "GET", body: str | None = None, headers: dict[str, str] | None = None, - scopes: Incomplete | None = None, + scopes=None, credentials: dict[str, Incomplete] | None = None, ): ... def validate_authorization_request( diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi index 9259ac8d4920..8c27bdffec02 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi @@ -21,6 +21,6 @@ class ResourceEndpoint(BaseEndpoint): http_method: _HTTPMethod = "GET", body: str | None = None, headers: dict[str, str] | None = None, - scopes: Incomplete | None = None, + scopes=None, ) -> tuple[bool, Request]: ... 
def find_token_type(self, request: Request): ... diff --git a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi index 9b90fe906b6c..5634512740a0 100644 --- a/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi +++ b/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi @@ -25,8 +25,8 @@ class TokenEndpoint(BaseEndpoint): http_method: _HTTPMethod = "POST", body: str | None = None, headers: dict[str, str] | None = None, - credentials: Incomplete | None = None, - grant_type_for_scope: Incomplete | None = None, - claims: Incomplete | None = None, + credentials=None, + grant_type_for_scope=None, + claims=None, ): ... def validate_token_request(self, request: Request) -> None: ... diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi index 24bb8d409449..c7c3039277b3 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi @@ -1,5 +1,4 @@ from _hashlib import HASH -from _typeshed import Incomplete from collections.abc import Callable from logging import Logger @@ -14,7 +13,7 @@ class GrantTypeBase: def id_token_hash( self, value: str, hashfunc: Callable[..., HASH] = ... # Arguments: ReadableBuffer (string) and bool (usedforsecurity) ) -> str: ... - def add_id_token(self, token, token_handler, request: Request, nonce: Incomplete | None = None): ... + def add_id_token(self, token, token_handler, request: Request, nonce=None): ... def openid_authorization_validator(self, request: Request): ... OpenIDConnectBase = GrantTypeBase diff --git a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi index 4dcf5a690771..67428ed7d1e8 100644 --- a/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi +++ b/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi @@ -13,14 +13,14 @@ class Dispatcher: class AuthorizationCodeGrantDispatcher(Dispatcher): default_grant: Incomplete | None oidc_grant: Incomplete | None - def __init__(self, default_grant: Incomplete | None = None, oidc_grant: Incomplete | None = None) -> None: ... + def __init__(self, default_grant=None, oidc_grant=None) -> None: ... def create_authorization_response(self, request: Request, token_handler): ... def validate_authorization_request(self, request: Request): ... class ImplicitTokenGrantDispatcher(Dispatcher): default_grant: Incomplete | None oidc_grant: Incomplete | None - def __init__(self, default_grant: Incomplete | None = None, oidc_grant: Incomplete | None = None) -> None: ... + def __init__(self, default_grant=None, oidc_grant=None) -> None: ... def create_authorization_response(self, request: Request, token_handler): ... def validate_authorization_request(self, request: Request): ... @@ -28,10 +28,5 @@ class AuthorizationTokenGrantDispatcher(Dispatcher): default_grant: Incomplete | None oidc_grant: Incomplete | None request_validator: OAuth2RequestValidator - def __init__( - self, - request_validator: OAuth2RequestValidator, - default_grant: Incomplete | None = None, - oidc_grant: Incomplete | None = None, - ) -> None: ... + def __init__(self, request_validator: OAuth2RequestValidator, default_grant=None, oidc_grant=None) -> None: ... def create_token_response(self, request: Request, token_handler): ... 
diff --git a/stubs/openpyxl/openpyxl/cell/_writer.pyi b/stubs/openpyxl/openpyxl/cell/_writer.pyi index da72488a1370..d079c8c08c77 100644 --- a/stubs/openpyxl/openpyxl/cell/_writer.pyi +++ b/stubs/openpyxl/openpyxl/cell/_writer.pyi @@ -1,8 +1,8 @@ -from _typeshed import Incomplete, Unused +from _typeshed import Unused from openpyxl.cell import _CellOrMergedCell -def etree_write_cell(xf, worksheet: Unused, cell: _CellOrMergedCell, styled: Incomplete | None = None) -> None: ... +def etree_write_cell(xf, worksheet: Unused, cell: _CellOrMergedCell, styled=None) -> None: ... def lxml_write_cell(xf, worksheet: Unused, cell: _CellOrMergedCell, styled: bool = False) -> None: ... write_cell = lxml_write_cell diff --git a/stubs/openpyxl/openpyxl/chart/area_chart.pyi b/stubs/openpyxl/openpyxl/chart/area_chart.pyi index 1ff454f00cfb..2d80d765ccff 100644 --- a/stubs/openpyxl/openpyxl/chart/area_chart.pyi +++ b/stubs/openpyxl/openpyxl/chart/area_chart.pyi @@ -57,4 +57,4 @@ class AreaChart3D(AreaChart): y_axis: Typed[NumericAxis, Literal[False]] z_axis: Typed[SeriesAxis, Literal[True]] __elements__: ClassVar[tuple[str, ...]] - def __init__(self, gapDepth: Incomplete | None = None, **kw) -> None: ... + def __init__(self, gapDepth=None, **kw) -> None: ... diff --git a/stubs/openpyxl/openpyxl/chart/axis.pyi b/stubs/openpyxl/openpyxl/chart/axis.pyi index d11c8b2f1b44..068ac588da0d 100644 --- a/stubs/openpyxl/openpyxl/chart/axis.pyi +++ b/stubs/openpyxl/openpyxl/chart/axis.pyi @@ -115,15 +115,15 @@ class _BaseAxis(Serialisable): majorGridlines: ChartLines | None = None, minorGridlines: ChartLines | None = None, title: str | Title | None = None, - numFmt: Incomplete | None = None, - majorTickMark: Incomplete | None = None, - minorTickMark: Incomplete | None = None, - tickLblPos: Incomplete | None = None, + numFmt=None, + majorTickMark=None, + minorTickMark=None, + tickLblPos=None, spPr: GraphicalProperties | None = None, txPr: RichText | None = None, *, crossAx: _HasTagAndGet[ConvertibleToInt] | ConvertibleToInt, - crosses: Incomplete | None = None, + crosses=None, crossesAt: _HasTagAndGet[ConvertibleToFloat | None] | ConvertibleToFloat | None = None, ) -> None: ... diff --git a/stubs/openpyxl/openpyxl/chart/bar_chart.pyi b/stubs/openpyxl/openpyxl/chart/bar_chart.pyi index b45a3096c65e..c5326c40e3cd 100644 --- a/stubs/openpyxl/openpyxl/chart/bar_chart.pyi +++ b/stubs/openpyxl/openpyxl/chart/bar_chart.pyi @@ -52,12 +52,7 @@ class BarChart(_BarChartBase): __elements__: ClassVar[tuple[str, ...]] legend: Incomplete def __init__( - self, - gapWidth: int = 150, - overlap: Incomplete | None = None, - serLines: ChartLines | None = None, - extLst: Unused = None, - **kw, + self, gapWidth: int = 150, overlap=None, serLines: ChartLines | None = None, extLst: Unused = None, **kw ) -> None: ... class BarChart3D(_BarChartBase, _3DBase): diff --git a/stubs/openpyxl/openpyxl/chart/chartspace.pyi b/stubs/openpyxl/openpyxl/chart/chartspace.pyi index eeaa4f6d4a52..fef551ba034b 100644 --- a/stubs/openpyxl/openpyxl/chart/chartspace.pyi +++ b/stubs/openpyxl/openpyxl/chart/chartspace.pyi @@ -121,7 +121,7 @@ class ChartSpace(Serialisable): txPr: RichText | None = None, externalData: ExternalData | None = None, printSettings: PrintSettings | None = None, - userShapes: Incomplete | None = None, + userShapes=None, extLst: Unused = None, ) -> None: ... 
@overload @@ -139,7 +139,7 @@ class ChartSpace(Serialisable): txPr: RichText | None = None, externalData: ExternalData | None = None, printSettings: PrintSettings | None = None, - userShapes: Incomplete | None = None, + userShapes=None, extLst: Unused = None, ) -> None: ... def to_tree(self, tagname: Unused = None, idx: Unused = None, namespace: Unused = None) -> Element: ... diff --git a/stubs/openpyxl/openpyxl/chart/data_source.pyi b/stubs/openpyxl/openpyxl/chart/data_source.pyi index 186c02c6f36e..0d7237cd1c10 100644 --- a/stubs/openpyxl/openpyxl/chart/data_source.pyi +++ b/stubs/openpyxl/openpyxl/chart/data_source.pyi @@ -23,7 +23,7 @@ class NumVal(Serialisable): idx: Integer[Literal[False]] formatCode: NestedText[str, Literal[True]] v: Incomplete - def __init__(self, idx: ConvertibleToInt, formatCode: object = None, v: Incomplete | None = None) -> None: ... + def __init__(self, idx: ConvertibleToInt, formatCode: object = None, v=None) -> None: ... class NumData(Serialisable): formatCode: NestedText[str, Literal[True]] diff --git a/stubs/openpyxl/openpyxl/chart/line_chart.pyi b/stubs/openpyxl/openpyxl/chart/line_chart.pyi index c6949aed2168..e03d6c35a397 100644 --- a/stubs/openpyxl/openpyxl/chart/line_chart.pyi +++ b/stubs/openpyxl/openpyxl/chart/line_chart.pyi @@ -78,7 +78,7 @@ class LineChart3D(_LineChartBase): __elements__: ClassVar[tuple[str, ...]] def __init__( self, - gapDepth: Incomplete | None = None, + gapDepth=None, hiLowLines: ChartLines | None = None, upDownBars: UpDownBars | None = None, marker: _HasTagAndGet[_ConvertibleToBool | None] | _ConvertibleToBool | None = None, diff --git a/stubs/openpyxl/openpyxl/chart/pie_chart.pyi b/stubs/openpyxl/openpyxl/chart/pie_chart.pyi index c29375bcd023..f6e1d1fc6f3e 100644 --- a/stubs/openpyxl/openpyxl/chart/pie_chart.pyi +++ b/stubs/openpyxl/openpyxl/chart/pie_chart.pyi @@ -94,7 +94,7 @@ class ProjectedPieChart(_PieChartBase): def __init__( self, ofPieType: _HasTagAndGet[_ProjectedPieChartOfPieType] | _ProjectedPieChartOfPieType = "pie", - gapWidth: Incomplete | None = None, + gapWidth=None, splitType: _NestedNoneSetParam[_ProjectedPieChartSplitType] = "auto", splitPos: _HasTagAndGet[ConvertibleToFloat | None] | ConvertibleToFloat | None = None, custSplit: CustomSplit | None = None, diff --git a/stubs/openpyxl/openpyxl/chart/shapes.pyi b/stubs/openpyxl/openpyxl/chart/shapes.pyi index 1d43dd8ab0ab..c285bbe921a0 100644 --- a/stubs/openpyxl/openpyxl/chart/shapes.pyi +++ b/stubs/openpyxl/openpyxl/chart/shapes.pyi @@ -42,7 +42,7 @@ class GraphicalProperties(Serialisable): solidFill: str | ColorChoice | None = None, gradFill: GradientFillProperties | None = None, pattFill: PatternFillProperties | None = None, - ln: Incomplete | None = None, + ln=None, scene3d: Scene3D | None = None, custGeom: CustomGeometry2D | None = None, prstGeom: PresetGeometry2D | None = None, diff --git a/stubs/openpyxl/openpyxl/chart/text.pyi b/stubs/openpyxl/openpyxl/chart/text.pyi index 481dfa60c93e..b576900cc2e7 100644 --- a/stubs/openpyxl/openpyxl/chart/text.pyi +++ b/stubs/openpyxl/openpyxl/chart/text.pyi @@ -15,9 +15,7 @@ class RichText(Serialisable): p: Incomplete paragraphs: Alias __elements__: ClassVar[tuple[str, ...]] - def __init__( - self, bodyPr: RichTextProperties | None = None, lstStyle: ListStyle | None = None, p: Incomplete | None = None - ) -> None: ... + def __init__(self, bodyPr: RichTextProperties | None = None, lstStyle: ListStyle | None = None, p=None) -> None: ... 
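The chart and chartsheet stubs above mostly touch constructor defaults (for example `BarChart(gapWidth=150, overlap=None, ...)`). A short, runnable sketch of the corresponding public API; the output filename is arbitrary:

from openpyxl import Workbook
from openpyxl.chart import BarChart, Reference

wb = Workbook()
ws = wb.active
ws.append(["n", "square"])
for n in range(1, 6):
    ws.append([n, n * n])

chart = BarChart()  # gapWidth/overlap keep their stub defaults
data = Reference(ws, min_col=2, min_row=1, max_row=6)
chart.add_data(data, titles_from_data=True)
ws.add_chart(chart, "D2")
wb.save("bar_chart.xlsx")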
class Text(Serialisable): tagname: ClassVar[str] diff --git a/stubs/openpyxl/openpyxl/chartsheet/custom.pyi b/stubs/openpyxl/openpyxl/chartsheet/custom.pyi index 70865e0cb01b..2f1066bd2db5 100644 --- a/stubs/openpyxl/openpyxl/chartsheet/custom.pyi +++ b/stubs/openpyxl/openpyxl/chartsheet/custom.pyi @@ -20,7 +20,7 @@ class CustomChartsheetView(Serialisable): @overload def __init__( self, - guid: Incomplete | None = None, + guid=None, *, scale: ConvertibleToInt, state: _VisibilityType = "visible", @@ -45,4 +45,4 @@ class CustomChartsheetViews(Serialisable): tagname: ClassVar[str] customSheetView: Incomplete __elements__: ClassVar[tuple[str, ...]] - def __init__(self, customSheetView: Incomplete | None = None) -> None: ... + def __init__(self, customSheetView=None) -> None: ... diff --git a/stubs/openpyxl/openpyxl/chartsheet/protection.pyi b/stubs/openpyxl/openpyxl/chartsheet/protection.pyi index 95266410c76f..c109380c2dd2 100644 --- a/stubs/openpyxl/openpyxl/chartsheet/protection.pyi +++ b/stubs/openpyxl/openpyxl/chartsheet/protection.pyi @@ -19,9 +19,9 @@ class ChartsheetProtection(Serialisable, _Protected): self, content: _ConvertibleToBool | None = None, objects: _ConvertibleToBool | None = None, - hashValue: Incomplete | None = None, + hashValue=None, spinCount: ConvertibleToInt | None = None, - saltValue: Incomplete | None = None, + saltValue=None, algorithmName: str | None = None, - password: Incomplete | None = None, + password=None, ) -> None: ... diff --git a/stubs/openpyxl/openpyxl/chartsheet/publish.pyi b/stubs/openpyxl/openpyxl/chartsheet/publish.pyi index c69ecce3e05d..56780bc5ca04 100644 --- a/stubs/openpyxl/openpyxl/chartsheet/publish.pyi +++ b/stubs/openpyxl/openpyxl/chartsheet/publish.pyi @@ -50,4 +50,4 @@ class WebPublishItems(Serialisable): count: Integer[Literal[True]] webPublishItem: Incomplete __elements__: ClassVar[tuple[str, ...]] - def __init__(self, count: ConvertibleToInt | None = None, webPublishItem: Incomplete | None = None) -> None: ... + def __init__(self, count: ConvertibleToInt | None = None, webPublishItem=None) -> None: ... diff --git a/stubs/openpyxl/openpyxl/chartsheet/relation.pyi b/stubs/openpyxl/openpyxl/chartsheet/relation.pyi index d43efafa5fe0..d355d7a7b1eb 100644 --- a/stubs/openpyxl/openpyxl/chartsheet/relation.pyi +++ b/stubs/openpyxl/openpyxl/chartsheet/relation.pyi @@ -49,7 +49,7 @@ class DrawingHF(Serialisable): rightFooterFirstPage: Alias def __init__( self, - id: Incomplete | None = None, + id=None, lho: ConvertibleToInt | None = None, lhe: ConvertibleToInt | None = None, lhf: ConvertibleToInt | None = None, diff --git a/stubs/openpyxl/openpyxl/chartsheet/views.pyi b/stubs/openpyxl/openpyxl/chartsheet/views.pyi index 4cabc25d45f8..f13138f829b9 100644 --- a/stubs/openpyxl/openpyxl/chartsheet/views.pyi +++ b/stubs/openpyxl/openpyxl/chartsheet/views.pyi @@ -27,4 +27,4 @@ class ChartsheetViewList(Serialisable): sheetView: Incomplete extLst: Typed[ExtensionList, Literal[True]] __elements__: ClassVar[tuple[str, ...]] - def __init__(self, sheetView: Incomplete | None = None, extLst: Unused = None) -> None: ... + def __init__(self, sheetView=None, extLst: Unused = None) -> None: ... 
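The rewrite applied in every hunk above is the same one: an optional parameter whose type is unknown drops its explicit Incomplete | None = None annotation in favour of a bare = None default. The shorter spelling relies on the stub convention that a defaulted parameter left unannotated is treated as having an unknown (Incomplete) type, so callers see the same interface either way. A minimal sketch, assuming that reading; the names below are illustrative and not taken from openpyxl:

    # sketch.pyi -- hypothetical stub fragment
    from _typeshed import Incomplete

    # old spelling: the unknown optional type is written out in full
    def old_style(value: Incomplete | None = None) -> None: ...

    # new spelling: no annotation; the defaulted parameter is still understood
    # as an unknown optional value, just with less noise in the signature
    def new_style(value=None) -> None: ...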
diff --git a/stubs/openpyxl/openpyxl/comments/comment_sheet.pyi b/stubs/openpyxl/openpyxl/comments/comment_sheet.pyi index f7069a43bd03..51799b51ce1f 100644 --- a/stubs/openpyxl/openpyxl/comments/comment_sheet.pyi +++ b/stubs/openpyxl/openpyxl/comments/comment_sheet.pyi @@ -93,7 +93,7 @@ class CommentRecord(Serialisable): self, ref: str = "", authorId: ConvertibleToInt = 0, - guid: Incomplete | None = None, + guid=None, shapeId: ConvertibleToInt | None = 0, text: Text | None = None, commentPr: Properties | None = None, @@ -113,12 +113,12 @@ class CommentSheet(Serialisable): extLst: Typed[ExtensionList, Literal[True]] mime_type: str __elements__: ClassVar[tuple[str, ...]] - def __init__(self, authors: AuthorList, commentList: Incomplete | None = None, extLst: Unused = None) -> None: ... + def __init__(self, authors: AuthorList, commentList=None, extLst: Unused = None) -> None: ... def to_tree(self) -> Element: ... # type: ignore[override] @property def comments(self) -> Generator[tuple[str, Comment], None, None]: ... @classmethod def from_comments(cls, comments): ... - def write_shapes(self, vml: Incomplete | None = None): ... + def write_shapes(self, vml=None): ... @property def path(self) -> str: ... diff --git a/stubs/openpyxl/openpyxl/drawing/colors.pyi b/stubs/openpyxl/openpyxl/drawing/colors.pyi index 8795cdd13d98..00c6a9c5d865 100644 --- a/stubs/openpyxl/openpyxl/drawing/colors.pyi +++ b/stubs/openpyxl/openpyxl/drawing/colors.pyi @@ -299,7 +299,7 @@ class SystemColor(Serialisable): def __init__( self, val: _SystemColorVal = "windowText", - lastClr: Incomplete | None = None, + lastClr=None, tint: _HasTagAndGet[ConvertibleToInt | None] | ConvertibleToInt | None = None, shade: _HasTagAndGet[ConvertibleToInt | None] | ConvertibleToInt | None = None, comp: Transform | None = None, diff --git a/stubs/openpyxl/openpyxl/drawing/fill.pyi b/stubs/openpyxl/openpyxl/drawing/fill.pyi index 17e5656f81c7..0a4a365244fd 100644 --- a/stubs/openpyxl/openpyxl/drawing/fill.pyi +++ b/stubs/openpyxl/openpyxl/drawing/fill.pyi @@ -118,9 +118,7 @@ class RelativeRect(Serialisable): right: Alias b: Incomplete bottom: Alias - def __init__( - self, l: Incomplete | None = None, t: Incomplete | None = None, r: Incomplete | None = None, b: Incomplete | None = None - ) -> None: ... + def __init__(self, l=None, t=None, r=None, b=None) -> None: ... class StretchInfoProperties(Serialisable): tagname: ClassVar[str] @@ -247,8 +245,8 @@ class Blip(Serialisable): def __init__( self, cstate: _BlipCstate | Literal["none"] | None = None, - embed: Incomplete | None = None, - link: Incomplete | None = None, + embed=None, + link=None, noGrp: _ConvertibleToBool | None = None, noSelect: _ConvertibleToBool | None = None, noRot: _ConvertibleToBool | None = None, diff --git a/stubs/openpyxl/openpyxl/drawing/geometry.pyi b/stubs/openpyxl/openpyxl/drawing/geometry.pyi index 82079d9cfc87..4e2241e94ebd 100644 --- a/stubs/openpyxl/openpyxl/drawing/geometry.pyi +++ b/stubs/openpyxl/openpyxl/drawing/geometry.pyi @@ -329,7 +329,7 @@ class Point2D(Serialisable): namespace: ClassVar[str] x: Incomplete y: Incomplete - def __init__(self, x: Incomplete | None = None, y: Incomplete | None = None) -> None: ... + def __init__(self, x=None, y=None) -> None: ... 
class PositiveSize2D(Serialisable): tagname: ClassVar[str] @@ -497,14 +497,12 @@ class GeomRect(Serialisable): t: Incomplete r: Incomplete b: Incomplete - def __init__( - self, l: Incomplete | None = None, t: Incomplete | None = None, r: Incomplete | None = None, b: Incomplete | None = None - ) -> None: ... + def __init__(self, l=None, t=None, r=None, b=None) -> None: ... class AdjPoint2D(Serialisable): x: Incomplete y: Incomplete - def __init__(self, x: Incomplete | None = None, y: Incomplete | None = None) -> None: ... + def __init__(self, x=None, y=None) -> None: ... class ConnectionSite(Serialisable): ang: MinMax[float, Literal[False]] diff --git a/stubs/openpyxl/openpyxl/drawing/graphic.pyi b/stubs/openpyxl/openpyxl/drawing/graphic.pyi index 675f0585cf97..38164a99b4ef 100644 --- a/stubs/openpyxl/openpyxl/drawing/graphic.pyi +++ b/stubs/openpyxl/openpyxl/drawing/graphic.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import ClassVar, Literal from openpyxl.descriptors.base import Alias, Bool, String, Typed, _ConvertibleToBool @@ -39,7 +38,7 @@ class NonVisualGraphicFrame(Serialisable): cNvPr: Typed[ExtensionList, Literal[False]] cNvGraphicFramePr: Typed[ExtensionList, Literal[False]] __elements__: ClassVar[tuple[str, ...]] - def __init__(self, cNvPr: Incomplete | None = None, cNvGraphicFramePr: Incomplete | None = None) -> None: ... + def __init__(self, cNvPr=None, cNvGraphicFramePr=None) -> None: ... class GraphicData(Serialisable): tagname: ClassVar[str] diff --git a/stubs/openpyxl/openpyxl/drawing/line.pyi b/stubs/openpyxl/openpyxl/drawing/line.pyi index 668c5c98ced4..130895608758 100644 --- a/stubs/openpyxl/openpyxl/drawing/line.pyi +++ b/stubs/openpyxl/openpyxl/drawing/line.pyi @@ -44,7 +44,7 @@ class DashStop(Serialisable): class DashStopList(Serialisable): ds: Incomplete - def __init__(self, ds: Incomplete | None = None) -> None: ... + def __init__(self, ds=None) -> None: ... class LineProperties(Serialisable): tagname: ClassVar[str] diff --git a/stubs/openpyxl/openpyxl/drawing/picture.pyi b/stubs/openpyxl/openpyxl/drawing/picture.pyi index 74a29558685d..4a3de4c9ea65 100644 --- a/stubs/openpyxl/openpyxl/drawing/picture.pyi +++ b/stubs/openpyxl/openpyxl/drawing/picture.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, Unused +from _typeshed import Unused from typing import ClassVar, Literal from openpyxl.chart.shapes import GraphicalProperties @@ -47,9 +47,7 @@ class NonVisualPictureProperties(Serialisable): picLocks: Typed[PictureLocking, Literal[True]] extLst: Typed[ExtensionList, Literal[True]] __elements__: ClassVar[tuple[str, ...]] - def __init__( - self, preferRelativeResize: _ConvertibleToBool | None = None, picLocks: Incomplete | None = None, extLst: Unused = None - ) -> None: ... + def __init__(self, preferRelativeResize: _ConvertibleToBool | None = None, picLocks=None, extLst: Unused = None) -> None: ... class PictureNonVisual(Serialisable): tagname: ClassVar[str] diff --git a/stubs/openpyxl/openpyxl/drawing/properties.pyi b/stubs/openpyxl/openpyxl/drawing/properties.pyi index 0d9c33726daf..0444a5321293 100644 --- a/stubs/openpyxl/openpyxl/drawing/properties.pyi +++ b/stubs/openpyxl/openpyxl/drawing/properties.pyi @@ -63,7 +63,7 @@ class NonVisualGroupDrawingShapeProps(Serialisable): grpSpLocks: Typed[GroupLocking, Literal[True]] extLst: Typed[ExtensionList, Literal[True]] __elements__: ClassVar[tuple[str, ...]] - def __init__(self, grpSpLocks: Incomplete | None = None, extLst: Unused = None) -> None: ... 
+ def __init__(self, grpSpLocks=None, extLst: Unused = None) -> None: ... class NonVisualDrawingShapeProps(Serialisable): tagname: ClassVar[str] @@ -72,9 +72,7 @@ class NonVisualDrawingShapeProps(Serialisable): extLst: Typed[ExtensionList, Literal[True]] __elements__: ClassVar[tuple[str, ...]] txBox: Incomplete - def __init__( - self, spLocks: Incomplete | None = None, txBox: _ConvertibleToBool | None = None, extLst: Unused = None - ) -> None: ... + def __init__(self, spLocks=None, txBox: _ConvertibleToBool | None = None, extLst: Unused = None) -> None: ... class NonVisualDrawingProps(Serialisable): tagname: ClassVar[str] @@ -91,7 +89,7 @@ class NonVisualDrawingProps(Serialisable): @overload def __init__( self, - id: Incomplete | None = None, + id=None, *, name: str, descr: str | None = None, diff --git a/stubs/openpyxl/openpyxl/drawing/spreadsheet_drawing.pyi b/stubs/openpyxl/openpyxl/drawing/spreadsheet_drawing.pyi index 17a8d36ad9e7..df2388c58f95 100644 --- a/stubs/openpyxl/openpyxl/drawing/spreadsheet_drawing.pyi +++ b/stubs/openpyxl/openpyxl/drawing/spreadsheet_drawing.pyi @@ -49,7 +49,7 @@ class _AnchorBase(Serialisable): graphicFrame: GraphicFrame | None = None, cxnSp: Shape | None = None, pic: PictureFrame | None = None, - contentPart: Incomplete | None = None, + contentPart=None, ) -> None: ... class AbsoluteAnchor(_AnchorBase): diff --git a/stubs/openpyxl/openpyxl/drawing/text.pyi b/stubs/openpyxl/openpyxl/drawing/text.pyi index 02c18ff83617..b5ed491d7cc4 100644 --- a/stubs/openpyxl/openpyxl/drawing/text.pyi +++ b/stubs/openpyxl/openpyxl/drawing/text.pyi @@ -161,7 +161,7 @@ class Hyperlink(Serialisable): endSnd: _ConvertibleToBool | None = None, snd: EmbeddedWAVAudioFile | None = None, extLst: ExtensionList | None = None, - id: Incomplete | None = None, + id=None, ) -> None: ... class Font(Serialisable): @@ -172,11 +172,7 @@ class Font(Serialisable): pitchFamily: MinMax[float, Literal[True]] charset: Integer[Literal[True]] def __init__( - self, - typeface: str, - panose: Incomplete | None = None, - pitchFamily: ConvertibleToFloat | None = None, - charset: ConvertibleToInt | None = None, + self, typeface: str, panose=None, pitchFamily: ConvertibleToFloat | None = None, charset: ConvertibleToInt | None = None ) -> None: ... class CharacterProperties(Serialisable): @@ -276,7 +272,7 @@ class TabStop(Serialisable): class TabStopList(Serialisable): tab: Typed[TabStop, Literal[True]] - def __init__(self, tab: Incomplete | None = None) -> None: ... + def __init__(self, tab=None) -> None: ... class Spacing(Serialisable): spcPct: NestedInteger[Literal[True]] @@ -435,7 +431,7 @@ class Paragraph(Serialisable): self, pPr: ParagraphProperties | None = None, endParaRPr: CharacterProperties | None = None, - r: Incomplete | None = None, + r=None, br: LineBreak | None = None, fld: TextField | None = None, ) -> None: ... @@ -447,7 +443,7 @@ class GeomGuide(Serialisable): class GeomGuideList(Serialisable): gd: Incomplete - def __init__(self, gd: Incomplete | None = None) -> None: ... + def __init__(self, gd=None) -> None: ... 
class PresetTextShape(Serialisable): prst: Typed[Set[_PresetTextShapePrst], Literal[False]] diff --git a/stubs/openpyxl/openpyxl/formatting/rule.pyi b/stubs/openpyxl/openpyxl/formatting/rule.pyi index 9b04c503e6b4..cd5efaae934d 100644 --- a/stubs/openpyxl/openpyxl/formatting/rule.pyi +++ b/stubs/openpyxl/openpyxl/formatting/rule.pyi @@ -79,7 +79,7 @@ class FormatObject(Serialisable): extLst: Typed[ExtensionList, Literal[True]] __elements__: ClassVar[tuple[str, ...]] def __init__( - self, type: _FormatObjectType, val: Incomplete | None = None, gte: _ConvertibleToBool | None = None, extLst: Unused = None + self, type: _FormatObjectType, val=None, gte: _ConvertibleToBool | None = None, extLst: Unused = None ) -> None: ... class RuleType(Serialisable): @@ -99,7 +99,7 @@ class IconSet(RuleType): showValue: _ConvertibleToBool | None = None, percent: _ConvertibleToBool | None = None, reverse: _ConvertibleToBool | None = None, - cfvo: Incomplete | None = None, + cfvo=None, ) -> None: ... class DataBar(RuleType): @@ -116,7 +116,7 @@ class DataBar(RuleType): minLength: ConvertibleToInt | None = None, maxLength: ConvertibleToInt | None = None, showValue: _ConvertibleToBool | None = None, - cfvo: Incomplete | None = None, + cfvo=None, *, color: str | Color, ) -> None: ... @@ -135,7 +135,7 @@ class ColorScale(RuleType): color: Incomplete __elements__: ClassVar[tuple[str, ...]] cfvo: Incomplete - def __init__(self, cfvo: Incomplete | None = None, color: Incomplete | None = None) -> None: ... + def __init__(self, cfvo=None, color=None) -> None: ... class Rule(Serialisable): tagname: ClassVar[str] @@ -184,46 +184,19 @@ class Rule(Serialisable): ) -> None: ... def ColorScaleRule( - start_type: Incomplete | None = None, - start_value: Incomplete | None = None, - start_color: Incomplete | None = None, - mid_type: Incomplete | None = None, - mid_value: Incomplete | None = None, - mid_color: Incomplete | None = None, - end_type: Incomplete | None = None, - end_value: Incomplete | None = None, - end_color: Incomplete | None = None, -): ... -def FormulaRule( - formula: Incomplete | None = None, - stopIfTrue: Incomplete | None = None, - font: Incomplete | None = None, - border: Incomplete | None = None, - fill: Incomplete | None = None, -): ... -def CellIsRule( - operator: Incomplete | None = None, - formula: Incomplete | None = None, - stopIfTrue: Incomplete | None = None, - font: Incomplete | None = None, - border: Incomplete | None = None, - fill: Incomplete | None = None, -): ... -def IconSetRule( - icon_style: Incomplete | None = None, - type: Incomplete | None = None, - values: Incomplete | None = None, - showValue: Incomplete | None = None, - percent: Incomplete | None = None, - reverse: Incomplete | None = None, + start_type=None, + start_value=None, + start_color=None, + mid_type=None, + mid_value=None, + mid_color=None, + end_type=None, + end_value=None, + end_color=None, ): ... +def FormulaRule(formula=None, stopIfTrue=None, font=None, border=None, fill=None): ... +def CellIsRule(operator=None, formula=None, stopIfTrue=None, font=None, border=None, fill=None): ... +def IconSetRule(icon_style=None, type=None, values=None, showValue=None, percent=None, reverse=None): ... 
def DataBarRule( - start_type: Incomplete | None = None, - start_value: Incomplete | None = None, - end_type: Incomplete | None = None, - end_value: Incomplete | None = None, - color: Incomplete | None = None, - showValue: Incomplete | None = None, - minLength: Incomplete | None = None, - maxLength: Incomplete | None = None, + start_type=None, start_value=None, end_type=None, end_value=None, color=None, showValue=None, minLength=None, maxLength=None ): ... diff --git a/stubs/openpyxl/openpyxl/formula/translate.pyi b/stubs/openpyxl/openpyxl/formula/translate.pyi index 3f03b328b884..212f0bedd006 100644 --- a/stubs/openpyxl/openpyxl/formula/translate.pyi +++ b/stubs/openpyxl/openpyxl/formula/translate.pyi @@ -19,4 +19,4 @@ class Translator: def strip_ws_name(range_str): ... @classmethod def translate_range(cls, range_str, rdelta, cdelta): ... - def translate_formula(self, dest: Incomplete | None = None, row_delta: int = 0, col_delta: int = 0): ... + def translate_formula(self, dest=None, row_delta: int = 0, col_delta: int = 0): ... diff --git a/stubs/openpyxl/openpyxl/packaging/core.pyi b/stubs/openpyxl/openpyxl/packaging/core.pyi index 0106e8cb9222..9111e2bb6387 100644 --- a/stubs/openpyxl/openpyxl/packaging/core.pyi +++ b/stubs/openpyxl/openpyxl/packaging/core.pyi @@ -46,7 +46,7 @@ class DocumentProperties(Serialisable): contentStatus: object = None, keywords: object = None, lastModifiedBy: object = None, - lastPrinted: Incomplete | None = None, + lastPrinted=None, revision: object = None, version: object = None, created=None, diff --git a/stubs/openpyxl/openpyxl/packaging/workbook.pyi b/stubs/openpyxl/openpyxl/packaging/workbook.pyi index b333a3651550..3734b75e9932 100644 --- a/stubs/openpyxl/openpyxl/packaging/workbook.pyi +++ b/stubs/openpyxl/openpyxl/packaging/workbook.pyi @@ -38,18 +38,14 @@ class ChildSheet(Serialisable): state: NoneSet[_VisibilityType] id: Incomplete def __init__( - self, - name: str, - sheetId: ConvertibleToInt, - state: _VisibilityType | Literal["none"] | None = "visible", - id: Incomplete | None = None, + self, name: str, sheetId: ConvertibleToInt, state: _VisibilityType | Literal["none"] | None = "visible", id=None ) -> None: ... class PivotCache(Serialisable): tagname: ClassVar[str] cacheId: Integer[Literal[False]] id: Incomplete - def __init__(self, cacheId: ConvertibleToInt, id: Incomplete | None = None) -> None: ... + def __init__(self, cacheId: ConvertibleToInt, id=None) -> None: ... class WorkbookPackage(Serialisable): tagname: ClassVar[str] diff --git a/stubs/openpyxl/openpyxl/pivot/cache.pyi b/stubs/openpyxl/openpyxl/pivot/cache.pyi index bd8c5f6ca837..d156091b67a9 100644 --- a/stubs/openpyxl/openpyxl/pivot/cache.pyi +++ b/stubs/openpyxl/openpyxl/pivot/cache.pyi @@ -71,13 +71,9 @@ class CalculatedItem(Serialisable): extLst: Typed[ExtensionList, Literal[True]] __elements__: ClassVar[tuple[str, ...]] @overload - def __init__( - self, field: ConvertibleToInt | None = None, *, formula: str, pivotArea: PivotArea, extLst: Incomplete | None = None - ) -> None: ... + def __init__(self, field: ConvertibleToInt | None = None, *, formula: str, pivotArea: PivotArea, extLst=None) -> None: ... @overload - def __init__( - self, field: ConvertibleToInt | None, formula: str, pivotArea: PivotArea, extLst: Incomplete | None = None - ) -> None: ... + def __init__(self, field: ConvertibleToInt | None, formula: str, pivotArea: PivotArea, extLst=None) -> None: ... 
class ServerFormat(Serialisable): tagname: ClassVar[str] @@ -107,7 +103,7 @@ class OLAPSet(Serialisable): count: ConvertibleToInt, maxRank: ConvertibleToInt, setDefinition: str, - sortType: Incomplete | None = None, + sortType=None, queryFailed: _ConvertibleToBool = None, tpls: TupleList | None = None, sortByTuple: TupleList | None = None, @@ -315,7 +311,7 @@ class GroupItems(Serialisable): d: Incomplete __elements__: ClassVar[tuple[str, ...]] __attrs__: ClassVar[tuple[str, ...]] - def __init__(self, count: Incomplete | None = None, m=(), n=(), b=(), e=(), s=(), d=()) -> None: ... + def __init__(self, count=None, m=(), n=(), b=(), e=(), s=(), d=()) -> None: ... @property def count(self) -> int: ... @@ -607,7 +603,7 @@ class CacheDefinition(Serialisable): measureGroups=(), maps=(), extLst: ExtensionList | None = None, - id: Incomplete | None = None, + id=None, ) -> None: ... @overload def __init__( @@ -640,7 +636,7 @@ class CacheDefinition(Serialisable): measureGroups=(), maps=(), extLst: ExtensionList | None = None, - id: Incomplete | None = None, + id=None, ) -> None: ... def to_tree(self) -> Element: ... # type: ignore[override] @property diff --git a/stubs/openpyxl/openpyxl/pivot/fields.pyi b/stubs/openpyxl/openpyxl/pivot/fields.pyi index a35b43114fb5..15b8be17efbd 100644 --- a/stubs/openpyxl/openpyxl/pivot/fields.pyi +++ b/stubs/openpyxl/openpyxl/pivot/fields.pyi @@ -50,8 +50,8 @@ class Missing(Serialisable): c: str | None = None, cp: ConvertibleToInt | None = None, _in: ConvertibleToInt | None = None, - bc: Incomplete | None = None, - fc: Incomplete | None = None, + bc=None, + fc=None, i: _ConvertibleToBool | None = None, un: _ConvertibleToBool | None = None, st: _ConvertibleToBool | None = None, @@ -87,8 +87,8 @@ class Number(Serialisable): c: str | None = None, cp: ConvertibleToInt | None = None, _in: ConvertibleToInt | None = None, - bc: Incomplete | None = None, - fc: Incomplete | None = None, + bc=None, + fc=None, i: _ConvertibleToBool | None = None, un: _ConvertibleToBool | None = None, st: _ConvertibleToBool | None = None, @@ -105,8 +105,8 @@ class Number(Serialisable): c: str | None = None, cp: ConvertibleToInt | None = None, _in: ConvertibleToInt | None = None, - bc: Incomplete | None = None, - fc: Incomplete | None = None, + bc=None, + fc=None, i: _ConvertibleToBool | None = None, un: _ConvertibleToBool | None = None, st: _ConvertibleToBool | None = None, @@ -142,8 +142,8 @@ class Error(Serialisable): c: str | None = None, cp: ConvertibleToInt | None = None, _in: ConvertibleToInt | None = None, - bc: Incomplete | None = None, - fc: Incomplete | None = None, + bc=None, + fc=None, i: _ConvertibleToBool | None = None, un: _ConvertibleToBool | None = None, st: _ConvertibleToBool | None = None, @@ -160,8 +160,8 @@ class Error(Serialisable): c: str | None = None, cp: ConvertibleToInt | None = None, _in: ConvertibleToInt | None = None, - bc: Incomplete | None = None, - fc: Incomplete | None = None, + bc=None, + fc=None, i: _ConvertibleToBool | None = None, un: _ConvertibleToBool | None = None, st: _ConvertibleToBool | None = None, @@ -208,14 +208,14 @@ class Text(Serialisable): self, tpls=(), x=(), - v: Incomplete | None = None, + v=None, u: _ConvertibleToBool | None = None, f: _ConvertibleToBool | None = None, - c: Incomplete | None = None, + c=None, cp: ConvertibleToInt | None = None, _in: ConvertibleToInt | None = None, - bc: Incomplete | None = None, - fc: Incomplete | None = None, + bc=None, + fc=None, i: _ConvertibleToBool | None = None, un: _ConvertibleToBool | None = None, 
st: _ConvertibleToBool | None = None, diff --git a/stubs/openpyxl/openpyxl/pivot/record.pyi b/stubs/openpyxl/openpyxl/pivot/record.pyi index 3c1429e962ca..219fc70261c6 100644 --- a/stubs/openpyxl/openpyxl/pivot/record.pyi +++ b/stubs/openpyxl/openpyxl/pivot/record.pyi @@ -15,17 +15,7 @@ class Record(Serialisable): s: Incomplete d: Incomplete x: Incomplete - def __init__( - self, - _fields=(), - m: Incomplete | None = None, - n: Incomplete | None = None, - b: Incomplete | None = None, - e: Incomplete | None = None, - s: Incomplete | None = None, - d: Incomplete | None = None, - x: Incomplete | None = None, - ) -> None: ... + def __init__(self, _fields=(), m=None, n=None, b=None, e=None, s=None, d=None, x=None) -> None: ... class RecordList(Serialisable): mime_type: str diff --git a/stubs/openpyxl/openpyxl/pivot/table.pyi b/stubs/openpyxl/openpyxl/pivot/table.pyi index 0a3009f6cd2e..17bf54d48e43 100644 --- a/stubs/openpyxl/openpyxl/pivot/table.pyi +++ b/stubs/openpyxl/openpyxl/pivot/table.pyi @@ -116,7 +116,7 @@ class ColHierarchiesUsage(Serialisable): colHierarchyUsage: Incomplete __elements__: ClassVar[tuple[str, ...]] __attrs__: ClassVar[tuple[str, ...]] - def __init__(self, count: Incomplete | None = None, colHierarchyUsage=()) -> None: ... + def __init__(self, count=None, colHierarchyUsage=()) -> None: ... @property def count(self) -> int: ... @@ -125,7 +125,7 @@ class RowHierarchiesUsage(Serialisable): rowHierarchyUsage: Incomplete __elements__: ClassVar[tuple[str, ...]] __attrs__: ClassVar[tuple[str, ...]] - def __init__(self, count: Incomplete | None = None, rowHierarchyUsage=()) -> None: ... + def __init__(self, count=None, rowHierarchyUsage=()) -> None: ... @property def count(self) -> int: ... @@ -210,7 +210,7 @@ class MemberList(Serialisable): level: Integer[Literal[True]] member: Incomplete __elements__: ClassVar[tuple[str, ...]] - def __init__(self, count: Incomplete | None = None, level: ConvertibleToInt | None = None, member=()) -> None: ... + def __init__(self, count=None, level: ConvertibleToInt | None = None, member=()) -> None: ... @property def count(self) -> int: ... @@ -859,7 +859,7 @@ class TableDefinition(Serialisable): rowHierarchiesUsage: RowHierarchiesUsage | None = None, colHierarchiesUsage: ColHierarchiesUsage | None = None, extLst: ExtensionList | None = None, - id: Incomplete | None = None, + id=None, ) -> None: ... @overload def __init__( @@ -949,7 +949,7 @@ class TableDefinition(Serialisable): rowHierarchiesUsage: RowHierarchiesUsage | None = None, colHierarchiesUsage: ColHierarchiesUsage | None = None, extLst: ExtensionList | None = None, - id: Incomplete | None = None, + id=None, ) -> None: ... def to_tree(self) -> Element: ... 
# type: ignore[override] @property diff --git a/stubs/openpyxl/openpyxl/styles/alignment.pyi b/stubs/openpyxl/openpyxl/styles/alignment.pyi index 6bad3b07174a..f6ec12d50bf3 100644 --- a/stubs/openpyxl/openpyxl/styles/alignment.pyi +++ b/stubs/openpyxl/openpyxl/styles/alignment.pyi @@ -1,4 +1,4 @@ -from _typeshed import ConvertibleToFloat, Incomplete +from _typeshed import ConvertibleToFloat from collections.abc import Iterator from typing import ClassVar, Final, Literal from typing_extensions import TypeAlias @@ -30,8 +30,8 @@ class Alignment(Serialisable): readingOrder: Min[float, Literal[False]] def __init__( self, - horizontal: Incomplete | None = None, - vertical: Incomplete | None = None, + horizontal=None, + vertical=None, textRotation: int = 0, wrapText: _ConvertibleToBool | None = None, shrinkToFit: _ConvertibleToBool | None = None, @@ -39,9 +39,9 @@ class Alignment(Serialisable): relativeIndent: ConvertibleToFloat = 0, justifyLastLine: _ConvertibleToBool | None = None, readingOrder: ConvertibleToFloat = 0, - text_rotation: Incomplete | None = None, - wrap_text: Incomplete | None = None, - shrink_to_fit: Incomplete | None = None, - mergeCell: Incomplete | None = None, + text_rotation=None, + wrap_text=None, + shrink_to_fit=None, + mergeCell=None, ) -> None: ... def __iter__(self) -> Iterator[tuple[str, str]]: ... diff --git a/stubs/openpyxl/openpyxl/styles/borders.pyi b/stubs/openpyxl/openpyxl/styles/borders.pyi index c54e2e55d8e3..4c75f3e5ca25 100644 --- a/stubs/openpyxl/openpyxl/styles/borders.pyi +++ b/stubs/openpyxl/openpyxl/styles/borders.pyi @@ -43,10 +43,7 @@ class Side(Serialisable): style: NoneSet[_SideStyle] border_style: Alias def __init__( - self, - style: _SideStyle | Literal["none"] | None = None, - color: str | Color | None = None, - border_style: Incomplete | None = None, + self, style: _SideStyle | Literal["none"] | None = None, color: str | Color | None = None, border_style=None ) -> None: ... class Border(Serialisable): @@ -72,7 +69,7 @@ class Border(Serialisable): top: Side | None = None, bottom: Side | None = None, diagonal: Side | None = None, - diagonal_direction: Incomplete | None = None, + diagonal_direction=None, vertical: Side | None = None, horizontal: Side | None = None, diagonalUp: _ConvertibleToBool = False, diff --git a/stubs/openpyxl/openpyxl/styles/named_styles.pyi b/stubs/openpyxl/openpyxl/styles/named_styles.pyi index 157ad8864db4..13af11fa42d8 100644 --- a/stubs/openpyxl/openpyxl/styles/named_styles.pyi +++ b/stubs/openpyxl/openpyxl/styles/named_styles.pyi @@ -31,7 +31,7 @@ class NamedStyle(Serialisable): fill: Fill | None = None, border: Border | None = None, alignment: Alignment | None = None, - number_format: Incomplete | None = None, + number_format=None, protection: Protection | None = None, builtinId: ConvertibleToInt | None = None, hidden: _ConvertibleToBool | None = False, diff --git a/stubs/openpyxl/openpyxl/workbook/defined_name.pyi b/stubs/openpyxl/openpyxl/workbook/defined_name.pyi index 4c456d18902a..aeda5c7e8fc1 100644 --- a/stubs/openpyxl/openpyxl/workbook/defined_name.pyi +++ b/stubs/openpyxl/openpyxl/workbook/defined_name.pyi @@ -48,7 +48,7 @@ class DefinedName(Serialisable): shortcutKey: str | None = None, publishToServer: _ConvertibleToBool | None = None, workbookParameter: _ConvertibleToBool | None = None, - attr_text: Incomplete | None = None, + attr_text=None, ) -> None: ... @property def type(self) -> _TokenTypesNotOperand | _TokenOperandSubtypes: ... 
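A side effect visible in the ColorScaleRule, FormulaRule, CellIsRule, IconSetRule and Record hunks above: once every parameter in a signature loses its Incomplete | None annotation, the definition often becomes short enough to collapse back onto a single line. A hypothetical before/after in the spirit of those hunks:

    # sketch.pyi -- hypothetical; mirrors the FormulaRule hunk above
    from _typeshed import Incomplete

    # before: long explicit annotations force one parameter per line
    def formula_rule_before(
        formula: Incomplete | None = None,
        stopIfTrue: Incomplete | None = None,
        font: Incomplete | None = None,
    ): ...

    # after: bare defaults let the signature fit on a single line
    def formula_rule_after(formula=None, stopIfTrue=None, font=None): ...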
diff --git a/stubs/openpyxl/openpyxl/workbook/external_link/external.pyi b/stubs/openpyxl/openpyxl/workbook/external_link/external.pyi index cc7e5ceaf3d3..7e0e72060e2d 100644 --- a/stubs/openpyxl/openpyxl/workbook/external_link/external.pyi +++ b/stubs/openpyxl/openpyxl/workbook/external_link/external.pyi @@ -24,7 +24,7 @@ class ExternalRow(Serialisable): r: Integer[Literal[False]] cell: Incomplete __elements__: ClassVar[tuple[str, ...]] - def __init__(self, r: ConvertibleToInt, cell: Incomplete | None = None) -> None: ... + def __init__(self, r: ConvertibleToInt, cell=None) -> None: ... class ExternalSheetData(Serialisable): sheetId: Integer[Literal[False]] @@ -36,7 +36,7 @@ class ExternalSheetData(Serialisable): class ExternalSheetDataSet(Serialisable): sheetData: Incomplete __elements__: ClassVar[tuple[str, ...]] - def __init__(self, sheetData: Incomplete | None = None) -> None: ... + def __init__(self, sheetData=None) -> None: ... class ExternalSheetNames(Serialisable): sheetName: Incomplete @@ -62,7 +62,7 @@ class ExternalBook(Serialisable): sheetNames: ExternalSheetNames | None = None, definedNames=(), sheetDataSet: ExternalSheetDataSet | None = None, - id: Incomplete | None = None, + id=None, ) -> None: ... class ExternalLink(Serialisable): diff --git a/stubs/openpyxl/openpyxl/workbook/properties.pyi b/stubs/openpyxl/openpyxl/workbook/properties.pyi index ccc400f8f7ec..f95fd2be4505 100644 --- a/stubs/openpyxl/openpyxl/workbook/properties.pyi +++ b/stubs/openpyxl/openpyxl/workbook/properties.pyi @@ -99,5 +99,5 @@ class FileVersion(Serialisable): lastEdited: str | None = None, lowestEdited: str | None = None, rupBuild: str | None = None, - codeName: Incomplete | None = None, + codeName=None, ) -> None: ... diff --git a/stubs/openpyxl/openpyxl/workbook/protection.pyi b/stubs/openpyxl/openpyxl/workbook/protection.pyi index 81bf5b4ab92d..f89cdbe8e645 100644 --- a/stubs/openpyxl/openpyxl/workbook/protection.pyi +++ b/stubs/openpyxl/openpyxl/workbook/protection.pyi @@ -30,20 +30,20 @@ class WorkbookProtection(Serialisable): __attrs__: ClassVar[tuple[str, ...]] def __init__( self, - workbookPassword: Incomplete | None = None, + workbookPassword=None, workbookPasswordCharacterSet: str | None = None, - revisionsPassword: Incomplete | None = None, + revisionsPassword=None, revisionsPasswordCharacterSet: str | None = None, lockStructure: _ConvertibleToBool | None = None, lockWindows: _ConvertibleToBool | None = None, lockRevision: _ConvertibleToBool | None = None, revisionsAlgorithmName: str | None = None, - revisionsHashValue: Incomplete | None = None, - revisionsSaltValue: Incomplete | None = None, + revisionsHashValue=None, + revisionsSaltValue=None, revisionsSpinCount: ConvertibleToInt | None = None, workbookAlgorithmName: str | None = None, - workbookHashValue: Incomplete | None = None, - workbookSaltValue: Incomplete | None = None, + workbookHashValue=None, + workbookSaltValue=None, workbookSpinCount: ConvertibleToInt | None = None, ) -> None: ... @overload @@ -84,9 +84,9 @@ class FileSharing(Serialisable): self, readOnlyRecommended: _ConvertibleToBool | None = None, userName: str | None = None, - reservationPassword: Incomplete | None = None, + reservationPassword=None, algorithmName: str | None = None, - hashValue: Incomplete | None = None, - saltValue: Incomplete | None = None, + hashValue=None, + saltValue=None, spinCount: ConvertibleToInt | None = None, ) -> None: ... 
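The WorkbookProtection and FileSharing hunks above also mark the boundary of the rewrite: only parameters annotated as Incomplete | None lose their annotation, while parameters with a concrete type (str | None, ConvertibleToInt | None, _ConvertibleToBool | None, and so on) keep theirs. A hypothetical fragment echoing that split:

    # sketch.pyi -- hypothetical fragment in the spirit of WorkbookProtection
    from _typeshed import ConvertibleToInt

    def protect(
        workbookPassword=None,                              # was Incomplete | None = None
        workbookPasswordCharacterSet: str | None = None,    # concrete type: annotation kept
        workbookSpinCount: ConvertibleToInt | None = None,  # concrete type: annotation kept
    ) -> None: ...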
diff --git a/stubs/openpyxl/openpyxl/workbook/views.pyi b/stubs/openpyxl/openpyxl/workbook/views.pyi index 2850d363b0e0..9c0abeb8872f 100644 --- a/stubs/openpyxl/openpyxl/workbook/views.pyi +++ b/stubs/openpyxl/openpyxl/workbook/views.pyi @@ -77,7 +77,7 @@ class CustomWorkbookView(Serialisable): def __init__( self, name: str, - guid: Incomplete | None = None, + guid=None, autoUpdate: _ConvertibleToBool | None = None, mergeInterval: ConvertibleToInt | None = None, changesSavedWin: _ConvertibleToBool | None = None, diff --git a/stubs/openpyxl/openpyxl/worksheet/controls.pyi b/stubs/openpyxl/openpyxl/worksheet/controls.pyi index 47480d501b85..247969cdbb43 100644 --- a/stubs/openpyxl/openpyxl/worksheet/controls.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/controls.pyi @@ -41,7 +41,7 @@ class ControlProperty(Serialisable): linkedCell: str | None = None, listFillRange: str | None = None, cf: str | None = "pict", - id: Incomplete | None = None, + id=None, ) -> None: ... class Control(Serialisable): diff --git a/stubs/openpyxl/openpyxl/worksheet/datavalidation.pyi b/stubs/openpyxl/openpyxl/worksheet/datavalidation.pyi index 85f40baea56b..d26a47dc59f8 100644 --- a/stubs/openpyxl/openpyxl/worksheet/datavalidation.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/datavalidation.pyi @@ -99,7 +99,7 @@ class DataValidationList(Serialisable): disablePrompts: _ConvertibleToBool | None = None, xWindow: ConvertibleToInt | None = None, yWindow: ConvertibleToInt | None = None, - count: Incomplete | None = None, + count=None, dataValidation=(), ) -> None: ... @property diff --git a/stubs/openpyxl/openpyxl/worksheet/dimensions.pyi b/stubs/openpyxl/openpyxl/worksheet/dimensions.pyi index 03a849348df8..167dfb1c7b1f 100644 --- a/stubs/openpyxl/openpyxl/worksheet/dimensions.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/dimensions.pyi @@ -1,4 +1,4 @@ -from _typeshed import ConvertibleToFloat, ConvertibleToInt, Incomplete, Unused +from _typeshed import ConvertibleToFloat, ConvertibleToInt, Unused from collections.abc import Callable, Iterator from typing import ClassVar, Literal, TypeVar from typing_extensions import Self @@ -36,7 +36,7 @@ class Dimension(Strict, StyleableObject): collapsed: _ConvertibleToBool, worksheet: Worksheet, visible: Unused = True, - style: Incomplete | None = None, + style=None, ) -> None: ... def __iter__(self) -> Iterator[tuple[str, str]]: ... def __copy__(self) -> Self: ... 
@@ -54,15 +54,15 @@ class RowDimension(Dimension): index: int = 0, ht: ConvertibleToFloat | None = None, customHeight: Unused = None, - s: Incomplete | None = None, + s=None, customFormat: Unused = None, hidden: _ConvertibleToBool = None, outlineLevel: ConvertibleToInt | None = 0, outline_level: ConvertibleToInt | None = None, collapsed: _ConvertibleToBool = None, - visible: Incomplete | None = None, - height: Incomplete | None = None, - r: Incomplete | None = None, + visible=None, + height=None, + r=None, spans: Unused = None, thickBot: _ConvertibleToBool = None, thickTop: _ConvertibleToBool = None, @@ -92,7 +92,7 @@ class ColumnDimension(Dimension): outlineLevel: ConvertibleToInt | None = 0, outline_level: ConvertibleToInt | None = None, collapsed: _ConvertibleToBool = False, - style: Incomplete | None = None, + style=None, min: ConvertibleToInt | None = None, max: ConvertibleToInt | None = None, customWidth: Unused = False, diff --git a/stubs/openpyxl/openpyxl/worksheet/drawing.pyi b/stubs/openpyxl/openpyxl/worksheet/drawing.pyi index 39bd9a91f651..589c271fedea 100644 --- a/stubs/openpyxl/openpyxl/worksheet/drawing.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/drawing.pyi @@ -6,4 +6,4 @@ from openpyxl.descriptors.serialisable import Serialisable class Drawing(Serialisable): tagname: ClassVar[str] id: Incomplete - def __init__(self, id: Incomplete | None = None) -> None: ... + def __init__(self, id=None) -> None: ... diff --git a/stubs/openpyxl/openpyxl/worksheet/errors.pyi b/stubs/openpyxl/openpyxl/worksheet/errors.pyi index 41bd07af54b5..67883b448949 100644 --- a/stubs/openpyxl/openpyxl/worksheet/errors.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/errors.pyi @@ -29,7 +29,7 @@ class IgnoredError(Serialisable): calculatedColumn: Bool[Literal[True]] def __init__( self, - sqref: Incomplete | None = None, + sqref=None, evalError: _ConvertibleToBool | None = False, twoDigitTextYear: _ConvertibleToBool | None = False, numberStoredAsText: _ConvertibleToBool | None = False, diff --git a/stubs/openpyxl/openpyxl/worksheet/filters.pyi b/stubs/openpyxl/openpyxl/worksheet/filters.pyi index 4a641e818a48..530321e49d0e 100644 --- a/stubs/openpyxl/openpyxl/worksheet/filters.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/filters.pyi @@ -109,7 +109,7 @@ class SortCondition(Serialisable): iconId: Integer[Literal[True]] def __init__( self, - ref: Incomplete | None = None, + ref=None, descending: _ConvertibleToBool | None = None, sortBy: _SortConditionSortBy | Literal["none"] | None = None, customList: str | None = None, @@ -132,7 +132,7 @@ class SortState(Serialisable): columnSort: _ConvertibleToBool | None = None, caseSensitive: _ConvertibleToBool | None = None, sortMethod: _SortStateSortMethod | Literal["none"] | None = None, - ref: Incomplete | None = None, + ref=None, sortCondition=(), extLst: Unused = None, ) -> None: ... @@ -299,8 +299,8 @@ class FilterColumn(Serialisable): colorFilter: ColorFilter | None = None, iconFilter: IconFilter | None = None, extLst: Unused = None, - blank: Incomplete | None = None, - vals: Incomplete | None = None, + blank=None, + vals=None, ) -> None: ... class AutoFilter(Serialisable): @@ -310,9 +310,7 @@ class AutoFilter(Serialisable): sortState: Typed[SortState, Literal[True]] extLst: Typed[ExtensionList, Literal[True]] __elements__: ClassVar[tuple[str, ...]] - def __init__( - self, ref: Incomplete | None = None, filterColumn=(), sortState: SortState | None = None, extLst: Unused = None - ) -> None: ... 
+ def __init__(self, ref=None, filterColumn=(), sortState: SortState | None = None, extLst: Unused = None) -> None: ... def __bool__(self) -> bool: ... def add_filter_column(self, col_id, vals, blank: bool = False) -> None: ... def add_sort_condition(self, ref, descending: bool = False) -> None: ... diff --git a/stubs/openpyxl/openpyxl/worksheet/formula.pyi b/stubs/openpyxl/openpyxl/worksheet/formula.pyi index 78b762cf5957..73e3601d9f6d 100644 --- a/stubs/openpyxl/openpyxl/worksheet/formula.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/formula.pyi @@ -20,8 +20,8 @@ class DataTableFormula: ca: bool = False, dt2D: bool = False, dtr: bool = False, - r1: Incomplete | None = None, - r2: Incomplete | None = None, + r1=None, + r2=None, del1: bool = False, del2: bool = False, **kw, @@ -33,5 +33,5 @@ class ArrayFormula: ref: Incomplete text: Incomplete | None - def __init__(self, ref, text: Incomplete | None = None) -> None: ... + def __init__(self, ref, text=None) -> None: ... def __iter__(self) -> Iterator[tuple[str, str]]: ... diff --git a/stubs/openpyxl/openpyxl/worksheet/hyperlink.pyi b/stubs/openpyxl/openpyxl/worksheet/hyperlink.pyi index 1e8c5e166fa3..f569caaae701 100644 --- a/stubs/openpyxl/openpyxl/worksheet/hyperlink.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/hyperlink.pyi @@ -20,7 +20,7 @@ class Hyperlink(Serialisable): location: str | None = None, tooltip: str | None = None, display: str | None = None, - id: Incomplete | None = None, + id=None, target: str | None = None, ) -> None: ... diff --git a/stubs/openpyxl/openpyxl/worksheet/merge.pyi b/stubs/openpyxl/openpyxl/worksheet/merge.pyi index 7faa6cfaea71..0e97147febae 100644 --- a/stubs/openpyxl/openpyxl/worksheet/merge.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/merge.pyi @@ -14,7 +14,7 @@ class MergeCell(CellRange): @property def ref(self) -> str: ... __attrs__: ClassVar[tuple[str, ...]] - def __init__(self, ref: Incomplete | None = None) -> None: ... + def __init__(self, ref=None) -> None: ... def __copy__(self): ... class MergeCells(Serialisable): diff --git a/stubs/openpyxl/openpyxl/worksheet/page.pyi b/stubs/openpyxl/openpyxl/worksheet/page.pyi index 28368f924d41..d4c75ea6b259 100644 --- a/stubs/openpyxl/openpyxl/worksheet/page.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/page.pyi @@ -34,7 +34,7 @@ class PrintPageSetup(Serialisable): id: Incomplete def __init__( self, - worksheet: Incomplete | None = None, + worksheet=None, orientation: _PrintPageSetupOrientation | Literal["none"] | None = None, paperSize: ConvertibleToInt | None = None, scale: ConvertibleToInt | None = None, @@ -42,8 +42,8 @@ class PrintPageSetup(Serialisable): fitToWidth: ConvertibleToInt | None = None, firstPageNumber: ConvertibleToInt | None = None, useFirstPageNumber: _ConvertibleToBool | None = None, - paperHeight: Incomplete | None = None, - paperWidth: Incomplete | None = None, + paperHeight=None, + paperWidth=None, pageOrder: _PrintPageSetupPageOrder | Literal["none"] | None = None, usePrinterDefaults: _ConvertibleToBool | None = None, blackAndWhite: _ConvertibleToBool | None = None, @@ -53,7 +53,7 @@ class PrintPageSetup(Serialisable): horizontalDpi: ConvertibleToInt | None = None, verticalDpi: ConvertibleToInt | None = None, copies: ConvertibleToInt | None = None, - id: Incomplete | None = None, + id=None, ) -> None: ... def __bool__(self) -> bool: ... 
@property diff --git a/stubs/openpyxl/openpyxl/worksheet/pagebreak.pyi b/stubs/openpyxl/openpyxl/worksheet/pagebreak.pyi index 47ff5d24e8da..b0fe838d39db 100644 --- a/stubs/openpyxl/openpyxl/worksheet/pagebreak.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/pagebreak.pyi @@ -35,7 +35,7 @@ class RowBreak(Serialisable): def count(self) -> int: ... @property def manualBreakCount(self) -> int: ... - def append(self, brk: Incomplete | None = None) -> None: ... + def append(self, brk=None) -> None: ... PageBreak = RowBreak diff --git a/stubs/openpyxl/openpyxl/worksheet/protection.pyi b/stubs/openpyxl/openpyxl/worksheet/protection.pyi index 285576f3219f..141af543cb29 100644 --- a/stubs/openpyxl/openpyxl/worksheet/protection.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/protection.pyi @@ -59,11 +59,11 @@ class SheetProtection(Serialisable, _Protected): sort: _ConvertibleToBool = True, autoFilter: _ConvertibleToBool = True, pivotTables: _ConvertibleToBool = True, - password: Incomplete | None = None, + password=None, algorithmName: str | None = None, - saltValue: Incomplete | None = None, + saltValue=None, spinCount: ConvertibleToInt | None = None, - hashValue: Incomplete | None = None, + hashValue=None, ) -> None: ... @overload def set_password(self, value: str = "", already_hashed: Literal[False] = False) -> None: ... diff --git a/stubs/openpyxl/openpyxl/worksheet/related.pyi b/stubs/openpyxl/openpyxl/worksheet/related.pyi index eb1ee7ee6584..3e31438cea64 100644 --- a/stubs/openpyxl/openpyxl/worksheet/related.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/related.pyi @@ -5,5 +5,5 @@ from openpyxl.xml.functions import Element class Related(Serialisable): id: Incomplete - def __init__(self, id: Incomplete | None = None) -> None: ... + def __init__(self, id=None) -> None: ... def to_tree(self, tagname: str | None, idx: Unused = None) -> Element: ... # type: ignore[override] diff --git a/stubs/openpyxl/openpyxl/worksheet/table.pyi b/stubs/openpyxl/openpyxl/worksheet/table.pyi index a4808c85f04e..8650bbadec1d 100644 --- a/stubs/openpyxl/openpyxl/worksheet/table.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/table.pyi @@ -67,7 +67,7 @@ class TableFormula(Serialisable): array: Bool[Literal[True]] attr_text: Incomplete text: Alias - def __init__(self, array: _ConvertibleToBool | None = None, attr_text: Incomplete | None = None) -> None: ... + def __init__(self, array: _ConvertibleToBool | None = None, attr_text=None) -> None: ... class TableColumn(Serialisable): tagname: ClassVar[str] @@ -170,8 +170,8 @@ class Table(Serialisable): def __init__( self, id: ConvertibleToInt = 1, - displayName: Incomplete | None = None, - ref: Incomplete | None = None, + displayName=None, + ref=None, name: str | None = None, comment: str | None = None, tableType: _TableTableType | Literal["none"] | None = None, @@ -218,5 +218,5 @@ class TablePartList(Serialisable): class TableList(dict[Incomplete, Incomplete]): def add(self, table) -> None: ... - def get(self, name: Incomplete | None = None, table_range: Incomplete | None = None): ... + def get(self, name=None, table_range=None): ... def items(self): ... 
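One more mechanical consequence: where the rewrite removes the last use of Incomplete in a module, the now-unused import is dropped as well (cell/_writer.pyi, drawing/graphic.pyi, drawing/picture.pyi, styles/alignment.pyi and worksheet/dimensions.pyi above), while modules that still use Incomplete for attributes, such as attr_text: Incomplete and TableList(dict[Incomplete, Incomplete]) in worksheet/table.pyi, keep it. A hypothetical before/after at module level:

    # before.pyi -- hypothetical module where Incomplete appeared only in a default
    from _typeshed import Incomplete

    def lookup(name: Incomplete | None = None): ...

    # after.pyi -- nothing references Incomplete any more, so the import goes too
    def lookup(name=None): ...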
diff --git a/stubs/openpyxl/openpyxl/worksheet/views.pyi b/stubs/openpyxl/openpyxl/worksheet/views.pyi index 6ea3c2acf1e3..72f05ffe0844 100644 --- a/stubs/openpyxl/openpyxl/worksheet/views.pyi +++ b/stubs/openpyxl/openpyxl/worksheet/views.pyi @@ -85,7 +85,7 @@ class SheetView(Serialisable): zoomScalePageLayoutView: ConvertibleToInt | None = None, zoomToFit: _ConvertibleToBool | None = None, workbookViewId: ConvertibleToInt | None = 0, - selection: Incomplete | None = None, + selection=None, pane: Pane | None = None, ) -> None: ... diff --git a/stubs/opentracing/opentracing/span.pyi b/stubs/opentracing/opentracing/span.pyi index 0bb1d18dd033..dd9bff361ef7 100644 --- a/stubs/opentracing/opentracing/span.pyi +++ b/stubs/opentracing/opentracing/span.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from types import TracebackType from typing import Any from typing_extensions import Self @@ -26,5 +25,5 @@ class Span: def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... - def log_event(self, event: Any, payload: Incomplete | None = None) -> Self: ... + def log_event(self, event: Any, payload=None) -> Self: ... def log(self, **kwargs: Any) -> Self: ... diff --git a/stubs/paramiko/paramiko/_winapi.pyi b/stubs/paramiko/paramiko/_winapi.pyi index 1577807f04c1..e2baa2db37e3 100644 --- a/stubs/paramiko/paramiko/_winapi.pyi +++ b/stubs/paramiko/paramiko/_winapi.pyi @@ -34,7 +34,7 @@ if sys.platform == "win32": pos: int filemap: Incomplete view: Incomplete - def __init__(self, name: str, length: int, security_attributes: Incomplete | None = None) -> None: ... + def __init__(self, name: str, length: int, security_attributes=None) -> None: ... def __enter__(self) -> Self: ... def seek(self, pos: int) -> None: ... def write(self, msg: bytes) -> None: ... diff --git a/stubs/passlib/passlib/context.pyi b/stubs/passlib/passlib/context.pyi index 0d6521b91bd1..c6a779aafb13 100644 --- a/stubs/passlib/passlib/context.pyi +++ b/stubs/passlib/passlib/context.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, StrOrBytesPath, SupportsItems +from _typeshed import StrOrBytesPath, SupportsItems from typing import Any from typing_extensions import Self @@ -16,14 +16,14 @@ class CryptPolicy: def has_schemes(self): ... def iter_handlers(self): ... def schemes(self, resolve: bool = False): ... - def get_handler(self, name: Incomplete | None = None, category: Incomplete | None = None, required: bool = False): ... - def get_min_verify_time(self, category: Incomplete | None = None): ... - def get_options(self, name, category: Incomplete | None = None): ... - def handler_is_deprecated(self, name, category: Incomplete | None = None): ... + def get_handler(self, name=None, category=None, required: bool = False): ... + def get_min_verify_time(self, category=None): ... + def get_options(self, name, category=None): ... + def handler_is_deprecated(self, name, category=None): ... def iter_config(self, ini: bool = False, resolve: bool = False): ... def to_dict(self, resolve: bool = False): ... def to_file(self, stream, section: str = "passlib") -> None: ... - def to_string(self, section: str = "passlib", encoding: Incomplete | None = None): ... + def to_string(self, section: str = "passlib", encoding=None): ... class CryptContext: @classmethod @@ -33,7 +33,7 @@ class CryptContext: def copy(self, **kwds: Any) -> CryptContext: ... def using(self, **kwds: Any) -> CryptContext: ... def replace(self, **kwds): ... 
- def __init__(self, schemes: Incomplete | None = None, policy=..., _autoload: bool = True, **kwds) -> None: ... + def __init__(self, schemes=None, policy=..., _autoload: bool = True, **kwds) -> None: ... policy: CryptPolicy def load_path( self, path: StrOrBytesPath, update: bool = False, section: str = "passlib", encoding: str = "utf-8" @@ -46,9 +46,9 @@ class CryptContext: encoding: str = "utf-8", ) -> None: ... def update(self, *args: Any, **kwds: Any) -> None: ... - def schemes(self, resolve: bool = False, category: Incomplete | None = None, unconfigured: bool = False): ... - def default_scheme(self, category: Incomplete | None = None, resolve: bool = False, unconfigured: bool = False): ... - def handler(self, scheme: Incomplete | None = None, category: Incomplete | None = None, unconfigured: bool = False): ... + def schemes(self, resolve: bool = False, category=None, unconfigured: bool = False): ... + def default_scheme(self, category=None, resolve: bool = False, unconfigured: bool = False): ... + def handler(self, scheme=None, category=None, unconfigured: bool = False): ... @property def context_kwds(self): ... def to_dict(self, resolve: bool = False) -> dict[str, Any]: ... @@ -63,12 +63,10 @@ class CryptContext: def needs_update( self, hash: str | bytes, scheme: str | None = None, category: str | None = None, secret: str | bytes | None = None ) -> bool: ... - def hash_needs_update(self, hash, scheme: Incomplete | None = None, category: Incomplete | None = None): ... - def genconfig(self, scheme: Incomplete | None = None, category: Incomplete | None = None, **settings): ... - def genhash(self, secret, config, scheme: Incomplete | None = None, category: Incomplete | None = None, **kwds): ... - def identify( - self, hash, category: Incomplete | None = None, resolve: bool = False, required: bool = False, unconfigured: bool = False - ): ... + def hash_needs_update(self, hash, scheme=None, category=None): ... + def genconfig(self, scheme=None, category=None, **settings): ... + def genhash(self, secret, config, scheme=None, category=None, **kwds): ... + def identify(self, hash, category=None, resolve: bool = False, required: bool = False, unconfigured: bool = False): ... def hash(self, secret: str | bytes, scheme: str | None = None, category: str | None = None, **kwds: Any) -> str: ... def encrypt(self, *args, **kwds): ... def verify( @@ -83,7 +81,7 @@ class CryptContext: def enable(self, hash: str | bytes) -> str: ... class LazyCryptContext(CryptContext): - def __init__(self, schemes: Incomplete | None = None, **kwds) -> None: ... + def __init__(self, schemes=None, **kwds) -> None: ... def __getattribute__(self, attr: str) -> Any: ... __all__ = ["CryptContext", "LazyCryptContext", "CryptPolicy"] diff --git a/stubs/passlib/passlib/crypto/_md4.pyi b/stubs/passlib/passlib/crypto/_md4.pyi index cbd87fb432bd..448d2abb05b5 100644 --- a/stubs/passlib/passlib/crypto/_md4.pyi +++ b/stubs/passlib/passlib/crypto/_md4.pyi @@ -1,11 +1,9 @@ -from _typeshed import Incomplete - class md4: name: str digest_size: int digestsize: int block_size: int - def __init__(self, content: Incomplete | None = None) -> None: ... + def __init__(self, content=None) -> None: ... def update(self, content) -> None: ... def copy(self): ... def digest(self): ... 
diff --git a/stubs/passlib/passlib/crypto/digest.pyi b/stubs/passlib/passlib/crypto/digest.pyi index 7aa493613f31..f4ffced63eaf 100644 --- a/stubs/passlib/passlib/crypto/digest.pyi +++ b/stubs/passlib/passlib/crypto/digest.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from passlib.utils import SequenceMixin @@ -24,8 +23,8 @@ class HashInfo(SequenceMixin): def supported_by_hashlib_pbkdf2(self): ... def compile_hmac(digest, key, multipart: bool = False): ... -def pbkdf1(digest, secret, salt, rounds, keylen: Incomplete | None = None): ... -def pbkdf2_hmac(digest, secret, salt, rounds, keylen: Incomplete | None = None): ... +def pbkdf1(digest, secret, salt, rounds, keylen=None): ... +def pbkdf2_hmac(digest, secret, salt, rounds, keylen=None): ... __all__ = [ # hash utils diff --git a/stubs/passlib/passlib/exc.pyi b/stubs/passlib/passlib/exc.pyi index 945a3ce0430f..f68f28f414c6 100644 --- a/stubs/passlib/passlib/exc.pyi +++ b/stubs/passlib/passlib/exc.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any class UnknownBackendError(ValueError): @@ -12,15 +11,15 @@ class PasswordValueError(ValueError): ... class PasswordSizeError(PasswordValueError): max_size: Any - def __init__(self, max_size, msg: Incomplete | None = None) -> None: ... + def __init__(self, max_size, msg=None) -> None: ... class PasswordTruncateError(PasswordSizeError): - def __init__(self, cls, msg: Incomplete | None = None) -> None: ... + def __init__(self, cls, msg=None) -> None: ... class PasslibSecurityError(RuntimeError): ... class TokenError(ValueError): - def __init__(self, msg: Incomplete | None = None, *args, **kwds) -> None: ... + def __init__(self, msg=None, *args, **kwds) -> None: ... class MalformedTokenError(TokenError): ... class InvalidTokenError(TokenError): ... @@ -32,7 +31,7 @@ class UsedTokenError(TokenError): class UnknownHashError(ValueError): value: Any message: Any - def __init__(self, message: Incomplete | None = None, value: Incomplete | None = None) -> None: ... + def __init__(self, message=None, value=None) -> None: ... class PasslibWarning(UserWarning): ... class PasslibConfigWarning(PasslibWarning): ... @@ -43,11 +42,11 @@ class PasslibSecurityWarning(PasslibWarning): ... def type_name(value): ... def ExpectedTypeError(value, expected, param): ... def ExpectedStringError(value, param): ... -def MissingDigestError(handler: Incomplete | None = None): ... -def NullPasswordError(handler: Incomplete | None = None): ... -def InvalidHashError(handler: Incomplete | None = None): ... -def MalformedHashError(handler: Incomplete | None = None, reason: Incomplete | None = None): ... -def ZeroPaddedRoundsError(handler: Incomplete | None = None): ... +def MissingDigestError(handler=None): ... +def NullPasswordError(handler=None): ... +def InvalidHashError(handler=None): ... +def MalformedHashError(handler=None, reason=None): ... +def ZeroPaddedRoundsError(handler=None): ... def ChecksumSizeError(handler, raw: bool = False): ... ENABLE_DEBUG_ONLY_REPR: bool diff --git a/stubs/passlib/passlib/ext/django/utils.pyi b/stubs/passlib/passlib/ext/django/utils.pyi index aa67154af7c3..c97ad907a0a3 100644 --- a/stubs/passlib/passlib/ext/django/utils.pyi +++ b/stubs/passlib/passlib/ext/django/utils.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any __all__ = ["DJANGO_VERSION", "MIN_DJANGO_VERSION", "get_preset_config", "quirks"] @@ -15,7 +14,7 @@ def get_preset_config(name): ... 
class DjangoTranslator: context: Any - def __init__(self, context: Incomplete | None = None, **kwds) -> None: ... + def __init__(self, context=None, **kwds) -> None: ... def reset_hashers(self) -> None: ... def passlib_to_django_name(self, passlib_name): ... def passlib_to_django(self, passlib_hasher, cached: bool = True): ... @@ -29,13 +28,13 @@ class DjangoContextAdapter(DjangoTranslator): enabled: bool patched: bool log: Any - def __init__(self, context: Incomplete | None = None, get_user_category: Incomplete | None = None, **kwds) -> None: ... + def __init__(self, context=None, get_user_category=None, **kwds) -> None: ... def reset_hashers(self) -> None: ... def get_hashers(self): ... def get_hasher(self, algorithm: str = "default"): ... def identify_hasher(self, encoded): ... - def make_password(self, password, salt: Incomplete | None = None, hasher: str = "default"): ... - def check_password(self, password, encoded, setter: Incomplete | None = None, preferred: str = "default"): ... + def make_password(self, password, salt=None, hasher: str = "default"): ... + def check_password(self, password, encoded, setter=None, preferred: str = "default"): ... def user_check_password(self, user, password): ... def user_set_password(self, user, password) -> None: ... def get_user_category(self, user): ... diff --git a/stubs/passlib/passlib/handlers/argon2.pyi b/stubs/passlib/passlib/handlers/argon2.pyi index 7f24f3be3c7e..41585256b093 100644 --- a/stubs/passlib/passlib/handlers/argon2.pyi +++ b/stubs/passlib/passlib/handlers/argon2.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, ClassVar import passlib.utils.handlers as uh @@ -39,29 +38,21 @@ class _Argon2Common( # type: ignore[misc] @classmethod def using( # type: ignore[override] cls, - type: Incomplete | None = None, - memory_cost: Incomplete | None = None, - salt_len: Incomplete | None = None, - time_cost: Incomplete | None = None, - digest_size: Incomplete | None = None, - checksum_size: Incomplete | None = None, - hash_len: Incomplete | None = None, - max_threads: Incomplete | None = None, + type=None, + memory_cost=None, + salt_len=None, + time_cost=None, + digest_size=None, + checksum_size=None, + hash_len=None, + max_threads=None, **kwds, ): ... @classmethod def identify(cls, hash): ... @classmethod def from_string(cls, hash): ... - def __init__( - self, - type: Incomplete | None = None, - type_d: bool = False, - version: Incomplete | None = None, - memory_cost: Incomplete | None = None, - data: Incomplete | None = None, - **kwds, - ) -> None: ... + def __init__(self, type=None, type_d: bool = False, version=None, memory_cost=None, data=None, **kwds) -> None: ... class _NoBackend(_Argon2Common): @classmethod diff --git a/stubs/passlib/passlib/handlers/bcrypt.pyi b/stubs/passlib/passlib/handlers/bcrypt.pyi index 6117d9c8c23d..0965bfdd36e8 100644 --- a/stubs/passlib/passlib/handlers/bcrypt.pyi +++ b/stubs/passlib/passlib/handlers/bcrypt.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, ClassVar import passlib.utils.handlers as uh @@ -46,12 +45,12 @@ class bcrypt_sha256(_wrapped_bcrypt): default_ident: ClassVar[str] version: ClassVar[int] @classmethod - def using(cls, version: Incomplete | None = None, **kwds): ... # type: ignore[override] + def using(cls, version=None, **kwds): ... # type: ignore[override] prefix: Any @classmethod def identify(cls, hash): ... @classmethod def from_string(cls, hash): ... - def __init__(self, version: Incomplete | None = None, **kwds) -> None: ... 
+ def __init__(self, version=None, **kwds) -> None: ... __all__ = ["bcrypt"] diff --git a/stubs/passlib/passlib/handlers/digests.pyi b/stubs/passlib/passlib/handlers/digests.pyi index 8353f7d9d760..09d0970b53e5 100644 --- a/stubs/passlib/passlib/handlers/digests.pyi +++ b/stubs/passlib/passlib/handlers/digests.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, ClassVar import passlib.utils.handlers as uh @@ -7,7 +6,7 @@ class HexDigestHash(uh.StaticHandler): checksum_chars: ClassVar[str] supported: ClassVar[bool] -def create_hex_hash(digest, module="passlib.handlers.digests", django_name: Incomplete | None = None, required: bool = True): ... +def create_hex_hash(digest, module="passlib.handlers.digests", django_name=None, required: bool = True): ... hex_md4: Any hex_md5: Any @@ -21,7 +20,7 @@ class htdigest(uh.MinimalHandler): setting_kwds: ClassVar[tuple[str, ...]] context_kwds: ClassVar[tuple[str, ...]] @classmethod - def hash(cls, secret, user, realm, encoding: Incomplete | None = None): ... # type: ignore[override] + def hash(cls, secret, user, realm, encoding=None): ... # type: ignore[override] @classmethod def verify(cls, secret, hash, user, realm, encoding: str = "utf-8"): ... # type: ignore[override] @classmethod @@ -29,6 +28,6 @@ class htdigest(uh.MinimalHandler): @classmethod def genconfig(cls): ... @classmethod - def genhash(cls, secret, config, user, realm, encoding: Incomplete | None = None): ... # type: ignore[override] + def genhash(cls, secret, config, user, realm, encoding=None): ... # type: ignore[override] __all__ = ["create_hex_hash", "hex_md4", "hex_md5", "hex_sha1", "hex_sha256", "hex_sha512"] diff --git a/stubs/passlib/passlib/handlers/misc.pyi b/stubs/passlib/passlib/handlers/misc.pyi index 2120f052a1c9..4e3cc4df76d7 100644 --- a/stubs/passlib/passlib/handlers/misc.pyi +++ b/stubs/passlib/passlib/handlers/misc.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, ClassVar import passlib.utils.handlers as uh @@ -19,7 +18,7 @@ class unix_disabled(DisabledHash, uh.MinimalHandler): setting_kwds: ClassVar[tuple[str, ...]] context_kwds: ClassVar[tuple[str, ...]] @classmethod - def using(cls, marker: Incomplete | None = None, **kwds): ... # type: ignore[override] + def using(cls, marker=None, **kwds): ... # type: ignore[override] @classmethod def identify(cls, hash: str | bytes) -> bool: ... @classmethod @@ -27,7 +26,7 @@ class unix_disabled(DisabledHash, uh.MinimalHandler): @classmethod def hash(cls, secret: str | bytes, **kwds) -> str: ... @classmethod - def genhash(cls, secret: str | bytes, config, marker: Incomplete | None = None): ... # type: ignore[override] + def genhash(cls, secret: str | bytes, config, marker=None): ... # type: ignore[override] @classmethod def disable(cls, hash: str | bytes | None = None) -> str: ... @classmethod @@ -41,7 +40,7 @@ class plaintext(uh.MinimalHandler): @classmethod def identify(cls, hash: str | bytes): ... @classmethod - def hash(cls, secret: str | bytes, encoding: Incomplete | None = None): ... # type: ignore[override] + def hash(cls, secret: str | bytes, encoding=None): ... # type: ignore[override] @classmethod def verify(cls, secret: str | bytes, hash: str | bytes, encoding: str | None = None): ... 
# type: ignore[override] @classmethod diff --git a/stubs/passlib/passlib/handlers/scram.pyi b/stubs/passlib/passlib/handlers/scram.pyi index 7c6fbd23c41a..ab7f2bf53228 100644 --- a/stubs/passlib/passlib/handlers/scram.pyi +++ b/stubs/passlib/passlib/handlers/scram.pyi @@ -23,8 +23,8 @@ class scram(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): @classmethod def from_string(cls, hash): ... @classmethod - def using(cls, default_algs: Incomplete | None = None, algs: Incomplete | None = None, **kwds): ... # type: ignore[override] - def __init__(self, algs: Incomplete | None = None, **kwds) -> None: ... + def using(cls, default_algs=None, algs=None, **kwds): ... # type: ignore[override] + def __init__(self, algs=None, **kwds) -> None: ... @classmethod def verify(cls, secret, hash, full: bool = False): ... # type: ignore[override] diff --git a/stubs/passlib/passlib/handlers/scrypt.pyi b/stubs/passlib/passlib/handlers/scrypt.pyi index 871390276c29..431ad7c9f7fc 100644 --- a/stubs/passlib/passlib/handlers/scrypt.pyi +++ b/stubs/passlib/passlib/handlers/scrypt.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import ClassVar import passlib.utils.handlers as uh @@ -18,13 +17,13 @@ class scrypt(uh.ParallelismMixin, uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum parallelism: int block_size: int @classmethod - def using(cls, block_size: Incomplete | None = None, **kwds): ... # type: ignore[override] + def using(cls, block_size=None, **kwds): ... # type: ignore[override] @classmethod def from_string(cls, hash): ... @classmethod def parse(cls, hash): ... def to_string(self): ... - def __init__(self, block_size: Incomplete | None = None, **kwds) -> None: ... + def __init__(self, block_size=None, **kwds) -> None: ... @classmethod def get_backend(cls): ... @classmethod diff --git a/stubs/passlib/passlib/handlers/windows.pyi b/stubs/passlib/passlib/handlers/windows.pyi index 165deec3d8b7..28a8fd7181ac 100644 --- a/stubs/passlib/passlib/handlers/windows.pyi +++ b/stubs/passlib/passlib/handlers/windows.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, ClassVar, Literal, overload import passlib.utils.handlers as uh @@ -9,7 +8,7 @@ class lmhash(uh.TruncateMixin, uh.HasEncodingContext, uh.StaticHandler): checksum_size: ClassVar[int] truncate_size: ClassVar[int] @classmethod - def raw(cls, secret, encoding: Incomplete | None = None): ... + def raw(cls, secret, encoding=None): ... class nthash(uh.StaticHandler): name: ClassVar[str] diff --git a/stubs/passlib/passlib/utils/decor.pyi b/stubs/passlib/passlib/utils/decor.pyi index 3104d06f7ca6..939db459430c 100644 --- a/stubs/passlib/passlib/utils/decor.pyi +++ b/stubs/passlib/passlib/utils/decor.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any class classproperty: @@ -22,24 +21,12 @@ class memoized_property: def __init__(self, func) -> None: ... def __get__(self, obj, cls): ... def clear_cache(self, obj) -> None: ... - def peek_cache(self, obj, default: Incomplete | None = None): ... + def peek_cache(self, obj, default=None): ... def deprecated_function( - msg: Incomplete | None = None, - deprecated: Incomplete | None = None, - removed: Incomplete | None = None, - updoc: bool = True, - replacement: Incomplete | None = None, - _is_method: bool = False, - func_module: Incomplete | None = None, -): ... 
-def deprecated_method( - msg: Incomplete | None = None, - deprecated: Incomplete | None = None, - removed: Incomplete | None = None, - updoc: bool = True, - replacement: Incomplete | None = None, + msg=None, deprecated=None, removed=None, updoc: bool = True, replacement=None, _is_method: bool = False, func_module=None ): ... +def deprecated_method(msg=None, deprecated=None, removed=None, updoc: bool = True, replacement=None): ... __all__ = [ "classproperty", diff --git a/stubs/passlib/passlib/utils/handlers.pyi b/stubs/passlib/passlib/utils/handlers.pyi index 6c76c322cb78..420a1ef06584 100644 --- a/stubs/passlib/passlib/utils/handlers.pyi +++ b/stubs/passlib/passlib/utils/handlers.pyi @@ -12,10 +12,8 @@ PADDED_B64_CHARS = PADDED_BASE64_CHARS UC_HEX_CHARS = UPPER_HEX_CHARS LC_HEX_CHARS = LOWER_HEX_CHARS -def parse_mc2(hash, prefix, sep="$", handler: Incomplete | None = None): ... -def parse_mc3( - hash, prefix, sep="$", rounds_base: int = 10, default_rounds: Incomplete | None = None, handler: Incomplete | None = None -): ... +def parse_mc2(hash, prefix, sep="$", handler=None): ... +def parse_mc3(hash, prefix, sep="$", rounds_base: int = 10, default_rounds=None, handler=None): ... def render_mc2(ident, salt, checksum, sep="$"): ... def render_mc3(ident, rounds, salt, checksum, sep="$", rounds_base: int = 10): ... @@ -68,13 +66,13 @@ class HasEncodingContext(GenericHandler): class HasUserContext(GenericHandler): user: Incomplete | None - def __init__(self, user: Incomplete | None = None, **kwds) -> None: ... + def __init__(self, user=None, **kwds) -> None: ... @classmethod - def hash(cls, secret, user: Incomplete | None = None, **context): ... + def hash(cls, secret, user=None, **context): ... @classmethod - def verify(cls, secret, hash, user: Incomplete | None = None, **context): ... + def verify(cls, secret, hash, user=None, **context): ... @classmethod - def genhash(cls, secret, config, user: Incomplete | None = None, **context): ... + def genhash(cls, secret, config, user=None, **context): ... class HasRawChecksum(GenericHandler): ... @@ -84,10 +82,8 @@ class HasManyIdents(GenericHandler): ident_aliases: ClassVar[dict[str, str] | None] ident: str # type: ignore[misc] @classmethod - def using( # type: ignore[override] - cls, default_ident: Incomplete | None = None, ident: Incomplete | None = None, **kwds - ): ... - def __init__(self, ident: Incomplete | None = None, **kwds) -> None: ... + def using(cls, default_ident=None, ident=None, **kwds): ... # type: ignore[override] + def __init__(self, ident=None, **kwds) -> None: ... class HasSalt(GenericHandler): min_salt_size: ClassVar[int] @@ -120,24 +116,24 @@ class HasRounds(GenericHandler): @classmethod def using( # type: ignore[override] cls, - min_desired_rounds: Incomplete | None = None, - max_desired_rounds: Incomplete | None = None, - default_rounds: Incomplete | None = None, - vary_rounds: Incomplete | None = None, - min_rounds: Incomplete | None = None, - max_rounds: Incomplete | None = None, - rounds: Incomplete | None = None, + min_desired_rounds=None, + max_desired_rounds=None, + default_rounds=None, + vary_rounds=None, + min_rounds=None, + max_rounds=None, + rounds=None, **kwds, ): ... - def __init__(self, rounds: Incomplete | None = None, **kwds) -> None: ... + def __init__(self, rounds=None, **kwds) -> None: ... @classmethod - def bitsize(cls, rounds: Incomplete | None = None, vary_rounds: float = 0.1, **kwds): ... + def bitsize(cls, rounds=None, vary_rounds: float = 0.1, **kwds): ... 
class ParallelismMixin(GenericHandler): parallelism: int @classmethod - def using(cls, parallelism: Incomplete | None = None, **kwds): ... # type: ignore[override] - def __init__(self, parallelism: Incomplete | None = None, **kwds) -> None: ... + def using(cls, parallelism=None, **kwds): ... # type: ignore[override] + def __init__(self, parallelism=None, **kwds) -> None: ... class BackendMixin(PasswordHash, metaclass=abc.ABCMeta): backends: ClassVar[tuple[str, ...] | None] @@ -156,16 +152,7 @@ class PrefixWrapper: prefix: Any orig_prefix: Any __doc__: Any - def __init__( - self, - name, - wrapped, - prefix="", - orig_prefix="", - lazy: bool = False, - doc: Incomplete | None = None, - ident: Incomplete | None = None, - ) -> None: ... + def __init__(self, name, wrapped, prefix="", orig_prefix="", lazy: bool = False, doc=None, ident=None) -> None: ... @property def wrapped(self): ... @property diff --git a/stubs/passlib/passlib/utils/pbkdf2.pyi b/stubs/passlib/passlib/utils/pbkdf2.pyi index 31f0748694d2..f64dfb4d02aa 100644 --- a/stubs/passlib/passlib/utils/pbkdf2.pyi +++ b/stubs/passlib/passlib/utils/pbkdf2.pyi @@ -1,10 +1,8 @@ -from _typeshed import Incomplete - from passlib.crypto.digest import norm_hash_name as norm_hash_name def get_prf(name): ... -def pbkdf1(secret, salt, rounds, keylen: Incomplete | None = None, hash: str = "sha1"): ... -def pbkdf2(secret, salt, rounds, keylen: Incomplete | None = None, prf: str = "hmac-sha1"): ... +def pbkdf1(secret, salt, rounds, keylen=None, hash: str = "sha1"): ... +def pbkdf2(secret, salt, rounds, keylen=None, prf: str = "hmac-sha1"): ... __all__ = [ # hash utils diff --git a/stubs/peewee/peewee.pyi b/stubs/peewee/peewee.pyi index 9aaf79ac88a1..9133731a4347 100644 --- a/stubs/peewee/peewee.pyi +++ b/stubs/peewee/peewee.pyi @@ -190,11 +190,11 @@ class Table(_HashableSource, BaseTable): # type: ignore[misc] schema: str | None = None, alias: Incomplete | None = ..., _model: Incomplete | None = ..., - _database: Incomplete | None = None, + _database=None, ) -> None: ... def clone(self): ... - def bind(self, database: Incomplete | None = None): ... - def bind_ctx(self, database: Incomplete | None = None): ... + def bind(self, database=None): ... + def bind_ctx(self, database=None): ... def select(self, *columns): ... def insert(self, insert: Incomplete | None = ..., columns: Incomplete | None = ..., **kwargs): ... def replace(self, insert: Incomplete | None = ..., columns: Incomplete | None = ..., **kwargs): ... @@ -463,7 +463,7 @@ class Case(ColumnBase): predicate: Incomplete expression_tuples: Incomplete default: Incomplete | None - def __init__(self, predicate, expression_tuples, default: Incomplete | None = None) -> None: ... + def __init__(self, predicate, expression_tuples, default=None) -> None: ... def __sql__(self, ctx): ... class NodeList(ColumnBase): @@ -517,8 +517,8 @@ class OnConflict(Node): class BaseQuery(Node): default_row_type: Incomplete - def __init__(self, _database: Incomplete | None = None, **kwargs) -> None: ... - def bind(self, database: Incomplete | None = None): ... + def __init__(self, _database=None, **kwargs) -> None: ... + def bind(self, database=None): ... def clone(self): ... def dicts(self, as_dict: bool = ...) -> Self: ... def tuples(self, as_tuple: bool = ...) -> Self: ... @@ -526,8 +526,8 @@ class BaseQuery(Node): def objects(self, constructor: Incomplete | None = ...) -> Self: ... def __sql__(self, ctx) -> None: ... def sql(self): ... - def execute(self, database: Incomplete | None = None): ... 
- def iterator(self, database: Incomplete | None = None): ... + def execute(self, database=None): ... + def iterator(self, database=None): ... def __iter__(self): ... def __getitem__(self, value): ... def __len__(self) -> int: ... @@ -571,20 +571,20 @@ class SelectQuery(Query): def select_from(self, *columns): ... class SelectBase(_HashableSource, Source, SelectQuery): # type: ignore[misc] - def peek(self, database: Incomplete | None = None, n: int = ...): ... - def first(self, database: Incomplete | None = None, n: int = ...): ... - def scalar(self, database: Incomplete | None = None, as_tuple: bool = ..., as_dict: bool = ...): ... - def scalars(self, database: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... - def count(self, database: Incomplete | None = None, clear_limit: bool = ...): ... - def exists(self, database: Incomplete | None = None): ... - def get(self, database: Incomplete | None = None): ... + def peek(self, database=None, n: int = ...): ... + def first(self, database=None, n: int = ...): ... + def scalar(self, database=None, as_tuple: bool = ..., as_dict: bool = ...): ... + def scalars(self, database=None) -> Generator[Incomplete, None, None]: ... + def count(self, database=None, clear_limit: bool = ...): ... + def exists(self, database=None): ... + def get(self, database=None): ... class CompoundSelectQuery(SelectBase): lhs: Incomplete op: Incomplete rhs: Incomplete def __init__(self, lhs, op, rhs) -> None: ... - def exists(self, database: Incomplete | None = None): ... + def exists(self, database=None): ... def __sql__(self, ctx): ... class Select(SelectBase): @@ -811,7 +811,7 @@ class Database(_callable_context_manager): def is_closed(self) -> bool: ... def is_connection_usable(self) -> bool: ... def connection(self): ... - def cursor(self, commit: Incomplete | None = None, named_cursor: Incomplete | None = None): ... + def cursor(self, commit=None, named_cursor=None): ... def execute_sql(self, sql, params: Incomplete | None = ..., commit=...): ... def execute(self, query, commit=..., **context_options): ... def get_context_options(self): ... @@ -1495,7 +1495,7 @@ class _SortedFieldList: class SchemaManager: model: Incomplete context_options: Incomplete - def __init__(self, model, database: Incomplete | None = None, **context_options) -> None: ... + def __init__(self, model, database=None, **context_options) -> None: ... @property def database(self): ... @database.setter @@ -1546,7 +1546,7 @@ class Metadata: def __init__( self, model, - database: Incomplete | None = None, + database=None, table_name: Incomplete | None = ..., indexes: Incomplete | None = ..., primary_key: Incomplete | None = ..., @@ -1744,8 +1744,8 @@ class BaseModelSelect(_ModelQueryHelper): __sub__: Incomplete def __iter__(self): ... def prefetch(self, *subqueries): ... - def get(self, database: Incomplete | None = None): ... - def get_or_none(self, database: Incomplete | None = None): ... + def get(self, database=None): ... + def get_or_none(self, database=None): ... def group_by(self, *columns) -> Self: ... class ModelCompoundSelectQuery(BaseModelSelect, CompoundSelectQuery): # type: ignore[misc] diff --git a/stubs/pexpect/pexpect/FSM.pyi b/stubs/pexpect/pexpect/FSM.pyi index 7a4a2d3ad611..abc0cb037638 100644 --- a/stubs/pexpect/pexpect/FSM.pyi +++ b/stubs/pexpect/pexpect/FSM.pyi @@ -14,15 +14,11 @@ class FSM: next_state: Incomplete action: Incomplete memory: Incomplete - def __init__(self, initial_state, memory: Incomplete | None = None) -> None: ... 
+ def __init__(self, initial_state, memory=None) -> None: ... def reset(self) -> None: ... - def add_transition( - self, input_symbol, state, action: Incomplete | None = None, next_state: Incomplete | None = None - ) -> None: ... - def add_transition_list( - self, list_input_symbols, state, action: Incomplete | None = None, next_state: Incomplete | None = None - ) -> None: ... - def add_transition_any(self, state, action: Incomplete | None = None, next_state: Incomplete | None = None) -> None: ... + def add_transition(self, input_symbol, state, action=None, next_state=None) -> None: ... + def add_transition_list(self, list_input_symbols, state, action=None, next_state=None) -> None: ... + def add_transition_any(self, state, action=None, next_state=None) -> None: ... def set_default_transition(self, action, next_state) -> None: ... def get_transition(self, input_symbol, state): ... def process(self, input_symbol) -> None: ... diff --git a/stubs/pexpect/pexpect/replwrap.pyi b/stubs/pexpect/pexpect/replwrap.pyi index 39f7b35d89f7..1ef0a541ff8c 100644 --- a/stubs/pexpect/pexpect/replwrap.pyi +++ b/stubs/pexpect/pexpect/replwrap.pyi @@ -17,7 +17,7 @@ class REPLWrapper: prompt_change, new_prompt="[PEXPECT_PROMPT>", continuation_prompt="[PEXPECT_PROMPT+", - extra_init_cmd: Incomplete | None = None, + extra_init_cmd=None, ) -> None: ... def set_prompt(self, orig_prompt, prompt_change) -> None: ... def run_command(self, command, timeout: float | None = -1, async_: bool = False): ... diff --git a/stubs/pika/pika/adapters/base_connection.pyi b/stubs/pika/pika/adapters/base_connection.pyi index a9dc74b92579..b8fb59831335 100644 --- a/stubs/pika/pika/adapters/base_connection.pyi +++ b/stubs/pika/pika/adapters/base_connection.pyi @@ -21,9 +21,7 @@ class BaseConnection(Connection, metaclass=abc.ABCMeta): ) -> None: ... @classmethod @abc.abstractmethod - def create_connection( - cls, connection_configs, on_done, custom_ioloop: Incomplete | None = None, workflow: Incomplete | None = None - ): ... + def create_connection(cls, connection_configs, on_done, custom_ioloop=None, workflow=None): ... @property def ioloop(self): ... diff --git a/stubs/pika/pika/adapters/blocking_connection.pyi b/stubs/pika/pika/adapters/blocking_connection.pyi index af721b7863b7..41a5dc3af5a7 100644 --- a/stubs/pika/pika/adapters/blocking_connection.pyi +++ b/stubs/pika/pika/adapters/blocking_connection.pyi @@ -13,7 +13,7 @@ from ..spec import BasicProperties LOGGER: Logger class _CallbackResult: - def __init__(self, value_class: Incomplete | None = None) -> None: ... + def __init__(self, value_class=None) -> None: ... def reset(self) -> None: ... def __bool__(self) -> bool: ... __nonzero__: Incomplete @@ -56,9 +56,7 @@ class BlockingConnection: class _OnChannelOpenedArgs(NamedTuple): channel: Incomplete - def __init__( - self, parameters: Parameters | Sequence[Parameters] | None = None, _impl_class: Incomplete | None = None - ) -> None: ... + def __init__(self, parameters: Parameters | Sequence[Parameters] | None = None, _impl_class=None) -> None: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None @@ -129,13 +127,7 @@ class _ConsumerInfo: on_message_callback: Incomplete alternate_event_sink: Incomplete state: Incomplete - def __init__( - self, - consumer_tag, - auto_ack, - on_message_callback: Incomplete | None = None, - alternate_event_sink: Incomplete | None = None, - ) -> None: ... 
+ def __init__(self, consumer_tag, auto_ack, on_message_callback=None, alternate_event_sink=None) -> None: ... @property def setting_up(self): ... @property @@ -188,24 +180,13 @@ class BlockingChannel: def add_on_cancel_callback(self, callback) -> None: ... def add_on_return_callback(self, callback): ... def basic_consume( - self, - queue, - on_message_callback, - auto_ack: bool = False, - exclusive: bool = False, - consumer_tag: Incomplete | None = None, - arguments: Incomplete | None = None, + self, queue, on_message_callback, auto_ack: bool = False, exclusive: bool = False, consumer_tag=None, arguments=None ): ... def basic_cancel(self, consumer_tag): ... def start_consuming(self) -> None: ... - def stop_consuming(self, consumer_tag: Incomplete | None = None) -> None: ... + def stop_consuming(self, consumer_tag=None) -> None: ... def consume( - self, - queue, - auto_ack: bool = False, - exclusive: bool = False, - arguments: Incomplete | None = None, - inactivity_timeout: Incomplete | None = None, + self, queue, auto_ack: bool = False, exclusive: bool = False, arguments=None, inactivity_timeout=None ) -> Generator[Incomplete, None, None]: ... def get_waiting_message_count(self): ... def cancel(self): ... @@ -235,14 +216,8 @@ class BlockingChannel: arguments: _ArgumentMapping | None = None, ): ... def exchange_delete(self, exchange: str | None = None, if_unused: bool = False): ... - def exchange_bind(self, destination, source, routing_key: str = "", arguments: Incomplete | None = None): ... - def exchange_unbind( - self, - destination: Incomplete | None = None, - source: Incomplete | None = None, - routing_key: str = "", - arguments: Incomplete | None = None, - ): ... + def exchange_bind(self, destination, source, routing_key: str = "", arguments=None): ... + def exchange_unbind(self, destination=None, source=None, routing_key: str = "", arguments=None): ... def queue_declare( self, queue, @@ -250,18 +225,12 @@ class BlockingChannel: durable: bool = False, exclusive: bool = False, auto_delete: bool = False, - arguments: Incomplete | None = None, + arguments=None, ): ... def queue_delete(self, queue, if_unused: bool = False, if_empty: bool = False): ... def queue_purge(self, queue): ... - def queue_bind(self, queue, exchange, routing_key: Incomplete | None = None, arguments: Incomplete | None = None): ... - def queue_unbind( - self, - queue, - exchange: Incomplete | None = None, - routing_key: Incomplete | None = None, - arguments: Incomplete | None = None, - ): ... + def queue_bind(self, queue, exchange, routing_key=None, arguments=None): ... + def queue_unbind(self, queue, exchange=None, routing_key=None, arguments=None): ... def tx_select(self): ... def tx_commit(self): ... def tx_rollback(self): ... diff --git a/stubs/pika/pika/adapters/select_connection.pyi b/stubs/pika/pika/adapters/select_connection.pyi index 08c343e776c1..4eed5e1f351e 100644 --- a/stubs/pika/pika/adapters/select_connection.pyi +++ b/stubs/pika/pika/adapters/select_connection.pyi @@ -12,17 +12,15 @@ SELECT_TYPE: Incomplete class SelectConnection(BaseConnection): def __init__( self, - parameters: Incomplete | None = None, - on_open_callback: Incomplete | None = None, - on_open_error_callback: Incomplete | None = None, - on_close_callback: Incomplete | None = None, - custom_ioloop: Incomplete | None = None, + parameters=None, + on_open_callback=None, + on_open_error_callback=None, + on_close_callback=None, + custom_ioloop=None, internal_connection_workflow: bool = True, ) -> None: ... 
@classmethod - def create_connection( - cls, connection_configs, on_done, custom_ioloop: Incomplete | None = None, workflow: Incomplete | None = None - ): ... + def create_connection(cls, connection_configs, on_done, custom_ioloop=None, workflow=None): ... class _Timeout: deadline: Incomplete diff --git a/stubs/pika/pika/adapters/utils/io_services_utils.pyi b/stubs/pika/pika/adapters/utils/io_services_utils.pyi index c6f059279d02..3f99e672ee31 100644 --- a/stubs/pika/pika/adapters/utils/io_services_utils.pyi +++ b/stubs/pika/pika/adapters/utils/io_services_utils.pyi @@ -1,5 +1,4 @@ import abc -from _typeshed import Incomplete from pika.adapters.utils.nbio_interface import AbstractIOReference, AbstractStreamTransport @@ -10,9 +9,7 @@ class SocketConnectionMixin: def connect_socket(self, sock, resolved_addr, on_done): ... class StreamingConnectionMixin: - def create_streaming_connection( - self, protocol_factory, sock, on_done, ssl_context: Incomplete | None = None, server_hostname: Incomplete | None = None - ): ... + def create_streaming_connection(self, protocol_factory, sock, on_done, ssl_context=None, server_hostname=None): ... class _AsyncServiceAsyncHandle(AbstractIOReference): def __init__(self, subject) -> None: ... diff --git a/stubs/pika/pika/adapters/utils/nbio_interface.pyi b/stubs/pika/pika/adapters/utils/nbio_interface.pyi index 897e069d8cb0..14c721842d29 100644 --- a/stubs/pika/pika/adapters/utils/nbio_interface.pyi +++ b/stubs/pika/pika/adapters/utils/nbio_interface.pyi @@ -1,5 +1,4 @@ import abc -from _typeshed import Incomplete import pika.compat @@ -21,9 +20,7 @@ class AbstractIOServices(pika.compat.AbstractBase, metaclass=abc.ABCMeta): @abc.abstractmethod def connect_socket(self, sock, resolved_addr, on_done): ... @abc.abstractmethod - def create_streaming_connection( - self, protocol_factory, sock, on_done, ssl_context: Incomplete | None = None, server_hostname: Incomplete | None = None - ): ... + def create_streaming_connection(self, protocol_factory, sock, on_done, ssl_context=None, server_hostname=None): ... class AbstractFileDescriptorServices(pika.compat.AbstractBase, metaclass=abc.ABCMeta): @abc.abstractmethod diff --git a/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi b/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi index 5ecae98b6afc..91dd227fa3ed 100644 --- a/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi +++ b/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi @@ -57,7 +57,7 @@ class SelectorIOServicesAdapter( class _FileDescriptorCallbacks: reader: Incomplete writer: Incomplete - def __init__(self, reader: Incomplete | None = None, writer: Incomplete | None = None) -> None: ... + def __init__(self, reader=None, writer=None) -> None: ... class _TimerHandle(nbio_interface.AbstractTimerReference): def __init__(self, handle, loop) -> None: ... diff --git a/stubs/pika/pika/callback.pyi b/stubs/pika/pika/callback.pyi index a92b13d97830..5acaca3cb5d8 100644 --- a/stubs/pika/pika/callback.pyi +++ b/stubs/pika/pika/callback.pyi @@ -26,7 +26,7 @@ class CallbackManager: callback: Callable[[Incomplete], Incomplete], one_shot: bool = True, only_caller: object | None = None, - arguments: Incomplete | None = None, + arguments=None, ) -> tuple[str | int, str | object]: ... def clear(self) -> None: ... def cleanup(self, prefix: str | int) -> bool: ... 
@@ -37,6 +37,6 @@ class CallbackManager: prefix: str | int, key: str | object, callback_value: Callable[[Incomplete], Incomplete] | None = None, - arguments: Incomplete | None = None, + arguments=None, ) -> Literal[True]: ... def remove_all(self, prefix: str | int, key: str | object) -> None: ... diff --git a/stubs/pika/pika/connection.pyi b/stubs/pika/pika/connection.pyi index 590ae33f7666..689bca9363fe 100644 --- a/stubs/pika/pika/connection.pyi +++ b/stubs/pika/pika/connection.pyi @@ -171,7 +171,7 @@ class Connection(AbstractBase, metaclass=abc.ABCMeta): def channel( self, channel_number: int | None = None, on_open_callback: Callable[[Channel], object] | None = None ) -> Channel: ... - def update_secret(self, new_secret, reason, callback: Incomplete | None = None) -> None: ... + def update_secret(self, new_secret, reason, callback=None) -> None: ... def close(self, reply_code: int = 200, reply_text: str = "Normal shutdown") -> None: ... @property def is_closed(self) -> bool: ... diff --git a/stubs/pika/pika/spec.pyi b/stubs/pika/pika/spec.pyi index d5f4d9383b64..05fd5e8dcaa0 100644 --- a/stubs/pika/pika/spec.pyi +++ b/stubs/pika/pika/spec.pyi @@ -155,13 +155,7 @@ class Connection(Class): reply_text: Incomplete class_id: Incomplete method_id: Incomplete - def __init__( - self, - reply_code: Incomplete | None = None, - reply_text: _str = "", - class_id: Incomplete | None = None, - method_id: Incomplete | None = None, - ) -> None: ... + def __init__(self, reply_code=None, reply_text: _str = "", class_id=None, method_id=None) -> None: ... @property def synchronous(self) -> Literal[True]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... @@ -256,13 +250,7 @@ class Channel(Class): reply_text: Incomplete class_id: Incomplete method_id: Incomplete - def __init__( - self, - reply_code: Incomplete | None = None, - reply_text: _str = "", - class_id: Incomplete | None = None, - method_id: Incomplete | None = None, - ) -> None: ... + def __init__(self, reply_code=None, reply_text: _str = "", class_id=None, method_id=None) -> None: ... @property def synchronous(self) -> Literal[True]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... @@ -327,14 +315,14 @@ class Exchange(Class): def __init__( self, ticket: int = 0, - exchange: Incomplete | None = None, + exchange=None, type=..., passive: bool = False, durable: bool = False, auto_delete: bool = False, internal: bool = False, nowait: bool = False, - arguments: Incomplete | None = None, + arguments=None, ) -> None: ... @property def synchronous(self) -> Literal[True]: ... @@ -355,9 +343,7 @@ class Exchange(Class): exchange: Incomplete if_unused: Incomplete nowait: bool - def __init__( - self, ticket: int = 0, exchange: Incomplete | None = None, if_unused: bool = False, nowait: bool = False - ) -> None: ... + def __init__(self, ticket: int = 0, exchange=None, if_unused: bool = False, nowait: bool = False) -> None: ... @property def synchronous(self) -> Literal[True]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... @@ -380,13 +366,7 @@ class Exchange(Class): nowait: bool arguments: Incomplete | None def __init__( - self, - ticket: int = 0, - destination: Incomplete | None = None, - source: Incomplete | None = None, - routing_key: _str = "", - nowait: bool = False, - arguments: Incomplete | None = None, + self, ticket: int = 0, destination=None, source=None, routing_key: _str = "", nowait: bool = False, arguments=None ) -> None: ... @property def synchronous(self) -> Literal[True]: ... 
@@ -410,13 +390,7 @@ class Exchange(Class): nowait: bool arguments: Incomplete def __init__( - self, - ticket: int = 0, - destination: Incomplete | None = None, - source: Incomplete | None = None, - routing_key: _str = "", - nowait: bool = False, - arguments: Incomplete | None = None, + self, ticket: int = 0, destination=None, source=None, routing_key: _str = "", nowait: bool = False, arguments=None ) -> None: ... @property def synchronous(self) -> Literal[True]: ... @@ -453,7 +427,7 @@ class Queue(Class): exclusive: bool = False, auto_delete: bool = False, nowait: bool = False, - arguments: Incomplete | None = None, + arguments=None, ) -> None: ... @property def synchronous(self) -> Literal[True]: ... @@ -480,13 +454,7 @@ class Queue(Class): nowait: bool arguments: Incomplete def __init__( - self, - ticket: int = 0, - queue: _str = "", - exchange: Incomplete | None = None, - routing_key: _str = "", - nowait: bool = False, - arguments: Incomplete | None = None, + self, ticket: int = 0, queue: _str = "", exchange=None, routing_key: _str = "", nowait: bool = False, arguments=None ) -> None: ... @property def synchronous(self) -> Literal[True]: ... @@ -515,7 +483,7 @@ class Queue(Class): class PurgeOk(Method): INDEX: ClassVar[int] message_count: Incomplete - def __init__(self, message_count: Incomplete | None = None) -> None: ... + def __init__(self, message_count=None) -> None: ... @property def synchronous(self) -> Literal[False]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... @@ -539,7 +507,7 @@ class Queue(Class): class DeleteOk(Method): INDEX: ClassVar[int] message_count: Incomplete - def __init__(self, message_count: Incomplete | None = None) -> None: ... + def __init__(self, message_count=None) -> None: ... @property def synchronous(self) -> Literal[False]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... @@ -552,14 +520,7 @@ class Queue(Class): exchange: Incomplete routing_key: Incomplete arguments: Incomplete - def __init__( - self, - ticket: int = 0, - queue: _str = "", - exchange: Incomplete | None = None, - routing_key: _str = "", - arguments: Incomplete | None = None, - ) -> None: ... + def __init__(self, ticket: int = 0, queue: _str = "", exchange=None, routing_key: _str = "", arguments=None) -> None: ... @property def synchronous(self) -> Literal[True]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... @@ -614,7 +575,7 @@ class Basic(Class): no_ack: bool = False, exclusive: bool = False, nowait: bool = False, - arguments: Incomplete | None = None, + arguments=None, ) -> None: ... @property def synchronous(self) -> Literal[True]: ... @@ -624,7 +585,7 @@ class Basic(Class): class ConsumeOk(Method): INDEX: ClassVar[int] consumer_tag: Incomplete - def __init__(self, consumer_tag: Incomplete | None = None) -> None: ... + def __init__(self, consumer_tag=None) -> None: ... @property def synchronous(self) -> Literal[False]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... @@ -634,7 +595,7 @@ class Basic(Class): INDEX: ClassVar[int] consumer_tag: Incomplete nowait: bool - def __init__(self, consumer_tag: Incomplete | None = None, nowait: bool = False) -> None: ... + def __init__(self, consumer_tag=None, nowait: bool = False) -> None: ... @property def synchronous(self) -> Literal[True]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... 
@@ -643,7 +604,7 @@ class Basic(Class): class CancelOk(Method): INDEX: ClassVar[int] consumer_tag: Incomplete - def __init__(self, consumer_tag: Incomplete | None = None) -> None: ... + def __init__(self, consumer_tag=None) -> None: ... @property def synchronous(self) -> Literal[False]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... @@ -670,13 +631,7 @@ class Basic(Class): reply_text: Incomplete exchange: Incomplete routing_key: Incomplete - def __init__( - self, - reply_code: Incomplete | None = None, - reply_text: _str = "", - exchange: Incomplete | None = None, - routing_key: Incomplete | None = None, - ) -> None: ... + def __init__(self, reply_code=None, reply_text: _str = "", exchange=None, routing_key=None) -> None: ... @property def synchronous(self) -> Literal[False]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... @@ -690,12 +645,7 @@ class Basic(Class): exchange: Incomplete routing_key: Incomplete def __init__( - self, - consumer_tag: Incomplete | None = None, - delivery_tag: Incomplete | None = None, - redelivered: bool = False, - exchange: Incomplete | None = None, - routing_key: Incomplete | None = None, + self, consumer_tag=None, delivery_tag=None, redelivered: bool = False, exchange=None, routing_key=None ) -> None: ... @property def synchronous(self) -> Literal[False]: ... @@ -721,12 +671,7 @@ class Basic(Class): routing_key: Incomplete message_count: Incomplete def __init__( - self, - delivery_tag: Incomplete | None = None, - redelivered: bool = False, - exchange: Incomplete | None = None, - routing_key: Incomplete | None = None, - message_count: Incomplete | None = None, + self, delivery_tag=None, redelivered: bool = False, exchange=None, routing_key=None, message_count=None ) -> None: ... @property def synchronous(self) -> Literal[False]: ... @@ -756,7 +701,7 @@ class Basic(Class): INDEX: ClassVar[int] delivery_tag: Incomplete requeue: bool - def __init__(self, delivery_tag: Incomplete | None = None, requeue: bool = True) -> None: ... + def __init__(self, delivery_tag=None, requeue: bool = True) -> None: ... @property def synchronous(self) -> Literal[False]: ... def decode(self, encoded: bytes, offset: int = 0) -> Self: ... 
@@ -907,12 +852,12 @@ class BasicProperties(Properties): content_encoding: _str | None = None, headers: _ArgumentMapping | None = None, delivery_mode: DeliveryMode | Literal[1, 2] | None = None, - priority: Incomplete | None = None, + priority=None, correlation_id: _str | None = None, reply_to: _str | None = None, expiration: _str | None = None, message_id: _str | None = None, - timestamp: Incomplete | None = None, + timestamp=None, type: _str | None = None, user_id: _str | None = None, app_id: _str | None = None, diff --git a/stubs/protobuf/google/protobuf/descriptor.pyi b/stubs/protobuf/google/protobuf/descriptor.pyi index 45891fda76c5..c1c5fc33823f 100644 --- a/stubs/protobuf/google/protobuf/descriptor.pyi +++ b/stubs/protobuf/google/protobuf/descriptor.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any from .descriptor_pb2 import ( @@ -71,17 +70,17 @@ class Descriptor(_NestedDescriptorBase): nested_types: list[FieldDescriptor], enum_types: list[EnumDescriptor], extensions: list[FieldDescriptor], - options: Incomplete | None = None, - serialized_options: Incomplete | None = None, + options=None, + serialized_options=None, is_extendable: bool | None = True, - extension_ranges: Incomplete | None = None, + extension_ranges=None, oneofs: list[OneofDescriptor] | None = None, file: FileDescriptor | None = None, - serialized_start: Incomplete | None = None, - serialized_end: Incomplete | None = None, + serialized_start=None, + serialized_end=None, syntax: str | None = None, is_map_entry=False, - create_key: Incomplete | None = None, + create_key=None, ): ... def EnumValueName(self, enum, value): ... def CopyToProto(self, proto): ... @@ -264,11 +263,11 @@ class ServiceDescriptor(_NestedDescriptorBase): index: int, methods: list[MethodDescriptor], options: ServiceOptions | None = None, - serialized_options: Incomplete | None = None, + serialized_options=None, file: FileDescriptor | None = None, - serialized_start: Incomplete | None = None, - serialized_end: Incomplete | None = None, - create_key: Incomplete | None = None, + serialized_start=None, + serialized_end=None, + create_key=None, ): ... def FindMethodByName(self, name): ... def CopyToProto(self, proto): ... diff --git a/stubs/protobuf/google/protobuf/descriptor_pool.pyi b/stubs/protobuf/google/protobuf/descriptor_pool.pyi index f555c5d73e5a..bb1d19004e84 100644 --- a/stubs/protobuf/google/protobuf/descriptor_pool.pyi +++ b/stubs/protobuf/google/protobuf/descriptor_pool.pyi @@ -1,9 +1,9 @@ -from _typeshed import Incomplete, Unused +from _typeshed import Unused class DescriptorPool: - def __new__(cls, descriptor_db: Incomplete | None = None): ... + def __new__(cls, descriptor_db=None): ... def __init__( # pyright: ignore[reportInconsistentConstructor] - self, descriptor_db: Incomplete | None = None, use_deprecated_legacy_json_field_conflicts: Unused = False + self, descriptor_db=None, use_deprecated_legacy_json_field_conflicts: Unused = False ) -> None: ... def Add(self, file_desc_proto): ... def AddSerializedFile(self, serialized_file_desc_proto): ... 
diff --git a/stubs/protobuf/google/protobuf/internal/well_known_types.pyi b/stubs/protobuf/google/protobuf/internal/well_known_types.pyi index 5013ea53fc03..4ed2f895c762 100644 --- a/stubs/protobuf/google/protobuf/internal/well_known_types.pyi +++ b/stubs/protobuf/google/protobuf/internal/well_known_types.pyi @@ -9,7 +9,7 @@ from google.protobuf import struct_pb2 class Any: type_url: str value: Incomplete - def Pack(self, msg, type_url_prefix: str = "type.googleapis.com/", deterministic: Incomplete | None = None) -> None: ... + def Pack(self, msg, type_url_prefix: str = "type.googleapis.com/", deterministic=None) -> None: ... def Unpack(self, msg) -> bool: ... def TypeName(self) -> str: ... def Is(self, descriptor) -> bool: ... diff --git a/stubs/psutil/psutil/__init__.pyi b/stubs/psutil/psutil/__init__.pyi index f41380bb9460..9d995532571c 100644 --- a/stubs/psutil/psutil/__init__.pyi +++ b/stubs/psutil/psutil/__init__.pyi @@ -175,7 +175,7 @@ class Process: info: dict[str, Any] def oneshot(self) -> AbstractContextManager[None]: ... def as_dict( - self, attrs: list[str] | tuple[str, ...] | set[str] | frozenset[str] | None = None, ad_value: Incomplete | None = None + self, attrs: list[str] | tuple[str, ...] | set[str] | frozenset[str] | None = None, ad_value=None ) -> dict[str, Any]: ... def parent(self) -> Process | None: ... def parents(self) -> list[Process]: ... @@ -236,7 +236,7 @@ class Popen(Process): def pids() -> list[int]: ... def pid_exists(pid: int) -> bool: ... def process_iter( - attrs: list[str] | tuple[str, ...] | set[str] | frozenset[str] | None = None, ad_value: Incomplete | None = None + attrs: list[str] | tuple[str, ...] | set[str] | frozenset[str] | None = None, ad_value=None ) -> Iterator[Process]: ... def wait_procs( procs: Iterable[Process], timeout: float | None = None, callback: Callable[[Process], object] | None = None diff --git a/stubs/psutil/psutil/_common.pyi b/stubs/psutil/psutil/_common.pyi index 872a329548ff..2c3813b79c1d 100644 --- a/stubs/psutil/psutil/_common.pyi +++ b/stubs/psutil/psutil/_common.pyi @@ -1,5 +1,5 @@ import enum -from _typeshed import Incomplete, StrOrBytesPath, SupportsWrite +from _typeshed import StrOrBytesPath, SupportsWrite from collections.abc import Callable from socket import AF_INET6 as AF_INET6, AddressFamily, SocketKind from typing import Any, Literal, NamedTuple, TypeVar, overload @@ -219,7 +219,7 @@ class NoSuchProcess(Error): pid: Any name: Any msg: Any - def __init__(self, pid, name: Incomplete | None = None, msg: Incomplete | None = None) -> None: ... + def __init__(self, pid, name=None, msg=None) -> None: ... class ZombieProcess(NoSuchProcess): __module__: str @@ -227,23 +227,21 @@ class ZombieProcess(NoSuchProcess): ppid: Any name: Any msg: Any - def __init__( - self, pid, name: Incomplete | None = None, ppid: Incomplete | None = None, msg: Incomplete | None = None - ) -> None: ... + def __init__(self, pid, name=None, ppid=None, msg=None) -> None: ... class AccessDenied(Error): __module__: str pid: Any name: Any msg: Any - def __init__(self, pid: Incomplete | None = None, name: Incomplete | None = None, msg: Incomplete | None = None) -> None: ... + def __init__(self, pid=None, name=None, msg=None) -> None: ... class TimeoutExpired(Error): __module__: str seconds: Any pid: Any name: Any - def __init__(self, seconds, pid: Incomplete | None = None, name: Incomplete | None = None) -> None: ... + def __init__(self, seconds, pid=None, name=None) -> None: ... 
_Func = TypeVar("_Func", bound=Callable[..., Any]) @@ -269,7 +267,7 @@ class _WrapNumbers: reminder_keys: Any def __init__(self) -> None: ... def run(self, input_dict, name): ... - def cache_clear(self, name: Incomplete | None = None) -> None: ... + def cache_clear(self, name=None) -> None: ... def cache_info(self): ... def wrap_numbers(input_dict, name: str): ... diff --git a/stubs/psutil/psutil/_psposix.pyi b/stubs/psutil/psutil/_psposix.pyi index 8b157113e188..46c218915a04 100644 --- a/stubs/psutil/psutil/_psposix.pyi +++ b/stubs/psutil/psutil/_psposix.pyi @@ -1,16 +1,5 @@ -from _typeshed import Incomplete - def pid_exists(pid): ... -def wait_pid( - pid, - timeout: Incomplete | None = None, - proc_name: Incomplete | None = None, - _waitpid=..., - _timer=..., - _min=..., - _sleep=..., - _pid_exists=..., -): ... +def wait_pid(pid, timeout=None, proc_name=None, _waitpid=..., _timer=..., _min=..., _sleep=..., _pid_exists=...): ... def disk_usage(path): ... def get_terminal_map(): ... diff --git a/stubs/psutil/psutil/_pswindows.pyi b/stubs/psutil/psutil/_pswindows.pyi index f04e9e276b68..afd5f49b1a94 100644 --- a/stubs/psutil/psutil/_pswindows.pyi +++ b/stubs/psutil/psutil/_pswindows.pyi @@ -158,7 +158,7 @@ pid_exists: Any ppid_map: Any def is_permission_err(exc): ... -def convert_oserror(exc, pid: Incomplete | None = None, name: Incomplete | None = None): ... +def convert_oserror(exc, pid=None, name=None): ... def wrap_exceptions(fun): ... def retry_error_partial_copy(fun): ... @@ -177,7 +177,7 @@ class Process: def memory_maps(self) -> None: ... def kill(self): ... def send_signal(self, sig) -> None: ... - def wait(self, timeout: Incomplete | None = None): ... + def wait(self, timeout=None): ... def username(self): ... def create_time(self, fast_only: bool = False): ... def num_threads(self): ... diff --git a/stubs/psycopg2/psycopg2/_psycopg.pyi b/stubs/psycopg2/psycopg2/_psycopg.pyi index 1eac9616959a..abb744901c03 100644 --- a/stubs/psycopg2/psycopg2/_psycopg.pyi +++ b/stubs/psycopg2/psycopg2/_psycopg.pyi @@ -600,10 +600,7 @@ def get_wait_callback() -> Incomplete | None: ... def libpq_version() -> int: ... def new_array_type(values: tuple[int, ...], name: str, baseobj: _type) -> _type: ... def new_type( - values: tuple[int, ...], - name: str, - castobj: Callable[[str | bytes | None, cursor], Any] | None = None, - baseobj: Incomplete | None = None, + values: tuple[int, ...], name: str, castobj: Callable[[str | bytes | None, cursor], Any] | None = None, baseobj=None ) -> _type: ... def parse_dsn(dsn: str | bytes) -> dict[str, Any]: ... def quote_ident(ident: str | bytes, scope) -> str: ... diff --git a/stubs/psycopg2/psycopg2/extras.pyi b/stubs/psycopg2/psycopg2/extras.pyi index b3f953cdb3c9..2c0587efa00d 100644 --- a/stubs/psycopg2/psycopg2/extras.pyi +++ b/stubs/psycopg2/psycopg2/extras.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections import OrderedDict from collections.abc import Callable from typing import Any, NamedTuple, TypeVar, overload @@ -62,8 +61,8 @@ class DictConnection(_connection): class DictCursor(DictCursorBase): def __init__(self, *args, **kwargs) -> None: ... index: Any - def execute(self, query, vars: Incomplete | None = None): ... - def callproc(self, procname, vars: Incomplete | None = None): ... + def execute(self, query, vars=None): ... + def callproc(self, procname, vars=None): ... def fetchone(self) -> DictRow | None: ... # type: ignore[override] def fetchmany(self, size: int | None = None) -> list[DictRow]: ... 
# type: ignore[override] def fetchall(self) -> list[DictRow]: ... # type: ignore[override] @@ -76,7 +75,7 @@ class DictRow(list[Any]): def items(self): ... def keys(self): ... def values(self): ... - def get(self, x, default: Incomplete | None = None): ... + def get(self, x, default=None): ... def copy(self): ... def __contains__(self, x): ... def __reduce__(self): ... @@ -107,8 +106,8 @@ class RealDictConnection(_connection): class RealDictCursor(DictCursorBase): def __init__(self, *args, **kwargs) -> None: ... column_mapping: Any - def execute(self, query, vars: Incomplete | None = None): ... - def callproc(self, procname, vars: Incomplete | None = None): ... + def execute(self, query, vars=None): ... + def callproc(self, procname, vars=None): ... def fetchone(self) -> RealDictRow | None: ... # type: ignore[override] def fetchmany(self, size: int | None = None) -> list[RealDictRow]: ... # type: ignore[override] def fetchall(self) -> list[RealDictRow]: ... # type: ignore[override] @@ -144,9 +143,9 @@ class NamedTupleConnection(_connection): class NamedTupleCursor(_cursor): Record: Any MAX_CACHE: int - def execute(self, query, vars: Incomplete | None = None): ... + def execute(self, query, vars=None): ... def executemany(self, query, vars): ... - def callproc(self, procname, vars: Incomplete | None = None): ... + def callproc(self, procname, vars=None): ... def fetchone(self) -> NamedTuple | None: ... def fetchmany(self, size: int | None = None) -> list[NamedTuple]: ... # type: ignore[override] def fetchall(self) -> list[NamedTuple]: ... # type: ignore[override] @@ -159,8 +158,8 @@ class LoggingConnection(_connection): def cursor(self, *args, **kwargs): ... class LoggingCursor(_cursor): - def execute(self, query, vars: Incomplete | None = None): ... - def callproc(self, procname, vars: Incomplete | None = None): ... + def execute(self, query, vars=None): ... + def callproc(self, procname, vars=None): ... class MinTimeLoggingConnection(LoggingConnection): def initialize(self, logobj, mintime: int = 0) -> None: ... @@ -169,8 +168,8 @@ class MinTimeLoggingConnection(LoggingConnection): class MinTimeLoggingCursor(LoggingCursor): timestamp: Any - def execute(self, query, vars: Incomplete | None = None): ... - def callproc(self, procname, vars: Incomplete | None = None): ... + def execute(self, query, vars=None): ... + def callproc(self, procname, vars=None): ... class LogicalReplicationConnection(_replicationConnection): def __init__(self, *args, **kwargs) -> None: ... @@ -181,17 +180,15 @@ class PhysicalReplicationConnection(_replicationConnection): class StopReplication(Exception): ... class ReplicationCursor(_replicationCursor): - def create_replication_slot( - self, slot_name, slot_type: Incomplete | None = None, output_plugin: Incomplete | None = None - ) -> None: ... + def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None) -> None: ... def drop_replication_slot(self, slot_name) -> None: ... def start_replication( self, - slot_name: Incomplete | None = None, - slot_type: Incomplete | None = None, + slot_name=None, + slot_type=None, start_lsn: int = 0, timeline: int = 0, - options: Incomplete | None = None, + options=None, decode: bool = False, status_interval: int = 10, ) -> None: ... @@ -205,7 +202,7 @@ class UUID_adapter: def __conform__(self, proto): ... def getquoted(self): ... -def register_uuid(oids: Incomplete | None = None, conn_or_curs: Incomplete | None = None): ... +def register_uuid(oids=None, conn_or_curs=None): ... 
class Inet: addr: Any @@ -214,7 +211,7 @@ class Inet: def getquoted(self): ... def __conform__(self, proto): ... -def register_inet(oid: Incomplete | None = None, conn_or_curs: Incomplete | None = None): ... +def register_inet(oid=None, conn_or_curs=None): ... def wait_select(conn) -> None: ... class HstoreAdapter: @@ -230,13 +227,7 @@ class HstoreAdapter: @classmethod def get_oids(cls, conn_or_curs): ... -def register_hstore( - conn_or_curs, - globally: bool = False, - unicode: bool = False, - oid: Incomplete | None = None, - array_oid: Incomplete | None = None, -) -> None: ... +def register_hstore(conn_or_curs, globally: bool = False, unicode: bool = False, oid=None, array_oid=None) -> None: ... class CompositeCaster: name: Any @@ -247,12 +238,12 @@ class CompositeCaster: atttypes: Any typecaster: Any array_typecaster: Any - def __init__(self, name, oid, attrs, array_oid: Incomplete | None = None, schema: Incomplete | None = None) -> None: ... + def __init__(self, name, oid, attrs, array_oid=None, schema=None) -> None: ... def parse(self, s, curs): ... def make(self, values): ... @classmethod def tokenize(cls, s): ... -def register_composite(name, conn_or_curs, globally: bool = False, factory: Incomplete | None = None): ... +def register_composite(name, conn_or_curs, globally: bool = False, factory=None): ... def execute_batch(cur, sql, argslist, page_size: int = 100) -> None: ... -def execute_values(cur, sql, argslist, template: Incomplete | None = None, page_size: int = 100, fetch: bool = False): ... +def execute_values(cur, sql, argslist, template=None, page_size: int = 100, fetch: bool = False): ... diff --git a/stubs/psycopg2/psycopg2/pool.pyi b/stubs/psycopg2/psycopg2/pool.pyi index c74257d7294d..c2a4dc47ab5a 100644 --- a/stubs/psycopg2/psycopg2/pool.pyi +++ b/stubs/psycopg2/psycopg2/pool.pyi @@ -1,4 +1,4 @@ -from _typeshed import ConvertibleToInt, Incomplete +from _typeshed import ConvertibleToInt from collections.abc import Hashable import psycopg2 @@ -21,4 +21,4 @@ class SimpleConnectionPool(AbstractConnectionPool): ... class ThreadedConnectionPool(AbstractConnectionPool): # This subclass has a default value for conn which doesn't exist # in the SimpleConnectionPool class, nor in the documentation - def putconn(self, conn: Incomplete | None = None, key: Hashable | None = None, close: bool = False) -> None: ... + def putconn(self, conn=None, key: Hashable | None = None, close: bool = False) -> None: ... 
diff --git a/stubs/pyasn1/pyasn1/codec/ber/decoder.pyi b/stubs/pyasn1/pyasn1/codec/ber/decoder.pyi index ded04121b894..587e25b84992 100644 --- a/stubs/pyasn1/pyasn1/codec/ber/decoder.pyi +++ b/stubs/pyasn1/pyasn1/codec/ber/decoder.pyi @@ -17,7 +17,7 @@ class AbstractPayloadDecoder: asn1Spec, tagSet: TagSet | None = None, length: int | None = None, - state: Incomplete | None = None, + state=None, decodeFun: Callable[..., Incomplete] | None = None, substrateFun: Callable[..., Incomplete] | None = None, **options, @@ -29,7 +29,7 @@ class AbstractPayloadDecoder: asn1Spec, tagSet: TagSet | None = None, length: int | None = None, - state: Incomplete | None = None, + state=None, decodeFun: Callable[..., Incomplete] | None = None, substrateFun: Callable[..., Incomplete] | None = None, **options, @@ -232,7 +232,7 @@ class ChoicePayloadDecoder(AbstractConstructedPayloadDecoder): asn1Spec, tagSet: TagSet | None = None, length: int | None = None, - state: Incomplete | None = None, + state=None, decodeFun: Callable[..., Incomplete] | None = None, substrateFun: Callable[..., Incomplete] | None = None, **options, @@ -243,7 +243,7 @@ class ChoicePayloadDecoder(AbstractConstructedPayloadDecoder): asn1Spec, tagSet: TagSet | None = None, length: int | None = None, - state: Incomplete | None = None, + state=None, decodeFun: Callable[..., Incomplete] | None = None, substrateFun: Callable[..., Incomplete] | None = None, **options, diff --git a/stubs/pyasn1/pyasn1/codec/ber/encoder.pyi b/stubs/pyasn1/pyasn1/codec/ber/encoder.pyi index 04fa55b688c6..58a9193b1a17 100644 --- a/stubs/pyasn1/pyasn1/codec/ber/encoder.pyi +++ b/stubs/pyasn1/pyasn1/codec/ber/encoder.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, Unused +from _typeshed import Unused from abc import abstractmethod from pyasn1.type.base import Asn1Type @@ -14,7 +14,7 @@ class AbstractItemEncoder: def encodeLength(self, length, defMode): ... @abstractmethod def encodeValue(self, value, asn1Spec, encodeFun, **options) -> None: ... - def encode(self, value, asn1Spec: Asn1Type | None = None, encodeFun: Incomplete | None = None, **options): ... + def encode(self, value, asn1Spec: Asn1Type | None = None, encodeFun=None, **options): ... class EndOfOctetsEncoder(AbstractItemEncoder): def encodeValue(self, value, asn1Spec, encodeFun, **options): ... diff --git a/stubs/pyasn1/pyasn1/codec/streaming.pyi b/stubs/pyasn1/pyasn1/codec/streaming.pyi index 9c90e7e06ee6..1752e5cc8a4e 100644 --- a/stubs/pyasn1/pyasn1/codec/streaming.pyi +++ b/stubs/pyasn1/pyasn1/codec/streaming.pyi @@ -18,4 +18,4 @@ class CachingStreamWrapper(io.IOBase): def asSeekableStream(substrate): ... def isEndOfStream(substrate) -> Generator[Incomplete, None, None]: ... def peekIntoStream(substrate, size: int = -1) -> Generator[Incomplete, None, None]: ... -def readFromStream(substrate, size: int = -1, context: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... +def readFromStream(substrate, size: int = -1, context=None) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/python-dateutil/dateutil/rrule.pyi b/stubs/python-dateutil/dateutil/rrule.pyi index f091cfbd6e77..22b7c983505c 100644 --- a/stubs/python-dateutil/dateutil/rrule.pyi +++ b/stubs/python-dateutil/dateutil/rrule.pyi @@ -32,7 +32,7 @@ class rrulebase: def count(self): ... def before(self, dt, inc: bool = False): ... def after(self, dt, inc: bool = False): ... - def xafter(self, dt, count: Incomplete | None = None, inc: bool = False): ... + def xafter(self, dt, count=None, inc: bool = False): ... 
def between(self, after, before, inc: bool = False, count: int = 1): ... class rrule(rrulebase): diff --git a/stubs/python-dateutil/dateutil/tz/tz.pyi b/stubs/python-dateutil/dateutil/tz/tz.pyi index 6addba659e02..c129277343c1 100644 --- a/stubs/python-dateutil/dateutil/tz/tz.pyi +++ b/stubs/python-dateutil/dateutil/tz/tz.pyi @@ -1,5 +1,4 @@ import datetime -from _typeshed import Incomplete from typing import ClassVar, Literal, Protocol, TypeVar from ..relativedelta import relativedelta @@ -99,7 +98,7 @@ class _ICalReader(Protocol): class tzical: def __init__(self, fileobj: str | _ICalReader) -> None: ... def keys(self): ... - def get(self, tzid: Incomplete | None = None): ... + def get(self, tzid=None): ... TZFILES: list[str] TZPATHS: list[str] diff --git a/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi b/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi index 7bd2845a21e5..423e003dc095 100644 --- a/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi +++ b/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi @@ -10,7 +10,7 @@ class ZoneInfoFile: zones: dict[Incomplete, Incomplete] metadata: _MetadataType | None def __init__(self, zonefile_stream: IO[bytes] | None = None) -> None: ... - def get(self, name, default: Incomplete | None = None): ... + def get(self, name, default=None): ... def get_zonefile_instance(new_instance: bool = False) -> ZoneInfoFile: ... def gettz(name): ... diff --git a/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi b/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi index e38459267d91..18e2d1c50b2d 100644 --- a/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi +++ b/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi @@ -1,11 +1,7 @@ -from _typeshed import Incomplete, StrOrBytesPath +from _typeshed import StrOrBytesPath from collections.abc import Sequence from tarfile import TarInfo def rebuild( - filename: StrOrBytesPath, - tag: Incomplete | None = None, - format: str = "gz", - zonegroups: Sequence[str | TarInfo] = [], - metadata: Incomplete | None = None, + filename: StrOrBytesPath, tag=None, format: str = "gz", zonegroups: Sequence[str | TarInfo] = [], metadata=None ) -> None: ... diff --git a/stubs/python-jose/jose/backends/cryptography_backend.pyi b/stubs/python-jose/jose/backends/cryptography_backend.pyi index 3e7e6a9ffc2b..02937a42989e 100644 --- a/stubs/python-jose/jose/backends/cryptography_backend.pyi +++ b/stubs/python-jose/jose/backends/cryptography_backend.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Any, ClassVar from .base import Key @@ -54,10 +53,8 @@ class CryptographyAESKey(Key): IV_BYTE_LENGTH_MODE_MAP: ClassVar[dict[str, int]] def __init__(self, key, algorithm) -> None: ... def to_dict(self): ... - def encrypt(self, plain_text, aad: Incomplete | None = None): ... - def decrypt( - self, cipher_text, iv: Incomplete | None = None, aad: Incomplete | None = None, tag: Incomplete | None = None - ): ... + def encrypt(self, plain_text, aad=None): ... + def decrypt(self, cipher_text, iv=None, aad=None, tag=None): ... def wrap_key(self, key_data): ... def unwrap_key(self, wrapped_key): ... 
diff --git a/stubs/pytz/pytz/lazy.pyi b/stubs/pytz/pytz/lazy.pyi index dc3b309e8a95..00e43d79cd98 100644 --- a/stubs/pytz/pytz/lazy.pyi +++ b/stubs/pytz/pytz/lazy.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Iterator, Mapping as DictMixin from typing import TypeVar @@ -14,8 +13,8 @@ class LazyDict(DictMixin[str, _VT]): class LazyList(list[_T]): # does not return `Self` type: - def __new__(cls, fill_iter: Incomplete | None = None) -> LazyList[_T]: ... + def __new__(cls, fill_iter=None) -> LazyList[_T]: ... class LazySet(set[_T]): # does not return `Self` type: - def __new__(cls, fill_iter: Incomplete | None = None) -> LazySet[_T]: ... + def __new__(cls, fill_iter=None) -> LazySet[_T]: ... diff --git a/stubs/pywin32/win32/lib/win32evtlogutil.pyi b/stubs/pywin32/win32/lib/win32evtlogutil.pyi index b5a74189d8c5..7ab2cff5e46b 100644 --- a/stubs/pywin32/win32/lib/win32evtlogutil.pyi +++ b/stubs/pywin32/win32/lib/win32evtlogutil.pyi @@ -8,12 +8,7 @@ error = win32api.error langid: Incomplete def AddSourceToRegistry( - appName, - msgDLL: Incomplete | None = None, - eventLogType: str = "Application", - eventLogFlags: Incomplete | None = None, - categoryDLL: Incomplete | None = None, - categoryCount: int = 0, + appName, msgDLL=None, eventLogType: str = "Application", eventLogFlags=None, categoryDLL=None, categoryCount: int = 0 ) -> None: ... def RemoveSourceFromRegistry(appName, eventLogType: str = ...) -> None: ... def ReportEvent( diff --git a/stubs/pywin32/win32comext/axscript/client/framework.pyi b/stubs/pywin32/win32comext/axscript/client/framework.pyi index 5a2dd0745612..270eddc67a58 100644 --- a/stubs/pywin32/win32comext/axscript/client/framework.pyi +++ b/stubs/pywin32/win32comext/axscript/client/framework.pyi @@ -13,7 +13,7 @@ def profile(fn, *args): ... class SafeOutput: softspace: int redir: Incomplete - def __init__(self, redir: Incomplete | None = None) -> None: ... + def __init__(self, redir=None) -> None: ... def write(self, message) -> None: ... def flush(self) -> None: ... def close(self) -> None: ... @@ -140,9 +140,9 @@ class COMScript: def Reset(self) -> None: ... def ChangeScriptState(self, state) -> None: ... def ApplyInScriptedSection(self, codeBlock: AXScriptCodeBlock | None, fn, args): ... - def CompileInScriptedSection(self, codeBlock: AXScriptCodeBlock, type, realCode: Incomplete | None = None): ... - def ExecInScriptedSection(self, codeBlock: AXScriptCodeBlock, globals, locals: Incomplete | None = None): ... - def EvalInScriptedSection(self, codeBlock, globals, locals: Incomplete | None = None): ... + def CompileInScriptedSection(self, codeBlock: AXScriptCodeBlock, type, realCode=None): ... + def ExecInScriptedSection(self, codeBlock: AXScriptCodeBlock, globals, locals=None): ... + def EvalInScriptedSection(self, codeBlock, globals, locals=None): ... def HandleException(self, codeBlock: AXScriptCodeBlock | None) -> NoReturn: ... def BeginScriptedSection(self) -> None: ... def EndScriptedSection(self) -> None: ... 
diff --git a/stubs/reportlab/reportlab/graphics/barcode/lto.pyi b/stubs/reportlab/reportlab/graphics/barcode/lto.pyi index 9da4dbe51909..27cfc80368d0 100644 --- a/stubs/reportlab/reportlab/graphics/barcode/lto.pyi +++ b/stubs/reportlab/reportlab/graphics/barcode/lto.pyi @@ -17,13 +17,7 @@ class BaseLTOLabel(Standard39): border: Incomplete label: Incomplete def __init__( - self, - prefix: str = "", - number: Incomplete | None = None, - subtype: str = "1", - border: Incomplete | None = None, - checksum: bool = False, - availheight: Incomplete | None = None, + self, prefix: str = "", number=None, subtype: str = "1", border=None, checksum: bool = False, availheight=None ) -> None: ... def drawOn(self, canvas, x, y) -> None: ... diff --git a/stubs/reportlab/reportlab/graphics/barcode/qr.pyi b/stubs/reportlab/reportlab/graphics/barcode/qr.pyi index 0a06bc5ab0bb..d680aa8b304d 100644 --- a/stubs/reportlab/reportlab/graphics/barcode/qr.pyi +++ b/stubs/reportlab/reportlab/graphics/barcode/qr.pyi @@ -46,7 +46,7 @@ class QrCode(Flowable): qrVersion: Incomplete value: Incomplete qr: Incomplete - def __init__(self, value: Incomplete | None = None, **kw) -> None: ... + def __init__(self, value=None, **kw) -> None: ... def addData(self, value) -> None: ... def draw(self) -> None: ... def rect(self, x, y, w, h) -> None: ... diff --git a/stubs/reportlab/reportlab/graphics/charts/axes.pyi b/stubs/reportlab/reportlab/graphics/charts/axes.pyi index 21fad3a0beda..51274804d9e4 100644 --- a/stubs/reportlab/reportlab/graphics/charts/axes.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/axes.pyi @@ -28,13 +28,13 @@ class TickLU: def __getitem__(self, t): ... class _AxisG(Widget): - def makeGrid(self, g, dim: Incomplete | None = None, parent: Incomplete | None = None, exclude=[]) -> None: ... - def getGridDims(self, start: Incomplete | None = None, end: Incomplete | None = None): ... + def makeGrid(self, g, dim=None, parent=None, exclude=[]) -> None: ... + def getGridDims(self, start=None, end=None): ... @property def isYAxis(self): ... @property def isXAxis(self): ... - def addAnnotations(self, g, A: Incomplete | None = None) -> None: ... + def addAnnotations(self, g, A=None) -> None: ... def draw(self): ... class CALabel(PMVLabel): @@ -75,7 +75,7 @@ class CategoryAxis(_AxisG): hiLLen: int def __init__(self) -> None: ... def setPosition(self, x, y, length) -> None: ... - def configure(self, multiSeries, barWidth: Incomplete | None = None) -> None: ... + def configure(self, multiSeries, barWidth=None) -> None: ... def scale(self, idx): ... def midScale(self, idx): ... @@ -95,7 +95,7 @@ class XCategoryAxis(_XTicks, CategoryAxis): def __init__(self) -> None: ... categoryNames: Incomplete def demo(self): ... - def joinToAxis(self, yAxis, mode: str = "bottom", pos: Incomplete | None = None) -> None: ... + def joinToAxis(self, yAxis, mode: str = "bottom", pos=None) -> None: ... def loScale(self, idx): ... def makeAxis(self): ... def makeTickLabels(self): ... @@ -106,7 +106,7 @@ class YCategoryAxis(_YTicks, CategoryAxis): def __init__(self) -> None: ... categoryNames: Incomplete def demo(self): ... - def joinToAxis(self, xAxis, mode: str = "left", pos: Incomplete | None = None) -> None: ... + def joinToAxis(self, xAxis, mode: str = "left", pos=None) -> None: ... def loScale(self, idx): ... def makeAxis(self): ... def makeTickLabels(self): ... @@ -129,7 +129,7 @@ class XValueAxis(_XTicks, ValueAxis): joinAxisPos: Incomplete def __init__(self, **kw) -> None: ... def demo(self): ... 
- def joinToAxis(self, yAxis, mode: str = "bottom", pos: Incomplete | None = None) -> None: ... + def joinToAxis(self, yAxis, mode: str = "bottom", pos=None) -> None: ... def makeAxis(self): ... def parseDayAndMonth(dmstr): ... @@ -163,7 +163,7 @@ class YValueAxis(_YTicks, ValueAxis): joinAxisPos: Incomplete def __init__(self) -> None: ... def demo(self): ... - def joinToAxis(self, xAxis, mode: str = "left", pos: Incomplete | None = None) -> None: ... + def joinToAxis(self, xAxis, mode: str = "left", pos=None) -> None: ... def makeAxis(self): ... class TimeValueAxis: diff --git a/stubs/reportlab/reportlab/graphics/charts/barcharts.pyi b/stubs/reportlab/reportlab/graphics/charts/barcharts.pyi index 93ebae5a747e..303ca4bc20c1 100644 --- a/stubs/reportlab/reportlab/graphics/charts/barcharts.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/barcharts.pyi @@ -17,7 +17,7 @@ class BarChartProperties(PropHolder): class BarChart(PlotArea): def makeSwatchSample(self, rowNo, x, y, width, height): ... - def getSeriesName(self, i, default: Incomplete | None = None): ... + def getSeriesName(self, i, default=None): ... categoryAxis: Incomplete valueAxis: Incomplete barSpacing: int @@ -43,7 +43,7 @@ class VerticalBarChart(BarChart): ... class HorizontalBarChart(BarChart): ... class _FakeGroup: - def __init__(self, cmp: Incomplete | None = None) -> None: ... + def __init__(self, cmp=None) -> None: ... def add(self, what) -> None: ... def value(self): ... def sort(self) -> None: ... diff --git a/stubs/reportlab/reportlab/graphics/charts/dotbox.pyi b/stubs/reportlab/reportlab/graphics/charts/dotbox.pyi index 624ebef56031..d67fdfe982b9 100644 --- a/stubs/reportlab/reportlab/graphics/charts/dotbox.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/dotbox.pyi @@ -20,5 +20,5 @@ class DotBox(Widget): x: int y: int def __init__(self) -> None: ... - def demo(self, drawing: Incomplete | None = None): ... + def demo(self, drawing=None): ... def draw(self): ... diff --git a/stubs/reportlab/reportlab/graphics/charts/doughnut.pyi b/stubs/reportlab/reportlab/graphics/charts/doughnut.pyi index a25351583a30..414bb8d5ae39 100644 --- a/stubs/reportlab/reportlab/graphics/charts/doughnut.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/doughnut.pyi @@ -25,7 +25,7 @@ class Doughnut(AbstractPieChart): angleRange: int def __init__(self, *, angleRange: int = 360, **kwds) -> None: ... def demo(self): ... - def normalizeData(self, data: Incomplete | None = None): ... + def normalizeData(self, data=None): ... def makeSectors(self): ... def draw(self): ... diff --git a/stubs/reportlab/reportlab/graphics/charts/legends.pyi b/stubs/reportlab/reportlab/graphics/charts/legends.pyi index 012ba70bf36a..ec3b453fba10 100644 --- a/stubs/reportlab/reportlab/graphics/charts/legends.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/legends.pyi @@ -76,7 +76,7 @@ class TotalAnnotator(LegendColEndCallout): fillColor=..., strokeWidth: float = 0.5, strokeColor=..., - strokeDashArray: Incomplete | None = None, + strokeDashArray=None, dx: int = 0, dy: int = 0, dly: int = 0, diff --git a/stubs/reportlab/reportlab/graphics/charts/linecharts.pyi b/stubs/reportlab/reportlab/graphics/charts/linecharts.pyi index 9d7034c31d33..16d1c4bf69e7 100644 --- a/stubs/reportlab/reportlab/graphics/charts/linecharts.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/linecharts.pyi @@ -11,7 +11,7 @@ class LineChartProperties(PropHolder): ... class AbstractLineChart(PlotArea): def makeSwatchSample(self, rowNo, x, y, width, height): ... 
- def getSeriesName(self, i, default: Incomplete | None = None): ... + def getSeriesName(self, i, default=None): ... class LineChart(AbstractLineChart): ... diff --git a/stubs/reportlab/reportlab/graphics/charts/lineplots.pyi b/stubs/reportlab/reportlab/graphics/charts/lineplots.pyi index 436e7b7d5534..9c7a0009bf8e 100644 --- a/stubs/reportlab/reportlab/graphics/charts/lineplots.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/lineplots.pyi @@ -15,7 +15,7 @@ class LinePlotProperties(PropHolder): ... class InFillValue(int): yValue: Incomplete - def __new__(cls, v, yValue: Incomplete | None = None): ... + def __new__(cls, v, yValue=None): ... class Shader(_SetKeyWordArgs): def shade(self, lp, g, rowNo, rowColor, row) -> None: ... @@ -74,7 +74,7 @@ class GridLinePlot(SimpleTimeSeriesPlot): scaleFactor: Incomplete background: Incomplete def __init__(self) -> None: ... - def demo(self, drawing: Incomplete | None = None): ... + def demo(self, drawing=None): ... def draw(self): ... class AreaLinePlot(LinePlot): @@ -108,7 +108,7 @@ class ScatterPlot(LinePlot): lineLabelFormat: str lineLabelNudge: int def __init__(self) -> None: ... - def demo(self, drawing: Incomplete | None = None): ... + def demo(self, drawing=None): ... def draw(self): ... def sample1a(): ... diff --git a/stubs/reportlab/reportlab/graphics/charts/piecharts.pyi b/stubs/reportlab/reportlab/graphics/charts/piecharts.pyi index c2be4eacf55a..aa15c8d58846 100644 --- a/stubs/reportlab/reportlab/graphics/charts/piecharts.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/piecharts.pyi @@ -50,7 +50,7 @@ class WedgeProperties(PropHolder): class AbstractPieChart(PlotArea): def makeSwatchSample(self, rowNo, x, y, width, height): ... - def getSeriesName(self, i, default: Incomplete | None = None): ... + def getSeriesName(self, i, default=None): ... def boundsOverlap(P, Q): ... def findOverlapRun(B, wrap: int = 1): ... @@ -115,7 +115,7 @@ class LegendedPie(Pie): drawLegend: int def __init__(self) -> None: ... def draw(self): ... - def demo(self, drawing: Incomplete | None = None): ... + def demo(self, drawing=None): ... class Wedge3dProperties(PropHolder): strokeWidth: int diff --git a/stubs/reportlab/reportlab/graphics/charts/slidebox.pyi b/stubs/reportlab/reportlab/graphics/charts/slidebox.pyi index 84787d4fdec5..5af8e999ae03 100644 --- a/stubs/reportlab/reportlab/graphics/charts/slidebox.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/slidebox.pyi @@ -34,5 +34,5 @@ class SlideBox(Widget): sourceLabelFontSize: int sourceLabelFillColor: Incomplete def __init__(self) -> None: ... - def demo(self, drawing: Incomplete | None = None): ... + def demo(self, drawing=None): ... def draw(self): ... diff --git a/stubs/reportlab/reportlab/graphics/charts/spider.pyi b/stubs/reportlab/reportlab/graphics/charts/spider.pyi index 78b466069435..e66e11bb7e8a 100644 --- a/stubs/reportlab/reportlab/graphics/charts/spider.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/spider.pyi @@ -37,7 +37,7 @@ class StrandLabel(SpokeLabel): class SpiderChart(PlotArea): def makeSwatchSample(self, rowNo, x, y, width, height): ... - def getSeriesName(self, i, default: Incomplete | None = None): ... + def getSeriesName(self, i, default=None): ... 
data: Incomplete labels: Incomplete startAngle: int diff --git a/stubs/reportlab/reportlab/graphics/charts/utils.pyi b/stubs/reportlab/reportlab/graphics/charts/utils.pyi index f8a7c02a6f23..607d3b15a387 100644 --- a/stubs/reportlab/reportlab/graphics/charts/utils.pyi +++ b/stubs/reportlab/reportlab/graphics/charts/utils.pyi @@ -8,10 +8,8 @@ def str2seconds(timeString): ... def seconds2str(seconds): ... def nextRoundNumber(x): ... def find_interval(lo, hi, I: int = 5): ... -def find_good_grid(lower, upper, n=(4, 5, 6, 7, 8, 9), grid: Incomplete | None = None): ... -def ticks( - lower, upper, n=(4, 5, 6, 7, 8, 9), split: int = 1, percent: int = 0, grid: Incomplete | None = None, labelVOffset: int = 0 -): ... +def find_good_grid(lower, upper, n=(4, 5, 6, 7, 8, 9), grid=None): ... +def ticks(lower, upper, n=(4, 5, 6, 7, 8, 9), split: int = 1, percent: int = 0, grid=None, labelVOffset: int = 0): ... def findNones(data): ... def pairFixNones(pairs): ... def maverage(data, n: int = 6): ... @@ -35,9 +33,7 @@ class DrawTimeCollector: def xyDist(xxx_todo_changeme, xxx_todo_changeme1): ... def lineSegmentIntersect(xxx_todo_changeme2, xxx_todo_changeme3, xxx_todo_changeme4, xxx_todo_changeme5): ... -def makeCircularString( - x, y, radius, angle, text, fontName, fontSize, inside: int = 0, G: Incomplete | None = None, textAnchor: str = "start" -): ... +def makeCircularString(x, y, radius, angle, text, fontName, fontSize, inside: int = 0, G=None, textAnchor: str = "start"): ... class CustomDrawChanger: store: Incomplete diff --git a/stubs/reportlab/reportlab/graphics/renderPM.pyi b/stubs/reportlab/reportlab/graphics/renderPM.pyi index 75d6b2ea26fc..68833ea9da95 100644 --- a/stubs/reportlab/reportlab/graphics/renderPM.pyi +++ b/stubs/reportlab/reportlab/graphics/renderPM.pyi @@ -32,19 +32,12 @@ BEZIER_ARC_MAGIC: float class PMCanvas: ctm: Incomplete def __init__( - self, - w, - h, - dpi: int = 72, - bg: int = 16777215, - configPIL: Incomplete | None = None, - backend: Incomplete | None = None, - backendFmt: str = "RGB", + self, w, h, dpi: int = 72, bg: int = 16777215, configPIL=None, backend=None, backendFmt: str = "RGB" ) -> None: ... def toPIL(self): ... - def saveToFile(self, fn, fmt: Incomplete | None = None): ... + def saveToFile(self, fn, fmt=None): ... def saveToString(self, fmt: str = "GIF"): ... - def setFont(self, fontName, fontSize, leading: Incomplete | None = None) -> None: ... + def setFont(self, fontName, fontSize, leading=None) -> None: ... def __setattr__(self, name, value) -> None: ... def __getattr__(self, name): ... def fillstrokepath(self, stroke: int = 1, fill: int = 1) -> None: ... @@ -59,7 +52,7 @@ class PMCanvas: x: float, y: float, text: str, - _fontInfo: Incomplete | None = None, + _fontInfo=None, text_anchor: str = "left", direction: str | None = None, shaping: bool = False, @@ -83,13 +76,13 @@ class PMCanvas: def setLineJoin(self, join) -> None: ... strokeWidth: Incomplete def setLineWidth(self, width) -> None: ... - def stringWidth(self, text, fontName: Incomplete | None = None, fontSize: Incomplete | None = None): ... + def stringWidth(self, text, fontName=None, fontSize=None): ... 
def drawToPMCanvas( d: Drawing, dpi: float = 72, bg: int = 0xFFFFFF, - configPIL: Incomplete | None = None, + configPIL=None, showBoundary=..., backend="rlPyCairo", backendFmt: str = "RGB", @@ -98,7 +91,7 @@ def drawToPIL( d: Drawing, dpi: float = 72, bg: int = 0xFFFFFF, - configPIL: Incomplete | None = None, + configPIL=None, showBoundary=..., backend="rlPyCairo", backendFmt: str = "RGB", @@ -107,7 +100,7 @@ def drawToPILP( d: Drawing, dpi: float = 72, bg: int = 0xFFFFFF, - configPIL: Incomplete | None = None, + configPIL=None, showBoundary=..., backend="rlPyCairo", backendFmt: str = "RGB", @@ -118,7 +111,7 @@ def drawToFile( fmt: str = "GIF", dpi: float = 72, bg: int = 0xFFFFFF, - configPIL: Incomplete | None = None, + configPIL=None, showBoundary=..., backend="rlPyCairo", backendFmt: str = "RGB", @@ -128,7 +121,7 @@ def drawToString( fmt: str = "GIF", dpi: float = 72, bg: int = 0xFFFFFF, - configPIL: Incomplete | None = None, + configPIL=None, showBoundary=..., backend="rlPyCairo", backendFmt: str = "RGB", diff --git a/stubs/reportlab/reportlab/graphics/renderPS.pyi b/stubs/reportlab/reportlab/graphics/renderPS.pyi index 105d7879168c..f7e47c5af40e 100644 --- a/stubs/reportlab/reportlab/graphics/renderPS.pyi +++ b/stubs/reportlab/reportlab/graphics/renderPS.pyi @@ -15,12 +15,12 @@ class PSCanvas: PostScriptLevel: Incomplete def __init__(self, size=(300, 300), PostScriptLevel: int = 2) -> None: ... def comment(self, msg) -> None: ... - def drawImage(self, image, x1, y1, width: Incomplete | None = None, height: Incomplete | None = None) -> None: ... + def drawImage(self, image, x1, y1, width=None, height=None) -> None: ... def clear(self) -> None: ... - def save(self, f: Incomplete | None = None) -> None: ... + def save(self, f=None) -> None: ... def saveState(self) -> None: ... def restoreState(self) -> None: ... - def stringWidth(self, s, font: Incomplete | None = None, fontSize: Incomplete | None = None): ... + def stringWidth(self, s, font=None, fontSize=None): ... def setLineCap(self, v) -> None: ... def setLineJoin(self, v) -> None: ... def setDash(self, array=[], phase: int = 0) -> None: ... @@ -29,7 +29,7 @@ class PSCanvas: def setFillColor(self, color) -> None: ... def setFillMode(self, v) -> None: ... def setLineWidth(self, width) -> None: ... - def setFont(self, font, fontSize, leading: Incomplete | None = None) -> None: ... + def setFont(self, font, fontSize, leading=None) -> None: ... def line(self, x1, y1, x2, y2) -> None: ... def drawString(self, x, y, s, angle: int = 0, text_anchor: str = "left", textRenderMode: int = 0) -> None: ... def drawCentredString(self, x, y, text, text_anchor: str = "middle", textRenderMode: int = 0) -> None: ... @@ -41,7 +41,7 @@ class PSCanvas: def circle(self, xc, yc, r) -> None: ... def drawArc(self, x1, y1, x2, y2, startAng: int = 0, extent: int = 360, fromcenter: int = 0) -> None: ... def polygon(self, p, closed: int = 0, stroke: int = 1, fill: int = 1) -> None: ... - def lines(self, lineList, color: Incomplete | None = None, width: Incomplete | None = None) -> None: ... + def lines(self, lineList, color=None, width=None) -> None: ... def moveTo(self, x, y) -> None: ... def lineTo(self, x, y) -> None: ... def curveTo(self, x1, y1, x2, y2, x3, y3) -> None: ... @@ -64,7 +64,7 @@ class _PSRenderer(Renderer): def drawEllipse(self, ellipse) -> None: ... def drawPolygon(self, p) -> None: ... def drawString(self, stringObj) -> None: ... - def drawPath(self, path, fillMode: Incomplete | None = None): ... + def drawPath(self, path, fillMode=None): ... 
def applyStateChanges(self, delta, newState) -> None: ... def drawImage(self, image) -> None: ... diff --git a/stubs/reportlab/reportlab/graphics/renderSVG.pyi b/stubs/reportlab/reportlab/graphics/renderSVG.pyi index b687ee9a86ca..b9611dad4138 100644 --- a/stubs/reportlab/reportlab/graphics/renderSVG.pyi +++ b/stubs/reportlab/reportlab/graphics/renderSVG.pyi @@ -16,7 +16,7 @@ EXTRA_FILL_STYLES: Final[Sequence[str]] def drawToString(d: Drawing, showBoundary=0, **kwds) -> str: ... def drawToFile(d: Drawing, fn: str | IO[str], showBoundary=0, **kwds) -> None: ... def draw(drawing: Drawing, canvas: Canvas, x: float = 0, y: float = 0, showBoundary=0) -> None: ... -def transformNode(doc, newTag, node: Incomplete | None = None, **attrDict): ... +def transformNode(doc, newTag, node=None, **attrDict): ... class EncodedWriter(list[Incomplete]): BOMS: Incomplete @@ -44,8 +44,8 @@ class SVGCanvas: scaleTree: Incomplete currGroup: Incomplete def __init__(self, size=(300, 300), encoding: str = "utf-8", verbose: int = 0, bom: bool = False, **kwds) -> None: ... - def save(self, fn: Incomplete | None = None) -> None: ... - def NOTUSED_stringWidth(self, s, font: Incomplete | None = None, fontSize: Incomplete | None = None): ... + def save(self, fn=None) -> None: ... + def NOTUSED_stringWidth(self, s, font=None, fontSize=None): ... def setLineCap(self, v) -> None: ... def setLineJoin(self, v) -> None: ... def setDash(self, array=[], phase: int = 0) -> None: ... @@ -54,50 +54,26 @@ class SVGCanvas: def setFillMode(self, v) -> None: ... def setLineWidth(self, width) -> None: ... def setFont(self, font, fontSize) -> None: ... - def rect(self, x1, y1, x2, y2, rx: int = 8, ry: int = 8, link_info: Incomplete | None = None, **_svgAttrs) -> None: ... - def roundRect(self, x1, y1, x2, y2, rx: int = 8, ry: int = 8, link_info: Incomplete | None = None, **_svgAttrs) -> None: ... + def rect(self, x1, y1, x2, y2, rx: int = 8, ry: int = 8, link_info=None, **_svgAttrs) -> None: ... + def roundRect(self, x1, y1, x2, y2, rx: int = 8, ry: int = 8, link_info=None, **_svgAttrs) -> None: ... def drawString( - self, - s, - x, - y, - angle: int = 0, - link_info: Incomplete | None = None, - text_anchor: str = "left", - textRenderMode: int = 0, - **_svgAttrs, + self, s, x, y, angle: int = 0, link_info=None, text_anchor: str = "left", textRenderMode: int = 0, **_svgAttrs ) -> None: ... def drawCentredString( - self, - s, - x, - y, - angle: int = 0, - text_anchor: str = "middle", - link_info: Incomplete | None = None, - textRenderMode: int = 0, - **_svgAttrs, + self, s, x, y, angle: int = 0, text_anchor: str = "middle", link_info=None, textRenderMode: int = 0, **_svgAttrs ) -> None: ... def drawRightString( - self, - text, - x, - y, - angle: int = 0, - text_anchor: str = "end", - link_info: Incomplete | None = None, - textRenderMode: int = 0, - **_svgAttrs, + self, text, x, y, angle: int = 0, text_anchor: str = "end", link_info=None, textRenderMode: int = 0, **_svgAttrs ) -> None: ... def comment(self, data) -> None: ... def drawImage(self, image, x, y, width, height, embed: bool = True) -> None: ... def line(self, x1, y1, x2, y2) -> None: ... - def ellipse(self, x1, y1, x2, y2, link_info: Incomplete | None = None) -> None: ... - def circle(self, xc, yc, r, link_info: Incomplete | None = None) -> None: ... + def ellipse(self, x1, y1, x2, y2, link_info=None) -> None: ... + def circle(self, xc, yc, r, link_info=None) -> None: ... def drawCurve(self, x1, y1, x2, y2, x3, y3, x4, y4, closed: int = 0) -> None: ... 
def drawArc(self, x1, y1, x2, y2, startAng: int = 0, extent: int = 360, fromcenter: int = 0) -> None: ... - def polygon(self, points, closed: int = 0, link_info: Incomplete | None = None) -> None: ... - def lines(self, lineList, color: Incomplete | None = None, width: Incomplete | None = None) -> None: ... + def polygon(self, points, closed: int = 0, link_info=None) -> None: ... + def lines(self, lineList, color=None, width=None) -> None: ... def polyLine(self, points) -> None: ... def startGroup(self, attrDict={"transform": ""}): ... def endGroup(self, currGroup) -> None: ... diff --git a/stubs/reportlab/reportlab/graphics/renderbase.pyi b/stubs/reportlab/reportlab/graphics/renderbase.pyi index bd2c377b27d9..e9d04c948872 100644 --- a/stubs/reportlab/reportlab/graphics/renderbase.pyi +++ b/stubs/reportlab/reportlab/graphics/renderbase.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Final __version__: Final[str] @@ -6,7 +5,7 @@ __version__: Final[str] def getStateDelta(shape): ... class StateTracker: - def __init__(self, defaults: Incomplete | None = None, defaultObj: Incomplete | None = None) -> None: ... + def __init__(self, defaults=None, defaultObj=None) -> None: ... def push(self, delta) -> None: ... def pop(self): ... def getState(self): ... diff --git a/stubs/reportlab/reportlab/graphics/svgpath.pyi b/stubs/reportlab/reportlab/graphics/svgpath.pyi index 89464473a29e..1c8f62625f79 100644 --- a/stubs/reportlab/reportlab/graphics/svgpath.pyi +++ b/stubs/reportlab/reportlab/graphics/svgpath.pyi @@ -4,7 +4,7 @@ from .shapes import Path, UserNode class SvgPath(Path, UserNode): fillColor: Incomplete - def __init__(self, s, isClipPath: int = 0, autoclose: Incomplete | None = None, fillMode=0, **kw) -> None: ... + def __init__(self, s, isClipPath: int = 0, autoclose=None, fillMode=0, **kw) -> None: ... def provideNode(self): ... __all__ = ("SvgPath",) diff --git a/stubs/reportlab/reportlab/graphics/utils.pyi b/stubs/reportlab/reportlab/graphics/utils.pyi index 5853d4943246..af61914c0058 100644 --- a/stubs/reportlab/reportlab/graphics/utils.pyi +++ b/stubs/reportlab/reportlab/graphics/utils.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - class RenderPMError(Exception): ... def setFont(gs, fontName, fontSize) -> None: ... @@ -13,7 +11,7 @@ def text2Path( anchor: str = "start", truncate: int = 1, pathReverse: int = 0, - gs: Incomplete | None = None, + gs=None, **kwds, ): ... diff --git a/stubs/reportlab/reportlab/graphics/widgetbase.pyi b/stubs/reportlab/reportlab/graphics/widgetbase.pyi index 9ecbd223fc79..e02d3857b65e 100644 --- a/stubs/reportlab/reportlab/graphics/widgetbase.pyi +++ b/stubs/reportlab/reportlab/graphics/widgetbase.pyi @@ -26,7 +26,7 @@ class ScaleWidget(Widget): y: Incomplete contents: Incomplete scale: Incomplete - def __init__(self, x: int = 0, y: int = 0, scale: float = 1.0, contents: Incomplete | None = None) -> None: ... + def __init__(self, x: int = 0, y: int = 0, scale: float = 1.0, contents=None) -> None: ... def draw(self): ... class CloneMixin: @@ -43,7 +43,7 @@ class TypedPropertyCollection(PropHolder): def setVector(self, **kw) -> None: ... def __getattr__(self, name): ... def __setattr__(self, name, value): ... - def checkAttr(self, key, a, default: Incomplete | None = None): ... + def checkAttr(self, key, a, default=None): ... def tpcGetItem(obj, x): ... def isWKlass(obj): ... @@ -81,7 +81,7 @@ class Sizer(Widget): fillColor: Incomplete strokeColor: Incomplete def __init__(self, *elements) -> None: ... 
- def add(self, node, name: Incomplete | None = None) -> None: ... + def add(self, node, name=None) -> None: ... def getBounds(self): ... def draw(self): ... diff --git a/stubs/reportlab/reportlab/graphics/widgets/grids.pyi b/stubs/reportlab/reportlab/graphics/widgets/grids.pyi index 1664fa63de61..3a3a249301e6 100644 --- a/stubs/reportlab/reportlab/graphics/widgets/grids.pyi +++ b/stubs/reportlab/reportlab/graphics/widgets/grids.pyi @@ -6,7 +6,7 @@ from reportlab.graphics.widgetbase import Widget __version__: Final[str] -def frange(start, end: Incomplete | None = None, inc: Incomplete | None = None): ... +def frange(start, end=None, inc=None): ... def makeDistancesList(list): ... class Grid(Widget): diff --git a/stubs/reportlab/reportlab/lib/PyFontify.pyi b/stubs/reportlab/reportlab/lib/PyFontify.pyi index f25d70340d16..c1bcc0d67e05 100644 --- a/stubs/reportlab/reportlab/lib/PyFontify.pyi +++ b/stubs/reportlab/reportlab/lib/PyFontify.pyi @@ -17,5 +17,5 @@ matchRE: Incomplete idKeyPat: str idRE: Incomplete -def fontify(pytext, searchfrom: int = 0, searchto: Incomplete | None = None): ... +def fontify(pytext, searchfrom: int = 0, searchto=None): ... def test(path) -> None: ... diff --git a/stubs/reportlab/reportlab/lib/attrmap.pyi b/stubs/reportlab/reportlab/lib/attrmap.pyi index d39e92fb3a84..ac586aadc7f1 100644 --- a/stubs/reportlab/reportlab/lib/attrmap.pyi +++ b/stubs/reportlab/reportlab/lib/attrmap.pyi @@ -13,28 +13,14 @@ class CallableValue: class AttrMapValue: validate: Incomplete desc: Incomplete - def __init__( - self, - validate: Incomplete | None = None, - desc: Incomplete | None = None, - initial: Incomplete | None = None, - advancedUsage: int = 0, - **kw, - ) -> None: ... + def __init__(self, validate=None, desc=None, initial=None, advancedUsage: int = 0, **kw) -> None: ... def __getattr__(self, name): ... class AttrMap(dict[str, AttrMapValue]): - def __init__(self, BASE: Incomplete | None = None, UNWANTED=[], **kw) -> None: ... + def __init__(self, BASE=None, UNWANTED=[], **kw) -> None: ... def remove(self, unwanted) -> None: ... def clone(self, UNWANTED=[], **kw): ... def validateSetattr(obj, name, value) -> None: ... def hook__setattr__(obj): ... -def addProxyAttribute( - src, - name, - validate: Incomplete | None = None, - desc: Incomplete | None = None, - initial: Incomplete | None = None, - dst: Incomplete | None = None, -) -> None: ... +def addProxyAttribute(src, name, validate=None, desc=None, initial=None, dst=None) -> None: ... diff --git a/stubs/reportlab/reportlab/lib/fontfinder.pyi b/stubs/reportlab/reportlab/lib/fontfinder.pyi index 4fbe901e9f03..05370969b197 100644 --- a/stubs/reportlab/reportlab/lib/fontfinder.pyi +++ b/stubs/reportlab/reportlab/lib/fontfinder.pyi @@ -37,16 +37,10 @@ class FontFinder: validate: Incomplete verbose: Incomplete def __init__( - self, - dirs=[], - useCache: bool = True, - validate: bool = False, - recur: bool = False, - fsEncoding: Incomplete | None = None, - verbose: int = 0, + self, dirs=[], useCache: bool = True, validate: bool = False, recur: bool = False, fsEncoding=None, verbose: int = 0 ) -> None: ... - def addDirectory(self, dirName, recur: Incomplete | None = None) -> None: ... - def addDirectories(self, dirNames, recur: Incomplete | None = None) -> None: ... + def addDirectory(self, dirName, recur=None) -> None: ... + def addDirectories(self, dirNames, recur=None) -> None: ... def getFamilyNames(self): ... def getFontsInFamily(self, familyName): ... def getFamilyXmlReport(self): ... 
diff --git a/stubs/reportlab/reportlab/lib/formatters.pyi b/stubs/reportlab/reportlab/lib/formatters.pyi index 31f215719e2c..c88692bc1331 100644 --- a/stubs/reportlab/reportlab/lib/formatters.pyi +++ b/stubs/reportlab/reportlab/lib/formatters.pyi @@ -13,14 +13,7 @@ class DecimalFormatter(Formatter): comma: Incomplete prefix: Incomplete suffix: Incomplete - def __init__( - self, - places: int = 2, - decimalSep: str = ".", - thousandSep: Incomplete | None = None, - prefix: Incomplete | None = None, - suffix: Incomplete | None = None, - ) -> None: ... + def __init__(self, places: int = 2, decimalSep: str = ".", thousandSep=None, prefix=None, suffix=None) -> None: ... def format(self, num): ... __all__ = ("Formatter", "DecimalFormatter") diff --git a/stubs/reportlab/reportlab/lib/normalDate.pyi b/stubs/reportlab/reportlab/lib/normalDate.pyi index f02c70d1c911..0d26498cb29e 100644 --- a/stubs/reportlab/reportlab/lib/normalDate.pyi +++ b/stubs/reportlab/reportlab/lib/normalDate.pyi @@ -12,7 +12,7 @@ def isLeapYear(year): ... class NormalDateException(Exception): ... class NormalDate: - def __init__(self, normalDate: Incomplete | None = None) -> None: ... + def __init__(self, normalDate=None) -> None: ... def add(self, days) -> None: ... def __add__(self, days): ... def __radd__(self, days): ... diff --git a/stubs/reportlab/reportlab/lib/pdfencrypt.pyi b/stubs/reportlab/reportlab/lib/pdfencrypt.pyi index b5819145e288..d81d451618ec 100644 --- a/stubs/reportlab/reportlab/lib/pdfencrypt.pyi +++ b/stubs/reportlab/reportlab/lib/pdfencrypt.pyi @@ -35,12 +35,12 @@ class StandardEncryption: def __init__( self, userPassword, - ownerPassword: Incomplete | None = None, + ownerPassword=None, canPrint: int = 1, canModify: int = 1, canCopy: int = 1, canAnnotate: int = 1, - strength: Incomplete | None = None, + strength=None, ) -> None: ... def setAllPermissions(self, value) -> None: ... def permissionBits(self): ... @@ -52,7 +52,7 @@ class StandardEncryption: OE: Incomplete Perms: Incomplete objnum: Incomplete - def prepare(self, document, overrideID: Incomplete | None = None) -> None: ... + def prepare(self, document, overrideID=None) -> None: ... version: Incomplete def register(self, objnum, version) -> None: ... def info(self): ... @@ -71,22 +71,22 @@ def unHexText(hexText): ... PadString: Incomplete def checkRevision(revision): ... -def encryptionkey(password, OwnerKey, Permissions, FileId1, revision: Incomplete | None = None): ... +def encryptionkey(password, OwnerKey, Permissions, FileId1, revision=None): ... def computeO(userPassword, ownerPassword, revision): ... def computeU( encryptionkey, encodestring=b"(\xbfN^Nu\x8aAd\x00NV\xff\xfa\x01\x08..\x00\xb6\xd0h>\x80/\x0c\xa9\xfedSiz", - revision: Incomplete | None = None, - documentId: Incomplete | None = None, + revision=None, + documentId=None, ): ... def checkU(encryptionkey, U) -> None: ... -def encodePDF(key, objectNumber, generationNumber, string, revision: Incomplete | None = None): ... +def encodePDF(key, objectNumber, generationNumber, string, revision=None): ... def equalityCheck(observed, expected, label) -> None: ... def test() -> None: ... 
def encryptCanvas( canvas, userPassword, - ownerPassword: Incomplete | None = None, + ownerPassword=None, canPrint: int = 1, canModify: int = 1, canCopy: int = 1, @@ -101,7 +101,7 @@ class EncryptionFlowable(StandardEncryption, Flowable): def encryptDocTemplate( dt, userPassword, - ownerPassword: Incomplete | None = None, + ownerPassword=None, canPrint: int = 1, canModify: int = 1, canCopy: int = 1, @@ -111,7 +111,7 @@ def encryptDocTemplate( def encryptPdfInMemory( inputPDF, userPassword, - ownerPassword: Incomplete | None = None, + ownerPassword=None, canPrint: int = 1, canModify: int = 1, canCopy: int = 1, @@ -122,7 +122,7 @@ def encryptPdfOnDisk( inputFileName, outputFileName, userPassword, - ownerPassword: Incomplete | None = None, + ownerPassword=None, canPrint: int = 1, canModify: int = 1, canCopy: int = 1, diff --git a/stubs/reportlab/reportlab/lib/rl_safe_eval.pyi b/stubs/reportlab/reportlab/lib/rl_safe_eval.pyi index 8f50c931bcfb..6c2eed8f2074 100644 --- a/stubs/reportlab/reportlab/lib/rl_safe_eval.pyi +++ b/stubs/reportlab/reportlab/lib/rl_safe_eval.pyi @@ -23,7 +23,7 @@ def copy_locations(new_node, old_node) -> None: ... class UntrustedAstTransformer(ast.NodeTransformer): names_seen: Incomplete nameIsAllowed: Incomplete - def __init__(self, names_seen: Incomplete | None = None, nameIsAllowed: Incomplete | None = None) -> None: ... + def __init__(self, names_seen=None, nameIsAllowed=None) -> None: ... @property def tmpName(self): ... def error(self, node, msg) -> NoReturn: ... @@ -160,7 +160,7 @@ class __rl_SafeIter__: __rl_safe_builtins__: Incomplete -def safer_globals(g: Incomplete | None = None): ... +def safer_globals(g=None): ... math_log10 = math.log10 __rl_undef__: Incomplete @@ -176,12 +176,7 @@ class __RL_SAFE_ENV__: real_bi: Incomplete bi_replace: Incomplete __rl_builtins__: Incomplete - def __init__( - self, - timeout: Incomplete | None = None, - allowed_magic_methods: Incomplete | None = None, - allowed_magic_names: Incomplete | None = None, - ) -> None: ... + def __init__(self, timeout=None, allowed_magic_methods=None, allowed_magic_names=None) -> None: ... def __rl_type__(self, *args): ... def __rl_check__(self) -> None: ... def __rl_sd__(self, obj): ... @@ -217,45 +212,23 @@ class __RL_SAFE_ENV__: def __rl_args_iter__(self, *args): ... def __rl_list__(self, it): ... def __rl_compile__( - self, - src, - fname: str = "", - mode: str = "eval", - flags: int = 0, - inherit: bool = True, - visit: Incomplete | None = None, + self, src, fname: str = "", mode: str = "eval", flags: int = 0, inherit: bool = True, visit=None ): ... __rl_limit__: Incomplete def __rl_safe_eval__( - self, - expr, - g, - l, - mode, - timeout: Incomplete | None = None, - allowed_magic_methods: Incomplete | None = None, - __frame_depth__: int = 3, - allowed_magic_names: Incomplete | None = None, + self, expr, g, l, mode, timeout=None, allowed_magic_methods=None, __frame_depth__: int = 3, allowed_magic_names=None ): ... class __rl_safe_eval__: mode: str env: Incomplete def __init__(self) -> None: ... - def __call__( - self, - expr, - g: Incomplete | None = None, - l: Incomplete | None = None, - timeout: Incomplete | None = None, - allowed_magic_methods: Incomplete | None = None, - allowed_magic_names: Incomplete | None = None, - ): ... + def __call__(self, expr, g=None, l=None, timeout=None, allowed_magic_methods=None, allowed_magic_names=None): ... 
class __rl_safe_exec__(__rl_safe_eval__): mode: str -def rl_extended_literal_eval(expr, safe_callables: Incomplete | None = None, safe_names: Incomplete | None = None): ... +def rl_extended_literal_eval(expr, safe_callables=None, safe_names=None): ... rl_safe_exec: __rl_safe_exec__ rl_safe_eval: __rl_safe_eval__ diff --git a/stubs/reportlab/reportlab/lib/rparsexml.pyi b/stubs/reportlab/reportlab/lib/rparsexml.pyi index ef804c2e708b..4bff57d0bd6a 100644 --- a/stubs/reportlab/reportlab/lib/rparsexml.pyi +++ b/stubs/reportlab/reportlab/lib/rparsexml.pyi @@ -17,7 +17,7 @@ CDATAENDMARKER: str replacelist: Incomplete def unEscapeContentList(contentList): ... -def parsexmlSimple(xmltext, oneOutermostTag: int = 0, eoCB: Incomplete | None = None, entityReplacer=...): ... +def parsexmlSimple(xmltext, oneOutermostTag: int = 0, eoCB=None, entityReplacer=...): ... parsexml = parsexmlSimple diff --git a/stubs/reportlab/reportlab/lib/sequencer.pyi b/stubs/reportlab/reportlab/lib/sequencer.pyi index f76d11a45c0b..1973b8138cdc 100644 --- a/stubs/reportlab/reportlab/lib/sequencer.pyi +++ b/stubs/reportlab/reportlab/lib/sequencer.pyi @@ -1,11 +1,9 @@ -from _typeshed import Incomplete - __all__ = ["Sequencer", "getSequencer", "setSequencer"] class _Counter: def __init__(self) -> None: ... def setFormatter(self, formatFunc) -> None: ... - def reset(self, value: Incomplete | None = None) -> None: ... + def reset(self, value=None) -> None: ... def next(self): ... __next__ = next def nextf(self): ... @@ -15,13 +13,13 @@ class _Counter: class Sequencer: def __init__(self) -> None: ... def __next__(self): ... - def next(self, counter: Incomplete | None = None): ... - def thisf(self, counter: Incomplete | None = None): ... - def nextf(self, counter: Incomplete | None = None): ... - def setDefaultCounter(self, default: Incomplete | None = None) -> None: ... + def next(self, counter=None): ... + def thisf(self, counter=None): ... + def nextf(self, counter=None): ... + def setDefaultCounter(self, default=None) -> None: ... def registerFormat(self, format, func) -> None: ... def setFormat(self, counter, format) -> None: ... - def reset(self, counter: Incomplete | None = None, base: int = 0) -> None: ... + def reset(self, counter=None, base: int = 0) -> None: ... def chain(self, parent, child) -> None: ... def __getitem__(self, key): ... def format(self, template): ... diff --git a/stubs/reportlab/reportlab/lib/testutils.pyi b/stubs/reportlab/reportlab/lib/testutils.pyi index 436e9327464a..0dd2fa224150 100644 --- a/stubs/reportlab/reportlab/lib/testutils.pyi +++ b/stubs/reportlab/reportlab/lib/testutils.pyi @@ -20,7 +20,7 @@ def setOutDir(name): ... def mockUrlRead(name): ... def outputfile(fn): ... def printLocation(depth: int = 1) -> None: ... -def makeSuiteForClasses(*classes, testMethodPrefix: Incomplete | None = None): ... +def makeSuiteForClasses(*classes, testMethodPrefix=None): ... def getCVSEntries(folder, files: int = 1, folders: int = 0): ... class ExtConfigParser(ConfigParser): @@ -39,7 +39,7 @@ class GlobDirectoryWalker: class RestrictedGlobDirectoryWalker(GlobDirectoryWalker): ignorePatterns: Incomplete - def __init__(self, directory, pattern: str = "*", ignore: Incomplete | None = None) -> None: ... + def __init__(self, directory, pattern: str = "*", ignore=None) -> None: ... def filterFiles(self, folder, files): ... class CVSGlobDirectoryWalker(GlobDirectoryWalker): @@ -67,6 +67,6 @@ class ScriptThatMakesFileTest(unittest.TestCase): def equalStrings(a, b, enc: str = "utf8"): ... 
def eqCheck(r, x) -> None: ... def rlextraNeeded(): ... -def rlSkipIf(cond, reason, __module__: Incomplete | None = None): ... -def rlSkipUnless(cond, reason, __module__: Incomplete | None = None): ... -def rlSkip(reason, __module__: Incomplete | None = None): ... +def rlSkipIf(cond, reason, __module__=None): ... +def rlSkipUnless(cond, reason, __module__=None): ... +def rlSkip(reason, __module__=None): ... diff --git a/stubs/reportlab/reportlab/lib/utils.pyi b/stubs/reportlab/reportlab/lib/utils.pyi index 08485eb2df59..bb22eef1f3f7 100644 --- a/stubs/reportlab/reportlab/lib/utils.pyi +++ b/stubs/reportlab/reportlab/lib/utils.pyi @@ -63,7 +63,7 @@ class CIDict(dict[_KT, _VT]): def __init__(self, *args, **kwds) -> None: ... def update(self, D: SupportsItems[_KT, _VT]) -> None: ... # type:ignore[override] -def markfilename(filename, creatorcode: Incomplete | None = None, filetype: Incomplete | None = None): ... +def markfilename(filename, creatorcode=None, filetype=None): ... __rl_loader__: Incomplete @@ -72,7 +72,7 @@ def isFileSystemDistro() -> bool: ... def isCompactDistro() -> bool: ... def isSourceDistro() -> bool: ... def normalize_path(p: PathLike[AnyStr]) -> PathLike[AnyStr]: ... -def recursiveImport(modulename, baseDir: Incomplete | None = None, noCWD: int = 0, debug: int = 0): ... +def recursiveImport(modulename, baseDir=None, noCWD: int = 0, debug: int = 0): ... haveImages: Final[bool] @@ -82,7 +82,7 @@ class ArgvDictValue: def __init__(self, value, func) -> None: ... def getArgvDict(**kw): ... -def getHyphenater(hDict: Incomplete | None = None): ... +def getHyphenater(hDict=None): ... def open_for_read_by_name(name, mode: str = "b"): ... def rlUrlRead(name): ... def open_for_read(name, mode: str = "b"): ... @@ -98,12 +98,12 @@ def rl_get_module(name, dir): ... class ImageReader: fileName: Incomplete fp: Incomplete - def __init__(self, fileName, ident: Incomplete | None = None) -> None: ... + def __init__(self, fileName, ident=None) -> None: ... def identity(self) -> str: ... @classmethod def check_pil_image_size(cls, im) -> None: ... @classmethod - def set_max_image_size(cls, max_image_size: Incomplete | None = None) -> None: ... + def set_max_image_size(cls, max_image_size=None) -> None: ... def jpeg_fh(self) -> None: ... def getSize(self) -> tuple[int, int]: ... mode: Incomplete @@ -126,7 +126,7 @@ class DebugMemo: getScript: int = 1, modules=(), capture_traceback: int = 1, - stdout: Incomplete | None = None, + stdout=None, **kw, ) -> None: ... def add(self, **kw) -> None: ... @@ -184,13 +184,13 @@ class TimeStamp: YMDhms: Incomplete dhh: Incomplete dmm: Incomplete - def __init__(self, invariant: Incomplete | None = None) -> None: ... + def __init__(self, invariant=None) -> None: ... @property def datetime(self): ... @property def asctime(self): ... -def recursiveGetAttr(obj, name, g: Incomplete | None = None): ... +def recursiveGetAttr(obj, name, g=None): ... def recursiveSetAttr(obj, name, value) -> None: ... def recursiveDelAttr(obj, name) -> None: ... def yieldNoneSplits(L) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/reportlab/reportlab/lib/validators.pyi b/stubs/reportlab/reportlab/lib/validators.pyi index 9b1b6db637be..689aa1ffb60e 100644 --- a/stubs/reportlab/reportlab/lib/validators.pyi +++ b/stubs/reportlab/reportlab/lib/validators.pyi @@ -80,13 +80,11 @@ class OneOf(Validator): def test(self, x): ... 
class SequenceOf(Validator): - def __init__( - self, elemTest, name: Incomplete | None = None, emptyOK: int = 1, NoneOK: int = 0, lo: int = 0, hi: int = 2147483647 - ) -> None: ... + def __init__(self, elemTest, name=None, emptyOK: int = 1, NoneOK: int = 0, lo: int = 0, hi: int = 2147483647) -> None: ... def test(self, x): ... class EitherOr(Validator): - def __init__(self, tests, name: Incomplete | None = None) -> None: ... + def __init__(self, tests, name=None) -> None: ... def test(self, x): ... class NoneOr(EitherOr): @@ -108,11 +106,11 @@ class AutoOr(EitherOr): def test(self, x): ... class isInstanceOf(Validator): - def __init__(self, klass: Incomplete | None = None) -> None: ... + def __init__(self, klass=None) -> None: ... def test(self, x): ... class isSubclassOf(Validator): - def __init__(self, klass: Incomplete | None = None) -> None: ... + def __init__(self, klass=None) -> None: ... def test(self, x): ... class matchesPattern(Validator): diff --git a/stubs/reportlab/reportlab/pdfbase/acroform.pyi b/stubs/reportlab/reportlab/pdfbase/acroform.pyi index 83df837e92a8..4722559f0f0c 100644 --- a/stubs/reportlab/reportlab/pdfbase/acroform.pyi +++ b/stubs/reportlab/reportlab/pdfbase/acroform.pyi @@ -38,9 +38,9 @@ class AcroForm(PDFObject): value, buttonStyle: str = "circle", shape: str = "square", - fillColor: Incomplete | None = None, - borderColor: Incomplete | None = None, - textColor: Incomplete | None = None, + fillColor=None, + borderColor=None, + textColor=None, borderWidth: int = 1, borderStyle: str = "solid", size: int = 20, @@ -63,16 +63,16 @@ class AcroForm(PDFObject): checked: bool = False, buttonStyle: str = "check", shape: str = "square", - fillColor: Incomplete | None = None, - borderColor: Incomplete | None = None, - textColor: Incomplete | None = None, + fillColor=None, + borderColor=None, + textColor=None, borderWidth: int = 1, borderStyle: str = "solid", size: int = 20, x: int = 0, y: int = 0, - tooltip: Incomplete | None = None, - name: Incomplete | None = None, + tooltip=None, + name=None, annotationFlags: str = "print", fieldFlags: str = "required", forceBorder: bool = False, @@ -81,20 +81,20 @@ class AcroForm(PDFObject): ) -> None: ... 
def radio( self, - value: Incomplete | None = None, + value=None, selected: bool = False, buttonStyle: str = "circle", shape: str = "circle", - fillColor: Incomplete | None = None, - borderColor: Incomplete | None = None, - textColor: Incomplete | None = None, + fillColor=None, + borderColor=None, + textColor=None, borderWidth: int = 1, borderStyle: str = "solid", size: int = 20, x: int = 0, y: int = 0, - tooltip: Incomplete | None = None, - name: Incomplete | None = None, + tooltip=None, + name=None, annotationFlags: str = "print", fieldFlags: str = "noToggleToOff required radio", forceBorder: bool = False, @@ -110,9 +110,9 @@ class AcroForm(PDFObject): rFontName, fontSize, shape: str = "square", - fillColor: Incomplete | None = None, - borderColor: Incomplete | None = None, - textColor: Incomplete | None = None, + fillColor=None, + borderColor=None, + textColor=None, borderWidth: int = 1, borderStyle: str = "solid", width: int = 120, @@ -128,72 +128,72 @@ class AcroForm(PDFObject): def textfield( self, value: str = "", - fillColor: Incomplete | None = None, - borderColor: Incomplete | None = None, - textColor: Incomplete | None = None, + fillColor=None, + borderColor=None, + textColor=None, borderWidth: int = 1, borderStyle: str = "solid", width: int = 120, height: int = 36, x: int = 0, y: int = 0, - tooltip: Incomplete | None = None, - name: Incomplete | None = None, + tooltip=None, + name=None, annotationFlags: str = "print", fieldFlags: str = "", forceBorder: bool = False, relative: bool = False, maxlen: int = 100, - fontName: Incomplete | None = None, - fontSize: Incomplete | None = None, + fontName=None, + fontSize=None, dashLen: int = 3, ): ... def listbox( self, value: str = "", - fillColor: Incomplete | None = None, - borderColor: Incomplete | None = None, - textColor: Incomplete | None = None, + fillColor=None, + borderColor=None, + textColor=None, borderWidth: int = 1, borderStyle: str = "solid", width: int = 120, height: int = 36, x: int = 0, y: int = 0, - tooltip: Incomplete | None = None, - name: Incomplete | None = None, + tooltip=None, + name=None, annotationFlags: str = "print", fieldFlags: str = "", forceBorder: bool = False, relative: bool = False, - fontName: Incomplete | None = None, - fontSize: Incomplete | None = None, + fontName=None, + fontSize=None, dashLen: int = 3, - maxlen: Incomplete | None = None, + maxlen=None, options=[], ): ... def choice( self, value: str = "", - fillColor: Incomplete | None = None, - borderColor: Incomplete | None = None, - textColor: Incomplete | None = None, + fillColor=None, + borderColor=None, + textColor=None, borderWidth: int = 1, borderStyle: str = "solid", width: int = 120, height: int = 36, x: int = 0, y: int = 0, - tooltip: Incomplete | None = None, - name: Incomplete | None = None, + tooltip=None, + name=None, annotationFlags: str = "print", fieldFlags: str = "combo", forceBorder: bool = False, relative: bool = False, - fontName: Incomplete | None = None, - fontSize: Incomplete | None = None, + fontName=None, + fontSize=None, dashLen: int = 3, - maxlen: Incomplete | None = None, + maxlen=None, options=[], ): ... def checkboxRelative(self, **kwds) -> None: ... 
diff --git a/stubs/reportlab/reportlab/pdfbase/cidfonts.pyi b/stubs/reportlab/reportlab/pdfbase/cidfonts.pyi index e4370bcdd199..d2afbed2a4c8 100644 --- a/stubs/reportlab/reportlab/pdfbase/cidfonts.pyi +++ b/stubs/reportlab/reportlab/pdfbase/cidfonts.pyi @@ -34,7 +34,7 @@ class CIDFont(pdfmetrics.Font): substitutionFonts: Incomplete def __init__(self, face, encoding) -> None: ... def formatForPdf(self, text): ... - def stringWidth(self, text, size, encoding: Incomplete | None = None): ... + def stringWidth(self, text, size, encoding=None): ... def addObjects(self, doc) -> None: ... class UnicodeCIDFont(CIDFont): @@ -45,7 +45,7 @@ class UnicodeCIDFont(CIDFont): unicodeWidths: Incomplete def __init__(self, face, isVertical: bool = False, isHalfWidth: bool = False) -> None: ... def formatForPdf(self, text): ... - def stringWidth(self, text, size, encoding: Incomplete | None = None): ... + def stringWidth(self, text, size, encoding=None): ... def precalculate(cmapdir) -> None: ... def test() -> None: ... diff --git a/stubs/reportlab/reportlab/pdfbase/pdfdoc.pyi b/stubs/reportlab/reportlab/pdfbase/pdfdoc.pyi index 0fb9e8ea44d3..3cd33b470efa 100644 --- a/stubs/reportlab/reportlab/pdfbase/pdfdoc.pyi +++ b/stubs/reportlab/reportlab/pdfbase/pdfdoc.pyi @@ -48,13 +48,7 @@ class PDFDocument(PDFObject): fontMapping: Incomplete delayedFonts: Incomplete def __init__( - self, - dummyoutline: int = 0, - compression=1, - invariant=0, - filename: Incomplete | None = None, - pdfVersion=(1, 3), - lang: Incomplete | None = None, + self, dummyoutline: int = 0, compression=1, invariant=0, filename=None, pdfVersion=(1, 3), lang=None ) -> None: ... compression: Incomplete def setCompression(self, onoff) -> None: ... @@ -89,7 +83,7 @@ class PDFDocument(PDFObject): def getFormBBox(self, name, boxType: str = "MediaBox"): ... def getXObjectName(self, name): ... def xobjDict(self, formnames): ... - def Reference(self, obj, name: Incomplete | None = None): ... + def Reference(self, obj, name=None): ... PDFtrue: str PDFfalse: str @@ -116,7 +110,7 @@ def PDFName(data, lo="!", hi="~"): ... class PDFDictionary(PDFObject): multiline: bool dict: Incomplete - def __init__(self, dict: Incomplete | None = None) -> None: ... + def __init__(self, dict=None) -> None: ... def __setitem__(self, name, value) -> None: ... def __getitem__(self, a): ... def __contains__(self, a) -> bool: ... @@ -134,7 +128,7 @@ def checkPDFBoolean(value): ... class CheckedPDFDictionary(PDFDictionary): validate: Incomplete - def __init__(self, dict: Incomplete | None = None, validate: Incomplete | None = None) -> None: ... + def __init__(self, dict=None, validate=None) -> None: ... def __setitem__(self, name, value) -> None: ... class ViewerPreferencesPDFDictionary(CheckedPDFDictionary): @@ -159,12 +153,10 @@ class PDFStream(PDFObject): dictionary: Incomplete content: Incomplete filters: Incomplete - def __init__( - self, dictionary: Incomplete | None = None, content: Incomplete | None = None, filters: Incomplete | None = None - ) -> None: ... + def __init__(self, dictionary=None, content=None, filters=None) -> None: ... def format(self, document): ... -def teststream(content: Incomplete | None = None): ... +def teststream(content=None): ... 
teststreamcontent: str @@ -213,16 +205,7 @@ class PDFCrossReferenceTable(PDFObject): class PDFTrailer(PDFObject): startxref: Incomplete - def __init__( - self, - startxref, - Size: Incomplete | None = None, - Prev: Incomplete | None = None, - Root: Incomplete | None = None, - Info: Incomplete | None = None, - ID: Incomplete | None = None, - Encrypt: Incomplete | None = None, - ) -> None: ... + def __init__(self, startxref, Size=None, Prev=None, Root=None, Info=None, ID=None, Encrypt=None) -> None: ... def format(self, document): ... class PDFCatalog(PDFObject): @@ -307,9 +290,7 @@ class PDFPageLabel(PDFCatalog): S: Incomplete St: Incomplete P: Incomplete - def __init__( - self, style: Incomplete | None = None, start: Incomplete | None = None, prefix: Incomplete | None = None - ) -> None: ... + def __init__(self, style=None, start=None, prefix=None) -> None: ... def __lt__(self, oth): ... def testpage(document) -> None: ... @@ -344,9 +325,7 @@ class PDFOutlines(PDFObject): buildtree: Incomplete closedict: Incomplete def __init__(self) -> None: ... - def addOutlineEntry( - self, destinationname, level: int = 0, title: Incomplete | None = None, closed: Incomplete | None = None - ) -> None: ... + def addOutlineEntry(self, destinationname, level: int = 0, title=None, closed=None) -> None: ... def setDestinations(self, destinationtree) -> None: ... def format(self, document): ... def setNames(self, canvas, *nametree) -> None: ... @@ -355,9 +334,9 @@ class PDFOutlines(PDFObject): first: Incomplete count: int def prepare(self, document, canvas) -> None: ... - def maketree(self, document, destinationtree, Parent: Incomplete | None = None, toplevel: int = 0): ... + def maketree(self, document, destinationtree, Parent=None, toplevel: int = 0): ... -def count(tree, closedict: Incomplete | None = None): ... +def count(tree, closedict=None): ... class PDFInfo(PDFObject): producer: Incomplete @@ -425,7 +404,7 @@ class PDFRectangle(PDFObject): class PDFDate(PDFObject): dateFormatter: Incomplete - def __init__(self, ts: Incomplete | None = None, dateFormatter: Incomplete | None = None) -> None: ... + def __init__(self, ts=None, dateFormatter=None) -> None: ... def format(self, doc): ... class Destination(PDFObject): @@ -538,7 +517,7 @@ class PDFFormXObject(PDFObject): class PDFPostScriptXObject(PDFObject): content: Incomplete - def __init__(self, content: Incomplete | None = None) -> None: ... + def __init__(self, content=None) -> None: ... def format(self, document): ... class PDFImageXObject(PDFObject): @@ -549,7 +528,7 @@ class PDFImageXObject(PDFObject): colorSpace: str streamContent: str mask: Incomplete - def __init__(self, name, source: Incomplete | None = None, mask: Incomplete | None = None) -> None: ... + def __init__(self, name, source=None, mask=None) -> None: ... def loadImageFromA85(self, source): ... def loadImageFromJPEG(self, imageFile): ... def loadImageFromRaw(self, source): ... @@ -627,7 +606,7 @@ class PDFRadialShading(PDFShading): def Dict(self, document): ... class XMP(PDFStream): - def __init__(self, path: Incomplete | None = None, creator: Incomplete | None = None) -> None: ... + def __init__(self, path=None, creator=None) -> None: ... def makeContent(self, doc): ... # Param name is changed from the base class: def format(self, doc): ... 
diff --git a/stubs/reportlab/reportlab/pdfbase/pdfmetrics.pyi b/stubs/reportlab/reportlab/pdfbase/pdfmetrics.pyi index 3eac7bcd4829..d4a2cdb43697 100644 --- a/stubs/reportlab/reportlab/pdfbase/pdfmetrics.pyi +++ b/stubs/reportlab/reportlab/pdfbase/pdfmetrics.pyi @@ -27,7 +27,7 @@ class TypeFace: def getFontFiles(self): ... def findT1File(self, ext: str = ".pfb"): ... -def bruteForceSearchForFile(fn, searchPath: Incomplete | None = None): ... +def bruteForceSearchForFile(fn, searchPath=None): ... def bruteForceSearchForAFM(faceName): ... class Encoding: @@ -35,7 +35,7 @@ class Encoding: frozen: int baseEncodingName: Incomplete vector: Incomplete - def __init__(self, name, base: Incomplete | None = None) -> None: ... + def __init__(self, name, base=None) -> None: ... def __getitem__(self, index): ... def __setitem__(self, index, value) -> None: ... def freeze(self) -> None: ... @@ -52,7 +52,7 @@ class Font: encName: Incomplete substitutionFonts: Incomplete shapable: bool - def __init__(self, name, faceName, encName, substitutionFonts: Incomplete | None = None) -> None: ... + def __init__(self, name, faceName, encName, substitutionFonts=None) -> None: ... def stringWidth(self, text: str | bytes, size: float, encoding: str = "utf8") -> float: ... def addObjects(self, doc) -> None: ... @@ -71,13 +71,7 @@ class EmbeddedType1Face(TypeFace): def registerTypeFace(face) -> None: ... def registerEncoding(enc) -> None: ... -def registerFontFamily( - family, - normal: Incomplete | None = None, - bold: Incomplete | None = None, - italic: Incomplete | None = None, - boldItalic: Incomplete | None = None, -) -> None: ... +def registerFontFamily(family, normal=None, bold=None, italic=None, boldItalic=None) -> None: ... def registerFont(font) -> None: ... def getTypeFace(faceName): ... def getEncoding(encName): ... diff --git a/stubs/reportlab/reportlab/pdfbase/pdfutils.pyi b/stubs/reportlab/reportlab/pdfbase/pdfutils.pyi index 38994148da3d..a1e9636b0bf1 100644 --- a/stubs/reportlab/reportlab/pdfbase/pdfutils.pyi +++ b/stubs/reportlab/reportlab/pdfbase/pdfutils.pyi @@ -1,11 +1,10 @@ -from _typeshed import Incomplete from typing import Final __version__: Final[str] -def makeA85Image(filename, IMG: Incomplete | None = None, detectJpeg: bool = False): ... -def makeRawImage(filename, IMG: Incomplete | None = None, detectJpeg: bool = False): ... -def cacheImageFile(filename, returnInMemory: int = 0, IMG: Incomplete | None = None): ... +def makeA85Image(filename, IMG=None, detectJpeg: bool = False): ... +def makeRawImage(filename, IMG=None, detectJpeg: bool = False): ... +def cacheImageFile(filename, returnInMemory: int = 0, IMG=None): ... def preProcessImages(spec) -> None: ... def cachedImageExists(filename): ... def readJPEGInfo(image): ... diff --git a/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi b/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi index c3f4d20dcc7c..d410830bd2e8 100644 --- a/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi +++ b/stubs/reportlab/reportlab/pdfbase/ttfonts.pyi @@ -137,7 +137,7 @@ class TTFont: internalName: Incomplete frozen: int subsets: Incomplete - def __init__(self, asciiReadable: Incomplete | None = None, ttf: Incomplete | None = None) -> None: ... + def __init__(self, asciiReadable=None, ttf=None) -> None: ... 
fontName: str face: TTFontFace diff --git a/stubs/reportlab/reportlab/pdfgen/canvas.pyi b/stubs/reportlab/reportlab/pdfgen/canvas.pyi index 214fe6383b93..4f3d5891a107 100644 --- a/stubs/reportlab/reportlab/pdfgen/canvas.pyi +++ b/stubs/reportlab/reportlab/pdfgen/canvas.pyi @@ -25,21 +25,21 @@ class Canvas(_PDFColorSetter): filename: str | IO[bytes], pagesize: tuple[float, float] | None = None, bottomup: int = 1, - pageCompression: Incomplete | None = None, - invariant: Incomplete | None = None, + pageCompression=None, + invariant=None, verbosity: int = 0, - encrypt: Incomplete | None = None, - cropMarks: Incomplete | None = None, - pdfVersion: Incomplete | None = None, - enforceColorSpace: Incomplete | None = None, + encrypt=None, + cropMarks=None, + pdfVersion=None, + enforceColorSpace=None, initialFontName: float | None = None, initialFontSize: float | None = None, initialLeading: float | None = None, - cropBox: Incomplete | None = None, - artBox: Incomplete | None = None, - trimBox: Incomplete | None = None, - bleedBox: Incomplete | None = None, - lang: Incomplete | None = None, + cropBox=None, + artBox=None, + trimBox=None, + bleedBox=None, + lang=None, ) -> None: ... def setEncrypt(self, encrypt) -> None: ... def init_graphics_state(self) -> None: ... @@ -49,7 +49,7 @@ class Canvas(_PDFColorSetter): STATE_RANGE: Incomplete def setAuthor(self, author: str | None) -> None: ... def setDateFormatter(self, dateFormatter) -> None: ... - def addOutlineEntry(self, title, key, level: int = 0, closed: Incomplete | None = None) -> None: ... + def addOutlineEntry(self, title, key, level: int = 0, closed=None) -> None: ... def setOutlineNames0(self, *nametree) -> None: ... def setTitle(self, title: str | None) -> None: ... def setSubject(self, subject: str | None) -> None: ... @@ -62,16 +62,7 @@ class Canvas(_PDFColorSetter): def setBlendMode(self, v) -> None: ... def showPage(self) -> None: ... def setPageCallBack(self, func) -> None: ... - def bookmarkPage( - self, - key, - fit: str = "Fit", - left: Incomplete | None = None, - top: Incomplete | None = None, - bottom: Incomplete | None = None, - right: Incomplete | None = None, - zoom: Incomplete | None = None, - ): ... + def bookmarkPage(self, key, fit: str = "Fit", left=None, top=None, bottom=None, right=None, zoom=None): ... def bookmarkHorizontalAbsolute(self, key, top, left: int = 0, fit: str = "XYZ", **kw): ... def bookmarkHorizontal(self, key, relativeX, relativeY, **kw) -> None: ... def doForm(self, name) -> None: ... @@ -87,7 +78,7 @@ class Canvas(_PDFColorSetter): anchor: str = "c", anchorAtXY: bool = False, showBoundary: bool = False, - extraReturn: Incomplete | None = None, + extraReturn=None, ): ... def drawImage( self, @@ -96,83 +87,49 @@ class Canvas(_PDFColorSetter): y: float, width: float | None = None, height: float | None = None, - mask: Incomplete | None = None, + mask=None, preserveAspectRatio: bool = False, anchor: str = "c", anchorAtXY: bool = False, showBoundary: bool = False, - extraReturn: Incomplete | None = None, + extraReturn=None, ): ... - def beginForm( - self, name, lowerx: int = 0, lowery: int = 0, upperx: Incomplete | None = None, uppery: Incomplete | None = None - ) -> None: ... + def beginForm(self, name, lowerx: int = 0, lowery: int = 0, upperx=None, uppery=None) -> None: ... def endForm(self, **extra_attributes) -> None: ... def addPostScriptCommand(self, command, position: int = 1) -> None: ... 
- def freeTextAnnotation( - self, - contents, - DA, - Rect: Incomplete | None = None, - addtopage: int = 1, - name: Incomplete | None = None, - relative: int = 0, - **kw, - ) -> None: ... - def textAnnotation( - self, - contents, - Rect: Incomplete | None = None, - addtopage: int = 1, - name: Incomplete | None = None, - relative: int = 0, - **kw, - ) -> None: ... + def freeTextAnnotation(self, contents, DA, Rect=None, addtopage: int = 1, name=None, relative: int = 0, **kw) -> None: ... + def textAnnotation(self, contents, Rect=None, addtopage: int = 1, name=None, relative: int = 0, **kw) -> None: ... textAnnotation0 = textAnnotation def highlightAnnotation( - self, - contents, - Rect, - QuadPoints: Incomplete | None = None, - Color=[0.83, 0.89, 0.95], - addtopage: int = 1, - name: Incomplete | None = None, - relative: int = 0, - **kw, + self, contents, Rect, QuadPoints=None, Color=[0.83, 0.89, 0.95], addtopage: int = 1, name=None, relative: int = 0, **kw ) -> None: ... def inkAnnotation( - self, - contents, - InkList: Incomplete | None = None, - Rect: Incomplete | None = None, - addtopage: int = 1, - name: Incomplete | None = None, - relative: int = 0, - **kw, + self, contents, InkList=None, Rect=None, addtopage: int = 1, name=None, relative: int = 0, **kw ) -> None: ... inkAnnotation0 = inkAnnotation def linkAbsolute( self, contents, destinationname, - Rect: Incomplete | None = None, + Rect=None, addtopage: int = 1, - name: Incomplete | None = None, + name=None, thickness: int = 0, color: Color | None = None, - dashArray: Incomplete | None = None, + dashArray=None, **kw, ): ... def linkRect( self, contents, destinationname, - Rect: Incomplete | None = None, + Rect=None, addtopage: int = 1, - name: Incomplete | None = None, + name=None, relative: int = 1, thickness: int = 0, color: Color | None = None, - dashArray: Incomplete | None = None, + dashArray=None, **kw, ): ... def linkURL( @@ -182,7 +139,7 @@ class Canvas(_PDFColorSetter): relative: int = 0, thickness: int = 0, color: Color | None = None, - dashArray: Incomplete | None = None, + dashArray=None, kind: str = "URI", **kw, ) -> None: ... @@ -214,8 +171,8 @@ class Canvas(_PDFColorSetter): y: float, size: float = 5, gap: float = 1, - text: Incomplete | None = None, - strokeColor: Incomplete | None = None, + text=None, + strokeColor=None, strokeWidth: float | None = None, fontSize: float = 3, ) -> None: ... @@ -228,8 +185,8 @@ class Canvas(_PDFColorSetter): def circle(self, x_cen, y_cen, r, stroke: int = 1, fill: int = 0) -> None: ... def roundRect(self, x, y, width, height, radius, stroke: int = 1, fill: int = 0) -> None: ... def shade(self, shading) -> None: ... - def linearGradient(self, x0, y0, x1, y1, colors, positions: Incomplete | None = None, extend: bool = True) -> None: ... - def radialGradient(self, x, y, radius, colors, positions: Incomplete | None = None, extend: bool = True) -> None: ... + def linearGradient(self, x0, y0, x1, y1, colors, positions=None, extend: bool = True) -> None: ... + def radialGradient(self, x, y, radius, colors, positions=None, extend: bool = True) -> None: ... def drawString( self, x: float, @@ -286,12 +243,12 @@ class Canvas(_PDFColorSetter): def setMiterLimit(self, limit) -> None: ... def setDash(self, array: list[float] | tuple[float, ...] | float = [], phase: float = 0) -> None: ... def beginPath(self): ... - def drawPath(self, aPath, stroke: int = 1, fill: int = 0, fillMode: Incomplete | None = None) -> None: ... 
- def clipPath(self, aPath, stroke: int = 1, fill: int = 0, fillMode: Incomplete | None = None) -> None: ... + def drawPath(self, aPath, stroke: int = 1, fill: int = 0, fillMode=None) -> None: ... + def clipPath(self, aPath, stroke: int = 1, fill: int = 0, fillMode=None) -> None: ... def beginText(self, x: float = 0, y: float = 0, direction: Literal["LTR", "RTL"] | None = None) -> PDFTextObject: ... def drawText(self, aTextObject: PDFTextObject) -> None: ... def setPageCompression(self, pageCompression: int = 1) -> None: ... - def setPageDuration(self, duration: Incomplete | None = None) -> None: ... + def setPageDuration(self, duration=None) -> None: ... def setPageTransition( self, effectname: str | None = None, duration: float = 1, direction: float = 0, dimension: str = "H", motion: str = "I" ) -> None: ... @@ -302,9 +259,7 @@ class Canvas(_PDFColorSetter): def setCatalogEntry(self, key, value) -> None: ... def getCatalogEntry(self, key): ... def delCatalogEntry(self, key) -> None: ... - def addPageLabel( - self, pageNum, style: Incomplete | None = None, start: Incomplete | None = None, prefix: Incomplete | None = None - ) -> None: ... + def addPageLabel(self, pageNum, style=None, start=None, prefix=None) -> None: ... @property def acroForm(self): ... def drawBoundary(self, sb, x1: float, y1: float, width: float, height: float) -> None: ... diff --git a/stubs/reportlab/reportlab/pdfgen/pathobject.pyi b/stubs/reportlab/reportlab/pdfgen/pathobject.pyi index 681399eb166e..3081a3e14434 100644 --- a/stubs/reportlab/reportlab/pdfgen/pathobject.pyi +++ b/stubs/reportlab/reportlab/pdfgen/pathobject.pyi @@ -1,10 +1,9 @@ -from _typeshed import Incomplete from typing import Final __version__: Final[str] class PDFPathObject: - def __init__(self, code: Incomplete | None = None) -> None: ... + def __init__(self, code=None) -> None: ... def getCode(self): ... def moveTo(self, x, y) -> None: ... def lineTo(self, x, y) -> None: ... diff --git a/stubs/reportlab/reportlab/pdfgen/pdfimages.pyi b/stubs/reportlab/reportlab/pdfgen/pdfimages.pyi index dd93cbf1b9c1..3ebca254012a 100644 --- a/stubs/reportlab/reportlab/pdfgen/pdfimages.pyi +++ b/stubs/reportlab/reportlab/pdfgen/pdfimages.pyi @@ -15,9 +15,7 @@ class PDFImage: bitsPerComponent: int filters: Incomplete source: Incomplete - def __init__( - self, image, x, y, width: Incomplete | None = None, height: Incomplete | None = None, caching: int = 0 - ) -> None: ... + def __init__(self, image, x, y, width=None, height=None, caching: int = 0) -> None: ... def jpg_imagedata(self): ... def cache_imagedata(self): ... def PIL_imagedata(self): ... @@ -33,6 +31,6 @@ class PDFImage: anchor: str = "sw", anchorAtXY: bool = False, showBoundary: bool = False, - extraReturn: Incomplete | None = None, + extraReturn=None, ): ... def format(self, document): ... 
diff --git a/stubs/reportlab/reportlab/platypus/doctemplate.pyi b/stubs/reportlab/reportlab/platypus/doctemplate.pyi index d5557db149e5..5dc5ca0c6d7e 100644 --- a/stubs/reportlab/reportlab/platypus/doctemplate.pyi +++ b/stubs/reportlab/reportlab/platypus/doctemplate.pyi @@ -40,18 +40,18 @@ class _CanvasMaker(Protocol): filename: str | IO[bytes], /, *, - pagesize: Incomplete | None = None, - pageCompression: Incomplete | None = None, - invariant: Incomplete | None = None, - enforceColorSpace: Incomplete | None = None, - initialFontName: Incomplete | None = None, - initialFontSize: Incomplete | None = None, - initialLeading: Incomplete | None = None, - cropBox: Incomplete | None = None, - artBox: Incomplete | None = None, - trimBox: Incomplete | None = None, - bleedBox: Incomplete | None = None, - lang: Incomplete | None = None, + pagesize=None, + pageCompression=None, + invariant=None, + enforceColorSpace=None, + initialFontName=None, + initialFontSize=None, + initialLeading=None, + cropBox=None, + artBox=None, + trimBox=None, + bleedBox=None, + lang=None, ) -> Canvas: ... class LayoutError(Exception): ... @@ -120,7 +120,7 @@ class Indenter(FrameActionFlowable): class NotAtTopPageBreak(FrameActionFlowable): locChanger: int nextTemplate: Incomplete - def __init__(self, nextTemplate: Incomplete | None = None) -> None: ... + def __init__(self, nextTemplate=None) -> None: ... def frameAction(self, frame: Frame) -> None: ... class NextPageTemplate(ActionFlowable): @@ -145,11 +145,11 @@ class PageTemplate: onPage: _PageCallback = ..., onPageEnd: _PageCallback = ..., pagesize: tuple[float, float] | None = None, - autoNextPageTemplate: Incomplete | None = None, - cropBox: Incomplete | None = None, - artBox: Incomplete | None = None, - trimBox: Incomplete | None = None, - bleedBox: Incomplete | None = None, + autoNextPageTemplate=None, + cropBox=None, + artBox=None, + trimBox=None, + bleedBox=None, ) -> None: ... def beforeDrawPage(self, canv: Canvas, doc: BaseDocTemplate) -> None: ... def checkPageSize(self, canv: Canvas, doc: BaseDocTemplate) -> None: ... @@ -240,7 +240,7 @@ class BaseDocTemplate: def handle_pageBegin(self) -> None: ... def handle_pageEnd(self) -> None: ... def handle_pageBreak(self, slow: bool | None = None) -> None: ... - def handle_frameBegin(self, resume: int = 0, pageTopFlowables: Incomplete | None = None) -> None: ... + def handle_frameBegin(self, resume: int = 0, pageTopFlowables=None) -> None: ... def handle_frameEnd(self, resume: int = 0) -> None: ... def handle_nextPageTemplate(self, pt: str | int | list[str] | tuple[str, ...]) -> None: ... def handle_nextFrame(self, fx: str | int, resume: int = 0) -> None: ... 
diff --git a/stubs/reportlab/reportlab/platypus/figures.pyi b/stubs/reportlab/reportlab/platypus/figures.pyi index 9d5c8d2cc828..8433c7e14f04 100644 --- a/stubs/reportlab/reportlab/platypus/figures.pyi +++ b/stubs/reportlab/reportlab/platypus/figures.pyi @@ -30,13 +30,13 @@ class Figure(Flowable): caption: str = "", captionFont="Helvetica-Oblique", captionSize: int = 12, - background: Incomplete | None = None, + background=None, captionTextColor=..., - captionBackColor: Incomplete | None = None, - border: Incomplete | None = None, + captionBackColor=None, + border=None, spaceBefore: int = 12, spaceAfter: int = 12, - captionGap: Incomplete | None = None, + captionGap=None, captionAlign: str = "centre", captionPosition: str = "bottom", hAlign: str = "CENTER", @@ -56,7 +56,7 @@ class PageFigure(Figure): caption: str captionStyle: Incomplete background: Incomplete - def __init__(self, background: Incomplete | None = None) -> None: ... + def __init__(self, background=None) -> None: ... def drawVirtualPage(self) -> None: ... def drawFigure(self) -> None: ... @@ -75,7 +75,7 @@ class FlexFigure(Figure): width, height, caption, - background: Incomplete | None = None, + background=None, captionFont: str = "Helvetica-Oblique", captionSize: int = 8, captionTextColor=..., @@ -86,7 +86,7 @@ class FlexFigure(Figure): captionGap: int = 9, captionAlign: str = "centre", captionPosition: str = "top", - scaleFactor: Incomplete | None = None, + scaleFactor=None, hAlign: str = "CENTER", border: int = 1, ) -> None: ... @@ -95,23 +95,13 @@ class FlexFigure(Figure): class ImageFigure(FlexFigure): filename: Incomplete - def __init__( - self, - filename, - caption, - background: Incomplete | None = None, - scaleFactor: Incomplete | None = None, - hAlign: str = "CENTER", - border: Incomplete | None = None, - ) -> None: ... + def __init__(self, filename, caption, background=None, scaleFactor=None, hAlign: str = "CENTER", border=None) -> None: ... def drawFigure(self) -> None: ... class DrawingFigure(FlexFigure): drawing: Incomplete growToFit: int - def __init__( - self, modulename, classname, caption, baseDir: Incomplete | None = None, background: Incomplete | None = None - ) -> None: ... + def __init__(self, modulename, classname, caption, baseDir=None, background=None) -> None: ... def drawFigure(self) -> None: ... def demo1(canvas) -> None: ... diff --git a/stubs/reportlab/reportlab/platypus/flowables.pyi b/stubs/reportlab/reportlab/platypus/flowables.pyi index 328a145ca7d2..b51aa216c3da 100644 --- a/stubs/reportlab/reportlab/platypus/flowables.pyi +++ b/stubs/reportlab/reportlab/platypus/flowables.pyi @@ -191,7 +191,7 @@ class KeepTogether(_ContainerSpace, Flowable): splitAtTop: bool # TODO: Consider using Sequence[Flowable] for covariance, even if reportlab # only supports list/tuple - def __init__(self, flowables: _FlowableSublist | None, maxHeight: Incomplete | None = None) -> None: ... + def __init__(self, flowables: _FlowableSublist | None, maxHeight=None) -> None: ... class KeepTogetherSplitAtTop(KeepTogether): splitAtTop: bool @@ -239,7 +239,7 @@ class HRFlowable(Flowable): spaceAfter: float = 1, hAlign: _HAlignment = "CENTER", vAlign: _VAlignment = "BOTTOM", - dash: Incomplete | None = None, + dash=None, ) -> None: ... def draw(self) -> None: ... 
@@ -321,7 +321,7 @@ class BalancedColumns(_FindSplitterMixin, NullDraw): needed: float = 72, spaceBefore: float = 0, spaceAfter: float = 0, - showBoundary: Incomplete | None = None, + showBoundary=None, leftPadding: float | None = None, innerPadding: float | None = None, rightPadding: float | None = None, @@ -387,7 +387,7 @@ class BulletDrawer: bulletOffsetY: int = 0, bulletDedent: int = 0, bulletDir: str = "ltr", - bulletFormat: Incomplete | None = None, + bulletFormat=None, ) -> None: ... def drawOn(self, indenter: DDIndenter, canv: Canvas, x: float, y: float) -> None: ... @@ -400,7 +400,7 @@ class LIIndenter(DDIndenter): flowable: Flowable, leftIndent: float = 0, rightIndent: float = 0, - bullet: Incomplete | None = None, + bullet=None, spaceBefore: float | None = None, spaceAfter: float | None = None, ) -> None: ... @@ -414,9 +414,7 @@ class ListFlowable(_Container, Flowable): style: ListStyle # NOTE: style has to be a ListStyle, but this will be annoying with sheet["ul"] # TODO: Use Unpack for kwds with the ListStyle properties + spaceBefore/spaceAfter - def __init__( - self, flowables: Iterable[_NestedFlowable], start: Incomplete | None = None, style: PropertySet | None = None, **kwds - ) -> None: ... + def __init__(self, flowables: Iterable[_NestedFlowable], start=None, style: PropertySet | None = None, **kwds) -> None: ... class TopPadder(Flowable): # NOTE: TopPadder is mostly a transparent wrapper, we may consider trying diff --git a/stubs/reportlab/reportlab/platypus/frames.pyi b/stubs/reportlab/reportlab/platypus/frames.pyi index 061ba9be67c6..f8e832c1b83a 100644 --- a/stubs/reportlab/reportlab/platypus/frames.pyi +++ b/stubs/reportlab/reportlab/platypus/frames.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import Literal from reportlab.pdfgen.canvas import Canvas @@ -27,8 +26,8 @@ class Frame: topPadding: float = 6, id: str | None = None, showBoundary: int = 0, - overlapAttachedSpace: Incomplete | None = None, - _debug: Incomplete | None = None, + overlapAttachedSpace=None, + _debug=None, ) -> None: ... def add(self, flowable: Flowable, canv: Canvas, trySplit: int = 0) -> Literal[0, 1]: ... def split(self, flowable: Flowable, canv: Canvas) -> list[Flowable]: ... 
diff --git a/stubs/reportlab/reportlab/platypus/tables.pyi b/stubs/reportlab/reportlab/platypus/tables.pyi index 3bcaa2f87018..617c86634b3f 100644 --- a/stubs/reportlab/reportlab/platypus/tables.pyi +++ b/stubs/reportlab/reportlab/platypus/tables.pyi @@ -85,7 +85,7 @@ class Table(Flowable): rowSplitRange: tuple[int, int] | None = None, spaceBefore: float | None = None, spaceAfter: float | None = None, - longTableOptimize: Incomplete | None = None, + longTableOptimize=None, minRowHeights: Sequence[float] | None = None, cornerRadii: _CornerRadii | _UNSET_ | None = ..., renderCB: TableRenderCB | None = None, diff --git a/stubs/requests-oauthlib/requests_oauthlib/oauth1_auth.pyi b/stubs/requests-oauthlib/requests_oauthlib/oauth1_auth.pyi index 494dd12a4193..75a2de35da1d 100644 --- a/stubs/requests-oauthlib/requests_oauthlib/oauth1_auth.pyi +++ b/stubs/requests-oauthlib/requests_oauthlib/oauth1_auth.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from logging import Logger from typing import Any @@ -16,21 +15,21 @@ class OAuth1(AuthBase): def __init__( self, client_key, - client_secret: Incomplete | None = None, - resource_owner_key: Incomplete | None = None, - resource_owner_secret: Incomplete | None = None, - callback_uri: Incomplete | None = None, + client_secret=None, + resource_owner_key=None, + resource_owner_secret=None, + callback_uri=None, signature_method="HMAC-SHA1", signature_type="AUTH_HEADER", - rsa_key: Incomplete | None = None, - verifier: Incomplete | None = None, + rsa_key=None, + verifier=None, decoding: str | None = "utf-8", client_class: type[Client] | None = None, force_include_body: bool = False, *, - realm: Incomplete | None = None, + realm=None, encoding: str = "utf-8", - nonce: Incomplete | None = None, - timestamp: Incomplete | None = None, + nonce=None, + timestamp=None, **kwargs: Any, # passed to client_class's __init__ ) -> None: ... diff --git a/stubs/requests-oauthlib/requests_oauthlib/oauth1_session.pyi b/stubs/requests-oauthlib/requests_oauthlib/oauth1_session.pyi index 2c48e0a1a859..e97a83ba0a5f 100644 --- a/stubs/requests-oauthlib/requests_oauthlib/oauth1_session.pyi +++ b/stubs/requests-oauthlib/requests_oauthlib/oauth1_session.pyi @@ -37,20 +37,20 @@ class OAuth1Session(requests.Session): def __init__( self, client_key, - client_secret: Incomplete | None = None, - resource_owner_key: Incomplete | None = None, - resource_owner_secret: Incomplete | None = None, - callback_uri: Incomplete | None = None, + client_secret=None, + resource_owner_key=None, + resource_owner_secret=None, + callback_uri=None, signature_method="HMAC-SHA1", signature_type="AUTH_HEADER", - rsa_key: Incomplete | None = None, - verifier: Incomplete | None = None, + rsa_key=None, + verifier=None, client_class: type[Client] | None = None, force_include_body: bool = False, *, encoding: str = "utf-8", - nonce: Incomplete | None = None, - timestamp: Incomplete | None = None, + nonce=None, + timestamp=None, ) -> None: ... @property def token(self) -> _TokenDict: ... @@ -58,8 +58,8 @@ class OAuth1Session(requests.Session): def token(self, value: _TokenDict) -> None: ... @property def authorized(self) -> bool: ... - def authorization_url(self, url: str, request_token: Incomplete | None = None, **kwargs) -> str: ... - def fetch_request_token(self, url: str, realm: Incomplete | None = None, **request_kwargs) -> _ParsedToken: ... - def fetch_access_token(self, url: str, verifier: Incomplete | None = None, **request_kwargs) -> _ParsedToken: ... 
+ def authorization_url(self, url: str, request_token=None, **kwargs) -> str: ... + def fetch_request_token(self, url: str, realm=None, **request_kwargs) -> _ParsedToken: ... + def fetch_access_token(self, url: str, verifier=None, **request_kwargs) -> _ParsedToken: ... def parse_authorization_response(self, url: str) -> _ParsedToken: ... def rebuild_auth(self, prepared_request: requests.PreparedRequest, response: requests.Response) -> None: ... diff --git a/stubs/requests-oauthlib/requests_oauthlib/oauth2_auth.pyi b/stubs/requests-oauthlib/requests_oauthlib/oauth2_auth.pyi index fb241519eeb1..9bd5a5258562 100644 --- a/stubs/requests-oauthlib/requests_oauthlib/oauth2_auth.pyi +++ b/stubs/requests-oauthlib/requests_oauthlib/oauth2_auth.pyi @@ -1,9 +1,5 @@ -from _typeshed import Incomplete - from oauthlib.oauth2 import Client from requests.auth import AuthBase class OAuth2(AuthBase): - def __init__( - self, client_id: Incomplete | None = None, client: Client | None = None, token: Incomplete | None = None - ) -> None: ... + def __init__(self, client_id=None, client: Client | None = None, token=None) -> None: ... diff --git a/stubs/requests-oauthlib/requests_oauthlib/oauth2_session.pyi b/stubs/requests-oauthlib/requests_oauthlib/oauth2_session.pyi index d2bc3d5b58a6..300230fd1ce3 100644 --- a/stubs/requests-oauthlib/requests_oauthlib/oauth2_session.pyi +++ b/stubs/requests-oauthlib/requests_oauthlib/oauth2_session.pyi @@ -38,16 +38,16 @@ class OAuth2Session(requests.Session): compliance_hook: _ComplianceHooks def __init__( self, - client_id: Incomplete | None = None, + client_id=None, client: Client | None = None, auto_refresh_url: str | None = None, auto_refresh_kwargs: dict[str, Any] | None = None, - scope: Incomplete | None = None, - redirect_uri: Incomplete | None = None, - token: Incomplete | None = None, - state: Incomplete | None = None, - token_updater: Incomplete | None = None, - pkce: Incomplete | None = None, + scope=None, + redirect_uri=None, + token=None, + state=None, + token_updater=None, + pkce=None, **kwargs, ) -> None: ... @property @@ -73,38 +73,38 @@ class OAuth2Session(requests.Session): def access_token(self) -> None: ... @property def authorized(self) -> bool: ... - def authorization_url(self, url: str, state: Incomplete | None = None, **kwargs) -> tuple[str, str]: ... + def authorization_url(self, url: str, state=None, **kwargs) -> tuple[str, str]: ... def fetch_token( self, token_url: str, - code: Incomplete | None = None, - authorization_response: Incomplete | None = None, + code=None, + authorization_response=None, body: str = "", - auth: Incomplete | None = None, - username: Incomplete | None = None, - password: Incomplete | None = None, + auth=None, + username=None, + password=None, method: str = "POST", force_querystring: bool = False, - timeout: Incomplete | None = None, - headers: Incomplete | None = None, + timeout=None, + headers=None, verify: bool | None = None, - proxies: Incomplete | None = None, - include_client_id: Incomplete | None = None, - client_secret: Incomplete | None = None, - cert: Incomplete | None = None, + proxies=None, + include_client_id=None, + client_secret=None, + cert=None, **kwargs, ) -> _Token: ... def token_from_fragment(self, authorization_response: str) -> _Token: ... 
def refresh_token( self, token_url: str, - refresh_token: Incomplete | None = None, + refresh_token=None, body: str = "", - auth: Incomplete | None = None, - timeout: Incomplete | None = None, - headers: Incomplete | None = None, + auth=None, + timeout=None, + headers=None, verify: bool | None = None, - proxies: Incomplete | None = None, + proxies=None, **kwargs, ) -> _Token: ... def request( # type: ignore[override] @@ -114,8 +114,8 @@ class OAuth2Session(requests.Session): data: requests.sessions._Data | None = None, headers: requests.sessions._HeadersUpdateMapping | None = None, withhold_token: bool = False, - client_id: Incomplete | None = None, - client_secret: Incomplete | None = None, + client_id=None, + client_secret=None, files: requests.sessions._Files | None = None, *, params: requests.sessions._Params | None = None, @@ -128,7 +128,7 @@ class OAuth2Session(requests.Session): stream: bool | None = None, verify: requests.sessions._Verify | None = None, cert: requests.sessions._Cert | None = None, - json: Incomplete | None = None, + json=None, ) -> requests.Response: ... @overload def register_compliance_hook(self, hook_type: Literal["access_token_response"], hook: _AccessTokenResponseHook) -> None: ... diff --git a/stubs/seaborn/seaborn/matrix.pyi b/stubs/seaborn/seaborn/matrix.pyi index f7ba705584bd..3a9a25e54590 100644 --- a/stubs/seaborn/seaborn/matrix.pyi +++ b/stubs/seaborn/seaborn/matrix.pyi @@ -117,8 +117,8 @@ class ClusterGrid(Grid): z_score: int | None = None, standard_scale: int | None = None, figsize: tuple[float, float] | None = None, - row_colors: Incomplete | None = None, - col_colors: Incomplete | None = None, + row_colors=None, + col_colors=None, mask: NDArray[np.bool_] | DataFrame | None = None, dendrogram_ratio: float | tuple[float, float] | None = None, colors_ratio: float | tuple[float, float] | None = None, @@ -185,8 +185,8 @@ def clustermap( col_cluster: bool = True, row_linkage: NDArray[Incomplete] | None = None, col_linkage: NDArray[Incomplete] | None = None, - row_colors: Incomplete | None = None, - col_colors: Incomplete | None = None, + row_colors=None, + col_colors=None, mask: NDArray[np.bool_] | DataFrame | None = None, dendrogram_ratio: float | tuple[float, float] = 0.2, colors_ratio: float | tuple[float, float] = 0.03, diff --git a/stubs/setuptools/setuptools/_distutils/dist.pyi b/stubs/setuptools/setuptools/_distutils/dist.pyi index 4552c94d8364..2757b725f582 100644 --- a/stubs/setuptools/setuptools/_distutils/dist.pyi +++ b/stubs/setuptools/setuptools/_distutils/dist.pyi @@ -96,9 +96,7 @@ class Distribution: command_obj: dict[str, Command] have_run: dict[str, bool] want_user_cfg: bool - def dump_option_dicts( - self, header: Incomplete | None = None, commands: Incomplete | None = None, indent: str = "" - ) -> None: ... + def dump_option_dicts(self, header=None, commands=None, indent: str = "") -> None: ... def find_config_files(self): ... commands: Incomplete def parse_command_line(self): ... diff --git a/stubs/setuptools/setuptools/archive_util.pyi b/stubs/setuptools/setuptools/archive_util.pyi index 600586a65578..acadce09a7ca 100644 --- a/stubs/setuptools/setuptools/archive_util.pyi +++ b/stubs/setuptools/setuptools/archive_util.pyi @@ -16,7 +16,7 @@ __all__ = [ class UnrecognizedFormat(DistutilsError): ... def default_filter(src, dst): ... -def unpack_archive(filename, extract_dir, progress_filter=..., drivers: Incomplete | None = None) -> None: ... +def unpack_archive(filename, extract_dir, progress_filter=..., drivers=None) -> None: ... 
def unpack_directory(filename, extract_dir, progress_filter=...) -> None: ... def unpack_zipfile(filename, extract_dir, progress_filter=...) -> None: ... def unpack_tarfile(filename, extract_dir, progress_filter=...): ... diff --git a/stubs/setuptools/setuptools/command/build_ext.pyi b/stubs/setuptools/setuptools/command/build_ext.pyi index 85c477b0366e..34366afb3ee0 100644 --- a/stubs/setuptools/setuptools/command/build_ext.pyi +++ b/stubs/setuptools/setuptools/command/build_ext.pyi @@ -38,14 +38,14 @@ def link_shared_object( self, objects, output_libname, - output_dir: Incomplete | None = None, - libraries: Incomplete | None = None, - library_dirs: Incomplete | None = None, - runtime_library_dirs: Incomplete | None = None, - export_symbols: Incomplete | None = None, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, debug: bool = False, - extra_preargs: Incomplete | None = None, - extra_postargs: Incomplete | None = None, - build_temp: Incomplete | None = None, - target_lang: Incomplete | None = None, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, ) -> None: ... diff --git a/stubs/setuptools/setuptools/depends.pyi b/stubs/setuptools/setuptools/depends.pyi index c0ddba05e919..54ddd352768c 100644 --- a/stubs/setuptools/setuptools/depends.pyi +++ b/stubs/setuptools/setuptools/depends.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import IO, Any, Literal, TypeVar _T = TypeVar("_T") @@ -10,22 +9,12 @@ def find_module( ) -> tuple[IO[Any], str | None, tuple[str, Literal["", "r", "rb"], Literal[7, 6, 1, 2, 3, -1]]]: ... class Require: - def __init__( - self, - name, - requested_version, - module, - homepage: str = "", - attribute: Incomplete | None = None, - format: Incomplete | None = None, - ) -> None: ... + def __init__(self, name, requested_version, module, homepage: str = "", attribute=None, format=None) -> None: ... def full_name(self): ... def version_ok(self, version): ... - def get_version( - self, paths: Incomplete | None = None, default: _T | Literal["unknown"] = "unknown" - ) -> _T | Literal["unknown"] | None | Any: ... - def is_present(self, paths: Incomplete | None = None): ... - def is_current(self, paths: Incomplete | None = None): ... + def get_version(self, paths=None, default: _T | Literal["unknown"] = "unknown") -> _T | Literal["unknown"] | None | Any: ... + def is_present(self, paths=None): ... + def is_current(self, paths=None): ... -def get_module_constant(module, symbol, default: _T | int = -1, paths: Incomplete | None = None) -> _T | int | None | Any: ... +def get_module_constant(module, symbol, default: _T | int = -1, paths=None) -> _T | int | None | Any: ... def extract_constant(code, symbol, default: _T | int = -1) -> _T | int | None | Any: ... diff --git a/stubs/tensorflow/tensorflow/__init__.pyi b/stubs/tensorflow/tensorflow/__init__.pyi index 647e29ab0988..502113f1ce9a 100644 --- a/stubs/tensorflow/tensorflow/__init__.pyi +++ b/stubs/tensorflow/tensorflow/__init__.pyi @@ -161,7 +161,7 @@ class Variable(Tensor, metaclass=_VariableMetaclass): name: str | None = None, # Real type is VariableDef protobuf type. Can be added after adding script # to generate tensorflow protobuf stubs with mypy-protobuf. 
- variable_def: Incomplete | None = None, + variable_def=None, dtype: DTypeLike | None = None, import_scope: str | None = None, constraint: Callable[[Tensor], Tensor] | None = None, @@ -203,7 +203,7 @@ class Operation: control_inputs: Iterable[Tensor | Operation] | None = None, input_types: Iterable[DType] | None = None, original_op: Operation | None = None, - op_def: Incomplete | None = None, + op_def=None, ) -> None: ... @property def inputs(self) -> list[Tensor]: ... diff --git a/stubs/tensorflow/tensorflow/data/__init__.pyi b/stubs/tensorflow/tensorflow/data/__init__.pyi index 37520262c444..329b85a2967c 100644 --- a/stubs/tensorflow/tensorflow/data/__init__.pyi +++ b/stubs/tensorflow/tensorflow/data/__init__.pyi @@ -187,7 +187,7 @@ class Dataset(ABC, Generic[_T1_co]): path: str, compression: _CompressionTypes = None, shard_func: Callable[[_T1_co], int] | None = None, - checkpoint_args: Incomplete | None = None, + checkpoint_args=None, ) -> None: ... def scan( self, initial_state: _T2, scan_func: Callable[[_T2, _T1_co], tuple[_T2, _T3]], name: str | None = None diff --git a/stubs/tensorflow/tensorflow/keras/losses.pyi b/stubs/tensorflow/tensorflow/keras/losses.pyi index e6ea69e47553..ebb96740637e 100644 --- a/stubs/tensorflow/tensorflow/keras/losses.pyi +++ b/stubs/tensorflow/tensorflow/keras/losses.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from abc import ABC, abstractmethod from collections.abc import Callable from typing import Any, Final, Literal, TypeVar, overload @@ -14,9 +13,7 @@ from tensorflow.keras.metrics import ( class Loss(ABC): reduction: _ReductionValues name: str | None - def __init__( - self, name: str | None = None, reduction: _ReductionValues = "sum_over_batch_size", dtype: Incomplete | None = None - ) -> None: ... + def __init__(self, name: str | None = None, reduction: _ReductionValues = "sum_over_batch_size", dtype=None) -> None: ... @abstractmethod def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... @classmethod @@ -34,7 +31,7 @@ class BinaryCrossentropy(Loss): axis: int = -1, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "binary_crossentropy", - dtype: Incomplete | None = None, + dtype=None, ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... @@ -49,7 +46,7 @@ class BinaryFocalCrossentropy(Loss): axis: int = -1, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "binary_focal_crossentropy", - dtype: Incomplete | None = None, + dtype=None, ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... @@ -61,16 +58,13 @@ class CategoricalCrossentropy(Loss): axis: int = -1, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "categorical_crossentropy", - dtype: Incomplete | None = None, + dtype=None, ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class CategoricalHinge(Loss): def __init__( - self, - reduction: _ReductionValues = "sum_over_batch_size", - name: str | None = "categorical_hinge", - dtype: Incomplete | None = None, + self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "categorical_hinge", dtype=None ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... @@ -80,81 +74,58 @@ class CosineSimilarity(Loss): axis: int = -1, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "cosine_similarity", - dtype: Incomplete | None = None, + dtype=None, ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... 
class Hinge(Loss): - def __init__( - self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "hinge", dtype: Incomplete | None = None - ) -> None: ... + def __init__(self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "hinge", dtype=None) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class Huber(Loss): def __init__( - self, - delta: float = 1.0, - reduction: _ReductionValues = "sum_over_batch_size", - name: str | None = "huber_loss", - dtype: Incomplete | None = None, + self, delta: float = 1.0, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "huber_loss", dtype=None ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class KLDivergence(Loss): def __init__( - self, - reduction: _ReductionValues = "sum_over_batch_size", - name: str | None = "kl_divergence", - dtype: Incomplete | None = None, + self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "kl_divergence", dtype=None ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class LogCosh(Loss): def __init__( - self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "log_cosh", dtype: Incomplete | None = None + self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "log_cosh", dtype=None ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class MeanAbsoluteError(Loss): def __init__( - self, - reduction: _ReductionValues = "sum_over_batch_size", - name: str | None = "mean_absolute_error", - dtype: Incomplete | None = None, + self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "mean_absolute_error", dtype=None ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class MeanAbsolutePercentageError(Loss): def __init__( - self, - reduction: _ReductionValues = "sum_over_batch_size", - name: str | None = "mean_absolute_percentage_error", - dtype: Incomplete | None = None, + self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "mean_absolute_percentage_error", dtype=None ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class MeanSquaredError(Loss): def __init__( - self, - reduction: _ReductionValues = "sum_over_batch_size", - name: str | None = "mean_squared_error", - dtype: Incomplete | None = None, + self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "mean_squared_error", dtype=None ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class MeanSquaredLogarithmicError(Loss): def __init__( - self, - reduction: _ReductionValues = "sum_over_batch_size", - name: str | None = "mean_squared_logarithmic_error", - dtype: Incomplete | None = None, + self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "mean_squared_logarithmic_error", dtype=None ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class Poisson(Loss): - def __init__( - self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "poisson", dtype: Incomplete | None = None - ) -> None: ... + def __init__(self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "poisson", dtype=None) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... 
class SparseCategoricalCrossentropy(Loss): @@ -164,16 +135,13 @@ class SparseCategoricalCrossentropy(Loss): ignore_class: int | None = None, reduction: _ReductionValues = "sum_over_batch_size", name: str = "sparse_categorical_crossentropy", - dtype: Incomplete | None = None, + dtype=None, ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... class SquaredHinge(Loss): def __init__( - self, - reduction: _ReductionValues = "sum_over_batch_size", - name: str | None = "squared_hinge", - dtype: Incomplete | None = None, + self, reduction: _ReductionValues = "sum_over_batch_size", name: str | None = "squared_hinge", dtype=None ) -> None: ... def call(self, y_true: Tensor, y_pred: Tensor) -> Tensor: ... diff --git a/stubs/tensorflow/tensorflow/keras/models.pyi b/stubs/tensorflow/tensorflow/keras/models.pyi index 079066d4bf47..afbd68461ac5 100644 --- a/stubs/tensorflow/tensorflow/keras/models.pyi +++ b/stubs/tensorflow/tensorflow/keras/models.pyi @@ -22,9 +22,7 @@ class Model(Layer[_InputT_contra, _OutputT_co]): optimizer: Optimizer | None # This is actually TensorFlowTrainer.loss @deprecated("Instead, use `model.compute_loss(x, y, y_pred, sample_weight)`.") - def loss( - self, y: TensorCompatible | None, y_pred: TensorCompatible | None, sample_weight: Incomplete | None = None - ) -> tf.Tensor | None: ... + def loss(self, y: TensorCompatible | None, y_pred: TensorCompatible | None, sample_weight=None) -> tf.Tensor | None: ... stop_training: bool def __new__(cls, *args: Any, **kwargs: Any) -> Model[_InputT_contra, _OutputT_co]: ... @@ -67,11 +65,11 @@ class Model(Layer[_InputT_contra, _OutputT_co]): x: TensorCompatible | None = None, y: TensorCompatible | None = None, y_pred: TensorCompatible | None = None, - sample_weight: Incomplete | None = None, + sample_weight=None, training: bool = True, ) -> tf.Tensor | None: ... def compute_metrics( - self, x: TensorCompatible, y: TensorCompatible, y_pred: TensorCompatible, sample_weight: Incomplete | None = None + self, x: TensorCompatible, y: TensorCompatible, y_pred: TensorCompatible, sample_weight=None ) -> dict[str, float]: ... def get_metrics_result(self) -> dict[str, float]: ... def make_train_function(self, force: bool = False) -> Callable[[tf.data.Iterator[Incomplete]], dict[str, float]]: ... @@ -146,7 +144,7 @@ class Model(Layer[_InputT_contra, _OutputT_co]): def load_weights(self, filepath: str | Path, skip_mismatch: bool = False, *, by_name: bool = False) -> None: ... def get_config(self) -> dict[str, Any]: ... @classmethod - def from_config(cls, config: dict[str, Any], custom_objects: Incomplete | None = None) -> Self: ... + def from_config(cls, config: dict[str, Any], custom_objects=None) -> Self: ... def to_json(self, **kwargs: Any) -> str: ... @property def weights(self) -> list[Variable]: ... diff --git a/stubs/tensorflow/tensorflow/saved_model/__init__.pyi b/stubs/tensorflow/tensorflow/saved_model/__init__.pyi index 203d144c751f..c8244ba4f50e 100644 --- a/stubs/tensorflow/tensorflow/saved_model/__init__.pyi +++ b/stubs/tensorflow/tensorflow/saved_model/__init__.pyi @@ -71,8 +71,8 @@ class SaveOptions: experimental_custom_gradients: bool = True, experimental_image_format: bool = False, experimental_skip_saver: bool = False, - experimental_sharding_callback: Incomplete | None = None, - extra_tags: Incomplete | None = None, + experimental_sharding_callback=None, + extra_tags=None, ) -> None: ... def contains_saved_model(export_dir: str | Path) -> bool: ... 
diff --git a/stubs/tensorflow/tensorflow/summary.pyi b/stubs/tensorflow/tensorflow/summary.pyi index 70d9fb784b7b..07cbe539d92d 100644 --- a/stubs/tensorflow/tensorflow/summary.pyi +++ b/stubs/tensorflow/tensorflow/summary.pyi @@ -1,5 +1,4 @@ import abc -from _typeshed import Incomplete from collections.abc import Callable, Generator from contextlib import AbstractContextManager, contextmanager from typing import Literal @@ -56,6 +55,4 @@ def text(name: str, data: str | tf.Tensor, step: int | tf.Tensor | None = None, def trace_export(name: str, step: int | tf.Tensor | None = None, profiler_outdir: str | None = None) -> None: ... def trace_off() -> None: ... def trace_on(graph: bool = True, profiler: bool = False, profiler_outdir: str | None = None) -> None: ... -def write( - tag: str, tensor: tf.Tensor, step: int | tf.Tensor | None = None, metadata: Incomplete | None = None, name: str | None = None -) -> bool: ... +def write(tag: str, tensor: tf.Tensor, step: int | tf.Tensor | None = None, metadata=None, name: str | None = None) -> bool: ... diff --git a/stubs/tensorflow/tensorflow/train/__init__.pyi b/stubs/tensorflow/tensorflow/train/__init__.pyi index c40249a92c5e..685accb4767c 100644 --- a/stubs/tensorflow/tensorflow/train/__init__.pyi +++ b/stubs/tensorflow/tensorflow/train/__init__.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from collections.abc import Callable from typing import Any, TypeVar from typing_extensions import Self @@ -31,7 +30,7 @@ class CheckpointOptions: experimental_write_callbacks: None | list[Callable[[str], object] | Callable[[], object]] = None, enable_async: bool = False, experimental_skip_slot_variables: bool = False, - experimental_sharding_callback: Incomplete | None = None, + experimental_sharding_callback=None, ) -> None: ... _T = TypeVar("_T", bound=list[str] | tuple[str] | dict[int, str]) diff --git a/stubs/tqdm/tqdm/contrib/__init__.pyi b/stubs/tqdm/tqdm/contrib/__init__.pyi index 6a63b3b40969..7023bed256ab 100644 --- a/stubs/tqdm/tqdm/contrib/__init__.pyi +++ b/stubs/tqdm/tqdm/contrib/__init__.pyi @@ -10,6 +10,6 @@ class DummyTqdmFile(ObjectWrapper): def write(self, x, nolock: bool = False) -> None: ... def __del__(self) -> None: ... -def tenumerate(iterable, start: int = 0, total: Incomplete | None = None, tqdm_class: type[Incomplete] = ..., **tqdm_kwargs): ... +def tenumerate(iterable, start: int = 0, total=None, tqdm_class: type[Incomplete] = ..., **tqdm_kwargs): ... def tzip(iter1, *iter2plus, **tqdm_kwargs) -> Generator[Incomplete, None, None]: ... def tmap(function: Callable[..., Incomplete], *sequences, **tqdm_kwargs) -> Generator[Incomplete, None, None]: ... diff --git a/stubs/tqdm/tqdm/keras.pyi b/stubs/tqdm/tqdm/keras.pyi index f14d9be2a38b..7548952fb794 100644 --- a/stubs/tqdm/tqdm/keras.pyi +++ b/stubs/tqdm/tqdm/keras.pyi @@ -29,7 +29,7 @@ class _Callback: class TqdmCallback(_Callback): @staticmethod - def bar2callback(bar, pop: Incomplete | None = None, delta=...): ... + def bar2callback(bar, pop=None, delta=...): ... tqdm_class: Incomplete epoch_bar: Incomplete on_epoch_end: Incomplete @@ -37,15 +37,7 @@ class TqdmCallback(_Callback): verbose: Incomplete batch_bar: Incomplete on_batch_end: Incomplete - def __init__( - self, - epochs: Incomplete | None = None, - data_size: Incomplete | None = None, - batch_size: Incomplete | None = None, - verbose: int = 1, - tqdm_class=..., - **tqdm_kwargs, - ) -> None: ... 
+ def __init__(self, epochs=None, data_size=None, batch_size=None, verbose: int = 1, tqdm_class=..., **tqdm_kwargs) -> None: ... def on_train_begin(self, *_, **__) -> None: ... def on_epoch_begin(self, epoch, *_, **__) -> None: ... def on_train_end(self, *_, **__) -> None: ... diff --git a/stubs/tqdm/tqdm/std.pyi b/stubs/tqdm/tqdm/std.pyi index 8ffbb0eaa60e..e5e84eeec8a4 100644 --- a/stubs/tqdm/tqdm/std.pyi +++ b/stubs/tqdm/tqdm/std.pyi @@ -23,7 +23,7 @@ class TqdmTypeError(TypeError): ... class TqdmKeyError(KeyError): ... class TqdmWarning(Warning): - def __init__(self, msg, fp_write: Incomplete | None = None, *a, **k) -> None: ... + def __init__(self, msg, fp_write=None, *a, **k) -> None: ... class TqdmExperimentalWarning(TqdmWarning, FutureWarning): ... class TqdmDeprecationWarning(TqdmWarning, DeprecationWarning): ... diff --git a/stubs/tqdm/tqdm/tk.pyi b/stubs/tqdm/tqdm/tk.pyi index e8a888d25205..84f018692a3c 100644 --- a/stubs/tqdm/tqdm/tk.pyi +++ b/stubs/tqdm/tqdm/tk.pyi @@ -85,7 +85,7 @@ class tqdm_tk(std_tqdm[_T]): desc: Incomplete def set_description_str(self, desc: str | None = None, refresh: bool | None = True) -> None: ... def cancel(self) -> None: ... - def reset(self, total: Incomplete | None = None) -> None: ... + def reset(self, total=None) -> None: ... def ttkrange(*args, **kwargs) -> tqdm_tk[int]: ... diff --git a/stubs/vobject/vobject/base.pyi b/stubs/vobject/vobject/base.pyi index 11cc2cf30e13..931aabc870ce 100644 --- a/stubs/vobject/vobject/base.pyi +++ b/stubs/vobject/vobject/base.pyi @@ -22,7 +22,7 @@ class VBase: behavior: Incomplete | None parentBehavior: Incomplete | None isNative: bool - def __init__(self, group: Incomplete | None = None) -> None: ... + def __init__(self, group=None) -> None: ... def copy(self, copyit: VBase) -> None: ... def validate(self, *args, **kwds) -> bool: ... def getChildren(self) -> list[Incomplete]: ... @@ -36,18 +36,10 @@ class VBase: # Use Any because args and kwargs are passed to the behavior object @overload def serialize( - self, - buf: None = None, - lineLength: int = 75, - validate: bool = True, - behavior: Incomplete | None = None, - *args: Any, - **kwargs: Any, + self, buf: None = None, lineLength: int = 75, validate: bool = True, behavior=None, *args: Any, **kwargs: Any ) -> str: ... @overload - def serialize( - self, buf: _W, lineLength: int = 75, validate: bool = True, behavior: Incomplete | None = None, *args: Any, **kwargs: Any - ) -> _W: ... + def serialize(self, buf: _W, lineLength: int = 75, validate: bool = True, behavior=None, *args: Any, **kwargs: Any) -> _W: ... def toVName(name, stripNum: int = 0, upper: bool = False): ... @@ -60,16 +52,7 @@ class ContentLine(VBase): lineNumber: Incomplete value: Incomplete def __init__( - self, - name, - params, - value, - group: Incomplete | None = None, - encoded: bool = False, - isNative: bool = False, - lineNumber: Incomplete | None = None, - *args, - **kwds, + self, name, params, value, group=None, encoded: bool = False, isNative: bool = False, lineNumber=None, *args, **kwds ) -> None: ... @classmethod def duplicate(cls, copyit): ... @@ -86,7 +69,7 @@ class Component(VBase): contents: dict[str, list[VBase]] name: Incomplete useBegin: bool - def __init__(self, name: Incomplete | None = None, *args, **kwds) -> None: ... + def __init__(self, name=None, *args, **kwds) -> None: ... @classmethod def duplicate(cls, copyit): ... def copy(self, copyit) -> None: ... 
@@ -95,7 +78,7 @@ class Component(VBase): normal_attributes: Incomplete def __setattr__(self, name: str, value) -> None: ... def __delattr__(self, name: str) -> None: ... - def getChildValue(self, childName, default: Incomplete | None = None, childNumber: int = 0): ... + def getChildValue(self, childName, default=None, childNumber: int = 0): ... @overload def add(self, objOrName: _V, group: str | None = None) -> _V: ... @overload @@ -120,7 +103,7 @@ class Component(VBase): class VObjectError(Exception): msg: Incomplete lineNumber: Incomplete - def __init__(self, msg, lineNumber: Incomplete | None = None) -> None: ... + def __init__(self, msg, lineNumber=None) -> None: ... class ParseError(VObjectError): ... class ValidateError(VObjectError): ... @@ -133,14 +116,14 @@ line_re: Incomplete begin_re: Incomplete def parseParams(string): ... -def parseLine(line, lineNumber: Incomplete | None = None): ... +def parseLine(line, lineNumber=None): ... wrap_re: Incomplete logical_lines_re: Incomplete testLines: str def getLogicalLines(fp, allowQP: bool = True) -> None: ... -def textLineToContentLine(text, n: Incomplete | None = None): ... +def textLineToContentLine(text, n=None): ... def dquoteEscape(param): ... def foldOneLine(outbuf, input, lineLength: int = 75) -> None: ... def defaultSerialize(obj, buf, lineLength): ... @@ -158,7 +141,7 @@ def readComponents( streamOrString, validate: bool = False, transform: bool = True, ignoreUnreadable: bool = False, allowQP: bool = False ) -> Iterator[Component]: ... def readOne(stream, validate: bool = False, transform: bool = True, ignoreUnreadable: bool = False, allowQP: bool = False): ... -def registerBehavior(behavior, name: Incomplete | None = None, default: bool = False, id: Incomplete | None = None) -> None: ... -def getBehavior(name, id: Incomplete | None = None): ... -def newFromBehavior(name, id: Incomplete | None = None): ... +def registerBehavior(behavior, name=None, default: bool = False, id=None) -> None: ... +def getBehavior(name, id=None): ... +def newFromBehavior(name, id=None): ... def backslashEscape(s): ... diff --git a/stubs/vobject/vobject/hcalendar.pyi b/stubs/vobject/vobject/hcalendar.pyi index bbebf71bd65f..589e7feef2d7 100644 --- a/stubs/vobject/vobject/hcalendar.pyi +++ b/stubs/vobject/vobject/hcalendar.pyi @@ -1,8 +1,6 @@ -from _typeshed import Incomplete - from .icalendar import VCalendar2_0 class HCalendar(VCalendar2_0): name: str @classmethod - def serialize(cls, obj, buf: Incomplete | None = None, lineLength: Incomplete | None = None, validate: bool = True): ... + def serialize(cls, obj, buf=None, lineLength=None, validate: bool = True): ... diff --git a/stubs/vobject/vobject/icalendar.pyi b/stubs/vobject/vobject/icalendar.pyi index 0071b69745f3..0f2441e919f0 100644 --- a/stubs/vobject/vobject/icalendar.pyi +++ b/stubs/vobject/vobject/icalendar.pyi @@ -27,7 +27,7 @@ class TimezoneComponent(Component): tzinfo: Incomplete name: str useBegin: bool - def __init__(self, tzinfo: Incomplete | None = None, *args, **kwds) -> None: ... + def __init__(self, tzinfo=None, *args, **kwds) -> None: ... @classmethod def registerTzinfo(cls, tzinfo): ... def gettzinfo(self): ... @@ -220,9 +220,9 @@ def stringToDateTime(s, tzinfo: datetime.tzinfo | None = None, strict: bool = Fa escapableCharList: str -def stringToTextValues(s, listSeparator: str = ",", charList: Incomplete | None = None, strict: bool = False): ... +def stringToTextValues(s, listSeparator: str = ",", charList=None, strict: bool = False): ... 
def stringToDurations(s, strict: bool = False): ... def parseDtstart(contentline, allowSignatureMismatch: bool = False): ... -def stringToPeriod(s, tzinfo: Incomplete | None = None): ... +def stringToPeriod(s, tzinfo=None): ... def getTransition(transitionTo, year, tzinfo): ... def tzinfo_eq(tzinfo1, tzinfo2, startYear: int = 2000, endYear: int = 2020): ... diff --git a/stubs/vobject/vobject/vcard.pyi b/stubs/vobject/vobject/vcard.pyi index 803acb03e4d7..f70f98b6160b 100644 --- a/stubs/vobject/vobject/vcard.pyi +++ b/stubs/vobject/vobject/vcard.pyi @@ -89,7 +89,7 @@ class Photo(VCardTextBehavior): def toListOrString(string): ... def splitFields(string): ... def toList(stringOrList): ... -def serializeFields(obj, order: Incomplete | None = None): ... +def serializeFields(obj, order=None): ... NAME_ORDER: Incomplete ADDRESS_ORDER: Incomplete diff --git a/stubs/workalendar/workalendar/africa/south_africa.pyi b/stubs/workalendar/workalendar/africa/south_africa.pyi index 6a0030dd2151..a7c7afbd1dda 100644 --- a/stubs/workalendar/workalendar/africa/south_africa.pyi +++ b/stubs/workalendar/workalendar/africa/south_africa.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import ClassVar from ..core import WesternCalendar @@ -6,7 +5,7 @@ from ..core import WesternCalendar class SouthAfrica(WesternCalendar): include_good_friday: ClassVar[bool] include_christmas: ClassVar[bool] - def holidays(self, year: Incomplete | None = None): ... + def holidays(self, year=None): ... def get_easter_monday_or_family_day(self, year): ... def get_fixed_holidays(self, year): ... def get_variable_days(self, year): ... diff --git a/stubs/workalendar/workalendar/asia/china.pyi b/stubs/workalendar/workalendar/asia/china.pyi index b0934335f67b..c9b89624abfb 100644 --- a/stubs/workalendar/workalendar/asia/china.pyi +++ b/stubs/workalendar/workalendar/asia/china.pyi @@ -13,20 +13,6 @@ class China(ChineseNewYearCalendar): def __init__(self, *args, **kwargs) -> None: ... def get_calendar_holidays(self, year): ... def get_variable_days(self, year): ... - def is_working_day(self, day, extra_working_days: Incomplete | None = None, extra_holidays: Incomplete | None = None): ... - def add_working_days( - self, - day, - delta, - extra_working_days: Incomplete | None = None, - extra_holidays: Incomplete | None = None, - keep_datetime: bool = False, - ): ... - def sub_working_days( - self, - day, - delta, - extra_working_days: Incomplete | None = None, - extra_holidays: Incomplete | None = None, - keep_datetime: bool = False, - ): ... + def is_working_day(self, day, extra_working_days=None, extra_holidays=None): ... + def add_working_days(self, day, delta, extra_working_days=None, extra_holidays=None, keep_datetime: bool = False): ... + def sub_working_days(self, day, delta, extra_working_days=None, extra_holidays=None, keep_datetime: bool = False): ... diff --git a/stubs/workalendar/workalendar/core.pyi b/stubs/workalendar/workalendar/core.pyi index 9f083a4b5014..379712f18619 100644 --- a/stubs/workalendar/workalendar/core.pyi +++ b/stubs/workalendar/workalendar/core.pyi @@ -138,46 +138,25 @@ class CoreCalendar: def get_fixed_holidays(self, year): ... def get_variable_days(self, year): ... def get_calendar_holidays(self, year): ... - def holidays(self, year: Incomplete | None = None): ... + def holidays(self, year=None): ... def get_holiday_label(self, day): ... - def holidays_set(self, year: Incomplete | None = None): ... + def holidays_set(self, year=None): ... def get_weekend_days(self): ... 
- def is_working_day(self, day, extra_working_days: Incomplete | None = None, extra_holidays: Incomplete | None = None): ... - def is_holiday(self, day, extra_holidays: Incomplete | None = None): ... - def add_working_days( - self, - day, - delta, - extra_working_days: Incomplete | None = None, - extra_holidays: Incomplete | None = None, - keep_datetime: bool = False, - ): ... - def sub_working_days( - self, - day, - delta, - extra_working_days: Incomplete | None = None, - extra_holidays: Incomplete | None = None, - keep_datetime: bool = False, - ): ... + def is_working_day(self, day, extra_working_days=None, extra_holidays=None): ... + def is_holiday(self, day, extra_holidays=None): ... + def add_working_days(self, day, delta, extra_working_days=None, extra_holidays=None, keep_datetime: bool = False): ... + def sub_working_days(self, day, delta, extra_working_days=None, extra_holidays=None, keep_datetime: bool = False): ... def find_following_working_day(self, day): ... @staticmethod - def get_nth_weekday_in_month(year, month, weekday, n: int = 1, start: Incomplete | None = None): ... + def get_nth_weekday_in_month(year, month, weekday, n: int = 1, start=None): ... @staticmethod def get_last_weekday_in_month(year, month, weekday): ... @staticmethod def get_iso_week_date(year, week_nb, weekday=1): ... @staticmethod def get_first_weekday_after(day, weekday): ... - def get_working_days_delta( - self, - start, - end, - include_start: bool = False, - extra_working_days: Incomplete | None = None, - extra_holidays: Incomplete | None = None, - ): ... - def export_to_ical(self, period=[2000, 2030], target_path: Incomplete | None = None): ... + def get_working_days_delta(self, start, end, include_start: bool = False, extra_working_days=None, extra_holidays=None): ... + def export_to_ical(self, period=[2000, 2030], target_path=None): ... class Calendar(CoreCalendar): include_new_years_day: ClassVar[bool] diff --git a/stubs/workalendar/workalendar/europe/russia.pyi b/stubs/workalendar/workalendar/europe/russia.pyi index dca16c11e4ff..e82ebdb8c709 100644 --- a/stubs/workalendar/workalendar/europe/russia.pyi +++ b/stubs/workalendar/workalendar/europe/russia.pyi @@ -12,4 +12,4 @@ class Russia(OrthodoxCalendar): labour_day_label: ClassVar[str] def get_fixed_holidays(self, year): ... def get_calendar_holidays(self, year): ... - def is_working_day(self, day, extra_working_days: Incomplete | None = None, extra_holidays: Incomplete | None = None): ... + def is_working_day(self, day, extra_working_days=None, extra_holidays=None): ... diff --git a/stubs/workalendar/workalendar/registry.pyi b/stubs/workalendar/workalendar/registry.pyi index ae17a15a5561..b918ce9438a3 100644 --- a/stubs/workalendar/workalendar/registry.pyi +++ b/stubs/workalendar/workalendar/registry.pyi @@ -10,6 +10,6 @@ class IsoRegistry: def load_module_from_items(self, module_name, items) -> None: ... def get(self, iso_code): ... def get_subregions(self, iso_code): ... - def get_calendars(self, region_codes: Incomplete | None = None, include_subregions: bool = False): ... + def get_calendars(self, region_codes=None, include_subregions: bool = False): ... registry: IsoRegistry