From 8f9223bfe13200c685fc97c25ada3015a69c6df7 Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Wed, 17 Apr 2024 18:47:58 -0400
Subject: [PATCH] chore(internal): ban usage of lru_cache (#1331)

---
 pyproject.toml              | 7 ++++++-
 src/openai/_base_client.py  | 3 +--
 src/openai/_models.py       | 2 +-
 src/openai/_utils/_utils.py | 4 +++-
 4 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index b59317912..dd6d2f10a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -167,7 +167,9 @@ select = [
   "T201",
   "T203",
   # misuse of typing.TYPE_CHECKING
-  "TCH004"
+  "TCH004",
+  # import rules
+  "TID251",
 ]
 ignore = [
   # mutable defaults
@@ -183,6 +185,9 @@ ignore-init-module-imports = true
 [tool.ruff.format]
 docstring-code-format = true
 
+[tool.ruff.lint.flake8-tidy-imports.banned-api]
+"functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; The `lru_cache` function from `_utils` should be used instead"
+
 [tool.ruff.lint.isort]
 length-sort = true
 length-sort-straight = true
diff --git a/src/openai/_base_client.py b/src/openai/_base_client.py
index 0bb284a21..cd8361607 100644
--- a/src/openai/_base_client.py
+++ b/src/openai/_base_client.py
@@ -29,7 +29,6 @@
     cast,
     overload,
 )
-from functools import lru_cache
 from typing_extensions import Literal, override, get_origin
 
 import anyio
@@ -61,7 +60,7 @@
     RequestOptions,
     ModelBuilderProtocol,
 )
-from ._utils import is_dict, is_list, is_given, is_mapping
+from ._utils import is_dict, is_list, is_given, lru_cache, is_mapping
 from ._compat import model_copy, model_dump
 from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
 from ._response import (
diff --git a/src/openai/_models.py b/src/openai/_models.py
index 80ab51256..ff93fbd84 100644
--- a/src/openai/_models.py
+++ b/src/openai/_models.py
@@ -4,7 +4,6 @@
 import inspect
 from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
 from datetime import date, datetime
-from functools import lru_cache
 from typing_extensions import (
     Unpack,
     Literal,
@@ -37,6 +36,7 @@
     PropertyInfo,
     is_list,
     is_given,
+    lru_cache,
     is_mapping,
     parse_date,
     coerce_boolean,
diff --git a/src/openai/_utils/_utils.py b/src/openai/_utils/_utils.py
index 5123a230f..fd3a8a4d1 100644
--- a/src/openai/_utils/_utils.py
+++ b/src/openai/_utils/_utils.py
@@ -395,5 +395,7 @@ def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]:
     """A version of functools.lru_cache that retains the type
     signature for the wrapped function arguments.
     """
-    wrapper = functools.lru_cache(maxsize=maxsize)
+    wrapper = functools.lru_cache(  # noqa: TID251
+        maxsize=maxsize,
+    )
     return cast(Any, wrapper)  # type: ignore[no-any-return]
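
Note (not part of the patch): a minimal, self-contained sketch of the pattern this change enforces. The `CallableT` TypeVar shown here is assumed to mirror the one already defined in src/openai/_utils/_utils.py, and the `add` function is a hypothetical example. Because the decorator factory is annotated as `Callable[[CallableT], CallableT]`, type checkers keep seeing the wrapped function's original parameter and return types, whereas decorating with `functools.lru_cache` directly widens the parameters to the generic `_lru_cache_wrapper` call signature.

from __future__ import annotations

import functools
from typing import Any, Callable, TypeVar, cast

# Assumed to mirror the TypeVar defined elsewhere in _utils/_utils.py.
CallableT = TypeVar("CallableT", bound=Callable[..., Any])


def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]:
    """A version of functools.lru_cache that retains the type
    signature for the wrapped function arguments.
    """
    wrapper = functools.lru_cache(  # noqa: TID251
        maxsize=maxsize,
    )
    return cast(Any, wrapper)  # type: ignore[no-any-return]


@lru_cache(maxsize=None)
def add(a: int, b: int) -> int:  # hypothetical example function
    return a + b


# Type checkers still see add as (a: int, b: int) -> int, so add("1", "2")
# is rejected statically; with functools.lru_cache applied directly, the
# parameter types would be lost. At runtime the result is cached as usual:
print(add(1, 2))  # -> 3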