Add a bound to the inference tips cache
Small bounds still yield roughly equal numbers of hits and misses.

Further work could determine whether storing only the last result
is optimal.
jacobtylerwalls committed May 15, 2023
1 parent c807c03 · commit 4757af2
Showing 1 changed file with 9 additions and 3 deletions.
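
The hits-and-misses observation in the commit message can be checked empirically. Below is a hypothetical probe, not part of this commit: it wraps a small bounded OrderedDict in hit and miss counters and replays a synthetic workload. The bound, the key pattern, and the names (probe_cache, BOUND) are all illustrative assumptions.

from collections import OrderedDict

BOUND = 8  # deliberately small, in the spirit of the experiment described
probe_cache: OrderedDict[int, str] = OrderedDict()
hits = misses = 0

# Synthetic workload: cycle through more distinct keys than the bound,
# so re-lookups can miss because their entries were already evicted.
for key in list(range(12)) * 50:
    if key in probe_cache:
        hits += 1
    else:
        misses += 1
        probe_cache[key] = f"result-{key}"
        if len(probe_cache) > BOUND:
            probe_cache.popitem(last=False)  # drop the oldest entry (FIFO)

# Cyclic access over more keys than the bound thrashes a FIFO cache:
# this prints hits=0 misses=600. Real inference workloads are less
# adversarial, which is presumably why the commit message reports a
# roughly even split.
print(f"hits={hits} misses={misses}")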
astroid/inference_tip.py

@@ -6,6 +6,7 @@
 
 from __future__ import annotations
 
+from collections import OrderedDict
 from collections.abc import Generator
 from typing import Any, TypeVar
 
@@ -18,9 +19,9 @@
     TransformFn,
 )
 
-_cache: dict[
+_cache: OrderedDict[
     tuple[InferFn[Any], NodeNG, InferenceContext | None], list[InferenceResult]
-] = {}
+] = OrderedDict()
 
 _CURRENTLY_INFERRING: set[tuple[InferFn[Any], NodeNG]] = set()
 
@@ -61,14 +62,19 @@ def inner(
             _CURRENTLY_INFERRING.add(partial_cache_key)
             try:
                 # May raise UseInferenceDefault
-                result = _cache[func, node, context] = list(func(node, context, **kwargs))
+                result = _cache[func, node, context] = list(
+                    func(node, context, **kwargs)
+                )
             finally:
                 # Remove recursion guard.
                 try:
                     _CURRENTLY_INFERRING.remove(partial_cache_key)
                 except KeyError:
                     pass  # Recursion may beat us to the punch.
 
+        if len(_cache) > 64:
+            _cache.popitem(last=False)
+
         yield from result
 
     return inner
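
For reference, here is a minimal standalone sketch, not part of the commit, of the eviction behavior the added lines introduce. OrderedDict remembers insertion order, so popitem(last=False) discards the entry inserted earliest; because lookups never reposition entries, the policy is FIFO rather than LRU. The bound of 64 comes from the diff; the name bounded and the string keys are illustrative.

from collections import OrderedDict

bounded: OrderedDict[str, int] = OrderedDict()

for i in range(70):
    bounded[f"key-{i}"] = i
    # Mirror of the new lines in the diff: once the mapping outgrows the
    # bound, drop the entry that was inserted first (FIFO eviction).
    if len(bounded) > 64:
        bounded.popitem(last=False)

assert len(bounded) == 64
assert "key-5" not in bounded  # keys 0 through 5 were evicted
assert "key-6" in bounded and "key-69" in bounded

A bound of 1 would keep only the most recently computed result, which is the variant the commit message leaves as further work.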
