Add a test to check if numpy tensorflow stuff is now cached, see #1116
davidhalter committed Jan 5, 2020
1 parent ea0972d commit 5da9f9f
Showing 3 changed files with 49 additions and 3 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.rst
@@ -15,6 +15,8 @@ Changelog
- Completion for "proxies" works now. These are classes that have a
  ``__getattr__(self, name)`` method that does a ``return getattr(x, name)``.
- Understanding of Pytest fixtures.
- Tensorflow, Numpy and Pandas completions should now be about 4-10x faster
  after loading them initially.
- Big **Script API Changes**:
    - The line and column parameters of ``jedi.Script`` are now deprecated
    - ``completions`` deprecated, use ``complete`` instead
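A minimal sketch of the Script API change noted in the changelog above (assuming jedi 0.16-style calls; the file name and source in the snippet are illustrative, and the deprecated calls keep working but emit warnings):

import jedi

source = 'import json; json.lo'

# Deprecated style: the position is passed to the jedi.Script constructor.
script = jedi.Script(source, 1, len(source), 'example.py')
completions = script.completions()

# New style: the position is passed to Script.complete() instead.
script = jedi.Script(source, path='example.py')
completions = script.complete(1, len(source))

print([c.name for c in completions])  # e.g. ['load', 'loads']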
20 changes: 17 additions & 3 deletions test/conftest.py
@@ -1,16 +1,17 @@
import os
import re
import subprocess
from itertools import count

import pytest

from . import helpers
from . import run
from . import refactor

import jedi
from jedi.api.environment import InterpreterEnvironment
from jedi.inference.compiled.value import create_from_access_path
from jedi.inference.imports import _load_python_module
from jedi.file_io import KnownContentFileIO
from jedi.inference.base_value import ValueSet


def pytest_addoption(parser):
@@ -144,3 +145,16 @@ def create_compiled_object(inference_state):
        inference_state,
        inference_state.compiled_subprocess.create_simple_object(obj)
    )


@pytest.fixture
def module_injector():
    counter = count()

    def module_injector(inference_state, names, code):
        assert isinstance(names, tuple)
        # Wrap the given source in an in-memory pseudo-file and load it as a module.
        file_io = KnownContentFileIO('/foo/bar/module-injector-%s.py' % next(counter), code)
        v = _load_python_module(inference_state, file_io, names)
        # Register the module in the cache so imports of ``names`` resolve to the injected code.
        inference_state.module_cache.add(names, ValueSet([v]))

    return module_injector
30 changes: 30 additions & 0 deletions test/test_api/test_completion.py
@@ -392,3 +392,33 @@ def test_fuzzy_match():

def test_ellipsis_completion(Script):
    assert Script('...').complete() == []


def test_completion_cache(Script, module_injector):
"""
For some modules like numpy, tensorflow or pandas we cache docstrings and
type to avoid them slowing us down, because they are huge.
"""
script = Script('import numpy; numpy.foo')
module_injector(script._inference_state, ('numpy',), 'def foo(a): "doc"')
c, = script.complete()
assert c.name == 'foo'
assert c.type == 'function'
assert c.docstring() == 'foo(a)\n\ndoc'

code = dedent('''\
class foo:
'doc2'
def __init__(self):
pass
''')
script = Script('import numpy; numpy.foo')
module_injector(script._inference_state, ('numpy',), code)
# The outpus should still be the same
c, = script.complete()
assert c.name == 'foo'
assert c.type == 'function'
assert c.docstring() == 'foo(a)\n\ndoc'
cls, = c.infer()
assert cls.type == 'class'
assert cls.docstring() == 'foo()\n\ndoc2'
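
To picture what the test above relies on, here is a purely illustrative toy cache (the class and method names are hypothetical, not jedi internals): completion metadata for a heavyweight module is computed once and reused even if the module's code changes afterwards, while ``infer()`` still resolves against the current module.

class ToyCompletionCache:
    # Toy model: maps a module name to precomputed completion metadata.

    def __init__(self):
        self._cache = {}

    def get_or_compute(self, module_name, compute):
        # Compute the metadata only on the first request; later requests reuse
        # it, which is what makes repeated numpy/tensorflow/pandas completions fast.
        if module_name not in self._cache:
            self._cache[module_name] = compute()
        return self._cache[module_name]


cache = ToyCompletionCache()
meta = cache.get_or_compute('numpy', lambda: {'foo': ('function', 'foo(a)\n\ndoc')})
# A changed module does not invalidate the already cached metadata:
meta = cache.get_or_compute('numpy', lambda: {'foo': ('class', 'foo()\n\ndoc2')})
assert meta['foo'] == ('function', 'foo(a)\n\ndoc')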
