Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Invalidate all cache files if plugins change #5878

Merged
merged 6 commits into from Nov 14, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
71 changes: 66 additions & 5 deletions mypy/build.py
Expand Up @@ -21,6 +21,7 @@
import sys
import time
import errno
import types

from typing import (AbstractSet, Any, Dict, Iterable, Iterator, List,
Mapping, NamedTuple, Optional, Set, Tuple, Union, Callable)
Expand Down Expand Up @@ -183,7 +184,7 @@ def _build(sources: List[BuildSource],
reports = Reports(data_dir, options.report_dirs)
source_set = BuildSourceSet(sources)
errors = Errors(options.show_error_context, options.show_column_numbers)
plugin = load_plugins(options, errors)
plugin, snapshot = load_plugins(options, errors)

# Construct a build manager object to hold state during the build.
#
Expand All @@ -195,6 +196,7 @@ def _build(sources: List[BuildSource],
options=options,
version_id=__version__,
plugin=plugin,
plugins_snapshot=snapshot,
errors=errors,
flush_errors=flush_errors,
fscache=fscache)
Expand Down Expand Up @@ -304,17 +306,20 @@ def import_priority(imp: ImportBase, toplevel_priority: int) -> int:
return toplevel_priority


def load_plugins(options: Options, errors: Errors) -> Plugin:
def load_plugins(options: Options, errors: Errors) -> Tuple[Plugin, Dict[str, str]]:
"""Load all configured plugins.

Return a plugin that encapsulates all plugins chained together. Always
at least include the default plugin (it's last in the chain).
The second return value is a snapshot of versions/hashes of loaded user
plugins (for cache validation).
"""
import importlib
snapshot = {} # type: Dict[str, str]

default_plugin = DefaultPlugin(options) # type: Plugin
if not options.config_file:
return default_plugin
return default_plugin, snapshot

line = find_config_file_line_number(options.config_file, 'mypy', 'plugins')
if line == -1:
Expand Down Expand Up @@ -375,11 +380,27 @@ def plugin_error(message: str) -> None:
'(in {})'.format(plugin_path))
try:
custom_plugins.append(plugin_type(options))
snapshot[module_name] = take_module_snapshot(module)
except Exception:
print('Error constructing plugin instance of {}\n'.format(plugin_type.__name__))
raise # Propagate to display traceback
# Custom plugins take precedence over the default plugin.
return ChainedPlugin(options, custom_plugins + [default_plugin])
return ChainedPlugin(options, custom_plugins + [default_plugin]), snapshot


def take_module_snapshot(module: types.ModuleType) -> str:
    """Take plugin module snapshot by recording its version and hash.

    We record _both_ hash and the version to detect more possible changes
    (e.g. if there is a change in modules imported by a plugin).

    Returns a string of the form '<version>:<digest>', where '<version>' is
    'none' if the module declares no __version__, and '<digest>' is 'unknown'
    if the module has no source file to hash.
    """
    # __file__ may be missing (builtin/extension modules) or present but None
    # (namespace packages), so a plain hasattr() check is not enough.
    path = getattr(module, '__file__', None)
    if path is not None:
        with open(path, 'rb') as f:
            digest = hashlib.md5(f.read()).hexdigest()
    else:
        digest = 'unknown'
    ver = getattr(module, '__version__', 'none')
    return '{}:{}'.format(ver, digest)


def find_config_file_line_number(path: str, section: str, setting_name: str) -> int:
Expand Down Expand Up @@ -426,6 +447,11 @@ class BuildManager(BuildManagerBase):
stale_modules: Set of modules that needed to be rechecked (only used by tests)
version_id: The current mypy version (based on commit id when possible)
plugin: Active mypy plugin(s)
plugins_snapshot:
Snapshot of currently active user plugins (versions and hashes)
old_plugins_snapshot:
Plugins snapshot from previous incremental run (or None in
non-incremental mode and if cache was not found)
errors: Used for reporting all errors
flush_errors: A function for processing errors after each SCC
cache_enabled: Whether cache is being read. This is set based on options,
Expand All @@ -446,6 +472,7 @@ def __init__(self, data_dir: str,
options: Options,
version_id: str,
plugin: Plugin,
plugins_snapshot: Dict[str, str],
errors: Errors,
flush_errors: Callable[[List[str], bool], None],
fscache: FileSystemCache,
Expand All @@ -471,7 +498,6 @@ def __init__(self, data_dir: str,
self.indirection_detector = TypeIndirectionVisitor()
self.stale_modules = set() # type: Set[str]
self.rechecked_modules = set() # type: Set[str]
self.plugin = plugin
self.flush_errors = flush_errors
self.cache_enabled = options.incremental and (
not options.fine_grained_incremental or options.use_fine_grained_cache)
Expand All @@ -487,6 +513,9 @@ def __init__(self, data_dir: str,
in self.options.shadow_file}
# a mapping from each file being typechecked to its possible shadow file
self.shadow_equivalence_map = {} # type: Dict[str, Optional[str]]
self.plugin = plugin
self.plugins_snapshot = plugins_snapshot
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Document the new attributes in the class docstring.

self.old_plugins_snapshot = read_plugins_snapshot(self)

def use_fine_grained_cache(self) -> bool:
return self.cache_enabled and self.options.use_fine_grained_cache
Expand Down Expand Up @@ -685,6 +714,30 @@ def write_protocol_deps_cache(proto_deps: Dict[str, Set[str]],
blocker=True)


def write_plugins_snapshot(manager: BuildManager) -> None:
    """Write snapshot of versions and hashes of currently active plugins."""
    snapshot_file = os.path.join(_cache_dir_prefix(manager), '@plugins_snapshot.json')
    serialized = json.dumps(manager.plugins_snapshot)
    if not atomic_write(snapshot_file, serialized, '\n'):
        # A failed snapshot write means the cache cannot be validated later;
        # report it as a blocker against the cache directory.
        manager.errors.set_file(_cache_dir_prefix(manager), None)
        manager.errors.report(0, 0, "Error writing plugins snapshot",
                              blocker=True)


def read_plugins_snapshot(manager: BuildManager) -> Optional[Dict[str, str]]:
    """Read cached snapshot of versions and hashes of plugins from previous run."""
    snapshot_file = os.path.join(_cache_dir_prefix(manager), '@plugins_snapshot.json')
    # NOTE: 'log_sucess' (sic) is the actual keyword of _load_json_file.
    snapshot = _load_json_file(snapshot_file, manager,
                               log_sucess='Plugins snapshot ',
                               log_error='Could not load plugins snapshot: ')
    if isinstance(snapshot, dict):
        return snapshot
    if snapshot is not None:
        # Loaded JSON of an unexpected shape: treat as a cache miss.
        manager.log('Could not load plugins snapshot: cache is not a dict: {}'
                    .format(type(snapshot)))
    return None


def read_protocol_cache(manager: BuildManager,
graph: Graph) -> Optional[Dict[str, Set[str]]]:
"""Read and validate protocol dependencies cache.
Expand Down Expand Up @@ -848,6 +901,11 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
manager.trace(' {}: {} != {}'
.format(key, cached_options.get(key), current_options.get(key)))
return None
if manager.old_plugins_snapshot and manager.plugins_snapshot:
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What about adding a test for this?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We can't yet add tests for daemon. I opened #5891 for this.

# Check if plugins are still the same.
if manager.plugins_snapshot != manager.old_plugins_snapshot:
manager.log('Metadata abandoned for {}: plugins differ'.format(id))
return None

manager.add_stats(fresh_metas=1)
return m
Expand Down Expand Up @@ -2170,6 +2228,9 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
process_fine_grained_cache_graph(graph, manager)
else:
process_graph(graph, manager)
# Update plugins snapshot.
write_plugins_snapshot(manager)
manager.old_plugins_snapshot = manager.plugins_snapshot
if manager.options.cache_fine_grained or manager.options.fine_grained_incremental:
# If we are running a daemon or are going to write cache for further fine grained use,
# then we need to collect fine grained protocol dependencies.
Expand Down
6 changes: 6 additions & 0 deletions mypy/dmypy_server.py
Expand Up @@ -250,6 +250,12 @@ def cmd_run(self, version: str, args: Sequence[str]) -> Dict[str, object]:
return {'restart': 'configuration changed'}
if __version__ != version:
return {'restart': 'mypy version changed'}
if self.fine_grained_manager:
manager = self.fine_grained_manager.manager
start_plugins_snapshot = manager.plugins_snapshot
_, current_plugins_snapshot = mypy.build.load_plugins(options, manager.errors)
if current_plugins_snapshot != start_plugins_snapshot:
return {'restart': 'plugins changed'}
except InvalidSourceList as err:
return {'out': '', 'err': str(err), 'status': 2}
return self.check(sources)
Expand Down
9 changes: 8 additions & 1 deletion mypy/test/testcheck.py
Expand Up @@ -10,7 +10,9 @@
from mypy.build import Graph
from mypy.modulefinder import BuildSource, SearchPaths, FindModuleCache
from mypy.test.config import test_temp_dir, test_data_prefix
from mypy.test.data import DataDrivenTestCase, DataSuite, FileOperation, UpdateFile
from mypy.test.data import (
DataDrivenTestCase, DataSuite, FileOperation, UpdateFile, module_from_path
)
from mypy.test.helpers import (
assert_string_arrays_equal, normalize_error_messages, assert_module_equivalence,
retry_on_error, update_testcase_output, parse_options,
Expand Down Expand Up @@ -114,6 +116,11 @@ def run_case_once(self, testcase: DataDrivenTestCase,
original_program_text = '\n'.join(testcase.input)
module_data = self.parse_module(original_program_text, incremental_step)

# Unload already loaded plugins, they may be updated.
for file, _ in testcase.files:
module = module_from_path(file)
if module.endswith('_plugin') and module in sys.modules:
del sys.modules[module]
if incremental_step == 0 or incremental_step == 1:
# In run 1, copy program text to program file.
for module_name, program_path, program_text in module_data:
Expand Down
1 change: 1 addition & 0 deletions mypy/test/testgraph.py
Expand Up @@ -51,6 +51,7 @@ def _make_manager(self) -> BuildManager:
options=options,
version_id=__version__,
plugin=Plugin(options),
plugins_snapshot={},
errors=errors,
flush_errors=lambda msgs, serious: None,
fscache=fscache,
Expand Down
110 changes: 110 additions & 0 deletions test-data/unit/check-incremental.test
Expand Up @@ -24,6 +24,10 @@
-- # flags2: --another-flag
-- (and flags3 for the third run, and so on).
--
-- Incremental tests involving plugins that get updated are also supported.
-- All plugin files that are updated *must* end in '_plugin', so they will
-- be unloaded from 'sys.modules' between incremental steps.
--
-- Any files that we expect to be rechecked should be annotated in the [rechecked]
-- annotation, and any files expect to be stale (aka have a modified interface)
-- should be annotated in the [stale] annotation. Note that a file that ends up
Expand Down Expand Up @@ -5389,3 +5393,109 @@ E = Enum('E', f()) # type: ignore
[builtins fixtures/list.pyi]
[out]
[out2]

[case testChangedPluginsInvalidateCache]
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Maybe add another test where only the version changes (e.g. import version from another module so that the text of the plugin doesn't change).

# flags: --config-file tmp/mypy.ini
import a
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would it make sense to add a fine-grained incremental test case where a plugin changes?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As we discussed, this will have no effect.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hm, I have found there is another daemon command dmypy run, which unlike dmypy check restarts if options change. I think we can also restart in dmypy run if plugins change. There is however no way to test this currently.

[file a.py]
from b import x
y: int = x

[file a.py.2]
from b import x
y: int = x
touch = 1

[file b.py]
class C: ...
def f() -> C: ...
x = f()

[file basic_plugin.py]
from mypy.plugin import Plugin

class MyPlugin(Plugin):
    # Test plugin: hook every call to a function whose name ends in '.f'.
    def get_function_hook(self, fullname):
        if fullname.endswith('.f'):
            return my_hook
        assert fullname is not None
        return None

def my_hook(ctx):
    # Force hooked calls to be typed as 'builtins.int'.
    return ctx.api.named_generic_type('builtins.int', [])

def plugin(version):
    # Plugin entry point required by mypy.
    return MyPlugin

[file basic_plugin.py.2]
from mypy.plugin import Plugin

class MyPlugin(Plugin):
    # Updated test plugin: same hook target, but the result type changes
    # (int -> str), so the cached snapshot must be invalidated.
    def get_function_hook(self, fullname):
        if fullname.endswith('.f'):
            return my_hook
        assert fullname is not None
        return None

def my_hook(ctx):
    # Force hooked calls to be typed as 'builtins.str'.
    return ctx.api.named_generic_type('builtins.str', [])

def plugin(version):
    # Plugin entry point required by mypy.
    return MyPlugin
[file mypy.ini]
[[mypy]
plugins=basic_plugin.py
[out]
[out2]
tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int")

[case testChangedPluginsInvalidateCache2]
# flags: --config-file tmp/mypy.ini
import a
[file a.py]
from b import x
y: int = x

[file a.py.2]
from b import x
y: int = x
touch = 1

[file b.py]
class C: ...
def f() -> C: ...
x = f()

[file basic_plugin.py]
from mypy.plugin import Plugin
# Importing from another module keeps this file's text unchanged between
# steps; only version_plugin.py changes, exercising import-chain detection.
from version_plugin import __version__, choice

class MyPlugin(Plugin):
    # Test plugin: hook every call to a function whose name ends in '.f'.
    def get_function_hook(self, fullname):
        if fullname.endswith('.f'):
            return my_hook
        assert fullname is not None
        return None

def my_hook(ctx):
    # The hook result depends on the imported 'choice' flag, so flipping it
    # in version_plugin.py changes type-checking results.
    if choice:
        return ctx.api.named_generic_type('builtins.int', [])
    else:
        return ctx.api.named_generic_type('builtins.str', [])

def plugin(version):
    # Plugin entry point required by mypy.
    return MyPlugin

[file version_plugin.py]
__version__ = 0.1
choice = True

[file version_plugin.py.2]
__version__ = 0.2
choice = False
[file mypy.ini]
[[mypy]
plugins=basic_plugin.py
[out]
[out2]
tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int")