diff --git a/mypy/build.py b/mypy/build.py
index ac8436ee10af..36cec9e86b39 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -389,7 +389,6 @@ def default_lib_path(data_dir: str,
 CacheMeta = NamedTuple('CacheMeta',
                        [('id', str),
                         ('path', str),
-                        ('memory_only', bool),  # no corresponding json files (fine-grained only)
                         ('mtime', int),
                         ('size', int),
                         ('hash', str),
@@ -415,7 +414,6 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta:
     return CacheMeta(
         meta.get('id', sentinel),
         meta.get('path', sentinel),
-        meta.get('memory_only', False),
         int(meta['mtime']) if 'mtime' in meta else sentinel,
         meta.get('size', sentinel),
         meta.get('hash', sentinel),
@@ -569,7 +567,7 @@ class BuildManager:
       plugin:          Active mypy plugin(s)
       errors:          Used for reporting all errors
       flush_errors:    A function for processing errors after each SCC
-      saved_cache:     Dict with saved cache state for dmypy and fine-grained incremental mode
+      saved_cache:     Dict with saved cache state for coarse-grained dmypy (read-write!)
       stats:           Dict with various instrumentation numbers
     """

@@ -626,6 +624,8 @@ def all_imported_modules_in_file(self,

         Return list of tuples (priority, module id, import line number)
         for all modules imported in file; lower numbers == higher priority.
+
+        Can generate blocking errors on bogus relative imports.
         """

         def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
@@ -640,6 +640,12 @@ def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
                     file_id = ".".join(file_id.split(".")[:-rel])
                 new_id = file_id + "." + imp.id if imp.id else file_id

+            if not new_id:
+                self.errors.set_file(file.path, file.name())
+                self.errors.report(imp.line, 0,
+                                   "No parent module -- cannot perform relative import",
+                                   blocker=True)
+
             return new_id

         res = []  # type: List[Tuple[int, str, int]]
@@ -1129,12 +1135,6 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
             manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id))
             return None

-    if meta.memory_only:
-        # Special case for fine-grained incremental mode when the JSON file is missing but
-        # we want to cache the module anyway.
-        manager.log('Memory-only metadata for {}'.format(id))
-        return meta
-
     assert path is not None, "Internal error: meta was provided without a path"
     # Check data_json; assume if its mtime matches it's good.
     # TODO: stat() errors
@@ -1623,7 +1623,8 @@ def __init__(self,
                     self.ignore_all = True
             else:
                 # In 'error' mode, produce special error messages.
-                manager.log("Skipping %s (%s)" % (path, id))
+                if id not in manager.missing_modules:
+                    manager.log("Skipping %s (%s)" % (path, id))
                 if follow_imports == 'error':
                     if ancestor_for:
                         self.skipping_ancestor(id, path, ancestor_for)
@@ -1675,7 +1676,7 @@ def __init__(self,
         else:
             # Parse the file (and then some) to get the dependencies.
             self.parse_file()
-            self.suppressed = []
+            self.compute_dependencies()
             self.child_modules = set()

     def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None:
@@ -1830,6 +1831,8 @@ def fix_suppressed_dependencies(self, graph: Graph) -> None:
         """
         # TODO: See if it's possible to move this check directly into parse_file in some way.
         # TODO: Find a way to write a test case for this fix.
+        # TODO: I suspect that splitting compute_dependencies() out from parse_file
+        # obviates the need for this but lacking a test case for the problem this fixed...
         silent_mode = (self.options.ignore_missing_imports or
                        self.options.follow_imports == 'skip')
         if not silent_mode:
@@ -1896,31 +1899,28 @@ def parse_file(self) -> None:
             # TODO: Why can't SemanticAnalyzerPass1 .analyze() do this?
             self.tree.names = manager.semantic_analyzer.globals

+        self.check_blockers()
+
+    def compute_dependencies(self) -> None:
+        """Compute a module's dependencies after parsing it.
+
+        This is used when we parse a file that we didn't have
+        up-to-date cache information for. When we have an up-to-date
+        cache, we just use the cached info.
+        """
+        manager = self.manager
+        assert self.tree is not None
+
         # Compute (direct) dependencies.
         # Add all direct imports (this is why we needed the first pass).
         # Also keep track of each dependency's source line.
         dependencies = []
-        suppressed = []
         priorities = {}  # type: Dict[str, int]  # id -> priority
         dep_line_map = {}  # type: Dict[str, int]  # id -> line
         for pri, id, line in manager.all_imported_modules_in_file(self.tree):
             priorities[id] = min(pri, priorities.get(id, PRI_ALL))
             if id == self.id:
                 continue
-            # Omit missing modules, as otherwise we could not type-check
-            # programs with missing modules.
-            if id in manager.missing_modules:
-                if id not in dep_line_map:
-                    suppressed.append(id)
-                    dep_line_map[id] = line
-                continue
-            if id == '':
-                # Must be from a relative import.
-                manager.errors.set_file(self.xpath, self.id)
-                manager.errors.report(line, 0,
-                                      "No parent module -- cannot perform relative import",
-                                      blocker=True)
-                continue
             if id not in dep_line_map:
                 dependencies.append(id)
                 dep_line_map[id] = line
@@ -1928,17 +1928,19 @@ def parse_file(self) -> None:
         if self.id != 'builtins' and 'builtins' not in dep_line_map:
             dependencies.append('builtins')

-        # If self.dependencies is already set, it was read from the
-        # cache, but for some reason we're re-parsing the file.
         # NOTE: What to do about race conditions (like editing the
         # file while mypy runs)? A previous version of this code
         # explicitly checked for this, but ran afoul of other reasons
         # for differences (e.g. silent mode).
+
+        # Missing dependencies will be moved from dependencies to
+        # suppressed when they fail to be loaded in load_graph.
         self.dependencies = dependencies
-        self.suppressed = suppressed
+        self.suppressed = []
         self.priorities = priorities
         self.dep_line_map = dep_line_map
-        self.check_blockers()
+
+        self.check_blockers()  # Can fail due to bogus relative imports

     def semantic_analysis(self) -> None:
         assert self.tree is not None, "Internal error: method must be called on parsed file only"
@@ -2193,13 +2195,19 @@ def dump_graph(graph: Graph) -> None:
     print("[" + ",\n ".join(node.dumps() for node in nodes) + "\n]")


-def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
+def load_graph(sources: List[BuildSource], manager: BuildManager,
+               old_graph: Optional[Graph] = None) -> Graph:
     """Given some source files, load the full dependency graph.

+    If an old_graph is passed in, it is used as the starting point and
+    modified during graph loading.
+
     As this may need to parse files, this can raise CompileError in case
     there are syntax errors.
     """
-    graph = {}  # type: Graph
+
+    graph = old_graph or {}  # type: Graph
+
     # The deque is used to implement breadth-first traversal.
     # TODO: Consider whether to go depth-first instead. This may
     # affect the order in which we process files within import cycles.
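A minimal sketch (illustrative, not part of the patch) of how fine-grained update code is expected to drive the new optional old_graph parameter of load_graph(): the stale State for the changed module is dropped and every other State in the old graph is reused in place. The helper name reload_one_module is an assumption; BuildManager, BuildSource, Graph and load_graph are the mypy.build names used in the hunks above.

    from mypy.build import BuildManager, BuildSource, Graph, load_graph

    def reload_one_module(module: str, path: str,
                          manager: BuildManager, graph: Graph) -> Graph:
        # Drop the stale State so load_graph() re-parses only this module;
        # every other State already present in the old graph is reused.
        if module in graph:
            del graph[module]
        sources = [BuildSource(path, module, None)]
        # Passing old_graph makes load_graph() extend the existing mapping
        # in place instead of rebuilding the whole import graph from scratch.
        return load_graph(sources, manager, old_graph=graph)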
diff --git a/mypy/checker.py b/mypy/checker.py
index 922238739475..e58aece05747 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -216,6 +216,24 @@ def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Option
         # for processing module top levels in fine-grained incremental mode.
         self.recurse_into_functions = True

+    def reset(self) -> None:
+        """Cleanup stale state that might be left over from a typechecking run.
+
+        This allows us to reuse TypeChecker objects in fine-grained
+        incremental mode.
+        """
+        # TODO: verify this is still actually worth it over creating new checkers
+        self.partial_reported.clear()
+        self.module_refs.clear()
+        self.binder = ConditionalTypeBinder()
+        self.type_map.clear()
+
+        assert self.inferred_attribute_types is None
+        assert self.partial_types == []
+        assert self.deferred_nodes == []
+        assert len(self.scope.stack) == 1
+        assert self.partial_types == []
+
     def check_first_pass(self) -> None:
         """Type check the entire file, but defer functions with unresolved references.

diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py
index cf4b15e3e6aa..a26a256fb011 100644
--- a/mypy/dmypy_server.py
+++ b/mypy/dmypy_server.py
@@ -282,10 +282,12 @@ def initialize_fine_grained(self, sources: List[mypy.build.BuildSource]) -> Dict
         if self.options.use_fine_grained_cache:
             # Pull times and hashes out of the saved_cache and stick them into
             # the fswatcher, so we pick up the changes.
-            for meta, mypyfile, type_map in manager.saved_cache.values():
-                if meta.mtime is None: continue
+            for state in self.fine_grained_manager.graph.values():
+                meta = state.meta
+                if meta is None: continue
+                assert state.path is not None
                 self.fswatcher.set_file_data(
-                    mypyfile.path,
+                    state.path,
                     FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash))

         # Run an update
diff --git a/mypy/server/update.py b/mypy/server/update.py
index d8408c6c09a9..93ff54537339 100644
--- a/mypy/server/update.py
+++ b/mypy/server/update.py
@@ -115,15 +115,14 @@
 - Fully support multiple type checking passes
 - Use mypy.fscache to access file system
-- Don't use load_graph() and update the import graph incrementally
 """

 import os.path

 from typing import Dict, List, Set, Tuple, Iterable, Union, Optional, Mapping, NamedTuple

 from mypy.build import (
-    BuildManager, State, BuildSource, Graph, load_graph, SavedCache, CacheMeta,
-    cache_meta_from_dict, find_module_clear_caches, DEBUG_FINE_GRAINED
+    BuildManager, State, BuildSource, Graph, load_graph, find_module_clear_caches,
+    PRI_INDIRECT, DEBUG_FINE_GRAINED,
 )
 from mypy.checker import DeferredNode
 from mypy.errors import Errors, CompileError
@@ -161,6 +160,7 @@ def __init__(self,
         self.previous_modules = get_module_to_path_map(manager)
         self.deps = get_all_dependencies(manager, graph, self.options)
         self.previous_targets_with_errors = manager.errors.targets()
+        self.graph = graph
         # Module, if any, that had blocking errors in the last run as (id, path) tuple.
         # TODO: Handle blocking errors in the initial build
         self.blocking_error = None  # type: Optional[Tuple[str, str]]
@@ -170,9 +170,7 @@ def __init__(self,
         # for the cache. This is kind of a hack and it might be better to have
         # this directly reflected in load_graph's interface.
         self.options.cache_dir = os.devnull
-        mark_all_meta_as_memory_only(graph, manager)
-        manager.saved_cache = preserve_full_cache(graph, manager)
-        self.type_maps = extract_type_maps(graph)
+        manager.saved_cache = {}

         # Active triggers during the last update
         self.triggered = []  # type: List[str]
@@ -253,6 +251,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
         # TODO: If new module brings in other modules, we parse some files multiple times.
         manager = self.manager
         previous_modules = self.previous_modules
+        graph = self.graph

         # Record symbol table snapshot of old version of the changed module.
         old_snapshots = {}  # type: Dict[str, Dict[str, SnapshotItem]]
@@ -261,14 +260,14 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
             old_snapshots[module] = snapshot

         manager.errors.reset()
-        result = update_single_isolated(module, path, manager, previous_modules)
+        result = update_single_isolated(module, path, manager, previous_modules, graph)
         if isinstance(result, BlockedUpdate):
             # Blocking error -- just give up
             module, path, remaining, errors = result
             self.previous_modules = get_module_to_path_map(manager)
             return errors, remaining, (module, path), True
         assert isinstance(result, NormalUpdate)  # Work around #4124
-        module, path, remaining, tree, graph = result
+        module, path, remaining, tree = result

         # TODO: What to do with stale dependencies?
         triggered = calculate_active_triggers(manager, old_snapshots, {module: tree})
@@ -285,34 +284,11 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],

         # Preserve state needed for the next update.
         self.previous_targets_with_errors = manager.errors.targets()
-        # If deleted, module won't be in the graph.
-        if module in graph:
-            # Generate metadata so that we can reuse the AST in the next run.
-            graph[module].write_cache()
-        for id, state in graph.items():
-            # Look up missing ASTs from saved cache.
-            if state.tree is None and id in manager.saved_cache:
-                meta, tree, type_map = manager.saved_cache[id]
-                state.tree = tree
-        mark_all_meta_as_memory_only(graph, manager)
-        manager.saved_cache = preserve_full_cache(graph, manager)
         self.previous_modules = get_module_to_path_map(manager)
-        self.type_maps = extract_type_maps(graph)

         return manager.errors.new_messages(), remaining, (module, path), False


-def mark_all_meta_as_memory_only(graph: Dict[str, State],
-                                 manager: BuildManager) -> None:
-    for id, state in graph.items():
-        if id in manager.saved_cache:
-            # Don't look at disk.
-            old = manager.saved_cache[id]
-            manager.saved_cache[id] = (old[0]._replace(memory_only=True),
-                                       old[1],
-                                       old[2])
-
-
 def get_all_dependencies(manager: BuildManager, graph: Dict[str, State],
                          options: Options) -> Dict[str, Set[str]]:
     """Return the fine-grained dependency map for an entire build."""
@@ -327,15 +303,13 @@ def get_all_dependencies(manager: BuildManager, graph: Dict[str, State],
 # - Id of the changed module (can be different from the module argument)
 # - Path of the changed module
 # - New AST for the changed module (None if module was deleted)
-# - The entire updated build graph
 # - Remaining changed modules that are not processed yet as (module id, path)
 #   tuples (non-empty if the original changed module imported other new
 #   modules)
 NormalUpdate = NamedTuple('NormalUpdate', [('module', str),
                                            ('path', str),
                                            ('remaining', List[Tuple[str, str]]),
-                                           ('tree', Optional[MypyFile]),
-                                           ('graph', Graph)])
+                                           ('tree', Optional[MypyFile])])


 # The result of update_single_isolated when there is a blocking error. Items
 # are similar to NormalUpdate (but there are fewer).
@@ -350,7 +324,8 @@ def get_all_dependencies(manager: BuildManager, graph: Dict[str, State],
 def update_single_isolated(module: str,
                            path: str,
                            manager: BuildManager,
-                           previous_modules: Dict[str, str]) -> UpdateResult:
+                           previous_modules: Dict[str, str],
+                           graph: Graph) -> UpdateResult:
     """Build a new version of one changed module only.

     Don't propagate changes to elsewhere in the program. Raise CompileError on
@@ -371,11 +346,14 @@ def update_single_isolated(module: str,

     old_modules = dict(manager.modules)
     sources = get_sources(previous_modules, [(module, path)])
-    invalidate_stale_cache_entries(manager.saved_cache, [(module, path)])

-    manager.missing_modules.clear()
+    if module in manager.missing_modules:
+        manager.missing_modules.remove(module)
+
     try:
-        graph = load_graph(sources, manager)
+        if module in graph:
+            del graph[module]
+        load_graph(sources, manager, graph)
     except CompileError as err:
         # Parse error somewhere in the program -- a blocker
         assert err.module_with_blocker
@@ -392,8 +370,8 @@ def update_single_isolated(module: str,
         return BlockedUpdate(err.module_with_blocker, path, remaining_modules, err.messages)

     if not os.path.isfile(path):
-        graph = delete_module(module, graph, manager)
-        return NormalUpdate(module, path, [], None, graph)
+        delete_module(module, graph, manager)
+        return NormalUpdate(module, path, [], None)

     # Find any other modules brought in by imports.
     changed_modules = get_all_changed_modules(module, path, previous_modules, graph)
@@ -437,6 +415,7 @@ def update_single_isolated(module: str,
     replace_modules_with_new_variants(manager, graph, old_modules, new_modules)

     # Perform type checking.
+    state.type_checker().reset()
     state.type_check_first_pass()
     state.type_check_second_pass()
     state.compute_fine_grained_deps()
@@ -446,7 +425,7 @@ def update_single_isolated(module: str,

     graph[module] = state

-    return NormalUpdate(module, path, remaining_modules, state.tree, graph)
+    return NormalUpdate(module, path, remaining_modules, state.tree)


 def find_relative_leaf_module(modules: List[Tuple[str, str]], graph: Graph) -> Tuple[str, str]:
@@ -483,13 +462,13 @@ def assert_equivalent_paths(path1: str, path2: str) -> None:


 def delete_module(module_id: str,
-                  graph: Dict[str, State],
-                  manager: BuildManager) -> Dict[str, State]:
+                  graph: Graph,
+                  manager: BuildManager) -> None:
     manager.log_fine_grained('delete module %r' % module_id)
     # TODO: Deletion of a package
     # TODO: Remove deps for the module (this only affects memory use, not correctness)
-    assert module_id not in graph
-    new_graph = graph.copy()
+    if module_id in graph:
+        del graph[module_id]
     if module_id in manager.modules:
         del manager.modules[module_id]
     if module_id in manager.saved_cache:
@@ -503,7 +482,6 @@ def delete_module(module_id: str,
         parent = manager.modules[parent_id]
         if components[-1] in parent.names:
             del parent.names[components[-1]]
-    return new_graph


 def dedupe_modules(modules: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
@@ -525,12 +503,9 @@ def get_sources(modules: Dict[str, str],
                 changed_modules: List[Tuple[str, str]]) -> List[BuildSource]:
     # TODO: Race condition when reading from the file system; we should only read each
     # bit of external state once during a build to have a consistent view of the world
-    items = sorted(modules.items(), key=lambda x: x[0])
-    sources = [BuildSource(path, id, None)
-               for id, path in items
-               if os.path.isfile(path)]
+    sources = []
     for id, path in changed_modules:
-        if os.path.isfile(path) and id not in modules:
+        if os.path.isfile(path):
             sources.append(BuildSource(path, id, None))
     return sources

@@ -549,80 +524,11 @@ def get_all_changed_modules(root_module: str,
     return changed_modules


-def preserve_full_cache(graph: Graph, manager: BuildManager) -> SavedCache:
-    """Preserve every module with an AST in the graph, including modules with errors."""
-    saved_cache = {}
-    for id, state in graph.items():
-        assert state.id == id
-        if state.tree is not None:
-            meta = state.meta
-            if meta is None:
-                # No metadata, likely because of an error. We still want to retain the AST.
-                # There is no corresponding JSON so create partial "memory-only" metadata.
-                assert state.path
-                dep_prios = state.dependency_priorities()
-                dep_lines = state.dependency_lines()
-                meta = memory_only_cache_meta(
-                    id,
-                    state.path,
-                    state.dependencies,
-                    state.suppressed,
-                    list(state.child_modules),
-                    dep_prios,
-                    dep_lines,
-                    state.source_hash,
-                    state.ignore_all,
-                    manager)
-            else:
-                meta = meta._replace(memory_only=True)
-            saved_cache[id] = (meta, state.tree, state.type_map())
-    return saved_cache
-
-
-def memory_only_cache_meta(id: str,
-                           path: str,
-                           dependencies: List[str],
-                           suppressed: List[str],
-                           child_modules: List[str],
-                           dep_prios: List[int],
-                           dep_lines: List[int],
-                           source_hash: str,
-                           ignore_all: bool,
-                           manager: BuildManager) -> CacheMeta:
-    """Create cache metadata for module that doesn't have a JSON cache files.
-
-    JSON cache files aren't written for modules with errors, but we want to still
-    cache them in fine-grained incremental mode.
-    """
-    options = manager.options.clone_for_module(id)
-    # Note that we omit attributes related to the JSON files.
-    meta = {'id': id,
-            'path': path,
-            'memory_only': True,  # Important bit: don't expect JSON files to exist
-            'hash': source_hash,
-            'dependencies': dependencies,
-            'suppressed': suppressed,
-            'child_modules': child_modules,
-            'options': options.select_options_affecting_cache(),
-            'dep_prios': dep_prios,
-            'dep_lines': dep_lines,
-            'interface_hash': '',
-            'version_id': manager.version_id,
-            'ignore_all': ignore_all,
-            }
-    return cache_meta_from_dict(meta, '')
-
-
-def invalidate_stale_cache_entries(cache: SavedCache,
-                                   changed_modules: List[Tuple[str, str]]) -> None:
-    for name, _ in changed_modules:
-        if name in cache:
-            del cache[name]
-
-
 def verify_dependencies(state: State, manager: BuildManager) -> None:
     """Report errors for import targets in module that don't exist."""
-    for dep in state.dependencies + state.suppressed:  # TODO: ancestors?
+    # Strip out indirect dependencies. See comment in build.load_graph().
+    dependencies = [dep for dep in state.dependencies if state.priorities.get(dep) != PRI_INDIRECT]
+    for dep in dependencies + state.suppressed:  # TODO: ancestors?
         if dep not in manager.modules and not manager.options.ignore_missing_imports:
             assert state.tree
             line = find_import_line(state.tree, dep) or 1
@@ -809,8 +715,8 @@ def reprocess_nodes(manager: BuildManager,

     Return fired triggers.
     """
-    if module_id not in manager.saved_cache or module_id not in graph:
-        manager.log_fine_grained('%s not in saved cache or graph (blocking errors or deleted?)' %
+    if module_id not in graph:
+        manager.log_fine_grained('%s not in graph (blocking errors or deleted?)' %
                                  module_id)
         return set()
@@ -863,10 +769,8 @@ def key(node: DeferredNode) -> int:
             merge_asts(file_node, old_symbols[name], file_node, new_symbols[name])

     # Type check.
-    meta, file_node, type_map = manager.saved_cache[module_id]
-    graph[module_id].tree = file_node
-    graph[module_id].type_checker().type_map = type_map
     checker = graph[module_id].type_checker()
+    checker.reset()
     # We seem to need additional passes in fine-grained incremental mode.
     checker.pass_num = 0
     checker.last_pass = 3
@@ -975,11 +879,5 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod
     return [DeferredNode(node, active_class_name, active_class)]


-def extract_type_maps(graph: Graph) -> Dict[str, Dict[Expression, Type]]:
-    # This is used to export information used only by the testmerge harness.
-    return {id: state.type_map() for id, state in graph.items()
-            if state.tree}
-
-
 def is_verbose(manager: BuildManager) -> bool:
     return manager.options.verbosity >= 1 or DEBUG_FINE_GRAINED
diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py
index cc79958d5c15..20dd7f9d6980 100644
--- a/mypy/test/testmerge.py
+++ b/mypy/test/testmerge.py
@@ -5,7 +5,7 @@
 from typing import List, Tuple, Dict, Optional

 from mypy import build
-from mypy.build import BuildManager, BuildSource, State
+from mypy.build import BuildManager, BuildSource, State, Graph
 from mypy.errors import Errors, CompileError
 from mypy.nodes import (
     Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression, Var, UNBOUND_IMPORTED
@@ -77,13 +77,13 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
         target_path = os.path.join(test_temp_dir, 'target.py')
         shutil.copy(os.path.join(test_temp_dir, 'target.py.next'), target_path)

-        a.extend(self.dump(manager, kind))
+        a.extend(self.dump(fine_grained_manager, kind))
         old_subexpr = get_subexpressions(manager.modules['target'])

         a.append('==>')

         new_file, new_types = self.build_increment(fine_grained_manager, 'target', target_path)
-        a.extend(self.dump(manager, kind))
+        a.extend(self.dump(fine_grained_manager, kind))

         for expr in old_subexpr:
             # Verify that old AST nodes are removed from the expression type map.
@@ -119,13 +119,13 @@ def build_increment(self, manager: FineGrainedBuildManager,
                                                        Dict[Expression, Type]]:
         manager.update([(module_id, path)])
         module = manager.manager.modules[module_id]
-        type_map = manager.type_maps[module_id]
+        type_map = manager.graph[module_id].type_map()
         return module, type_map

     def dump(self,
-             manager: BuildManager,
+             manager: FineGrainedBuildManager,
              kind: str) -> List[str]:
-        modules = manager.modules
+        modules = manager.manager.modules
         if kind == AST:
             return self.dump_asts(modules)
         elif kind == TYPEINFO:
@@ -203,14 +203,14 @@ def dump_typeinfo(self, info: TypeInfo) -> List[str]:
                                type_str_conv=self.type_str_conv)
         return s.splitlines()

-    def dump_types(self, manager: BuildManager) -> List[str]:
+    def dump_types(self, manager: FineGrainedBuildManager) -> List[str]:
         a = []
         # To make the results repeatable, we try to generate unique and
         # deterministic sort keys.
-        for module_id in sorted(manager.modules):
+        for module_id in sorted(manager.manager.modules):
             if not is_dumped_module(module_id):
                 continue
-            type_map = manager.saved_cache[module_id][2]
+            type_map = manager.graph[module_id].type_map()
             if type_map:
                 a.append('## {}'.format(module_id))
                 for expr in sorted(type_map, key=lambda n: (n.line, short_type(n),
diff --git a/test-data/unit/check-dmypy-fine-grained.test b/test-data/unit/check-dmypy-fine-grained.test
index 0544c9b700a7..0841085cdf7c 100644
--- a/test-data/unit/check-dmypy-fine-grained.test
+++ b/test-data/unit/check-dmypy-fine-grained.test
@@ -32,7 +32,7 @@ def f() -> str:
 [out2]
 tmp/b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str")

-[case testAddFileFineGrainedIncremental]
+[case testAddFileFineGrainedIncremental1]
 # cmd: mypy -m a
 # cmd2: mypy -m a b
 [file a.py]
@@ -46,6 +46,19 @@ tmp/a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-impor
 [out2]
 tmp/a.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"

+[case testAddFileFineGrainedIncremental2]
+# flags: --follow-imports=skip --ignore-missing-imports
+# cmd: mypy -m a
+# cmd2: mypy -m a b
+[file a.py]
+import b
+b.f(1)
+[file b.py.2]
+def f(x: str) -> None: pass
+[out1]
+[out2]
+tmp/a.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"
+
 [case testDeleteFileFineGrainedIncremental]
 # cmd: mypy -m a b
 # cmd2: mypy -m a
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index 599722e993f9..03582bad4e0e 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -4059,3 +4059,71 @@ class Baz:
 [out]
 [out2]
 tmp/a.py:3: error: Unsupported operand types for + ("int" and "str")
+
+[case testIncrementalWithIgnoresTwice]
+import a
+[file a.py]
+import b
+import foo # type: ignore
+[file b.py]
+x = 1
+[file b.py.2]
+x = 'hi'
+[file b.py.3]
+x = 1
+[builtins fixtures/module.pyi]
+[out]
+[out2]
+[out3]
+
+[case testIgnoredImport2]
+import x
+[file y.py]
+import xyz # type: ignore
+B = 0
+from x import A
+[file x.py]
+A = 0
+from y import B
+[file x.py.2]
+A = 1
+from y import B
+[file x.py.3]
+A = 2
+from y import B
+[out]
+[out2]
+[out3]
+
+[case testDeletionOfSubmoduleTriggersImportFrom2]
+from p.q import f
+f()
+[file p/__init__.py]
+[file p/q.py]
+def f() -> None: pass
+[delete p/q.py.2]
+[file p/q.py.3]
+def f(x: int) -> None: pass
+[out]
+[out2]
+main:1: error: Cannot find module named 'p.q'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+[out3]
+main:2: error: Too few arguments for "f"
+
+[case testDeleteIndirectDependency]
+import b
+b.x.foo()
+[file b.py]
+import c
+x = c.Foo()
+[file c.py]
+class Foo:
+    def foo(self) -> None: pass
+[delete c.py.2]
+[file b.py.2]
+class Foo:
+    def foo(self) -> None: pass
+x = Foo()
+[out]
+[out2]
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
index d8492dad0436..06e926c7711c 100644
--- a/test-data/unit/check-modules.test
+++ b/test-data/unit/check-modules.test
@@ -1943,3 +1943,15 @@ main:2: error: Name 'name' is not defined
 main:3: error: Revealed type is 'Any'

 [builtins fixtures/module.pyi]
+
+[case testFailedImportFromTwoModules]
+import c
+import b
+[file b.py]
+import c
+
+[out]
+-- TODO: it would be better for this to be in the other order
+tmp/b.py:1: error: Cannot find module named 'c'
+main:1: error: Cannot find module named 'c'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
diff --git a/test-data/unit/fine-grained-cycles.test b/test-data/unit/fine-grained-cycles.test
index 737d0ed0d4eb..2688e593482e 100644
--- a/test-data/unit/fine-grained-cycles.test
+++ b/test-data/unit/fine-grained-cycles.test
@@ -174,44 +174,7 @@ def h() -> None:
 ==
 a.py:1: error: Module 'b' has no attribute 'C'

-[case testReferenceToTypeThroughCycleAndReplaceWithFunction-skip-cache]
--- Different cache/no-cache tests because:
--- Cache mode has a "need type annotation" message (like coarse incremental does)
-
-import a
-
-[file a.py]
-from b import C
-
-def f() -> C: pass
-
-[file b.py]
-import a
-
-class C:
-    def g(self) -> None: pass
-
-def h() -> None:
-    c = a.f()
-    c.g()
-
-[file b.py.2]
-import a
-
-def C() -> int: pass
-
-def h() -> None:
-    c = a.f()
-    c.g()
-
-[out]
-==
-a.py:3: error: Invalid type "b.C"
-
-[case testReferenceToTypeThroughCycleAndReplaceWithFunction-skip-nocache]
--- Different cache/no-cache tests because:
--- Cache mode has a "need type annotation" message (like coarse incremental does)
-
+[case testReferenceToTypeThroughCycleAndReplaceWithFunction]
 import a

 [file a.py]
diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test
index 2c60ff30615f..c765af39553c 100644
--- a/test-data/unit/fine-grained-modules.test
+++ b/test-data/unit/fine-grained-modules.test
@@ -56,7 +56,7 @@ b.py:1: error: Cannot find module named 'a'
 b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
 ==

-[case testAddFileFixesAndGeneratesError]
+[case testAddFileFixesAndGeneratesError1]
 import b
 [file b.py]
 [file b.py.2]
@@ -76,6 +76,38 @@ b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports"
 ==
 b.py:2: error: Too many arguments for "f"

+[case testAddFileFixesAndGeneratesError2]
+import b
+[file b.py]
+[file b.py.2]
+from a import f
+f(1)
+[file c.py.3]
+x = 'whatever'
+[file a.py.4]
+def f() -> None: pass
+[out]
+==
+b.py:1: error: Cannot find module named 'a'
+b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+b.py:1: error: Cannot find module named 'a'
+b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+b.py:2: error: Too many arguments for "f"
+
+[case testAddFileGeneratesError1]
+# flags: --ignore-missing-imports
+import a
+[file a.py]
+from b import f
+f(1)
+[file b.py.2]
+def f() -> None: pass
+[out]
+==
+a.py:2: error: Too many arguments for "f"
+
 [case testAddFilePreservesError1]
 import b
 [file b.py]
@@ -236,8 +268,7 @@ main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports"

 [case testDeletionOfSubmoduleTriggersImportFrom1-skip-cache]
 -- Different cache/no-cache tests because:
--- Cache mode matches the message from regular mode and no-cache mode
--- matches the message from coarse incremental mode...
+-- missing module error message mismatch
 from p import q
 [file p/__init__.py]
 [file p/q.py]
@@ -245,15 +276,15 @@ from p import q
 [file p/q.py.3]
 [out]
 ==
-main:1: error: Cannot find module named 'p.q'
+main:1: error: Module 'p' has no attribute 'q'
 -- TODO: The following messages are different compared to non-incremental mode
+main:1: error: Cannot find module named 'p.q'
 main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
 ==

 [case testDeletionOfSubmoduleTriggersImportFrom1-skip-nocache]
 -- Different cache/no-cache tests because:
--- Cache mode matches the message from regular mode and no-cache mode
--- matches the message from coarse incremental mode...
+-- missing module error message mismatch
 from p import q
 [file p/__init__.py]
 [file p/q.py]
@@ -261,10 +292,10 @@ from p import q
 [file p/q.py.3]
 [out]
 ==
-main:1: error: Module 'p' has no attribute 'q'
+main:1: error: Cannot find module named 'p.q'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
 ==
-
 [case testDeletionOfSubmoduleTriggersImportFrom2]
 from p.q import f
 f()
@@ -638,7 +669,6 @@ main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"
 ==
 main:1: error: Cannot find module named 'p'
 main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:1: error: Cannot find module named 'p.a'

 [case testDeletePackage3]
 import p.a
@@ -846,3 +876,98 @@ x = 1
 [out]
 ==
 main:2: error: Incompatible types in assignment (expression has type Module, variable has type "str")
+
+[case testRefreshImportOfIgnoredModule1]
+# flags: --follow-imports=skip --ignore-missing-imports
+# cmd: mypy c.py a/__init__.py b.py
+[file c.py]
+from a import a2
+import b
+b.x
+[file a/__init__.py]
+[file b.py]
+x = 0
+[file b.py.2]
+x = ''
+[file b.py.3]
+x = 0
+[file a/a2.py]
+[out]
+==
+==
+
+[case testRefreshImportOfIgnoredModule2]
+# flags: --follow-imports=skip --ignore-missing-imports
+# cmd: mypy c.py a/__init__.py b.py
+[file c.py]
+from a import a2
+import b
+b.x
+[file a/__init__.py]
+[file b.py]
+x = 0
+[file b.py.2]
+x = ''
+[file b.py.3]
+x = 0
+[file a/a2/__init__.py]
+[out]
+==
+==
+
+[case testIncrementalWithIgnoresTwice]
+import a
+[file a.py]
+import b
+import foo # type: ignore
+[file b.py]
+x = 1
+[file b.py.2]
+x = 'hi'
+[file b.py.3]
+x = 1
+[out]
+==
+==
+
+[case testIgnoredImport2]
+import x
+[file y.py]
+import xyz # type: ignore
+B = 0
+from x import A
+[file x.py]
+A = 0
+from y import B
+[file x.py.2]
+A = 1
+from y import B
+[file x.py.3]
+A = 2
+from y import B
+[out]
+==
+==
+
+[case testDeleteIndirectDependency]
+import b
+b.x.foo()
+[file b.py]
+import c
+x = c.Foo()
+[file c.py]
+class Foo:
+    def foo(self) -> None: pass
+[delete c.py.2]
+[file b.py.2]
+class Foo:
+    def foo(self) -> None: pass
+x = Foo()
+[file b.py.3]
+class Foo:
+    def foo(self, x: int) -> None: pass
+x = Foo()
+[out]
+==
+==
+main:2: error: Too few arguments for "foo" of "Foo"
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
index 17b022744829..10abcdb72084 100644
--- a/test-data/unit/fine-grained.test
+++ b/test-data/unit/fine-grained.test
@@ -1108,9 +1108,9 @@ def f() -> Iterator[None]:
 [out]
 main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]'
 ==
+main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]'
 a.py:3: error: Cannot find module named 'b'
 a.py:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
"--ignore-missing-imports" flag would help) -main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]' == main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]' @@ -2355,7 +2355,7 @@ def f() -> None: d.py:3: error: Argument 1 to "A" has incompatible type "str"; expected "int" == -[case testNonePartialType] +[case testNonePartialType1] import a a.y