From 84fcec2b74e1424fc8da92f763b27d217b483870 Mon Sep 17 00:00:00 2001 From: Michael Sullivan Date: Wed, 24 Jan 2018 12:11:43 -0800 Subject: [PATCH 1/8] WIP merge correctness checker --- mypy/server/mergecheck.py | 141 +++++++++++++++++++++++++++++++ mypy/test/testfinegrained.py | 4 + test-data/unit/fine-grained.test | 6 +- test-data/unit/merge.test | 24 ++++++ 4 files changed, 172 insertions(+), 3 deletions(-) create mode 100644 mypy/server/mergecheck.py diff --git a/mypy/server/mergecheck.py b/mypy/server/mergecheck.py new file mode 100644 index 000000000000..be9844a04a13 --- /dev/null +++ b/mypy/server/mergecheck.py @@ -0,0 +1,141 @@ +from collections import deque +from collections.abc import Iterable +from typing import List, Dict, Iterator, Optional, Tuple, Mapping +import weakref +import types + +method_descriptor_type = type(object.__dir__) +method_wrapper_type = type(object().__ne__) +wrapper_descriptor_type = type(object.__ne__) +ATTR_BLACKLIST = { + '__doc__', + '__name__', + '__class__', + '__dict__', + + # mypy specific attr blacklists + 'indirection_detector', + 'all_types', + 'type_maps', + 'semantic_analyzer', # semantic analyzer has stale caches + 'semantic_analyzer_pass3', # semantic analyzer has stale caches +} +TYPE_BLACKLIST = { + int, + float, + str, + weakref.ReferenceType, +} + + +def get_edge_candidates(o: object) -> Iterator[Tuple[object, object]]: + for attr in dir(o): + if attr not in ATTR_BLACKLIST and hasattr(o, attr): + yield attr, getattr(o, attr) + if isinstance(o, Iterable) and not isinstance(o, str): + for i, e in enumerate(o): + yield i, e + if isinstance(o, Mapping): + for k, v in o.items(): + yield k, v + + +def get_edges(o: object) -> Iterator[Tuple[object, object]]: + for s, e in get_edge_candidates(o): + if ( + isinstance(e, types.BuiltinFunctionType) or + isinstance(e, types.FunctionType) or + isinstance(e, types.MethodType) or + isinstance(e, method_descriptor_type) or + isinstance(e, wrapper_descriptor_type) or + isinstance(e, method_wrapper_type)): + # We don't want to collect methods, but do want to collect values + # in closures and self pointers to other objects + + if hasattr(e, '__closure__'): + yield (s, '__closure__'), getattr(e, '__closure__') + if hasattr(e, '__self__'): + se = getattr(e, '__self__') + if se is not o and se is not type(o): + yield (s, '__self__'), se + else: + if not type(e) in TYPE_BLACKLIST: + yield s, e + + +def get_reachable_graph(root: object) -> Tuple[Dict[int, object], + Dict[int, Tuple[int, object]]]: + parents = {} + seen = {id(root): root} + worklist = [root] + while worklist: + o = worklist.pop() + for s, e in get_edges(o): + if id(e) in seen: continue + parents[id(e)] = (id(o), s) + seen[id(e)] = e + worklist.append(e) + + return seen, parents + + +def find_all_reachable(root: object) -> List[object]: + return list(get_reachable_graph(root)[0].values()) + + +def aggregate_by_type(objs: List[object]) -> Dict[type, List[object]]: + m = {} # type: Dict[type, List[object]] + for o in objs: + m.setdefault(type(o), []).append(o) + return m + + +def get_path(o: object, + seen: Dict[int, object], + parents: Dict[int, Tuple[int, object]]) -> List[Tuple[object, object]]: + path = [] + while id(o) in parents: + pid, attr = parents[id(o)] + o = seen[pid] + path.append((attr, o)) + path.reverse() + return path + + +##################################################### + +from mypy.nodes import SymbolNode, Var, Decorator, OverloadedFuncDef, FuncDef + +PRINT_MISMATCH = False +def check_consistency(o: object) -> None: + 
seen, parents = get_reachable_graph(o) + reachable = list(seen.values()) + syms = [x for x in reachable if isinstance(x, SymbolNode)] + + m = {} # type: Dict[str, SymbolNode] + for sym in syms: + fn = sym.fullname() + # Skip stuff that should be expected to have duplicate names + if isinstance(sym, Var): continue + if isinstance(sym, Decorator): continue + if isinstance(sym, FuncDef) and sym.is_overload: continue + + if fn not in m: + m[sym.fullname()] = sym + continue + + # We have trouble and need to decide what to do about it. + sym1, sym2 = sym, m[fn] + + # If the type changed, then it shouldn't have been merged + if type(sym1) is not type(sym2): continue + + # XXX: It is wrong even if the dicts match but it is extra + # wrong if they don't, so I have been looking for those cases. + # if m[fn].__dict__ is sym.__dict__: continue + + path1 = get_path(sym1, seen, parents) + path2 = get_path(sym2, seen, parents) + if PRINT_MISMATCH: + print(sym1, sym2, path1, path2) + assert sym.fullname() not in m diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index 469f97e6b519..b13d72cded60 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -30,6 +30,8 @@ from mypy.test.testtypegen import ignore_node from mypy.types import TypeStrVisitor, Type from mypy.util import short_type +from mypy.server.mergecheck import check_consistency + import pytest # type: ignore # no pytest in typeshed @@ -80,6 +82,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: fine_grained_manager = None if not self.use_cache: fine_grained_manager = FineGrainedBuildManager(manager, graph) + # check_consistency(fine_grained_manager) steps = testcase.find_steps() all_triggered = [] @@ -107,6 +110,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: fine_grained_manager = FineGrainedBuildManager(manager, graph) new_messages = fine_grained_manager.update(modules) + # check_consistency(fine_grained_manager) all_triggered.append(fine_grained_manager.triggered) new_messages = normalize_messages(new_messages) diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 17b022744829..2ba327d39f9f 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -216,13 +216,13 @@ def f(a: m.A) -> None: a.f() [file m.py] class A: - def f(self) -> None: pass + def f(self) -> None: 1 [file m.py.2] class A: - def g(self) -> None: pass + def g(self) -> None: 2 [file m.py.3] class A: - def f(self) -> None: pass + def f(self) -> None: 3 [out] == main:3: error: "A" has no attribute "f" diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index cb5a11ac502e..b6c9beae00c2 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -1333,3 +1333,27 @@ target: C: TypeInfo<3> D: TypeInfo<5> NewType: Var<4> + +[case testCallable_symtable] +import target +[file target.py] +def g(o: object) -> None: + if callable(o): + pass +[file target.py.next] +def g(o: object) -> None: + if callable(o): + o() +[builtins fixtures/callable.pyi] +[out] +__main__: + target: MypyFile<0> +target: + : TypeInfo<1> + g: FuncDef<2> +==> +__main__: + target: MypyFile<0> +target: + : TypeInfo<1> + g: FuncDef<2> From 52586409fc183c2695f860368a7a312ade30cb13 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 27 Feb 2018 10:10:21 +0000 Subject: [PATCH 2/8] Various updates to merge checking --- mypy/server/mergecheck.py | 98 ++++++++++++++++++++++++++++++++------- 1 file changed, 80 insertions(+), 18 deletions(-) diff --git 
a/mypy/server/mergecheck.py b/mypy/server/mergecheck.py index be9844a04a13..4b7c378d2ac6 100644 --- a/mypy/server/mergecheck.py +++ b/mypy/server/mergecheck.py @@ -4,9 +4,20 @@ import weakref import types +from mypy.nodes import SymbolNode, Var, Decorator, OverloadedFuncDef, FuncDef + + method_descriptor_type = type(object.__dir__) method_wrapper_type = type(object().__ne__) wrapper_descriptor_type = type(object.__ne__) + +FUNCTION_TYPES = (types.BuiltinFunctionType, + types.FunctionType, + types.MethodType, + method_descriptor_type, + wrapper_descriptor_type, + method_wrapper_type) + ATTR_BLACKLIST = { '__doc__', '__name__', @@ -20,35 +31,57 @@ 'semantic_analyzer', # semantic analyzer has stale caches 'semantic_analyzer_pass3', # semantic analyzer has stale caches } -TYPE_BLACKLIST = { + +# Instances of these types can't have references to other objects +ATOMIC_TYPE_BLACKLIST = { + bool, int, float, str, + type(None), + object, +} + +COLLECTION_TYPE_BLACKLIST = { + list, + set, + dict, + tuple, +} + +TYPE_BLACKLIST = { weakref.ReferenceType, } +def isproperty(o: object, attr: str) -> bool: + return isinstance(getattr(type(o), attr, None), property) + + def get_edge_candidates(o: object) -> Iterator[Tuple[object, object]]: - for attr in dir(o): - if attr not in ATTR_BLACKLIST and hasattr(o, attr): - yield attr, getattr(o, attr) - if isinstance(o, Iterable) and not isinstance(o, str): - for i, e in enumerate(o): - yield i, e + if type(o) not in COLLECTION_TYPE_BLACKLIST: + for attr in dir(o): + if attr not in ATTR_BLACKLIST and hasattr(o, attr) and not isproperty(o, attr): + e = getattr(o, attr) + if not type(e) in ATOMIC_TYPE_BLACKLIST: + yield attr, e if isinstance(o, Mapping): for k, v in o.items(): yield k, v + elif isinstance(o, Iterable) and not isinstance(o, str): + for i, e in enumerate(o): + yield i, e def get_edges(o: object) -> Iterator[Tuple[object, object]]: for s, e in get_edge_candidates(o): - if ( - isinstance(e, types.BuiltinFunctionType) or - isinstance(e, types.FunctionType) or - isinstance(e, types.MethodType) or - isinstance(e, method_descriptor_type) or - isinstance(e, wrapper_descriptor_type) or - isinstance(e, method_wrapper_type)): + #if isinstance(e, (types.BuiltinFunctionType, + # method_descriptor_type, + # wrapper_descriptor_type)): + # print(s, e) + #else: + # print(s, type(e)) + if (isinstance(e, FUNCTION_TYPES)): # We don't want to collect methods, but do want to collect values # in closures and self pointers to other objects @@ -104,9 +137,10 @@ def get_path(o: object, ##################################################### -from mypy.nodes import SymbolNode, Var, Decorator, OverloadedFuncDef, FuncDef -PRINT_MISMATCH = False +DUMP_MISMATCH_NODES = False + + def check_consistency(o: object) -> None: seen, parents = get_reachable_graph(o) reachable = list(seen.values()) @@ -115,6 +149,8 @@ def check_consistency(o: object) -> None: m = {} # type: Dict[str, SymbolNode] for sym in syms: fn = sym.fullname() + if fn is None: + continue # Skip stuff that should be expected to have duplicate names if isinstance(sym, Var): continue if isinstance(sym, Decorator): continue @@ -136,6 +172,32 @@ def check_consistency(o: object) -> None: path1 = get_path(sym1, seen, parents) path2 = get_path(sym2, seen, parents) - if PRINT_MISMATCH: - print(sym1, sym2, path1, path2) + if DUMP_MISMATCH_NODES and fn in m: + print('---') + print(id(sym1), sym1) + print('---') + print(id(sym2), sym2) + + if fn in m: + print('\nDuplicate %r nodes with fullname %r found:' % (type(sym).__name__, 
fn)) + print('[1] %d: %s' % (id(sym1), path_to_str(path1))) + print('[2] %d: %s' % (id(sym2), path_to_str(path2))) assert sym.fullname() not in m + + +def path_to_str(path: List[Tuple[object, object]]) -> str: + result = '' + for attr, obj in path: + t = type(obj).__name__ + if t in ('dict', 'tuple', 'SymbolTable', 'list'): + result += '[%s]' % repr(attr) + else: + if t == 'Var': + result += '.%s(%s:%s)' % (attr, t, obj.name()) + elif t in ('BuildManager', 'FineGrainedBuildManager'): + # Omit class name for some classes that aren't part of a class + # hierarchy since there isn't much ambiguity. + result += '.%s' % attr + else: + result += '.%s(%s)' % (attr, t) + return result From 63d1335a252ebb8bb1b0cb4f811c576eceb2b5b9 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 27 Feb 2018 12:04:51 +0000 Subject: [PATCH 3/8] Refactor mergecheck --- mypy/server/mergecheck.py | 168 +++++--------------------------------- mypy/server/objgraph.py | 132 ++++++++++++++++++++++++++++++ 2 files changed, 151 insertions(+), 149 deletions(-) create mode 100644 mypy/server/objgraph.py diff --git a/mypy/server/mergecheck.py b/mypy/server/mergecheck.py index 4b7c378d2ac6..35a0e7a7ff20 100644 --- a/mypy/server/mergecheck.py +++ b/mypy/server/mergecheck.py @@ -1,147 +1,14 @@ -from collections import deque -from collections.abc import Iterable -from typing import List, Dict, Iterator, Optional, Tuple, Mapping -import weakref -import types +from typing import Dict, List, Tuple from mypy.nodes import SymbolNode, Var, Decorator, OverloadedFuncDef, FuncDef - - -method_descriptor_type = type(object.__dir__) -method_wrapper_type = type(object().__ne__) -wrapper_descriptor_type = type(object.__ne__) - -FUNCTION_TYPES = (types.BuiltinFunctionType, - types.FunctionType, - types.MethodType, - method_descriptor_type, - wrapper_descriptor_type, - method_wrapper_type) - -ATTR_BLACKLIST = { - '__doc__', - '__name__', - '__class__', - '__dict__', - - # mypy specific attr blacklists - 'indirection_detector', - 'all_types', - 'type_maps', - 'semantic_analyzer', # semantic analyzer has stale caches - 'semantic_analyzer_pass3', # semantic analyzer has stale caches -} - -# Instances of these types can't have references to other objects -ATOMIC_TYPE_BLACKLIST = { - bool, - int, - float, - str, - type(None), - object, -} - -COLLECTION_TYPE_BLACKLIST = { - list, - set, - dict, - tuple, -} - -TYPE_BLACKLIST = { - weakref.ReferenceType, -} - - -def isproperty(o: object, attr: str) -> bool: - return isinstance(getattr(type(o), attr, None), property) - - -def get_edge_candidates(o: object) -> Iterator[Tuple[object, object]]: - if type(o) not in COLLECTION_TYPE_BLACKLIST: - for attr in dir(o): - if attr not in ATTR_BLACKLIST and hasattr(o, attr) and not isproperty(o, attr): - e = getattr(o, attr) - if not type(e) in ATOMIC_TYPE_BLACKLIST: - yield attr, e - if isinstance(o, Mapping): - for k, v in o.items(): - yield k, v - elif isinstance(o, Iterable) and not isinstance(o, str): - for i, e in enumerate(o): - yield i, e - - -def get_edges(o: object) -> Iterator[Tuple[object, object]]: - for s, e in get_edge_candidates(o): - #if isinstance(e, (types.BuiltinFunctionType, - # method_descriptor_type, - # wrapper_descriptor_type)): - # print(s, e) - #else: - # print(s, type(e)) - if (isinstance(e, FUNCTION_TYPES)): - # We don't want to collect methods, but do want to collect values - # in closures and self pointers to other objects - - if hasattr(e, '__closure__'): - yield (s, '__closure__'), getattr(e, '__closure__') - if hasattr(e, 
'__self__'): - se = getattr(e, '__self__') - if se is not o and se is not type(o): - yield (s, '__self__'), se - else: - if not type(e) in TYPE_BLACKLIST: - yield s, e - - -def get_reachable_graph(root: object) -> Tuple[Dict[int, object], - Dict[int, Tuple[int, object]]]: - parents = {} - seen = {id(root): root} - worklist = [root] - while worklist: - o = worklist.pop() - for s, e in get_edges(o): - if id(e) in seen: continue - parents[id(e)] = (id(o), s) - seen[id(e)] = e - worklist.append(e) - - return seen, parents - - -def find_all_reachable(root: object) -> List[object]: - return list(get_reachable_graph(root)[0].values()) - - -def aggregate_by_type(objs: List[object]) -> Dict[type, List[object]]: - m = {} # type: Dict[type, List[object]] - for o in objs: - m.setdefault(type(o), []).append(o) - return m - - -def get_path(o: object, - seen: Dict[int, object], - parents: Dict[int, Tuple[int, object]]) -> List[Tuple[object, object]]: - path = [] - while id(o) in parents: - pid, attr = parents[id(o)] - o = seen[pid] - path.append((attr, o)) - path.reverse() - return path - - -##################################################### +from mypy.server.objgraph import get_reachable_graph, get_path DUMP_MISMATCH_NODES = False def check_consistency(o: object) -> None: + """Fail if there are two AST nodes with the same fullname reachable from 'o'.""" seen, parents = get_reachable_graph(o) reachable = list(seen.values()) syms = [x for x in reachable if isinstance(x, SymbolNode)] @@ -149,12 +16,15 @@ def check_consistency(o: object) -> None: m = {} # type: Dict[str, SymbolNode] for sym in syms: fn = sym.fullname() + # Skip None names, since they are ambiguous. + # TODO: Everything should have a proper full name? if fn is None: continue # Skip stuff that should be expected to have duplicate names - if isinstance(sym, Var): continue - if isinstance(sym, Decorator): continue - if isinstance(sym, FuncDef) and sym.is_overload: continue + if isinstance(sym, (Var, Decorator)): + continue + if isinstance(sym, FuncDef) and sym.is_overload: + continue if fn not in m: m[sym.fullname()] = sym @@ -163,25 +33,25 @@ def check_consistency(o: object) -> None: # We have trouble and need to decide what to do about it. sym1, sym2 = sym, m[fn] - # If the type changed, then it shouldn't have been merged - if type(sym1) is not type(sym2): continue - - # XXX: It is wrong even if the dicts match but it is extra - # wrong if they don't, so I have been looking for those cases. - # if m[fn].__dict__ is sym.__dict__: continue + # If the type changed, then it shouldn't have been merged. + if type(sym1) is not type(sym2): + continue path1 = get_path(sym1, seen, parents) path2 = get_path(sym2, seen, parents) + + if fn in m: + print('\nDuplicate %r nodes with fullname %r found:' % (type(sym).__name__, fn)) + print('[1] %d: %s' % (id(sym1), path_to_str(path1))) + print('[2] %d: %s' % (id(sym2), path_to_str(path2))) + if DUMP_MISMATCH_NODES and fn in m: + # Add verbose output with full AST node contents. 
print('---') print(id(sym1), sym1) print('---') print(id(sym2), sym2) - if fn in m: - print('\nDuplicate %r nodes with fullname %r found:' % (type(sym).__name__, fn)) - print('[1] %d: %s' % (id(sym1), path_to_str(path1))) - print('[2] %d: %s' % (id(sym2), path_to_str(path2))) assert sym.fullname() not in m diff --git a/mypy/server/objgraph.py b/mypy/server/objgraph.py new file mode 100644 index 000000000000..1d4f627edfcb --- /dev/null +++ b/mypy/server/objgraph.py @@ -0,0 +1,132 @@ +"""Find all objects reachable from a root object.""" + +from collections import deque +from collections.abc import Iterable +from typing import List, Dict, Iterator, Optional, Tuple, Mapping +import weakref +import types + + +method_descriptor_type = type(object.__dir__) +method_wrapper_type = type(object().__ne__) +wrapper_descriptor_type = type(object.__ne__) + +FUNCTION_TYPES = (types.BuiltinFunctionType, + types.FunctionType, + types.MethodType, + method_descriptor_type, + wrapper_descriptor_type, + method_wrapper_type) + +ATTR_BLACKLIST = { + '__doc__', + '__name__', + '__class__', + '__dict__', + + # mypy specific attr blacklists + 'indirection_detector', + 'all_types', + 'type_maps', + 'semantic_analyzer', # semantic analyzer has stale caches + 'semantic_analyzer_pass3', # semantic analyzer has stale caches +} + +# Instances of these types can't have references to other objects +ATOMIC_TYPE_BLACKLIST = { + bool, + int, + float, + str, + type(None), + object, +} + +# Don't look at most attributes of these types +COLLECTION_TYPE_BLACKLIST = { + list, + set, + dict, + tuple, +} + +# Don't return these objects +TYPE_BLACKLIST = { + weakref.ReferenceType, +} + + +def isproperty(o: object, attr: str) -> bool: + return isinstance(getattr(type(o), attr, None), property) + + +def get_edge_candidates(o: object) -> Iterator[Tuple[object, object]]: + if type(o) not in COLLECTION_TYPE_BLACKLIST: + for attr in dir(o): + if attr not in ATTR_BLACKLIST and hasattr(o, attr) and not isproperty(o, attr): + e = getattr(o, attr) + if not type(e) in ATOMIC_TYPE_BLACKLIST: + yield attr, e + if isinstance(o, Mapping): + for k, v in o.items(): + yield k, v + elif isinstance(o, Iterable) and not isinstance(o, str): + for i, e in enumerate(o): + yield i, e + + +def get_edges(o: object) -> Iterator[Tuple[object, object]]: + for s, e in get_edge_candidates(o): + if (isinstance(e, FUNCTION_TYPES)): + # We don't want to collect methods, but do want to collect values + # in closures and self pointers to other objects + + if hasattr(e, '__closure__'): + yield (s, '__closure__'), getattr(e, '__closure__') + if hasattr(e, '__self__'): + se = getattr(e, '__self__') + if se is not o and se is not type(o): + yield (s, '__self__'), se + else: + if not type(e) in TYPE_BLACKLIST: + yield s, e + + +def get_reachable_graph(root: object) -> Tuple[Dict[int, object], + Dict[int, Tuple[int, object]]]: + parents = {} + seen = {id(root): root} + worklist = [root] + while worklist: + o = worklist.pop() + for s, e in get_edges(o): + if id(e) in seen: + continue + parents[id(e)] = (id(o), s) + seen[id(e)] = e + worklist.append(e) + + return seen, parents + + +def find_all_reachable(root: object) -> List[object]: + return list(get_reachable_graph(root)[0].values()) + + +def aggregate_by_type(objs: List[object]) -> Dict[type, List[object]]: + m = {} # type: Dict[type, List[object]] + for o in objs: + m.setdefault(type(o), []).append(o) + return m + + +def get_path(o: object, + seen: Dict[int, object], + parents: Dict[int, Tuple[int, object]]) -> 
List[Tuple[object, object]]: + path = [] + while id(o) in parents: + pid, attr = parents[id(o)] + o = seen[pid] + path.append((attr, o)) + path.reverse() + return path From de46f9349ffa5ea3ec2854f84c7d604a30aad6fc Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 27 Feb 2018 12:06:26 +0000 Subject: [PATCH 4/8] Add constant to enable mergecheck --- mypy/test/testfinegrained.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index b13d72cded60..f102d6571da2 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -35,6 +35,10 @@ import pytest # type: ignore # no pytest in typeshed +# Set to True to perform (somewhat expensive) checks for duplicate AST nodes after merge +CHECK_CONSISTENCY = False + + class FineGrainedSuite(DataSuite): files = [ 'fine-grained.test', @@ -82,7 +86,8 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: fine_grained_manager = None if not self.use_cache: fine_grained_manager = FineGrainedBuildManager(manager, graph) - # check_consistency(fine_grained_manager) + if CHECK_CONSISTENCY: + check_consistency(fine_grained_manager) steps = testcase.find_steps() all_triggered = [] @@ -110,7 +115,8 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: fine_grained_manager = FineGrainedBuildManager(manager, graph) new_messages = fine_grained_manager.update(modules) - # check_consistency(fine_grained_manager) + if CHECK_CONSISTENCY: + check_consistency(fine_grained_manager) all_triggered.append(fine_grained_manager.triggered) new_messages = normalize_messages(new_messages) From 3b79b43bb490b7ac009ce10161f588eefc6f858b Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 27 Feb 2018 16:34:14 +0000 Subject: [PATCH 5/8] Docstring and comment updates --- mypy/server/mergecheck.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/mypy/server/mergecheck.py b/mypy/server/mergecheck.py index 35a0e7a7ff20..91239d390493 100644 --- a/mypy/server/mergecheck.py +++ b/mypy/server/mergecheck.py @@ -1,14 +1,20 @@ +"""Check for duplicate AST nodes after merge.""" + from typing import Dict, List, Tuple from mypy.nodes import SymbolNode, Var, Decorator, OverloadedFuncDef, FuncDef from mypy.server.objgraph import get_reachable_graph, get_path +# If True, print more verbose output on failure. DUMP_MISMATCH_NODES = False def check_consistency(o: object) -> None: - """Fail if there are two AST nodes with the same fullname reachable from 'o'.""" + """Fail if there are two AST nodes with the same fullname reachable from 'o'. + + Raise AssertionError on failure and print some debugging output. 
+ """ seen, parents = get_reachable_graph(o) reachable = list(seen.values()) syms = [x for x in reachable if isinstance(x, SymbolNode)] From 5032341021208b5800d65cad5137376a39a40785 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 27 Feb 2018 16:36:20 +0000 Subject: [PATCH 6/8] Revert test case change --- test-data/unit/fine-grained.test | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 2ba327d39f9f..17b022744829 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -216,13 +216,13 @@ def f(a: m.A) -> None: a.f() [file m.py] class A: - def f(self) -> None: 1 + def f(self) -> None: pass [file m.py.2] class A: - def g(self) -> None: 2 + def g(self) -> None: pass [file m.py.3] class A: - def f(self) -> None: 3 + def f(self) -> None: pass [out] == main:3: error: "A" has no attribute "f" From ecc1b6bca2139c4265a5821ff833ed3da73978ca Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 27 Feb 2018 16:42:44 +0000 Subject: [PATCH 7/8] Fix tests --- mypy/server/objgraph.py | 6 +++--- test-data/unit/merge.test | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/mypy/server/objgraph.py b/mypy/server/objgraph.py index 1d4f627edfcb..e15e780406b8 100644 --- a/mypy/server/objgraph.py +++ b/mypy/server/objgraph.py @@ -24,12 +24,12 @@ '__class__', '__dict__', - # mypy specific attr blacklists + # Mypy specific attribute blacklists 'indirection_detector', 'all_types', 'type_maps', - 'semantic_analyzer', # semantic analyzer has stale caches - 'semantic_analyzer_pass3', # semantic analyzer has stale caches + 'semantic_analyzer', # Semantic analyzer has stale caches + 'semantic_analyzer_pass3', # Semantic analyzer has stale caches } # Instances of these types can't have references to other objects diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index b6c9beae00c2..5d539ce29e7a 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -1334,7 +1334,8 @@ target: D: TypeInfo<5> NewType: Var<4> -[case testCallable_symtable] +[case testCallable_symtable-skip] +# The TypeInfo is currently not being merged correctly import target [file target.py] def g(o: object) -> None: From 5c241fb2c956e5e7a2a3d4e2b5e898772ef984b6 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 28 Feb 2018 10:11:23 +0000 Subject: [PATCH 8/8] Fix type check error --- mypy/server/mergecheck.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/server/mergecheck.py b/mypy/server/mergecheck.py index 91239d390493..aab591b813be 100644 --- a/mypy/server/mergecheck.py +++ b/mypy/server/mergecheck.py @@ -68,7 +68,7 @@ def path_to_str(path: List[Tuple[object, object]]) -> str: if t in ('dict', 'tuple', 'SymbolTable', 'list'): result += '[%s]' % repr(attr) else: - if t == 'Var': + if isinstance(obj, Var): result += '.%s(%s:%s)' % (attr, t, obj.name()) elif t in ('BuildManager', 'FineGrainedBuildManager'): # Omit class name for some classes that aren't part of a class
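
Illustrative usage sketch (not part of the series): the snippet below exercises the object-graph helpers that PATCH 3/8 moves into mypy/server/objgraph.py, together with the path_to_str() formatter that PATCH 2/8 adds to mypy/server/mergecheck.py. Everything except those imports is invented for the example (the Node class and all variable names are hypothetical), and it assumes a mypy checkout with this series applied on the import path.

# Sketch only. Assumes a checkout with these patches applied; the Node class
# and all names below are made up for illustration.
from mypy.server.objgraph import aggregate_by_type, get_path, get_reachable_graph
from mypy.server.mergecheck import path_to_str

class Node:
    def __init__(self, name):
        self.name = name        # str attributes are skipped (ATOMIC_TYPE_BLACKLIST)
        self.children = []      # lists are walked element-wise, not via dir()

root = Node('root')
child = Node('child')
root.children.append(child)
child.sibling = Node('leaf')    # dynamically added attributes are found via dir()

# Every object reachable from 'root' through attributes, sequence items and
# mapping items, keyed by id(); 'parents' records how each one was first reached.
seen, parents = get_reachable_graph(root)

leaf = next(o for o in seen.values() if isinstance(o, Node) and o.name == 'leaf')

# (attribute, parent-object) steps from the root down to 'leaf'.
path = get_path(leaf, seen, parents)
print(path_to_str(path))        # something like ".children(Node)[0].sibling(Node)"

# Reachable objects grouped by type; besides Node and list this typically picks
# up a few incidental built-ins reached through bound methods.
print(sorted(t.__name__ for t in aggregate_by_type(list(seen.values()))))

get_path() returning the (attribute, parent) steps from the root down is what makes the duplicate-node reports printed by check_consistency() actionable: each of the two colliding SymbolNodes is shown with the chain of references that keeps it alive.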
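
A second hedged sketch, this time of how the duplicate-AST check gets switched on: PATCH 4/8 gates it behind the module-level CHECK_CONSISTENCY constant in mypy/test/testfinegrained.py. Editing that constant to True is the intended switch; flipping it at import time instead, as below (for example from a local conftest.py), is an assumption about one convenient way to drive it, not something the patches themselves do.

# Sketch only: assumes the patched mypy tree is importable and that pytest is
# installed (testfinegrained imports it at module level).
import mypy.test.testfinegrained as testfinegrained

# With the flag on, FineGrainedSuite calls check_consistency() on the
# FineGrainedBuildManager after the initial fine-grained build (in non-cache
# runs) and after each update step; a failure raises AssertionError and prints
# the two access paths (rendered by path_to_str) of the colliding SymbolNodes.
testfinegrained.CHECK_CONSISTENCY = True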