[WIP] Fail fast if module like typing not from typeshed #12955

Closed · wants to merge 5 commits
38 changes: 26 additions & 12 deletions mypy/build.py
@@ -603,8 +603,27 @@ def __init__(self, data_dir: str,
or options.use_fine_grained_cache)
and not has_reporters)
self.fscache = fscache

self.find_module_cache = FindModuleCache(self.search_paths, self.fscache, self.options,
source_set=self.source_set)

# Check for shadowed core library modules as soon as we have a FindModuleCache
for module in CORE_BUILTIN_MODULES:
# This module is built into Python so it doesn't exist on disk
if module == '_importlib_modulespec':
continue
path = self.find_module_cache.find_module(module)
if not isinstance(path, str):
raise CompileError([
"Failed to find builtin module {module}, perhaps typeshed is broken?",
])
if (not is_typeshed_file(path, options.custom_typeshed_dir)
and not is_stub_package_file(path)):
raise CompileError([
f'mypy: "{os.path.relpath(path)}" shadows library module "{module}"',
f'note: A user-defined top-level module with name "{module}" is not supported'
])

self.metastore = create_metastore(options)

# a mapping from source files to their corresponding shadow files
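Because the flat diff view above drops Python indentation, here is the added fail-fast check rewritten as a self-contained sketch. The helpers are stubbed (their real counterparts are `FindModuleCache.find_module`, `is_typeshed_file`, and `is_stub_package_file` from this diff), and the `CORE_BUILTIN_MODULES` contents shown are illustrative rather than the exact list from `mypy/build.py`.

```python
import os
from typing import List, Optional

# Illustrative only; the real list lives in mypy/build.py.
CORE_BUILTIN_MODULES = ['builtins', 'typing', 'types', 'abc', '_importlib_modulespec']


class CompileError(Exception):
    """Stand-in for mypy.errors.CompileError (takes a list of message lines)."""

    def __init__(self, messages: List[str]) -> None:
        super().__init__("\n".join(messages))
        self.messages = messages


def find_module(module: str) -> Optional[str]:
    """Stand-in for FindModuleCache.find_module(): a source/stub path, or None."""
    raise NotImplementedError


def is_typeshed_file(path: str, custom_typeshed_dir: Optional[str]) -> bool:
    raise NotImplementedError


def is_stub_package_file(path: str) -> bool:
    raise NotImplementedError


def check_core_modules_come_from_typeshed(custom_typeshed_dir: Optional[str]) -> None:
    """Fail fast if any core library module resolves to a user-defined file."""
    for module in CORE_BUILTIN_MODULES:
        if module == '_importlib_modulespec':
            continue  # built into Python, never present on disk
        path = find_module(module)
        if not isinstance(path, str):
            raise CompileError([
                f"Failed to find builtin module {module}, perhaps typeshed is broken?",
            ])
        if (not is_typeshed_file(path, custom_typeshed_dir)
                and not is_stub_package_file(path)):
            raise CompileError([
                f'mypy: "{os.path.relpath(path)}" shadows library module "{module}"',
                f'note: A user-defined top-level module with name "{module}" is not supported',
            ])
```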
@@ -2374,14 +2393,11 @@ def generate_unused_ignore_notes(self) -> None:
# those errors to avoid spuriously flagging them as unused ignores.
if self.meta:
self.verify_dependencies(suppressed_only=True)
self.manager.errors.generate_unused_ignore_errors(self.xpath)
self.manager.errors.generate_unused_ignore_errors(self.xpath, self.options)

def generate_ignore_without_code_notes(self) -> None:
if self.manager.errors.is_error_code_enabled(codes.IGNORE_WITHOUT_CODE):
self.manager.errors.generate_ignore_without_code_errors(
self.xpath,
self.options.warn_unused_ignores,
)
self.manager.errors.generate_ignore_without_code_errors(self.xpath, self.options)


# Module import and diagnostic glue
@@ -2459,15 +2475,13 @@ def find_module_and_diagnose(manager: BuildManager,
if is_sub_path(result, dir):
# Silence errors in site-package dirs and typeshed
follow_imports = 'silent'
if (id in CORE_BUILTIN_MODULES
and not is_typeshed_file(result)
and not is_stub_package_file(result)
and not options.use_builtins_fixtures
and not options.custom_typeshed_dir):
""" if (id in CORE_BUILTIN_MODULES
and not is_typeshed_file(result, options.custom_typeshed_dir)
and not is_stub_package_file(result)):
raise CompileError([
f'mypy: "{os.path.relpath(result)}" shadows library module "{id}"',
f'note: A user-defined top-level module with name "{id}" is not supported'
])
]) """
return (result, follow_imports)
else:
# Could not find a module. Typically the reason is a
@@ -3176,7 +3190,7 @@ def process_stale_scc(graph: Graph, scc: List[str], manager: BuildManager) -> None:
# SemanticAnalyzerPass2.add_builtin_aliases for details.
typing_mod = graph['typing'].tree
assert typing_mod, "The typing module was not parsed"
mypy.semanal_main.semantic_analysis_for_scc(graph, scc, manager.errors)
mypy.semanal_main.semantic_analysis_for_scc(graph, scc, manager.errors, manager.options)

# Track what modules aren't yet done so we can finish them as soon
# as possible, saving memory.
2 changes: 1 addition & 1 deletion mypy/checker.py
@@ -258,7 +258,7 @@ def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Options,
self.pass_num = 0
self.current_node_deferred = False
self.is_stub = tree.is_stub
self.is_typeshed_stub = is_typeshed_file(path)
self.is_typeshed_stub = is_typeshed_file(path, options.custom_typeshed_dir)
self.inferred_attribute_types = None
if options.strict_optional_whitelist is None:
self.suppress_none_errors = not options.show_none_errors
11 changes: 6 additions & 5 deletions mypy/errors.py
@@ -550,9 +550,10 @@ def clear_errors_in_targets(self, path: str, targets: Set[str]) -> None:
if not has_blocker and path in self.has_blockers:
self.has_blockers.remove(path)

def generate_unused_ignore_errors(self, file: str) -> None:
def generate_unused_ignore_errors(self, file: str, options: Options) -> None:
custom_typeshed_dir = options.custom_typeshed_dir
ignored_lines = self.ignored_lines[file]
if not is_typeshed_file(file) and file not in self.ignored_files:
if not is_typeshed_file(file, custom_typeshed_dir) and file not in self.ignored_files:
ignored_lines = self.ignored_lines[file]
used_ignored_lines = self.used_ignored_lines[file]
for line, ignored_codes in ignored_lines.items():
@@ -577,8 +578,8 @@ def generate_unused_ignore_errors(self, file: str) -> None:

def generate_ignore_without_code_errors(self,
file: str,
is_warning_unused_ignores: bool) -> None:
if is_typeshed_file(file) or file in self.ignored_files:
options: Options) -> None:
if is_typeshed_file(file, options.custom_typeshed_dir) or file in self.ignored_files:
return

used_ignored_lines = self.used_ignored_lines[file]
@@ -595,7 +596,7 @@ def generate_ignore_without_code_errors(self,

# If the ignore is itself unused and that would be warned about, let
# that error stand alone
if is_warning_unused_ignores and not used_ignored_lines[line]:
if options.warn_unused_ignores and not used_ignored_lines[line]:
continue

codes_hint = ''
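Side note on the behaviour these two methods now read from `Options` (`custom_typeshed_dir` and `warn_unused_ignores`): the `continue` near the end of this hunk means an unused bare ignore only gets the unused-ignore error. The snippet below is a hypothetical example file, not part of the diff, spelling out that interaction:

```python
# Hypothetical example.py, checked with:
#   mypy --warn-unused-ignores --enable-error-code ignore-without-code example.py

x: int = "oops"  # type: ignore
# ^ this ignore is used but carries no error code, so the
#   ignore-without-code note is reported for this line.

y: int = 1  # type: ignore
# ^ this ignore is unused: --warn-unused-ignores flags it, and the
#   ignore-without-code note is suppressed so the unused-ignore
#   error "stands alone" (the `continue` branch above).
```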
3 changes: 2 additions & 1 deletion mypy/semanal.py
@@ -573,9 +573,10 @@ def file_context(self,
self.errors.set_file(file_node.path, file_node.fullname, scope=scope)
self.cur_mod_node = file_node
self.cur_mod_id = file_node.fullname
custom_typeshed_dir = options.custom_typeshed_dir
with scope.module_scope(self.cur_mod_id):
self._is_stub_file = file_node.path.lower().endswith('.pyi')
self._is_typeshed_stub_file = is_typeshed_file(file_node.path)
self._is_typeshed_stub_file = is_typeshed_file(file_node.path, custom_typeshed_dir)
self.globals = file_node.names
self.tvar_scope = TypeVarLikeScope()

25 changes: 17 additions & 8 deletions mypy/semanal_main.py
@@ -65,7 +65,10 @@
core_modules: Final = ['typing', 'builtins', 'abc', 'collections']


def semantic_analysis_for_scc(graph: 'Graph', scc: List[str], errors: Errors) -> None:
def semantic_analysis_for_scc(graph: 'Graph',
scc: List[str],
errors: Errors,
options: Options) -> None:
"""Perform semantic analysis for all modules in a SCC (import cycle).

Assume that reachability analysis has already been performed.
@@ -85,7 +88,7 @@ def semantic_analysis_for_scc(graph: 'Graph', scc: List[str], errors: Errors) ->
# Run class decorator hooks (they require complete MROs and no placeholders).
apply_class_plugin_hooks(graph, scc, errors)
# This pass might need fallbacks calculated above and the results of hooks.
check_type_arguments(graph, scc, errors)
check_type_arguments(graph, scc, errors, options)
calculate_class_properties(graph, scc, errors)
check_blockers(graph, scc)
# Clean-up builtins, so that TypeVar etc. are not accessible without importing.
@@ -134,7 +137,7 @@ def semantic_analysis_for_targets(
n.node.fullname, n.node, n.active_typeinfo, patches)
apply_semantic_analyzer_patches(patches)
apply_class_plugin_hooks(graph, [state.id], state.manager.errors)
check_type_arguments_in_targets(nodes, state, state.manager.errors)
check_type_arguments_in_targets(nodes, state)
calculate_class_properties(graph, [state.id], state.manager.errors)


@@ -352,28 +355,34 @@ def semantic_analyze_target(target: str,
return [], analyzer.incomplete, analyzer.progress


def check_type_arguments(graph: 'Graph', scc: List[str], errors: Errors) -> None:
def check_type_arguments(graph: 'Graph',
scc: List[str],
errors: Errors,
options: Options) -> None:
for module in scc:
state = graph[module]
assert state.tree
typeshed_file = is_typeshed_file(state.path or '', options.custom_typeshed_dir)
analyzer = TypeArgumentAnalyzer(errors,
state.options,
is_typeshed_file(state.path or ''))
typeshed_file)
with state.wrap_context():
with mypy.state.state.strict_optional_set(state.options.strict_optional):
state.tree.accept(analyzer)


def check_type_arguments_in_targets(targets: List[FineGrainedDeferredNode], state: 'State',
errors: Errors) -> None:
def check_type_arguments_in_targets(targets: List[FineGrainedDeferredNode],
state: 'State') -> None:
"""Check type arguments against type variable bounds and restrictions.

This mirrors the logic in check_type_arguments() except that we process only
some targets. This is used in fine grained incremental mode.
"""
errors = state.manager.errors
typeshed_file = is_typeshed_file(state.path or '', state.manager.options.custom_typeshed_dir)
analyzer = TypeArgumentAnalyzer(errors,
state.options,
is_typeshed_file(state.path or ''))
typeshed_file)
with state.wrap_context():
with mypy.state.state.strict_optional_set(state.options.strict_optional):
for target in targets:
2 changes: 1 addition & 1 deletion mypy/server/update.py
@@ -602,7 +602,7 @@ def restore(ids: List[str]) -> None:
assert state.tree is not None, "file must be at least parsed"
t0 = time.time()
try:
semantic_analysis_for_scc(graph, [state.id], manager.errors)
semantic_analysis_for_scc(graph, [state.id], manager.errors, manager.options)
except CompileError as err:
# There was a blocking error, so module AST is incomplete. Restore old modules.
restore([module])
7 changes: 4 additions & 3 deletions mypy/util.py
@@ -744,9 +744,10 @@ def format_error(
return self.style(msg, 'red', bold=True)


def is_typeshed_file(file: str) -> bool:
# gross, but no other clear way to tell
return 'typeshed' in os.path.abspath(file).split(os.sep)
def is_typeshed_file(file: str, custom_typeshed_dir: Optional[str]) -> bool:
typeshed_dir = custom_typeshed_dir or os.path.join(os.path.dirname(__file__), "typeshed")
# Check whether typeshed_dir is a path prefix of file
return os.path.commonpath((typeshed_dir, os.path.abspath(file))) == typeshed_dir


def is_stub_package_file(file: str) -> bool:
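The `commonpath`-based test in the new `is_typeshed_file` above is stricter than the old `'typeshed' in path parts` heuristic: it only matches files actually located under the resolved typeshed directory. A quick standalone illustration with made-up POSIX paths:

```python
import os

typeshed_dir = "/opt/mypy/typeshed"
inside = "/opt/mypy/typeshed/stdlib/typing.pyi"
lookalike = "/home/user/typeshed/typing.py"  # a user directory that merely contains "typeshed"

# New check: only true for files under the actual typeshed directory.
print(os.path.commonpath((typeshed_dir, inside)) == typeshed_dir)     # True
print(os.path.commonpath((typeshed_dir, lookalike)) == typeshed_dir)  # False

# Old check: misfires on the look-alike path.
print("typeshed" in os.path.abspath(lookalike).split(os.sep))         # True
```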
10 changes: 10 additions & 0 deletions test-data/unit/cmdline.test
@@ -1425,3 +1425,13 @@ b\.c \d+
# cmd: mypy --enable-incomplete-features a.py
[file a.py]
pass

[case testCoreBuiltinFileNotFromTypeshed]
from typing import Union

def foo(a: Union[int, str]) -> str:
return str(a)
[file typing.py]
class Bar:
pass
[out]
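For completeness, the same shadowing scenario as the test case above can be reproduced from a script. This is a hypothetical sketch using mypy's public `mypy.api.run` entry point; the file names are arbitrary and the exact error text is taken from this diff, so it may change before the PR is finalized:

```python
import os
import tempfile

from mypy import api

with tempfile.TemporaryDirectory() as tmp:
    # A user-defined module that shadows the stdlib "typing" module.
    with open(os.path.join(tmp, "typing.py"), "w") as f:
        f.write("class Bar:\n    pass\n")
    with open(os.path.join(tmp, "a.py"), "w") as f:
        f.write("from typing import Union\n")

    stdout, stderr, exit_code = api.run([os.path.join(tmp, "a.py")])
    print(exit_code)           # expected: non-zero (blocking error)
    print(stdout or stderr)    # expected to mention: shadows library module "typing"
```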