diff --git a/mypy/build.py b/mypy/build.py index bfcc897cf2fa..3e8da93b4eae 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -83,15 +83,32 @@ WORKER_START_TIMEOUT, ) from mypy.error_formatter import OUTPUT_CHOICES, ErrorFormatter -from mypy.errors import CompileError, ErrorInfo, Errors, ErrorTuple, report_internal_error +from mypy.errors import ( + CompileError, + ErrorInfo, + Errors, + ErrorTuple, + ErrorTupleRaw, + report_internal_error, +) from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort from mypy.indirection import TypeIndirectionVisitor from mypy.ipc import BadStatus, IPCClient, IPCMessage, read_status, ready_to_read, receive, send from mypy.messages import MessageBuilder -from mypy.nodes import Import, ImportAll, ImportBase, ImportFrom, MypyFile, SymbolTable +from mypy.nodes import ( + ClassDef, + Context, + Import, + ImportAll, + ImportBase, + ImportFrom, + MypyFile, + SymbolTable, +) from mypy.partially_defined import PossiblyUndefinedVariableVisitor from mypy.semanal import SemanticAnalyzer from mypy.semanal_pass1 import SemanticAnalyzerPreAnalysis +from mypy.traverser import find_definitions from mypy.util import ( DecodeError, decode_python_encoding, @@ -866,6 +883,10 @@ def __init__( self.queue_order: int = 0 # Is this an instance used by a parallel worker? self.parallel_worker = parallel_worker + # Cache for ASTs created during error message generation. Note these are + # raw parsed trees not analyzed with mypy. We use these to find absolute + # location of a symbol used as a location for an error message. 
+ self.extra_trees: dict[str, MypyFile] = {} def dump_stats(self) -> None: if self.options.dump_build_stats: @@ -1028,6 +1049,58 @@ def report_file( if self.reports is not None and self.source_set.is_source(file): self.reports.file(file, self.modules, type_map, options) + def resolve_location(self, graph: dict[str, State], fullname: str) -> Context | None: + """Resolve an absolute location of a symbol with given fullname.""" + rest = [] + head = fullname + while True: + # TODO: this mimics the logic in lookup.py but it is actually wrong. + # This is because we don't distinguish between submodule and a local symbol + # with the same name. + head, tail = head.rsplit(".", maxsplit=1) + rest.append(tail) + if head in graph: + state = graph[head] + break + if "." not in head: + return None + *prefix, name = reversed(rest) + # If this happens something is wrong, but it is better to give slightly + # less helpful error message than crash. + if state.path is None: + return None + if state.tree is not None and state.tree.defs: + # We usually free ASTs after processing, but reuse an existing AST if + # it is still available. + tree = state.tree + elif state.id in self.extra_trees: + tree = self.extra_trees[state.id] + else: + if state.source is not None: + # Sources are usually discarded after processing as well, check + # if we still have one just in case. 
+ source = state.source + else: + path = state.manager.maybe_swap_for_shadow_path(state.path) + source = decode_python_encoding(state.manager.fscache.read(path)) + tree = parse(source, state.path, state.id, state.manager.errors, state.options) + self.extra_trees[state.id] = tree + statements = tree.defs + while prefix: + part = prefix.pop(0) + for statement in statements: + defs = find_definitions(statement, part) + if not defs or not isinstance((defn := defs[0]), ClassDef): + continue + statements = defn.defs.body + break + else: + return None + for statement in statements: + if defs := find_definitions(statement, name): + return defs[0] + return None + def verbosity(self) -> int: return self.options.verbosity @@ -2359,7 +2432,7 @@ def load_tree(self, temporary: bool = False) -> None: def fix_cross_refs(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" - # We need to set allow_missing when doing a fine grained cache + # We need to set allow_missing when doing a fine-grained cache # load because we need to gracefully handle missing modules. 
fixup_module(self.tree, self.manager.modules, self.options.use_fine_grained_cache) @@ -3558,7 +3631,9 @@ def find_stale_sccs( path = manager.errors.simplify_path(graph[id].xpath) formatted = manager.errors.format_messages( path, - deserialize_codes(graph[id].error_lines), + transform_error_tuples( + manager, graph, deserialize_codes(graph[id].error_lines) + ), formatter=manager.error_formatter, ) manager.flush_errors(path, formatted, False) @@ -3813,7 +3888,9 @@ def process_stale_scc( if graph[id].xpath not in manager.errors.ignored_files: errors = manager.errors.file_messages(graph[id].xpath) formatted = manager.errors.format_messages( - graph[id].xpath, errors, formatter=manager.error_formatter + graph[id].xpath, + transform_error_tuples(manager, graph, errors), + formatter=manager.error_formatter, ) manager.flush_errors(manager.errors.simplify_path(graph[id].xpath), formatted, False) errors_by_id[id] = errors @@ -3972,14 +4049,37 @@ def write_undocumented_ref_info( metastore.write(ref_info_file, json_dumps(deps_json)) -def serialize_codes(errs: list[ErrorTuple]) -> list[SerializedError]: +def transform_error_tuples( + manager: BuildManager, graph: dict[str, State], error_tuples_rel: list[ErrorTupleRaw] +) -> list[ErrorTuple]: + """Transform raw error tuples by resolving relative error locations.""" + error_tuples = [] + for e in error_tuples_rel: + file, line_rel, column, end_line, end_column, severity, message, code = e + if isinstance(line_rel, int): + line = line_rel + else: + assert file is not None + loc = manager.resolve_location(graph, line_rel) + if loc is not None: + line = loc.line + column = loc.column + end_line = loc.end_line or -1 + end_column = loc.end_column or -1 + else: + line = -1 + error_tuples.append((file, line, column, end_line, end_column, severity, message, code)) + return error_tuples + + +def serialize_codes(errs: list[ErrorTupleRaw]) -> list[SerializedError]: return [ (path, line, column, end_line, end_column, severity, message, 
code.code if code else None) for path, line, column, end_line, end_column, severity, message, code in errs ] -def deserialize_codes(errs: list[SerializedError]) -> list[ErrorTuple]: +def deserialize_codes(errs: list[SerializedError]) -> list[ErrorTupleRaw]: return [ ( path, diff --git a/mypy/cache.py b/mypy/cache.py index f67bd627d3b8..81915821e19e 100644 --- a/mypy/cache.py +++ b/mypy/cache.py @@ -71,7 +71,7 @@ # High-level cache layout format CACHE_VERSION: Final = 1 -SerializedError: _TypeAlias = tuple[str | None, int, int, int, int, str, str, str | None] +SerializedError: _TypeAlias = tuple[str | None, int | str, int, int, int, str, str, str | None] class CacheMeta: @@ -479,7 +479,10 @@ def write_errors(data: WriteBuffer, errs: list[SerializedError]) -> None: for path, line, column, end_line, end_column, severity, message, code in errs: write_tag(data, TUPLE_GEN) write_str_opt(data, path) - write_int(data, line) + if isinstance(line, str): + write_str(data, line) + else: + write_int(data, line) write_int(data, column) write_int(data, end_line) write_int(data, end_column) @@ -493,10 +496,17 @@ def read_errors(data: ReadBuffer) -> list[SerializedError]: result = [] for _ in range(read_int_bare(data)): assert read_tag(data) == TUPLE_GEN + path = read_str_opt(data) + tag = read_tag(data) + if tag == LITERAL_STR: + line: str | int = read_str_bare(data) + else: + assert tag == LITERAL_INT + line = read_int_bare(data) result.append( ( - read_str_opt(data), - read_int(data), + path, + line, read_int(data), read_int(data), read_int(data), diff --git a/mypy/errors.py b/mypy/errors.py index edfb3bd1607a..559fcbda6d85 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -130,6 +130,7 @@ def __init__( target: str | None = None, priority: int = 0, parent_error: ErrorInfo | None = None, + location_ref: str | None = None, ) -> None: self.import_ctx = import_ctx self.file = file @@ -151,12 +152,18 @@ def __init__( if parent_error is not None: assert severity == "note", "Only 
notes can specify parent errors" self.parent_error = parent_error + self.location_ref = location_ref # Type used internally to represent errors: # (path, line, column, end_line, end_column, severity, message, code) ErrorTuple: _TypeAlias = tuple[str | None, int, int, int, int, str, str, ErrorCode | None] +# A raw version of the above that can refer to either absolute or relative location. +# If the location is relative, the first item (line) is a string with a symbol fullname, +# and three other values (column, end_line, end_column) are set to -1. +ErrorTupleRaw: _TypeAlias = tuple[str | None, int | str, int, int, int, str, str, ErrorCode | None] + class ErrorWatcher: """Context manager that can be used to keep track of new errors recorded @@ -569,6 +576,7 @@ def report( end_line: int | None = None, end_column: int | None = None, parent_error: ErrorInfo | None = None, + location_ref: str | None = None, ) -> ErrorInfo: """Report message at the given line using the current error context. @@ -635,6 +643,7 @@ def report( origin=(self.file, origin_span), target=self.current_target(), parent_error=parent_error, + location_ref=location_ref, ) self.add_error_info(info) return info @@ -1014,6 +1023,8 @@ def raise_error(self, use_stdout: bool = True) -> NoReturn: """ # self.new_messages() will format all messages that haven't already # been returned from a file_messages() call. + # TODO: pass resolve_location callback here. + # This will be needed if we are going to use relative locations in blocker errors. 
raise CompileError( self.new_messages(), use_stdout=use_stdout, module_with_blocker=self.blocker_module() ) @@ -1076,7 +1087,7 @@ def format_messages_default( a.append(" " * (DEFAULT_SOURCE_OFFSET + column) + marker) return a - def file_messages(self, path: str) -> list[ErrorTuple]: + def file_messages(self, path: str) -> list[ErrorTupleRaw]: """Return an error tuple list of new error messages from a given file.""" if path not in self.error_info_map: return [] @@ -1119,7 +1130,9 @@ def find_shadow_file_mapping(self, path: str) -> str | None: return i[1] return None - def new_messages(self) -> list[str]: + def new_messages( + self, resolve_location: Callable[[str], Context | None] | None = None + ) -> list[str]: """Return a string list of new error messages. Use a form suitable for displaying to the user. @@ -1129,7 +1142,29 @@ def new_messages(self) -> list[str]: msgs = [] for path in self.error_info_map.keys(): if path not in self.flushed_files: - error_tuples = self.file_messages(path) + error_tuples_rel = self.file_messages(path) + error_tuples = [] + for e in error_tuples_rel: + # This has a bit of code duplication with build.py, but it is hard + # to avoid without either an import cycle or a performance penalty. 
+ file, line_rel, column, end_line, end_column, severity, message, code = e + if isinstance(line_rel, int): + line = line_rel + elif resolve_location is not None: + assert file is not None + loc = resolve_location(line_rel) + if loc is not None: + line = loc.line + column = loc.column + end_line = loc.end_line or -1 + end_column = loc.end_column or -1 + else: + line = -1 + else: + line = -1 + error_tuples.append( + (file, line, column, end_line, end_column, severity, message, code) + ) msgs.extend(self.format_messages(path, error_tuples)) return msgs @@ -1141,7 +1176,7 @@ def targets(self) -> set[str]: info.target for errs in self.error_info_map.values() for info in errs if info.target } - def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTuple]: + def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTupleRaw]: """Translate the messages into a sequence of tuples. Each tuple is of form (path, line, col, severity, message, code). @@ -1149,7 +1184,7 @@ def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTuple]: The path item may be None. If the line item is negative, the line number is not defined for the tuple. 
""" - result: list[ErrorTuple] = [] + result: list[ErrorTupleRaw] = [] prev_import_context: list[tuple[str, int]] = [] prev_function_or_member: str | None = None prev_type: str | None = None @@ -1224,9 +1259,21 @@ def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTuple]: else: result.append((file, -1, -1, -1, -1, "note", f'In class "{e.type}":', None)) - result.append( - (file, e.line, e.column, e.end_line, e.end_column, e.severity, e.message, e.code) - ) + if e.location_ref is not None: + result.append((file, e.location_ref, -1, -1, -1, e.severity, e.message, e.code)) + else: + result.append( + ( + file, + e.line, + e.column, + e.end_line, + e.end_column, + e.severity, + e.message, + e.code, + ) + ) prev_import_context = e.import_ctx prev_function_or_member = e.function_or_member diff --git a/mypy/lookup.py b/mypy/lookup.py index 640481ff703c..e3b195567fa0 100644 --- a/mypy/lookup.py +++ b/mypy/lookup.py @@ -32,6 +32,9 @@ def lookup_fully_qualified( if raise_on_missing: assert "." in head, f"Cannot find module for {name}" return None + # TODO: this logic is not correct as it confuses a submodule and a local symbol. + # A potential solution may be to use format like pkg.mod:Cls.method for fullname, + # but this is a relatively big change. 
head, tail = head.rsplit(".", maxsplit=1) rest.append(tail) mod = modules.get(head) diff --git a/mypy/messages.py b/mypy/messages.py index 0a255582d139..c5756a463894 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -266,6 +266,11 @@ def span_from_context(ctx: Context) -> Iterable[int]: assert origin_span is not None origin_span = itertools.chain(origin_span, span_from_context(secondary_context)) + location_ref = None + if file is not None and file != self.errors.file: + assert isinstance(context, SymbolNode), "Only symbols can be locations in other files" + location_ref = context.fullname + return self.errors.report( context.line if context else -1, context.column if context else -1, @@ -278,6 +283,7 @@ def span_from_context(ctx: Context) -> Iterable[int]: end_column=context.end_column if context else -1, code=code, parent_error=parent_error, + location_ref=location_ref, ) def fail( diff --git a/mypy/plugins/functools.py b/mypy/plugins/functools.py index c8b370f15e6d..1de0d9efae56 100644 --- a/mypy/plugins/functools.py +++ b/mypy/plugins/functools.py @@ -305,6 +305,9 @@ def handle_partial_with_callee(ctx: mypy.plugin.FunctionContext, callee: Type) - if not mypy.checker.is_valid_inferred_type(ret_type, ctx.api.options): ret_type = fn_type.ret_type # same kind of hack as above + # Technically, we should set definition to None here, since it will not be recovered + # on warm cache runs in fixup.py. This however may hide some helpful info in error + # messages, so we are keeping it for now. See also issue #20640. 
partially_applied = fn_type.copy_modified( arg_types=partial_types, arg_kinds=partial_kinds, diff --git a/mypy/server/update.py b/mypy/server/update.py index f2e3554abda8..8ecfc95c373d 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -136,6 +136,7 @@ from mypy.fscache import FileSystemCache from mypy.modulefinder import BuildSource from mypy.nodes import ( + Context, Decorator, FuncDef, ImportFrom, @@ -295,7 +296,7 @@ def update( if not changed_modules: # Preserve state needed for the next update. self.previous_targets_with_errors = self.manager.errors.targets() - messages = self.manager.errors.new_messages() + messages = self.manager.errors.new_messages(self.resolve_location_cb) break messages = sort_messages_preserving_file_order(messages, self.previous_messages) @@ -319,9 +320,12 @@ def trigger(self, target: str) -> list[str]: ) # Preserve state needed for the next update. self.previous_targets_with_errors = self.manager.errors.targets() - self.previous_messages = self.manager.errors.new_messages().copy() + self.previous_messages = self.manager.errors.new_messages(self.resolve_location_cb).copy() return self.update(changed_modules, []) + def resolve_location_cb(self, fullname: str) -> Context | None: + return self.manager.resolve_location(self.graph, fullname) + def flush_cache(self) -> None: """Flush AST cache. diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 5e533e3c245d..00268ea998df 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -141,6 +141,7 @@ def run_case_once( options.fixed_format_cache = True if testcase.output_files: raise pytest.skip("Reports are not supported in parallel mode") + # Note: do not use this unless really needed! 
if testcase.name.endswith("_no_parallel"): raise pytest.skip("Test not supported in parallel mode yet") @@ -163,7 +164,7 @@ def run_case_once( sources = [] for module_name, program_path, program_text in module_data: - # Always set to none so we're forced to reread the module in incremental mode + # Always set to None, so we're forced to reread the module in incremental mode sources.append( BuildSource(program_path, module_name, None if incremental_step else program_text) ) diff --git a/mypy/traverser.py b/mypy/traverser.py index baf234cc1b25..83b64ea21e1b 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -66,6 +66,7 @@ SetExpr, SliceExpr, StarExpr, + Statement, StrExpr, SuperExpr, TempNode, @@ -96,7 +97,7 @@ StarredPattern, ValuePattern, ) -from mypy.visitor import NodeVisitor +from mypy.visitor import NodeVisitor, StatementVisitor @trait @@ -108,6 +109,10 @@ class TraverserVisitor(NodeVisitor[None]): should override visit methods to perform actions during traversal. Calling the superclass method allows reusing the traversal implementation. + + TODO: split this into more limited visitor (e.g. statements-only etc). + This will improve performance since in many cases we don't need to recurse + all the way down in various visitors that subclass this. """ def __init__(self) -> None: @@ -1084,3 +1089,115 @@ def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: def visit_yield_from_expr(self, expr: YieldFromExpr) -> None: self.yield_from_expressions.append((expr, self.in_assignment)) + + +def find_definitions(o: Statement, name: str) -> list[Statement]: + visitor = DefinitionSeeker(name) + o.accept(visitor) + return visitor.found + + +class DefinitionSeeker(StatementVisitor[None]): + def __init__(self, name: str) -> None: + self.name = name + self.found: list[Statement] = [] + + def visit_assignment_stmt(self, o: AssignmentStmt, /) -> None: + # TODO: support more kinds of locations (like assignment statements). 
+ # the latter will be helpful for old-style type aliases. + pass + + def visit_for_stmt(self, o: ForStmt, /) -> None: + o.body.accept(self) + if o.else_body: + o.else_body.accept(self) + + def visit_with_stmt(self, o: WithStmt, /) -> None: + o.body.accept(self) + + def visit_del_stmt(self, o: DelStmt, /) -> None: + pass + + def visit_func_def(self, o: FuncDef, /) -> None: + if o.name == self.name: + self.found.append(o) + + def visit_overloaded_func_def(self, o: OverloadedFuncDef, /) -> None: + if o.name == self.name: + self.found.append(o) + + def visit_class_def(self, o: ClassDef, /) -> None: + if o.name == self.name: + self.found.append(o) + + def visit_global_decl(self, o: GlobalDecl, /) -> None: + pass + + def visit_nonlocal_decl(self, o: NonlocalDecl, /) -> None: + pass + + def visit_decorator(self, o: Decorator, /) -> None: + if o.name == self.name: + self.found.append(o) + + def visit_import(self, o: Import, /) -> None: + pass + + def visit_import_from(self, o: ImportFrom, /) -> None: + pass + + def visit_import_all(self, o: ImportAll, /) -> None: + pass + + def visit_block(self, o: Block, /) -> None: + for s in o.body: + s.accept(self) + + def visit_expression_stmt(self, o: ExpressionStmt, /) -> None: + pass + + def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt, /) -> None: + pass + + def visit_while_stmt(self, o: WhileStmt, /) -> None: + o.body.accept(self) + if o.else_body: + o.else_body.accept(self) + + def visit_return_stmt(self, o: ReturnStmt, /) -> None: + pass + + def visit_assert_stmt(self, o: AssertStmt, /) -> None: + pass + + def visit_if_stmt(self, o: IfStmt, /) -> None: + for b in o.body: + b.accept(self) + if o.else_body: + o.else_body.accept(self) + + def visit_break_stmt(self, o: BreakStmt, /) -> None: + pass + + def visit_continue_stmt(self, o: ContinueStmt, /) -> None: + pass + + def visit_pass_stmt(self, o: PassStmt, /) -> None: + pass + + def visit_raise_stmt(self, o: RaiseStmt, /) -> None: + pass + + def 
visit_try_stmt(self, o: TryStmt, /) -> None: + o.body.accept(self) + if o.else_body is not None: + o.else_body.accept(self) + if o.finally_body is not None: + o.finally_body.accept(self) + + def visit_match_stmt(self, o: MatchStmt, /) -> None: + for b in o.bodies: + b.accept(self) + + def visit_type_alias_stmt(self, o: TypeAliasStmt, /) -> None: + pass diff --git a/mypy/types.py b/mypy/types.py index 025812e25f0a..c75397224f2b 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2241,7 +2241,7 @@ def copy_modified( ret_type: Bogus[Type] = _dummy, fallback: Bogus[Instance] = _dummy, name: Bogus[str | None] = _dummy, - definition: Bogus[SymbolNode] = _dummy, + definition: Bogus[SymbolNode | None] = _dummy, variables: Bogus[Sequence[TypeVarLikeType]] = _dummy, line: int = _dummy_int, column: int = _dummy_int, diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 61e332f8566f..7fd96f9abf94 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -7683,7 +7683,7 @@ class Child(metaclass=M, thing=0): pass [builtins fixtures/object_with_init_subclass.pyi] -[case testTooManyArgsForObject_no_parallel] +[case testTooManyArgsForObject] class A(thing=5): pass [out] diff --git a/test-data/unit/check-ignore.test b/test-data/unit/check-ignore.test index 0c373c0e2788..d0f6bb6aeb60 100644 --- a/test-data/unit/check-ignore.test +++ b/test-data/unit/check-ignore.test @@ -198,7 +198,7 @@ bar(Child()) [out] main:19: error: Argument 1 to "bar" has incompatible type "Child"; expected "Base[str, str]" -[case testTypeIgnoreLineNumberWithinFile_no_parallel] +[case testTypeIgnoreLineNumberWithinFile] import m pass # type: ignore m.f(kw=1) diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index d6020be9937d..1f640e6bc61b 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -1090,7 +1090,7 @@ import foo # type: ignore [builtins 
fixtures/module.pyi] [stale] -[case testIncrementalWithSilentImportsAndIgnore_no_parallel] +[case testIncrementalWithSilentImportsAndIgnore] # cmd: mypy -m main b # cmd2: mypy -m main c c.submodule # flags: --follow-imports=skip @@ -6919,7 +6919,7 @@ class TheClass: [out2] tmp/a.py:3: note: Revealed type is "def (value: builtins.object) -> lib.TheClass.pyenum@6" - +-- Note: do not use _no_parallel unless really needed! [case testIncrementalFunctoolsPartial_no_parallel] import a @@ -7700,3 +7700,52 @@ tmp/b.py:1: error: Cannot resolve name "no_such_export" (possible cyclic definit [out2] tmp/b.py:1: error: Cannot resolve name "no_such_export" (possible cyclic definition) tmp/m.py:2: note: Revealed type is "Any" + +[case testCachedUnexpectedKeywordArgument] +import m +[file m.py] +import a +[file a.py] +import b +b.lol(uhhhh=12) # tweak +[file a.py.2] +import b +b.lol(uhhhh=12) +[file b.py] +def lol() -> None: pass +[file b.py.3] +# space +def lol() -> None: pass +[file m.py.4] +import a # touch +[out] +tmp/a.py:2: error: Unexpected keyword argument "uhhhh" for "lol" +tmp/b.py:1: note: "lol" defined here +[out2] +tmp/a.py:2: error: Unexpected keyword argument "uhhhh" for "lol" +tmp/b.py:1: note: "lol" defined here +[out3] +tmp/a.py:2: error: Unexpected keyword argument "uhhhh" for "lol" +tmp/b.py:2: note: "lol" defined here +[out4] +tmp/a.py:2: error: Unexpected keyword argument "uhhhh" for "lol" +tmp/b.py:2: note: "lol" defined here + +[case testCachedUnexpectedKeywordArgumentNested] +import a +[file a.py] +import b +b.lol(uhhhh=12) # tweak +[file a.py.2] +import b +b.lol(uhhhh=12) +[file b.py] +while True: + if True: + def lol() -> None: pass +[out] +tmp/a.py:2: error: Unexpected keyword argument "uhhhh" for "lol" +tmp/b.py:3: note: "lol" defined here +[out2] +tmp/a.py:2: error: Unexpected keyword argument "uhhhh" for "lol" +tmp/b.py:3: note: "lol" defined here diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test index 
4099716bcf6b..5ef3a2e45aec 100644 --- a/test-data/unit/check-kwargs.test +++ b/test-data/unit/check-kwargs.test @@ -467,7 +467,7 @@ class A: pass A.B(x=1) # E: Unexpected keyword argument "x" for "B" -[case testUnexpectedMethodKwargFromOtherModule_no_parallel] +[case testUnexpectedMethodKwargFromOtherModule] import m m.A(x=1) [file m.py] diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index a24a3964692f..c67c7b62b69b 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -115,7 +115,7 @@ a, b = x a, b, c = x # E: Need more than 2 values to unpack (3 expected) [builtins fixtures/tuple.pyi] -[case testNamedTupleAdditionalArgs_no_parallel] +[case testNamedTupleAdditionalArgs] from collections import namedtuple A = namedtuple('A', 'a b') diff --git a/test-data/unit/check-serialize.test b/test-data/unit/check-serialize.test index 9932d5e134e7..592f77268b60 100644 --- a/test-data/unit/check-serialize.test +++ b/test-data/unit/check-serialize.test @@ -158,7 +158,7 @@ def f(__x: int) -> None: pass [out2] tmp/a.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" tmp/a.py:4: error: Unexpected keyword argument "__x" for "f" -tmp/b.py: note: "f" defined here +tmp/b.py:1: note: "f" defined here [case testSerializeArgumentKindsErrors] import a @@ -224,7 +224,7 @@ def f(x: int) -> int: pass [out2] tmp/a.py:2: note: Revealed type is "builtins.str" tmp/a.py:3: error: Unexpected keyword argument "x" for "f" -tmp/b.py: note: "f" defined here +tmp/b.py:3: note: "f" defined here [case testSerializeTypeGuardFunction] import a diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index faa46374023a..41fb0ed18ba8 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -11518,3 +11518,17 @@ class Sub(Base[Concatenate[str, P]]): ... 
[out] == impl.py:7: error: Argument 1 to "meth" of "Base" has incompatible type "int"; expected "str" + +[case testFineGrainedUnexpectedKeywordArgument] +import a +[file a.py] +import b +[file a.py.2] +import b +b.lol(uhhhh=12) +[file b.py] +def lol() -> None: pass +[out] +== +a.py:2: error: Unexpected keyword argument "uhhhh" for "lol" +b.py:1: note: "lol" defined here