Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
116 changes: 109 additions & 7 deletions mypy/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,12 +83,29 @@
WORKER_START_TIMEOUT,
)
from mypy.error_formatter import OUTPUT_CHOICES, ErrorFormatter
from mypy.errors import CompileError, ErrorInfo, Errors, ErrorTuple, report_internal_error
from mypy.errors import (
CompileError,
ErrorInfo,
Errors,
ErrorTuple,
ErrorTupleRaw,
report_internal_error,
)
from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort
from mypy.indirection import TypeIndirectionVisitor
from mypy.ipc import BadStatus, IPCClient, IPCMessage, read_status, ready_to_read, receive, send
from mypy.messages import MessageBuilder
from mypy.nodes import Import, ImportAll, ImportBase, ImportFrom, MypyFile, SymbolTable
from mypy.nodes import (
ClassDef,
Context,
Import,
ImportAll,
ImportBase,
ImportFrom,
MypyFile,
SymbolNode,
SymbolTable,
)
from mypy.partially_defined import PossiblyUndefinedVariableVisitor
from mypy.semanal import SemanticAnalyzer
from mypy.semanal_pass1 import SemanticAnalyzerPreAnalysis
Expand Down Expand Up @@ -866,6 +883,10 @@ def __init__(
self.queue_order: int = 0
# Is this an instance used by a parallel worker?
self.parallel_worker = parallel_worker
# Cache for ASTs created during error message generation. Note these are
# raw parsed trees not analyzed with mypy. We use these to find absolute
# location of a symbol used as a location for an error message.
self.extra_trees: dict[str, MypyFile] = {}

def dump_stats(self) -> None:
if self.options.dump_build_stats:
Expand Down Expand Up @@ -1028,6 +1049,60 @@ def report_file(
if self.reports is not None and self.source_set.is_source(file):
self.reports.file(file, self.modules, type_map, options)

def resolve_location(self, graph: dict[str, State], fullname: str) -> Context | None:
    """Resolve an absolute location of a symbol with given fullname.

    Walk ``fullname`` from the right until some prefix matches a module id
    present in ``graph``; the remaining components are treated as a chain of
    (possibly nested) class names ending in the symbol name.  The module's
    raw (unanalyzed) parse tree is then searched for that symbol and its
    definition node is returned so it can serve as an error location.
    Returns None whenever the module, its path, or the symbol cannot be
    found.
    """
    rest: list[str] = []
    head = fullname
    while True:
        # TODO: this mimics the logic in lookup.py but it is actually wrong.
        # This is because we don't distinguish between a submodule and a local
        # symbol with the same name.
        head, tail = head.rsplit(".", maxsplit=1)
        rest.append(tail)
        if head in graph:
            state = graph[head]
            break
        if "." not in head:
            # Ran out of prefixes without finding a known module.
            return None
    # Components were collected right-to-left; restore source order.
    # ``prefix`` is the chain of enclosing class names, ``name`` the symbol.
    *prefix, name = reversed(rest)
    # If this happens something is wrong, but it is better to give a slightly
    # less helpful error message than crash.
    if state.path is None:
        return None
    if state.tree is not None and state.tree.defs:
        # We usually free ASTs after processing, but reuse an existing AST if
        # it is still available.
        tree = state.tree
    elif state.id in self.extra_trees:
        # Reuse a tree we already re-parsed for a previous error message.
        tree = self.extra_trees[state.id]
    else:
        if state.source is not None:
            # Sources are usually discarded after processing as well, check
            # if we still have one just in case.
            source = state.source
        else:
            # Fall back to re-reading the file (honoring any shadow mapping).
            path = state.manager.maybe_swap_for_shadow_path(state.path)
            source = decode_python_encoding(state.manager.fscache.read(path))
        tree = parse(source, state.path, state.id, state.manager.errors, state.options)
        # Cache the raw tree so repeated lookups don't re-read or re-parse.
        self.extra_trees[state.id] = tree
    defs = tree.defs
    # Descend through nested class bodies, one prefix component at a time.
    while prefix:
        part = prefix.pop(0)
        for defn in defs:
            if not isinstance(defn, ClassDef):
                continue
            if defn.name == part:
                defs = defn.defs.body
                break
        else:
            # No class with this name at the current nesting level.
            return None
    for defn in defs:
        # TODO: support more kinds of locations (like assignment statements);
        # the latter will be helpful for old-style type aliases.
        if isinstance(defn, SymbolNode) and defn.name == name:
            return defn
    return None

def verbosity(self) -> int:
    """Return the verbosity level configured in the build options."""
    level = self.options.verbosity
    return level

Expand Down Expand Up @@ -2359,7 +2434,7 @@ def load_tree(self, temporary: bool = False) -> None:

def fix_cross_refs(self) -> None:
    """Fix up cross-references in this file's deserialized AST.

    Must only be called once the tree has been loaded (``self.tree`` set).
    """
    assert self.tree is not None, "Internal error: method must be called on parsed file only"
    # We need to set allow_missing when doing a fine-grained cache
    # load because we need to gracefully handle missing modules.
    fixup_module(self.tree, self.manager.modules, self.options.use_fine_grained_cache)

Expand Down Expand Up @@ -3558,7 +3633,9 @@ def find_stale_sccs(
path = manager.errors.simplify_path(graph[id].xpath)
formatted = manager.errors.format_messages(
path,
deserialize_codes(graph[id].error_lines),
transform_error_tuples(
manager, graph, deserialize_codes(graph[id].error_lines)
),
formatter=manager.error_formatter,
)
manager.flush_errors(path, formatted, False)
Expand Down Expand Up @@ -3813,7 +3890,9 @@ def process_stale_scc(
if graph[id].xpath not in manager.errors.ignored_files:
errors = manager.errors.file_messages(graph[id].xpath)
formatted = manager.errors.format_messages(
graph[id].xpath, errors, formatter=manager.error_formatter
graph[id].xpath,
transform_error_tuples(manager, graph, errors),
formatter=manager.error_formatter,
)
manager.flush_errors(manager.errors.simplify_path(graph[id].xpath), formatted, False)
errors_by_id[id] = errors
Expand Down Expand Up @@ -3972,14 +4051,37 @@ def write_undocumented_ref_info(
metastore.write(ref_info_file, json_dumps(deps_json))


def serialize_codes(errs: list[ErrorTuple]) -> list[SerializedError]:
def transform_error_tuples(
    manager: BuildManager, graph: dict[str, State], error_tuples_rel: list[ErrorTupleRaw]
) -> list[ErrorTuple]:
    """Transform raw error tuples by resolving relative error locations.

    A raw tuple may carry a symbol fullname (a string) in the line slot;
    such entries are resolved to absolute coordinates via
    ``manager.resolve_location``.  Tuples that already have an integer line
    pass through unchanged.
    """
    resolved: list[ErrorTuple] = []
    for path, location, col, end_ln, end_col, severity, message, code in error_tuples_rel:
        if isinstance(location, str):
            # Relative location: a symbol fullname that must be looked up.
            assert path is not None
            found = manager.resolve_location(graph, location)
            if found is None:
                # Resolution failed; report without a concrete line.
                line = -1
            else:
                line = found.line
                col = found.column
                end_ln = found.end_line or -1
                end_col = found.end_column or -1
        else:
            line = location
        resolved.append((path, line, col, end_ln, end_col, severity, message, code))
    return resolved


def serialize_codes(errs: list[ErrorTupleRaw]) -> list[SerializedError]:
    """Replace each ErrorCode object with its string id for serialization."""
    serialized: list[SerializedError] = []
    for path, line, column, end_line, end_column, severity, message, code in errs:
        # An absent code is preserved as None.
        code_str = code.code if code else None
        serialized.append((path, line, column, end_line, end_column, severity, message, code_str))
    return serialized


def deserialize_codes(errs: list[SerializedError]) -> list[ErrorTuple]:
def deserialize_codes(errs: list[SerializedError]) -> list[ErrorTupleRaw]:
return [
(
path,
Expand Down
18 changes: 14 additions & 4 deletions mypy/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@
# High-level cache layout format
CACHE_VERSION: Final = 1

SerializedError: _TypeAlias = tuple[str | None, int, int, int, int, str, str, str | None]
SerializedError: _TypeAlias = tuple[str | None, int | str, int, int, int, str, str, str | None]


class CacheMeta:
Expand Down Expand Up @@ -479,7 +479,10 @@ def write_errors(data: WriteBuffer, errs: list[SerializedError]) -> None:
for path, line, column, end_line, end_column, severity, message, code in errs:
write_tag(data, TUPLE_GEN)
write_str_opt(data, path)
write_int(data, line)
if isinstance(line, str):
write_str(data, line)
else:
write_int(data, line)
write_int(data, column)
write_int(data, end_line)
write_int(data, end_column)
Expand All @@ -493,10 +496,17 @@ def read_errors(data: ReadBuffer) -> list[SerializedError]:
result = []
for _ in range(read_int_bare(data)):
assert read_tag(data) == TUPLE_GEN
path = read_str_opt(data)
tag = read_tag(data)
if tag == LITERAL_STR:
line: str | int = read_str_bare(data)
else:
assert tag == LITERAL_INT
line = read_int_bare(data)
result.append(
(
read_str_opt(data),
read_int(data),
path,
line,
read_int(data),
read_int(data),
read_int(data),
Expand Down
63 changes: 55 additions & 8 deletions mypy/errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,7 @@ def __init__(
target: str | None = None,
priority: int = 0,
parent_error: ErrorInfo | None = None,
location_ref: str | None = None,
) -> None:
self.import_ctx = import_ctx
self.file = file
Expand All @@ -151,12 +152,18 @@ def __init__(
if parent_error is not None:
assert severity == "note", "Only notes can specify parent errors"
self.parent_error = parent_error
self.location_ref = location_ref


# Type used internally to represent errors:
# (path, line, column, end_line, end_column, severity, message, code)
ErrorTuple: _TypeAlias = tuple[str | None, int, int, int, int, str, str, ErrorCode | None]

# A raw version of the above that can refer to either absolute or relative location.
# If the location is relative, the first item (line) is a string with a symbol fullname,
# and three other values (column, end_line, end_column) are set to -1.
ErrorTupleRaw: _TypeAlias = tuple[str | None, int | str, int, int, int, str, str, ErrorCode | None]


class ErrorWatcher:
"""Context manager that can be used to keep track of new errors recorded
Expand Down Expand Up @@ -569,6 +576,7 @@ def report(
end_line: int | None = None,
end_column: int | None = None,
parent_error: ErrorInfo | None = None,
location_ref: str | None = None,
) -> ErrorInfo:
"""Report message at the given line using the current error context.

Expand Down Expand Up @@ -635,6 +643,7 @@ def report(
origin=(self.file, origin_span),
target=self.current_target(),
parent_error=parent_error,
location_ref=location_ref,
)
self.add_error_info(info)
return info
Expand Down Expand Up @@ -1014,6 +1023,8 @@ def raise_error(self, use_stdout: bool = True) -> NoReturn:
"""
# self.new_messages() will format all messages that haven't already
# been returned from a file_messages() call.
# TODO: pass resolve_location callback here.
# This will be needed if we are going to use relative locations in blocker errors.
raise CompileError(
self.new_messages(), use_stdout=use_stdout, module_with_blocker=self.blocker_module()
)
Expand Down Expand Up @@ -1076,7 +1087,7 @@ def format_messages_default(
a.append(" " * (DEFAULT_SOURCE_OFFSET + column) + marker)
return a

def file_messages(self, path: str) -> list[ErrorTuple]:
def file_messages(self, path: str) -> list[ErrorTupleRaw]:
"""Return an error tuple list of new error messages from a given file."""
if path not in self.error_info_map:
return []
Expand Down Expand Up @@ -1119,7 +1130,9 @@ def find_shadow_file_mapping(self, path: str) -> str | None:
return i[1]
return None

def new_messages(self) -> list[str]:
def new_messages(
self, resolve_location: Callable[[str], Context | None] | None = None
) -> list[str]:
"""Return a string list of new error messages.

Use a form suitable for displaying to the user.
Expand All @@ -1129,7 +1142,29 @@ def new_messages(self) -> list[str]:
msgs = []
for path in self.error_info_map.keys():
if path not in self.flushed_files:
error_tuples = self.file_messages(path)
error_tuples_rel = self.file_messages(path)
error_tuples = []
for e in error_tuples_rel:
# This has a bit of code duplication with build.py, but it is hard
# to avoid without either an import cycle or a performance penalty.
file, line_rel, column, end_line, end_column, severity, message, code = e
if isinstance(line_rel, int):
line = line_rel
elif resolve_location is not None:
assert file is not None
loc = resolve_location(line_rel)
if loc is not None:
line = loc.line
column = loc.column
end_line = loc.end_line or -1
end_column = loc.end_column or -1
else:
line = -1
else:
line = -1
error_tuples.append(
(file, line, column, end_line, end_column, severity, message, code)
)
msgs.extend(self.format_messages(path, error_tuples))
return msgs

Expand All @@ -1141,15 +1176,15 @@ def targets(self) -> set[str]:
info.target for errs in self.error_info_map.values() for info in errs if info.target
}

def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTuple]:
def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTupleRaw]:
"""Translate the messages into a sequence of tuples.

Each tuple is of form (path, line, col, severity, message, code).
The rendered sequence includes information about error contexts.
The path item may be None. If the line item is negative, the
line number is not defined for the tuple.
"""
result: list[ErrorTuple] = []
result: list[ErrorTupleRaw] = []
prev_import_context: list[tuple[str, int]] = []
prev_function_or_member: str | None = None
prev_type: str | None = None
Expand Down Expand Up @@ -1224,9 +1259,21 @@ def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTuple]:
else:
result.append((file, -1, -1, -1, -1, "note", f'In class "{e.type}":', None))

result.append(
(file, e.line, e.column, e.end_line, e.end_column, e.severity, e.message, e.code)
)
if e.location_ref is not None:
result.append((file, e.location_ref, -1, -1, -1, e.severity, e.message, e.code))
else:
result.append(
(
file,
e.line,
e.column,
e.end_line,
e.end_column,
e.severity,
e.message,
e.code,
)
)

prev_import_context = e.import_ctx
prev_function_or_member = e.function_or_member
Expand Down
3 changes: 3 additions & 0 deletions mypy/lookup.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,9 @@ def lookup_fully_qualified(
if raise_on_missing:
assert "." in head, f"Cannot find module for {name}"
return None
# TODO: this logic is not correct as it confuses a submodule and a local symbol.
# A potential solution may be to use format like pkg.mod:Cls.method for fullname,
# but this is a relatively big change.
head, tail = head.rsplit(".", maxsplit=1)
rest.append(tail)
mod = modules.get(head)
Expand Down
6 changes: 6 additions & 0 deletions mypy/messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -266,6 +266,11 @@ def span_from_context(ctx: Context) -> Iterable[int]:
assert origin_span is not None
origin_span = itertools.chain(origin_span, span_from_context(secondary_context))

location_ref = None
if file is not None and file != self.errors.file:
assert isinstance(context, SymbolNode), "Only symbols can be locations in other files"
location_ref = context.fullname

return self.errors.report(
context.line if context else -1,
context.column if context else -1,
Expand All @@ -278,6 +283,7 @@ def span_from_context(ctx: Context) -> Iterable[int]:
end_column=context.end_column if context else -1,
code=code,
parent_error=parent_error,
location_ref=location_ref,
)

def fail(
Expand Down
Loading