Fix various lint (#16726)
hauntsaninja authored Jan 2, 2024
1 parent 4aba5ca commit d0d5876
Showing 41 changed files with 102 additions and 93 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -10,7 +10,7 @@ repos:
hooks:
- id: black
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.1.0 # must match test-requirements.txt
+ rev: v0.1.4 # must match test-requirements.txt
hooks:
- id: ruff
args: [--exit-non-zero-on-fix]
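
The ruff bump above is what the rest of the commit follows up on: most of the edits below are small mechanical rewrites of the kind ruff's comprehension-related checks suggest. A minimal sketch of the recurring before/after patterns, using invented sample values rather than lines from this diff:

# Illustrative only: the comprehension rewrites applied throughout this commit.
pairs = [("src.py", "shadow.py")]                      # invented sample data
assert {s: t for (s, t) in pairs} == dict(pairs)       # dict comprehension -> dict()
assert [x for x in range(3)] == list(range(3))         # list comprehension -> list()
assert sorted(list({"b", "a"})) == sorted({"b", "a"})  # drop redundant list() inside sorted()
assert all(c.isalpha() for c in "abc")                 # pass a generator, not a list, to all()
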
2 changes: 1 addition & 1 deletion mypy/binder.py
@@ -291,7 +291,7 @@ def assign_type(
self.type_assignments[expr].append((type, declared_type))
return
if not isinstance(expr, (IndexExpr, MemberExpr, NameExpr)):
- return None
+ return
if not literal(expr):
return
self.invalidate_dependencies(expr)
10 changes: 4 additions & 6 deletions mypy/build.py
@@ -682,9 +682,7 @@ def __init__(
# for efficient lookup
self.shadow_map: dict[str, str] = {}
if self.options.shadow_file is not None:
- self.shadow_map = {
- source_file: shadow_file for (source_file, shadow_file) in self.options.shadow_file
- }
+ self.shadow_map = dict(self.options.shadow_file)
# a mapping from each file being typechecked to its possible shadow file
self.shadow_equivalence_map: dict[str, str | None] = {}
self.plugin = plugin
@@ -1120,7 +1118,7 @@ def read_deps_cache(manager: BuildManager, graph: Graph) -> dict[str, FgDepMeta]
module_deps_metas = deps_meta["deps_meta"]
assert isinstance(module_deps_metas, dict)
if not manager.options.skip_cache_mtime_checks:
- for id, meta in module_deps_metas.items():
+ for meta in module_deps_metas.values():
try:
matched = manager.getmtime(meta["path"]) == meta["mtime"]
except FileNotFoundError:
@@ -2093,7 +2091,7 @@ def load_tree(self, temporary: bool = False) -> None:
self.meta.data_json, self.manager, "Load tree ", "Could not load tree: "
)
if data is None:
- return None
+ return

t0 = time.time()
# TODO: Assert data file wasn't changed.
@@ -3383,7 +3381,7 @@ def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_ALL) ->
strongly_connected_components() below for a reference.
"""
if len(ascc) == 1:
- return [s for s in ascc]
+ return list(ascc)
pri_spread = set()
for id in ascc:
state = graph[id]
7 changes: 3 additions & 4 deletions mypy/checker.py
@@ -632,7 +632,7 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
if not defn.items:
# In this case we have already complained about none of these being
# valid overloads.
- return None
+ return
if len(defn.items) == 1:
self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, defn)

@@ -676,7 +676,6 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
self.msg.no_overridable_method(defn.name, defn)
self.check_explicit_override_decorator(defn, found_method_base_classes, defn.impl)
self.check_inplace_operator_method(defn)
- return None

def extract_callable_type(self, inner_type: Type | None, ctx: Context) -> CallableType | None:
"""Get type as seen by an overload item caller."""
@@ -1838,7 +1837,7 @@ def check_match_args(self, var: Var, typ: Type, context: Context) -> None:
return
typ = get_proper_type(typ)
if not isinstance(typ, TupleType) or not all(
- [is_string_literal(item) for item in typ.items]
+ is_string_literal(item) for item in typ.items
):
self.msg.note(
"__match_args__ must be a tuple containing string literals for checking "
@@ -5045,7 +5044,7 @@ def visit_break_stmt(self, s: BreakStmt) -> None:

def visit_continue_stmt(self, s: ContinueStmt) -> None:
self.binder.handle_continue()
- return None
+ return

def visit_match_stmt(self, s: MatchStmt) -> None:
with self.binder.frame_context(can_skip=False, fall_through=0):
6 changes: 2 additions & 4 deletions mypy/checkexpr.py
@@ -739,7 +739,7 @@ def check_typeddict_call(
context: Context,
orig_callee: Type | None,
) -> Type:
- if args and all([ak in (ARG_NAMED, ARG_STAR2) for ak in arg_kinds]):
+ if args and all(ak in (ARG_NAMED, ARG_STAR2) for ak in arg_kinds):
# ex: Point(x=42, y=1337, **extras)
# This is a bit ugly, but this is a price for supporting all possible syntax
# variants for TypedDict constructors.
@@ -4017,9 +4017,7 @@ def check_op(
left_variants = [base_type]
base_type = get_proper_type(base_type)
if isinstance(base_type, UnionType):
- left_variants = [
- item for item in flatten_nested_unions(base_type.relevant_items())
- ]
+ left_variants = list(flatten_nested_unions(base_type.relevant_items()))
right_type = self.accept(arg)

# Step 1: We first try leaving the right arguments alone and destructure
2 changes: 1 addition & 1 deletion mypy/checkpattern.py
@@ -187,7 +187,7 @@ def visit_or_pattern(self, o: OrPattern) -> PatternType:
capture_types[node].append((expr, typ))

captures: dict[Expression, Type] = {}
- for var, capture_list in capture_types.items():
+ for capture_list in capture_types.values():
typ = UninhabitedType()
for _, other in capture_list:
typ = join_types(typ, other)
4 changes: 2 additions & 2 deletions mypy/checkstrformat.py
@@ -372,7 +372,7 @@ def check_specs_in_format_call(
):
# TODO: add support for some custom specs like datetime?
self.msg.fail(
"Unrecognized format" ' specification "{}"'.format(spec.format_spec[1:]),
f'Unrecognized format specification "{spec.format_spec[1:]}"',
call,
code=codes.STRING_FORMATTING,
)
@@ -482,7 +482,7 @@ def find_replacements_in_call(self, call: CallExpr, keys: list[str]) -> list[Exp
expr = self.get_expr_by_name(key, call)
if not expr:
self.msg.fail(
"Cannot find replacement for named" ' format specifier "{}"'.format(key),
f'Cannot find replacement for named format specifier "{key}"',
call,
code=codes.STRING_FORMATTING,
)
2 changes: 1 addition & 1 deletion mypy/dmypy_server.py
@@ -1055,7 +1055,7 @@ def fix_module_deps(graph: mypy.build.Graph) -> None:
This can make some suppressed dependencies non-suppressed, and vice versa (if modules
have been added to or removed from the build).
"""
- for module, state in graph.items():
+ for state in graph.values():
new_suppressed = []
new_dependencies = []
for dep in state.dependencies + state.suppressed:
2 changes: 1 addition & 1 deletion mypy/dmypy_util.py
@@ -43,7 +43,7 @@ def send(connection: IPCBase, data: Any) -> None:
class WriteToConn:
"""Helper class to write to a connection instead of standard output."""

- def __init__(self, server: IPCBase, output_key: str = "stdout"):
+ def __init__(self, server: IPCBase, output_key: str = "stdout") -> None:
self.server = server
self.output_key = output_key

2 changes: 1 addition & 1 deletion mypy/errors.py
@@ -170,7 +170,7 @@ def __init__(
*,
filter_errors: bool | Callable[[str, ErrorInfo], bool] = False,
save_filtered_errors: bool = False,
- ):
+ ) -> None:
self.errors = errors
self._has_new_errors = False
self._filter = filter_errors
6 changes: 3 additions & 3 deletions mypy/main.py
@@ -145,7 +145,7 @@ def main(
sys.exit(code)

# HACK: keep res alive so that mypyc won't free it before the hard_exit
- list([res])
+ list([res]) # noqa: C410


def run_build(
@@ -349,7 +349,7 @@ class CapturableArgumentParser(argparse.ArgumentParser):
yet output must be captured to properly support mypy.api.run.
"""

- def __init__(self, *args: Any, **kwargs: Any):
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
self.stdout = kwargs.pop("stdout", sys.stdout)
self.stderr = kwargs.pop("stderr", sys.stderr)
super().__init__(*args, **kwargs)
@@ -415,7 +415,7 @@ def __init__(
default: str = argparse.SUPPRESS,
help: str = "show program's version number and exit",
stdout: IO[str] | None = None,
- ):
+ ) -> None:
super().__init__(
option_strings=option_strings, dest=dest, default=default, nargs=0, help=help
)
2 changes: 1 addition & 1 deletion mypy/message_registry.py
@@ -52,7 +52,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage:
'"return" with value in async generator is not allowed'
)
INVALID_RETURN_TYPE_FOR_GENERATOR: Final = ErrorMessage(
- 'The return type of a generator function should be "Generator"' " or one of its supertypes"
+ 'The return type of a generator function should be "Generator" or one of its supertypes'
)
INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR: Final = ErrorMessage(
'The return type of an async generator function should be "AsyncGenerator" or one of its '
2 changes: 1 addition & 1 deletion mypy/messages.py
@@ -3099,7 +3099,7 @@ def append_invariance_notes(
):
invariant_type = "Dict"
covariant_suggestion = (
'Consider using "Mapping" instead, ' "which is covariant in the value type"
'Consider using "Mapping" instead, which is covariant in the value type'
)
if invariant_type and covariant_suggestion:
notes.append(
6 changes: 3 additions & 3 deletions mypy/nodes.py
@@ -3137,7 +3137,7 @@ def protocol_members(self) -> list[str]:
if name in EXCLUDED_PROTOCOL_ATTRIBUTES:
continue
members.add(name)
- return sorted(list(members))
+ return sorted(members)

def __getitem__(self, name: str) -> SymbolTableNode:
n = self.get(name)
@@ -3296,7 +3296,7 @@ def serialize(self) -> JsonDict:
else self.typeddict_type.serialize(),
"flags": get_flags(self, TypeInfo.FLAGS),
"metadata": self.metadata,
"slots": list(sorted(self.slots)) if self.slots is not None else None,
"slots": sorted(self.slots) if self.slots is not None else None,
"deletable_attributes": self.deletable_attributes,
"self_type": self.self_type.serialize() if self.self_type is not None else None,
"dataclass_transform_spec": (
@@ -3966,7 +3966,7 @@ def __init__(
# frozen_default was added to CPython in https://github.com/python/cpython/pull/99958 citing
# positive discussion in typing-sig
frozen_default: bool | None = None,
- ):
+ ) -> None:
self.eq_default = eq_default if eq_default is not None else True
self.order_default = order_default if order_default is not None else False
self.kw_only_default = kw_only_default if kw_only_default is not None else False
14 changes: 8 additions & 6 deletions mypy/patterns.py
@@ -60,7 +60,7 @@ class ValuePattern(Pattern):

expr: Expression

- def __init__(self, expr: Expression):
+ def __init__(self, expr: Expression) -> None:
super().__init__()
self.expr = expr

@@ -72,7 +72,7 @@ class SingletonPattern(Pattern):
# This can be exactly True, False or None
value: bool | None

- def __init__(self, value: bool | None):
+ def __init__(self, value: bool | None) -> None:
super().__init__()
self.value = value

@@ -85,7 +85,7 @@ class SequencePattern(Pattern):

patterns: list[Pattern]

- def __init__(self, patterns: list[Pattern]):
+ def __init__(self, patterns: list[Pattern]) -> None:
super().__init__()
self.patterns = patterns

@@ -98,7 +98,7 @@ class StarredPattern(Pattern):
# a name.
capture: NameExpr | None

- def __init__(self, capture: NameExpr | None):
+ def __init__(self, capture: NameExpr | None) -> None:
super().__init__()
self.capture = capture

@@ -111,7 +111,9 @@ class MappingPattern(Pattern):
values: list[Pattern]
rest: NameExpr | None

- def __init__(self, keys: list[Expression], values: list[Pattern], rest: NameExpr | None):
+ def __init__(
+ self, keys: list[Expression], values: list[Pattern], rest: NameExpr | None
+ ) -> None:
super().__init__()
assert len(keys) == len(values)
self.keys = keys
@@ -136,7 +138,7 @@ def __init__(
positionals: list[Pattern],
keyword_keys: list[str],
keyword_values: list[Pattern],
- ):
+ ) -> None:
super().__init__()
assert len(keyword_keys) == len(keyword_values)
self.class_ref = class_ref
4 changes: 2 additions & 2 deletions mypy/plugins/enums.py
@@ -166,11 +166,11 @@ class SomeEnum:
for n in stnodes
if n is None or not n.implicit
)
- proper_types = list(
+ proper_types = [
_infer_value_type_with_auto_fallback(ctx, t)
for t in node_types
if t is None or not isinstance(t, CallableType)
- )
+ ]
underlying_type = _first(proper_types)
if underlying_type is None:
return ctx.default_attr_type
2 changes: 1 addition & 1 deletion mypy/renaming.py
@@ -270,7 +270,7 @@ def flush_refs(self) -> None:
This will be called at the end of a scope.
"""
is_func = self.scope_kinds[-1] == FUNCTION
- for name, refs in self.refs[-1].items():
+ for refs in self.refs[-1].values():
if len(refs) == 1:
# Only one definition -- no renaming needed.
continue
10 changes: 4 additions & 6 deletions mypy/semanal.py
@@ -4153,7 +4153,7 @@ def check_typevarlike_name(self, call: CallExpr, name: str, context: Context) ->
if len(call.args) < 1:
self.fail(f"Too few arguments for {typevarlike_type}()", context)
return False
- if not isinstance(call.args[0], StrExpr) or not call.arg_kinds[0] == ARG_POS:
+ if not isinstance(call.args[0], StrExpr) or call.arg_kinds[0] != ARG_POS:
self.fail(f"{typevarlike_type}() expects a string literal as first argument", context)
return False
elif call.args[0].value != name:
@@ -4961,9 +4961,7 @@ def visit_name_expr(self, expr: NameExpr) -> None:
def bind_name_expr(self, expr: NameExpr, sym: SymbolTableNode) -> None:
"""Bind name expression to a symbol table node."""
if isinstance(sym.node, TypeVarExpr) and self.tvar_scope.get_binding(sym):
- self.fail(
- '"{}" is a type variable and only valid in type ' "context".format(expr.name), expr
- )
+ self.fail(f'"{expr.name}" is a type variable and only valid in type context', expr)
elif isinstance(sym.node, PlaceholderNode):
self.process_placeholder(expr.name, "name", expr)
else:
@@ -6809,13 +6807,13 @@ def parse_dataclass_transform_spec(self, call: CallExpr) -> DataclassTransformSp
def parse_dataclass_transform_field_specifiers(self, arg: Expression) -> tuple[str, ...]:
if not isinstance(arg, TupleExpr):
self.fail('"field_specifiers" argument must be a tuple literal', arg)
- return tuple()
+ return ()

names = []
for specifier in arg.items:
if not isinstance(specifier, RefExpr):
self.fail('"field_specifiers" must only contain identifiers', specifier)
- return tuple()
+ return ()
names.append(specifier.fullname)
return tuple(names)

2 changes: 1 addition & 1 deletion mypy/semanal_enum.py
@@ -148,7 +148,7 @@ def parse_enum_call_args(
Return a tuple of fields, values, was there an error.
"""
args = call.args
- if not all([arg_kind in [ARG_POS, ARG_NAMED] for arg_kind in call.arg_kinds]):
+ if not all(arg_kind in [ARG_POS, ARG_NAMED] for arg_kind in call.arg_kinds):
return self.fail_enum_call_arg(f"Unexpected arguments to {class_name}()", call)
if len(args) < 2:
return self.fail_enum_call_arg(f"Too few arguments for {class_name}()", call)
2 changes: 1 addition & 1 deletion mypy/semanal_namedtuple.py
@@ -85,7 +85,7 @@
)

NAMEDTUP_CLASS_ERROR: Final = (
"Invalid statement in NamedTuple definition; " 'expected "field_name: field_type [= default]"'
'Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"'
)

SELF_TVAR_NAME: Final = "_NT"
2 changes: 1 addition & 1 deletion mypy/semanal_typeddict.py
@@ -50,7 +50,7 @@
)

TPDICT_CLASS_ERROR: Final = (
"Invalid statement in TypedDict definition; " 'expected "field_name: field_type"'
'Invalid statement in TypedDict definition; expected "field_name: field_type"'
)


4 changes: 2 additions & 2 deletions mypy/server/astmerge.py
@@ -394,7 +394,7 @@ def process_synthetic_type_info(self, info: TypeInfo) -> None:
# have bodies in the AST so we need to iterate over their symbol
# tables separately, unlike normal classes.
self.process_type_info(info)
- for name, node in info.names.items():
+ for node in info.names.values():
if node.node:
node.node.accept(self)

@@ -549,7 +549,7 @@ def fixup(self, node: SN) -> SN:
def replace_nodes_in_symbol_table(
symbols: SymbolTable, replacements: dict[SymbolNode, SymbolNode]
) -> None:
- for name, node in symbols.items():
+ for node in symbols.values():
if node.node:
if node.node in replacements:
new = replacements[node.node]