ruff: enable and fix G, RUF, UP rules

alexfikl authored and inducer committed Jul 12, 2024
1 parent e3b4986 commit 70dbe8e
Showing 29 changed files with 224 additions and 172 deletions.
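The three rule families enabled here target different things: G (flake8-logging-format) flags eager string formatting in logging calls, UP (pyupgrade) modernizes syntax for the Python versions the project supports, and RUF covers ruff-specific checks such as RUF005 (prefer unpacking over concatenation) and RUF022 (sort __all__). A minimal sketch of the kinds of patterns involved, illustrative only and not taken from this commit:

import logging

logger = logging.getLogger(__name__)
name = "meshmode"

# G004: logging calls should pass lazy %-style arguments instead of
# f-strings, so the message is only formatted if the record is emitted.
logger.info("loading %s", name)    # ok
# logger.info(f"loading {name}")   # flagged by G004

# UP (pyupgrade): modernized syntax, e.g. PEP 604 unions for Optional.
def load(path: str | None = None) -> None: ...

# RUF005: build sequences by unpacking rather than concatenation.
sizes = [3, 5, 2]
starts = [0, *sizes]               # instead of [0] + sizes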
4 changes: 2 additions & 2 deletions examples/multiple-meshes.py
@@ -1,13 +1,13 @@
 import sys
 
-import numpy as np  # noqa
+import numpy as np
 
 
 order = 4
 
 
 def main():
-    from meshmode.mesh.generation import make_curve_mesh, starfish  # noqa
+    from meshmode.mesh.generation import make_curve_mesh, starfish
     mesh1 = make_curve_mesh(starfish, np.linspace(0, 1, 20), 4)
 
     from meshmode.mesh.processing import affine_map, merge_disjoint_meshes
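The two deletions above drop "# noqa" suppressions that no longer suppressed anything (both imports are in use), which is what ruff's unused-noqa check RUF100 flags. A tiny illustrative sketch, hypothetical file rather than commit code:

import numpy as np  # a used import needs no "# noqa"

print(np.linspace(0, 1, 20))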
2 changes: 1 addition & 1 deletion examples/simple-dg.py
@@ -402,7 +402,7 @@ def wave_operator(actx, discr, c, q):
                 u=c*discr.div(q.v),
                 v=c*discr.grad(q.u)
                 )
-            -  # noqa: W504
+            -
             discr.inverse_mass(
                 discr.face_mass(
                     wave_flux(actx, discr, c=c,
29 changes: 10 additions & 19 deletions meshmode/array_context.py
@@ -25,7 +25,6 @@
 THE SOFTWARE.
 """
 
-import sys
 from warnings import warn
 
 from arraycontext import (
@@ -326,27 +325,19 @@ def actx_class(self):
         )
 
 
-if sys.version_info >= (3, 7):
-    def __getattr__(name):
-        if name not in _actx_names:
-            raise AttributeError(name)
+def __getattr__(name):
+    if name not in _actx_names:
+        raise AttributeError(name)
 
-        import arraycontext
-        result = getattr(arraycontext, name)
+    import arraycontext
+    result = getattr(arraycontext, name)
 
-        warn(f"meshmode.array_context.{name} is deprecated. "
-                f"Use arraycontext.{name} instead. "
-                f"meshmode.array_context.{name} will continue to work until 2022.",
-                DeprecationWarning, stacklevel=2)
+    warn(f"meshmode.array_context.{name} is deprecated. "
+            f"Use arraycontext.{name} instead. "
+            f"meshmode.array_context.{name} will continue to work until 2022.",
+            DeprecationWarning, stacklevel=2)
 
-        return result
-else:
-    def _import_names():
-        import arraycontext
-        for name in _actx_names:
-            globals()[name] = getattr(arraycontext, name)
-
-    _import_names()
+    return result
 
 # }}}
 
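With Python < 3.7 long out of support, the version check is dead code, so the PEP 562 module-level __getattr__ is now used unconditionally (pyupgrade's UP036 removes such outdated version blocks). A minimal standalone sketch of the pattern, with a hypothetical module and math standing in for arraycontext:

from warnings import warn

_forwarded = frozenset({"sqrt", "pi"})

def __getattr__(name):
    # PEP 562: called only when normal module attribute lookup fails.
    if name not in _forwarded:
        raise AttributeError(name)

    import math  # stand-in for arraycontext in this sketch
    warn(f"this_module.{name} is deprecated; use math.{name} instead.",
         DeprecationWarning, stacklevel=2)
    return getattr(math, name)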
39 changes: 19 additions & 20 deletions meshmode/discretization/connection/__init__.py
@@ -63,26 +63,25 @@
 
 
 __all__ = [
-        "DiscretizationConnection",
-        "IdentityDiscretizationConnection",
-        "DirectDiscretizationConnection",
-        "ChainedDiscretizationConnection",
-        "L2ProjectionInverseDiscretizationConnection",
-        "NodalToModalDiscretizationConnection",
-        "ModalToNodalDiscretizationConnection",
-
-        "make_same_mesh_connection",
-        "FACE_RESTR_INTERIOR", "FACE_RESTR_ALL",
-        "make_face_restriction",
-        "make_face_to_all_faces_embedding",
-        "make_opposite_face_connection",
-        "make_partition_connection",
-        "make_refinement_connection",
-        "flatten_chained_connection",
-
-        "InterpolationBatch",
-        "DiscretizationConnectionElementGroup",
-        ]
+    "FACE_RESTR_ALL",
+    "FACE_RESTR_INTERIOR",
+    "ChainedDiscretizationConnection",
+    "DirectDiscretizationConnection",
+    "DiscretizationConnection",
+    "DiscretizationConnectionElementGroup",
+    "IdentityDiscretizationConnection",
+    "InterpolationBatch",
+    "L2ProjectionInverseDiscretizationConnection",
+    "ModalToNodalDiscretizationConnection",
+    "NodalToModalDiscretizationConnection",
+    "flatten_chained_connection",
+    "make_face_restriction",
+    "make_face_to_all_faces_embedding",
+    "make_opposite_face_connection",
+    "make_partition_connection",
+    "make_refinement_connection",
+    "make_same_mesh_connection",
+    ]
 
 __doc__ = """
 Base classes
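The reordering above is RUF022's isort-style sort for __all__: SCREAMING_CASE constants first, then CamelCase classes, then snake_case functions, each group alphabetical. A rough sketch of that sort key, simplified and not ruff's actual implementation:

def _dunder_all_key(name: str) -> tuple[int, str]:
    if name.isupper():
        return (0, name)    # constants, e.g. FACE_RESTR_ALL
    if name[:1].isupper():
        return (1, name)    # classes, e.g. DiscretizationConnection
    return (2, name)        # functions, e.g. make_face_restriction

names = ["make_same_mesh_connection", "DiscretizationConnection",
         "FACE_RESTR_ALL", "flatten_chained_connection"]
print(sorted(names, key=_dunder_all_key))
# ['FACE_RESTR_ALL', 'DiscretizationConnection',
#  'flatten_chained_connection', 'make_same_mesh_connection']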
4 changes: 2 additions & 2 deletions meshmode/discretization/connection/direct.py
@@ -525,7 +525,7 @@ def _per_target_group_pick_info(
         if (from_el_indices[to_el_ind] != -1).any():
             from warnings import warn
             warn("per-batch target elements not disjoint during "
-                    "attempted merge")
+                    "attempted merge", stacklevel=3)
             return None
 
         from_el_indices[to_el_ind] = \
@@ -927,7 +927,7 @@ def knl():
     from_group_sizes = [
             grp.nelements*grp.nunit_dofs
             for grp in conn.from_discr.groups]
-    from_group_starts = np.cumsum([0] + from_group_sizes)
+    from_group_starts = np.cumsum([0, *from_group_sizes])
 
     tgt_node_nr_base = 0
     mats = []
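Two independent fixes here: warn(...) gains an explicit stacklevel so the warning points at the caller rather than the warn() call site, and RUF005 rewrites the list concatenation feeding np.cumsum. A quick equivalence check for the latter, a sketch with made-up sizes:

import numpy as np

from_group_sizes = [12, 40, 7]  # hypothetical per-group sizes
old = np.cumsum([0] + from_group_sizes)
new = np.cumsum([0, *from_group_sizes])
assert np.array_equal(old, new)  # array([ 0, 12, 52, 59]) either way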
4 changes: 2 additions & 2 deletions meshmode/discretization/connection/face.py
@@ -323,11 +323,11 @@ def make_face_restriction(actx, discr, group_factory, boundary_tag,
                 # Find vertex_indices
                 glob_face_vertices = mgrp.vertex_indices[
                         batch_boundary_el_numbers_in_grp][:, face.volume_vertex_indices]
-                vertex_indices[new_el_numbers] = (  # pylint: disable=possibly-used-before-assignment  # noqa: E501
+                vertex_indices[new_el_numbers] = (  # pylint: disable=possibly-used-before-assignment
                         vol_to_bdry_vertices[glob_face_vertices])
 
                 # Find nodes
-                nodes[:, new_el_numbers, :] = np.einsum(  # pylint: disable=possibly-used-before-assignment  # noqa: E501
+                nodes[:, new_el_numbers, :] = np.einsum(  # pylint: disable=possibly-used-before-assignment
                         "ij,dej->dei",
                         resampling_mat,
                         mgrp.nodes[:, batch_boundary_el_numbers_in_grp, :])
4 changes: 3 additions & 1 deletion meshmode/discretization/connection/opposite_face.py
@@ -198,10 +198,12 @@ def get_map_jacobian(unit_nodes):
         # {{{ test map applier and jacobian
 
         if 0:
+            rng = np.random.default_rng(seed=None)
+
             u = src_unit_nodes
             f = apply_map(u)
             for h in [1e-1, 1e-2]:
-                du = h*np.random.randn(*u.shape)
+                du = h*rng.normal(size=u.shape)
 
                 f_2 = apply_map(u+du)
 
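The legacy np.random.randn global is replaced with NumPy's Generator API, which draws from an explicit, independently seedable generator instead of hidden global state. A small standalone sketch of the correspondence:

import numpy as np

rng = np.random.default_rng(seed=42)
u = np.zeros((2, 5))

# rng.normal(size=u.shape) draws standard-normal samples of the same
# shape that np.random.randn(*u.shape) would have produced.
du = 1e-2 * rng.normal(size=u.shape)
assert du.shape == u.shape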
3 changes: 2 additions & 1 deletion meshmode/discretization/poly_element.py
@@ -983,7 +983,8 @@ def default_simplex_group_factory(base_dim, order):
         elif base_dim == 3:
             return PolynomialWarpAndBlend3DRestrictingGroupFactory(order)
         else:
-            raise ValueError(f"no usable set of nodes found for {base_dim}D")
+            raise ValueError(
+                f"no usable set of nodes found for {base_dim}D") from None
 
     return PolynomialRecursiveNodesGroupFactory(order, family="lgl")
 
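The reformatted raise also gains "from None", which suppresses implicit exception chaining when raising inside an except block, keeping the "During handling of the above exception..." noise out of the traceback. A standalone sketch, not meshmode's code:

def lookup(table: dict, key: str) -> int:
    try:
        return table[key]
    except KeyError:
        raise ValueError(f"no usable entry for {key!r}") from None

try:
    lookup({}, "nodes")
except ValueError as exc:
    assert exc.__suppress_context__  # set by "from None"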
6 changes: 3 additions & 3 deletions meshmode/discretization/visualization.py
@@ -588,7 +588,7 @@ def show_scalar_in_mayavi(self, field, **kwargs):
             nodes.append(0*nodes[0])
         assert len(nodes) == 3
 
-        args = tuple(nodes) + (field,)
+        args = (*nodes, field)
 
         # https://docs.enthought.com/mayavi/mayavi/auto/example_plotting_many_lines.html  # noqa: E501
         src = mlab.pipeline.scalar_scatter(*args)
@@ -603,7 +603,7 @@ def show_scalar_in_mayavi(self, field, **kwargs):
             while len(nodes) < 3:
                 nodes.append(0*nodes[0])
 
-            args = tuple(nodes) + (vis_connectivity.reshape(-1, 3),)
+            args = (*nodes, vis_connectivity.reshape(-1, 3))
             kwargs["scalars"] = field
 
             mlab.triangular_mesh(*args, **kwargs)
@@ -742,7 +742,7 @@ def write_vtk_file(self, file_name, names_and_fields,
             - Added *par_manifest_filename* and *par_file_names*.
             - Added *use_high_order*.
-        """  # noqa: E501
+        """
 
         if use_high_order is None:
             use_high_order = False
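The args rewrites are RUF005 again, this time for tuples: unpacking is equivalent to concatenating a one-element tuple. A one-line check, a sketch with dummy values:

nodes = [1.0, 2.0, 0.0]
field = [0.5, 0.7]
assert (*nodes, field) == tuple(nodes) + (field,)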
52 changes: 28 additions & 24 deletions meshmode/dof_array.py
@@ -28,7 +28,7 @@
 from contextlib import contextmanager
 from functools import partial, update_wrapper
 from numbers import Number
-from typing import Any, Callable, Iterable, Optional, Tuple
+from typing import Any, Callable, Iterable
 from warnings import warn
 
 import numpy as np
@@ -138,7 +138,7 @@ class DOFArray:
     the array context given to :func:`array_context_for_pickling`.
     """
 
-    def __init__(self, actx: Optional[ArrayContext], data: Tuple[Any]) -> None:
+    def __init__(self, actx: ArrayContext | None, data: tuple[Any, ...]) -> None:
         if __debug__:
             if not (actx is None or isinstance(actx, ArrayContext)):
                 raise TypeError("actx must be of type ArrayContext")
@@ -162,7 +162,7 @@ def entry_dtype(self) -> np.dtype:
         return single_valued(subary.dtype for subary in self._data)
 
     @classmethod
-    def from_list(cls, actx: Optional[ArrayContext], res_list) -> "DOFArray":
+    def from_list(cls, actx: ArrayContext | None, res_list) -> DOFArray:
         r"""Create a :class:`DOFArray` from a list of arrays
         (one per :class:`~meshmode.discretization.ElementGroupBase`).
@@ -178,10 +178,10 @@ def from_list(cls, actx: Optional[ArrayContext], res_list) -> "DOFArray":
         return cls(actx, tuple(res_list))
 
     def __str__(self) -> str:
-        return f"DOFArray({str(self._data)})"
+        return f"DOFArray({self._data})"
 
     def __repr__(self) -> str:
-        return f"DOFArray({repr(self._data)})"
+        return f"DOFArray({self._data!r})"
 
     # {{{ sequence protocol
 
@@ -207,7 +207,7 @@ def _like_me(self, data: Iterable[Array]) -> DOFArray:
         return DOFArray(self.array_context, tuple(data))
 
     @property
-    def shape(self) -> Tuple[int]:
+    def shape(self) -> tuple[int]:
         return (len(self),)
 
     @property
@@ -263,15 +263,15 @@ def _ibop(self, f, arg):
 
         return self
 
-    def __iadd__(self, arg): return self._ibop(op.iadd, arg)  # noqa: E704
-    def __isub__(self, arg): return self._ibop(op.isub, arg)  # noqa: E704
-    def __imul__(self, arg): return self._ibop(op.imul, arg)  # noqa: E704
-    def __itruediv__(self, arg): return self._ibop(op.itruediv, arg)  # noqa: E704
-    def __imod__(self, arg): return self._ibop(op.imod, arg)  # noqa: E704
+    def __iadd__(self, arg): return self._ibop(op.iadd, arg)
+    def __isub__(self, arg): return self._ibop(op.isub, arg)
+    def __imul__(self, arg): return self._ibop(op.imul, arg)
+    def __itruediv__(self, arg): return self._ibop(op.itruediv, arg)
+    def __imod__(self, arg): return self._ibop(op.imod, arg)
 
-    def __iand__(self, arg): return self._ibop(op.iand, arg)  # noqa: E704
-    def __ixor__(self, arg): return self._ibop(op.ixor, arg)  # noqa: E704
-    def __ior__(self, arg): return self._ibop(op.ior, arg)  # noqa: E704
+    def __iand__(self, arg): return self._ibop(op.iand, arg)
+    def __ixor__(self, arg): return self._ibop(op.ixor, arg)
+    def __ior__(self, arg): return self._ibop(op.ior, arg)
 
     # }}}
 
@@ -327,7 +327,8 @@ def __setstate__(self, state):
             # For backwards compatibility
             from warnings import warn
             warn("A DOFArray is being unpickled without (tag) metadata. "
-                    "Program transformation may fail as a result.")
+                    "Program transformation may fail as a result.",
+                    stacklevel=2)
 
             data = state
             tags = [frozenset() for _ in range(len(data))]
@@ -383,7 +384,7 @@ def _serialize_dof_container(ary: DOFArray):
 
 @deserialize_container.register(DOFArray)
 def _deserialize_dof_container(
-        template: Any, iterable: Iterable[Tuple[Any, Any]]):
+        template: Any, iterable: Iterable[tuple[Any, Any]]):
     if __debug__:
         def _raise_index_inconsistency(i, stream_i):
             raise ValueError(
@@ -586,14 +587,14 @@ def _unflatten_group_sizes(discr, ndofs_per_element_per_group):
             in zip(discr.groups, ndofs_per_element_per_group)]
 
     group_sizes = [nel * ndof for nel, ndof in group_shapes]
-    group_starts = np.cumsum([0] + group_sizes)
+    group_starts = np.cumsum([0, *group_sizes])
 
     return group_shapes, group_starts
 
 
 def unflatten(
         actx: ArrayContext, discr, ary: ArrayOrContainerT,
-        ndofs_per_element_per_group: Optional[Iterable[int]] = None, *,
+        ndofs_per_element_per_group: Iterable[int] | None = None, *,
         strict: bool = True,
         ) -> ArrayOrContainerT:
     r"""Convert all "flat" arrays returned by :func:`flatten` back to
@@ -668,7 +669,7 @@ def _unflatten_like(_ary, _prototype):
         if isinstance(_prototype, DOFArray):
             group_shapes = [subary.shape for subary in _prototype]
             group_sizes = [subary.size for subary in _prototype]
-            group_starts = np.cumsum([0] + group_sizes)
+            group_starts = np.cumsum([0, *group_sizes])
 
             return _unflatten_dof_array(
                     actx, _ary, group_shapes, group_starts,
@@ -680,9 +681,11 @@ def _unflatten_like(_ary, _prototype):
                     serialize_container(_prototype))
         except NotAnArrayContainerError:
             if strict:
-                raise ValueError("cannot unflatten array "
-                        f"with prototype '{type(_prototype).__name__}'; "
-                        "use 'strict=False' to leave the array unchanged")
+                raise ValueError(
+                    "cannot unflatten array with prototype "
+                    f"'{type(_prototype).__name__}': "
+                    "use 'strict=False' to leave the array unchanged"
+                    ) from None
 
             assert type(_ary) is type(_prototype)
             return _ary
@@ -716,7 +719,7 @@ def _flatten_to_numpy(subary):
 
 def unflatten_from_numpy(
         actx: ArrayContext, discr, ary: ArrayOrContainerT,
-        ndofs_per_element_per_group: Optional[Iterable[int]] = None, *,
+        ndofs_per_element_per_group: Iterable[int] | None = None, *,
         strict: bool = True,
         ) -> ArrayOrContainerT:
     r"""Takes "flat" arrays returned by :func:`flatten_to_numpy` and
@@ -808,7 +811,8 @@ def _rec(_ary):
         try:
             iterable = serialize_container(_ary)
         except NotAnArrayContainerError:
-            raise TypeError(f"unsupported array type: '{type(_ary).__name__}'")
+            raise TypeError(
+                f"unsupported array type: '{type(_ary).__name__}'") from None
         else:
            arys = [_rec(subary) for _, subary in iterable]
            return _reduce_norm(actx, arys, ord=ord)
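Most of the churn in this file is pyupgrade modernizing annotations (builtin generics and PEP 604 unions replacing typing.Tuple and typing.Optional, usable on older interpreters via deferred annotation evaluation) plus RUF010's f-string conversion flags replacing explicit str() and repr() calls. A condensed standalone sketch of both:

from __future__ import annotations  # defer annotation evaluation

def describe(data: tuple[int, ...], label: str | None = None) -> str:
    # f"{data!r}" is the RUF010 spelling of f"{repr(data)}"
    return f"{label or 'data'}: {data!r}"

print(describe((1, 2, 3)))  # prints "data: (1, 2, 3)"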
11 changes: 7 additions & 4 deletions meshmode/interop/firedrake/__init__.py
@@ -32,7 +32,10 @@
 )
 
 
-__all__ = ["build_connection_from_firedrake", "build_connection_to_firedrake",
-        "FiredrakeConnection", "import_firedrake_mesh",
-        "export_mesh_to_firedrake",
-        ]
+__all__ = [
+    "FiredrakeConnection",
+    "build_connection_from_firedrake",
+    "build_connection_to_firedrake",
+    "export_mesh_to_firedrake",
+    "import_firedrake_mesh",
+    ]