
Commit 0e5e3fe

Enable mypy warn unused ignores (#7335)
* warn unused ignores in mypy
* fix type ignores in typed ops
* fix mypy issues
* fix one more mypy issue
1 parent 0aee4fe commit 0e5e3fe

22 files changed (+84, -86 lines)
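
For context: with warn_unused_ignores = true, mypy reports every "# type: ignore" comment that no longer suppresses a real error, which is how the stale suppressions removed in this commit were found. A minimal sketch of the behaviour (hypothetical demo.py, not part of this diff):

# demo.py -- check with: mypy --warn-unused-ignores --show-error-codes demo.py
x: int = 0  # type: ignore
# mypy flags the line above: Unused "type: ignore" comment

y: int = "not an int"  # type: ignore[assignment]
# this one still suppresses a real error, so it is not flagged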

pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -27,6 +27,7 @@ exclude_lines = ["pragma: no cover", "if TYPE_CHECKING"]
 exclude = 'xarray/util/generate_.*\.py'
 files = "xarray"
 show_error_codes = true
+warn_unused_ignores = true
 
 # Most of the numerical computing stack doesn't have type annotations yet.
 [[tool.mypy.overrides]]

xarray/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@
     from importlib.metadata import version as _version
 except ImportError:
     # if the fallback library is missing, we are doomed.
-    from importlib_metadata import version as _version  # type: ignore[no-redef]
+    from importlib_metadata import version as _version
 
 try:
     __version__ = _version("xarray")

xarray/backends/locks.py

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ def _get_lock_maker(scheduler=None):
         try:
             from dask.distributed import Lock as DistributedLock
         except ImportError:
-            DistributedLock = None  # type: ignore
+            DistributedLock = None
         return DistributedLock
     else:
         raise KeyError(scheduler)

xarray/core/_typed_ops.pyi

Lines changed: 52 additions & 52 deletions
Large diffs are not rendered by default.

xarray/core/alignment.py

Lines changed: 1 addition & 1 deletion
@@ -490,7 +490,7 @@ def _get_dim_pos_indexers(
             obj_idx = matching_indexes.get(key)
             if obj_idx is not None:
                 if self.reindex[key]:
-                    indexers = obj_idx.reindex_like(aligned_idx, **self.reindex_kwargs)  # type: ignore[call-arg]
+                    indexers = obj_idx.reindex_like(aligned_idx, **self.reindex_kwargs)
                     dim_pos_indexers.update(indexers)
 
         return dim_pos_indexers

xarray/core/combine.py

Lines changed: 1 addition & 1 deletion
@@ -377,7 +377,7 @@ def _nested_combine(
 
 # Define type for arbitrarily-nested list of lists recursively
 # Currently mypy cannot handle this but other linters can (https://stackoverflow.com/a/53845083/3154101)
-DATASET_HYPERCUBE = Union[Dataset, Iterable["DATASET_HYPERCUBE"]]  # type: ignore
+DATASET_HYPERCUBE = Union[Dataset, Iterable["DATASET_HYPERCUBE"]]  # type: ignore[misc]
 
 
 def combine_nested(
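
The recursive alias above still needs a suppression (the in-line comment notes that mypy cannot handle the self-referential Union), so its ignore is kept but narrowed from a bare # type: ignore to # type: ignore[misc]. A scoped ignore only silences that one error code, so unrelated problems on the same line still surface; a minimal illustration (hypothetical code, not from xarray):

def take_int(n: int) -> int:
    return n

take_int("a")  # type: ignore            # bare: hides arg-type and any other error on this line
take_int("b")  # type: ignore[arg-type]  # scoped: hides only the arg-type error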

xarray/core/computation.py

Lines changed: 1 addition & 1 deletion
@@ -1734,7 +1734,7 @@ def dot(
         dim_counts.update(arr.dims)
     dims = tuple(d for d, c in dim_counts.items() if c > 1)
 
-    dot_dims: set[Hashable] = set(dims)  # type:ignore[arg-type]
+    dot_dims: set[Hashable] = set(dims)
 
     # dimensions to be parallelized
     broadcast_dims = common_dims - dot_dims

xarray/core/coordinates.py

Lines changed: 4 additions & 6 deletions
@@ -54,11 +54,11 @@ def dtypes(self) -> Frozen[Hashable, np.dtype]:
 
     @property
     def indexes(self) -> Indexes[pd.Index]:
-        return self._data.indexes  # type: ignore[attr-defined]
+        return self._data.indexes
 
     @property
     def xindexes(self) -> Indexes[Index]:
-        return self._data.xindexes  # type: ignore[attr-defined]
+        return self._data.xindexes
 
     @property
     def variables(self):
@@ -116,11 +116,9 @@ def to_index(self, ordered_dims: Sequence[Hashable] | None = None) -> pd.Index:
             raise ValueError("no valid index for a 0-dimensional object")
         elif len(ordered_dims) == 1:
             (dim,) = ordered_dims
-            return self._data.get_index(dim)  # type: ignore[attr-defined]
+            return self._data.get_index(dim)
         else:
-            indexes = [
-                self._data.get_index(k) for k in ordered_dims  # type: ignore[attr-defined]
-            ]
+            indexes = [self._data.get_index(k) for k in ordered_dims]
 
             # compute the sizes of the repeat and tile for the cartesian product
             # (taken from pandas.core.reshape.util)

xarray/core/dataarray.py

Lines changed: 1 addition & 1 deletion
@@ -428,7 +428,7 @@ def __init__(
 
         # TODO(shoyer): document this argument, once it becomes part of the
         # public interface.
-        self._indexes = indexes  # type: ignore[assignment]
+        self._indexes = indexes
 
         self._close = None
 

xarray/core/groupby.py

Lines changed: 2 additions & 2 deletions
@@ -713,7 +713,7 @@ def _flox_reduce(
         elif dim is ...:
             parsed_dim = tuple(self._original_obj.dims)
         else:
-            parsed_dim = tuple(dim)  # type:ignore[arg-type]
+            parsed_dim = tuple(dim)
 
         # Do this so we raise the same error message whether flox is present or not.
         # Better to control it here than in flox.
@@ -747,7 +747,7 @@
         result = xarray_reduce(
             self._original_obj.drop_vars(non_numeric),
             group,
-            dim=parsed_dim,  # type:ignore[arg-type]  # https://github.com/xarray-contrib/flox/issues/96
+            dim=parsed_dim,
             expected_groups=expected_groups,
             isbin=isbin,
             keep_attrs=keep_attrs,

xarray/core/indexing.py

Lines changed: 2 additions & 2 deletions
@@ -180,7 +180,7 @@ def map_index_queries(
             # forward dimension indexers with no index/coordinate
             results.append(IndexSelResult(labels))
         else:
-            results.append(index.sel(labels, **options))  # type: ignore[call-arg]
+            results.append(index.sel(labels, **options))
 
     merged = merge_sel_results(results)
 
@@ -1422,7 +1422,7 @@ def __init__(self, array: pd.Index, dtype: DTypeLike = None):
         if dtype is None:
             self._dtype = get_valid_numpy_dtype(array)
         else:
-            self._dtype = np.dtype(dtype)  # type: ignore[assignment]
+            self._dtype = np.dtype(dtype)
 
     @property
     def dtype(self) -> np.dtype:

xarray/core/variable.py

Lines changed: 1 addition & 1 deletion
@@ -1509,7 +1509,7 @@ def pad(
         if reflect_type is not None:
             pad_option_kwargs["reflect_type"] = reflect_type
 
-        array = np.pad(  # type: ignore[call-overload]
+        array = np.pad(
             self.data.astype(dtype, copy=False),
             pad_width_by_index,
             mode=mode,

xarray/plot/dataarray_plot.py

Lines changed: 2 additions & 2 deletions
@@ -1488,7 +1488,7 @@ def newplotfunc(
         if ax is None:
             # TODO: Importing Axes3D is no longer necessary in matplotlib >= 3.2.
             # Remove when minimum requirement of matplotlib is 3.2:
-            from mpl_toolkits.mplot3d import Axes3D  # type: ignore  # noqa: F401
+            from mpl_toolkits.mplot3d import Axes3D  # noqa: F401
 
             # delete so it does not end up in locals()
             del Axes3D
@@ -1521,7 +1521,7 @@ def newplotfunc(
             and not kwargs.get("_is_facetgrid", False)
             and ax is not None
         ):
-            import mpl_toolkits  # type: ignore
+            import mpl_toolkits
 
             if not isinstance(ax, mpl_toolkits.mplot3d.Axes3D):
                 raise ValueError(

xarray/tests/test_backends.py

Lines changed: 1 addition & 1 deletion
@@ -3153,7 +3153,7 @@ def test_open_badbytes(self) -> None:
         with pytest.raises(
             ValueError, match=r"not the signature of a valid netCDF4 file"
         ):
-            with open_dataset(BytesIO(b"garbage"), engine="h5netcdf"):  # type: ignore[arg-type]
+            with open_dataset(BytesIO(b"garbage"), engine="h5netcdf"):
                 pass
 
     def test_open_twice(self) -> None:

xarray/tests/test_coding_times.py

Lines changed: 2 additions & 2 deletions
@@ -614,7 +614,7 @@ def test_cf_timedelta_2d() -> None:
 
     actual = coding.times.decode_cf_timedelta(numbers, units)
     assert_array_equal(expected, actual)
-    assert expected.dtype == actual.dtype  # type: ignore
+    assert expected.dtype == actual.dtype
 
 
 @pytest.mark.parametrize(
@@ -651,7 +651,7 @@ def test_format_cftime_datetime(date_args, expected) -> None:
 def test_decode_cf(calendar) -> None:
     days = [1.0, 2.0, 3.0]
     # TODO: GH5690 — do we want to allow this type for `coords`?
-    da = DataArray(days, coords=[days], dims=["time"], name="test")  # type: ignore
+    da = DataArray(days, coords=[days], dims=["time"], name="test")
     ds = da.to_dataset()
 
     for v in ["test", "time"]:

xarray/tests/test_dataarray.py

Lines changed: 3 additions & 3 deletions
@@ -512,7 +512,7 @@ def test_equals_and_identical(self) -> None:
 
     def test_equals_failures(self) -> None:
         orig = DataArray(np.arange(5.0), {"a": 42}, dims="x")
-        assert not orig.equals(np.arange(5))  # type: ignore
+        assert not orig.equals(np.arange(5))  # type: ignore[arg-type]
         assert not orig.identical(123)  # type: ignore
         assert not orig.broadcast_equals({1: 2})  # type: ignore
 
@@ -2754,9 +2754,9 @@ def test_quantile_method(self, method) -> None:
         actual = DataArray(self.va).quantile(q, method=method)
 
         if Version(np.__version__) >= Version("1.22.0"):
-            expected = np.nanquantile(self.dv.values, np.array(q), method=method)  # type: ignore[call-arg]
+            expected = np.nanquantile(self.dv.values, np.array(q), method=method)
         else:
-            expected = np.nanquantile(self.dv.values, np.array(q), interpolation=method)  # type: ignore[call-arg]
+            expected = np.nanquantile(self.dv.values, np.array(q), interpolation=method)
 
         np.testing.assert_allclose(actual.values, expected)
 

xarray/tests/test_dataset.py

Lines changed: 1 addition & 1 deletion
@@ -6230,7 +6230,7 @@ def test_query(self, backend, engine, parser) -> None:
         with pytest.raises(ValueError):
             ds.query("a > 5")  # type: ignore # must be dict or kwargs
         with pytest.raises(ValueError):
-            ds.query(x=(a > 5))  # type: ignore # must be query string
+            ds.query(x=(a > 5))
         with pytest.raises(IndexError):
             ds.query(y="a > 5")  # wrong length dimension
         with pytest.raises(IndexError):

xarray/tests/test_distributed.py

Lines changed: 1 addition & 1 deletion
@@ -189,7 +189,7 @@ def test_dask_distributed_zarr_integration_test(
         write_kwargs: dict[str, Any] = {"consolidated": True}
         read_kwargs: dict[str, Any] = {"backend_kwargs": {"consolidated": True}}
     else:
-        write_kwargs = read_kwargs = {}  # type: ignore
+        write_kwargs = read_kwargs = {}
     chunks = {"dim1": 4, "dim2": 3, "dim3": 5}
     with cluster() as (s, [a, b]):
         with Client(s["address"], loop=loop):

xarray/tests/test_extensions.py

Lines changed: 1 addition & 2 deletions
@@ -37,15 +37,14 @@ def foo(self):
 
         da = xr.DataArray(0)
         assert da.demo.foo == "bar"
-
         # accessor is cached
         assert ds.demo is ds.demo
 
         # check descriptor
         assert ds.demo.__doc__ == "Demo accessor."
         # TODO: typing doesn't seem to work with accessors
         assert xr.Dataset.demo.__doc__ == "Demo accessor."  # type: ignore
-        assert isinstance(ds.demo, DemoAccessor)  # type: ignore
+        assert isinstance(ds.demo, DemoAccessor)
         assert xr.Dataset.demo is DemoAccessor  # type: ignore
 
         # ensure we can remove it

xarray/tests/test_plot.py

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@
 try:
     import matplotlib as mpl
     import matplotlib.pyplot as plt
-    import mpl_toolkits  # type: ignore
+    import mpl_toolkits
 except ImportError:
     pass
 

xarray/tests/test_variable.py

Lines changed: 2 additions & 2 deletions
@@ -1769,9 +1769,9 @@ def test_quantile_method(self, method, use_dask) -> None:
         actual = v.quantile(q, dim="y", method=method)
 
         if Version(np.__version__) >= Version("1.22"):
-            expected = np.nanquantile(self.d, q, axis=1, method=method)  # type: ignore[call-arg]
+            expected = np.nanquantile(self.d, q, axis=1, method=method)
         else:
-            expected = np.nanquantile(self.d, q, axis=1, interpolation=method)  # type: ignore[call-arg]
+            expected = np.nanquantile(self.d, q, axis=1, interpolation=method)
 
         if use_dask:
             assert isinstance(actual.data, dask_array_type)

xarray/util/generate_ops.py

Lines changed: 2 additions & 2 deletions
@@ -110,7 +110,7 @@ def {method}(self: T_Dataset, other: DsCompatible) -> T_Dataset: ...{override}"""
     @overload{override}
     def {method}(self, other: T_Dataset) -> T_Dataset: ...
     @overload
-    def {method}(self, other: "DatasetGroupBy") -> "Dataset": ...  # type: ignore[misc]
+    def {method}(self, other: "DatasetGroupBy") -> "Dataset": ...
     @overload
     def {method}(self: T_DataArray, other: DaCompatible) -> T_DataArray: ..."""
 stub_var = """\
@@ -124,7 +124,7 @@ def {method}(self: T_Variable, other: VarCompatible) -> T_Variable: ..."""
     @overload{override}
     def {method}(self, other: T_Dataset) -> T_Dataset: ...
     @overload
-    def {method}(self, other: "DataArray") -> "Dataset": ...  # type: ignore[misc]
+    def {method}(self, other: "DataArray") -> "Dataset": ...
     @overload
     def {method}(self, other: GroupByIncompatible) -> NoReturn: ..."""
 stub_dagb = """\
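
These templates produce the overload stubs in xarray/core/_typed_ops.pyi (the large diff above that is not rendered). The trailing # type: ignore[misc] on these overloads appears to have guarded against mypy's overlapping-overload warning; with warn_unused_ignores on it was reported as unused, so the generator now omits it. For reference, the kind of overlap that error code covers looks roughly like this (hypothetical example, not the generated code):

from typing import overload

@overload
def describe(x: int) -> str: ...
@overload
def describe(x: object) -> int: ...
def describe(x): ...
# mypy reports on the first signature:
#   Overloaded function signatures 1 and 2 overlap with incompatible return types  [misc]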

0 commit comments
