Commit 580cc9b

Apply ruff/Pylint rule PLW0127
PLW0127: self-assignment of variable. The intent of the "flake8 workaround" might have been to avoid F811 errors, but those F811 errors exist only because of the ignored F401 errors. So ignore the F401 errors more precisely to clarify the situation.
1 parent: 06632d2 · commit: 580cc9b
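
For context, a condensed before/after sketch of the lint interplay described above, based on the diff below. The fixture names (loop, client) come from test_distributed.py; the shortened import list and the test name test_roundtrip are illustrative only:

    # Before: F401 (unused import) was suppressed for the whole import block,
    # so reusing the imported fixture names as test parameters raised F811
    # (redefinition of unused name). The self-assignments silenced F811 but
    # are exactly what PLW0127 (self-assignment of variable) flags.
    from distributed.utils_test import (  # noqa: F401
        client,
        loop,
    )

    loop = loop  # PLW0127
    client = client  # PLW0127

    # After: suppress F401 per imported name, and mark each parameter that
    # shadows a fixture import with an explicit "# noqa: F811" instead.
    from distributed.utils_test import (
        client,  # noqa: F401
        loop,  # noqa: F401
    )

    def test_roundtrip(loop):  # noqa: F811
        ...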

2 files changed: +18 −16 lines changed


xarray/core/dataset.py

Lines changed: 0 additions & 2 deletions
@@ -7966,8 +7966,6 @@ def sortby(
             variables = variables(self)
         if not isinstance(variables, list):
             variables = [variables]
-        else:
-            variables = variables
         arrays = [v if isinstance(v, DataArray) else self[v] for v in variables]
         aligned_vars = align(self, *arrays, join="left")
         aligned_self = cast("Self", aligned_vars[0])

xarray/tests/test_distributed.py

Lines changed: 18 additions & 14 deletions
@@ -21,14 +21,14 @@
 
 from dask.distributed import Client, Lock
 from distributed.client import futures_of
-from distributed.utils_test import (  # noqa: F401
-    cleanup,
-    client,
+from distributed.utils_test import (
+    cleanup,  # noqa: F401
+    client,  # noqa: F401
     cluster,
-    cluster_fixture,
+    cluster_fixture,  # noqa: F401
     gen_cluster,
-    loop,
-    loop_in_thread,
+    loop,  # noqa: F401
+    loop_in_thread,  # noqa: F401
 )
 
 import xarray as xr
@@ -49,9 +49,6 @@
 )
 from xarray.tests.test_dataset import create_test_data
 
-loop = loop  # loop is an imported fixture, which flake8 has issues ack-ing
-client = client  # client is an imported fixture, which flake8 has issues ack-ing
-
 
 @pytest.fixture
 def tmp_netcdf_filename(tmpdir):
@@ -89,7 +86,10 @@ def tmp_netcdf_filename(tmpdir):
 
 @pytest.mark.parametrize("engine,nc_format", ENGINES_AND_FORMATS)
 def test_dask_distributed_netcdf_roundtrip(
-    loop, tmp_netcdf_filename, engine, nc_format
+    loop,  # noqa: F811
+    tmp_netcdf_filename,
+    engine,
+    nc_format,
 ):
     if engine not in ENGINES:
         pytest.skip("engine not available")
@@ -119,7 +119,8 @@ def test_dask_distributed_netcdf_roundtrip(
 
 @requires_netCDF4
 def test_dask_distributed_write_netcdf_with_dimensionless_variables(
-    loop, tmp_netcdf_filename
+    loop,  # noqa: F811
+    tmp_netcdf_filename,
 ):
     with cluster() as (s, [a, b]):
         with Client(s["address"], loop=loop):
@@ -199,7 +200,10 @@ def test_open_mfdataset_multiple_files_parallel(parallel, tmp_path):
 
 @pytest.mark.parametrize("engine,nc_format", ENGINES_AND_FORMATS)
 def test_dask_distributed_read_netcdf_integration_test(
-    loop, tmp_netcdf_filename, engine, nc_format
+    loop,  # noqa: F811
+    tmp_netcdf_filename,
+    engine,
+    nc_format,
 ):
     if engine not in ENGINES:
         pytest.skip("engine not available")
@@ -223,7 +227,7 @@ def test_dask_distributed_read_netcdf_integration_test(
 # heads-up, this is using quite private zarr API
 # https://github.com/dask/dask/blob/e04734b4d8959ba259801f2e2a490cb4ee8d891f/dask/tests/test_distributed.py#L338-L358
 @pytest.fixture
-def zarr(client):
+def zarr(client):  # noqa: F811
     zarr_lib = pytest.importorskip("zarr")
     # Zarr-Python 3 lazily allocates a dedicated thread/IO loop
     # for to execute async tasks. To avoid having this thread
@@ -248,7 +252,7 @@ def zarr(client):
 @pytest.mark.parametrize("consolidated", [True, False])
 @pytest.mark.parametrize("compute", [True, False])
 def test_dask_distributed_zarr_integration_test(
-    client,
+    client,  # noqa: F811
     zarr,
     consolidated: bool,
     compute: bool,
