diff --git a/parcels/grid.py b/parcels/grid.py
index 0d8745a09..9b7a6631a 100644
--- a/parcels/grid.py
+++ b/parcels/grid.py
@@ -279,19 +279,6 @@ class CStructuredGrid(Structure):
         )
         return self._cstruct

-    def lon_grid_to_target(self):
-        if self.lon_remapping:
-            self._lon = self.lon_remapping.to_target(self.lon)
-
-    def lon_grid_to_source(self):
-        if self.lon_remapping:
-            self._lon = self.lon_remapping.to_source(self.lon)
-
-    def lon_particle_to_target(self, lon):
-        if self.lon_remapping:
-            return self.lon_remapping.particle_to_target(lon)
-        return lon
-
     @deprecated_made_private  # TODO: Remove 6 months after v3.1.0
     def check_zonal_periodic(self, *args, **kwargs):
         return self._check_zonal_periodic(*args, **kwargs)
diff --git a/parcels/kernel.py b/parcels/kernel.py
index b30b18b76..3cf7c26b4 100644
--- a/parcels/kernel.py
+++ b/parcels/kernel.py
@@ -367,7 +367,7 @@ def check_fieldsets_in_kernels(self, pyfunc):
             )
         elif pyfunc is AdvectionAnalytical:
             if self.fieldset.particlefile is not None:
-                self.fieldset.particlefile.analytical = True
+                self.fieldset.particlefile._is_analytical = True
             if self._ptype.uses_jit:
                 raise NotImplementedError("Analytical Advection only works in Scipy mode")
             if self._fieldset.U.interp_method != "cgrid_velocity":
diff --git a/parcels/particle.py b/parcels/particle.py
index 47ff8e082..a8f5ead1f 100644
--- a/parcels/particle.py
+++ b/parcels/particle.py
@@ -29,11 +29,15 @@ class Variable:
     """

     def __init__(self, name, dtype=np.float32, initial=0, to_write: bool | Literal["once"] = True):
-        self.name = name
+        self._name = name
         self.dtype = dtype
         self.initial = initial
         self.to_write = to_write

+    @property
+    def name(self):
+        return self._name
+
     def __get__(self, instance, cls):
         if instance is None:
             return self
diff --git a/parcels/particledata.py b/parcels/particledata.py
index 5d15bcbbc..6ccd55061 100644
--- a/parcels/particledata.py
+++ b/parcels/particledata.py
@@ -5,6 +5,7 @@
 import numpy as np

 from parcels._compat import MPI, KMeans
+from parcels.tools._helpers import deprecated
 from parcels.tools.statuscodes import StatusCode


@@ -228,12 +229,15 @@ def __len__(self):
         """Return the length, in terms of 'number of elements, of a ParticleData instance."""
         return self._ncount

+    @deprecated(
+        "Use iter(...) instead, or just use the object in an iterator context (e.g. for p in particledata: ...)."
+    )  # TODO: Remove 6 months after v3.1.0 (or 9 months; doesn't contribute to code debt)
     def iterator(self):
-        return ParticleDataIterator(self)
+        return iter(self)

     def __iter__(self):
         """Return an Iterator that allows for forward iteration over the elements in the ParticleData (e.g. `for p in pset:`)."""
-        return self.iterator()
+        return ParticleDataIterator(self)

     def __getitem__(self, index):
         """Get a particle object from the ParticleData instance based on its index."""
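The `Variable.name` hunk above is the standard read-only property shim: storage moves to a private `_name` attribute, and a getter without a setter is exposed under the old name. A minimal standalone sketch (editor's illustration, not part of the patch) of the resulting behaviour:

```python
# Editor's sketch, not part of the patch: the read-only property pattern
# applied to Variable.name above.
class Variable:
    def __init__(self, name):
        self._name = name  # storage moves to a private attribute

    @property
    def name(self):
        return self._name  # getter only; no setter is defined


v = Variable("lon")
assert v.name == "lon"
try:
    v.name = "lat"  # assignment now fails: the property has no setter
except AttributeError as e:
    print(e)
```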
diff --git a/parcels/particlefile.py b/parcels/particlefile.py
index 04e9dda33..31011aadf 100644
--- a/parcels/particlefile.py
+++ b/parcels/particlefile.py
@@ -10,6 +10,7 @@
 import parcels
 from parcels._compat import MPI
+from parcels.tools._helpers import deprecated, deprecated_made_private
 from parcels.tools.warnings import FileWarning

 __all__ = ["ParticleFile"]

@@ -46,31 +47,24 @@ class ParticleFile:
         ParticleFile object that can be used to write particle data to file
     """

-    outputdt = None
-    particleset = None
-    parcels_mesh = None
-    time_origin = None
-    lonlatdepth_dtype = None
-
     def __init__(self, name, particleset, outputdt=np.inf, chunks=None, create_new_zarrfile=True):
-        self.outputdt = outputdt.total_seconds() if isinstance(outputdt, timedelta) else outputdt
-        self.chunks = chunks
-        self.particleset = particleset
-        self.parcels_mesh = "spherical"
+        self._outputdt = outputdt.total_seconds() if isinstance(outputdt, timedelta) else outputdt
+        self._chunks = chunks
+        self._particleset = particleset
+        self._parcels_mesh = "spherical"
         if self.particleset.fieldset is not None:
-            self.parcels_mesh = self.particleset.fieldset.gridset.grids[0].mesh
-        self.time_origin = self.particleset.time_origin
-        self.lonlatdepth_dtype = self.particleset.particledata.lonlatdepth_dtype
-        self.maxids = 0
-        self.pids_written = {}
-        self.create_new_zarrfile = create_new_zarrfile
-        self.vars_to_write = {}
+            self._parcels_mesh = self.particleset.fieldset.gridset.grids[0].mesh
+        self.lonlatdepth_dtype = self.particleset.particledata.lonlatdepth_dtype
+        self._maxids = 0
+        self._pids_written = {}
+        self._create_new_zarrfile = create_new_zarrfile
+        self._vars_to_write = {}
         for var in self.particleset.particledata.ptype.variables:
             if var.to_write:
                 self.vars_to_write[var.name] = var.dtype
-        self.mpi_rank = MPI.COMM_WORLD.Get_rank() if MPI else 0
+        self._mpi_rank = MPI.COMM_WORLD.Get_rank() if MPI else 0
         self.particleset.fieldset._particlefile = self
-        self.analytical = False  # Flag to indicate if ParticleFile is used for analytical trajectories
+        self._is_analytical = False  # Flag to indicate if ParticleFile is used for analytical trajectories

         # Reset obs_written of each particle, in case new ParticleFile created for a ParticleSet
         particleset.particledata.setallvardata("obs_written", 0)
@@ -80,11 +74,11 @@ def __init__(self, name, particleset, outputdt=np.inf, chunks=None, create_new_z
             "Conventions": "CF-1.6/CF-1.7",
             "ncei_template_version": "NCEI_NetCDF_Trajectory_Template_v2.0",
             "parcels_version": parcels.__version__,
-            "parcels_mesh": self.parcels_mesh,
+            "parcels_mesh": self._parcels_mesh,
         }

         # Create dictionary to translate datatypes and fill_values
-        self.fill_value_map = {
+        self._fill_value_map = {
             np.float16: np.nan,
             np.float32: np.nan,
             np.float64: np.nan,
@@ -103,7 +97,7 @@
             # But we need to handle incompatibility with MPI mode for now:
             if MPI and MPI.COMM_WORLD.Get_size() > 1:
                 raise ValueError("Currently, MPI mode is not compatible with directly passing a Zarr store.")
-            self.fname = name
+            fname = name
         else:
             extension = os.path.splitext(str(name))[1]
             if extension in [".nc", ".nc4"]:
@@ -111,15 +105,74 @@ def __init__(self, name, particleset, outputdt=np.inf, chunks=None, create_new_z
                     "Output in NetCDF is not supported anymore. Use .zarr extension for ParticleFile name."
                 )
             if MPI and MPI.COMM_WORLD.Get_size() > 1:
-                self.fname = os.path.join(name, f"proc{self.mpi_rank:02d}.zarr")
+                fname = os.path.join(name, f"proc{self._mpi_rank:02d}.zarr")
                 if extension in [".zarr"]:
                     warnings.warn(
-                        f"The ParticleFile name contains .zarr extension, but zarr files will be written per processor in MPI mode at {self.fname}",
+                        f"The ParticleFile name contains .zarr extension, but zarr files will be written per processor in MPI mode at {fname}",
                         FileWarning,
                         stacklevel=2,
                     )
             else:
-                self.fname = name if extension in [".zarr"] else f"{name}.zarr"
+                fname = name if extension in [".zarr"] else f"{name}.zarr"
+        self._fname = fname
+
+    @property
+    def create_new_zarrfile(self):
+        return self._create_new_zarrfile
+
+    @property
+    def outputdt(self):
+        return self._outputdt
+
+    @property
+    def chunks(self):
+        return self._chunks
+
+    @property
+    def particleset(self):
+        return self._particleset
+
+    @property
+    def fname(self):
+        return self._fname
+
+    @property
+    def vars_to_write(self):
+        return self._vars_to_write
+
+    @property
+    def time_origin(self):
+        return self.particleset.time_origin
+
+    @property
+    @deprecated_made_private  # TODO: Remove 6 months after v3.1.0
+    def parcels_mesh(self):
+        return self._parcels_mesh
+
+    @property
+    @deprecated_made_private  # TODO: Remove 6 months after v3.1.0
+    def maxids(self):
+        return self._maxids
+
+    @property
+    @deprecated_made_private  # TODO: Remove 6 months after v3.1.0
+    def pids_written(self):
+        return self._pids_written
+
+    @property
+    @deprecated_made_private  # TODO: Remove 6 months after v3.1.0
+    def mpi_rank(self):
+        return self._mpi_rank
+
+    @property
+    @deprecated_made_private  # TODO: Remove 6 months after v3.1.0
+    def fill_value_map(self):
+        return self._fill_value_map
+
+    @property
+    @deprecated_made_private  # TODO: Remove 6 months after v3.1.0
+    def analytical(self):
+        return self._is_analytical

     def _create_variables_attribute_dict(self):
         """Creates the dictionary with variable attributes.
@@ -133,7 +186,7 @@
             "trajectory": {
                 "long_name": "Unique identifier for each particle",
                 "cf_role": "trajectory_id",
-                "_FillValue": self.fill_value_map[np.int64],
+                "_FillValue": self._fill_value_map[np.int64],
             },
             "time": {"long_name": "", "standard_name": "time", "units": "seconds", "axis": "T"},
             "lon": {"long_name": "", "standard_name": "longitude", "units": "degrees_east", "axis": "X"},
@@ -147,7 +200,7 @@
         for vname in self.vars_to_write:
             if vname not in ["time", "lat", "lon", "depth", "id"]:
                 attrs[vname] = {
-                    "_FillValue": self.fill_value_map[self.vars_to_write[vname]],
+                    "_FillValue": self._fill_value_map[self.vars_to_write[vname]],
                     "long_name": "",
                     "standard_name": vname,
                     "units": "unknown",
@@ -155,6 +208,9 @@

         return attrs

+    @deprecated(
+        "ParticleFile.metadata is a dictionary. Use `ParticleFile.metadata['key'] = ...` or other dictionary methods instead."
+    )  # TODO: Remove 6 months after v3.1.0
     def add_metadata(self, name, message):
         """Add metadata to :class:`parcels.particleset.ParticleSet`.

@@ -175,21 +231,25 @@ def _convert_varout_name(self, var):
         else:
             return var

-    def write_once(self, var):
+    @deprecated_made_private  # TODO: Remove 6 months after v3.1.0
+    def write_once(self, *args, **kwargs):
+        return self._write_once(*args, **kwargs)
+
+    def _write_once(self, var):
         return self.particleset.particledata.ptype[var].to_write == "once"

     def _extend_zarr_dims(self, Z, store, dtype, axis):
         if axis == 1:
-            a = np.full((Z.shape[0], self.chunks[1]), self.fill_value_map[dtype], dtype=dtype)
+            a = np.full((Z.shape[0], self.chunks[1]), self._fill_value_map[dtype], dtype=dtype)
             obs = zarr.group(store=store, overwrite=False)["obs"]
             if len(obs) == Z.shape[1]:
                 obs.append(np.arange(self.chunks[1]) + obs[-1] + 1)
         else:
-            extra_trajs = self.maxids - Z.shape[0]
+            extra_trajs = self._maxids - Z.shape[0]
             if len(Z.shape) == 2:
-                a = np.full((extra_trajs, Z.shape[1]), self.fill_value_map[dtype], dtype=dtype)
+                a = np.full((extra_trajs, Z.shape[1]), self._fill_value_map[dtype], dtype=dtype)
             else:
-                a = np.full((extra_trajs,), self.fill_value_map[dtype], dtype=dtype)
+                a = np.full((extra_trajs,), self._fill_value_map[dtype], dtype=dtype)
         Z.append(a, axis=axis)
         zarr.consolidate_metadata(store)

@@ -221,11 +281,11 @@ def write(self, pset, time, indices=None):

         if len(indices_to_write) > 0:
             pids = pset.particledata.getvardata("id", indices_to_write)
-            to_add = sorted(set(pids) - set(self.pids_written.keys()))
+            to_add = sorted(set(pids) - set(self._pids_written.keys()))
             for i, pid in enumerate(to_add):
-                self.pids_written[pid] = self.maxids + i
-            ids = np.array([self.pids_written[p] for p in pids], dtype=int)
-            self.maxids = len(self.pids_written)
+                self._pids_written[pid] = self._maxids + i
+            ids = np.array([self._pids_written[p] for p in pids], dtype=int)
+            self._maxids = len(self._pids_written)

             once_ids = np.where(pset.particledata.getvardata("obs_written", indices_to_write) == 0)[0]
             if len(once_ids) > 0:
@@ -234,7 +294,7 @@

         if self.create_new_zarrfile:
             if self.chunks is None:
-                self.chunks = (len(ids), 1)
+                self._chunks = (len(ids), 1)
             if pset._repeatpclass is not None and self.chunks[0] < 1e4:
                 warnings.warn(
                     f"ParticleFile chunks are set to {self.chunks}, but this may lead to "
@@ -243,8 +303,8 @@
                     FileWarning,
                     stacklevel=2,
                 )
-            if (self.maxids > len(ids)) or (self.maxids > self.chunks[0]):
-                arrsize = (self.maxids, self.chunks[1])
+            if (self._maxids > len(ids)) or (self._maxids > self.chunks[0]):
+                arrsize = (self._maxids, self.chunks[1])
             else:
                 arrsize = (len(ids), self.chunks[1])
             ds = xr.Dataset(
@@ -252,28 +312,28 @@
                 coords={"trajectory": ("trajectory", pids), "obs": ("obs", np.arange(arrsize[1], dtype=np.int32))},
             )
             attrs = self._create_variables_attribute_dict()
-            obs = np.zeros((self.maxids), dtype=np.int32)
+            obs = np.zeros((self._maxids), dtype=np.int32)
             for var in self.vars_to_write:
                 varout = self._convert_varout_name(var)
                 if varout not in ["trajectory"]:  # because 'trajectory' is written as coordinate
-                    if self.write_once(var):
+                    if self._write_once(var):
                         data = np.full(
                             (arrsize[0],),
-                            self.fill_value_map[self.vars_to_write[var]],
+                            self._fill_value_map[self.vars_to_write[var]],
                             dtype=self.vars_to_write[var],
                         )
                         data[ids_once] = pset.particledata.getvardata(var, indices_to_write_once)
                         dims = ["trajectory"]
                     else:
                         data = np.full(
-                            arrsize, self.fill_value_map[self.vars_to_write[var]], dtype=self.vars_to_write[var]
+                            arrsize, self._fill_value_map[self.vars_to_write[var]], dtype=self.vars_to_write[var]
                         )
                         data[ids, 0] = pset.particledata.getvardata(var, indices_to_write)
                         dims = ["trajectory", "obs"]
                     ds[varout] = xr.DataArray(data=data, dims=dims, attrs=attrs[varout])
-                    ds[varout].encoding["chunks"] = self.chunks[0] if self.write_once(var) else self.chunks
+                    ds[varout].encoding["chunks"] = self.chunks[0] if self._write_once(var) else self.chunks
             ds.to_zarr(self.fname, mode="w")
-            self.create_new_zarrfile = False
+            self._create_new_zarrfile = False
         else:
             # Either use the store that was provided directly or create a DirectoryStore:
             if issubclass(type(self.fname), zarr.storage.Store):
@@ -284,9 +344,9 @@
             obs = pset.particledata.getvardata("obs_written", indices_to_write)
             for var in self.vars_to_write:
                 varout = self._convert_varout_name(var)
-                if self.maxids > Z[varout].shape[0]:
+                if self._maxids > Z[varout].shape[0]:
                     self._extend_zarr_dims(Z[varout], store, dtype=self.vars_to_write[var], axis=0)
-                if self.write_once(var):
+                if self._write_once(var):
                     if len(once_ids) > 0:
                         Z[varout].vindex[ids_once] = pset.particledata.getvardata(var, indices_to_write_once)
                     else:
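With `add_metadata()` deprecated, callers write straight into the `metadata` dict, as the deprecation message above says. A migration sketch (editor's illustration, not part of the patch; assumes an existing `ParticleSet` named `pset` with a fieldset, and a hypothetical metadata key):

```python
# Editor's sketch, not part of the patch. Assumes an existing ParticleSet `pset`.
from datetime import timedelta

from parcels import ParticleFile

pfile = ParticleFile("out.zarr", pset, outputdt=timedelta(hours=1))

# Before: pfile.add_metadata("institution", "...")  # now raises a DeprecationWarning
# After: ParticleFile.metadata is a plain dict, so use dict syntax directly.
pfile.metadata["institution"] = "..."  # "institution" is a hypothetical key

# outputdt, chunks, fname, etc. remain readable, but are now read-only properties.
print(pfile.outputdt, pfile.fname)
```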
diff --git a/parcels/particleset.py b/parcels/particleset.py
index 72696eb6f..20f788fc9 100644
--- a/parcels/particleset.py
+++ b/parcels/particleset.py
@@ -26,7 +26,7 @@
 from parcels.particle import JITParticle, Variable
 from parcels.particledata import ParticleData, ParticleDataIterator
 from parcels.particlefile import ParticleFile
-from parcels.tools._helpers import deprecated_made_private
+from parcels.tools._helpers import deprecated, deprecated_made_private
 from parcels.tools.converters import _get_cftime_calendars, convert_to_flat_array
 from parcels.tools.global_statics import get_package_dir
 from parcels.tools.loggers import logger
@@ -330,12 +330,14 @@ def __del__(self):
         del self.particledata
         self.particledata = None

+    @deprecated(
+        "Use iter(pset) instead, or just use the object in an iterator context (e.g. for p in pset: ...)."
+    )  # TODO: Remove 6 months after v3.1.0 (or 9 months; doesn't contribute to code debt)
     def iterator(self):
-        return self.particledata.iterator()
+        return iter(self)

     def __iter__(self):
-        """Allows for more intuitive iteration over a particleset, while in reality iterating over the particles in the ParticleData instance."""
-        return self.iterator()
+        return iter(self.particledata)

     def __getattr__(self, name):
         """
@@ -1044,7 +1046,7 @@ def execute(
             )
             self._kernel.load_lib()
         if output_file:
-            output_file.add_metadata("parcels_kernels", self._kernel.name)
+            output_file.metadata["parcels_kernels"] = self._kernel.name

         # Set up the interaction kernel(s) if not set and given.
         if self._interaction_kernel is None and pyfunc_inter is not None:
@@ -1186,7 +1188,7 @@ def execute(

                 if abs(time - next_output) < tol:
                     if output_file:
-                        if output_file.analytical:  # output analytical solution at later time
+                        if output_file._is_analytical:  # output analytical solution at later time
                             output_file.write_latest_locations(self, time)
                         else:
                             output_file.write(self, time_at_startofloop)
diff --git a/parcels/tools/_helpers.py b/parcels/tools/_helpers.py
index 7f698b719..381338f0e 100644
--- a/parcels/tools/_helpers.py
+++ b/parcels/tools/_helpers.py
@@ -40,6 +40,7 @@ def wrapper(*args, **kwargs):
             warnings.warn(msg_formatted, category=DeprecationWarning, stacklevel=3)
             return func(*args, **kwargs)

+        patch_docstring(wrapper, f"\n\n.. deprecated:: {msg}")
         return wrapper

     return decorator
@@ -51,3 +52,7 @@ def deprecated_made_private(func: Callable) -> Callable:
         "the end-user. If you feel that you use this code directly in your scripts, please "
         "comment on our tracking issue at https://github.com/OceanParcels/Parcels/issues/1695.",
     )(func)
+
+
+def patch_docstring(obj: Callable, extra: str) -> None:
+    obj.__doc__ = f"{obj.__doc__ or ''}{extra}".strip()
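The `_helpers.py` change makes the `deprecated` decorator advertise itself in two places: a `DeprecationWarning` at call time and a `.. deprecated::` note appended to the wrapped docstring. A self-contained mini-version (editor's illustration mirroring the hunk above, not the actual parcels helper):

```python
# Editor's sketch, not part of the patch: a self-contained mini-version of
# the mechanism added above. The decorator both warns at call time and
# records the deprecation in the wrapped function's docstring.
import functools
import warnings


def patch_docstring(obj, extra: str) -> None:
    obj.__doc__ = f"{obj.__doc__ or ''}{extra}".strip()


def deprecated(msg: str):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)

        patch_docstring(wrapper, f"\n\n.. deprecated:: {msg}")
        return wrapper

    return decorator


@deprecated("Use iter(...) instead.")
def iterator(seq):
    """Return an iterator over seq."""
    return iter(seq)


print(iterator.__doc__)  # ends with ".. deprecated:: Use iter(...) instead."
```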
diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py
index b2007558b..f75b85b57 100644
--- a/tests/test_deprecations.py
+++ b/tests/test_deprecations.py
@@ -4,7 +4,7 @@
 import numpy as np
 import pytest

-from parcels import Field, FieldSet, JITParticle, ParticleSet
+from parcels import Field, FieldSet, JITParticle, ParticleFile, ParticleSet, Variable
 from parcels.grid import (
     CurvilinearGrid,
     CurvilinearSGrid,
@@ -16,13 +16,28 @@
 )
 from tests.utils import create_fieldset_unit_mesh

+Classes = Literal[
+    "Field",
+    "FieldSet",
+    "ParticleSet",
+    "Grid",
+    "RectilinearGrid",
+    "RectilinearZGrid",
+    "RectilinearSGrid",
+    "CurvilinearGrid",
+    "CurvilinearZGrid",
+    "CurvilinearSGrid",
+    "ParticleData",
+    "ParticleFile",
+]
+

 class Action:
     """Utility class to help manage, document, and test deprecations."""

     def __init__(
         self,
-        class_: Literal["Field", "FieldSet"],
+        class_: Classes,
         name: str,
         type_: Literal["read_only", "make_private", "remove"],
         *,
@@ -168,6 +183,7 @@ def test_testing_action_class():
     Action("Grid", "lat_flipped", "make_private"),
     Action("Grid", "defer_load", "read_only"),
     Action("Grid", "lonlat_minmax", "read_only"),
+    Action("RectilinearGrid", "lonlat_minmax", "read_only"),
     Action("Grid", "load_chunk", "make_private"),
     Action("Grid", "cgrid", "make_private"),
     Action("Grid", "child_ctypes_struct", "make_private"),
@@ -222,7 +238,31 @@ def test_testing_action_class():
     Action("CurvilinearSGrid", "z4d", "make_private"),
     Action("CurvilinearSGrid", "xdim", "read_only"),
     Action("CurvilinearSGrid", "ydim", "read_only"),
-]
+
+    # 1727
+    Action("ParticleSet", "iterator()", "remove"),
+    Action("ParticleData", "iterator()", "remove"),
+    Action("ParticleFile", "add_metadata()", "remove"),
+    Action("ParticleFile", "write_once()", "make_private"),
+    Action("ParticleFile", "create_new_zarrfile", "read_only"),
+    Action("ParticleFile", "outputdt", "read_only"),
+    Action("ParticleFile", "chunks", "read_only"),
+    Action("ParticleFile", "particleset", "read_only"),
+    Action("ParticleFile", "fname", "read_only"),
+    Action("ParticleFile", "vars_to_write", "read_only"),
+    Action("ParticleFile", "time_origin", "read_only"),
+    Action("ParticleFile", "parcels_mesh", "make_private"),
+    Action("ParticleFile", "maxids", "make_private"),
+    Action("ParticleFile", "pids_written", "make_private"),
+    Action("ParticleFile", "mpi_rank", "make_private"),
+    Action("ParticleFile", "fill_value_map", "make_private"),
+    Action("ParticleFile", "analytical", "make_private", skip_reason="Was also renamed"),
+    Action("Grid", "lon_grid_to_target()", "remove"),
+    Action("Grid", "lon_remapping", "remove"),
+    Action("Grid", "lon_grid_to_source()", "remove"),
+    Action("Grid", "lon_particle_to_target()", "remove"),
+    Action("Variable", "name", "read_only"),
+    ]
 # fmt: on

 assert len({str(a) for a in actions}) == len(actions)  # Check that all actions are unique
@@ -247,6 +287,8 @@ def create_test_data():
     time_g0 = np.linspace(0, 1000, 2, dtype=np.float64)
     grid = RectilinearZGrid(lon_g0, lat_g0, time=time_g0)

+    pfile = ParticleFile("test.zarr", pset, outputdt=1)
+
     return {
         "Field": {
             "class": Field,
@@ -288,6 +330,14 @@ def create_test_data():
             "class": CurvilinearSGrid,
             "object": grid,  # not exactly right but good enough
         },
+        "ParticleFile": {
+            "class": ParticleFile,
+            "object": pfile,
+        },
+        "Variable": {
+            "class": Variable,
+            "object": Variable("test"),
+        },
     }


diff --git a/tests/tools/test_helpers.py b/tests/tools/test_helpers.py
index 0089fed36..c7510c21b 100644
--- a/tests/tools/test_helpers.py
+++ b/tests/tools/test_helpers.py
@@ -51,3 +51,5 @@ def some_function(x, y):

     with pytest.warns(DeprecationWarning):
         some_function(1, 2)
+
+    assert "deprecated::" in some_function.__doc__
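For reference, a minimal sketch (editor's illustration, not part of the patch) of what a `read_only` Action boils down to at runtime, using the `Variable` entry added above:

```python
# Editor's sketch, not part of the patch: the runtime behaviour that a
# ("Variable", "name", "read_only") Action entry is checking for.
import pytest

from parcels import Variable


def test_variable_name_is_read_only():
    v = Variable("test")
    assert v.name == "test"
    with pytest.raises(AttributeError):
        v.name = "other"  # property without a setter -> AttributeError
```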