Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion .github/ci/min-core-deps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ dependencies:
# Run ci/min_deps_check.py to verify that this file respects the policy.
- python=3.10
- cftime=1.6
- cgen=2020.1
- dask=2022.8
- matplotlib-base=3.5
# netcdf follows a 1.major.minor[.patch] convention
Expand Down
2 changes: 1 addition & 1 deletion docs/examples/tutorial_parcels_structure.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
"source": [
"1. [**FieldSet**](#1.-FieldSet). Load and set up the fields. These can be velocity fields that are used to advect the particles, but it can also be e.g. temperature.\n",
"2. [**ParticleSet**](#2.-ParticleSet). Define the type of particles. Also additional `Variables` can be added to the particles (e.g. temperature, to keep track of the temperature that particles experience).\n",
"3. [**Kernels**](#3.-Kernels). Define and compile kernels. Kernels perform some specific operation on the particles every time step (e.g. interpolate the temperature from the temperature field to the particle location).\n",
"3. [**Kernels**](#3.-Kernels). Kernels perform some specific operation on the particles every time step (e.g. interpolate the temperature from the temperature field to the particle location).\n",
"4. [**Execution and output**](#4.-Execution-and-Output). Execute the simulation and write and store the output in a Zarr file.\n",
"5. [**Optimising and parallelising**](#5.-Optimising-and-parallelising). Optimise and parallelise the code to run faster.\n",
"\n",
Expand Down
1 change: 0 additions & 1 deletion environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ channels:
- conda-forge
dependencies:
- python>=3.10
- cgen
- ffmpeg>=3.2.3
- jupyter
- matplotlib-base>=2.0.2
Expand Down
6 changes: 0 additions & 6 deletions parcels/_typing.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,17 +6,11 @@

"""

import ast
import datetime
import os
from collections.abc import Callable
from typing import Any, Literal, get_args


class ParcelsAST(ast.AST):
ccode: str


InterpMethodOption = Literal[
"linear",
"nearest",
Expand Down
30 changes: 0 additions & 30 deletions parcels/field.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,6 @@ class Field:
Maximum allowed value on the field. Data above this value are set to zero
cast_data_dtype : str
Cast Field data to dtype. Supported dtypes are "float32" (np.float32 (default)) and "float64 (np.float64).
Note that dtype can only be "float32" in JIT mode
time_origin : parcels.tools.converters.TimeConverter
Time origin of the time axis (only if grid is None)
interp_method : str
Expand Down Expand Up @@ -320,7 +319,6 @@ def __init__(

self._scaling_factor = None

# Variable names in JIT code
self._dimensions = kwargs.pop("dimensions", None)
self.indices = kwargs.pop("indices", None)
self._dataFiles = kwargs.pop("dataFiles", None)
Expand Down Expand Up @@ -1084,18 +1082,6 @@ def eval(self, time, z, y, x, particle=None, applyConversion=True):
else:
return value

def _ccode_eval(self, var, t, z, y, x):
self._check_velocitysampling()
ccode_str = (
f"temporal_interpolation({t}, {z}, {y}, {x}, {self.ccode_name}, "
+ "&particles->ti[pnum*ngrid], &particles->zi[pnum*ngrid], &particles->yi[pnum*ngrid], &particles->xi[pnum*ngrid], "
+ f"&{var}, {self.interp_method.upper()}, {self.gridindexingtype.upper()})"
)
return ccode_str

def _ccode_convert(self, _, z, y, x):
return self.units.ccode_to_target(z, y, x)

def _get_block_id(self, block):
return np.ravel_multi_index(block, self.nchunks)

Expand Down Expand Up @@ -1923,22 +1909,6 @@ def __getitem__(self, key):
except tuple(AllParcelsErrorCodes.keys()) as error:
return _deal_with_errors(error, key, vector_type=self.vector_type)

def _ccode_eval(self, varU, varV, varW, U, V, W, t, z, y, x):
ccode_str = ""
if "3D" in self.vector_type:
ccode_str = (
f"temporal_interpolationUVW({t}, {z}, {y}, {x}, {U.ccode_name}, {V.ccode_name}, {W.ccode_name}, "
+ "&particles->ti[pnum*ngrid], &particles->zi[pnum*ngrid], &particles->yi[pnum*ngrid], &particles->xi[pnum*ngrid],"
+ f"&{varU}, &{varV}, &{varW}, {U.interp_method.upper()}, {U.gridindexingtype.upper()})"
)
else:
ccode_str = (
f"temporal_interpolationUV({t}, {z}, {y}, {x}, {U.ccode_name}, {V.ccode_name}, "
+ "&particles->ti[pnum*ngrid], &particles->zi[pnum*ngrid], &particles->yi[pnum*ngrid], &particles->xi[pnum*ngrid],"
+ f" &{varU}, &{varV}, {U.interp_method.upper()}, {U.gridindexingtype.upper()})"
)
return ccode_str


class DeferredArray:
"""Class used for throwing error when Field.data is not read in deferred loading mode."""
Expand Down
13 changes: 1 addition & 12 deletions parcels/fieldset.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,16 +309,6 @@ def check_velocityfields(U, V, W):
g._time_origin = self.time_origin
self._add_UVfield()

ccode_fieldnames = []
counter = 1
for fld in self.get_fields():
if fld.name not in ccode_fieldnames:
fld.ccode_name = fld.name
else:
fld.ccode_name = fld.name + str(counter)
counter += 1
ccode_fieldnames.append(fld.ccode_name)

for f in self.get_fields():
if isinstance(f, (VectorField, NestedField)) or f._dataFiles is None:
continue
Expand Down Expand Up @@ -1447,8 +1437,7 @@ def get_fields(self) -> list[Field | VectorField]:

def add_constant(self, name, value):
"""Add a constant to the FieldSet. Note that all constants are
stored as 32-bit floats. While constants can be updated during
execution in SciPy mode, they can not be updated in JIT mode.
stored as 32-bit floats.

Parameters
----------
Expand Down
43 changes: 20 additions & 23 deletions parcels/kernel.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def __init__(
self.funcvars = funcvars
self.funccode = funccode
self.py_ast = py_ast # TODO v4: check if this is needed
self.scipy_positionupdate_kernels_added = False
self._positionupdate_kernels_added = False

@property
def ptype(self):
Expand Down Expand Up @@ -94,7 +94,7 @@ class Kernel(BaseKernel):

Notes
-----
A Kernel is either created from a compiled <function ...> object
A Kernel is either created from a <function ...> object
or the necessary information (funcname, funccode, funcvars) is provided.
The py_ast argument may be derived from the code string, but for
concatenation, the merged AST plus the new header definition is required.
Expand Down Expand Up @@ -159,7 +159,7 @@ def __init__(
user_ctx = globals()
finally:
del stack # Remove cyclic references
# Compile and generate Python function from AST
# Generate Python function from AST
py_mod = ast.parse("")
py_mod.body = [self.py_ast]
exec(compile(py_mod, "<ast>", "exec"), user_ctx)
Expand All @@ -181,7 +181,7 @@ def pyfunc(self):
def fieldset(self):
return self._fieldset

def add_scipy_positionupdate_kernels(self):
def add_positionupdate_kernels(self):
# Adding kernels that set and update the coordinate changes
def Setcoords(particle, fieldset, time): # pragma: no cover
particle_dlon = 0 # noqa
Expand Down Expand Up @@ -324,23 +324,6 @@ def from_list(cls, fieldset, ptype, pyfunc_list, *args, **kwargs):
pyfunc_list[0] = cls(fieldset, ptype, pyfunc_list[0], *args, **kwargs)
return functools.reduce(lambda x, y: x + y, pyfunc_list)

def execute_python(self, pset, endtime, dt):
"""Performs the core update loop via Python."""
if self.fieldset is not None:
for f in self.fieldset.get_fields():
if isinstance(f, (VectorField, NestedField)):
continue
f.data = np.array(f.data)

if not self.scipy_positionupdate_kernels_added:
self.add_scipy_positionupdate_kernels()
self.scipy_positionupdate_kernels_added = True

for p in pset:
self.evaluate_particle(p, endtime)
if p.state == StatusCode.StopAllExecution:
return StatusCode.StopAllExecution

def execute(self, pset, endtime, dt):
"""Execute this Kernel over a ParticleSet for several timesteps."""
pset.particledata.state[:] = StatusCode.Evaluate
Expand All @@ -359,7 +342,19 @@ def execute(self, pset, endtime, dt):
g._load_chunk == g._chunk_loaded_touched, g._chunk_deprecated, g._load_chunk
)

self.execute_python(pset, endtime, dt)
for f in self.fieldset.get_fields():
if isinstance(f, (VectorField, NestedField)):
continue
f.data = np.array(f.data)

if not self._positionupdate_kernels_added:
self.add_positionupdate_kernels()
self._positionupdate_kernels_added = True

for p in pset:
self.evaluate_particle(p, endtime)
if p.state == StatusCode.StopAllExecution:
return StatusCode.StopAllExecution

# Remove all particles that signalled deletion
self.remove_deleted(pset)
Expand Down Expand Up @@ -398,7 +393,9 @@ def execute(self, pset, endtime, dt):
# Remove all particles that signalled deletion
self.remove_deleted(pset) # Generalizable version!

self.execute_python(pset, endtime, dt)
# Re-execute Kernels to retry particles with StatusCode.Repeat
for p in pset:
self.evaluate_particle(p, endtime)

n_error = pset._num_error_particles

Expand Down
4 changes: 3 additions & 1 deletion parcels/particle.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,4 +231,6 @@

class JITParticle(ScipyParticle):
def __init__(self, *args, **kwargs):
raise NotImplementedError("JITParticle has been deprecated in Parcels v4. Use ScipyParticle instead.")
raise NotImplementedError(
"JITParticle has been deprecated in Parcels v4. Use ScipyParticle instead."
) # TODO v4: link to migration guide
2 changes: 1 addition & 1 deletion parcels/particledata.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ def __init__(self, pclass, lon, lat, depth, time, lonlatdepth_dtype, pid_orig, n
self._data["id"][:] = pid
self._data["obs_written"][:] = 0

# special case for exceptions which can only be handled from scipy
# special case for exceptions which can only be handled from scipy # TODO v4: check if this can be removed now that JIT is dropped
self._data["exception"] = np.empty(self._ncount, dtype=object)

initialised |= {
Expand Down
4 changes: 2 additions & 2 deletions parcels/particleset.py
Original file line number Diff line number Diff line change
Expand Up @@ -932,13 +932,13 @@ def execute(

Notes
-----
``ParticleSet.execute()`` acts as the main entrypoint for simulations, and provides the simulation time-loop. This method encapsulates the logic controlling the switching between kernel execution (where control in handed to C in JIT mode), output file writing, reading in fields for new timesteps, adding new particles to the simulation domain, stopping the simulation, and executing custom functions (``postIterationCallbacks`` provided by the user).
``ParticleSet.execute()`` acts as the main entrypoint for simulations, and provides the simulation time-loop. This method encapsulates the logic controlling the switching between kernel execution, output file writing, reading in fields for new timesteps, adding new particles to the simulation domain, stopping the simulation, and executing custom functions (``postIterationCallbacks`` provided by the user).
"""
# check if particleset is empty. If so, return immediately
if len(self) == 0:
return

# check if pyfunc has changed since last compile. If so, recompile
# check if pyfunc has changed since last generation. If so, regenerate
if self._kernel is None or (self._kernel.pyfunc is not pyfunc and self._kernel is not pyfunc):
# Generate and store Kernel
if isinstance(pyfunc, Kernel):
Expand Down
30 changes: 0 additions & 30 deletions parcels/tools/converters.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,15 +169,9 @@ class UnitConverter:
def to_target(self, value, z, y, x):
return value

def ccode_to_target(self, z, y, x):
return "1.0"

def to_source(self, value, z, y, x):
return value

def ccode_to_source(self, z, y, x):
return "1.0"


class Geographic(UnitConverter):
"""Unit converter from geometric to geographic coordinates (m to degree)"""
Expand All @@ -191,12 +185,6 @@ def to_target(self, value, z, y, x):
def to_source(self, value, z, y, x):
return value * 1000.0 * 1.852 * 60.0

def ccode_to_target(self, z, y, x):
return "(1.0 / (1000.0 * 1.852 * 60.0))"

def ccode_to_source(self, z, y, x):
return "(1000.0 * 1.852 * 60.0)"


class GeographicPolar(UnitConverter):
"""Unit converter from geometric to geographic coordinates (m to degree)
Expand All @@ -212,12 +200,6 @@ def to_target(self, value, z, y, x):
def to_source(self, value, z, y, x):
return value * 1000.0 * 1.852 * 60.0 * cos(y * pi / 180)

def ccode_to_target(self, z, y, x):
return f"(1.0 / (1000. * 1.852 * 60. * cos({y} * M_PI / 180)))"

def ccode_to_source(self, z, y, x):
return f"(1000. * 1.852 * 60. * cos({y} * M_PI / 180))"


class GeographicSquare(UnitConverter):
"""Square distance converter from geometric to geographic coordinates (m2 to degree2)"""
Expand All @@ -231,12 +213,6 @@ def to_target(self, value, z, y, x):
def to_source(self, value, z, y, x):
return value * pow(1000.0 * 1.852 * 60.0, 2)

def ccode_to_target(self, z, y, x):
return "pow(1.0 / (1000.0 * 1.852 * 60.0), 2)"

def ccode_to_source(self, z, y, x):
return "pow((1000.0 * 1.852 * 60.0), 2)"


class GeographicPolarSquare(UnitConverter):
"""Square distance converter from geometric to geographic coordinates (m2 to degree2)
Expand All @@ -252,12 +228,6 @@ def to_target(self, value, z, y, x):
def to_source(self, value, z, y, x):
return value * pow(1000.0 * 1.852 * 60.0 * cos(y * pi / 180), 2)

def ccode_to_target(self, z, y, x):
return f"pow(1.0 / (1000. * 1.852 * 60. * cos({y} * M_PI / 180)), 2)"

def ccode_to_source(self, z, y, x):
return f"pow((1000. * 1.852 * 60. * cos({y} * M_PI / 180)), 2)"


unitconverters_map = {
"U": GeographicPolar(),
Expand Down
2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ classifiers = [
"Intended Audience :: Science/Research",
]
dependencies = [
"cgen",
"cftime",
"numpy",
"dask",
Expand Down Expand Up @@ -152,6 +151,5 @@ module = [
"cftime",
"pykdtree.kdtree",
"netCDF4",
"cgen"
]
ignore_missing_imports = true
2 changes: 1 addition & 1 deletion tests/test_grids.py
Original file line number Diff line number Diff line change
Expand Up @@ -255,7 +255,7 @@ def bath_func(lon):
for i in range(depth_g0.shape[0]):
for k in range(depth_g0.shape[2]):
depth_g0[i, :, k] = bath[i] * k / (depth_g0.shape[2] - 1)
depth_g0 = depth_g0.transpose() # we don't change it on purpose, to check if the transpose op if fixed in jit
depth_g0 = depth_g0.transpose()

grid = RectilinearSGrid(lon_g0, lat_g0, depth=depth_g0)

Expand Down
5 changes: 1 addition & 4 deletions tests/test_interpolation.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,10 +90,7 @@ def data_2d():
],
)
def test_raw_2d_interpolation(data_2d, func, eta, xsi, expected):
"""Test the 2D interpolation functions on the raw arrays.

Interpolation via the other interpolation methods are tested in `test_scipy_vs_jit`.
"""
"""Test the 2D interpolation functions on the raw arrays."""
ti = 0
yi, xi = 1, 1
ctx = interpolation.InterpolationContext2D(data_2d, eta, xsi, ti, yi, xi)
Expand Down
2 changes: 1 addition & 1 deletion tests/test_particles.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def addOne(particle, fieldset, time): # pragma: no cover

@pytest.mark.parametrize("type", ["np.int8", "mp.float", "np.int16"])
def test_variable_unsupported_dtypes(fieldset, type):
"""Test that checks errors thrown for unsupported dtypes in JIT mode."""
"""Test that checks errors thrown for unsupported dtypes."""
TestParticle = ScipyParticle.add_variable("p", dtype=type, initial=10.0)
with pytest.raises((RuntimeError, TypeError)):
ParticleSet(fieldset, pclass=TestParticle, lon=[0], lat=[0])
Expand Down
Loading