Changes from all commits (32 commits)
18bd8b4
First implementation of fieldset.from_nemo
erikvansebille Dec 30, 2025
57d4a54
Setting vector_interp_method only when vector fields available
erikvansebille Dec 30, 2025
d47400b
Add error when pset.execute has neither runtime nor endtime
erikvansebille Dec 30, 2025
a60c911
Adding support for NEMO 3D
erikvansebille Dec 30, 2025
6769a2c
Fixing negating vertical velocity code
erikvansebille Dec 30, 2025
d9f7a52
Adding docstring for from_nemo
erikvansebille Dec 30, 2025
1fa20b5
Support for NEMO fields without depth
erikvansebille Dec 30, 2025
c0f5d3e
Simplifying nemo dataset creation in tutorial
erikvansebille Dec 30, 2025
c5ad537
Using glamf, gphif and depth as dimensions in from_nemo
erikvansebille Jan 2, 2026
60e2f38
Using xgcm coordinates for cgrid interpolation selection
erikvansebille Jan 2, 2026
d36cf82
Reordering corner_data in CGrid_Velocity
erikvansebille Jan 5, 2026
0f91c23
Separating U and V interpolation in CGrid_Velocity
erikvansebille Jan 5, 2026
17c9d4e
Cleaning up CGrid_Velocity code
erikvansebille Jan 5, 2026
79b4c37
Fixing bug in CGrid_Tracer interpolation
erikvansebille Jan 6, 2026
7343a43
Using offsets in CGrid_Tracer
erikvansebille Jan 6, 2026
24de000
Move from_nemo body into parcels.convert
VeckoTheGecko Jan 7, 2026
26c6d55
Move selection of vector interpolator to from_sgrid_conventions
VeckoTheGecko Jan 7, 2026
c8800a2
Fix from_nemo
VeckoTheGecko Jan 8, 2026
ba7a953
Refactor test_nemo_3D_curvilinear_fieldset
VeckoTheGecko Jan 8, 2026
914e5ee
Remove setting of fieldset.V.units in from_nemo
VeckoTheGecko Jan 8, 2026
5dc0a5f
Add discovery of `mesh` from convention metadata
VeckoTheGecko Jan 8, 2026
ea4e993
Fix xfail mark
VeckoTheGecko Jan 8, 2026
fd699f2
Add helpers get_value_by_id
VeckoTheGecko Jan 8, 2026
875fe70
Add test_convert.test_nemo_to_sgrid
VeckoTheGecko Jan 8, 2026
31c63dd
Fix param naming to obey pep8
VeckoTheGecko Jan 9, 2026
24357bf
Update API for convert.nemo_to_sgrid
VeckoTheGecko Jan 9, 2026
3670dd5
Remove FieldSet.from_nemo
VeckoTheGecko Jan 9, 2026
0b9c787
Fix from_sgrid_conventions
VeckoTheGecko Jan 9, 2026
1926f6a
Update src/parcels/_core/utils/sgrid.py
VeckoTheGecko Jan 12, 2026
8f76e85
Update src/parcels/convert.py
VeckoTheGecko Jan 12, 2026
4ff5f10
Update src/parcels/_core/utils/sgrid.py
VeckoTheGecko Jan 12, 2026
6e133c9
Update docs/user_guide/index.md
erikvansebille Jan 12, 2026
Conversations
54 changes: 14 additions & 40 deletions docs/user_guide/examples/tutorial_nemo_curvilinear.ipynb
Member
This tutorial and others still need to be updated to use the new convert workflow, right? Do you want me to do that? It could be a good test case for how user-friendly the convert functions are.

Contributor Author — @VeckoTheGecko, Jan 12, 2026
Yes, they would need to be updated.

> Do you want me to do that? It could be a good test case for how user-friendly the convert functions are

That would be great!

Member
I've tried to change the netCDF ingestion in the Nemo 3D tutorial, but I get the error below.

Can you check what's going on, @VeckoTheGecko?

data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
ds_fields = xr.open_mfdataset(
    data_folder.glob("ORCA*.nc"),
    data_vars="minimal",
    coords="minimal",
    compat="override",
)
ds_coords = xr.open_dataset(data_folder / "coordinates.nc", decode_times=False)
ds_fset = parcels.convert.nemo_to_sgrid(U=ds_fields["uo"], V=ds_fields["vo"], W=ds_fields["wo"], coords=ds_coords)
fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)
---------------------------------------------------------------------------
CoordinateValidationError                 Traceback (most recent call last)
Cell In[7], line 9
      2 ds_fields = xr.open_mfdataset(
      3     data_folder.glob("ORCA*.nc"),
      4     data_vars="minimal",
      5     coords="minimal",
      6     compat="override",
      7 )
      8 ds_coords = xr.open_dataset(data_folder / "coordinates.nc", decode_times=False)
----> 9 ds_fset = parcels.convert.nemo_to_sgrid(U=ds_fields["uo"], V=ds_fields["vo"], coords=ds_coords)
     10 fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)

File ~/Codes/ParcelsCode/src/parcels/convert.py:224, in nemo_to_sgrid(coords, **fields)
    222 ds = _discover_U_and_V(ds, _NEMO_CF_STANDARD_NAME_FALLBACKS)
    223 ds = _maybe_create_depth_dim(ds)
--> 224 ds = _maybe_bring_UV_depths_to_depth(ds)
    225 ds = _drop_unused_dimensions_and_coords(ds, _NEMO_DIMENSION_COORD_NAMES)
    226 ds = _assign_dims_as_coords(ds, _NEMO_DIMENSION_COORD_NAMES)

File ~/Codes/ParcelsCode/src/parcels/convert.py:58, in _maybe_bring_UV_depths_to_depth(ds)
     56 def _maybe_bring_UV_depths_to_depth(ds):
     57     if "U" in ds.variables and "depthu" in ds.U.coords and "depth" in ds.coords:
---> 58         ds["U"] = ds["U"].assign_coords(depthu=ds["depth"].values).rename({"depthu": "depth"})
     59     if "V" in ds.variables and "depthv" in ds.V.coords and "depth" in ds.coords:
     60         ds["V"] = ds["V"].assign_coords(depthv=ds["depth"].values).rename({"depthv": "depth"})

File ~/Codes/ParcelsCode/.pixi/envs/docs/lib/python3.14/site-packages/xarray/core/common.py:664, in DataWithCoords.assign_coords(self, coords, **coords_kwargs)
    661 else:
    662     results = self._calc_assign_results(coords_combined)
--> 664 data.coords.update(results)
    665 return data

File ~/Codes/ParcelsCode/.pixi/envs/docs/lib/python3.14/site-packages/xarray/core/coordinates.py:636, in Coordinates.update(self, other)
    630 # special case for PandasMultiIndex: updating only its dimension coordinate
    631 # is still allowed but depreciated.
    632 # It is the only case where we need to actually drop coordinates here (multi-index levels)
    633 # TODO: remove when removing PandasMultiIndex's dimension coordinate.
    634 self._drop_coords(self._names - coords_to_align._names)
--> 636 self._update_coords(coords, indexes)

File ~/Codes/ParcelsCode/.pixi/envs/docs/lib/python3.14/site-packages/xarray/core/coordinates.py:1104, in DataArrayCoordinates._update_coords(self, coords, indexes)
   1101 def _update_coords(
   1102     self, coords: dict[Hashable, Variable], indexes: dict[Hashable, Index]
   1103 ) -> None:
-> 1104     validate_dataarray_coords(
   1105         self._data.shape, Coordinates._construct_direct(coords, indexes), self.dims
   1106     )
   1108     self._data._coords = coords
   1109     self._data._indexes = indexes

File ~/Codes/ParcelsCode/.pixi/envs/docs/lib/python3.14/site-packages/xarray/core/coordinates.py:1328, in validate_dataarray_coords(shape, coords, dim)
   1326 for d, s in v.sizes.items():
   1327     if d in sizes and s != sizes[d]:
-> 1328         raise CoordinateValidationError(
   1329             f"conflicting sizes for dimension {d!r}: "
   1330             f"length {sizes[d]} on the data but length {s} on "
   1331             f"coordinate {k!r}"
   1332         )

CoordinateValidationError: conflicting sizes for dimension 'depthu': length 75 on the data but length 1 on coordinate 'depthu'
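
A quick way to confirm the size mismatch the traceback reports (a diagnostic sketch only, assuming the same dataset and environment as the failing cell) is to print the relevant dimension sizes before calling nemo_to_sgrid:

import xarray as xr
import parcels

data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")
ds_fields = xr.open_mfdataset(
    data_folder.glob("ORCA*.nc"),
    data_vars="minimal",
    coords="minimal",
    compat="override",
)
ds_coords = xr.open_dataset(data_folder / "coordinates.nc", decode_times=False)

# The traceback says U carries 75 levels on "depthu" while the merged dataset
# ends up with a length-1 "depth" coordinate, so compare the vertical sizes:
print(ds_fields["uo"].sizes)   # expect a "depthu" dimension of length 75, per the traceback
print(ds_coords.sizes)         # check for a degenerate (size-1) depth-like dimension
print([d for d in ds_coords.dims if "depth" in str(d) or str(d) in ("z", "z_a")])

If ds_coords or one of the merged ORCA files contributes a size-1 depth-like dimension, that would explain why _maybe_bring_UV_depths_to_depth ends up assigning a length-1 depth to the 75-level depthu coordinate.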

@@ -64,47 +64,17 @@
"outputs": [],
"source": [
"data_folder = parcels.download_example_dataset(\"NemoCurvilinear_data\")\n",
"files = data_folder.glob(\"*.nc4\")\n",
"ds_fields = xr.open_mfdataset(\n",
" files, combine=\"nested\", data_vars=\"minimal\", coords=\"minimal\", compat=\"override\"\n",
" data_folder.glob(\"*.nc4\"),\n",
" data_vars=\"minimal\",\n",
" coords=\"minimal\",\n",
" compat=\"override\",\n",
")\n",
"\n",
"# Drop some auxiliary dimensions - otherwise Parcels will complain\n",
"ds_fields = ds_fields.isel(time=0, z_a=0, z=0, drop=True)\n",
"\n",
"# TODO: replace manual fieldset creation with FieldSet.from_nemo() once available\n",
"ds_fields = (\n",
" ds_fields.isel(time_counter=0, drop=True)\n",
" .isel(time=0, drop=True)\n",
" .isel(z_a=0, drop=True)\n",
" .rename({\"glamf\": \"lon\", \"gphif\": \"lat\", \"z\": \"depth\"})\n",
")\n",
"\n",
"import xgcm\n",
"\n",
"xgcm_grid = xgcm.Grid(\n",
" ds_fields,\n",
" coords={\n",
" \"X\": {\"left\": \"x\"},\n",
" \"Y\": {\"left\": \"y\"},\n",
" },\n",
" periodic=False,\n",
" autoparse_metadata=False,\n",
")\n",
"grid = parcels.XGrid(xgcm_grid, mesh=\"spherical\")\n",
"\n",
"U = parcels.Field(\n",
" \"U\", ds_fields[\"U\"], grid, interp_method=parcels.interpolators.XLinear\n",
")\n",
"V = parcels.Field(\n",
" \"V\", ds_fields[\"V\"], grid, interp_method=parcels.interpolators.XLinear\n",
")\n",
"U.units = parcels.GeographicPolar()\n",
"V.units = (\n",
" parcels.GeographicPolar()\n",
") # U and V need GeographicPolar for C-Grid interpolation to work correctly\n",
"UV = parcels.VectorField(\n",
" \"UV\", U, V, vector_interp_method=parcels.interpolators.CGrid_Velocity\n",
")\n",
"fieldset = parcels.FieldSet([U, V, UV])"
"fieldset = parcels.FieldSet.from_nemo(ds_fields)"
]
},
{
@@ -122,7 +92,7 @@
"outputs": [],
"source": [
"fig, ax = plt.subplots(1, 2, figsize=(10, 4))\n",
"pc1 = ds_fields.U.plot(cmap=\"viridis\", ax=ax[0], vmin=0)\n",
"pc1 = fieldset.U.data.plot(cmap=\"viridis\", ax=ax[0], vmin=0)\n",
"pc2 = ax[1].pcolormesh(\n",
" fieldset.U.grid.lon,\n",
" fieldset.U.grid.lat,\n",
@@ -152,8 +122,12 @@
"outputs": [],
"source": [
"u, v = fieldset.UV.eval(\n",
" np.array([0]), np.array([0]), np.array([20]), np.array([50]), applyConversion=False\n",
") # do not convert m/s to deg/s\n",
" time=np.array([0]),\n",
" z=np.array([0]),\n",
" y=np.array([40]),\n",
" x=np.array([50]),\n",
" applyConversion=False, # do not convert m/s to deg/s\n",
")\n",
"print(f\"(u, v) = ({u[0]:.3f}, {v[0]:.3f})\")\n",
"assert np.isclose(u, 1.0, atol=1e-3)"
]
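For readability, here is the updated ingestion cell from the diff above, collected as plain Python. It mirrors the added notebook lines verbatim; note that the review thread suggests these tutorials may later move to the parcels.convert workflow, so FieldSet.from_nemo is simply the entry point used at this stage of the PR.

import numpy as np
import xarray as xr
import parcels

data_folder = parcels.download_example_dataset("NemoCurvilinear_data")
ds_fields = xr.open_mfdataset(
    data_folder.glob("*.nc4"),
    data_vars="minimal",
    coords="minimal",
    compat="override",
)

# Drop some auxiliary dimensions - otherwise Parcels will complain
ds_fields = ds_fields.isel(time=0, z_a=0, z=0, drop=True)

fieldset = parcels.FieldSet.from_nemo(ds_fields)

# Sample the velocity at a single point, without converting m/s to deg/s
u, v = fieldset.UV.eval(
    time=np.array([0]),
    z=np.array([0]),
    y=np.array([40]),
    x=np.array([50]),
    applyConversion=False,
)
print(f"(u, v) = ({u[0]:.3f}, {v[0]:.3f})")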
191 changes: 65 additions & 126 deletions docs/user_guide/examples_v3/tutorial_nemo_3D.ipynb
@@ -18,26 +18,13 @@
"More information about C-grid interpolation can be found in [Delandmeter et al., 2019](https://www.geosci-model-dev-discuss.net/gmd-2018-339/).\n",
"An example of such a discretisation is the NEMO model, which is one of the models supported in Parcels. A tutorial teaching how to [interpolate 2D data on a NEMO grid](https://docs.oceanparcels.org/en/latest/examples/tutorial_nemo_curvilinear.html) is available within Parcels.\n",
"\n",
"Here, we focus on 3D fields. Basically, it is a straightforward extension of the 2D example, but it is very easy to do a mistake in the setup of the vertical discretisation that would affect the interpolation scheme.\n",
"\n",
"## Preliminary comments\n",
"\n",
"```{note}\n",
"_How to know if your data is discretised on a C grid?_ The best way is to read the documentation that comes with the data. Alternatively, an easy check is to assess the coordinates of the U, V and W fields: for an A grid, U, V and W are distributed on the same nodes, such that the coordinates are the same. For a C grid, there is a shift of half a cell between the different variables.\n",
"```\n",
"\n",
"_What about grid indexing?_ Since the C-grid variables are not located on the same nodes, there is not one obvious way to define the indexing, i.e. where is `u[k,j,i]` compared to `v[k,j,i]` and `w[k,j,i]`. In Parcels, we use the same notation as in NEMO: see [horizontal indexing](https://www.nemo-ocean.eu/doc/img360.png) and [vertical indexing](https://www.nemo-ocean.eu/doc/img362.png).\n",
"It is important that you check if your data is following the same notation. Otherwise, you should re-index your data properly (this can be done within Parcels, there is no need to regenerate new netcdf files).\n",
"\n",
"_What about the accuracy?_ By default in Parcels, particle coordinates (i.e. longitude, latitude and depth) are stored using single-precision `np.float32` numbers. The advantage of this is that it saves some memory resources for the computation. In some applications, especially where particles travel very close to the coast, the single precision accuracy can lead to uncontrolled particle beaching, due to numerical rounding errors. In such case, you may want to double the coordinate precision to `np.float64`. This can be done by adding the parameter `lonlatdepth_dtype=np.float64` to the ParticleSet constructor. Note that for C-grid fieldsets such as in NEMO, the coordinates precision is set by default to `np.float64`.\n",
"\n",
"## How to create a 3D NEMO `dimensions` dictionary?\n",
"\n",
"In the following, we will show how to create the `dimensions` dictionary for 3D NEMO simulations. What you require is a 'mesh_mask' file, which in our case is called `coordinates.nc` but in some other versions of NEMO has a different name. In any case, it will have to contain the variables `glamf`, `gphif` and `depthw`, which are the longitude, latitude and depth of the mesh nodes, respectively. Note that `depthw` is not part of the mesh_mask file, but is in the same file as the w data (`wfiles[0]`).\n",
"\n",
"For the C-grid interpolation in Parcels to work properly, it is important that `U`, `V` and `W` are on the same grid. \n",
"\n",
"All other tracers (e.g. `temperature`, `salinity`) should also be on this same grid. So even though these tracers are computed by NEMO on the T-points, Parcels expects them on the f-points (`glamf`, `gphif` and `depthw`). Parcels then under the hood makes sure the interpolation of these tracers is done correctly.\n",
"Here, we focus on 3D fields. Basically, it is a straightforward extension of the 2D example, but it is very easy to make a mistake in the setup of the vertical discretisation that would affect the interpolation scheme.\n",
"\n",
"The code below is an example of how to create a 3D simulation with particles, starting in the mouth of the river Rhine at 1m depth, and advecting them through the North Sea using the `AdvectionRK4_3D`\n"
"For the C-grid interpolation in Parcels to work properly, it is important that `U`, `V` and `W` are on the same grid. All other tracers (e.g. `temperature`, `salinity`) should also be on this same grid. So even though these tracers are computed by NEMO on the T-points, Parcels expects them on the f-points (`glamf`, `gphif` and `depthw`). Parcels then under the hood makes sure the interpolation of these tracers is done correctly."
]
},
{
@@ -46,69 +33,18 @@
"metadata": {},
"outputs": [],
"source": [
"import warnings\n",
"from datetime import timedelta\n",
"from glob import glob\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import xarray as xr\n",
"\n",
"import parcels\n",
"from parcels import FileWarning\n",
"\n",
"# Add a filter for the xarray decoding warning\n",
"warnings.simplefilter(\"ignore\", FileWarning)\n",
"\n",
"example_dataset_folder = parcels.download_example_dataset(\n",
" \"NemoNorthSeaORCA025-N006_data\"\n",
")\n",
"ufiles = sorted(glob(f\"{example_dataset_folder}/ORCA*U.nc\"))\n",
"vfiles = sorted(glob(f\"{example_dataset_folder}/ORCA*V.nc\"))\n",
"wfiles = sorted(glob(f\"{example_dataset_folder}/ORCA*W.nc\"))\n",
"# tfiles = sorted(glob(f\"{example_dataset_folder}/ORCA*T.nc\")) # Not used in this example\n",
"mesh_mask = f\"{example_dataset_folder}/coordinates.nc\"\n",
"\n",
"filenames = {\n",
" \"U\": {\"lon\": mesh_mask, \"lat\": mesh_mask, \"depth\": wfiles[0], \"data\": ufiles},\n",
" \"V\": {\"lon\": mesh_mask, \"lat\": mesh_mask, \"depth\": wfiles[0], \"data\": vfiles},\n",
" \"W\": {\"lon\": mesh_mask, \"lat\": mesh_mask, \"depth\": wfiles[0], \"data\": wfiles},\n",
" # \"T\": {\"lon\": mesh_mask, \"lat\": mesh_mask, \"depth\": wfiles[0], \"data\": tfiles}, # Not used in this example\n",
"}\n",
"\n",
"variables = {\n",
" \"U\": \"uo\",\n",
" \"V\": \"vo\",\n",
" \"W\": \"wo\",\n",
" # \"T\": \"thetao\", # Not used in this example\n",
"}\n",
"\n",
"# Note that all variables need the same dimensions in a C-Grid\n",
"c_grid_dimensions = {\n",
" \"lon\": \"glamf\",\n",
" \"lat\": \"gphif\",\n",
" \"depth\": \"depthw\",\n",
" \"time\": \"time_counter\",\n",
"}\n",
"dimensions = {\n",
" \"U\": c_grid_dimensions,\n",
" \"V\": c_grid_dimensions,\n",
" \"W\": c_grid_dimensions,\n",
" # \"T\": c_grid_dimensions, # Not used in this example\n",
"}\n",
"\n",
"fieldset = parcels.FieldSet.from_nemo(filenames, variables, dimensions)\n",
"\n",
"pset = parcels.ParticleSet.from_line(\n",
" fieldset=fieldset,\n",
" pclass=parcels.Particle,\n",
" size=10,\n",
" start=(1.9, 52.5),\n",
" finish=(3.4, 51.6),\n",
" depth=1,\n",
")\n",
"\n",
"kernels = pset.Kernel(parcels.kernels.AdvectionRK4_3D)\n",
"pset.execute(kernels, runtime=timedelta(days=4), dt=timedelta(hours=6))"
"import parcels"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Parcels v4 comes with a `from_nemo` method that automatically sets up the correct `Grid` for both 2D and 3D NEMO data. However, you may need to do some data-wrangling on your NEMO data files, such as below:"
]
},
{
@@ -117,47 +53,70 @@
"metadata": {},
"outputs": [],
"source": [
"depth_level = 8\n",
"print(\n",
" f\"Level[{int(depth_level)}] depth is: \"\n",
" f\"[{fieldset.W.grid.depth[depth_level]:g} \"\n",
" f\"{fieldset.W.grid.depth[depth_level + 1]:g}]\"\n",
"data_folder = parcels.download_example_dataset(\"NemoNorthSeaORCA025-N006_data\")\n",
"\n",
"# Open field datasets (with minimal loading to speed up)\n",
"ds_fields = xr.open_mfdataset(\n",
" data_folder.glob(\"ORCA*.nc\"),\n",
" data_vars=\"minimal\",\n",
" coords=\"minimal\",\n",
" compat=\"override\",\n",
")\n",
"\n",
"plt.pcolormesh(\n",
" fieldset.U.grid.lon, fieldset.U.grid.lat, fieldset.U.data[0, depth_level, :, :]\n",
")\n",
"plt.show()"
"# Open coordinates dataset (with no time decoding because xarray otherwise complains)\n",
"ds_coords = xr.open_dataset(data_folder / \"coordinates.nc\", decode_times=False)\n",
"\n",
"# Remove time dimension from coordinates (otherwise xarray complains)\n",
"ds_coords = ds_coords.isel(time=0, drop=True)\n",
"\n",
"# Combine field and coordinate datasets\n",
"ds = xr.merge([ds_fields, ds_coords[[\"glamf\", \"gphif\"]]])\n",
"\n",
"fieldset = parcels.FieldSet.from_nemo(ds)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Adding other fields like cell edges\n",
"\n",
"It is quite straightforward to add other gridded data, on the same curvilinear or any other type of grid, to the fieldset. Because it is good practice to make no changes to a `FieldSet` once a `ParticleSet` has been defined in it, we redefine the fieldset and add the fields with the cell edges from the coordinates file using `FieldSet.add_field()`.\n",
"\n",
"Note that including tracers like `temperature` and `salinity` needs to be done at the f-points, so on the same grid as the velocity fields. See also the section above.\n"
"The code below is an example of how to then create a 3D simulation with particles, starting on a meridional line through the North Sea from the mouth of the river Rhine at 100m depth, and advecting them through the North Sea using the `AdvectionRK2_3D`"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"tags": [
"hide-output"
]
},
"outputs": [],
"source": [
"from parcels import Field"
"npart = 10\n",
"pset = parcels.ParticleSet(\n",
" fieldset=fieldset,\n",
" lon=np.linspace(1.9, 3.4, npart),\n",
" lat=np.linspace(65, 51.6, npart),\n",
" z=100 * np.ones(npart),\n",
")\n",
"\n",
"pfile = parcels.ParticleFile(\n",
" store=\"output_nemo3D.zarr\", outputdt=np.timedelta64(1, \"D\")\n",
")\n",
"\n",
"pset.execute(\n",
" parcels.kernels.AdvectionRK2_3D,\n",
" endtime=fieldset.time_interval.right,\n",
" dt=np.timedelta64(6, \"h\"),\n",
" output_file=pfile,\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"cell_type": "markdown",
"metadata": {},
"outputs": [],
"source": [
"fieldset = parcels.FieldSet.from_nemo(filenames, variables, dimensions)"
"We can then plot the trajectories on top of the surface U field"
]
},
{
@@ -166,34 +125,14 @@
"metadata": {},
"outputs": [],
"source": [
"e1u = Field.from_netcdf(\n",
" filenames=mesh_mask,\n",
" variable=\"e1u\",\n",
" dimensions={\"lon\": \"glamu\", \"lat\": \"gphiu\"},\n",
" interp_method=\"nearest\",\n",
")\n",
"e2u = Field.from_netcdf(\n",
" filenames=mesh_mask,\n",
" variable=\"e2u\",\n",
" dimensions={\"lon\": \"glamu\", \"lat\": \"gphiu\"},\n",
" interp_method=\"nearest\",\n",
")\n",
"e1v = Field.from_netcdf(\n",
" filenames=mesh_mask,\n",
" variable=\"e1v\",\n",
" dimensions={\"lon\": \"glamv\", \"lat\": \"gphiv\"},\n",
" interp_method=\"nearest\",\n",
")\n",
"e2v = Field.from_netcdf(\n",
" filenames=mesh_mask,\n",
" variable=\"e2v\",\n",
" dimensions={\"lon\": \"glamv\", \"lat\": \"gphiv\"},\n",
" interp_method=\"nearest\",\n",
")\n",
"fieldset.add_field(e1u)\n",
"fieldset.add_field(e2u)\n",
"fieldset.add_field(e1v)\n",
"fieldset.add_field(e2v)"
"field = fieldset.U.data[0, 0, :, :]\n",
"field = field.where(field != 0, np.nan) # Mask land values for better plotting\n",
"plt.pcolormesh(fieldset.U.grid.lon, fieldset.U.grid.lat, field, cmap=\"RdBu\")\n",
"\n",
"ds_out = xr.open_zarr(\"output_nemo3D.zarr\")\n",
"plt.scatter(ds_out.lon.T, ds_out.lat.T, c=-ds_out.z.T, marker=\".\")\n",
"plt.colorbar(label=\"Depth (m)\")\n",
"plt.show()"
]
}
],
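Likewise, the updated 3D-tutorial cells from the diff above, as one plain-Python listing. It is taken directly from the added notebook lines; the variant using parcels.convert.nemo_to_sgrid discussed in the review thread is not shown here because it currently raises the CoordinateValidationError above.

import matplotlib.pyplot as plt
import numpy as np
import xarray as xr
import parcels

data_folder = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data")

# Open field datasets (with minimal loading to speed up)
ds_fields = xr.open_mfdataset(
    data_folder.glob("ORCA*.nc"),
    data_vars="minimal",
    coords="minimal",
    compat="override",
)

# Open the coordinates dataset without time decoding and drop its time dimension
ds_coords = xr.open_dataset(data_folder / "coordinates.nc", decode_times=False)
ds_coords = ds_coords.isel(time=0, drop=True)

# Combine the fields with the f-point coordinates and build the FieldSet
ds = xr.merge([ds_fields, ds_coords[["glamf", "gphif"]]])
fieldset = parcels.FieldSet.from_nemo(ds)

# Release 10 particles on a line through the North Sea at 100 m depth
npart = 10
pset = parcels.ParticleSet(
    fieldset=fieldset,
    lon=np.linspace(1.9, 3.4, npart),
    lat=np.linspace(65, 51.6, npart),
    z=100 * np.ones(npart),
)

pfile = parcels.ParticleFile(store="output_nemo3D.zarr", outputdt=np.timedelta64(1, "D"))

pset.execute(
    parcels.kernels.AdvectionRK2_3D,
    endtime=fieldset.time_interval.right,
    dt=np.timedelta64(6, "h"),
    output_file=pfile,
)

# Plot the trajectories on top of the surface U field
field = fieldset.U.data[0, 0, :, :]
field = field.where(field != 0, np.nan)  # mask land values for better plotting
plt.pcolormesh(fieldset.U.grid.lon, fieldset.U.grid.lat, field, cmap="RdBu")

ds_out = xr.open_zarr("output_nemo3D.zarr")
plt.scatter(ds_out.lon.T, ds_out.lat.T, c=-ds_out.z.T, marker=".")
plt.colorbar(label="Depth (m)")
plt.show()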
2 changes: 1 addition & 1 deletion docs/user_guide/index.md
@@ -30,12 +30,12 @@ TODO: Add links to Reference API throughout
:titlesonly:
examples/explanation_grids.md
examples/tutorial_nemo_curvilinear.ipynb
examples_v3/tutorial_nemo_3D.ipynb # TODO move to examples folder
examples/tutorial_unitconverters.ipynb
examples/tutorial_nestedgrids.ipynb
```

<!-- examples/documentation_indexing.ipynb -->
<!-- examples/tutorial_nemo_3D.ipynb -->
<!-- examples/tutorial_croco_3D.ipynb -->
<!-- examples/tutorial_timevaryingdepthdimensions.ipynb -->
