Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Addresses metadata issues for duration subdivision plugin #2094

Merged
merged 3 commits into from
Feb 18, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions improver/utilities/save.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,15 @@ def _order_cell_methods(cube: Cube) -> None:
"""
Sorts the cell methods on a cube such that if there are multiple methods
they are always written in a consistent order in the output cube. The
input cube is modified.
input cube is modified. Ensure that if there are any identical duplicate
cell methods, only one of these is included in the outputs.

Args:
cube:
The cube on which the cell methods are to be sorted.
"""
cell_methods = tuple(sorted(cube.cell_methods))
cell_methods = set(cube.cell_methods)
cell_methods = tuple(sorted(cell_methods))
cube.cell_methods = cell_methods


Expand Down
11 changes: 8 additions & 3 deletions improver/utilities/temporal_interpolation.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
from improver import BasePlugin
from improver.metadata.constants import FLOAT_DTYPE
from improver.metadata.constants.time_types import TIME_COORDS
from improver.metadata.forecast_times import unify_cycletime
from improver.metadata.utilities import enforce_time_point_standard
from improver.utilities.complex_conversion import complex_to_deg, deg_to_complex
from improver.utilities.cube_manipulation import MergeCubes
from improver.utilities.round import round_close
Expand Down Expand Up @@ -884,12 +886,15 @@ def construct_target_periods(self, fidelity_period_cube: Cube) -> Cube:
)
components = fidelity_period_cube.extract(period_constraint)
component_cube = components.collapsed("time", iris.analysis.SUM)
component_cube.coord("time").points = component_cube.coord("time").bounds[
0
][-1]
enforce_time_point_standard(component_cube)
new_period_cubes.append(component_cube)
start_time += interval
# The cycle times are already the same. This code will recalculate
# the forecast periods relative to the cycletime for each of our
# extracted shorter duration cubes.
cycle_time = fidelity_period_cube.coord("forecast_reference_time").cell(0).point

new_period_cubes = unify_cycletime(new_period_cubes, cycle_time)
return new_period_cubes.merge_cube()

def process(self, cube: Cube) -> Cube:
Expand Down
6 changes: 3 additions & 3 deletions improver_tests/acceptance/SHA256SUMS
Original file line number Diff line number Diff line change
Expand Up @@ -334,9 +334,9 @@ bf7e42be7897606682c3ecdaeb27bf3d3b6ab13a9a88b46c88ae6e92801c6245 ./create-grid-
55ba8a8ca8b5eee667d37fe8ec4a653caddea27f19ea290397428a487eb13ca0 ./cubelist-extract/input_cubelist.nc
33c7e0cf46ac62ead74ffde502ee28076a59550474fb3872c3e22083c4bd3cc3 ./cubelist-extract/kgo.nc
368f3c0c658d1155399ad4bdbfe0f98e0c65f5c53a49ece105bba3758012c0e8 ./duration-subdivision/input.nc
2c8c4972ae2dca29a05ac62f982cdd5727546c19a1699f4366416a12640ed2f8 ./duration-subdivision/kgo_daymask.nc
fc87547220adc1326af0e484a826d8950a7acc6e3c2d76ce63b7beea6133bf73 ./duration-subdivision/kgo_nightmask.nc
f2fd4c7884e50ab90f0d085a66d7b3b41c9bf09508481ceaa409a9025fde8386 ./duration-subdivision/kgo_nomask.nc
f56f65dca4c6887c422c23e10b63e034ed9d5388081dd2030d690c5f5be73fa4 ./duration-subdivision/kgo_daymask.nc
19388549f7a5f1bc616bb353d5a1380a2f26763cd3a223c0d3eabbc5fc4b389d ./duration-subdivision/kgo_nightmask.nc
8dc93f63957a89eb027b8552c1e923586ac8804f63e1d452a0cab31a9ea5cfc9 ./duration-subdivision/kgo_nomask.nc
fe00aadb6854f44d765b40bb6608c23f4eb4f10193c96f43f207db0590739dab ./enforce-consistent-forecasts/double_bound_percentile_kgo.nc
51f9ff2c8e6cad54d04d6323c654ce25b812bea3ba6b0d85df21c19731c580fc ./enforce-consistent-forecasts/percentile_forecast.nc
e210bf956dd3574eda3a64fdc3371ab16f85048ca120a3823e90d751d2325c46 ./enforce-consistent-forecasts/percentile_reference.nc
Expand Down
19 changes: 18 additions & 1 deletion improver_tests/utilities/test_DurationSubdivision.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import numpy as np
import pytest
from iris.cube import Cube, CubeList
from numpy.testing import assert_array_equal

from improver.synthetic_data.set_up_test_cubes import set_up_variable_cube
from improver.utilities.temporal_interpolation import DurationSubdivision
Expand Down Expand Up @@ -371,7 +372,7 @@ def test_allocate_data(data_cube, kwargs, data, time, period, realizations):
if not any([kwargs[key] for key in kwargs.keys() if "mask" in key]):
# Check that summing over the time dimension returns the original data
# if we've applied no masking.
np.testing.assert_array_equal(collapsed_rslice, data)
assert_array_equal(collapsed_rslice, data)
# Without masking we can test that all the shorter durations are the
# expected fraction of the total.
for cslice in rslice.slices_over("time"):
Expand Down Expand Up @@ -514,6 +515,22 @@ def test_construct_target_periods(kwargs, data, input_period, expected):
assert bounds == kwargs["target_period"]
assert cslice.coord("time").bounds[0][-1] == cslice.coord("time").points[0]

# Check forecast periods have been recalculated relative to time
# coordinate as expected.
expected_fp_lower = (
cslice[0].coord("time").cell(0).bound[0]
- cslice[0].coord("forecast_reference_time").cell(0).point
).total_seconds()
expected_fp_upper = (
cslice[0].coord("time").cell(0).bound[-1]
- cslice[0].coord("forecast_reference_time").cell(0).point
).total_seconds()
assert cslice.coord("forecast_period").points[0] == expected_fp_upper
assert_array_equal(
cslice.coord("forecast_period").bounds,
[[expected_fp_lower, expected_fp_upper]],
)

# Check subdivided data is as expected. Also checks that shape is as
# expected.
np.testing.assert_array_almost_equal(result.data, expected)
Expand Down
29 changes: 29 additions & 0 deletions improver_tests/utilities/test_save.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
# See LICENSE in the root of the repository for full licensing details.
"""Unit tests for saving functionality."""

import copy
import os
import unittest
from tempfile import mkdtemp
Expand Down Expand Up @@ -294,6 +295,34 @@ def test_reordering_cube(self):
# Test that they do match once sorting has occurred.
self.assertEqual(self.cube.cell_methods, self.cell_methods)

def test_duplicates_removed(self):
    """Check that exact duplicate cell methods collapse to a single
    instance in the sorted output."""
    # Repeat every method twice to create exact duplicates.
    self.cube.cell_methods = self.cell_methods * 2
    _order_cell_methods(self.cube)
    self.assertEqual(self.cube.cell_methods, self.cell_methods)

def test_inexact_duplicates_retained(self):
    """Test that if cell_methods are almost duplicated, but one has an
    additional property, e.g. a comment, both are retained. This test is
    overkill as we are effectively testing the ability of the set command
    to differentiate the cell methods, but it is included for
    completeness."""
    # This method differs from an existing one only by its comment; the
    # deduplication via set() must therefore keep both.
    extra = CellMethod(
        method="maximum",
        coords="time",
        intervals="1 hour",
        comments="I am unique and special",
    )
    cell_methods = tuple(sorted(self.cell_methods + (extra,)))
    self.cube.cell_methods = copy.copy(cell_methods)
    _order_cell_methods(self.cube)
    self.assertEqual(self.cube.cell_methods, cell_methods)


# Allow the unit tests to be run directly as a script.
if __name__ == "__main__":
    unittest.main()