Merge branch 'main' into init-zarr
dcherian authored Jan 5, 2024
2 parents bb9f72f + e023903 commit a529f1d
Showing 14 changed files with 65 additions and 46 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/benchmarks-last-release.yml
@@ -72,7 +72,7 @@ jobs:
           cp benchmarks/README_CI.md benchmarks.log .asv/results/
         working-directory: ${{ env.ASV_DIR }}
 
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
         if: always()
         with:
           name: asv-benchmark-results-${{ runner.os }}
2 changes: 1 addition & 1 deletion .github/workflows/benchmarks.yml
@@ -67,7 +67,7 @@ jobs:
           cp benchmarks/README_CI.md benchmarks.log .asv/results/
         working-directory: ${{ env.ASV_DIR }}
 
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
         if: always()
         with:
           name: asv-benchmark-results-${{ runner.os }}
13 changes: 6 additions & 7 deletions .github/workflows/ci-additional.yaml
@@ -320,11 +320,6 @@ jobs:
       run:
         shell: bash -l {0}
 
-    strategy:
-      matrix:
-        environment-file: ["bare-minimum", "min-all-deps"]
-      fail-fast: false
-
     steps:
       - uses: actions/checkout@v4
         with:
@@ -340,6 +335,10 @@
           conda
           python-dateutil
-      - name: minimum versions policy
+      - name: All-deps minimum versions policy
         run: |
-          python ci/min_deps_check.py ci/requirements/${{ matrix.environment-file }}.yml
+          python ci/min_deps_check.py ci/requirements/min-all-deps.yml
+      - name: Bare minimum versions policy
+        run: |
+          python ci/min_deps_check.py ci/requirements/bare-minimum.yml
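(The former strategy matrix ran each requirements file in its own job; folding both checks into one job as separately named steps avoids spinning up a second runner for a pair of quick script invocations and, presumably, makes the step name say at a glance which file violated the policy.)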
8 changes: 5 additions & 3 deletions .github/workflows/ci.yaml
@@ -34,6 +34,8 @@ jobs:
     runs-on: ${{ matrix.os }}
     needs: detect-ci-trigger
     if: needs.detect-ci-trigger.outputs.triggered == 'false'
+    env:
+      ZARR_V3_EXPERIMENTAL_API: 1
     defaults:
       run:
         shell: bash -l {0}
@@ -127,9 +129,9 @@ jobs:
 
       - name: Upload test results
         if: always()
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: Test results for ${{ runner.os }}-${{ matrix.python-version }}
+          name: Test results for ${{ runner.os }}-${{ matrix.python-version }} ${{ matrix.env }}
           path: pytest.xml
 
       - name: Upload code coverage to Codecov
@@ -147,7 +149,7 @@
     if: github.repository == 'pydata/xarray'
     steps:
       - name: Upload
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: Event File
           path: ${{ github.event_path }}
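Two things happen in this file: the job-level env: block turns on zarr-python's experimental v3 API for every step of the test job (see also upstream-dev-ci.yaml below), and the artifact rename is needed because upload-artifact@v4, unlike v3, rejects a second upload to the same artifact name within a run; appending ${{ matrix.env }} keeps each matrix entry's test-results artifact unique.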
2 changes: 1 addition & 1 deletion .github/workflows/pypi-release.yaml
@@ -41,7 +41,7 @@ jobs:
           else
             echo "✅ Looks good"
           fi
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
         with:
           name: releases
           path: dist
3 changes: 2 additions & 1 deletion .github/workflows/upstream-dev-ci.yaml
@@ -37,6 +37,8 @@ jobs:
     name: upstream-dev
     runs-on: ubuntu-latest
     needs: detect-ci-trigger
+    env:
+      ZARR_V3_EXPERIMENTAL_API: 1
     if: |
       always()
       && (
@@ -82,7 +84,6 @@
         if: success()
         id: status
         run: |
-          export ZARR_V3_EXPERIMENTAL_API=1
           python -m pytest --timeout=60 -rf \
             --report-log output-${{ matrix.python-version }}-log.jsonl
       - name: Generate and publish the report
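Moving the flag from a per-step export into the job-level env: block makes it visible to every step rather than only the pytest invocation. As a sketch of what the flag actually gates (assuming zarr-python 2.x, where the experimental v3 API is disabled unless the variable is set, and assuming the zarr_version argument that contemporary xarray's to_zarr accepts; the output path is hypothetical):

    import os

    # zarr-python 2.x refuses to use its experimental v3 store API unless
    # this variable is present in the process environment.
    os.environ["ZARR_V3_EXPERIMENTAL_API"] = "1"

    import xarray as xr

    ds = xr.Dataset({"a": ("x", [1, 2, 3])})
    # Write with the experimental zarr v3 on-disk format.
    ds.to_zarr("example_v3.zarr", mode="w", zarr_version=3)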
6 changes: 3 additions & 3 deletions ci/requirements/bare-minimum.yml
@@ -11,6 +11,6 @@ dependencies:
   - pytest-env
   - pytest-xdist
   - pytest-timeout
-  - numpy=1.22
-  - packaging=21.3
-  - pandas=1.4
+  - numpy=1.23
+  - packaging=22.0
+  - pandas=1.5
35 changes: 19 additions & 16 deletions ci/requirements/min-all-deps.yml
@@ -10,30 +10,35 @@ dependencies:
   - python=3.9
   - boto3=1.24
   - bottleneck=1.3
-  - cartopy=0.20
+  - cartopy=0.21
   - cftime=1.6
   - coveralls
-  - dask-core=2022.7
-  - distributed=2022.7
-  - flox=0.5
+  - dask-core=2022.12
+  - distributed=2022.12
+  # Flox > 0.8 has a bug with numbagg versions
+  # It will require numbagg > 0.6
+  # so we should just skip that series eventually
+  # or keep flox pinned for longer than necessary
+  - flox=0.7
   - h5netcdf=1.1
   # h5py and hdf5 tend to cause conflicts
   # for e.g. hdf5 1.12 conflicts with h5py=3.1
   # prioritize bumping other packages instead
   - h5py=3.7
   - hdf5=1.12
   - hypothesis
-  - iris=3.2
+  - iris=3.4
   - lxml=4.9 # Optional dep of pydap
-  - matplotlib-base=3.5
+  - matplotlib-base=3.6
   - nc-time-axis=1.4
   # netcdf follows a 1.major.minor[.patch] convention
   # (see https://github.com/Unidata/netcdf4-python/issues/1090)
   - netcdf4=1.6.0
-  - numba=0.55
-  - numpy=1.22
-  - packaging=21.3
-  - pandas=1.4
+  - numba=0.56
+  - numbagg=0.2.1
+  - numpy=1.23
+  - packaging=22.0
+  - pandas=1.5
   - pint=0.22
   - pip
   - pydap=3.3
@@ -43,11 +48,9 @@ dependencies:
   - pytest-xdist
   - pytest-timeout
   - rasterio=1.3
-  - scipy=1.8
-  - seaborn=0.11
+  - scipy=1.10
+  - seaborn=0.12
   - sparse=0.13
   - toolz=0.12
-  - typing_extensions=4.3
-  - zarr=2.12
-  - pip:
-    - numbagg==0.2.1
+  - typing_extensions=4.4
+  - zarr=2.13
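These pins follow xarray's rolling minimum-version policy (roughly: Python supported for 42 months, numpy for 24, other libraries for 12), which is what ci/min_deps_check.py enforces against the two requirements files. A minimal sketch of the rolling-window idea; the release date below is illustrative, whereas the real script looks dates up from the package repositories:

    from datetime import datetime

    # Policy windows in months; anything unlisted falls back to 12.
    POLICY_MONTHS = {"python": 42, "numpy": 24}

    def months_old(release: datetime, today: datetime) -> float:
        return (today - release).days / 30.44  # average month length

    # A minimum pin is acceptable once the pinned release is older than
    # its window. Illustrative date for pandas 1.5.0, not a live lookup.
    today = datetime(2024, 1, 5)
    pandas_150 = datetime(2022, 9, 19)
    print(months_old(pandas_150, today) >= POLICY_MONTHS.get("pandas", 12))  # True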
3 changes: 1 addition & 2 deletions doc/ecosystem.rst
@@ -98,7 +98,6 @@ Visualization
 Non-Python projects
 ~~~~~~~~~~~~~~~~~~~
 - `xframe <https://github.com/xtensor-stack/xframe>`_: C++ data structures inspired by xarray.
-- `AxisArrays <https://github.com/JuliaArrays/AxisArrays.jl>`_ and
-  `NamedArrays <https://github.com/davidavdav/NamedArrays.jl>`_: similar data structures for Julia.
+- `AxisArrays <https://github.com/JuliaArrays/AxisArrays.jl>`_, `NamedArrays <https://github.com/davidavdav/NamedArrays.jl>`_ and `YAXArrays.jl <https://github.com/JuliaDataCubes/YAXArrays.jl>`_: similar data structures for Julia.
 
 More projects can be found at the `"xarray" Github topic <https://github.com/topics/xarray>`_.
20 changes: 20 additions & 0 deletions doc/whats-new.rst
@@ -34,6 +34,26 @@ New Features
 Breaking changes
 ~~~~~~~~~~~~~~~~
 
+- The minimum versions of some dependencies were changed (:pull:`8586`):
+
+  ===================== ========= ========
+  Package                     Old      New
+  ===================== ========= ========
+  cartopy                    0.20     0.21
+  dask-core                2022.7  2022.12
+  distributed              2022.7  2022.12
+  flox                        0.5      0.7
+  iris                        3.2      3.4
+  matplotlib-base             3.5      3.6
+  numpy                      1.22     1.23
+  numba                      0.55     0.56
+  packaging                  21.3     22.0
+  seaborn                    0.11     0.12
+  scipy                       1.8     1.10
+  typing_extensions           4.3      4.4
+  zarr                       2.12     2.13
+  ===================== ========= ========
+
 
 Deprecations
 ~~~~~~~~~~~~
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -22,9 +22,9 @@ readme = "README.md"
 requires-python = ">=3.9"
 
 dependencies = [
-  "numpy>=1.22",
-  "packaging>=21.3",
-  "pandas>=1.4",
+  "numpy>=1.23",
+  "packaging>=22",
+  "pandas>=1.5",
 ]
 
 [project.urls]
2 changes: 1 addition & 1 deletion xarray/coding/variables.py
@@ -414,7 +414,7 @@ def decode(self, variable: Variable, name: T_Name = None) -> Variable:
 class UnsignedIntegerCoder(VariableCoder):
     def encode(self, variable: Variable, name: T_Name = None) -> Variable:
         # from netCDF best practices
-        # https://www.unidata.ucar.edu/software/netcdf/docs/BestPractices.html
+        # https://docs.unidata.ucar.edu/nug/current/best_practices.html#bp_Unsigned-Data
         # "_Unsigned = "true" to indicate that
         #     integer data should be treated as unsigned"
         if variable.encoding.get("_Unsigned", "false") == "true":
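Only the stale documentation link changes here; the convention itself is untouched. A sketch of what the coder implements: netCDF-3 has no unsigned types, so signed bytes on disk plus _Unsigned="true" decode to unsigned values (expected output shown in comments, not verified against this exact commit):

    import numpy as np
    import xarray as xr

    # Signed int8 storage with the _Unsigned attribute set, as netCDF-3
    # files use to fake unsigned bytes.
    raw = xr.Dataset(
        {"x": ("t", np.array([-1, 0, 1], dtype="i1"), {"_Unsigned": "true"})}
    )

    decoded = xr.decode_cf(raw)
    print(decoded["x"].dtype)   # uint8: the -1 byte is reinterpreted as 255
    print(decoded["x"].values)  # [255   0   1]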
2 changes: 0 additions & 2 deletions xarray/tests/test_backends.py
@@ -443,8 +443,6 @@ def test_pickle(self) -> None:
 
     @pytest.mark.filterwarnings("ignore:deallocating CachingFileManager")
     def test_pickle_dataarray(self) -> None:
-        if not has_dask:
-            pytest.xfail("pickling requires dask for SerializableLock")
         expected = Dataset({"foo": ("x", [42])})
         with self.roundtrip(expected, allow_cleanup_failure=ON_WINDOWS) as roundtripped:
             with roundtripped:
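The xfail guard goes away, presumably because pickling file-backed datasets no longer routes through dask's SerializableLock. Stripped of the backend round-trip machinery, the invariant the test protects is roughly this sketch (in-memory only, not the file-backed case the test exercises):

    import pickle
    import xarray as xr

    expected = xr.Dataset({"foo": ("x", [42])})
    # Round-trip through pickle and compare; dask is not required.
    restored = pickle.loads(pickle.dumps(expected))
    assert restored.identical(expected)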
7 changes: 2 additions & 5 deletions xarray/tests/test_conventions.py
@@ -52,10 +52,9 @@ def test_decode_cf_with_conflicting_fill_missing_value() -> None:
     var = Variable(
         ["t"], np.arange(3), {"units": "foobar", "missing_value": 0, "_FillValue": 1}
     )
-    with warnings.catch_warnings(record=True) as w:
+    with pytest.warns(SerializationWarning, match="has multiple fill"):
         actual = conventions.decode_cf_variable("t", var)
         assert_identical(actual, expected)
-        assert "has multiple fill" in str(w[0].message)
 
     expected = Variable(["t"], np.arange(10), {"units": "foobar"})
 
@@ -293,10 +292,9 @@ def test_0d_int32_encoding(self) -> None:
     def test_decode_cf_with_multiple_missing_values(self) -> None:
         original = Variable(["t"], [0, 1, 2], {"missing_value": np.array([0, 1])})
         expected = Variable(["t"], [np.nan, np.nan, 2], {})
-        with warnings.catch_warnings(record=True) as w:
+        with pytest.warns(SerializationWarning, match="has multiple fill"):
             actual = conventions.decode_cf_variable("t", original)
             assert_identical(expected, actual)
-            assert "has multiple fill" in str(w[0].message)
 
     def test_decode_cf_with_drop_variables(self) -> None:
         original = Dataset(
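Both conventions tests above swap hand-rolled warning capture for pytest.warns, which asserts in one step that the warning is raised and that its message matches. The pattern in isolation, with generic names rather than xarray's:

    import warnings
    import pytest

    def emit():
        warnings.warn("variable has multiple fill values", UserWarning)

    def test_emit():
        # Fails the test if no matching warning is raised, and checks the
        # message against the regular expression.
        with pytest.warns(UserWarning, match="has multiple fill"):
            emit()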
@@ -387,7 +385,6 @@ def test_decode_cf_with_dask(self) -> None:
             }
         ).chunk()
         decoded = conventions.decode_cf(original)
-        print(decoded)
         assert all(
             isinstance(var.data, da.Array)
             for name, var in decoded.variables.items()
