
Commit a826249

Merge remote-tracking branch 'upstream/main' into duckarray-tests
* upstream/main: (113 commits)
  Fixed a mispelling of dimension in dataarray documentation for from_dict (pydata#6020)
  [pre-commit.ci] pre-commit autoupdate (pydata#6014)
  [pre-commit.ci] pre-commit autoupdate (pydata#5990)
  Use set_options for asv bottleneck tests (pydata#5986)
  Fix module name retrieval in `backend.plugins.remove_duplicates()`, plugin tests (pydata#5959)
  Check for py version instead of try/except when importing entry_points (pydata#5988)
  Add "see also" in to_dataframe docs (pydata#5978)
  Alternate method using inline css to hide regular html output in an untrusted notebook (pydata#5880)
  Fix mypy issue with entry_points (pydata#5979)
  Remove pre-commit auto update (pydata#5958)
  Do not change coordinate inplace when throwing error (pydata#5957)
  Create CITATION.cff (pydata#5956)
  Add groupby & resample benchmarks (pydata#5922)
  Fix plot.line crash for data of shape (1, N) in _title_for_slice on format_item (pydata#5948)
  Disable unit test comments (pydata#5946)
  Publish test results from workflow_run only (pydata#5947)
  Generator for groupby reductions (pydata#5871)
  whats-new dev
  whats-new for 0.20.1 (pydata#5943)
  Docs: fix URL for PTSA (pydata#5935)
  ...
2 parents 1d98fec + 5db4046 commit a826249

173 files changed (+9164 / −3490 lines)


.git-blame-ignore-revs

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+# black PR 3142
+d089df385e737f71067309ff7abae15994d581ec
+
+# isort PR 1924
+0e73e240107caee3ffd1a1149f0150c390d43251
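For the ignore list to take effect in a local clone, git has to be told to consult it. A minimal sketch (the blamed path below is only an illustrative example, not part of this commit):

# One-time setup: point git blame at the ignore list
git config blame.ignoreRevsFile .git-blame-ignore-revs
# Blame now skips the black/isort reformatting commits listed above
git blame xarray/core/dataset.py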

.github/workflows/benchmarks.yml

Lines changed: 74 additions & 0 deletions
@@ -0,0 +1,74 @@
+name: Benchmark
+
+on:
+  pull_request:
+    types: [opened, reopened, synchronize, labeled]
+  workflow_dispatch:
+
+jobs:
+  benchmark:
+    if: ${{ contains( github.event.pull_request.labels.*.name, 'run-benchmark') && github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' }}
+    name: Linux
+    runs-on: ubuntu-20.04
+    env:
+      ASV_DIR: "./asv_bench"
+
+    steps:
+      # We need the full repo to avoid this issue
+      # https://github.com/actions/checkout/issues/23
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
+      - name: Setup Miniconda
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          # installer-url: https://github.com/conda-forge/miniforge/releases/latest/download/Mambaforge-Linux-x86_64.sh
+          installer-url: https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh
+
+      - name: Setup some dependencies
+        shell: bash -l {0}
+        run: |
+          pip install asv
+          sudo apt-get update -y
+
+      - name: Run benchmarks
+        shell: bash -l {0}
+        id: benchmark
+        env:
+          OPENBLAS_NUM_THREADS: 1
+          MKL_NUM_THREADS: 1
+          OMP_NUM_THREADS: 1
+          ASV_FACTOR: 1.5
+          ASV_SKIP_SLOW: 1
+        run: |
+          set -x
+          # ID this runner
+          asv machine --yes
+          echo "Baseline: ${{ github.event.pull_request.base.sha }} (${{ github.event.pull_request.base.label }})"
+          echo "Contender: ${GITHUB_SHA} (${{ github.event.pull_request.head.label }})"
+          # Use mamba for env creation
+          # export CONDA_EXE=$(which mamba)
+          export CONDA_EXE=$(which conda)
+          # Run benchmarks for current commit against base
+          ASV_OPTIONS="--split --show-stderr --factor $ASV_FACTOR"
+          asv continuous $ASV_OPTIONS ${{ github.event.pull_request.base.sha }} ${GITHUB_SHA} \
+              | sed "/Traceback \|failed$\|PERFORMANCE DECREASED/ s/^/::error::/" \
+              | tee benchmarks.log
+          # Report and export results for subsequent steps
+          if grep "Traceback \|failed\|PERFORMANCE DECREASED" benchmarks.log > /dev/null ; then
+              exit 1
+          fi
+        working-directory: ${{ env.ASV_DIR }}
+
+      - name: Add instructions to artifact
+        if: always()
+        run: |
+          cp benchmarks/README_CI.md benchmarks.log .asv/results/
+        working-directory: ${{ env.ASV_DIR }}
+
+      - uses: actions/upload-artifact@v2
+        if: always()
+        with:
+          name: asv-benchmark-results-${{ runner.os }}
+          path: ${{ env.ASV_DIR }}/.asv/results
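At its core the job is an `asv continuous` run that compares the PR head against its base commit and fails on slowdowns above the configured factor. A minimal sketch of reproducing the comparison locally, assuming asv is installed and `upstream/main` is the intended baseline:

cd asv_bench
asv machine --yes    # register this machine non-interactively
# Compare the current checkout against the baseline; flag slowdowns larger than 1.5x
asv continuous --split --show-stderr --factor 1.5 upstream/main HEAD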

.github/workflows/ci-additional.yaml

Lines changed: 1 addition & 2 deletions
@@ -42,7 +42,6 @@ jobs:
           [
             "py37-bare-minimum",
             "py37-min-all-deps",
-            "py37-min-nep18",
             "py38-all-but-dask",
             "py38-flaky",
           ]
@@ -103,7 +102,7 @@ jobs:
             $PYTEST_EXTRA_FLAGS

       - name: Upload code coverage to Codecov
-        uses: codecov/codecov-action@v2.0.2
+        uses: codecov/codecov-action@v2.1.0
         with:
           file: ./coverage.xml
           flags: unittests,${{ matrix.env }}

.github/workflows/ci-pre-commit-autoupdate.yaml

Lines changed: 0 additions & 44 deletions
This file was deleted.

.github/workflows/ci.yaml

Lines changed: 7 additions & 14 deletions
@@ -100,27 +100,20 @@ jobs:
           path: pytest.xml

       - name: Upload code coverage to Codecov
-        uses: codecov/codecov-action@v2.0.2
+        uses: codecov/codecov-action@v2.1.0
         with:
           file: ./coverage.xml
           flags: unittests
           env_vars: RUNNER_OS,PYTHON_VERSION
           name: codecov-umbrella
           fail_ci_if_error: false

-  publish-test-results:
-    needs: test
+  event_file:
+    name: "Event File"
     runs-on: ubuntu-latest
-    # the build-and-test job might be skipped, we don't need to run this job then
-    if: success() || failure()
-
     steps:
-      - name: Download Artifacts
-        uses: actions/download-artifact@v2
-        with:
-          path: test-results
-
-      - name: Publish Unit Test Results
-        uses: EnricoMi/publish-unit-test-result-action@v1
+      - name: Upload
+        uses: actions/upload-artifact@v2
         with:
-          files: test-results/**/*.xml
+          name: Event File
+          path: ${{ github.event_path }}
.github/workflows/publish-test-results.yaml

Lines changed: 7 additions & 11 deletions

@@ -1,4 +1,4 @@
-# Copied from https://github.com/EnricoMi/publish-unit-test-result-action/blob/v1.18/README.md#support-fork-repositories-and-dependabot-branches
+# Copied from https://github.com/EnricoMi/publish-unit-test-result-action/blob/v1.23/README.md#support-fork-repositories-and-dependabot-branches

 name: Publish test results

@@ -12,11 +12,7 @@ jobs:
   publish-test-results:
     name: Publish test results
     runs-on: ubuntu-latest
-    if: >
-      github.event.workflow_run.conclusion != 'skipped' && (
-        github.event.sender.login == 'dependabot[bot]' ||
-        github.event.workflow_run.head_repository.full_name != github.repository
-      )
+    if: github.event.workflow_run.conclusion != 'skipped'

     steps:
       - name: Download and extract artifacts
@@ -26,13 +22,10 @@ jobs:
           mkdir artifacts && cd artifacts

           artifacts_url=${{ github.event.workflow_run.artifacts_url }}
-          artifacts=$(gh api $artifacts_url -q '.artifacts[] | {name: .name, url: .archive_download_url}')

-          IFS=$'\n'
-          for artifact in $artifacts
+          gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
           do
-            name=$(jq -r .name <<<$artifact)
-            url=$(jq -r .url <<<$artifact)
+            IFS=$'\t' read name url <<< "$artifact"
             gh api $url > "$name.zip"
             unzip -d "$name" "$name.zip"
           done
@@ -41,4 +34,7 @@ jobs:
         uses: EnricoMi/publish-unit-test-result-action@v1
         with:
           commit: ${{ github.event.workflow_run.head_sha }}
+          event_file: artifacts/Event File/event.json
+          event_name: ${{ github.event.workflow_run.event }}
           files: "artifacts/**/*.xml"
+          comment_mode: off
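The rewritten loop asks `gh api` for tab-separated name/URL pairs instead of parsing each artifact with a separate jq call. A hedged sketch of the same download loop run locally against an arbitrary workflow run (`<run-id>` is a placeholder; an authenticated GitHub CLI is assumed):

mkdir artifacts && cd artifacts
artifacts_url="repos/pydata/xarray/actions/runs/<run-id>/artifacts"
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' |
while IFS=$'\t' read -r name url; do
    gh api "$url" > "$name.zip"     # download each artifact archive
    unzip -d "$name" "$name.zip"    # unpack it into its own directory
done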

.github/workflows/upstream-dev-ci.yaml

Lines changed: 3 additions & 3 deletions
@@ -122,7 +122,7 @@ jobs:
           shopt -s globstar
           python .github/workflows/parse_logs.py logs/**/*-log
       - name: Report failures
-        uses: actions/github-script@v4.0.2
+        uses: actions/github-script@v5
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
@@ -158,15 +158,15 @@ jobs:
             // If no issue is open, create a new issue,
             // else update the body of the existing issue.
             if (result.repository.issues.edges.length === 0) {
-              github.issues.create({
+              github.rest.issues.create({
                 owner: variables.owner,
                 repo: variables.name,
                 body: issue_body,
                 title: title,
                 labels: [variables.label]
               })
             } else {
-              github.issues.update({
+              github.rest.issues.update({
                 owner: variables.owner,
                 repo: variables.name,
                 issue_number: result.repository.issues.edges[0].node.number,

.pre-commit-config.yaml

Lines changed: 11 additions & 8 deletions
@@ -8,20 +8,22 @@ repos:
       - id: check-yaml
   # isort should run before black as black sometimes tweaks the isort output
   - repo: https://github.com/PyCQA/isort
-    rev: 5.9.3
+    rev: 5.10.1
     hooks:
       - id: isort
   # https://github.com/python/black#version-control-integration
   - repo: https://github.com/psf/black
-    rev: 21.7b0
+    rev: 21.11b1
     hooks:
       - id: black
+      - id: black-jupyter
   - repo: https://github.com/keewis/blackdoc
     rev: v0.3.4
     hooks:
       - id: blackdoc
-  - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.9.2
+        exclude: "generate_reductions.py"
+  - repo: https://github.com/PyCQA/flake8
+    rev: 4.0.1
     hooks:
       - id: flake8
   # - repo: https://github.com/Carreau/velin
@@ -30,20 +32,21 @@ repos:
   #     - id: velin
   #       args: ["--write", "--compact"]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.910
+    rev: v0.910-1
     hooks:
       - id: mypy
-        # Copied from setup.cfg
-        exclude: "properties|asv_bench"
+        # `properies` & `asv_bench` are copied from setup.cfg.
+        # `_typed_ops.py` is added since otherwise mypy will complain (but notably only in pre-commit)
+        exclude: "properties|asv_bench|_typed_ops.py"
         additional_dependencies: [
             # Type stubs
             types-python-dateutil,
             types-pkg_resources,
             types-PyYAML,
            types-pytz,
+            typing-extensions==3.10.0.0,
            # Dependencies that are typed
            numpy,
-            typing-extensions==3.10.0.0,
        ]
   # run this occasionally, ref discussion https://github.com/pydata/xarray/pull/3194
   # - repo: https://github.com/asottile/pyupgrade
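The bumped hooks can be exercised locally before pushing; a minimal sketch, assuming pre-commit itself is not yet installed:

pip install pre-commit        # skip if already available
pre-commit install            # register the git hook in this clone
pre-commit run --all-files    # run isort, black, blackdoc, flake8 and mypy across the repo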

CITATION.cff

Lines changed: 96 additions & 0 deletions
@@ -0,0 +1,96 @@
+cff-version: 1.2.0
+message: "If you use this software, please cite it as below."
+authors:
+- family-names: "Hoyer"
+  given-names: "Stephan"
+  orcid: "https://orcid.org/0000-0002-5207-0380"
+- family-names: "Roos"
+  given-names: "Maximilian"
+- family-names: "Joseph"
+  given-names: "Hamman"
+  orcid: "https://orcid.org/0000-0001-7479-8439"
+- family-names: "Magin"
+  given-names: "Justus"
+- family-names: "Cherian"
+  given-names: "Deepak"
+  orcid: "https://orcid.org/0000-0002-6861-8734"
+- family-names: "Fitzgerald"
+  given-names: "Clark"
+  orcid: "https://orcid.org/0000-0003-3446-6389"
+- family-names: "Hauser"
+  given-names: "Mathias"
+  orcid: "https://orcid.org/0000-0002-0057-4878"
+- family-names: "Fujii"
+  given-names: "Keisuke"
+  orcid: "https://orcid.org/0000-0003-0390-9984"
+- family-names: "Maussion"
+  given-names: "Fabien"
+  orcid: "https://orcid.org/0000-0002-3211-506X"
+- family-names: "Imperiale"
+  given-names: "Guido"
+- family-names: "Clark"
+  given-names: "Spencer"
+  orcid: "https://orcid.org/0000-0001-5595-7895"
+- family-names: "Kleeman"
+  given-names: "Alex"
+- family-names: "Nicholas"
+  given-names: "Thomas"
+  orcid: "https://orcid.org/0000-0002-2176-0530"
+- family-names: "Kluyver"
+  given-names: "Thomas"
+  orcid: "https://orcid.org/0000-0003-4020-6364"
+- family-names: "Westling"
+  given-names: "Jimmy"
+- family-names: "Munroe"
+  given-names: "James"
+  orcid: "https://orcid.org/0000-0001-9098-6309"
+- family-names: "Amici"
+  given-names: "Alessandro"
+  orcid: "https://orcid.org/0000-0002-1778-4505"
+- family-names: "Barghini"
+  given-names: "Aureliana"
+- family-names: "Banihirwe"
+  given-names: "Anderson"
+  orcid: "https://orcid.org/0000-0001-6583-571X"
+- family-names: "Bell"
+  given-names: "Ray"
+  orcid: "https://orcid.org/0000-0003-2623-0587"
+- family-names: "Hatfield-Dodds"
+  given-names: "Zac"
+  orcid: "https://orcid.org/0000-0002-8646-8362"
+- family-names: "Abernathey"
+  given-names: "Ryan"
+  orcid: "https://orcid.org/0000-0001-5999-4917"
+- family-names: "Bovy"
+  given-names: "Benoît"
+- family-names: "Omotani"
+  given-names: "John"
+  orcid: "https://orcid.org/0000-0002-3156-8227"
+- family-names: "Mühlbauer"
+  given-names: "Kai"
+  orcid: "https://orcid.org/0000-0001-6599-1034"
+- family-names: "Roszko"
+  given-names: "Maximilian K."
+  orcid: "https://orcid.org/0000-0001-9424-2526"
+- family-names: "Wolfram"
+  given-names: "Phillip J."
+  orcid: "https://orcid.org/0000-0001-5971-4241"
+title: "xarray"
+doi: 10.5281/zenodo.598201
+url: "https://github.com/pydata/xarray"
+preferred-citation:
+  type: article
+  authors:
+  - family-names: "Hoyer"
+    given-names: "Stephan"
+    orcid: "https://orcid.org/0000-0002-5207-0380"
+  - family-names: "Joseph"
+    given-names: "Hamman"
+    orcid: "https://orcid.org/0000-0001-7479-8439"
+  doi: "10.5334/jors.148"
+  journal: "Journal of Open Research Software"
+  month: 4
+  title: "xarray: N-D labeled Arrays and Datasets in Python"
+  volume: 5
+  issue: 1
+  year: 2017
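With CITATION.cff in the repository root, GitHub can offer a "Cite this repository" box built from this metadata. The file can also be checked locally; a sketch using the third-party cffconvert tool (an assumption here, not something this commit adds):

pip install cffconvert
cffconvert --validate    # check CITATION.cff against the CFF 1.2.0 schema
cffconvert -f bibtex     # optionally emit a BibTeX entry from the same metadata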
