forked from pydata/xarray

Commit

Merge remote-tracking branch 'upstream/main' into sparse-unstack
* upstream/main: (39 commits)
  Fixed a mispelling of dimension in dataarray documentation for from_dict (pydata#6020)
  [pre-commit.ci] pre-commit autoupdate (pydata#6014)
  [pre-commit.ci] pre-commit autoupdate (pydata#5990)
  Use set_options for asv bottleneck tests (pydata#5986)
  Fix module name retrieval in `backend.plugins.remove_duplicates()`, plugin tests (pydata#5959)
  Check for py version instead of try/except when importing entry_points (pydata#5988)
  Add "see also" in to_dataframe docs (pydata#5978)
  Alternate method using inline css to hide regular html output in an untrusted notebook (pydata#5880)
  Fix mypy issue with entry_points (pydata#5979)
  Remove pre-commit auto update (pydata#5958)
  Do not change coordinate inplace when throwing error (pydata#5957)
  Create CITATION.cff (pydata#5956)
  Add groupby & resample benchmarks (pydata#5922)
  Fix plot.line crash for data of shape (1, N) in _title_for_slice on format_item (pydata#5948)
  Disable unit test comments (pydata#5946)
  Publish test results from workflow_run only (pydata#5947)
  Generator for groupby reductions (pydata#5871)
  whats-new dev
  whats-new for 0.20.1 (pydata#5943)
  Docs: fix URL for PTSA (pydata#5935)
  ...
dcherian committed Nov 24, 2021
2 parents 267a14f + 5db4046 commit d381a62
Showing 67 changed files with 5,416 additions and 918 deletions.
44 changes: 0 additions & 44 deletions .github/workflows/ci-pre-commit-autoupdate.yaml

This file was deleted.

19 changes: 6 additions & 13 deletions .github/workflows/ci.yaml
@@ -108,19 +108,12 @@ jobs:
           name: codecov-umbrella
           fail_ci_if_error: false
 
-  publish-test-results:
-    needs: test
+  event_file:
+    name: "Event File"
     runs-on: ubuntu-latest
-    # the build-and-test job might be skipped, we don't need to run this job then
-    if: success() || failure()
-
     steps:
-      - name: Download Artifacts
-        uses: actions/download-artifact@v2
-        with:
-          path: test-results
-
-      - name: Publish Unit Test Results
-        uses: EnricoMi/publish-unit-test-result-action@v1
+      - name: Upload
+        uses: actions/upload-artifact@v2
         with:
-          files: test-results/**/*.xml
+          name: Event File
+          path: ${{ github.event_path }}
18 changes: 7 additions & 11 deletions .github/workflows/publish-test-results.yaml
@@ -1,4 +1,4 @@
-# Copied from https://github.com/EnricoMi/publish-unit-test-result-action/blob/v1.18/README.md#support-fork-repositories-and-dependabot-branches
+# Copied from https://github.com/EnricoMi/publish-unit-test-result-action/blob/v1.23/README.md#support-fork-repositories-and-dependabot-branches
 
 name: Publish test results
 
@@ -12,11 +12,7 @@ jobs:
   publish-test-results:
     name: Publish test results
     runs-on: ubuntu-latest
-    if: >
-      github.event.workflow_run.conclusion != 'skipped' && (
-        github.event.sender.login == 'dependabot[bot]' ||
-        github.event.workflow_run.head_repository.full_name != github.repository
-      )
+    if: github.event.workflow_run.conclusion != 'skipped'
 
     steps:
       - name: Download and extract artifacts
@@ -26,13 +22,10 @@ jobs:
           mkdir artifacts && cd artifacts
           artifacts_url=${{ github.event.workflow_run.artifacts_url }}
-          artifacts=$(gh api $artifacts_url -q '.artifacts[] | {name: .name, url: .archive_download_url}')
-          IFS=$'\n'
-          for artifact in $artifacts
+          gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
           do
-            name=$(jq -r .name <<<$artifact)
-            url=$(jq -r .url <<<$artifact)
+            IFS=$'\t' read name url <<< "$artifact"
             gh api $url > "$name.zip"
             unzip -d "$name" "$name.zip"
           done
@@ -41,4 +34,7 @@ jobs:
         uses: EnricoMi/publish-unit-test-result-action@v1
         with:
           commit: ${{ github.event.workflow_run.head_sha }}
+          event_file: artifacts/Event File/event.json
+          event_name: ${{ github.event.workflow_run.event }}
           files: "artifacts/**/*.xml"
+          comment_mode: off
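
The rewritten loop above streams artifact names and download URLs as tab-separated values straight from gh api, dropping the per-item jq calls. As a rough Python equivalent of the same logic — an illustrative sketch only, since the workflow itself shells out to gh api; the GITHUB_TOKEN and ARTIFACTS_URL environment variables are assumptions of this sketch:

    # Illustrative Python sketch of the artifact-download loop above.
    # Assumes GITHUB_TOKEN (with access to the repo's Actions artifacts) and
    # ARTIFACTS_URL (github.event.workflow_run.artifacts_url) are set.
    import io
    import os
    import zipfile

    import requests

    headers = {"Authorization": f"token {os.environ['GITHUB_TOKEN']}"}
    resp = requests.get(os.environ["ARTIFACTS_URL"], headers=headers)
    resp.raise_for_status()

    for artifact in resp.json()["artifacts"]:
        # Download each artifact's zip archive and extract it into a
        # directory named after the artifact, mirroring `unzip -d "$name"`.
        archive = requests.get(artifact["archive_download_url"], headers=headers)
        archive.raise_for_status()
        with zipfile.ZipFile(io.BytesIO(archive.content)) as zf:
            zf.extractall(artifact["name"])
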
6 changes: 3 additions & 3 deletions .github/workflows/upstream-dev-ci.yaml
@@ -122,7 +122,7 @@ jobs:
           shopt -s globstar
           python .github/workflows/parse_logs.py logs/**/*-log
       - name: Report failures
-        uses: actions/github-script@v4.1
+        uses: actions/github-script@v5
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
@@ -158,15 +158,15 @@ jobs:
             // If no issue is open, create a new issue,
             // else update the body of the existing issue.
             if (result.repository.issues.edges.length === 0) {
-              github.issues.create({
+              github.rest.issues.create({
                 owner: variables.owner,
                 repo: variables.name,
                 body: issue_body,
                 title: title,
                 labels: [variables.label]
               })
             } else {
-              github.issues.update({
+              github.rest.issues.update({
                 owner: variables.owner,
                 repo: variables.name,
                 issue_number: result.repository.issues.edges[0].node.number,
9 changes: 5 additions & 4 deletions .pre-commit-config.yaml
@@ -8,21 +8,22 @@ repos:
       - id: check-yaml
   # isort should run before black as black sometimes tweaks the isort output
   - repo: https://github.com/PyCQA/isort
-    rev: 5.9.3
+    rev: 5.10.1
     hooks:
       - id: isort
   # https://github.com/python/black#version-control-integration
   - repo: https://github.com/psf/black
-    rev: 21.9b0
+    rev: 21.11b1
     hooks:
       - id: black
+      - id: black-jupyter
   - repo: https://github.com/keewis/blackdoc
     rev: v0.3.4
     hooks:
       - id: blackdoc
-  - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.9.2
+        exclude: "generate_reductions.py"
+  - repo: https://github.com/PyCQA/flake8
+    rev: 4.0.1
     hooks:
       - id: flake8
   # - repo: https://github.com/Carreau/velin
96 changes: 96 additions & 0 deletions CITATION.cff
@@ -0,0 +1,96 @@
cff-version: 1.2.0
message: "If you use this software, please cite it as below."
authors:
- family-names: "Hoyer"
given-names: "Stephan"
orcid: "https://orcid.org/0000-0002-5207-0380"
- family-names: "Roos"
given-names: "Maximilian"
- family-names: "Joseph"
given-names: "Hamman"
orcid: "https://orcid.org/0000-0001-7479-8439"
- family-names: "Magin"
given-names: "Justus"
- family-names: "Cherian"
given-names: "Deepak"
orcid: "https://orcid.org/0000-0002-6861-8734"
- family-names: "Fitzgerald"
given-names: "Clark"
orcid: "https://orcid.org/0000-0003-3446-6389"
- family-names: "Hauser"
given-names: "Mathias"
orcid: "https://orcid.org/0000-0002-0057-4878"
- family-names: "Fujii"
given-names: "Keisuke"
orcid: "https://orcid.org/0000-0003-0390-9984"
- family-names: "Maussion"
given-names: "Fabien"
orcid: "https://orcid.org/0000-0002-3211-506X"
- family-names: "Imperiale"
given-names: "Guido"
- family-names: "Clark"
given-names: "Spencer"
orcid: "https://orcid.org/0000-0001-5595-7895"
- family-names: "Kleeman"
given-names: "Alex"
- family-names: "Nicholas"
given-names: "Thomas"
orcid: "https://orcid.org/0000-0002-2176-0530"
- family-names: "Kluyver"
given-names: "Thomas"
orcid: "https://orcid.org/0000-0003-4020-6364"
- family-names: "Westling"
given-names: "Jimmy"
- family-names: "Munroe"
given-names: "James"
orcid: "https://orcid.org/0000-0001-9098-6309"
- family-names: "Amici"
given-names: "Alessandro"
orcid: "https://orcid.org/0000-0002-1778-4505"
- family-names: "Barghini"
given-names: "Aureliana"
- family-names: "Banihirwe"
given-names: "Anderson"
orcid: "https://orcid.org/0000-0001-6583-571X"
- family-names: "Bell"
given-names: "Ray"
orcid: "https://orcid.org/0000-0003-2623-0587"
- family-names: "Hatfield-Dodds"
given-names: "Zac"
orcid: "https://orcid.org/0000-0002-8646-8362"
- family-names: "Abernathey"
given-names: "Ryan"
orcid: "https://orcid.org/0000-0001-5999-4917"
- family-names: "Bovy"
given-names: "Benoît"
- family-names: "Omotani"
given-names: "John"
orcid: "https://orcid.org/0000-0002-3156-8227"
- family-names: "Mühlbauer"
given-names: "Kai"
orcid: "https://orcid.org/0000-0001-6599-1034"
- family-names: "Roszko"
given-names: "Maximilian K."
orcid: "https://orcid.org/0000-0001-9424-2526"
- family-names: "Wolfram"
given-names: "Phillip J."
orcid: "https://orcid.org/0000-0001-5971-4241"
title: "xarray"
doi: 10.5281/zenodo.598201
url: "https://github.com/pydata/xarray"
preferred-citation:
type: article
authors:
- family-names: "Hoyer"
given-names: "Stephan"
orcid: "https://orcid.org/0000-0002-5207-0380"
- family-names: "Joseph"
given-names: "Hamman"
orcid: "https://orcid.org/0000-0001-7479-8439"
doi: "10.5334/jors.148"
journal: "Journal of Open Research Software"
month: 4
title: "xarray: N-D labeled Arrays and Datasets in Python"
volume: 5
issue: 1
year: 2017
2 changes: 1 addition & 1 deletion asv_bench/asv.conf.json
@@ -62,7 +62,7 @@
         "pandas": [""],
         "netcdf4": [""],
         "scipy": [""],
-        "bottleneck": ["", null],
+        "bottleneck": [""],
         "dask": [""],
         "distributed": [""],
         "sparse": [""]
8 changes: 0 additions & 8 deletions asv_bench/benchmarks/dataarray_missing.py
@@ -16,13 +16,6 @@ def make_bench_data(shape, frac_nan, chunks):
     return da
 
 
-def requires_bottleneck():
-    try:
-        import bottleneck  # noqa: F401
-    except ImportError:
-        raise NotImplementedError()
-
-
 class DataArrayMissingInterpolateNA:
     def setup(self, shape, chunks, limit):
         if chunks is not None:
@@ -46,7 +39,6 @@ def time_interpolate_na(self, shape, chunks, limit):
 
 class DataArrayMissingBottleneck:
     def setup(self, shape, chunks, limit):
-        requires_bottleneck()
         if chunks is not None:
             requires_dask()
         self.da = make_bench_data(shape, 0.1, chunks)
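
The deleted requires_bottleneck() helper ties into the "Use set_options for asv bottleneck tests" commit in this merge: rather than skipping benchmarks when bottleneck is missing, xarray's use_bottleneck option toggles the accelerated path. A minimal sketch of that pattern — the array and loop here are illustrative, not the actual benchmark code:

    # Minimal sketch: toggling bottleneck through xarray's options
    # (use_bottleneck was added in xarray 0.20; the example data is made up).
    import numpy as np
    import xarray as xr

    da = xr.DataArray(np.random.randn(365, 100), dims=("time", "x"))
    da = da.where(da > -1)  # punch NaN holes to interpolate over

    for use_bottleneck in (True, False):
        with xr.set_options(use_bottleneck=use_bottleneck):
            # With use_bottleneck=False, xarray avoids bottleneck-accelerated
            # code paths in favor of pure-numpy ones.
            da.interpolate_na(dim="time", method="linear")
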