Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

MAINT: Remove datalad #867

Merged
merged 1 commit into from
Mar 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -286,7 +286,6 @@ jobs:
keys:
- data-cache-eeg_matchingpennies-1
- bash_env
- gitconfig # email address is needed for datalad
- run:
name: Get eeg_matchingpennies
command: |
Expand All @@ -306,7 +305,6 @@ jobs:
keys:
- data-cache-MNE-phantom-KIT-data-1
- bash_env
- gitconfig # email address is needed for datalad
- run:
name: Get MNE-phantom-KIT-data
command: |
Expand Down
4 changes: 1 addition & 3 deletions .circleci/setup_bash.sh
Original file line number Diff line number Diff line change
Expand Up @@ -33,15 +33,13 @@ fi

# Set up image
sudo ln -s /usr/lib/x86_64-linux-gnu/libxcb-util.so.0 /usr/lib/x86_64-linux-gnu/libxcb-util.so.1
wget -q -O- http://neuro.debian.net/lists/focal.us-tn.libre | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list
sudo apt-key adv --recv-keys --keyserver hkps://keyserver.ubuntu.com 0xA5D32F012649A5A9
echo "export RUN_TESTS=\".circleci/run_dataset_and_copy_files.sh\"" >> "$BASH_ENV"
echo "export DOWNLOAD_DATA=\"coverage run -m mne_bids_pipeline._download\"" >> "$BASH_ENV"

# Similar CircleCI setup to mne-python (Xvfb, venv, minimal commands, env vars)
wget -q https://raw.githubusercontent.com/mne-tools/mne-python/main/tools/setup_xvfb.sh
bash setup_xvfb.sh
sudo apt install -qq tcsh git-annex-standalone python3.10-venv python3-venv libxft2
sudo apt install -qq tcsh python3.10-venv python3-venv libxft2
python3.10 -m venv ~/python_env
wget -q https://raw.githubusercontent.com/mne-tools/mne-python/main/tools/get_minimal_commands.sh
source get_minimal_commands.sh
Expand Down
8 changes: 1 addition & 7 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,17 +11,11 @@ Once this is done, you should be able to run this in a terminal:

`$ python -c "import mne; mne.sys_info()"`

You can then install the following additional packages via `pip`. Note that
You can then install the following additional package via `pip`. Note that
the URL points to the bleeding edge version of `mne_bids`:

`$ pip install datalad`
`$ pip install https://github.com/mne-tools/mne-bids/zipball/main`

To get the test data, you need to install `git-annex` on your system. If you
installed MNE-Python via `conda`, you can simply call:

`conda install -c conda-forge git-annex`

Now, get the pipeline through git:

`$ git clone https://github.com/mne-tools/mne-bids-pipeline.git`
Expand Down
2 changes: 0 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,6 @@ doc:

check:
which python
git-annex version
datalad --version
openneuro-py --version
mri_convert --version
mne_bids --version
Expand Down
13 changes: 3 additions & 10 deletions docs/source/examples/gen_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,21 +203,14 @@ def _gen_demonstrated_funcs(example_config_path: Path) -> dict:
f"{fname.name} :fontawesome-solid-square-poll-vertical:</a>\n\n"
)

assert (
sum(key in options for key in ("openneuro", "git", "web", "datalad", "mne"))
== 1
)
assert sum(key in options for key in ("openneuro", "web", "mne")) == 1
if "openneuro" in options:
url = f'https://openneuro.org/datasets/{options["openneuro"]}'
elif "git" in options:
url = options["git"]
elif "web" in options:
url = options["web"]
elif "mne" in options:
url = f"https://mne.tools/dev/generated/mne.datasets.{options['mne']}.data_path.html" # noqa: E501
else:
assert "datalad" in options # guaranteed above
url = ""
assert "mne" in options
url = f"https://mne.tools/dev/generated/mne.datasets.{options['mne']}.data_path.html" # noqa: E501

source_str = (
f"## Dataset source\n\nThis dataset was acquired from " f"[{url}]({url})\n"
Expand Down
1 change: 1 addition & 0 deletions docs/source/v1.6.md.inc
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
### :medical_symbol: Code health

- The package build backend has been switched from `setuptools` to `hatchling`. (#825 by @hoechenberger)
- Removed dependencies on `datalad` and `git-annex` for testing (#867 by @larsoner)
- Code formatting now uses `ruff format` instead of `black` (#834, #838 by @larsoner)
- Code caching is now tested using GitHub Actions (#836 by @larsoner)
- Steps in the documentation are now automatically parsed into flowcharts (#859 by @larsoner)
Expand Down
33 changes: 1 addition & 32 deletions mne_bids_pipeline/_download.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,29 +9,6 @@
DEFAULT_DATA_DIR = Path("~/mne_data").expanduser()


def _download_via_datalad(*, ds_name: str, ds_path: Path):
import datalad.api as dl

print(f'datalad installing "{ds_name}"')
options = DATASET_OPTIONS[ds_name]
git_url = options["git"]
assert "exclude" not in options
assert "hash" not in options
dataset = dl.install(path=ds_path, source=git_url)

# XXX: git-annex bug:
# https://github.com/datalad/datalad/issues/3583
# if datalad fails, use "get" twice, or set `n_jobs=1`
if ds_name == "ds003104":
n_jobs = 16
else:
n_jobs = 1

for to_get in DATASET_OPTIONS[ds_name].get("include", []):
print(f'datalad get data "{to_get}" for "{ds_name}"')
dataset.get(to_get, jobs=n_jobs)


def _download_via_openneuro(*, ds_name: str, ds_path: Path):
import openneuro

Expand Down Expand Up @@ -88,20 +65,12 @@ def _download_via_mne(*, ds_name: str, ds_path: Path):
def _download(*, ds_name: str, ds_path: Path):
options = DATASET_OPTIONS[ds_name]
openneuro_name = options.get("openneuro", "")
git_url = options.get("git", "")
osf_node = options.get("osf", "")
web_url = options.get("web", "")
mne_mod = options.get("mne", "")
assert (
sum(bool(x) for x in (openneuro_name, git_url, osf_node, web_url, mne_mod)) == 1
)
assert sum(bool(x) for x in (openneuro_name, web_url, mne_mod)) == 1

if openneuro_name:
download_func = _download_via_openneuro
elif git_url:
download_func = _download_via_datalad
elif osf_node:
raise RuntimeError("OSF downloads are currently not supported.")
elif mne_mod:
download_func = _download_via_mne
else:
Expand Down
10 changes: 0 additions & 10 deletions mne_bids_pipeline/tests/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,6 @@ class DATASET_OPTIONS_T(TypedDict, total=False):
"hash": "sha256:ddc94a7c9ba1922637f2770592dd51c019d341bf6bc8558e663e1979a4cb002f", # noqa: E501
},
"eeg_matchingpennies": {
# This dataset started out on osf.io as dataset https://osf.io/cj2dr
# then moved to g-node.org. As of 2023/02/28 when we download it via
# datalad it's too slow (~200 kB/sec!) and times out at the end:
#
# "git": "https://gin.g-node.org/sappelhoff/eeg_matchingpennies",
# "web": "",
# "include": ["sub-05"],
#
# So now we mirror this datalad-fetched git repo back on osf.io!
# original dataset: "osf": "cj2dr"
"web": "https://osf.io/download/8rbfk?version=1",
"hash": "sha256:06bfbe52c50b9343b6b8d2a5de3dd33e66ad9303f7f6bfbe6868c3c7c375fafd", # noqa: E501
},
Expand Down
1 change: 0 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,6 @@ tests = [
"pytest-cov",
"pooch",
"psutil",
"datalad",
"ruff",
"mkdocs",
"mkdocs-material >= 9.0.4",
Expand Down
Loading