Commit
generate conda env yml and pip requirements files automatically from pyproject.toml using script
gboeing committed Dec 11, 2024
1 parent 4ac5752 commit d447ee6
Showing 11 changed files with 319 additions and 126 deletions.
2 changes: 1 addition & 1 deletion docs/.readthedocs.yaml
@@ -9,7 +9,7 @@ formats: all

python:
install:
- requirements: docs/requirements.txt
- requirements: docs/requirements-rtd.txt

sphinx:
configuration: docs/source/conf.py
6 changes: 6 additions & 0 deletions docs/requirements-rtd.txt
@@ -0,0 +1,6 @@
# Do not edit this file. It is automatically generated by the script
# environments/make-env-files.py using the environment definition data
# in environments/environments.json and the requirements in pyproject.toml.
furo
sphinx-autodoc-typehints
sphinx>=7
3 changes: 0 additions & 3 deletions docs/requirements.txt

This file was deleted.

2 changes: 1 addition & 1 deletion docs/source/conf.py
@@ -63,7 +63,7 @@
html_static_path: list[str] = []
html_theme = "furo"
language = "en"
needs_sphinx = "7" # same value as pinned in /docs/requirements.txt
needs_sphinx = "7" # match version from pyproject.toml optional-dependencies
root_doc = "index"
source_suffix = ".rst"
templates_path: list[str] = []
51 changes: 51 additions & 0 deletions environments/environments.json
@@ -0,0 +1,51 @@
{
    "env-test-minimum-deps": {
        "filepath": "./environments/tests/env-test-minimum-deps.yml",
        "force_pin": true,
        "which_optionals": [
            "entropy",
            "neighbors",
            "raster",
            "visualization",
            "tests"
        ]
    },
    "env-ci": {
        "filepath": "./environments/tests/env-ci.yml"
    },
    "requirements-test-latest-deps": {
        "filepath": "./environments/tests/requirements-test-latest-deps.txt",
        "needs_python": false
    },
    "requirements-rtd": {
        "filepath": "./docs/requirements-rtd.txt",
        "needs_python": false,
        "needs_dependencies": false,
        "which_optionals": [
            "docs"
        ]
    },
    "requirements-env": {
        "filepath": "./environments/requirements.txt",
        "needs_python": false,
        "extras": [
            "bottleneck",
            "cartopy",
            "conda-smithy",
            "folium",
            "hatch",
            "jupyterlab",
            "nbdime",
            "nbqa",
            "numexpr",
            "pillow",
            "pip",
            "pysal>24",
            "python-igraph",
            "seaborn",
            "statsmodels",
            "twine",
            "validate-pyproject"
        ]
    }
}
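For context, each top-level key names one generated file, and any settings omitted from an entry fall back to the ENV_DEFAULTS dictionary in environments/make-env-files.py below. As a minimal sketch (the "env-example" name and path are hypothetical, not part of this commit), a new entry would only need a filepath, with every other option inheriting its default:

    "env-example": {
        "filepath": "./environments/tests/env-example.yml"
    }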
167 changes: 167 additions & 0 deletions environments/make-env-files.py
@@ -0,0 +1,167 @@
# noqa: INP001
"""Make conda env.yml and pip requirements.txt files from environments.json data."""

from __future__ import annotations

import argparse
import itertools
import json
from pathlib import Path

import tomllib
from packaging.requirements import Requirement

# path to package's pyproject and the config json file
pyproject_path = "./pyproject.toml"
environments_config_path = "./environments/environments.json"

# what channels to specify in conda env yml files
CHANNELS = ["conda-forge"]

# if any envs in config json are missing these keys, use these default values
ENV_DEFAULTS = {
    "filepath": "./requirements-TEST.txt",
    "needs_python": True,
    "needs_dependencies": True,
    "needs_optionals": True,
    "which_optionals": None,
    "force_pin": False,
    "extras": None,
}

HEADER = (
    "# Do not edit this file. It is automatically generated by the script\n"
    "# environments/make-env-files.py using the environment definition data\n"
    "# in environments/environments.json and the requirements in pyproject.toml.\n"
)


def extract_optional_deps(which: list[str] | None = None) -> list[Requirement]:
    """
    Extract a list of the optional dependencies/versions from pyproject.toml.

    Parameters
    ----------
    which
        Which optional dependencies to extract. If None, extract them all.

    Returns
    -------
    optional_deps
    """
    opts = pyproject["project"]["optional-dependencies"]
    opts = [v for k, v in opts.items() if k in which] if which is not None else opts.values()
    return list({Requirement(o) for o in itertools.chain.from_iterable(opts)})


def make_requirement(
    requirement: Requirement,
    force_pin: bool = False,  # noqa: FBT001,FBT002
    is_conda: bool = True,  # noqa: FBT001,FBT002
) -> str:
    """
    Make a requirement specification string.

    The string result comprises the requirement's name and its specifier(s).

    Parameters
    ----------
    requirement
        A requirement object
    force_pin
        If True, pin requirement to version rather than using existing
        specifier. Allows you to convert minimum versions to pinned versions.
    is_conda
        If True and if `force_pin` is True, format the requirement string to
        end with ".*" for conda environment file pinning format compatibility.

    Returns
    -------
    requirement_str
    """
    specifiers = list(requirement.specifier)
    if force_pin and len(specifiers) == 1:
        spec = f"{requirement.name}=={specifiers[0].version}"
        if is_conda and not spec.endswith(".*"):
            spec += ".*"
        return spec
    return str(requirement)


def make_file(env_name: str) -> None:
    """
    Write a conda environment yaml file or pip requirements.txt file.

    Parameters
    ----------
    env_name
        An environment name among the keys of environments.json.

    Returns
    -------
    None
    """
    # fill in any missing configurations with default values
    env = envs[env_name]
    env = {k: env.get(k, v) for k, v in ENV_DEFAULTS.items()}

    # it's a conda env file if it ends with ".yml", otherwise it's a pip
    # requirements.txt file
    is_conda = env["filepath"].endswith(".yml")

    # determine which dependencies to add based on the configuration
    depends_on = []
    if env["needs_python"]:
        python_dep = Requirement(f"python{pyproject['project']['requires-python']}")
        depends_on.append(python_dep)
    if env["needs_dependencies"]:
        dependencies = [Requirement(d) for d in pyproject["project"]["dependencies"]]
        depends_on.extend(dependencies)
    if env["needs_optionals"]:
        optionals = extract_optional_deps(which=env["which_optionals"])
        depends_on.extend(optionals)

    # make the list of requirements
    requirements = sorted(
        make_requirement(dep, force_pin=env["force_pin"], is_conda=is_conda) for dep in depends_on
    )

    # add any extra requirements if provided in the configuration
    if env["extras"] is not None:
        requirements = sorted(requirements + env["extras"])

    # write the conda env yml or pip requirements.txt file to disk
    with Path(env["filepath"]).open("w") as f:
        if is_conda:
            data = {"name": env_name, "channels": CHANNELS, "dependencies": requirements}
            text = ""
            for k, v in data.items():
                if isinstance(v, list):
                    text += k + ":\n- " + "\n- ".join(v) + "\n"
                elif isinstance(v, str):
                    text += k + ": " + v + "\n"
            f.writelines(HEADER + text)
        else:
            f.writelines(HEADER + "\n".join(requirements) + "\n")

    print(f"Wrote {len(requirements)} requirements to {env['filepath']!r}")  # noqa: T201


if __name__ == "__main__":
    # load the pyproject.toml and the environments.json config files
    pyproject = tomllib.loads(Path(pyproject_path).read_text())
    with Path(environments_config_path).open() as f:
        envs = json.load(f)

    # parse any command-line arguments passed by the user
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-n", dest="env_name", type=str)
    args = arg_parser.parse_args()

    if args.env_name is not None:
        # if user passed -n command line argument, generate only that file
        make_file(args.env_name)
    else:
        # otherwise, make all environment files
        for env_name in envs:
            make_file(env_name)
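For reference, a sketch of how the script above might be invoked, inferred from its argparse setup rather than documented in this commit: it would be run from the repository root (where the hard-coded ./pyproject.toml and ./environments/environments.json paths resolve), assuming Python 3.11+ for tomllib and the packaging library installed.

    python ./environments/make-env-files.py            # regenerate every file defined in environments.json
    python ./environments/make-env-files.py -n env-ci   # regenerate only environments/tests/env-ci.yml

The -n flag maps to a single key in environments.json, so one generated file can be refreshed without rewriting the others.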
70 changes: 28 additions & 42 deletions environments/requirements.txt
@@ -1,53 +1,39 @@
# required dependencies
geopandas
networkx
numpy
pandas
requests
shapely

# optional dependencies
matplotlib
rasterio
rio-vrt
scipy
scikit-learn

# helpful extras
# Do not edit this file. It is automatically generated by the script
# environments/make-env-files.py using the environment definition data
# in environments/environments.json and the requirements in pyproject.toml.
bottleneck
cartopy
conda-smithy
folium
jupyterlab
numexpr
pillow
pysal
python-igraph
seaborn
statsmodels

# docs
furo
sphinx
sphinx-autodoc-typehints

# packaging
conda-smithy
geopandas>=1.0
hatch
pip
twine
validate-pyproject

# typing
mypy
pandas-stubs
typeguard
types-requests

# linting/testing
jupyterlab
lxml
matplotlib>=3.5
nbdime
nbqa
networkx>=2.5
numexpr
numpy>=1.22
pandas>=1.4
pillow
pip
pre-commit
pysal>24
pytest
pytest-cov
ruff
python-igraph
rasterio>=1.3
requests>=2.27
rio-vrt>=0.3
scikit-learn>=0.23
scipy>=1.5
seaborn
shapely>=2.0
sphinx-autodoc-typehints
sphinx>=7
statsmodels
twine
typeguard
validate-pyproject
53 changes: 24 additions & 29 deletions environments/tests/env-ci.yml
@@ -1,32 +1,27 @@
# Do not edit this file. It is automatically generated by the script
# environments/make-env-files.py using the environment definition data
# in environments/environments.json and the requirements in pyproject.toml.
name: env-ci

channels:
- conda-forge

- conda-forge
dependencies:
# requirements
- geopandas
- networkx
- numpy
- pandas
- requests
- shapely

# extras
- matplotlib
- rasterio
- rio-vrt
- scikit-learn
- scipy

# linting/testing
- lxml
- pre-commit
- pytest
- pytest-cov
- typeguard

# docs
- furo
- sphinx=7
- sphinx-autodoc-typehints
- furo
- geopandas>=1.0
- lxml
- matplotlib>=3.5
- networkx>=2.5
- numpy>=1.22
- pandas>=1.4
- pre-commit
- pytest
- pytest-cov
- python>=3.9
- rasterio>=1.3
- requests>=2.27
- rio-vrt>=0.3
- scikit-learn>=0.23
- scipy>=1.5
- shapely>=2.0
- sphinx-autodoc-typehints
- sphinx>=7
- typeguard