diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml index a03354e9..f0819b15 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle.yml @@ -8,15 +8,19 @@ jobs: flake8: runs-on: ubuntu-latest steps: - - name: Checkout Repository - uses: actions/checkout@v2 - - name: Install Python 3.x - uses: actions/setup-python@v2 - with: - python-version: 3.x - - name: Install Dependencies - run: | - pip install tox - - name: Check Code Style - run: | - tox -e codestyle \ No newline at end of file + - name: Checkout repository + uses: actions/checkout@v2 + - name: Install Conda w/ Python 3.10 + uses: conda-incubator/setup-miniconda@v2 + with: + auto-activate-base: false + python-version: '3.10' + channels: conda-forge + - name: Install Dependencies + shell: bash -el {0} + run: | + pip install tox-conda flake8 + - name: Check Code Style + shell: bash -el {0} + run: | + tox -e codestyle \ No newline at end of file diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 007c4741..c7006cc8 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -3,66 +3,46 @@ name: Testing on: [push, pull_request] jobs: - build: - + test: + name: ${{ matrix.name }} runs-on: ${{ matrix.os }} - defaults: - run: - shell: bash -l {0} strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest] - python-version: [3.8, 3.9] - steps: - - uses: actions/checkout@v2 - - - uses: conda-incubator/setup-miniconda@v2 - with: - python-version: ${{ matrix.py }} - auto-update-conda: true - channels: conda-forge,defaults - channel-priority: strict - show-channel-urls: true - - - name: install reqs - run: | - conda install pip compilers pytest pytest-cov pyccl cython - pip install cobaya + include: - - name: install extra reqs - run: | - pip install cosmopower + - name: latest supported versions + os: ubuntu-latest + python-version: '3.10' + toxenv: py310-test-all-latest-cov - env: - MATRIX_OS: ${{ matrix.os 
}} + - name: oldest supported versions + os: ubuntu-latest + python-version: '3.7' + toxenv: py37-test-oldest - - name: Install classy - run: | - if [[ ${MATRIX_OS} == "macos-latest" ]]; then - . ci_scripts/install_class_osx.sh - else - . ci_scripts/install_class_linux.sh - fi - python -c "import classy; print(classy)" - env: - MATRIX_OS: ${{ matrix.os }} + - name: macOS 11 + os: macos-11 + python-version: '3.10' + toxenv: py310-test-latest - - name: install - run: | - pip install . - - - name: install test data - run: | - export COBAYA_PACKAGES_PATH="../packages" - cobaya-install planck_2018_highl_plik.TTTEEE_lite_native - - - name: Unit tests - run: | - pytest -vv soliket --cov=soliket --cov-report=xml:coverage.xml --cov-config=setup.cfg - env: - PYTEST_ADDOPTS: "--color=yes" - - - name: Report Coverage (codecov) - if: matrix.os == 'ubuntu-latest' - uses: codecov/codecov-action@v2 + steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Install Conda w/ Python ${{ matrix.python-version }} + uses: conda-incubator/setup-miniconda@v2 + with: + auto-activate-base: false + python-version: ${{ matrix.python-version }} + channels: conda-forge + - name: Install Dependencies + shell: bash -el {0} + run: | + pip install tox-conda + - name: Run Tests + shell: bash -el {0} + run: | + tox -e ${{ matrix.toxenv }} ${{ matrix.toxargs }} -- ${{ matrix.toxposargs }} + - if: contains(matrix.toxenv, '-cov') + name: Report Coverage + uses: codecov/codecov-action@v3 diff --git a/INSTALL.rst b/INSTALL.rst index 0e629282..ed4a12e4 100644 --- a/INSTALL.rst +++ b/INSTALL.rst @@ -1,6 +1,13 @@ INSTALL AND RUN COBAYA+SOLIKET ============================== +Using conda +----------- + +We have provided a conda environment defined in `soliket-tests.yml `_ which provides easy set up of a virtual environment with all the dependencies installed in order to run SOLikeT and its tests on multiple platforms (explicitly tested for ubuntu and MacOS-11). 
+ +If you wish to install it using your own system tools some useful information is provided below. + On your own laptop/virtual machine ---------------------------------- diff --git a/README.md b/README.md index bd5428b9..b1af55f5 100644 --- a/README.md +++ b/README.md @@ -7,31 +7,66 @@ A centralized package for likelihood and theory implementations for SO. ## Installation -For a set of detailed instructions, please see [here](INSTALL.rst). +For a set of detailed instructions for different machines (e.g. NERSC), please see [here](INSTALL.rst). + +To install SOLikeT we expect that you have the following system-level tools: + - python>=3.7,<3.11 + - pip + - compilers (c, cxx, fortran) + - cmake + - swig + - gsl + - fftw + - cython + - mpi4py + +A convenient way to obtain these things (along with the python dependencies listed in requirements.txt) is through using the conda environment in soliket-tests.yml. This conda environment is the one we use for running tests. + +You can then install SOLikeT in the usual way with pip: ``` git clone https://github.com/simonsobs/soliket cd soliket pip install -e . ``` -You will also need to either run + +## Running Tests + +Running tests + +There are (at least) two reasons you might want to run tests: + +1. To see if tests you have written when developing SOLikeT are valid and will pass the Continuous Integration (CI) tests which we require for merging on github. + +If you are using conda, the easiest way to run tests (and the way we run them) is to use tox-conda ``` -pip install camb +pip install tox-conda +tox -e test ``` -or, for a fuller cobaya install: + +This will create a fresh virtual environment replicating the one which is used for CI then run the tests (i.e. without touching your current environment). 
Note that any args after a '--' string will be passed to pytest, so + ``` -cobaya-install cosmo -p /your/path/to/cobaya/packages +tox -e test -- -k my_new_module ``` -To run tests, you will also need the original LAT_MFlike package: + +will only run tests which have names containing the string 'my_new_module', and + ``` -pip install git+https://github.com/simonsobs/lat_mflike +tox -e test -- --pdb ``` -Then, you can run tests with + +will start a pdb debug instance when (sorry, _if_) a test fails. + +2. Check SOLikeT is working as intended in an environment of your own specification. + +For this you need to make sure all of the above system-level and python dependencies are working correctly, then run: ``` -pip install pytest -pytest -v . +pytest -v soliket ``` +Good luck! + Please raise an issue if you have trouble installing or any of the tests fail. ## Contains diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..f2618ed1 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,20 @@ +numpy +scipy +pandas +scikit-learn +pyyaml +py-bobyqa +packaging +tqdm +portalocker +dill +fuzzywuzzy +astropy +camb +cosmopower +getdist +cobaya +pyccl +sacc +fgspectra @ git+https://github.com/simonsobs/fgspectra@act_sz_x_cib#egg=fgspectra +mflike @ git+https://github.com/simonsobs/lat_mflike@master \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index 13b1b16a..a0be6810 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,8 +1,42 @@ +[metadata] +name = soliket +author = simonsobs +version = 0.1.dev +description = "SO Likelihoods & Theories" +long_description = file: README.rst +url = https://github.com/simonsobs/SOLikeT/ +edit_on_github = True + +[options] +zip_safe = False +packages = find: +python_requires = >=3.7 +setup_requires = setuptools_scm +install_requires = + numpy + scipy + pandas + scikit-learn + astropy + camb + cosmopower + getdist + cobaya + pyccl + sacc + fgspectra @ 
git+https://github.com/simonsobs/fgspectra@act_sz_x_cib#egg=fgspectra + mflike @ git+https://github.com/simonsobs/lat_mflike@master + +[options.package_data] +soliket = *.yaml,*.bibtex,clusters/data/*,clusters/data/selFn_equD56/*,lensing/data/*.txt,mflike/*.yaml,tests/*.yaml,data/xcorr_simulated/*.txt +testpaths = "soliket" +text_file_format = rst + [flake8] select = E713,E704,E703,E714,E741,E10,E11,E20,E22,E23,E25,E27,E301,E302,E304,E9, F405,F406,F5,F6,F7,F8,W1,W2,W3,W6,E501 max-line-length = 90 -exclude = .tox,build +exclude = .tox,build,cobaya_packages,test,.eggs [coverage:run] omit = diff --git a/setup.py b/setup.py index 50a2311b..995997e7 100644 --- a/setup.py +++ b/setup.py @@ -1,36 +1,41 @@ -# example cobaya-compliant SO likelihood package; -# adapted from github.com/cobayasampler/example_external_likelihood +#!/usr/bin/env python +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +# NOTE: The configuration for the package, including the name, version, and +# other information are set in the setup.cfg file. + +import os +import sys from setuptools import setup -setup( - name="soliket", - version="0.0", - description="SO Likelihoods & Theories", - zip_safe=False, - packages=["soliket", "soliket.tests", "soliket.clusters"], - package_data={ - "soliket": [ - "*.yaml", - "*.bibtex", - # "data/simulated*/*.txt", - "clusters/data/*", - "clusters/data/selFn_equD56/*", - "lensing/data/*.txt", - ] - }, - install_requires=[ - "astropy", - "scikit-learn", - "cobaya", - "sacc", - "pyccl", - "fgspectra @ git+https://github.com/simonsobs/fgspectra@act_sz_x_cib#egg=fgspectra", # noqa E501 - "mflike @ git+https://github.com/simonsobs/lat_mflike@master" - ], - extras_requires=[ - "cosmopower" - ], - test_suite="soliket.tests", - include_package_data=True, -) + +# First provide helpful messages if contributors try and run legacy commands +# for tests or docs. + +TEST_HELP = """ +Note: running tests is no longer done using 'python setup.py test'. 
Instead +you will need to run: + tox -e test +If you don't already have tox-conda installed, you can install it with: + pip install tox-conda +If you only want to run part of the test suite, you can also pass pytest +args through directly following a '--': + tox -e test -- -k name_of_my_test +For more information, see: + https://github.com/simonsobs/SOLikeT#running-tests +""" + +VERSION_TEMPLATE = """ +# Note that we need to fall back to the hard-coded version if either +# setuptools_scm can't be imported or setuptools_scm can't determine the +# version, so we catch the generic 'Exception'. +try: + from setuptools_scm import get_version + version = get_version(root='..', relative_to=__file__) +except Exception: + version = '{version}' +""".lstrip() + +setup(use_scm_version={'write_to': os.path.join('.', 'version.py'), + 'write_to_template': VERSION_TEMPLATE}) diff --git a/soliket-tests.yml b/soliket-tests.yml new file mode 100644 index 00000000..10768a08 --- /dev/null +++ b/soliket-tests.yml @@ -0,0 +1,18 @@ +name: soliket-tests +channels: + - conda-forge + - nodefaults +dependencies: + - python>=3.7,<3.11 + - pip + - pytest-cov + - compilers + - make + - cmake + - swig + - gsl + - fftw + - cython + - mpi4py + - pip: + - -r requirements.txt \ No newline at end of file diff --git a/soliket/tests/test_cosmopower.py b/soliket/tests/test_cosmopower.py index e0259053..1ef05efa 100644 --- a/soliket/tests/test_cosmopower.py +++ b/soliket/tests/test_cosmopower.py @@ -41,12 +41,16 @@ @pytest.mark.skipif(not HAS_COSMOPOWER, reason='test requires cosmopower') -def test_cosmopower_theory(): +def test_cosmopower_theory(request): + info_dict['theory']['soliket.CosmoPower']['soliket_data_path'] = \ + os.path.join(request.config.rootdir, 'soliket/data/CosmoPower') model_fiducial = get_model(info_dict) # noqa F841 @pytest.mark.skipif(not HAS_COSMOPOWER, reason='test requires cosmopower') -def test_cosmopower_loglike(): +def test_cosmopower_loglike(request): + 
info_dict['theory']['soliket.CosmoPower']['soliket_data_path'] = \ + os.path.join(request.config.rootdir, 'soliket/data/CosmoPower') model_cp = get_model(info_dict) logL_cp = float(model_cp.loglikes({})[0]) @@ -55,7 +59,7 @@ def test_cosmopower_loglike(): @pytest.mark.skipif(not HAS_COSMOPOWER, reason='test requires cosmopower') -def test_cosmopower_against_camb(): +def test_cosmopower_against_camb(request): info_dict['theory'] = {'camb': {'stop_at_error': True}} model_camb = get_model(info_dict) @@ -64,7 +68,8 @@ def test_cosmopower_against_camb(): info_dict['theory'] = { "soliket.CosmoPower": { - "soliket_data_path": "soliket/data/CosmoPower", + "soliket_data_path": os.path.join(request.config.rootdir, + 'soliket/data/CosmoPower'), "stop_at_error": True, "extra_args": {'lmax': camb_cls['ell'].max() + 1}} } diff --git a/soliket/tests/test_cross_correlation.py b/soliket/tests/test_cross_correlation.py index a5f4e173..a758c2f7 100644 --- a/soliket/tests/test_cross_correlation.py +++ b/soliket/tests/test_cross_correlation.py @@ -1,8 +1,14 @@ import numpy as np +import os import pytest from soliket.ccl import CCL from cobaya.model import get_model +auto_file = 'soliket/data/xcorr_simulated/clgg_noiseless.txt' +cross_file = 'soliket/data/xcorr_simulated/clkg_noiseless.txt' +dndz_file = 'soliket/data/xcorr_simulated/dndz.txt' +sacc_file = 'soliket/tests/data/des_s-act_kappa.toy-sim.sacc.fits' + cosmo_params = {"Omega_c": 0.25, "Omega_b": 0.05, "h": 0.67, "n_s": 0.96} info = { @@ -22,45 +28,59 @@ } -def test_galaxykappa_import(): +def test_galaxykappa_import(request): from soliket.cross_correlation import GalaxyKappaLikelihood -def test_shearkappa_import(): +def test_shearkappa_import(request): from soliket.cross_correlation import ShearKappaLikelihood -def test_galaxykappa_model(): +def test_galaxykappa_model(request): from soliket.cross_correlation import GalaxyKappaLikelihood info["likelihood"] = { "GalaxyKappaLikelihood": {"external": GalaxyKappaLikelihood, - 
"datapath": None} + "datapath": None, + 'cross_file': os.path.join(request.config.rootdir, + cross_file), + 'auto_file': os.path.join(request.config.rootdir, + auto_file), + 'dndz_file': os.path.join(request.config.rootdir, + dndz_file)} } model = get_model(info) # noqa F841 # @pytest.mark.xfail(reason="data file not in repo") -def test_shearkappa_model(): +def test_shearkappa_model(request): from soliket.cross_correlation import ShearKappaLikelihood - info["likelihood"] = {"ShearKappaLikelihood": {"external": ShearKappaLikelihood}} + info["likelihood"] = {"ShearKappaLikelihood": + {"external": ShearKappaLikelihood, + "datapath": os.path.join(request.config.rootdir, sacc_file)}} model = get_model(info) # noqa F841 -def test_galaxykappa_like(): +def test_galaxykappa_like(request): from soliket.cross_correlation import GalaxyKappaLikelihood info["likelihood"] = { "GalaxyKappaLikelihood": {"external": GalaxyKappaLikelihood, - "datapath": None} + "datapath": None, + 'cross_file': os.path.join(request.config.rootdir, + cross_file), + 'auto_file': os.path.join(request.config.rootdir, + auto_file), + 'dndz_file': os.path.join(request.config.rootdir, + dndz_file)} } model = get_model(info) @@ -69,11 +89,14 @@ def test_galaxykappa_like(): # @pytest.mark.xfail(reason="data file not in repo") -def test_shearkappa_like(): +def test_shearkappa_like(request): from soliket.cross_correlation import ShearKappaLikelihood - test_datapath = "soliket/tests/data/cs82_gs-planck_kappa_binned.sim.sacc.fits" + rootdir = request.config.rootdir + + cs82_file = "soliket/tests/data/cs82_gs-planck_kappa_binned.sim.sacc.fits" + test_datapath = os.path.join(rootdir, cs82_file) info["likelihood"] = { "ShearKappaLikelihood": {"external": ShearKappaLikelihood, @@ -100,12 +123,14 @@ def test_shearkappa_like(): assert np.isclose(loglikes, 637.64473666) -def test_shearkappa_deltaz(): +def test_shearkappa_deltaz(request): from soliket.cross_correlation import ShearKappaLikelihood - info["likelihood"] = 
{"ShearKappaLikelihood": {"external": ShearKappaLikelihood, - "z_nuisance_mode": "deltaz"}} + info["likelihood"] = {"ShearKappaLikelihood": + {"external": ShearKappaLikelihood, + "datapath": os.path.join(request.config.rootdir, sacc_file), + "z_nuisance_mode": "deltaz"}} model = get_model(info) # noqa F841 loglikes, derived = model.loglikes() @@ -113,12 +138,14 @@ def test_shearkappa_deltaz(): assert np.isfinite(loglikes) -def test_shearkappa_m(): +def test_shearkappa_m(request): from soliket.cross_correlation import ShearKappaLikelihood - info["likelihood"] = {"ShearKappaLikelihood": {"external": ShearKappaLikelihood, - "m_nuisance_mode": True}} + info["likelihood"] = {"ShearKappaLikelihood": + {"external": ShearKappaLikelihood, + "datapath": os.path.join(request.config.rootdir, sacc_file), + "m_nuisance_mode": True}} model = get_model(info) # noqa F841 loglikes, derived = model.loglikes() @@ -126,12 +153,14 @@ def test_shearkappa_m(): assert np.isfinite(loglikes) -def test_shearkappa_ia_nla_noevo(): +def test_shearkappa_ia_nla_noevo(request): from soliket.cross_correlation import ShearKappaLikelihood - info["likelihood"] = {"ShearKappaLikelihood": {"external": ShearKappaLikelihood, - "ia_mode": 'nla-noevo'}} + info["likelihood"] = {"ShearKappaLikelihood": + {"external": ShearKappaLikelihood, + "datapath": os.path.join(request.config.rootdir, sacc_file), + "ia_mode": 'nla-noevo'}} model = get_model(info) # noqa F841 loglikes, derived = model.loglikes() @@ -139,12 +168,14 @@ def test_shearkappa_ia_nla_noevo(): assert np.isfinite(loglikes) -def test_shearkappa_ia_nla(): +def test_shearkappa_ia_nla(request): from soliket.cross_correlation import ShearKappaLikelihood - info["likelihood"] = {"ShearKappaLikelihood": {"external": ShearKappaLikelihood, - "ia_mode": 'nla'}} + info["likelihood"] = {"ShearKappaLikelihood": + {"external": ShearKappaLikelihood, + "datapath": os.path.join(request.config.rootdir, sacc_file), + "ia_mode": 'nla'}} info["params"]["eta_IA"] = 1.7 @@ 
-154,12 +185,14 @@ def test_shearkappa_ia_nla(): assert np.isfinite(loglikes) -def test_shearkappa_ia_perbin(): +def test_shearkappa_ia_perbin(request): from soliket.cross_correlation import ShearKappaLikelihood - info["likelihood"] = {"ShearKappaLikelihood": {"external": ShearKappaLikelihood, - "ia_mode": 'nla-perbin'}} + info["likelihood"] = {"ShearKappaLikelihood": + {"external": ShearKappaLikelihood, + "datapath": os.path.join(request.config.rootdir, sacc_file), + "ia_mode": 'nla-perbin'}} model = get_model(info) # noqa F841 loglikes, derived = model.loglikes() @@ -167,11 +200,13 @@ def test_shearkappa_ia_perbin(): assert np.isfinite(loglikes) -def test_shearkappa_hmcode(): +def test_shearkappa_hmcode(request): from soliket.cross_correlation import ShearKappaLikelihood - info["likelihood"] = {"ShearKappaLikelihood": ShearKappaLikelihood} + info["likelihood"] = {"ShearKappaLikelihood": + {"external": ShearKappaLikelihood, + "datapath": os.path.join(request.config.rootdir, sacc_file)}} info["theory"] = {"camb": {'extra_args': {'halofit_version': 'mead2020_feedback', 'HMCode_logT_AGN': 7.8}}, "ccl": {"external": CCL, "nonlinear": False}} diff --git a/soliket/tests/test_lensing.py b/soliket/tests/test_lensing.py index cee56ef8..03a9e642 100644 --- a/soliket/tests/test_lensing.py +++ b/soliket/tests/test_lensing.py @@ -7,6 +7,13 @@ from cobaya.yaml import yaml_load from cobaya.model import get_model +try: + import classy # noqa F401 +except ImportError: + boltzmann_codes = ["camb"] +else: + boltzmann_codes = ["camb", "classy"] + packages_path = os.environ.get("COBAYA_PACKAGES_PATH") or os.path.join( tempfile.gettempdir(), "lensing_packages" ) @@ -82,7 +89,7 @@ def get_demo_lensing_model(theory): return model, test_point -@pytest.mark.parametrize("theory", ["camb", "classy"]) +@pytest.mark.parametrize("theory", boltzmann_codes) def test_lensing(theory): model, test_point = get_demo_lensing_model(theory) lnl = model.loglike(test_point)[0] diff --git 
a/soliket/tests/test_lensing_lite.py b/soliket/tests/test_lensing_lite.py index b41d974a..ace9f52e 100644 --- a/soliket/tests/test_lensing_lite.py +++ b/soliket/tests/test_lensing_lite.py @@ -4,6 +4,13 @@ from cobaya.yaml import yaml_load from cobaya.model import get_model +try: + import classy # noqa F401 +except ImportError: + boltzmann_codes = ["camb"] +else: + boltzmann_codes = ["camb", "classy"] + def get_demo_lensing_model(theory): if theory == "camb": @@ -56,7 +63,7 @@ def get_demo_lensing_model(theory): return model -@pytest.mark.parametrize("theory", ["camb", "classy"]) +@pytest.mark.parametrize("theory", boltzmann_codes) def test_lensing(theory): model = get_demo_lensing_model(theory) ns_param = "ns" if theory == "camb" else "n_s" diff --git a/soliket/tests/test_mflike.py b/soliket/tests/test_mflike.py index c3892143..81711f89 100644 --- a/soliket/tests/test_mflike.py +++ b/soliket/tests/test_mflike.py @@ -4,6 +4,7 @@ import os import tempfile import unittest +import pytest from distutils.version import LooseVersion import camb @@ -78,7 +79,9 @@ def setUp(self): from cobaya.install import install install({"likelihood": {"mflike.MFLike": None}}, - path=packages_path, skip_global=True) + path=packages_path, skip_global=False, force=True, debug=True) + install({"likelihood": {"soliket.MFLike": None}}, + path=packages_path, skip_global=False, force=True, debug=True) def get_mflike_type(self, as_string=False): if self.orig: @@ -91,6 +94,7 @@ def get_mflike_type(self, as_string=False): else: return eval(t) + @pytest.mark.skip(reason="don't want to install 300Mb of data!") def test_mflike(self): # As of now, there is not a mechanism # in soliket to ensure there is .loglike that can be called like this @@ -155,6 +159,7 @@ def test_mflike(self): self.assertAlmostEqual(-2 * (loglike - my_mflike.logp_const), chi2, 2) + @pytest.mark.skip(reason="don't want to install 300Mb of data!") def test_cobaya(self): mflike_type = self.get_mflike_type(as_string=True) diff --git 
a/soliket/tests/test_runs.py b/soliket/tests/test_runs.py index 4f3f4d8a..b7496278 100644 --- a/soliket/tests/test_runs.py +++ b/soliket/tests/test_runs.py @@ -18,6 +18,12 @@ def test_evaluate(lhood): if lhood == "lensing" or lhood == "multi": pytest.xfail(reason="lensing lhood install failure") + if lhood == "mflike": + pytest.skip(reason="don't want to install 300Mb of data!") + + if lhood == "cross_correlation": + pytest.skip(reason="cannot locate data files") + info = yaml_load(pkgutil.get_data("soliket", f"tests/test_{lhood}.yaml")) info["force"] = True info['sampler'] = {'evaluate': {}} @@ -38,6 +44,12 @@ def test_mcmc(lhood): if lhood == "lensing" or lhood == "multi": pytest.xfail(reason="lensing lhood install failure") + if lhood == "mflike": + pytest.skip(reason="don't want to install 300Mb of data!") + + if lhood == "cross_correlation": + pytest.skip(reason="cannot locate data files") + info = yaml_load(pkgutil.get_data("soliket", f"tests/test_{lhood}.yaml")) info["force"] = True info['sampler'] = {'mcmc': {'max_samples': 10, 'max_tries': 1000}} diff --git a/tox.ini b/tox.ini index d61fc10a..a42e594e 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,55 @@ +[tox] +requires = + tox-conda + setuptools >= 30.3.0 + pip >= 19.3.1 +envlist = + py{37,38,39,310}-test{,-all}{,-latest,-oldest}{,-cov} + codestyle + +[testenv] + +conda_env = soliket-tests.yml +conda_setup_args= + --override-channels +conda_install_args= + --override-channels + +setenv = + COBAYA_PACKAGES_PATH = ./cobaya_packages + SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL = True + +# Pass through the following environment variables which may be needed for the CI +passenv = HOME WINDIR LC_ALL LC_CTYPE CI TRAVIS + +# Run the tests in a temporary directory to make sure that we don't import +# this package from the source tree +changedir = .tmp/{envname} + +# tox environments are constructed with so-called 'factors' (or terms) +# separated by hyphens, e.g. test-devdeps-cov. 
Lines below starting with factor: +# will only take effect if that factor is included in the environment name. To +# see a list of example environments that can be run, along with a description, +# run: +# +# tox -l -v +# +description = + run tests + all: using all optional dependencies + latest: with the latest supported version of key dependencies + oldest: with the oldest supported version of key dependencies + cov: and test coverage + +commands = + pip freeze + cobaya-install planck_2018_highl_plik.TTTEEE_lite_native --no-set-global + !cov: pytest --pyargs soliket {posargs} + cov: pytest --pyargs soliket --cov soliket --cov-config={toxinidir}/setup.cfg {posargs} + [testenv:codestyle] skip_install = true +conda_env = changedir = . description = check code style, e.g. with flake8 deps = flake8