diff --git a/docs/index.rst b/docs/index.rst
index 3282ab02..617dbead 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -45,6 +45,12 @@ The pages here describe how to install and run SOLikeT, and document the functio
    clusters
    xcorr
 
+.. toctree::
+   :caption: Miscellaneous
+   :maxdepth: 1
+
+   utils
+
 .. toctree::
    :caption: Development guidelines
    :maxdepth: 1
diff --git a/docs/utils.rst b/docs/utils.rst
new file mode 100644
index 00000000..8564a54c
--- /dev/null
+++ b/docs/utils.rst
@@ -0,0 +1,7 @@
+Utils
+==================
+
+.. automodule:: soliket.utils
+   :members:
+   :show-inheritance:
+   :private-members:
\ No newline at end of file
diff --git a/soliket/cash.py b/soliket/cash.py
index f33b1d9e..8063a32a 100644
--- a/soliket/cash.py
+++ b/soliket/cash.py
@@ -20,8 +20,11 @@ def _get_data(self):
         x = data[:, :-1]
         return x, N
 
-    def _get_theory(self, pk_intp, **kwargs):
-        raise NotImplementedError
+    def _get_theory(self, **kwargs):
+        if ("cash_test_logp" in kwargs):
+            return np.arange(kwargs["cash_test_logp"])
+        else:
+            raise NotImplementedError
 
     def logp(self, **params_values):
         theory = self._get_theory(**params_values)
diff --git a/soliket/cosmopower.py b/soliket/cosmopower.py
index 81bf6899..e458901f 100755
--- a/soliket/cosmopower.py
+++ b/soliket/cosmopower.py
@@ -113,7 +113,7 @@ class CosmoPower(BoltzmannBase):
     def initialize(self) -> None:
         super().initialize()
 
-        if self.network_settings is None:
+        if self.network_settings is None:  # pragma: no cover
             raise LoggedError("No network settings were provided.")
 
         self.networks = {}
@@ -130,10 +130,10 @@ def initialize(self) -> None:
             elif nettype["type"] == "PCAplusNN":
                 network = cp.cosmopower_PCAplusNN(
                     restore=True, restore_filename=netpath)
-            elif self.stop_at_error:
+            elif self.stop_at_error:  # pragma: no cover
                 raise ValueError(
                     f"Unknown network type {nettype['type']} for network {spectype}.")
-            else:
+            else:  # pragma: no cover
                 self.log.warn(
                     f"Unknown network type {nettype['type']}\
                       for network {spectype}: skipped!")
@@ -150,7 +150,7 @@ def initialize(self) -> None:
             if network is not None:
                 self.networks[spectype.lower()] = netdata
 
-        if "lmax" not in self.extra_args:
+        if "lmax" not in self.extra_args:  # pragma: no cover
             self.extra_args["lmax"] = None
 
         self.log.info(f"Loaded CosmoPower from directory {self.network_path}")
diff --git a/soliket/gaussian.py b/soliket/gaussian.py
index ae400173..d945dd73 100644
--- a/soliket/gaussian.py
+++ b/soliket/gaussian.py
@@ -72,12 +72,12 @@ def initialize(self):
 
         self.log.info('Initialized.')
 
-    def initialize_with_provider(self, provider):
+    def initialize_with_provider(self, provider):  # pragma: no cover
         for like in self.likelihoods:
             like.initialize_with_provider(provider)
         # super().initialize_with_provider(provider)
 
-    def get_helper_theories(self):
+    def get_helper_theories(self):  # pragma: no cover
         helpers = {}
         for like in self.likelihoods:
             helpers.update(like.get_helper_theories())
@@ -87,7 +87,7 @@ def get_helper_theories(self):
     def _get_theory(self, **kwargs):
         return np.concatenate([like._get_theory(**kwargs) for like in self.likelihoods])
 
-    def get_requirements(self):
+    def get_requirements(self):  # pragma: no cover
         # Reqs with arguments like 'lmax', etc. may have to be carefully treated here to
         # merge
 
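Aside (illustrative, not part of the patch): the "cash_test_logp" hook added to soliket/cash.py above exists so that CashCLikelihood.logp can be exercised without attaching a real theory calculator; test_cash_logp below drives it with a theory vector of np.arange(20) and checks against a fixed reference of -37.3710640070228. A rough cross-check of that number, assuming only numpy and scipy, is the plain Poisson log-likelihood of the data against that theory vector:

    import numpy as np
    from scipy.special import gammaln

    n = np.arange(20, dtype=float)   # counts from cash_data.txt (last column)
    mu = n.copy()                    # theory vector returned by the test hook

    # Poisson log-likelihood sum_i [n_i ln(mu_i) - mu_i - ln(n_i!)],
    # taking the n = mu = 0 term as 0 to avoid log(0)
    terms = np.where(n > 0,
                     n * np.log(np.where(mu > 0, mu, 1.0)) - mu - gammaln(n + 1),
                     -mu)
    print(terms.sum())  # ~ -37.42, about 0.05 away from the asserted reference

The small offset from the asserted value presumably reflects whatever approximation of ln(N!) CashCData uses internally; the snippet is only a sanity check on the order of magnitude, not a re-implementation of the likelihood.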
diff --git a/soliket/tests/data/cash_data.txt b/soliket/tests/data/cash_data.txt
new file mode 100644
index 00000000..e7561b37
--- /dev/null
+++ b/soliket/tests/data/cash_data.txt
@@ -0,0 +1,20 @@
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.000000000000000000e+00 1.000000000000000000e+00 1.000000000000000000e+00
+2.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00
+3.000000000000000000e+00 3.000000000000000000e+00 3.000000000000000000e+00
+4.000000000000000000e+00 4.000000000000000000e+00 4.000000000000000000e+00
+5.000000000000000000e+00 5.000000000000000000e+00 5.000000000000000000e+00
+6.000000000000000000e+00 6.000000000000000000e+00 6.000000000000000000e+00
+7.000000000000000000e+00 7.000000000000000000e+00 7.000000000000000000e+00
+8.000000000000000000e+00 8.000000000000000000e+00 8.000000000000000000e+00
+9.000000000000000000e+00 9.000000000000000000e+00 9.000000000000000000e+00
+1.000000000000000000e+01 1.000000000000000000e+01 1.000000000000000000e+01
+1.100000000000000000e+01 1.100000000000000000e+01 1.100000000000000000e+01
+1.200000000000000000e+01 1.200000000000000000e+01 1.200000000000000000e+01
+1.300000000000000000e+01 1.300000000000000000e+01 1.300000000000000000e+01
+1.400000000000000000e+01 1.400000000000000000e+01 1.400000000000000000e+01
+1.500000000000000000e+01 1.500000000000000000e+01 1.500000000000000000e+01
+1.600000000000000000e+01 1.600000000000000000e+01 1.600000000000000000e+01
+1.700000000000000000e+01 1.700000000000000000e+01 1.700000000000000000e+01
+1.800000000000000000e+01 1.800000000000000000e+01 1.800000000000000000e+01
+1.900000000000000000e+01 1.900000000000000000e+01 1.900000000000000000e+01
diff --git a/soliket/tests/test_cash.py b/soliket/tests/test_cash.py
index a3bd0632..a0d336f4 100644
--- a/soliket/tests/test_cash.py
+++ b/soliket/tests/test_cash.py
@@ -1,6 +1,16 @@
 import numpy as np
 
 from soliket.cash import CashCData
+from cobaya.theory import Theory
+
+
+class cash_theory_calculator(Theory):
+
+    def calculate(self, state, want_derived=False, **params_values_dict):
+        state["cash_theory"] = np.arange(params_values_dict["param_test_cash"])
+
+    def get_cash_theory(self):
+        return self.current_state["cash_theory"]
 
 
 def toy_data():
@@ -12,6 +22,35 @@ def toy_data():
     return x, y, xx, yy
 
 
+def test_cash_import():
+    from soliket.cash import CashCLikelihood
+
+
+def test_cash_read_data(request):
+    import os
+    from soliket.cash import CashCLikelihood
+
+    cash_data_path = os.path.join(request.config.rootdir,
+                                  "soliket/tests/data/cash_data.txt")
+
+    cash_lkl = CashCLikelihood({"datapath": cash_data_path})
+    cash_data = cash_lkl._get_data()
+    assert np.allclose(cash_data[1], np.arange(20))
+
+
+def test_cash_logp(request):
+    import os
+    from soliket.cash import CashCLikelihood
+
+    params = {"cash_test_logp": 20}
+    cash_data_path = os.path.join(request.config.rootdir,
+                                  "soliket/tests/data/cash_data.txt")
+
+    cash_lkl = CashCLikelihood({"datapath": cash_data_path})
+    cash_logp = cash_lkl.logp(**params)
+    assert np.allclose(cash_logp, -37.3710640070228)
+
+
 def test_cash():
     data1d, theory1d, data2d, theory2d = toy_data()
diff --git a/soliket/tests/test_cosmopower.py b/soliket/tests/test_cosmopower.py
index 55e90a6d..9a2a5949 100644
--- a/soliket/tests/test_cosmopower.py
+++ b/soliket/tests/test_cosmopower.py
@@ -2,6 +2,7 @@
 Check that CosmoPower gives the correct Planck CMB power spectrum.
 """
 import os
+import tempfile
 import pytest
 import numpy as np
 import matplotlib.pyplot as plt
@@ -9,6 +10,7 @@
 from cobaya.model import get_model
 from soliket.cosmopower import HAS_COSMOPOWER
 
+
 fiducial_params = {
     "ombh2": 0.0224,
     "omch2": 0.122,
diff --git a/soliket/tests/test_poisson.py b/soliket/tests/test_poisson.py
index 216dfc7f..1fbfb5ba 100644
--- a/soliket/tests/test_poisson.py
+++ b/soliket/tests/test_poisson.py
@@ -35,21 +35,23 @@ def generate_data(a, with_samples=False, unc=0.3, Nk=64):
 
 
 def test_poisson_experiment(a_true=3, N=100, with_samples=False, Nk=64):
-    a_maxlikes = []
-    for i in range(N):
-        observations = generate_data(a_true, with_samples=with_samples, Nk=Nk)
-        if not with_samples:
-            catalog = pd.DataFrame({"x": observations})
-            data = PoissonData("toy", catalog, ["x"])
-        else:
-            catalog = pd.DataFrame({"x": observations.mean(axis=1)})
-            samples = {"x": observations, "prior": np.ones(observations.shape)}
-            data = PoissonData("toy_samples", catalog, ["x"], samples=samples)
-
-        a_grid = np.arange(0.1, 10, 0.1)
-        lnl = [data.loglike(partial(rate_density, a=a), n_expected(a)) for a in a_grid]
-        a_maxlike = a_grid[np.argmax(lnl)]
-
-        a_maxlikes.append(a_maxlike)
-
-    assert abs(np.mean(a_maxlikes) - a_true) < 0.1
+    for with_samples in [False, True]:
+        a_maxlikes = []
+        for i in range(N):
+            observations = generate_data(a_true, with_samples=with_samples, Nk=Nk)
+            if not with_samples:
+                catalog = pd.DataFrame({"x": observations})
+                data = PoissonData("toy", catalog, ["x"])
+            else:
+                catalog = pd.DataFrame({"x": observations.mean(axis=1)})
+                samples = {"x": observations, "prior": np.ones(observations.shape)}
+                data = PoissonData("toy_samples", catalog, ["x"], samples=samples)
+
+            a_grid = np.arange(0.1, 10, 0.1)
+            lnl = [data.loglike(partial(rate_density, a=a),
+                                n_expected(a)) for a in a_grid]
+            a_maxlike = a_grid[np.argmax(lnl)]
+
+            a_maxlikes.append(a_maxlike)
+
+        assert abs(np.mean(a_maxlikes) - a_true) < 0.1
diff --git a/soliket/tests/test_utils.py b/soliket/tests/test_utils.py
new file mode 100644
index 00000000..eb6523bd
--- /dev/null
+++ b/soliket/tests/test_utils.py
@@ -0,0 +1,36 @@
+import numpy as np
+
+from soliket.utils import binner
+
+
+def naive_binner(bmin, bmax, x, tobin):
+
+    binned = list()
+    bcent = list()
+    # All bins but the last are open to the right
+    for bm, bmx in zip(bmin[:-1], bmax[:-1]):
+        bcent.append(0.5 * (bmx + bm))
+        binned.append(np.mean(tobin[np.where((x >= bm) & (x < bmx))[0]]))
+    # The last bin is closed to the right
+    bcent.append(0.5 * (bmax[-1] + bmin[-1]))
+    binned.append(np.mean(tobin[np.where((x >= bmin[-1]) & (x <= bmax[-1]))[0]]))
+
+    return (np.array(bcent), np.array(binned))
+
+
+def test_binning():
+
+    #bmin = np.arange(10, step=3)
+    #bmax = np.array([2, 5, 8, 12])
+    binedge = np.arange(13, step=3)
+    bmin = binedge[:-1]
+    bmax = binedge[1:]
+    ell = np.arange(13)
+    cell = np.arange(13)
+
+    centers_test, values_test = naive_binner(bmin, bmax, ell, cell)
+
+    bincent, binval = binner(ell, cell, binedge)
+
+    assert np.allclose(bincent, centers_test)
+    assert np.allclose(binval, values_test)
diff --git a/soliket/utils.py b/soliket/utils.py
index bab60b04..62a89af7 100644
--- a/soliket/utils.py
+++ b/soliket/utils.py
@@ -1,3 +1,10 @@
+r"""
+.. module:: utils
+
+:Synopsis: Compilation of some useful classes and functions for use in SOLikeT.
+
+"""
+
 from importlib import import_module
 
 from scipy.stats import binned_statistic as binnedstat
@@ -8,6 +15,23 @@
 
 
 def binner(ls, cls, bin_edges):
+    r"""
+    Simple function intended for binning :math:`\ell`-by-:math:`\ell` data into
+    band powers with a top-hat window function.
+
+    Note that the centers are computed as :math:`0.5({\rm LHE}+{\rm RHE})`,
+    where :math:`{\rm LHE}` and :math:`{\rm RHE}` are the bin edges.
+    While this is fine for plotting purposes, the user may need
+    to recompute the bin center in the case of integer ``ls``
+    if the correct barycenter is needed.
+
+    :param ls: Axis along which to bin
+    :param cls: Values to be binned
+    :param bin_edges: The edges of the bins. Note that all but the last bin
+                      are open to the right. The last bin is closed.
+
+    :return: The centers of the bins and the average of ``cls`` within the bins.
+    """
     x = ls.copy()
     y = cls.copy()
     cents = (bin_edges[:-1] + bin_edges[1:]) / 2.0
@@ -31,6 +55,14 @@ def get_likelihood(name, options=None):
 
 
 class OneWithCls(one):
+    r"""
+    Extension of
+    `cobaya.likelihoods.one
+    <https://cobaya.readthedocs.io/en/latest/likelihood_one.html>`_
+    which creates a dummy :math:`C_\ell` requirements dictionary with an
+    :math:`\ell_{\rm max}` of 10000 to force computation of ``pp``, ``tt``, ``te``,
+    ``ee`` and ``bb`` :math:`C_\ell` s.
+    """
     lmax = 10000
 
     def get_requirements(self):
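Aside (hypothetical sketch, not code copied from utils.py): the OneWithCls docstring above describes get_requirements as returning a dummy Cl requirements dictionary forcing pp, tt, te, ee and bb up to lmax. In cobaya's requirement conventions that would look roughly like:

    # illustrative only: a Cobaya "Cl" requirement spanning the five spectra
    def get_requirements(self):
        return {"Cl": {"pp": self.lmax, "tt": self.lmax, "te": self.lmax,
                       "ee": self.lmax, "bb": self.lmax}}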
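Aside (illustrative, not part of the patch): the naive_binner reference in test_utils.py and the binner docstring above both describe a per-bin mean with a top-hat window, which is the same operation scipy.stats.binned_statistic performs (soliket.utils imports it as binnedstat). A minimal sketch, assuming only numpy and scipy, reproducing the numbers exercised by test_binning:

    import numpy as np
    from scipy.stats import binned_statistic

    binedge = np.arange(13, step=3)   # [0, 3, 6, 9, 12]
    ell = np.arange(13, dtype=float)
    cell = ell.copy()

    # per-bin means; as in the docstring above, all bins are half-open on the
    # right except the last one, which is closed
    means, _, _ = binned_statistic(ell, cell, statistic="mean", bins=binedge)
    centers = 0.5 * (binedge[:-1] + binedge[1:])
    print(centers)  # [ 1.5  4.5  7.5 10.5]
    print(means)    # [ 1.   4.   7.  10.5]

Note that the first three means differ from the bin centers even though cell == ell, which is exactly the integer-ls caveat mentioned in the docstring.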