flake8 for codestyle (#30)
* added flake8 configs, started on some code style

* added back codestyle workflow

* code style passes :-)

* restored imports in mflike test

* made codestyle checks coherent with cobaya
itrharrison authored Oct 4, 2021
1 parent 0853722 commit 9a80163
Showing 30 changed files with 585 additions and 408 deletions.
22 changes: 22 additions & 0 deletions .github/workflows/codestyle.yml
@@ -0,0 +1,22 @@
name: Code Style
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
flake8:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v2
- name: Install Python 3.x
uses: actions/setup-python@v2
with:
python-version: 3.x
- name: Install Dependencies
run: |
pip install tox
- name: Check Code Style
run: |
tox -e codestyle
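
The workflow above delegates the actual check to tox -e codestyle, but the corresponding tox environment is not part of this diff. A minimal sketch of what such an environment could look like, assuming flake8 is the only style dependency and that the package directory passed to it is soliket (the environment contents here are an assumption, not taken from the repository):

# tox.ini -- hypothetical [testenv:codestyle] section, for illustration only
[testenv:codestyle]
# Style checks do not need the package itself installed.
skip_install = true
# flake8 is the only tool required; it reads its settings from the
# [flake8] section of setup.cfg added in this commit.
deps = flake8
commands = flake8 soliket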
5 changes: 5 additions & 0 deletions setup.cfg
@@ -0,0 +1,5 @@
[flake8]
select = E713,E704,E703,E714,E741,E10,E11,E20,E22,E23,E25,E27,E301,E302,E304,E9,
F405,F406,F5,F6,F7,F8,W1,W2,W3,W6,E501
max-line-length = 90
exclude = .tox,build
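
Among the selected codes, E501 enforces the 90-character limit set just above; most of the changes below either re-wrap long lines to satisfy it or mark unavoidable long lines with noqa comments, and the re-exported imports get noqa: F401 ("imported but unused") markers. A standalone illustration of those two suppression patterns as used throughout the commit (the names below are made up for the example):

# Illustration only -- the same noqa pattern this commit applies elsewhere.

# F401 ("imported but unused"): keep an import solely so it is re-exported.
from os import path  # noqa: F401

# E501 ("line too long"): a URL-bearing requirement string that cannot be wrapped.
FGSPECTRA_REQ = "fgspectra @ git+https://github.com/simonsobs/fgspectra@act_sz_x_cib#egg=fgspectra"  # noqa: E501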
2 changes: 1 addition & 1 deletion setup.py
@@ -25,7 +25,7 @@
"cobaya",
"sacc",
"pyccl",
"fgspectra @ git+https://github.com/simonsobs/fgspectra@act_sz_x_cib#egg=fgspectra",
"fgspectra @ git+https://github.com/simonsobs/fgspectra@act_sz_x_cib#egg=fgspectra", # noqa E501
"mflike @ git+https://github.com/simonsobs/lat_mflike@master"
],
test_suite="soliket.tests",
18 changes: 9 additions & 9 deletions soliket/__init__.py
@@ -1,12 +1,12 @@
from .lensing import LensingLiteLikelihood, LensingLikelihood
from .gaussian import GaussianLikelihood, MultiGaussianLikelihood
from .ps import PSLikelihood, BinnedPSLikelihood
from .clusters import ClusterLikelihood
from .mflike import MFLike
from .lensing import LensingLiteLikelihood, LensingLikelihood # noqa: F401
from .gaussian import GaussianLikelihood, MultiGaussianLikelihood # noqa: F401
from .ps import PSLikelihood, BinnedPSLikelihood # noqa: F401
from .clusters import ClusterLikelihood # noqa: F401
from .mflike import MFLike # noqa: F401
try:
import pyccl as ccl
from .ccl import CCL
from .cross_correlation import CrossCorrelationLikelihood
import pyccl as ccl # noqa: F401
from .ccl import CCL # noqa: F401
from .cross_correlation import CrossCorrelationLikelihood # noqa: F401
except ImportError:
print('Skipping CCL module as pyCCL is not installed')
pass
pass
101 changes: 58 additions & 43 deletions soliket/ccl.py
@@ -1,35 +1,40 @@
"""
Simple CCL wrapper with function to return CCL cosmo object, and (optional) result of
calling various custom methods on the ccl object. The idea is this is included with the CCL
package, so it can easily be used as a Cobaya component whenever CCL is installed, here for now.
calling various custom methods on the ccl object. The idea is this is included with the
CCL package, so it can easily be used as a Cobaya component whenever CCL is installed,
here for now.
First version by AL. Untested example of usage at
https://github.com/cmbant/SZCl_like/blob/methods/szcl_like/szcl_like.py
get_CCL results a dictionary of results, where results['cosmo'] is the CCL cosmology object.
get_CCL results a dictionary of results, where results['cosmo'] is the CCL cosmology
object.
Classes that need other CCL-computed results (without additional free parameters), should
pass them in the requirements list.
e.g. a Likelihood with get_requirements() returning {'CCL': {'methods:{'name': self.method}}}
[where self is the Theory instance] will have results['name'] set to the result
of self.method(cosmo) being called with the CCL cosmo object.
e.g. a Likelihood with get_requirements() returning
{'CCL': {'methods:{'name': self.method}}} [where self is the Theory instance] will have
results['name'] set to the result of self.method(cosmo) being called with the CCL cosmo
object.
The Likelihood class can therefore handle for itself which results specifically it needs from CCL,
and just give the method to return them (to be called and cached by Cobaya with the right
parameters at the appropriate time).
The Likelihood class can therefore handle for itself which results specifically it needs
from CCL, and just give the method to return them (to be called and cached by Cobaya with
the right parameters at the appropriate time).
Alternatively the Likelihood can compute what it needs from results['cosmo'], however in this
case it will be up to the Likelihood to cache the results appropriately itself.
Alternatively the Likelihood can compute what it needs from results['cosmo'], however in
this case it will be up to the Likelihood to cache the results appropriately itself.
Note that this approach preclude sharing results other than the cosmo object itself between different likelihoods.
Note that this approach preclude sharing results other than the cosmo object itself
between different likelihoods.
Also note lots of things still cannot be done consistently in CCL, so this is far from general.
Also note lots of things still cannot be done consistently in CCL, so this is far from
general.
April 2021:
-----------
Second version by PL. Using CCL's newly implemented cosmology calculator.
Second version by PL. Using CCL's newly implemented cosmology calculator.
"""

# For Cobaya docs see
@@ -41,6 +46,7 @@
from cobaya.theory import Theory
import pyccl as ccl


class CCL(Theory):
# Options for Pk.
# Default options can be set globally, and updated from requirements as needed
@@ -74,7 +80,8 @@ def must_provide(self, **requirements):

self.kmax = max(self.kmax, options.get('kmax', self.kmax))
self.z = np.unique(np.concatenate(
(np.atleast_1d(options.get("z", self._default_z_sampling)), np.atleast_1d(self.z))))
(np.atleast_1d(options.get("z", self._default_z_sampling)),
np.atleast_1d(self.z))))

# Dictionary of the things CCL needs from CAMB/CLASS
needs = {}
@@ -115,7 +122,7 @@ def calculate(self, state, want_derived=True, **params_values_dict):
distance = self.provider.get_comoving_radial_distance(self.z)
hubble_z = self.provider.get_Hubble(self.z)
H0 = hubble_z[0]
h = H0/100
h = H0 / 100
E_of_z = hubble_z / H0

Omega_c = self.provider.get_param('omch2') / h ** 2
@@ -130,9 +137,8 @@ def calculate(self, state, want_derived=True, **params_values_dict):
# Array z is sorted in ascending order. CCL requires an ascending scale
# factor as input
a = 1. / (1 + self.z[::-1])
#growth = ccl.background.growth_factor(cosmo, a)
#fgrowth = ccl.background.growth_rate(cosmo, a)

# growth = ccl.background.growth_factor(cosmo, a)
# fgrowth = ccl.background.growth_rate(cosmo, a)

if self.kmax:
for pair in self._var_pairs:
@@ -141,38 +147,47 @@
Pk_lin = np.flip(Pk_lin, axis=0)

if self.nonlinear:
_, z, Pk_nonlin = self.provider.get_Pk_grid(var_pair=pair, nonlinear=True)
_, z, Pk_nonlin = self.provider.get_Pk_grid(var_pair=pair,
nonlinear=True)
Pk_nonlin = np.flip(Pk_nonlin, axis=0)

# Create a CCL cosmology object. Because we are giving it background
# quantities, it should not depend on the cosmology parameters given
# Create a CCL cosmology object. Because we are giving it background
# quantities, it should not depend on the cosmology parameters given
cosmo = ccl.CosmologyCalculator(
Omega_c=Omega_c, Omega_b=Omega_b, h=h, sigma8=0.8, n_s=0.96,
background={'a': a,
'chi': distance,
'h_over_h0': E_of_z},
pk_linear={'a': a,
'k': k,
'delta_matter:delta_matter': Pk_lin},
pk_nonlin={'a': a,
'k': k,
'delta_matter:delta_matter': Pk_nonlin}
)

Omega_c=Omega_c,
Omega_b=Omega_b,
h=h,
sigma8=0.8,
n_s=0.96,
background={'a': a,
'chi': distance,
'h_over_h0': E_of_z},
pk_linear={'a': a,
'k': k,
'delta_matter:delta_matter': Pk_lin}, # noqa E501
pk_nonlin={'a': a,
'k': k,
'delta_matter:delta_matter': Pk_nonlin} # noqa E501
)

else:
cosmo = ccl.CosmologyCalculator(
Omega_c=Omega_c, Omega_b=Omega_b, h=h, sigma8=0.8, n_s=0.96,
background={'a': a,
'chi': distance,
'h_over_h0': E_of_z},
pk_linear={'a': a,
'k': k,
'delta_matter:delta_matter': Pk_lin}
)
Omega_c=Omega_c,
Omega_b=Omega_b,
h=h,
sigma8=0.8,
n_s=0.96,
background={'a': a,
'chi': distance,
'h_over_h0': E_of_z},
pk_linear={'a': a,
'k': k,
'delta_matter:delta_matter': Pk_lin} # noqa E501
)

state['CCL'] = {'cosmo': cosmo}
for required_result, method in self._required_results.items():
state['CCL'][required_result] = method(cosmo)

def get_CCL(self):
return self._current_state['CCL']
return self._current_state['CCL']
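
The rewrapped docstring above describes how other components are meant to use this Theory: a Likelihood lists the CCL methods it needs in its requirements as {'CCL': {'methods': {'name': self.method}}} and later reads both the cosmo object and the cached method results back from the provider. A minimal sketch of that pattern, for orientation only (the likelihood class, its method, and the kmax value are hypothetical; only the requirement structure and the get_CCL accessor come from the code above):

import numpy as np
import pyccl as ccl
from cobaya.likelihood import Likelihood


class ToyCCLLikelihood(Likelihood):
    # Hypothetical likelihood that needs one CCL-derived quantity.

    def get_requirements(self):
        # Ask the CCL Theory to call self._distances with the cosmo object
        # and cache the result under the key 'distances'.
        return {"CCL": {"kmax": 10,
                        "methods": {"distances": self._distances}}}

    def _distances(self, cosmo):
        # Any function of the CCL cosmology can be requested this way.
        a = np.linspace(0.5, 1.0, 16)
        return ccl.comoving_radial_distance(cosmo, a)

    def logp(self, **params_values):
        results = self.provider.get_CCL()
        cosmo = results["cosmo"]        # the raw CCL cosmology object
        chi = results["distances"]      # cached output of self._distances(cosmo)
        # A real likelihood would compare chi (or quantities derived from cosmo)
        # to data; a placeholder value keeps the sketch self-contained.
        return -0.5 * float(np.sum(chi * 0.0))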
2 changes: 1 addition & 1 deletion soliket/clusters/__init__.py
@@ -1 +1 @@
from .clusters import ClusterLikelihood
from .clusters import ClusterLikelihood # noqa: F401
28 changes: 18 additions & 10 deletions soliket/clusters/clusters.py
@@ -52,9 +52,10 @@ def get_requirements(self):
def _get_sz_model(self, cosmo):
model = SZModel()
model.hmf = ccl.halos.MassFuncTinker08(cosmo, mass_def=self.mdef)
model.hmb = ccl.halos.HaloBiasTinker10(cosmo, mass_def=self.mdef, mass_def_strict=False)
model.hmb = ccl.halos.HaloBiasTinker10(cosmo,
mass_def=self.mdef, mass_def_strict=False)
model.hmc = ccl.halos.HMCalculator(cosmo, model.hmf, model.hmb, self.mdef)
model.szk = SZTracer(cosmo)
# model.szk = SZTracer(cosmo)
return model

def _get_catalog(self):
@@ -77,7 +78,8 @@ def _get_om(self):
)

def _get_ob(self):
return (self.theory.get_param("ombh2")) / ((self.theory.get_param("H0") / 100.0) ** 2)
return (self.theory.get_param("ombh2")) \
/ ((self.theory.get_param("H0") / 100.0) ** 2)

def _get_Ez(self):
return self.theory.get_Hubble(self.zarr) / self.theory.get_param("H0")
@@ -94,12 +96,13 @@ def _get_DAz_interpolator(self):
def _get_HMF(self):
h = self.theory.get_param("H0") / 100.0

Pk_interpolator = self.theory.get_Pk_interpolator(("delta_nonu", "delta_nonu"), nonlinear=False).P
Pk_interpolator = self.theory.get_Pk_interpolator(("delta_nonu", "delta_nonu"),
nonlinear=False).P
pks = Pk_interpolator(self.zarr, self.k)
# pkstest = Pk_interpolator(0.125, self.k )
# print (pkstest * h**3 )

Ez = self._get_Ez() # self.theory.get_Hubble(self.zarr) / self.theory.get_param("H0")
Ez = self._get_Ez()
om = self._get_om()

hmf = mf.HMF(om, Ez, pk=pks * h ** 3, kh=self.k / h, zarr=self.zarr)
@@ -113,7 +116,8 @@ def _get_param_vals(self):
H0 = self.theory.get_param("H0")
ob = self._get_ob()
om = self._get_om()
param_vals = {"om": om, "ob": ob, "H0": H0, "B0": B0, "scat": scat, "massbias": massbias}
param_vals = {"om": om, "ob": ob, "H0": H0, "B0": B0, "scat": scat,
"massbias": massbias}
return param_vals

def _get_rate_fn(self, **kwargs):
@@ -152,7 +156,8 @@ def _get_dVdz(self):
"""
DA_z = self.theory.get_angular_diameter_distance(self.zarr)

dV_dz = DA_z ** 2 * (1.0 + self.zarr) ** 2 / (self.theory.get_Hubble(self.zarr) / C_KM_S)
dV_dz = DA_z ** 2 * (1.0 + self.zarr) ** 2\
/ (self.theory.get_Hubble(self.zarr) / C_KM_S)

# dV_dz *= (self.theory.get_param("H0") / 100.0) ** 3.0 # was h0
return dV_dz
@@ -175,13 +180,16 @@

for Yt, frac in zip(self.survey.Ythresh, self.survey.frac_of_survey):
Pfunc = self.szutils.PfuncY(Yt, HMF.M, z_arr, param_vals, Ez_fn, DA_fn)
N_z = np.trapz(dn_dzdm * Pfunc, dx=np.diff(HMF.M[:, None] / h, axis=0), axis=0)
Ntot += np.trapz(N_z * dVdz, x=z_arr) * 4.0 * np.pi * self.survey.fskytotal * frac
N_z = np.trapz(dn_dzdm * Pfunc, dx=np.diff(HMF.M[:, None] / h, axis=0),
axis=0)
Ntot += np.trapz(N_z * dVdz, x=z_arr) \
* 4.0 * np.pi * self.survey.fskytotal * frac

# To test Mass function against Nemo.
# Pfunc = 1.
# N_z = np.trapz(dn_dzdm * Pfunc, dx=np.diff(HMF.M[:, None]/h, axis=0), axis=0)
# Ntot = np.trapz(N_z * dVdz, x=z_arr) * 4.0 * np.pi * (600./(4*np.pi * (180/np.pi)**2))
# Ntot = np.trapz(N_z * dVdz, x=z_arr) \
# * 4.0 * np.pi * (600./(4*np.pi * (180/np.pi)**2))
# print("Ntot", Ntot)

return Ntot
6 changes: 4 additions & 2 deletions soliket/clusters/massfunc.py
@@ -30,7 +30,8 @@ def __init__(self, om, Ez, pk=None, kh=None, zarr=None):

# Initialize rho critical values for usage
self.om = om
self.rho_crit0H100 = (3. / (8. * np.pi) * (100 * 1.e5) ** 2.) / G_CGS * MPC2CM / MSUN_CGS
self.rho_crit0H100 = (3. / (8. * np.pi) * (100 * 1.e5) ** 2.) \
/ G_CGS * MPC2CM / MSUN_CGS
self.rhoc0om = self.rho_crit0H100 * self.om

if pk is None:
@@ -60,7 +61,8 @@ def dn_dM(self, M, delta):
M here is in MDeltam but we can convert
"""
delts = self.critdensThreshold(delta)
dn_dlnm = dn_dlogM(M, self.zarr, self.rhoc0om, delts, self.kh, self.pk, 'comoving')
dn_dlnm = dn_dlogM(M, self.zarr, self.rhoc0om, delts, self.kh, self.pk,
'comoving')
dn_dm = dn_dlnm / M[:, None]
return dn_dm
