Merge pull request #99 from joezuntz/kids-only-like

Add kids-only cosebis likelihood

joezuntz authored Aug 23, 2023
2 parents 8436306 + 7de4d96 commit f351d5c
Showing 3 changed files with 218 additions and 0 deletions.
38 changes: 38 additions & 0 deletions examples/kids-1000-values.ini
@@ -0,0 +1,38 @@
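; Values-file format: three numbers mean a sampled parameter (lower bound, starting value,
; upper bound); a single number keeps the parameter fixed.
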
; In KiDS-1000 the photo-z uncertainty is assumed to be correlated across the z-bins (see the prior file)
[nofz_shifts_kids]
uncorr_bias_1 = -5.0 0.000 5.0
uncorr_bias_2 = -5.0 -0.181 5.0
uncorr_bias_3 = -5.0 -1.110 5.0
uncorr_bias_4 = -5.0 -1.395 5.0
uncorr_bias_5 = -5.0 1.265 5.0

; The Hybrid pipeline set-up uses independent IA parameters for each survey.
; Here we use Tophat priors for an NLA-z analysis; to analyse TATT instead,
; modify the priors on the A2, alpha2 and bias_ta parameters.
[intrinsic_alignment_parameters]
z_piv = 0.62
A1 = -5.0 0.0 5.0
A2 = 0.0
alpha1 = -5.0 0.0 5.0
alpha2 = 0.0
bias_ta = 0.0


; We use a Tophat prior to marginalise over our uncertainty on baryon feedback, modelled with HMCode2020
[halo_model_parameters]
logt_agn = 7.3 7.8 8.0

; This is the set of cosmological parameter priors that were found to
; introduce the least projection effects on the marginal S8 distribution
; Note that CosmoSIS v3 onwards can sample over S8
[cosmological_parameters]
omch2 = 0.051 0.11812972217650827 0.255
ombh2 = 0.019 0.025939374402978773 0.026
h0 = 0.64 0.7666550530735352 0.82
n_s = 0.84 0.9007697522848085 1.1
S_8 = 0.1 0.7567464875805479 1.3
omega_k = 0.0
w = -1.0
wa = 0.0
mnu = 0.055 0.07740741 0.6
tau = 0.0697186
175 changes: 175 additions & 0 deletions examples/kids-1000.ini
@@ -0,0 +1,175 @@
; This example runs a KiDS-1000 COSEBIs-only analysis with scale cuts (2' < theta < 300'),
; using the data file prepared for the joint Hybrid DES Y3 xi_pm + KiDS-1000 COSEBIs analysis
[DEFAULT]
DATAFILE=likelihood/des-y3_and_kids-1000/DES-Y3_xipm_and_KiDS-1000_COSEBIs_2.0_300.0.fits
RUN_NAME = des-y3_and_kids-1000

[pipeline]
modules = consistency camb extrapolate
correlated_dz_priors fits_nz_kids photoz_bias_kids
fast_pt IA pk_to_cl_kids add_intrinsic cosebis cosebis_like
likelihoods = cosebis
values = examples/kids-1000-values.ini
priors = examples/des-y3_and_kids-1000-priors.ini
extra_output = cosmological_parameters/sigma_8
cosmological_parameters/A_s cosmological_parameters/omega_m
cosmological_parameters/omega_lambda cosmological_parameters/cosmomc_theta
delta_z_out_kids/bin_1 delta_z_out_kids/bin_2 delta_z_out_kids/bin_3
delta_z_out_kids/bin_4 delta_z_out_kids/bin_5
likelihoods/cosebis_like
timing = F
debug = T

; Since CosmoSIS v3, the consistency interface allows for sampling over S8;
; the camb module below then sets up the linear and non-linear power spectrum
[consistency]
file = utility/consistency/consistency_interface.py
cosmomc_theta=T
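
As an aside, the S_8 value in the values file relates to sigma_8 through the standard definition S_8 = sigma_8 * sqrt(Omega_m / 0.3). A minimal illustrative sketch of that conversion (not the consistency module's actual code):

import numpy as np

def sigma8_from_S8(S8, omega_m):
    # Standard definition: S_8 = sigma_8 * sqrt(Omega_m / 0.3)
    return S8 / np.sqrt(omega_m / 0.3)

# e.g. sigma8_from_S8(0.757, 0.3) == 0.757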

[camb]
file = boltzmann/camb/camb_interface.py
mode = all
halofit_version = mead2020_feedback
neutrino_hierarchy = normal
lmax=2500
kmax=100.0
zmid = 2.0
nz_mid = 100
zmax = 6.0
nz = 150
feedback=0

[extrapolate]
file = boltzmann/extrapolate/extrapolate_power.py
kmax = 500.

; Next we define the redshift bins and how we're going to marginalise over
; our uncertainty on these distributions
[fits_nz_des]
file = number_density/load_nz_fits/load_nz_fits.py
nz_file = %(DATAFILE)s
data_sets = source_des
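; (Note: this DES n(z) section is not listed in the pipeline modules above;
; only the KiDS-specific modules are run in this kids-only example.)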

; This module allows for correlated priors for KiDS given a covariance matrix
[correlated_dz_priors]
file = number_density/correlated_priors/correlated_priors.py
uncorrelated_parameters = nofz_shifts_kids/uncorr_bias_1 nofz_shifts_kids/uncorr_bias_2
nofz_shifts_kids/uncorr_bias_3 nofz_shifts_kids/uncorr_bias_4
nofz_shifts_kids/uncorr_bias_5
output_parameters = nofz_shifts_kids/bias_1 nofz_shifts_kids/bias_2
nofz_shifts_kids/bias_3 nofz_shifts_kids/bias_4
nofz_shifts_kids/bias_5
covariance = likelihood/des-y3_and_kids-1000/nofz_covariance/SOM_cov_multiplied.asc
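
The standard way to impose correlated priors like this is to sample uncorrelated parameters and rotate them with a Cholesky factor of the covariance matrix. A minimal sketch of that idea (the module's actual implementation may differ in detail; the file path is simply the one quoted in the covariance option above):

import numpy as np

# Covariance of the five KiDS delta-z nuisance parameters
cov = np.loadtxt("likelihood/des-y3_and_kids-1000/nofz_covariance/SOM_cov_multiplied.asc")
L = np.linalg.cholesky(cov)

# Uncorrelated values as sampled (starting values from the values file shown earlier)
uncorr = np.array([0.000, -0.181, -1.110, -1.395, 1.265])

# Correlated shifts, one per tomographic bin: nofz_shifts_kids/bias_1 ... bias_5
bias = L @ uncorr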

[fits_nz_kids]
file = number_density/load_nz_fits/load_nz_fits.py
nz_file = %(DATAFILE)s
data_sets = source_kids


[photoz_bias_kids]
file = number_density/photoz_bias/photoz_bias.py
mode = additive
sample = nz_source_kids
bias_section = nofz_shifts_kids
interpolation = cubic
output_deltaz_section_name = delta_z_out_kids
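
In additive mode the photo-z bias module shifts each n(z) along the redshift axis by the corresponding bias parameter before the C_ell calculation. A rough sketch of the operation (linear interpolation here for brevity; the sign convention and the cubic interpolation follow the module itself):

import numpy as np

def shift_nz(z, nz, delta_z):
    # n_new(z) = n_old(z - delta_z); points shifted outside the original range are set to zero
    return np.interp(z - delta_z, z, nz, left=0.0, right=0.0)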

; Here we are using the TATT modules for our IA model
; to allow for flexibility in extensions to our fiducial analyses.
; The Hybrid set-up uses NLA-z, with the non-NLA parameters
; in the TATT model set to zero in the values file.

[fast_pt]
file = structure/fast_pt/fast_pt_interface.py
do_ia = T
k_res_fac = 0.5
verbose = F


[IA]
file = intrinsic_alignments/tatt/tatt_interface.py
sub_lowk=F
do_galaxy_intrinsic=F
ia_model=tatt

; As DES and KiDS are assumed to be uncorrelated, their likelihoods can be
; calculated independently; here we only compute the KiDS-1000 part.


[add_intrinsic]
file=shear/add_intrinsic/add_intrinsic.py
shear-shear=T
position-shear=F
perbin=F


[pk_to_cl_kids]
file = structure/projection/project_2d.py
ell_min_logspaced = 0.1
ell_max_logspaced = 5.0e5
n_ell_logspaced = 100
shear-shear = source_kids-source_kids
shear-intrinsic = source_kids-source_kids
intrinsic-intrinsic = source_kids-source_kids
intrinsicb-intrinsicb = source_kids-source_kids
verbose = F
get_kernel_peaks = F
sig_over_dchi = 20.
shear_kernel_dchi = 10.
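
For reference, projections of this kind are usually evaluated with the Limber approximation, integrating the lensing/IA kernels against the 3D matter power spectrum. A schematic sketch (not the project_2d code itself; pk_of_k_chi stands in for an interpolated P(k, chi)):

import numpy as np

def limber_cl(ell, chi, q_a, q_b, pk_of_k_chi):
    # C_ell ~ integral dchi q_a(chi) q_b(chi) / chi^2 * P(k=(ell+0.5)/chi, chi)
    k = (ell + 0.5) / chi
    integrand = q_a * q_b / chi**2 * pk_of_k_chi(k, chi)
    return np.trapz(integrand, chi)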

;This calculates COSEBIs from Cls
[cosebis]
file=shear/cosebis/cl_to_cosebis/cl_to_cosebis_interface.so
theta_min = 2.0 ; default=0.5
theta_max = 300.0 ; default=300
n_max = 5 ; default=5
input_section_name = shear_cl
output_section_name = cosebis
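
COSEBIs modes E_n are weighted integrals of the E-mode shear power spectrum, E_n = (1/2pi) * integral dl l W_n(l) C_E(l), where the filters W_n depend on theta_min, theta_max and n. A schematic sketch of that integral (W_n and C_E are left as given functions; this is not the module's actual code):

import numpy as np

def cosebis_En(ell, cl_E, Wn):
    # E_n = (1 / 2pi) * integral dl l W_n(l) C_E(l)
    return np.trapz(ell * Wn(ell) * cl_E(ell), ell) / (2.0 * np.pi)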

; This is a simplified version of the KiDS-1000 2pt_likelihood module;
; it is not all-singing, all-dancing because we only need to
; analyse COSEBIs in this example
[cosebis_like]
file = likelihood/2pt/cosebis/simple_like.py
data_set = En n
data_file = %(DATAFILE)s
like_name = cosebis
; If you're interested in using KiDS-1000 data products other than COSEBIs,
; the 2pt likelihood that KiDS-1000 used for their 3x2pt analysis can be found
; in the KiDS-1000 KCAP repo:
;https://github.com/KiDS-WL/kcap/blob/master/modules/scale_cuts/scale_cuts.py
;https://github.com/KiDS-WL/kcap/blob/master/utils/mini_like.py
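
A "simple" likelihood of this kind typically reads the E_n data vector and covariance from the FITS file, computes the theory E_n for the sampled parameters, and evaluates a Gaussian likelihood. A minimal sketch of the core calculation (not the simple_like.py code itself):

import numpy as np

def gaussian_loglike(data, theory, cov):
    # ln L = -0.5 * (d - t)^T C^{-1} (d - t), up to a constant normalisation
    r = data - theory
    return -0.5 * r @ np.linalg.solve(cov, r)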


; Running "cosmosis examples/kids-1000.ini" on the command line will run the quick test sampler below.
[runtime]
sampler = test
verbosity = standard
; saving the output to output/des-y3_and_kids-1000
[test]
save_dir=output/%(RUN_NAME)s
fatal_errors=T

; Running "cosmosis examples/kids-1000.ini runtime.sampler='polychord'" will run polychord instead.
; These are the settings used for the Hybrid DES+KiDS analysis, but be warned: it is CPU intensive.
[polychord]
base_dir = chain_checkpoints
polychord_outfile_root=poly_%(RUN_NAME)s
resume=F
feedback = 3
fast_fraction = 0.1
;Minimum settings for a "good enough" quick test
;live_points = 250
;num_repeats = 30
;tolerance = 0.1
;Settings for high quality paper runs
live_points = 500
num_repeats=60
tolerance=0.01
boost_posteriors=10.0

[output]
filename= output/kids-1000.txt
format=text
privacy=F
5 changes: 5 additions & 0 deletions tests/test_cosmosis_standard_library.py
@@ -99,3 +99,8 @@ def test_log_w_example():
def test_des_kids(capsys):
run_cosmosis("examples/des-y3_and_kids-1000.ini")
check_likelihood(capsys, "-199.40", "-199.41")


def test_kids(capsys):
run_cosmosis("examples/kids-1000.ini")
check_likelihood(capsys, "-47.6")
