173 changes: 173 additions & 0 deletions pipeline/compute_covariance.py
@@ -0,0 +1,173 @@
import argparse
from soopercool import BBmeta
import numpy as np
from soopercool import cov_utils as cov
from soopercool import map_utils as mu
import pymaster as nmt
from soopercool import utils as su


def main(args):
"""
    This script computes analytic estimates of the covariance matrices
given a parameter file.
"""
meta = BBmeta(args.globals)

out_dir = meta.output_directory

# Define output directory
cov_dir = f"{out_dir}/covariances"
meta.make_dir(cov_dir)
ps_dir = f"{out_dir}/cells"

# Load binning
binning = np.load(meta.binning_file)
nmt_bins = nmt.NmtBin.from_edges(binning["bin_low"],
binning["bin_high"] + 1)
n_bins = nmt_bins.get_n_bands()

# Define useful variables
field_pairs = [f"{m1}{m2}" for m1 in "TEB" for m2 in "TEB"]

cross_ps_names = meta.get_ps_names_list(type="all", coadd=True)

# Load beams
beams = {}
for map_set in meta.map_sets_list:
beam_dir = meta.beam_dir_from_map_set(map_set)
beam_file = meta.beam_file_from_map_set(map_set)

_, bl = su.read_beam_from_file(
f"{beam_dir}/{beam_file}",
lmax=nmt_bins.lmax
)
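        # Bin the beam onto the bandpowers; the binned beams are used
        # below to deconvolve the covariance blocks.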
bb = nmt_bins.bin_cell(bl)
beams[map_set] = bb

    # Load the signal power spectra (coadded cross-bundle pseudo-C_ells
    # produced by prepare_cov_inputs.py)
signal = {}
for ms1, ms2 in cross_ps_names:
cl = np.load(
f"{ps_dir}/weighted_cross_pcls_{ms1}_x_{ms2}.npz"
)
for fp in field_pairs:
signal[ms1, ms2, fp] = cl[fp]
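        # Fill in the swapped map-set ordering: symmetric field pairs
        # (TT, EE, BB) copy over directly, while mixed pairs map to
        # their reverse, e.g. C_ell^TE(ms2, ms1) = C_ell^ET(ms1, ms2).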
if ms1 != ms2:
for fp in field_pairs:
if fp == fp[::-1]:
signal[ms2, ms1, fp] = signal[ms1, ms2, fp].copy()
else:
signal[ms2, ms1, fp] = signal[ms1, ms2, fp[::-1]].copy()

noise_interp = {}
for ms1, ms2 in cross_ps_names:
cl = np.load(
f"{ps_dir}/weighted_noise_pcls_{ms1}_x_{ms2}.npz"
)
for fp in field_pairs:
noise_interp[ms1, ms2, fp] = cl[fp]
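        # Symmetrize in the same way as the signal spectra.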
if ms1 != ms2:
for fp in field_pairs:
if fp == fp[::-1]:
noise_interp[ms2, ms1, fp] = (
noise_interp[ms1, ms2, fp].copy()
)
else:
noise_interp[ms2, ms1, fp] = (
noise_interp[ms1, ms2, fp[::-1]].copy()
)

    # List all covariance blocks to compute (upper triangle only,
    # since the full matrix is symmetric under block exchange)
cov_names = []
for i, (ms1, ms2) in enumerate(cross_ps_names):
for j, (ms3, ms4) in enumerate(cross_ps_names):
if i > j:
continue
cov_names.append((ms1, ms2, ms3, ms4))

# Load analysis mask
mask = mu.read_map(
meta.masks["analysis_mask"],
pix_type=meta.pix_type,
car_template=meta.car_template
)
wcs = mask.wcs if meta.pix_type == "car" else None

    # Compute or load the NaMaster mode-coupling and covariance workspaces
print("Loading workspaces ...")
wsp, cwsp = cov.compute_covariance_workspace(
mask,
nmt_bins,
wcs=wcs,
purify_b=meta.pure_B,
save_dir=cov_dir
)
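    # wsp: mode-coupling workspace; cwsp: covariance workspace.
    # Both are passed to compute_covariance_block below.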

# Load transfer functions
tf_settings = meta.transfer_settings
tf_dir = tf_settings["transfer_directory"]
tf_dict = {}
for ms1, ms2 in cross_ps_names:
ftag1 = meta.filtering_tag_from_map_set(ms1)
ftag2 = meta.filtering_tag_from_map_set(ms2)
tf = np.load(f"{tf_dir}/transfer_function_{ftag1}_x_{ftag2}.npz")
tf_dict[ms1, ms2] = tf

# Load number of bundles
n_bundles = {}
for map_set in meta.map_sets:
n_bundles[map_set] = meta.n_bundles_from_map_set(map_set)

for ms1, ms2, ms3, ms4 in cov_names:

print(f"Computing covariance for {ms1} x {ms2}, {ms3} x {ms4}")
        # Compute each covariance block
cov_dict = cov.compute_covariance_block(
ms1, ms2, ms3, ms4,
meta.exp_tag_from_map_set(ms1),
meta.exp_tag_from_map_set(ms2),
meta.exp_tag_from_map_set(ms3),
meta.exp_tag_from_map_set(ms4),
wsp, cwsp,
signal, noise_interp,
nmt_bins,
n_bundles=n_bundles
)
beam12 = beams[ms1] * beams[ms2]
beam34 = beams[ms3] * beams[ms4]

        # Normalize by the transfer functions and beams
for fp1 in field_pairs:
for fp2 in field_pairs:
tf1 = tf_dict[ms1, ms2][f"{fp1}_to_{fp1}"]
tf2 = tf_dict[ms3, ms4][f"{fp2}_to_{fp2}"]
cov_dict[fp1, fp2] /= tf1[:, None] * tf2[None, :]
cov_dict[fp1, fp2] /= beam12[:, None] * beam34[None, :]

full_cov = np.zeros((n_bins*len(field_pairs), n_bins*len(field_pairs)))
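        # Assemble the full covariance: 9x9 blocks of n_bins x n_bins,
        # ordered like field_pairs (TT, TE, TB, ET, ...).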
for i, fp1 in enumerate(field_pairs):
for j, fp2 in enumerate(field_pairs):
full_cov[
i*n_bins:(i+1)*n_bins,
j*n_bins:(j+1)*n_bins
] = cov_dict[fp1, fp2]
# Save block covariance matrix
np.savez(
f"{cov_dir}/analytic_cov_{ms1}_x_{ms2}_{ms3}_x_{ms4}.npz",
cov=full_cov,
lb=nmt_bins.get_effective_ells()
)
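        # The saved file can be read back as, e.g. (illustrative):
        #   d = np.load(f"{cov_dir}/analytic_cov_"
        #               f"{ms1}_x_{ms2}_{ms3}_x_{ms4}.npz")
        #   full_cov, lb = d["cov"], d["lb"]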


if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Analytic covariance matrix computation."
)
parser.add_argument("--globals", type=str, help="Path to the yaml file")

args = parser.parse_args()

main(args)
17 changes: 14 additions & 3 deletions pipeline/compute_pseudo_cells.py
@@ -21,7 +21,8 @@ def main(args):
BBmeta.make_dir(cells_dir)

    mask = mu.read_map(meta.masks["analysis_mask"],
-                      pix_type=meta.pix_type)
+                      pix_type=meta.pix_type,
+                      car_template=meta.car_template)

binning = np.load(meta.binning_file)
nmt_bins = nmt.NmtBin.from_edges(binning["bin_low"],
@@ -69,6 +70,7 @@ def main(args):

m = mu.read_map(f"{map_dir}/{map_file}", pix_type=meta.pix_type,
fields_hp=[0, 1, 2],
car_template=meta.car_template,
convert_K_to_muK=True)
if do_plots:
for i, f in enumerate(["T", "Q", "U"]):
@@ -100,12 +102,21 @@ def main(args):
coadd=False):
map_set1, id_bundle1 = map_name1.split("__")
map_set2, id_bundle2 = map_name2.split("__")
-        pcls = pu.get_coupled_pseudo_cls(
+        pcls, pcls_unbinned = pu.get_coupled_pseudo_cls(
            fields[map_set1, id_bundle1],
            fields[map_set2, id_bundle2],
-            nmt_bins
+            nmt_bins,
+            return_unbinned=True
        )

weighted_pcls = pu.get_weighted_pcls(
pcls_unbinned,
mask,
pix_type=meta.pix_type
)
np.savez(f"{cells_dir}/weighted_pcls_{map_name1}_x_{map_name2}.npz",
**weighted_pcls, ell=np.arange(len(weighted_pcls["TT"])))
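        # Per-multipole weighted pseudo-C_ells, consumed downstream by
        # prepare_cov_inputs.py to build the covariance inputs.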

decoupled_pcls = pu.decouple_pseudo_cls(
pcls, inv_couplings_beamed[map_set1, map_set2]
)
165 changes: 165 additions & 0 deletions pipeline/prepare_cov_inputs.py
@@ -0,0 +1,165 @@
from soopercool import BBmeta
from itertools import product
import numpy as np
import argparse
import pymaster as nmt
from soopercool import utils as su
from scipy.interpolate import interp1d


def main(args):
"""
    The main purpose of this script is to prepare the signal and noise
    power spectra used in the analytic covariance prescription. These
    are estimated from the data, using coupled cross-bundle power
    spectra.
"""
meta = BBmeta(args.globals)

out_dir = meta.output_directory
cells_dir = f"{out_dir}/cells"

binning = np.load(meta.binning_file)
nmt_bins = nmt.NmtBin.from_edges(binning["bin_low"],
binning["bin_high"] + 1)
lb = nmt_bins.get_effective_ells()
ell = np.arange(nmt_bins.lmax + 1)
field_pairs = [m1+m2 for m1, m2 in product("TEB", repeat=2)]

ps_names = {
"cross": meta.get_ps_names_list(type="cross", coadd=False),
"auto": meta.get_ps_names_list(type="auto", coadd=False)
}

cross_split_list = meta.get_ps_names_list(type="all", coadd=False)
cross_map_set_list = meta.get_ps_names_list(type="all", coadd=True)

# Load split C_ells

    # Initialize the output dictionary: per-bundle-pair spectra are
    # accumulated in lists and averaged below into coadded spectra
cells_coadd = {
"cross": {
(ms1, ms2): {
fp: [] for fp in field_pairs
} for ms1, ms2 in cross_map_set_list
},
"auto": {
(ms1, ms2): {
fp: [] for fp in field_pairs
} for ms1, ms2 in cross_map_set_list
}
}

# Load beams
beams = {}
for map_set in meta.map_sets_list:
beam_dir = meta.beam_dir_from_map_set(map_set)
beam_file = meta.beam_file_from_map_set(map_set)

_, bl = su.read_beam_from_file(
f"{beam_dir}/{beam_file}",
lmax=nmt_bins.lmax
)
beams[map_set] = bl

    # Loop over all bundle (split) pairs
for map_name1, map_name2 in cross_split_list:

map_set1, _ = map_name1.split("__")
map_set2, _ = map_name2.split("__")

cells_dict = np.load(
f"{cells_dir}/weighted_pcls_{map_name1}_x_{map_name2}.npz"
)

        if (map_name1, map_name2) in ps_names["cross"]:
            ps_type = "cross"
        elif (map_name1, map_name2) in ps_names["auto"]:
            ps_type = "auto"

        for field_pair in field_pairs:

            cells_coadd[ps_type][map_set1, map_set2][field_pair] += [
                cells_dict[field_pair]
            ]

# Average the cross-split power spectra
cells_coadd["noise"] = {}
for map_set1, map_set2 in cross_map_set_list:
cells_coadd["noise"][(map_set1, map_set2)] = {}
for field_pair in field_pairs:
            for ps_type in ["cross", "auto"]:
                if len(cells_coadd[ps_type][map_set1, map_set2][field_pair]) != 0:  # noqa
                    cells_coadd[ps_type][map_set1, map_set2][field_pair] = \
                        np.mean(
                            cells_coadd[ps_type][map_set1, map_set2][field_pair],  # noqa
                            axis=0
                        )

            if len(cells_coadd["auto"][map_set1, map_set2][field_pair]) == 0:
                # No auto spectra for this pair: set auto and noise to zero
                zeros = np.zeros_like(
                    cells_coadd["cross"][map_set1, map_set2][field_pair]
                )
                cells_coadd["noise"][map_set1, map_set2][field_pair] = zeros
                cells_coadd["auto"][map_set1, map_set2][field_pair] = \
                    zeros.copy()
            else:
                # Noise estimate: auto-bundle minus cross-bundle coadd
                cells_coadd["noise"][map_set1, map_set2][field_pair] = \
                    cells_coadd["auto"][map_set1, map_set2][field_pair] - \
                    cells_coadd["cross"][map_set1, map_set2][field_pair]

        # First attempt at introducing filtering corrections.
        # This is a handwavy approach that interpolates the
        # per-multipole transfer function.
tf_settings = meta.transfer_settings
tf_dir = tf_settings["transfer_directory"]
ftag1 = meta.filtering_tag_from_map_set(map_set1)
ftag2 = meta.filtering_tag_from_map_set(map_set2)
tf = np.load(f"{tf_dir}/transfer_function_{ftag1}_x_{ftag2}.npz")
        tf_interp = {}
        for fp in field_pairs:
            tfint = interp1d(
                lb, tf[f"{fp}_to_{fp}"],
                fill_value="extrapolate"
            )
            tfint = tfint(ell)
            # Regularize the extrapolation: clamp the TF above ell = 600
            # and replace unphysical negative values at low ell by the
            # first positive value above them.
            tfint[ell > 600] = tfint[600]
            if np.any(tfint < 0):
                last_neg = ell[tfint < 0][-1]
                tfint[ell <= last_neg] = tfint[last_neg + 1]
            tf_interp[fp] = tfint

        for ps_type in ["cross", "noise"]:
            cells_to_save = {
                # Ansatz for the filtering correction, to be improved.
                # This is a temporary placeholder; the TF exponent
                # still needs to be fitted.
                fp: cells_coadd[ps_type][map_set1, map_set2][fp]
                / tf_interp[fp] * tf_interp[fp] ** 0.75  # i.e. / tf ** 0.25
                for fp in field_pairs
            }
            # Note that we don't deconvolve the beam above, to avoid
            # large numerical instabilities when deconvolving the MCMs.
            if ps_type == "cross":
                # Null the parity-odd signal spectra
                for fp in ["TB", "EB", "BE", "BT"]:
                    cells_to_save[fp] = np.zeros_like(cells_to_save[fp])
            elif ps_type == "noise":
                # Null mixed-field noise spectra (noise is assumed
                # uncorrelated between T, E and B)
                for fp in field_pairs:
                    if fp != fp[::-1]:
                        cells_to_save[fp] = np.zeros_like(cells_to_save[fp])
            np.savez(
                f"{cells_dir}/weighted_{ps_type}_pcls_{map_set1}_x_{map_set2}.npz",  # noqa
                ell=np.arange(len(cells_to_save["TT"])),
                **cells_to_save
            )


if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Prepare covariance inputs for SOOPERCOOL pipeline"
)
parser.add_argument(
"--globals",
type=str,
required=True,
help="Path to the globals file",
)
parser.add_argument(
"--verbose",
action="store_true",
help="Enable verbose output",
)
args = parser.parse_args()
main(args)