From 1fdd6443528435a713060b2c7bd79f340e3a5985 Mon Sep 17 00:00:00 2001
From: martinkilbinger
Date: Wed, 12 Nov 2025 11:55:28 +0100
Subject: [PATCH 1/9] added script to plot r-band magnitude histograms for
 different selections; cleaned up plotting functions

---
 .../cosmo_val/catalog_paper_plot/hist_mag.py  | 384 ++++++++++++++++++
 src/sp_validation/__init__.py                 |  22 +-
 src/sp_validation/basic.py                    |   2 -
 src/sp_validation/plots.py                    |  83 ++++
 src/sp_validation/run_joint_cat.py            | 102 +----
 5 files changed, 490 insertions(+), 103 deletions(-)
 create mode 100644 notebooks/cosmo_val/catalog_paper_plot/hist_mag.py

diff --git a/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py b/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py
new file mode 100644
index 00000000..3ee2c88a
--- /dev/null
+++ b/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py
@@ -0,0 +1,384 @@
+# %%
+# hist_mag.py
+#
+# Plot magnitude histograms for various cuts and selection criteria
+
+# %%
+import matplotlib.pyplot as plt
+
+# enable autoreload for interactive sessions
+from IPython import get_ipython
+ipython = get_ipython()
+if ipython is not None:
+    ipython.run_line_magic("matplotlib", "inline")
+    ipython.run_line_magic("reload_ext", "autoreload")
+    ipython.run_line_magic("autoreload", "2")
+    ipython.run_line_magic("reload_ext", "log_cell_time")
+
+# %%
+import sys
+import os
+import re
+import numpy as np
+from astropy.io import fits
+from io import StringIO
+
+from sp_validation import run_joint_cat as sp_joint
+from sp_validation import util
+from sp_validation.basic import metacal
+from sp_validation import calibration
+import sp_validation.cat as cat
+
+# %%
+# Initialize calibration class instance
+obj = sp_joint.CalibrateCat()
+
+config = obj.read_config_set_params("config_mask.yaml")
+
+hist_data_path = "magnitude_histograms_data.npz"
+
+test_only = False
+
+# %%
+def get_data(obj, test_only=False):
+
+
+    # Get data. Set load_into_memory to False for very large files
+    dat, dat_ext = obj.read_cat(load_into_memory=False)
+
+    if test_only:
+        n_max = 1_000_000
+        print(f"MKDEBUG testing only first {n_max} objects")
+        dat = dat[:n_max]
+        dat_ext = dat_ext[:n_max]
+
+    return dat, dat_ext
+
+
+def read_hist_data(hist_data_path):
+    """
+    Read histogram data from npz file.
+
+    Parameters
+    ----------
+    hist_data_path : str
+        Path to the npz file containing histogram data
+
+    Returns
+    -------
+    hist_data : dict
+        Dictionary with keys for each selection criterion containing:
+        - 'counts': histogram counts
+        - 'bins': bin edges
+        - 'label': label for the histogram
+    """
+    loaded = np.load(hist_data_path, allow_pickle=True)
+    hist_data = {}
+
+    for key in loaded.files:
+        data = loaded[key]
+        hist_data[key] = {
+            'counts': data[0],
+            'bins': data[1],
+            'label': str(data[2])
+        }
+
+    return hist_data
+
+
+
+def get_mask(masks, col_name):
+
+    # Get mask from masks with col_name = col_name
+    for mask in masks:
+        if mask._col_name == col_name:
+            return mask
+
+
+def compute_hist(masks, col_name, mask_cumul, mag, bins):
+    """
+    Compute histogram for given mask and magnitude data.
+ + Parameters + ---------- + masks : list + List of mask objects + col_name : str + Column name to identify the mask + mask_cumul : array or None + Cumulative mask array + mag : array + Magnitude data + bins : array + Bin edges for histogram + + Returns + ------- + counts : array + Histogram counts + bins : array + Bin edges + label : str + Label for the histogram + n_valid : int + Number of valid data points after masking + mask_cumul : array + Updated cumulative mask array + """ + this_mask = get_mask(masks, col_name) + + # First time: + if mask_cumul is None: + print("Init mask_cumul") + mask_cumul = this_mask._mask + else: + mask_cumul &= this_mask._mask + + # Data values + my_mag = mag[mask_cumul] + + # Data count + n_valid = np.sum(mask_cumul) + + # Label + string_buffer = StringIO() + this_mask.print_condition(string_buffer) + label = string_buffer.getvalue().strip() + if label == "": + label = col_name + + counts, bin_edges = np.histogram(my_mag, bins=bins) + + return counts, bin_edges, label, n_valid, mask_cumul + + +def plot_hist(counts, bins, label, alpha=1, ax=None, color=None): + """ + Plot histogram from counts and bins using bar plot. + + Parameters + ---------- + counts : array + Histogram counts + bins : array + Bin edges + label : str + Label for the histogram + alpha : float + Transparency for plot + ax : matplotlib axis + Axis to plot on + color : str or None + Color for the histogram + + """ + bin_centers = 0.5 * (bins[:-1] + bins[1:]) + ax.bar( + bin_centers, + counts, + width=np.diff(bins), + alpha=alpha, + label=label, + align='center', + color=color + ) + +# %% + +if os.path.exists(hist_data_path): + print(f"Histogram data file {hist_data_path} found.") + print("Reading and plotting.") + + dat = dat_ext = None + hist_data = read_hist_data(hist_data_path) + +else: + print(f"Histogram data file {hist_data_path} not found.") + print("Reading UNIONS cat and computing.") + + dat, dat_ext = get_data(obj, test_only=test_only) + hist_data = None + + +# ## Masking +# %% +# Get all masks, with or without dat, dat_ext +masks, labels = sp_joint.get_masks_from_config( + config, + dat, + dat_ext, + verbose=obj._params["verbose"], +) + +# %% +# List of basic masks to apply to all cases +masks_labels_basic = [ + "FLAGS", + "overlap", + "IMAFLAGS_ISO", + "mag", + "NGMIX_MOM_FAIL", + "NGMIX_ELL_PSFo_NOSHEAR_0", + "NGMIX_ELL_PSFo_NOSHEAR_1", + "4_Stars", + "8_Manual", + "64_r", + "1024_Maximask", +] + +# %% +masks_basic = [] +for mask in masks: + if mask._col_name in masks_labels_basic: + masks_basic.append(mask) + +if dat is not None: + print("Creating combined basic mask") + masks_basic_combined = sp_joint.Mask.from_list( + masks_basic, + label="combined", + verbose=obj._params["verbose"], + ) +else: + # Create dummy combined mask (for plot label) + print("Creating dummy combined basic mask") + masks_basic_combined = sp_joint.Mask( + "combined", + "combined", + kind="none", + ) + +masks.append(masks_basic_combined) + +# %% +# Metacal mask (cuts) +mask_tmp = sp_joint.Mask( + "metacal", + "metacal", + kind="none", +) + +# %% +if dat is not None: + cm = config["metacal"] + gal_metacal = metacal( + dat, + masks_basic_combined._mask, + snr_min=cm["gal_snr_min"], + snr_max=cm["gal_snr_max"], + rel_size_min=cm["gal_rel_size_min"], + rel_size_max=cm["gal_rel_size_max"], + size_corr_ell=cm["gal_size_corr_ell"], + sigma_eps=cm["sigma_eps_prior"], + global_R_weight=cm["global_R_weight"], + col_2d=False, + verbose=True, + ) + + g_corr_mc, g_uncorr, w, mask_metacal, c, c_err = ( + 
calibration.get_calibrated_m_c(gal_metacal) + ) + + # Convert index array to boolean mask + mask_metacal_bool = np.zeros(len(dat), dtype=bool) + mask_metacal_bool[mask_metacal] = True + + mask_tmp._mask = mask_metacal_bool + +masks.append(mask_tmp) + + +# %% +# Plot magnitude histograms for various selection criteria + +# Define magnitude bins +mag_bins = np.arange(15, 30, 0.05) +mag_centers = 0.5 * (mag_bins[:-1] + mag_bins[1:]) + + +# %% +# Create figure with multiple subplots +figsize = 10 +alpha = 0.5 + +col_names = ["combined", "N_EPOCH", "npoint3", "metacal"] + +# Define explicit colors for each histogram +colors = ['C0', 'C1', 'C2', 'C3'] # Use matplotlib default color cycle +color_map = dict(zip(col_names, colors)) + + +# %% +# If hist_data not loaded, compute it +if hist_data is None: + hist_data = {} + +if dat is not None: + # Get magnitude column + mag = dat['mag'] + + mask_cumul = None + for col_name in col_names: + counts, bins, label, n_valid, mask_cumul = compute_hist( + masks=masks, + col_name=col_name, + mask_cumul=mask_cumul, + mag=mag, + bins=mag_bins + ) + hist_data[col_name] = { + 'counts': counts, + 'bins': bins, + 'label': label, + 'n_valid': n_valid, + } + +# Plot histogram data +plt.figure() +fig, (ax) = plt.subplots(1, 1, figsize=(figsize, figsize)) + +for col_name in col_names: + if col_name in hist_data: + data = hist_data[col_name] + plot_hist( + data['counts'], + data['bins'], + data['label'], + alpha=alpha, + ax=ax, + color=color_map[col_name] + ) + #print(f"{col_name}: n_valid = {data['n_valid']}") + +ax.set_xlabel('$r$') +ax.set_ylabel('Number') +ax.set_xlim(17.5, 26.5) +ax.legend() + +plt.tight_layout() +plt.savefig('magnitude_histograms.png', dpi=150, bbox_inches='tight') + +# Save histogram data to file (only if we computed it) +if dat is not None: + np.savez( + hist_data_path, + **{ + key: np.array( + [ + val['counts'], + val['bins'], + val['label'], + val["n_valid"], + ], + dtype=object + ) + for key, val in hist_data.items() + } + ) + print(f"Histogram data saved to {hist_data_path}") + +# %% +if dat is not None: + obj.close_hd5() +# %% diff --git a/src/sp_validation/__init__.py b/src/sp_validation/__init__.py index b31411d6..62e2307b 100644 --- a/src/sp_validation/__init__.py +++ b/src/sp_validation/__init__.py @@ -16,14 +16,14 @@ ] # Explicit imports to avoid circular issues -from . import util -from . import io -from . import basic -from . import galaxy -from . import cosmology -from . import calibration -from . import cat -from . import plot_style -from . import plots -from . import run_joint_cat -from . import survey \ No newline at end of file +#from . import util +#from . import io +#from . import basic +#from . import galaxy +#from . import cosmology +#from . import calibration +#from . import cat +#from . import plot_style +#from . import plots +#from . import run_joint_cat +#from . 
import survey
diff --git a/src/sp_validation/basic.py b/src/sp_validation/basic.py
index 9c0972ee..37200047 100644
--- a/src/sp_validation/basic.py
+++ b/src/sp_validation/basic.py
@@ -15,8 +15,6 @@
 from scipy.spatial import cKDTree
 from scipy.special import gamma
 
-import matplotlib.pyplot as plt
-
 from tqdm import tqdm
 import operator as op
 import itertools as itools
diff --git a/src/sp_validation/plots.py b/src/sp_validation/plots.py
index d9c5d0e5..d565048c 100644
--- a/src/sp_validation/plots.py
+++ b/src/sp_validation/plots.py
@@ -669,3 +669,86 @@ def hsp_map_logical_or(maps, verbose=False):
         )
 
     return map_comb
+
+
+def plot_area_mask(ra, dec, zoom, mask=None):
+    """Plot Area Mask.
+
+    Create sky plot of objects.
+
+    Parameters
+    ----------
+    ra : array
+        R.A. coordinates in degrees
+    dec : array
+        Dec. coordinates in degrees
+    zoom : array of bool
+        selection of objects within the zoom-in region
+    mask : array of bool, optional
+        object selection; default is ``None`` (all objects)
+
+    """
+    if mask is None:
+        # Select all objects; the mask needs to be boolean for indexing
+        mask = np.ones_like(ra, dtype=bool)
+
+    fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(30, 15))
+    axes[0].hexbin(ra[mask], dec[mask], gridsize=100)
+    axes[1].hexbin(ra[mask & zoom], dec[mask & zoom], gridsize=200)
+    for idx in (0, 1):
+        axes[idx].set_xlabel("R.A. [deg]")
+        axes[idx].set_ylabel("Dec [deg]")
+
+
+def sky_plots(dat, masks, labels, zoom_ra, zoom_dec, mask_combined=None):
+    """Sky Plots.
+
+    Plot sky regions with different masks.
+
+    Parameters
+    ----------
+    dat : array-like
+        catalogue data with "RA" and "Dec" columns
+    masks : list
+        masks to be applied
+    labels : dict
+        labels for masks
+    zoom_ra : list
+        min and max R.A. for zoom-in plot
+    zoom_dec : list
+        min and max Dec. for zoom-in plot
+    mask_combined : Mask, optional
+        combined mask; not plotted if ``None`` (default)
+
+    """
+    ra = dat["RA"][:]
+    dec = dat["Dec"][:]
+
+    zoom_ra = (zoom_ra[0] < dat["RA"]) & (dat["RA"] < zoom_ra[1])
+    zoom_dec = (zoom_dec[0] < dat["Dec"]) & (dat["Dec"] < zoom_dec[1])
+    zoom = zoom_ra & zoom_dec
+
+    # No mask
+    plot_area_mask(ra, dec, zoom)
+
+    # SExtractor and SP flags
+    m_flags = masks[labels["FLAGS"]]._mask & masks[labels["IMAFLAGS_ISO"]]._mask
+    plot_area_mask(ra, dec, zoom, mask=m_flags)
+
+    # Overlap regions
+    m_over = masks[labels["overlap"]]._mask & m_flags
+    plot_area_mask(ra, dec, zoom, mask=m_over)
+
+    # Coverage mask
+    m_point = masks[labels["npoint3"]]._mask & m_over
+    plot_area_mask(ra, dec, zoom, mask=m_point)
+
+    # Maximask
+    m_maxi = masks[labels["1024_Maximask"]]._mask & m_point
+    plot_area_mask(ra, dec, zoom, mask=m_maxi)
+
+    # Combined mask, if provided by the caller
+    if mask_combined is not None:
+        m_comb = mask_combined._mask
+        plot_area_mask(ra, dec, zoom, mask=m_comb)
+
+    # Manual mask
+    m_man = m_maxi & masks[labels["8_Manual"]]._mask
+    plot_area_mask(ra, dec, zoom, mask=m_man)
+
+    # Star halo masks
+    m_halos = (
+        m_maxi
+        & masks[labels['1_Faint_star_halos']]._mask
+        & masks[labels['2_Bright_star_halos']]._mask
+    )
+    plot_area_mask(ra, dec, zoom, mask=m_halos)
diff --git a/src/sp_validation/run_joint_cat.py b/src/sp_validation/run_joint_cat.py
index 9177eeb9..c9d59657 100644
--- a/src/sp_validation/run_joint_cat.py
+++ b/src/sp_validation/run_joint_cat.py
@@ -1181,89 +1181,6 @@ def run(self):
     """
 
 
-def sky_plots(dat, masks, labels, zoom_ra, zoom_dec):
-    """Sky Plots.
-
-    Plot sky regions with different masks.
-
-    Parameters
-    ----------
-    masks : list
-        masks to be applied
-    labels : dict
-        labels for masks
-    zoom_ra : list
-        min and max R.A. for zoom-in plot
-    zoom_dec : list
-        min and max Dec. 
for zoom-in plot - - """ - ra = dat["RA"][:] - dec = dat["Dec"][:] - - zoom_ra = (room_ra[0] < dat["RA"]) & (dat["RA"] < zoom_ra[1]) - zoom_dec = (zoom_dec[0] < dat["Dec"]) & (dat["Dec"] < zoom_dec[1]) - zoom = zoom_ra & zoom_dec - - # No mask - plot_area_mask(ra, dec, zoom) - - # SExtractor and SP flags - m_flags = masks[labels["FLAGS"]]._mask & masks[labels["IMAFLAGS_ISO"]]._mask - plot_area_mask(ra, dec, zoom, mask=m_flags) - - # Overlap regions - m_over = masks[labels["overlap"]]._mask & m_flags - plot_area_mask(ra, dec, zoom, mask=m_over) - - # Coverage mask - m_point = masks[labels["npoint3"]]._mask & m_over - plot_area_mask(ra, dec, zoom, mask=m_point) - - # Maximask - m_maxi = masks[labels["1024_Maximask"]]._mask & m_point - plot_area_mask(ra, dec, zoom, mask=m_maxi) - - m_comb = mask_combined._mask - plot_area_mask(ra, dec, zoom, mask=m_comb) - - m_man = m_maxi & masks[labels["8_Manual"]]._mask - plot_area_mask(ra, dec, zoom, mask=m_man) - - m_halos = ( - m_maxi - & masks[labels['1_Faint_star_halos']]._mask - & masks[labels['2_Bright_star_halos']]._mask - ) - plot_area_mask(ra, dec, zoom, mask=m_halos) - - -def plot_area_mask(ra, dec, zoom, mask=None): - """Plot Area Mask. - - Create sky plot of objects. - - Parameters - ---------- - ra : list - R.A. coordinates - dec : list - Dec. coordinates - zoom : TBD - mask: TBD, optional - - """ - if mask is None: - mask == np.ones_like(ra) - - fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(30,15)) - axes[0].hexbin(ra[mask], dec[mask], gridsize=100) - axes[1].hexbin(ra[mask & zoom], dec[mask & zoom], gridsize=200) - for idx in (0, 1): - axes[idx].set_xlabel("R.A. [deg]") - axes[idx].set_ylabel("Dec [deg]") - - def confusion_matrix(mask, confidence_level=0.9): n_key = len(mask) @@ -1361,7 +1278,7 @@ class Mask(): """ - def __init__(self, col_name, label, kind="equal", value=0, dat=None, verbose=False): + def __init__(self, col_name, label, kind=None, value=0, dat=None, verbose=False): self._col_name = col_name self._label = label @@ -1439,8 +1356,7 @@ def print_strings(cls, coln, lab, num, fnum, f_out=None): print(msg) if f_out: print(msg, file=f_out) - - + def print_stats(self, num_obj, f_out=None): if self._num_ok is None: self._num_ok = sum(self._mask) @@ -1461,10 +1377,12 @@ def get_sign(self): elif self._kind =="smaller_equal": sign = "<=" return sign - - def print_summary(self, f_out): - print(f"[{self._label}]\t\t\t", file=f_out, end="") - + + def print_condition(self, f_out): + + if self._value is None: + return "" + sign = self.get_sign() if sign is not None: @@ -1472,6 +1390,10 @@ def print_summary(self, f_out): if self._kind == "range": print(f"{self._value[0]} <= {self._col_name} <= {self._value[1]}", file=f_out) + + def print_summary(self, f_out): + print(f"[{self._label}]\t\t\t", file=f_out, end="") + self.print_condition(f_out) def create_descr(self): """Create Descr. 
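
Note on the label machinery this patch introduces — a minimal usage sketch of
Mask.print_condition as hist_mag.py uses it (not part of the patch; it assumes,
as the kind="none" dummy masks in hist_mag.py suggest, that a Mask can be
constructed without catalogue data):

    from io import StringIO
    from sp_validation import run_joint_cat as sp_joint

    # Build a range-type mask and render its selection condition; hist_mag.py
    # captures this string to label each magnitude histogram.
    mask = sp_joint.Mask("mag", "mag range", kind="range", value=[15, 30])
    buf = StringIO()
    mask.print_condition(buf)
    label = buf.getvalue().strip()  # "15 <= mag <= 30"
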
From 9a39d94a83dbfe5f9fa5259fe5da3d3d0cace56c Mon Sep 17 00:00:00 2001
From: martinkilbinger
Date: Fri, 14 Nov 2025 11:29:43 +0100
Subject: [PATCH 2/9] Improved mask labels for plots

---
 config/calibration/mask_v1.X.6.yaml           |   6 +-
 .../cosmo_val/catalog_paper_plot/hist_mag.py  | 283 ++++++++++++++----
 src/sp_validation/run_joint_cat.py            |  32 +-
 3 files changed, 244 insertions(+), 77 deletions(-)

diff --git a/config/calibration/mask_v1.X.6.yaml b/config/calibration/mask_v1.X.6.yaml
index c3c7b254..9dee1944 100644
--- a/config/calibration/mask_v1.X.6.yaml
+++ b/config/calibration/mask_v1.X.6.yaml
@@ -31,13 +31,13 @@ dat:
 
   # Number of epochs
   - col_name: N_EPOCH
-    label: r"$n_{\rm epoch}$"
+    label: $n_{\rm epoch}$
     kind: greater_equal
    value: 2
 
   # Magnitude range
   - col_name: mag
-    label: mag range
+    label: r
     kind: range
     value: [15, 30]
 
@@ -86,7 +86,7 @@ dat_ext:
 
   # Rough pointing coverage
   - col_name: npoint3
-    label: r"$n_{\rm pointing}$"
+    label: $n_{\rm pointing}$
     kind: greater_equal
     value: 3
 
diff --git a/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py b/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py
index 3ee2c88a..4ced3036 100644
--- a/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py
+++ b/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py
@@ -36,14 +36,24 @@
 
 config = obj.read_config_set_params("config_mask.yaml")
 
-hist_data_path = "magnitude_histograms_data.npz"
-
-test_only = False
+test_only = True
 
 # %%
+# Functions
 def get_data(obj, test_only=False):
-
-
+    """Get Data.
+
+    Return catalogue data.
+
+    Parameters
+    ----------
+    obj : CalibrateCat instance
+        Instance of CalibrateCat class
+    test_only : bool, optional
+        If True, only load a subset of data for testing;
+        default is False.
+
+    Returns
+    -------
+    dat, dat_ext : arrays
+        main and extended catalogue data
+
+    """
     # Get data. Set load_into_memory to False for very large files
     dat, dat_ext = obj.read_cat(load_into_memory=False)
 
@@ -58,6 +68,8 @@ def get_data(obj, test_only=False):
 
 def read_hist_data(hist_data_path):
     """
+    Read Hist Data.
+
     Read histogram data from npz file.
 
     Parameters
@@ -72,6 +84,7 @@ def read_hist_data(hist_data_path):
         - 'counts': histogram counts
         - 'bins': bin edges
         - 'label': label for the histogram
+
     """
     loaded = np.load(hist_data_path, allow_pickle=True)
     hist_data = {}
@@ -87,13 +100,29 @@ def read_hist_data(hist_data_path):
     return hist_data
 
 
-
 def get_mask(masks, col_name):
-
+    """Get Mask.
+
+    Return the mask corresponding to col_name, and its index.
+
+    Parameters
+    ----------
+    masks : list
+        List of mask objects
+    col_name : str
+        Column name to identify the mask
+
+    Returns
+    -------
+    Mask
+        Mask object
+    int
+        Mask position in list
+
+    """
     # Get mask from masks with col_name = col_name
-    for mask in masks:
+    for idx, mask in enumerate(masks):
         if mask._col_name == col_name:
-            return mask
+            return mask, idx
 
 
 def compute_hist(masks, col_name, mask_cumul, mag, bins):
@@ -125,12 +154,12 @@ def compute_hist(masks, col_name, mask_cumul, mag, bins):
         Number of valid data points after masking
     mask_cumul : array
         Updated cumulative mask array
+
     """
-    this_mask = get_mask(masks, col_name)
+    this_mask, _ = get_mask(masks, col_name)
 
     # First time:
     if mask_cumul is None:
-        print("Init mask_cumul")
         mask_cumul = this_mask._mask
     else:
         mask_cumul &= this_mask._mask
@@ -143,14 +172,15 @@ def compute_hist(masks, col_name, mask_cumul, mag, bins):
 
     # Label
     string_buffer = StringIO()
-    this_mask.print_condition(string_buffer)
+    this_mask.print_condition(string_buffer, latex=True)
     label = string_buffer.getvalue().strip()
     if label == "":
         label = col_name
+    print("MKDEBUG", label)
 
     counts, bin_edges = np.histogram(my_mag, bins=bins)
 
-    return counts, bin_edges, label, n_valid, mask_cumul
+    return counts, bin_edges, rf"{label}", n_valid, mask_cumul
 
 
 def plot_hist(counts, bins, label, alpha=1, ax=None, color=None):
@@ -184,7 +214,68 @@ def plot_hist(counts, bins, label, alpha=1, ax=None, color=None):
         color=color
     )
 
+
+def plot_all_hists(
+    hist_data,
+    col_names,
+    figsize=10,
+    alpha=0.5,
+    color_map=None,
+    fraction=False,
+    ax=None,
+    out_path=None,
+):
+    """Plot all histograms in hist_data for the given selections."""
+    if ax is None:
+        plt.figure()
+        fig, ax = plt.subplots(
+            1,
+            1,
+            figsize=(figsize, figsize)
+        )
+
+    counts0 = None
+    for col_name in col_names:
+        if col_name in hist_data:
+
+            data = hist_data[col_name]
+            if fraction:
+                # Normalise by the first histogram (reference selection)
+                if counts0 is None:
+                    counts0 = data["counts"]
+                counts = data["counts"] / counts0
+            else:
+                counts = data["counts"]
+
+            plot_hist(
+                counts,
+                data['bins'],
+                data['label'],
+                alpha=alpha,
+                ax=ax,
+                color=color_map[col_name] if color_map else None
+            )
+            #print(f"{col_name}: n_valid = {data['n_valid']}")
+
+    ax.set_xlabel('$r$')
+    ylabel = "fraction" if fraction else "number"
+    ax.set_ylabel(ylabel)
+    ax.set_xlim(17.5, 26.5)
+    if not fraction:
+        ax.legend()
+
+    if out_path:
+        plt.tight_layout()
+        plt.savefig(
+            out_path,
+            dpi=150,
+            bbox_inches='tight'
+        )
+
+
 # %%
+# Main program
+scenario = 1
+hist_data_path = f"magnitude_histograms_data_scenario-{scenario}.npz"
 
 if os.path.exists(hist_data_path):
     print(f"Histogram data file {hist_data_path} found.")
@@ -201,8 +292,8 @@ def plot_hist(counts, bins, label, alpha=1, ax=None, color=None):
     hist_data = None
 
 
-# ## Masking
 # %%
+# Masking
 # Get all masks, with or without dat, dat_ext
 masks, labels = sp_joint.get_masks_from_config(
     config,
@@ -212,22 +303,89 @@ def plot_hist(counts, bins, label, alpha=1, ax=None, color=None):
 )
 
 # %%
+# Combine mask according to scenario
 # List of basic masks to apply to all cases
-masks_labels_basic = [
-    "FLAGS",
-    "overlap",
-    "IMAFLAGS_ISO",
-    "mag",
-    "NGMIX_MOM_FAIL",
-    "NGMIX_ELL_PSFo_NOSHEAR_0",
-    "NGMIX_ELL_PSFo_NOSHEAR_1",
-    "4_Stars",
-    "8_Manual",
-    "64_r",
-    "1024_Maximask",
-]
+
+masks_labels_basic = ["overlap", "mag", "64_r"]
+col_names = ["basic masks"]
+
+if scenario == 0:
+    masks_labels_basic.extend([
+        "FLAGS",
+        "IMAFLAGS_ISO",
+        "NGMIX_MOM_FAIL",
+        "NGMIX_ELL_PSFo_NOSHEAR_0",
+        "NGMIX_ELL_PSFo_NOSHEAR_1",
+        "4_Stars",
+        "8_Manual",
+        "1024_Maximask",
+    ])
+
+    col_names.extend(["N_EPOCH", "npoint3", "metacal"])
+
+elif scenario == 1:
+
+    col_names.extend([
+        "IMAFLAGS_ISO",
+        "FLAGS",
+        "NGMIX_MOM_FAIL",
+        "NGMIX_ELL_PSFo_NOSHEAR_0",
+        "NGMIX_ELL_PSFo_NOSHEAR_1",
+        "4_Stars",
+        "8_Manual",
+        "1024_Maximask",
+        "N_EPOCH",
+        "npoint3",
+        "metacal",
+    ])
+
+    combine_cols = {
+        "ngmix failures": [
+            "NGMIX_MOM_FAIL",
+            "NGMIX_ELL_PSFo_NOSHEAR_0",
+            "NGMIX_ELL_PSFo_NOSHEAR_1",
+        ]
+    }
+
+if scenario != 1:
+    # Scenario 0 defines no column combinations
+    combine_cols = None
+
+# %%
+# Combine columns if specified.
+# Remove old columns after combining.
+if combine_cols is not None:
+    for new_col, old_cols in combine_cols.items():
+        print(f"Combining columns {old_cols} into {new_col}")
+        # Create combined mask
+        old_masks = []
+        idx_first = None
+        for col in old_cols:
+            mask, idx = get_mask(masks, col)
+            old_masks.append(mask)
+            if idx_first is None:
+                idx_first = idx
+        if dat is not None:
+            print(f"Creating combined mask for {new_col}")
+            masks_combined = sp_joint.Mask.from_list(
+                old_masks,
+                label=new_col,
+                verbose=obj._params["verbose"],
+            )
+        else:
+            print(f"Creating dummy mask for {new_col} (for plot label)")
+            masks_combined = sp_joint.Mask(
+                new_col,
+                new_col,
+                kind="none",
+            )
+        # Insert the combined mask at the position of the first removed one
+        masks.insert(idx_first, masks_combined)
+        col_names.insert(idx_first, new_col)
+
+        for old_mask, old_col in zip(old_masks, old_cols):
+            masks.remove(old_mask)
+            col_names.remove(old_col)
+
+    print("After combining: masks =", [mask._col_name for mask in masks])
 
 # %%
+# Create list of masks
 masks_basic = []
 for mask in masks:
     if mask._col_name in masks_labels_basic:
         masks_basic.append(mask)
 
 if dat is not None:
     print("Creating combined basic mask")
     masks_basic_combined = sp_joint.Mask.from_list(
         masks_basic,
-        label="combined",
+        label="basic masks",
         verbose=obj._params["verbose"],
     )
 else:
     # Create dummy combined mask (for plot label)
     print("Creating dummy combined basic mask")
     masks_basic_combined = sp_joint.Mask(
-        "combined",
-        "combined",
+        "basic masks",
+        "basic masks",
         kind="none",
     )
@@ -260,6 +418,7 @@ def plot_hist(counts, bins, label, alpha=1, ax=None, color=None):
 )
 
 # %%
+# Call metacal if data is available
 if dat is not None:
     cm = config["metacal"]
@@ -290,22 +449,17 @@ def plot_hist(counts, bins, label, alpha=1, ax=None, color=None):
 
 # %%
-# Plot magnitude histograms for various selection criteria
-
 # Define magnitude bins
 mag_bins = np.arange(15, 30, 0.05)
 mag_centers = 0.5 * (mag_bins[:-1] + mag_bins[1:])
-
 # %%
 # Create figure with multiple subplots
 figsize = 10
 alpha = 0.5
 
-col_names = ["combined", "N_EPOCH", "npoint3", "metacal"]
-
 # Define explicit colors for each histogram
-colors = ['C0', 'C1', 'C2', 'C3'] # Use matplotlib default color cycle
+colors = [f'C{i}' for i in range(len(col_names))] # Use matplotlib default color cycle
 color_map = dict(zip(col_names, colors))
@@ -333,32 +487,38 @@ def plot_hist(counts, bins, label, alpha=1, ax=None, color=None):
         'label': label,
         'n_valid': n_valid,
     }
-
+# %%
+# Create plots
+fig, axes = plt.subplots(
+    1,
+    2,
+    figsize=(2 * figsize, figsize)
+)
 # Plot histogram data
-plt.figure()
-fig, (ax) = plt.subplots(1, 1, figsize=(figsize, figsize))
-
-for col_name in col_names:
-    if col_name in hist_data:
-        data = hist_data[col_name]
-        plot_hist(
-            data['counts'],
-            data['bins'],
-            data['label'],
-            alpha=alpha,
-            ax=ax,
-            color=color_map[col_name]
-        )
-        #print(f"{col_name}: n_valid = {data['n_valid']}")
-
-ax.set_xlabel('$r$')
-ax.set_ylabel('Number')
-ax.set_xlim(17.5, 26.5)
-ax.legend()
-
+plot_all_hists(
+    hist_data,
+    col_names,
+    alpha=alpha,
+    color_map=color_map,
+    ax=axes[0],
+)
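+# Second panel: with fraction=True, plot_all_hists divides each histogram
+# bin-by-bin by the first one (the combined basic masks), so the bars show
+# the fraction of objects surviving each selection per magnitude bin.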
+plot_all_hists(
+    hist_data,
+    col_names,
+    alpha=alpha,
+    color_map=color_map,
+    fraction=True,
+    ax=axes[1],
+)
 plt.tight_layout()
-plt.savefig('magnitude_histograms.png', dpi=150, bbox_inches='tight')
+out_path = f"magnitude_histograms_scenario-{scenario}.png"
+plt.savefig(
+    out_path,
+    dpi=150,
+    bbox_inches='tight'
+)
 
+# %%
 # Save histogram data to file (only if we computed it)
 if dat is not None:
     np.savez(
@@ -381,4 +541,9 @@ def plot_hist(counts, bins, label, alpha=1, ax=None, color=None):
 # %%
 if dat is not None:
     obj.close_hd5()
+
+# %%
+for mask in masks:
+    mask.print_condition(sys.stdout, latex=True)
+
 # %%
diff --git a/src/sp_validation/run_joint_cat.py b/src/sp_validation/run_joint_cat.py
index c9d59657..e30bc438 100644
--- a/src/sp_validation/run_joint_cat.py
+++ b/src/sp_validation/run_joint_cat.py
@@ -1365,32 +1365,34 @@ def print_stats(self, num_obj, f_out=None):
         sf = f"{self._num_ok/num_obj:10.2%}"
         self.print_strings(self._col_name, self._label, si, sf, f_out=f_out)
 
-    def get_sign(self):
+    def get_sign(self, latex=False):
         sign = None
-        if self._kind =="equal":
-            sign = "="
-        elif self._kind =="not_equal":
-            sign = "!="
-        elif self._kind =="greater_equal":
-            sign = ">="
-        elif self._kind =="smaller_equal":
-            sign = "<="
+        if self._kind == "equal":
+            sign = "$=$" if latex else "="
+        elif self._kind == "not_equal":
+            # raw strings, so that e.g. "\n" is not parsed as a newline
+            sign = r"$\ne$" if latex else "!="
+        elif self._kind == "greater_equal":
+            sign = r"$\geq$" if latex else ">="
+        elif self._kind in ("smaller_equal", "range"):
+            sign = r"$\leq$" if latex else "<="
         return sign
 
-    def print_condition(self, f_out):
+    def print_condition(self, f_out, latex=False):
 
         if self._value is None:
             return ""
 
-        sign = self.get_sign()
+        sign = self.get_sign(latex=latex)
+
+        name = self._label if latex else self._col_name
 
         if sign is not None:
-            print(f"{self._col_name} {sign} {self._value}", file=f_out)
+            print(f"{name} {sign} {self._value}", file=f_out)
 
         if self._kind == "range":
-            print(f"{self._value[0]} <= {self._col_name} <= {self._value[1]}", file=f_out)
+            print(f"{self._value[0]} {sign} {name} {sign} {self._value[1]}", file=f_out)
 
     def print_summary(self, f_out):
         print(f"[{self._label}]\t\t\t", file=f_out, end="")
         self.print_condition(f_out)
From f4f0bb99df63569837bfa19cd3bdad4a76791791 Mon Sep 17 00:00:00 2001
From: martinkilbinger
Date: Mon, 17 Nov 2025 13:13:17 +0100
Subject: [PATCH 3/9] hist plot, basic: mask size and SNR separately

---
 .../cosmo_val/catalog_paper_plot/hist_mag.py  | 23 ++++++++++++++++
 src/sp_validation/basic.py                    | 26 +++++++++++++++++++
 2 files changed, 49 insertions(+)

diff --git a/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py b/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py
index 4ced3036..7d202d7e 100644
--- a/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py
+++ b/notebooks/cosmo_val/catalog_paper_plot/hist_mag.py
@@ -417,6 +417,25 @@ def plot_all_hists(
         kind="none",
     )
 
+# %%
+def get_info_for_metacal_masking(dat, mask, prefix="NGMIX", name_shear="NOSHEAR"):
+
+    res = {}
+
+    res["flag"] = dat[mask][f"{prefix}_FLAGS_{name_shear}"]
+
+    for key in ("flux", "flux_err", "T"):
+        res[key] = dat[mask][f"{prefix}_{key.upper()}_{name_shear}"]
+    res["Tpsf"] = dat[mask][f"{prefix}_Tpsf_{name_shear}"]
+
+    return res
+
+# %%
+if dat is not None:
+    cm = config["metacal"]
+
+
+
 # %%
 # Call metacal if data is available
 if dat is not None:
     cm = config["metacal"]
@@ -547,3 +566,7 @@ def plot_all_hists(
     mask.print_condition(sys.stdout, latex=True)
 
 # %%
+# Print number of valid objects per selection
+for name, data in hist_data.items():
+    # n_valid is only present if the histograms were computed in this session
+    print(name, data.get("n_valid", "n/a"))
+
+# %%
diff --git
a/src/sp_validation/basic.py b/src/sp_validation/basic.py
index 37200047..fc96230a 100644
--- a/src/sp_validation/basic.py
+++ b/src/sp_validation/basic.py
@@ -597,3 +597,29 @@ def jackknif_weighted_average2(
     all_est = np.array(all_est)
 
     return np.mean(all_est), np.std(all_est)
+
+
+def mask_gal_size(
+    T, Tpsf, rel_size_min, rel_size_max, size_corr_ell=False, g1=None, g2=None
+):
+    """Return mask selecting galaxies with relative size T/Tpsf in range."""
+    Tr_tmp = T
+    if size_corr_ell:
+        # Ellipticity-corrected ("roundified") size; assign a new array
+        # instead of multiplying in place, which would modify the input
+        Tr_tmp = T * (
+            (1 - g1 ** 2 - g2 ** 2) / (1 + g1 ** 2 + g2 ** 2)
+        )
+
+    mask = (
+        (Tr_tmp / Tpsf > rel_size_min)
+        & (Tr_tmp / Tpsf < rel_size_max)
+    )
+
+    return mask
+
+
+def mask_gal_SNR(SNR, snr_min, snr_max):
+    """Return mask selecting galaxies with SNR in (snr_min, snr_max)."""
+    mask = (
+        (SNR > snr_min)
+        & (SNR < snr_max)
+    )
+
+    return mask
From cc43bd1fed5a139df84bee84f3ffec649d77007f Mon Sep 17 00:00:00 2001
From: martinkilbinger
Date: Fri, 5 Dec 2025 11:37:25 +0100
Subject: [PATCH 4/9] Added scripts to plot star-gal correlations

---
 .../catalog_paper_plot/check_gaia.ipynb       |  73 +++++++++
 .../download_gaia_catalogues.py               | 100 ++++++++++++
 .../catalog_paper_plot/plot_gaia_sky.py       | 154 ++++++++++++++++++
 .../catalog_paper_plot/run_cosmo_val_GAIA.py  |  42 +++++
 .../run_cosmo_val_gammat.py                   |  74 +++++++++
 .../run_cosmo_val_lambda.py                   |  82 ++++++++++
 6 files changed, 525 insertions(+)
 create mode 100644 notebooks/cosmo_val/catalog_paper_plot/check_gaia.ipynb
 create mode 100644 notebooks/cosmo_val/catalog_paper_plot/download_gaia_catalogues.py
 create mode 100644 notebooks/cosmo_val/catalog_paper_plot/plot_gaia_sky.py
 create mode 100644 notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_GAIA.py
 create mode 100644 notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_gammat.py
 create mode 100644 notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_lambda.py

diff --git a/notebooks/cosmo_val/catalog_paper_plot/check_gaia.ipynb b/notebooks/cosmo_val/catalog_paper_plot/check_gaia.ipynb
new file mode 100644
index 00000000..0de529ae
--- /dev/null
+++ b/notebooks/cosmo_val/catalog_paper_plot/check_gaia.ipynb
@@ -0,0 +1,73 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "8049856e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from matplotlib import pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "d7f7568b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from astropy.io import fits"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "62854cbc",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "/automnt/n17data/mkilbing/astro/Runs/flexion/simuls_MAB\n"
+     ]
+    }
+   ],
+   "source": [
+    "!pwd"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4c311b19",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "cat = fits.getdata(\"gaia2.fits\", 1)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "base",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.18"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/cosmo_val/catalog_paper_plot/download_gaia_catalogues.py b/notebooks/cosmo_val/catalog_paper_plot/download_gaia_catalogues.py
new file mode 100644
index 00000000..746a641b
--- /dev/null
+++ b/notebooks/cosmo_val/catalog_paper_plot/download_gaia_catalogues.py
@@ -0,0 +1,100 @@
+# %%
+# Download GAIA star catalogues in magnitude bins
+
+# %%
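+# The script queries GAIA DR3 for stars with 30 < Dec < 75, G < 20 and
+# RUWE < 1.4, splits the sample into magnitude bins with equal object
+# counts, and writes one FITS catalogue per bin; the output files are
+# picked up by plot_gaia_sky.py and the run_cosmo_val_GAIA.py runs.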
+from IPython import get_ipython
+
+# %%
+# enable autoreload for interactive sessions
+ipython = get_ipython()
+if ipython is not None:
+    ipython.run_line_magic("reload_ext", "autoreload")
+    ipython.run_line_magic("autoreload", "2")
+    ipython.run_line_magic("reload_ext", "log_cell_time")
+
+import numpy as np
+import time
+from astroquery.gaia import Gaia
+from textwrap import dedent
+
+
+def get_bins(results, key="phot_g_mean_mag", n_bins=3):
+    """Split query results into n_bins magnitude bins of equal object counts."""
+    quantiles_raw = np.percentile(results[key], np.linspace(0, 100, n_bins + 1))
+    # round
+    quantiles = np.round(quantiles_raw, 1)
+
+    print("Magnitude bin edges:")
+    subsets = []
+    for i in range(n_bins):
+        this_subset = results[
+            (results[key] >= quantiles[i]) & (results[key] < quantiles[i + 1])
+        ]
+        subsets.append(this_subset)
+
+        print(
+            f"  Bin {i+1}: {quantiles[i]:.2f}, {quantiles[i+1]:.2f}, N={len(this_subset)}"
+        )
+
+    return subsets, quantiles
+
+
+def do_query(do_async=True, nmax=3_000_000):
+    """Query GAIA DR3 for stars in the UNIONS declination range."""
+    # Define query
+    query = dedent(
+        f"""
+        SELECT TOP {nmax} ra, dec, phot_g_mean_mag, ruwe
+        FROM gaiadr3.gaia_source
+        WHERE dec > 30
+        AND dec < 75
+        AND phot_g_mean_mag < 20
+        AND phot_g_mean_mag IS NOT NULL
+        AND ruwe < 1.4
+        ORDER BY random_index
+        """
+    ).strip()
+
+    # Launch the query
+    if not do_async:
+        print("Retrieving GAIA data (sync)")
+        job = Gaia.launch_job(query)
+    else:
+        print("Retrieving GAIA data (async)")
+        job = Gaia.launch_job_async(query)
+
+    while not job.is_finished():
+        print(f"Job status: {job.get_phase()}")
+        time.sleep(10)
+
+    results = job.get_results()
+
+    print(f"Retrieved {len(results)} sources")
+    print(results[:10])  # Show first 10 results
+
+    return results
+
+
+def write_subsets(subsets, quantiles):
+    """Write one FITS file per magnitude-bin subset."""
+    for idx, subset in enumerate(subsets):
+
+        if idx == 0:
+            out_path = f"gaia_stars_g_smaller_{quantiles[idx + 1]}.fits"
+        elif idx == len(subsets) - 1:
+            out_path = f"gaia_stars_g_larger_{quantiles[idx]}.fits"
+        else:
+            out_path = f"gaia_stars_g_in_{quantiles[idx]}_{quantiles[idx + 1]}.fits"
+        print(f"Writing {len(subset)} objects to {out_path}")
+        subset.write(out_path, format="fits", overwrite=True)
+
+
+# %% Main program
+results = do_query(do_async=True)
+
+subsets, quantiles = get_bins(results)
+
+write_subsets(subsets, quantiles)
diff --git a/notebooks/cosmo_val/catalog_paper_plot/plot_gaia_sky.py b/notebooks/cosmo_val/catalog_paper_plot/plot_gaia_sky.py
new file mode 100644
index 00000000..ebe31997
--- /dev/null
+++ b/notebooks/cosmo_val/catalog_paper_plot/plot_gaia_sky.py
@@ -0,0 +1,154 @@
+"""Plot GAIA stars on sky maps by magnitude bins."""
+
+import glob
+import os
+import re
+import numpy as np
+from astropy.table import Table
+import matplotlib.pyplot as plt
+from sp_validation.plots import FootprintPlotter
+
+
+def load_gaia_fits(file_path):
+    """Load GAIA FITS file and return RA, DEC arrays.
+
+    Parameters
+    ----------
+    file_path : str
+        Path to FITS file
+
+    Returns
+    -------
+    ra : np.ndarray
+        Right ascension values
+    dec : np.ndarray
+        Declination values
+    n_objects : int
+        Number of objects in file
+    """
+    table = Table.read(file_path, format='fits')
+    print(f"Loaded {len(table)} objects from {file_path}")
+    return table['ra'], table['dec'], len(table)
+
+
+def extract_label_from_filename(filename):
+    """Extract magnitude information from filename to create label.
+ + Parameters + ---------- + filename : str + FITS filename + + Returns + ------- + str + Label describing the magnitude bin + """ + basename = os.path.basename(filename) + + # Match pattern: g_smaller_XX.X + match = re.search(r'g_smaller_([0-9.]+)', basename) + if match: + mag = match.group(1) + return f"G < {mag} (bright)" + + # Match pattern: g_in_XX.X_YY.Y + match = re.search(r'g_in_([0-9.]+)_([0-9.]+)', basename) + if match: + mag1, mag2 = match.group(1), match.group(2) + return f"{mag1} ≤ G < {mag2} (medium)" + + # Match pattern: g_larger_XX.X + match = re.search(r'g_larger_([0-9.]+)', basename) + if match: + mag = match.group(1) + return f"G ≥ {mag} (faint)" + + # Fallback + return basename.replace('.fits', '') + + +def main(): + """Create sky plots for GAIA stars in three magnitude bins.""" + + # Auto-detect GAIA files using patterns + pattern_smaller = "gaia_stars_g_smaller_*.fits" + pattern_in = "gaia_stars_g_in_*.fits" + pattern_larger = "gaia_stars_g_larger_*.fits" + + files = [] + files.extend(sorted(glob.glob(pattern_smaller))) + files.extend(sorted(glob.glob(pattern_in))) + files.extend(sorted(glob.glob(pattern_larger))) + + if not files: + print("ERROR: No GAIA FITS files found matching patterns:") + print(f" - {pattern_smaller}") + print(f" - {pattern_in}") + print(f" - {pattern_larger}") + return + + print(f"Found {len(files)} GAIA FITS files:") + for f in files: + print(f" - {f}") + print() + + # Generate labels from filenames + labels = [extract_label_from_filename(f) for f in files] + + # Initialize the footprint plotter + plotter = FootprintPlotter(nside_coverage=32, nside_map=2048) + + # Process each magnitude bin + hsp_maps = [] + for file_path, label in zip(files, labels): + print(f"Processing: {label}") + + # Load data + ra, dec, n_objects = load_gaia_fits(file_path) + + # Create healsparse map + hsp_map = plotter.create_hsp_map(ra, dec) + hsp_maps.append(hsp_map) + + # Plot all regions (NGC, SGC, fullsky) + plotter.plot_all_regions( + hsp_map, + outbase=f"gaia_sky_{file_path.replace('.fits', '')}" + ) + print(f"Created plots for {label}") + + # Create combined plot showing all bins + n_files = len(files) + if n_files > 0: + print("Creating combined plot") + + fig, axes = plt.subplots(1, n_files, figsize=(10 * n_files, 10)) + + # Handle case of single file + if n_files == 1: + axes = [axes] + + for idx, (hsp_map, label) in enumerate(zip(hsp_maps, labels)): + ax = axes[idx] + + # Use fullsky region parameters + region = plotter._regions['fullsky'] + + projection = plotter.plot_area( + hsp_map, + ra_0=region['ra_0'], + extend=region['extend'], + vmax=region['vmax'], + title=label + ) + + plt.tight_layout() + plt.savefig('gaia_sky_combined_all_bins.png', dpi=150, bbox_inches='tight') + print("Saved combined plot: gaia_sky_combined_all_bins.png") + + print("All plots created successfully!") + + +if __name__ == "__main__": + main() diff --git a/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_GAIA.py b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_GAIA.py new file mode 100644 index 00000000..af58f0e9 --- /dev/null +++ b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_GAIA.py @@ -0,0 +1,42 @@ +# %% +from IPython import get_ipython + +ipython = get_ipython() + +# enable autoreload for interactive sessions +if ipython is not None: + ipython.run_line_magic("load_ext", "autoreload") + ipython.run_line_magic("autoreload", "2") + ipython.run_line_magic("load_ext", "log_cell_time") + +import matplotlib.pyplot as plt # noqa: E402, F401 +import numpy as np # 
noqa: E402, F401 +import sys # noqa: E402 +sys.path.insert(0, '/home/mkilbing/astro/repositories/gitlab.euclid-sgs/FDQA/rho_tau_stats') +from cosmo_val import CosmologyValidation # noqa: E402 + +# enable inline plotting for interactive sessions +# (must be done *after* importing package that sets agg backend) +if ipython is not None: + ipython.run_line_magic("matplotlib", "inline") + + +# %% +cv = CosmologyValidation( + versions=["SP_v1.4.6_GAIA_bright", "SP_v1.4.6_GAIA_medium", "SP_v1.4.6_GAIA_faint"], + data_base_dir="/n17data/mkilbing/astro/data/", + catalog_config="./cat_config.yaml", + output_dir="./output", + theta_min=1.0, + theta_max=250.0, + nbins=20, + cov_estimate_method='jk', + theta_min_plot=0.8, + theta_max_plot=260.0, + rho_tau_method='emcee', + n_cov=1, +) + +# %% +cv.plot_gammat_around_stars(offset=0.05, gammax=True) +cv.plot_gammat_around_stars(offset=0.05, gammax=True, logy=True) diff --git a/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_gammat.py b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_gammat.py new file mode 100644 index 00000000..4ddb1c35 --- /dev/null +++ b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_gammat.py @@ -0,0 +1,74 @@ +# %% +from IPython import get_ipython + +ipython = get_ipython() + +# enable autoreload for interactive sessions +if ipython is not None: + ipython.run_line_magic("load_ext", "autoreload") + ipython.run_line_magic("autoreload", "2") + ipython.run_line_magic("reload_ext", "log_cell_time") + +import matplotlib.pyplot as plt +import numpy as np +import sys +import os +sys.path.insert(0, '/home/mkilbing/astro/repositories/gitlab.euclid-sgs/FDQA/rho_tau_stats') +from cosmo_val import CosmologyValidation + + +def rename_output(output_bases, output_dir, suff, version_string, rand_str): + + for base in output_bases: + old_path = f"{output_dir}/plots/{base}{suff}" + new_path = f"{output_dir}/plots/{base}_{version_string}{rand_str}{suff}" + print(f"mv {old_path} -> {new_path}") + os.rename(old_path, new_path) + + +# enable inline plotting for interactive sessions +# (must be done *after* importing package that sets agg backend) +if ipython is not None: + ipython.run_line_magic("matplotlib", "inline") + +versions_146 = ["SP_v1.4.6", "SP_v1.4.6_leak_corr"] +versions_var = ["SP_v1.4.5", "SP_v1.4.6", "SP_v1.4.7", "SP_v1.4.8"] + +versions = versions_146 + +output_dir = "./output" + +# %% +cv = CosmologyValidation( + versions=versions_146, + data_base_dir="/n17data/mkilbing/astro/data/", + catalog_config="./cat_config.yaml", + output_dir=output_dir, + theta_min=1.0, + theta_max=250.0, + nbins=20, + cov_estimate_method='jk', + theta_min_plot=0.8, + theta_max_plot=260.0, + rho_tau_method='emcee', + n_cov=1, + star_weight_type="uniform", +) + + +output_bases = [ + "gammat_around_stars_lin_non_tomographic", + "gammat_around_stars_log_non_tomographic", +] +suff = ".png" +version_string = "_".join(versions) + +# %% +cv.plot_gammat_around_stars(offset=0.05, gammax=True, wo_rand_subtr=True) +cv.plot_gammat_around_stars(offset=0.05, gammax=True, logy=True, wo_rand_subtr=True) +rename_output(output_bases, output_dir, suff, version_string, "_wo_rand_subtr") + +# %% +cv.plot_gammat_around_stars(offset=0.05, gammax=True) +cv.plot_gammat_around_stars(offset=0.05, gammax=True, logy=True) +rename_output(output_bases, output_dir, suff, version_string, "") diff --git a/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_lambda.py b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_lambda.py new file mode 100644 index 00000000..7f7d566f --- 
/dev/null +++ b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_lambda.py @@ -0,0 +1,82 @@ +# %% +from IPython import get_ipython + +ipython = get_ipython() + +# enable autoreload for interactive sessions +if ipython is not None: + ipython.run_line_magic("load_ext", "autoreload") + ipython.run_line_magic("autoreload", "2") + ipython.run_line_magic("load_ext", "log_cell_time") + +import sys +import os +import matplotlib.pyplot as plt +import numpy as np + +sys.path.insert(0, '/home/mkilbing/astro/repositories/gitlab.euclid-sgs/FDQA/rho_tau_stats') +from cosmo_val import CosmologyValidation # noqa: E402 + + +def rename_output(output_bases, output_dir, suff, version_string, rand_str): + + for base in output_bases: + old_path = f"{output_dir}/plots/{base}{suff}" + new_path = f"{output_dir}/plots/{base}_{version_string}{rand_str}{suff}" + print(f"mv {old_path} -> {new_path}") + os.rename(old_path, new_path) + + +# enable inline plotting for interactive sessions +# (must be done *after* importing package that sets agg backend) +if ipython is not None: + ipython.run_line_magic("matplotlib", "inline") + +# Different options +versions_146 = ["SP_v1.4.6", "SP_v1.4.6_leak_corr"] +versions_var = ["SP_v1.4.5", "SP_v1.4.6", "SP_v1.4.7", "SP_v1.4.8"] + +# We use this combination of versions +versions = versions_146 + +output_dir = "./output" + +theta_min = 1.0 +theta_max = 300.0 +plot_range_fac = 1.1 + +# %% +cv = CosmologyValidation( + versions=versions, + data_base_dir="/n17data/mkilbing/astro/data/", + catalog_config="./cat_config.yaml", + output_dir=output_dir, + theta_min=theta_min, + theta_max=theta_max, + nbins=15, + cov_estimate_method='jk', + theta_min_plot=theta_min / plot_range_fac, + theta_max_plot=theta_max * plot_range_fac, + rho_tau_method='emcee', + n_cov=1, + star_weight_type="uniform", + random_multiple=5, +) + +output_bases = [ + "gammat_stars_around_galaxies_lin_non_tomographic", + "gammat_stars_around_galaxies_log_non_tomographic", +] +suff = ".png" +version_string = "_".join(versions) + + +cv.plot_gammat_stars_around_galaxies(offset=0.025, gammax=True, wo_rand_subtr=True) +cv.plot_gammat_stars_around_galaxies(offset=0.025, gammax=True, logy=True, wo_rand_subtr=True) +rename_output(output_bases, output_dir, suff, version_string, "_wo_rand_subtr") + +# %% +cv.plot_gammat_stars_around_galaxies(offset=0.025, gammax=True) +cv.plot_gammat_stars_around_galaxies(offset=0.025, gammax=True, logy=True) +rename_output(output_bases, output_dir, suff, version_string, "") + From 52ca866cc02414609ddc08fe0bc219bafa2d60fb Mon Sep 17 00:00:00 2001 From: martinkilbinger Date: Fri, 5 Dec 2025 11:37:57 +0100 Subject: [PATCH 5/9] cat yaml config file --- notebooks/cosmo_val/cat_config.yaml | 192 +++++++++++++++++++++++++++- 1 file changed, 191 insertions(+), 1 deletion(-) diff --git a/notebooks/cosmo_val/cat_config.yaml b/notebooks/cosmo_val/cat_config.yaml index 67a0cad4..47e26d61 100644 --- a/notebooks/cosmo_val/cat_config.yaml +++ b/notebooks/cosmo_val/cat_config.yaml @@ -694,6 +694,8 @@ SP_v1.4.5: shear: path: ../../../../../../sguerrini/unions_shapepipe_cut_struc_2024_v1.4.5_rerun.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec e1_col: e1 e2_col: e2 e1_PSF_col: e1_PSF @@ -722,6 +724,7 @@ SP_v1.4.5: PSF_flag: "FLAG_PSF_HSM" star_flag: "FLAG_STAR_HSM" square_size: True + patch_number: 100 SP_v1.4.6: pipeline: SP @@ -738,6 +741,8 @@ SP_v1.4.6: shear: path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits covmat_file: 
./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec e1_col: e1 e2_col: e2 e1_PSF_col: e1_PSF @@ -766,6 +771,179 @@ SP_v1.4.6: PSF_flag: "FLAG_PSF_HSM" star_flag: "FLAG_STAR_HSM" square_size: True + patch_number: 100 + +SP_v1.4.7: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + ls: dashed + colour: darkorchid + marker: d + getdist_colour: 0.0, 0.5, 1.0 + cov_th: + A: 2420.2651014497287 #deg^2 + n_e: 7.040818382014773 #arcmin-2 + n_psf: 0.752316232272063 #arcmin-2 + sigma_e: 0.30961528707207325 + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.7/unions_shapepipe_cut_struc_2024_v1.4.7.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1 + e2_col: e2 + e1_PSF_col: e1_PSF + e2_PSF_col: e2_PSF + w_col: w_des + R: 1.0 + redshift_distr: CFIS/v1.0/nz/dndz_SP_A.txt + psf: + path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + hdu: 1 + ra_col: RA + dec_col: Dec + e1_PSF_col: E1_PSF_HSM + e2_PSF_col: E2_PSF_HSM + e1_star_col: E1_STAR_HSM + e2_star_col: E2_STAR_HSM + PSF_size: SIGMA_PSF_HSM + star_size: SIGMA_STAR_HSM + PSF_flag: "FLAG_PSF_HSM" + star_flag: "FLAG_STAR_HSM" + square_size: True + patch_number: 100 + +SP_v1.4.6_GAIA_bright: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + ls: dashed + colour: green + marker: s + getdist_colour: 0.0, 0.5, 1.0 + cov_th: + A: 2420.2651014497287 #deg^2 + n_e: 7.040818382014773 #arcmin-2 + n_psf: 0.752316232272063 #arcmin-2 + sigma_e: 0.30961528707207325 + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1 + e2_col: e2 + e1_PSF_col: e1_PSF + e2_PSF_col: e2_PSF + w_col: w_des + R: 1.0 + mask: /n09data/guerrini/glass_mock/mask_v1.4.5_nside8192.fits + redshift_distr: ../../../sguerrini/UNIONS/WL/nz/v1.4.6/nz_SP_v1.4.6_A.txt + psf: + path: ../../../GAIA/gaia_stars_g_bright.fits + hdu: 1 + ra_col: RA + dec_col: Dec + square_size: True + patch_number: 100 + +SP_v1.4.6_GAIA_medium: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + ls: dashed + colour: darkgreen + marker: o + getdist_colour: 0.0, 0.5, 1.0 + cov_th: + A: 2420.2651014497287 #deg^2 + n_e: 7.040818382014773 #arcmin-2 + n_psf: 0.752316232272063 #arcmin-2 + sigma_e: 0.30961528707207325 + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1 + e2_col: e2 + e1_PSF_col: e1_PSF + e2_PSF_col: e2_PSF + w_col: w_des + R: 1.0 + mask: /n09data/guerrini/glass_mock/mask_v1.4.5_nside8192.fits + redshift_distr: ../../../sguerrini/UNIONS/WL/nz/v1.4.6/nz_SP_v1.4.6_A.txt + psf: + path: ../../../GAIA/gaia_stars_g_medium.fits + hdu: 1 + ra_col: RA + dec_col: Dec + square_size: True + patch_number: 100 + +SP_v1.4.6_GAIA_faint: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + ls: dashed + colour: lightgreen + marker: d + getdist_colour: 0.0, 0.5, 1.0 + cov_th: + A: 2420.2651014497287 #deg^2 + n_e: 7.040818382014773 #arcmin-2 + n_psf: 0.752316232272063 #arcmin-2 + sigma_e: 0.30961528707207325 + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1 + e2_col: e2 + e1_PSF_col: e1_PSF + e2_PSF_col: e2_PSF + w_col: w_des + R: 1.0 + mask: /n09data/guerrini/glass_mock/mask_v1.4.5_nside8192.fits + redshift_distr: 
../../../sguerrini/UNIONS/WL/nz/v1.4.6/nz_SP_v1.4.6_A.txt + psf: + path: ../../../GAIA/gaia_stars_g_faint.fits + hdu: 1 + ra_col: RA + dec_col: Dec + square_size: True + patch_number: 100 + + +SP_v1.4.7_GAIA_bright: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + ls: dashed + colour: darkorchid + marker: d + getdist_colour: 0.0, 0.5, 1.0 + cov_th: + A: 2420.2651014497287 #deg^2 + n_e: 7.040818382014773 #arcmin-2 + n_psf: 0.752316232272063 #arcmin-2 + sigma_e: 0.30961528707207325 + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.7/unions_shapepipe_cut_struc_2024_v1.4.7.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1 + e2_col: e2 + e1_PSF_col: e1_PSF + e2_PSF_col: e2_PSF + w_col: w_des + R: 1.0 + redshift_distr: CFIS/v1.0/nz/dndz_SP_A.txt + psf: + path: ../../../GAIA/gaia_stars_g_smaller_16.0.fits + hdu: 1 + ra_col: RA + dec_col: dec + square_size: True + patch_number: 100 SP_v1.4.8: pipeline: SP @@ -782,6 +960,8 @@ SP_v1.4.8: shear: path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.8/unions_shapepipe_cut_struc_2024_v1.4.8.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec e1_col: e1 e2_col: e2 e1_PSF_col: e1_PSF @@ -791,7 +971,7 @@ SP_v1.4.8: mask: /n09data/guerrini/glass_mock/mask_v1.4.5_nside8192.fits redshift_distr: ../../../sguerrini/UNIONS/WL/nz/v1.4.8/nz_SP_v1.4.8_A.txt star: - path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_star_2024_v1.4.a.fits + path: ../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_star_2024_v1.4.a.fits ra_col: RA dec_col: Dec e1_col: e1 @@ -810,6 +990,7 @@ SP_v1.4.8: PSF_flag: "FLAG_PSF_HSM" star_flag: "FLAG_STAR_HSM" square_size: True + patch_number: 100 SP_v1.4.5.A: pipeline: SP @@ -998,6 +1179,8 @@ SP_v1.4.6_leak_corr: shear: path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec e1_col: e1_leak_corrected e2_col: e2_leak_corrected e1_PSF_col: e1_PSF @@ -1026,6 +1209,7 @@ SP_v1.4.6_leak_corr: PSF_flag: "FLAG_PSF_HSM" star_flag: "FLAG_STAR_HSM" square_size: True + patch_number: 100 SP_v1.4.7_leak_corr: pipeline: SP @@ -1042,6 +1226,8 @@ SP_v1.4.7_leak_corr: shear: path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.7/unions_shapepipe_cut_struc_2024_v1.4.7.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec e1_col: e1_leak_corrected e2_col: e2_leak_corrected e1_PSF_col: e1_PSF @@ -1069,6 +1255,7 @@ SP_v1.4.7_leak_corr: PSF_flag: "FLAG_PSF_HSM" star_flag: "FLAG_STAR_HSM" square_size: True + patch_number: 100 SP_v1.4.8_leak_corr: pipeline: SP @@ -1388,6 +1575,8 @@ SP_v1.4.5_leak_corr: shear: path: ../../../../../../sguerrini/unions_shapepipe_cut_struc_2024_v1.4.5_rerun.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec e1_col: e1_leak_corrected e2_col: e2_leak_corrected e1_PSF_col: e1_PSF @@ -1416,6 +1605,7 @@ SP_v1.4.5_leak_corr: PSF_flag: "FLAG_PSF_HSM" star_flag: "FLAG_STAR_HSM" square_size: True + patch_number: 100 SP_v1.5.4: pipeline: SP From dd962f34d4324190923e219d17ea385ac7a9b6a1 Mon Sep 17 00:00:00 2001 From: martinkilbinger Date: Wed, 17 Dec 2025 15:44:10 +0100 Subject: [PATCH 6/9] cosmo val config file: added v1.4.6 test cases --- notebooks/cosmo_val/cat_config.yaml | 104 ++++++++++++++++++++++++++++ 1 file changed, 104 insertions(+) diff --git a/notebooks/cosmo_val/cat_config.yaml b/notebooks/cosmo_val/cat_config.yaml index 47e26d61..686dd5e3 100644 --- 
a/notebooks/cosmo_val/cat_config.yaml +++ b/notebooks/cosmo_val/cat_config.yaml @@ -992,6 +992,110 @@ SP_v1.4.8: square_size: True patch_number: 100 +# For additive bias only +SP_v1.4.6_uncal: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1_uncal + e2_col: e2_uncal + w_col: w_des + R: 1.0 + psf: + path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + hdu: 1 + patch_number: 100 + +SP_v1.4.6_uncal_w_iv: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1_uncal + e2_col: e2_uncal + w_col: w_iv + R: 1.0 + psf: + path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + hdu: 1 + patch_number: 100 + +SP_v1.4.6_uncal_w_1: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1_uncal + e2_col: e2_uncal + w_col: one + R: 1.0 + psf: + path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + hdu: 1 + patch_number: 100 + +SP_v1.4.5_uncal: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.5/unions_shapepipe_cut_struc_2024_v1.4.5.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1_uncal + e2_col: e2_uncal + w_col: w_des + R: 1.0 + psf: + path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + hdu: 1 + patch_number: 100 + +SP_v1.4.7_uncal: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.7/unions_shapepipe_cut_struc_2024_v1.4.7.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1_uncal + e2_col: e2_uncal + w_col: w_des + R: 1.0 + psf: + path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + hdu: 1 + patch_number: 100 + +SP_v1.4.8_uncal: + pipeline: SP + subdir: CFIS/v1.0/ShapePipe/ + shear: + path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.8/unions_shapepipe_cut_struc_2024_v1.4.8.fits + covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt + ra_col: RA + dec_col: Dec + e1_col: e1_uncal + e2_col: e2_uncal + w_col: w_des + R: 1.0 + psf: + path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + hdu: 1 + patch_number: 100 + + SP_v1.4.5.A: pipeline: SP subdir: CFIS/v1.0/ShapePipe/ From 6d3f5daa09bc08070fd5c9ffeb361bbc9ff14557 Mon Sep 17 00:00:00 2001 From: martinkilbinger Date: Wed, 17 Dec 2025 20:05:00 +0100 Subject: [PATCH 7/9] updated cosmo_val config to new path structure --- notebooks/cosmo_val/cat_config.yaml | 43 +++++++++++-------- .../run_cosmo_val_lambda.py | 42 ++++++++++++------ 2 files changed, 54 insertions(+), 31 deletions(-) diff --git a/notebooks/cosmo_val/cat_config.yaml b/notebooks/cosmo_val/cat_config.yaml index f3ca1114..a1a6b756 100644 --- a/notebooks/cosmo_val/cat_config.yaml +++ b/notebooks/cosmo_val/cat_config.yaml @@ -616,6 +616,8 @@ SP_v1.4.6: covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt path: v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits redshift_path: 
/n17data/mkilbing/astro/data/CFIS/v1.0/nz/dndz_SP_A.txt + ra_col: RA + dec_col: Dec w_col: w_des e1_col: e1 e1_col_corrected: e1_leak_corrected @@ -629,6 +631,7 @@ SP_v1.4.6: e1_col: e1 e2_col: e2 path: unions_shapepipe_star_2024_v1.4.a.fits + patch_number: 100 SP_v1.4.7: subdir: /n17data/UNIONS/WL/v1.4.x pipeline: SP @@ -661,6 +664,8 @@ SP_v1.4.7: covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt path: v1.4.7/unions_shapepipe_cut_struc_2024_v1.4.7.fits redshift_path: /n17data/mkilbing/astro/data/CFIS/v1.0/nz/dndz_SP_A.txt + ra_col: RA + dec_col: Dec w_col: w_des e1_col: e1 e1_col_corrected: e1_leak_corrected @@ -706,6 +711,8 @@ SP_v1.4.8: covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt path: v1.4.8/unions_shapepipe_cut_struc_2024_v1.4.8.fits redshift_path: /n17data/mkilbing/astro/data/CFIS/v1.0/nz/dndz_SP_A.txt + ra_col: RA + dec_col: Dec w_col: w_des e1_col: e1 e1_col_corrected: e1_leak_corrected @@ -723,9 +730,9 @@ SP_v1.4.8: # For additive bias only SP_v1.4.6_uncal: pipeline: SP - subdir: CFIS/v1.0/ShapePipe/ + subdir: /n17data/UNIONS/WL/v1.4.x shear: - path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits + path: v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt ra_col: RA dec_col: Dec @@ -734,15 +741,15 @@ SP_v1.4.6_uncal: w_col: w_des R: 1.0 psf: - path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + path: unions_shapepipe_psf_2024_v1.4.a.fits hdu: 1 patch_number: 100 SP_v1.4.6_uncal_w_iv: pipeline: SP - subdir: CFIS/v1.0/ShapePipe/ + subdir: /n17data/UNIONS/WL/v1.4.x shear: - path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits + path: v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt ra_col: RA dec_col: Dec @@ -751,15 +758,15 @@ SP_v1.4.6_uncal_w_iv: w_col: w_iv R: 1.0 psf: - path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + path: unions_shapepipe_psf_2024_v1.4.a.fits hdu: 1 patch_number: 100 SP_v1.4.6_uncal_w_1: pipeline: SP - subdir: CFIS/v1.0/ShapePipe/ + subdir: /n17data/UNIONS/WL/v1.4.x shear: - path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits + path: v1.4.6/unions_shapepipe_cut_struc_2024_v1.4.6.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt ra_col: RA dec_col: Dec @@ -768,15 +775,15 @@ SP_v1.4.6_uncal_w_1: w_col: one R: 1.0 psf: - path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + path: unions_shapepipe_psf_2024_v1.4.a.fits hdu: 1 patch_number: 100 SP_v1.4.5_uncal: pipeline: SP - subdir: CFIS/v1.0/ShapePipe/ + subdir: /n17data/UNIONS/WL/v1.4.x shear: - path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.5/unions_shapepipe_cut_struc_2024_v1.4.5.fits + path: v1.4.5/unions_shapepipe_cut_struc_2024_v1.4.5.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt ra_col: RA dec_col: Dec @@ -785,15 +792,15 @@ SP_v1.4.5_uncal: w_col: w_des R: 1.0 psf: - path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + path: unions_shapepipe_psf_2024_v1.4.a.fits hdu: 1 patch_number: 100 SP_v1.4.7_uncal: pipeline: SP - subdir: CFIS/v1.0/ShapePipe/ + subdir: /n17data/UNIONS/WL/v1.4.x shear: - path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.7/unions_shapepipe_cut_struc_2024_v1.4.7.fits + path: v1.4.7/unions_shapepipe_cut_struc_2024_v1.4.7.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt ra_col: RA dec_col: Dec @@ -802,15 +809,15 @@ 
SP_v1.4.7_uncal: w_col: w_des R: 1.0 psf: - path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + path: unions_shapepipe_psf_2024_v1.4.a.fits hdu: 1 patch_number: 100 SP_v1.4.8_uncal: pipeline: SP - subdir: CFIS/v1.0/ShapePipe/ + subdir: /n17data/UNIONS/WL/v1.4.x shear: - path: ../../../../../../UNIONS/WL/v1.4.x/v1.4.8/unions_shapepipe_cut_struc_2024_v1.4.8.fits + path: v1.4.8/unions_shapepipe_cut_struc_2024_v1.4.8.fits covmat_file: ./covs/shapepipe_A/cov_shapepipe_A.txt ra_col: RA dec_col: Dec @@ -819,7 +826,7 @@ SP_v1.4.8_uncal: w_col: w_des R: 1.0 psf: - path: ../../../../../../UNIONS/WL/v1.4.x/unions_shapepipe_psf_2024_v1.4.a.fits + path: unions_shapepipe_psf_2024_v1.4.a.fits hdu: 1 patch_number: 100 diff --git a/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_lambda.py b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_lambda.py index 7f7d566f..9ce7322b 100644 --- a/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_lambda.py +++ b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_lambda.py @@ -15,16 +15,7 @@ import numpy as np sys.path.insert(0, '/home/mkilbing/astro/repositories/gitlab.euclid-sgs/FDQA/rho_tau_stats') -from cosmo_val import CosmologyValidation # noqa: E402 - - -def rename_output(output_bases, output_dir, suff, version_string, rand_str): - - for base in output_bases: - old_path = f"{output_dir}/plots/{base}{suff}" - new_path = f"{output_dir}/plots/{base}_{version_string}{rand_str}{suff}" - print(f"mv {old_path} -> {new_path}") - os.rename(old_path, new_path) +from cosmo_val import CosmologyValidation, rename_output # noqa: E402 # enable inline plotting for interactive sessions @@ -33,7 +24,7 @@ def rename_output(output_bases, output_dir, suff, version_string, rand_str): ipython.run_line_magic("matplotlib", "inline") # Different options -versions_146 = ["SP_v1.4.6", "SP_v1.4.6_leak_corr"] +versions_146 = ["SP_v1.4.6"] #, "SP_v1.4.6_leak_corr"] versions_var = ["SP_v1.4.5", "SP_v1.4.6", "SP_v1.4.7", "SP_v1.4.8"] # We use this combination of versions @@ -45,11 +36,12 @@ def rename_output(output_bases, output_dir, suff, version_string, rand_str): theta_max = 300.0 plot_range_fac = 1.1 + # %% cv = CosmologyValidation( versions=versions, - data_base_dir="/n17data/mkilbing/astro/data/", catalog_config="./cat_config.yaml", + data_base_dir="/", output_dir=output_dir, theta_min=theta_min, theta_max=theta_max, @@ -66,17 +58,41 @@ def rename_output(output_bases, output_dir, suff, version_string, rand_str): output_bases = [ "gammat_stars_around_galaxies_lin_non_tomographic", "gammat_stars_around_galaxies_log_non_tomographic", + "dgammat_stars_around_galaxies_lin_non_tomographic", + "dsize_stars_around_galaxies_lin_non_tomographic", ] suff = ".png" version_string = "_".join(versions) - +# lambda_1 +print("Computing λ_1...") cv.plot_gammat_stars_around_galaxies(offset=0.025, gammax=True, wo_rand_subtr=True) cv.plot_gammat_stars_around_galaxies(offset=0.025, gammax=True, logy=True, wo_rand_subtr=True) + +# lambda_2 +print("Computing λ_2...") +cv.plot_dgammat_stars_around_galaxies(offset=0.025, gammax=True, wo_rand_subtr=True) + +# lambda_3 +print("Computing λ_3...") +cv.plot_dsize_stars_around_galaxies(offset=0.025, gammax=True, wo_rand_subtr=True) + + rename_output(output_bases, output_dir, suff, version_string, "_wo_rand_subtr") # %% +# lambda_1 +print("Computing λ_1...") cv.plot_gammat_stars_around_galaxies(offset=0.025, gammax=True) cv.plot_gammat_stars_around_galaxies(offset=0.025, gammax=True, logy=True) + +# lambda_2 +print("Computing 
λ_2...") +cv.plot_dgammat_stars_around_galaxies(offset=0.025, gammax=True) + +# lambda_3 +print("Computing λ_3...") +cv.plot_dsize_stars_around_galaxies(offset=0.025, gammax=True) + rename_output(output_bases, output_dir, suff, version_string, "") From e6fb33aebcff19c3ef0fa79ac62c6e6b5e7100aa Mon Sep 17 00:00:00 2001 From: martinkilbinger Date: Thu, 8 Jan 2026 09:16:09 +0100 Subject: [PATCH 8/9] cosmo val GAIA --- notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_GAIA.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_GAIA.py b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_GAIA.py index af58f0e9..a3eae753 100644 --- a/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_GAIA.py +++ b/notebooks/cosmo_val/catalog_paper_plot/run_cosmo_val_GAIA.py @@ -24,7 +24,7 @@ # %% cv = CosmologyValidation( versions=["SP_v1.4.6_GAIA_bright", "SP_v1.4.6_GAIA_medium", "SP_v1.4.6_GAIA_faint"], - data_base_dir="/n17data/mkilbing/astro/data/", + data_base_dir="/", catalog_config="./cat_config.yaml", output_dir="./output", theta_min=1.0, From bad2fdddeb0407ccb9be73ae6e3b537dc13bb6af Mon Sep 17 00:00:00 2001 From: martinkilbinger Date: Mon, 26 Jan 2026 13:59:47 +0100 Subject: [PATCH 9/9] Ran black formatting --- src/sp_validation/cosmo_val.py | 1006 +++++++++++++++++++++----------- 1 file changed, 678 insertions(+), 328 deletions(-) diff --git a/src/sp_validation/cosmo_val.py b/src/sp_validation/cosmo_val.py index d1f276b7..1b5aa82a 100644 --- a/src/sp_validation/cosmo_val.py +++ b/src/sp_validation/cosmo_val.py @@ -203,13 +203,13 @@ def __init__( ylim_alpha=[-0.005, 0.05], ylim_xi_sys_ratio=[-0.02, 0.5], nside=1024, - nside_mask = 2**12, + nside_mask=2**12, binning="powspace", power=1 / 2, n_ell_bins=32, ell_step=10, pol_factor=True, - cell_method='map', + cell_method="map", nrandom_cell=10, path_onecovariance=None, cosmo_params=None, @@ -239,7 +239,10 @@ def __init__( self.nside_mask = nside_mask self.path_onecovariance = path_onecovariance - assert self.cell_method in ["map", "catalog"], "cell_method must be 'map' or 'catalog'" + assert self.cell_method in [ + "map", + "catalog", + ], "cell_method must be 'map' or 'catalog'" # For theory calculations: # Create cosmology object using new functionality @@ -249,7 +252,6 @@ def __init__( # Use Planck 2018 defaults self.cosmo = get_cosmo() - self.treecorr_config = { "ra_units": "degrees", "dec_units": "degrees", @@ -258,7 +260,9 @@ def __init__( "sep_units": "arcmin", "nbins": nbins, "var_method": var_method, - "cross_patch_weight": "match" if var_method == "jackknife" else "simple", + "cross_patch_weight": ( + "match" if var_method == "jackknife" else "simple" + ), } self.catalog_config_path = Path(catalog_config) @@ -295,7 +299,10 @@ def ensure_version_exists(ver): base_ver = ver[: -len(leak_suffix)] ensure_version_exists(base_ver) shear_cfg = cc[base_ver]["shear"] - if "e1_col_corrected" not in shear_cfg or "e2_col_corrected" not in shear_cfg: + if ( + "e1_col_corrected" not in shear_cfg + or "e2_col_corrected" not in shear_cfg + ): raise ValueError( f"{base_ver} does not have e1_col_corrected/e2_col_corrected " f"fields; cannot create {ver}" @@ -414,7 +421,9 @@ def compute_survey_stats( weight_column = weights_key_override or shear_cfg["w_col"] if weight_column not in data.columns.names: - raise KeyError(f"Weight column '{weight_column}' missing in {catalog_path}") + raise KeyError( + f"Weight column '{weight_column}' missing in {catalog_path}" + ) w = 
np.asarray(data[weight_column], dtype=float) @@ -428,9 +437,15 @@ def compute_survey_stats( mask_candidate = mask_path else: mask_candidate = self.cc[ver].get("mask") - if isinstance(mask_candidate, str) and not os.path.isabs(mask_candidate): - mask_candidate = str(Path(self.cc[ver]["subdir"]) / mask_candidate) - if mask_candidate is not None and not os.path.exists(mask_candidate): + if isinstance(mask_candidate, str) and not os.path.isabs( + mask_candidate + ): + mask_candidate = str( + Path(self.cc[ver]["subdir"]) / mask_candidate + ) + if mask_candidate is not None and not os.path.exists( + mask_candidate + ): mask_candidate = None area_deg2 = None @@ -481,7 +496,9 @@ def _area_from_catalog(self, catalog_path, nside): def _area_from_mask(self, mask_map_path): mask = hp.read_map(mask_map_path, dtype=np.float64) - return float(mask.sum() * hp.nside2pixarea(hp.get_nside(mask), degrees=True)) + return float( + mask.sum() * hp.nside2pixarea(hp.get_nside(mask), degrees=True) + ) def _write_catalog_config(self): with self.catalog_config_path.open("w") as file: @@ -528,8 +545,12 @@ def set_params_leakage_scale(self, ver): params_in["e1_col"] = self.cc[ver]["shear"]["e1_col"] params_in["e2_col"] = self.cc[ver]["shear"]["e2_col"] params_in["w_col"] = self.cc[ver]["shear"]["w_col"] - params_in["R11"] = None if ver != "DES" else self.cc[ver]["shear"]["R11"] - params_in["R22"] = None if ver != "DES" else self.cc[ver]["shear"]["R22"] + params_in["R11"] = ( + None if ver != "DES" else self.cc[ver]["shear"]["R11"] + ) + params_in["R22"] = ( + None if ver != "DES" else self.cc[ver]["shear"]["R22"] + ) params_in["ra_star_col"] = self.cc[ver]["star"]["ra_col"] params_in["dec_star_col"] = self.cc[ver]["star"]["dec_col"] @@ -610,7 +631,6 @@ def basename(self, version, treecorr_config=None, npatch=None): f"_npatch={patches}" ) - def calculate_rho_tau_stats(self): out_dir = f"{self.cc['paths']['output']}/rho_tau_stats" if not os.path.exists(out_dir): @@ -649,13 +669,13 @@ def tau_stat_handler(self): @property def colors(self): return [self.cc[ver]["colour"] for ver in self.versions] - + @property def area(self): if not hasattr(self, "_area"): self.calculate_area() return self._area - + @property def n_eff_gal(self): if not hasattr(self, "_n_eff_gal"): @@ -674,7 +694,7 @@ def pseudo_cls(self): self.calculate_pseudo_cl() self.calculate_pseudo_cl_eb_cov() return self._pseudo_cls - + @property def pseudo_cls_onecov(self): if not hasattr(self, "_pseudo_cls_onecov"): @@ -687,33 +707,42 @@ def calculate_area(self): for ver in self.versions: self.print_magenta(ver) - if not hasattr(self.cc[ver]['shear'], 'mask'): - print("Mask not found in config file, calculating area from binned catalog") + if not hasattr(self.cc[ver]["shear"], "mask"): + print( + "Mask not found in config file, calculating area from binned catalog" + ) area[ver] = self.calculate_area_from_binned_catalog(ver) else: - mask = hp.read_map(self.cc[ver]['shear']['mask'], verbose=False) + mask = hp.read_map(self.cc[ver]["shear"]["mask"], verbose=False) nside_mask = hp.get_nside(mask) print(f"nside_mask = {nside_mask}") - area[ver] = np.sum(mask) * hp.nside2pixarea(nside_mask, degrees=True) + area[ver] = np.sum(mask) * hp.nside2pixarea( + nside_mask, degrees=True + ) print(f"Area = {area[ver]:.2f} deg^2") self._area = area self.print_done("Area calculation finished") - + def calculate_area_from_binned_catalog(self, ver): print(f"nside_mask = {self.nside_mask}") with self.results[ver].temporarily_read_data(): ra = self.results[ver].dat_shear["RA"] dec = 
self.results[ver].dat_shear["Dec"] hsp_map = hp.ang2pix( - self.nside_mask, + self.nside_mask, np.radians(90 - dec), np.radians(ra), lonlat=False, ) - mask = np.bincount(hsp_map, minlength=hp.nside2npix(self.nside_mask)) > 0 + mask = ( + np.bincount(hsp_map, minlength=hp.nside2npix(self.nside_mask)) + > 0 + ) - area = np.sum(mask) * hp.nside2pixarea(self.nside_mask, degrees=True) + area = np.sum(mask) * hp.nside2pixarea( + self.nside_mask, degrees=True + ) print(f"Area = {area:.2f} deg^2") return area @@ -725,9 +754,14 @@ def calculate_n_eff_gal(self): self.print_magenta(ver) with self.results[ver].temporarily_read_data(): w = self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]] - n_eff_gal[ver] = 1/(self.area[ver]*60*60)* np.sum(w)**2/np.sum(w**2) + n_eff_gal[ver] = ( + 1 + / (self.area[ver] * 60 * 60) + * np.sum(w) ** 2 + / np.sum(w**2) + ) print(f"n_eff_gal = {n_eff_gal[ver]:.2f} gal./arcmin^-2") - + self._n_eff_gal = n_eff_gal self.print_done("Effective number of galaxy calculation finished") @@ -737,13 +771,23 @@ def calculate_ellipticity_dispersion(self): for ver in self.versions: self.print_magenta(ver) with self.results[ver].temporarily_read_data(): - e1 = self.results[ver].dat_shear[self.cc[ver]["shear"]["e1_col"]] - e2 = self.results[ver].dat_shear[self.cc[ver]["shear"]["e2_col"]] + e1 = self.results[ver].dat_shear[ + self.cc[ver]["shear"]["e1_col"] + ] + e2 = self.results[ver].dat_shear[ + self.cc[ver]["shear"]["e2_col"] + ] w = self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]] ellipticity_dispersion[ver] = np.sqrt( - 0.5*(np.average(e1**2, weights=w**2) + np.average(e2**2, weights=w**2)) + 0.5 + * ( + np.average(e1**2, weights=w**2) + + np.average(e2**2, weights=w**2) + ) + ) + print( + f"Ellipticity dispersion = {ellipticity_dispersion[ver]:.4f}" ) - print(f"Ellipticity dispersion = {ellipticity_dispersion[ver]:.4f}") self._ellipticity_dispersion = ellipticity_dispersion def plot_rho_stats(self, abs=False): @@ -836,7 +880,11 @@ def calculate_rho_tau_fits(self): assert self.rho_tau_method != "none" # this initializes the rho_tau_fits attribute - self._rho_tau_fits = {"flat_sample_list": [], "result_list": [], "q_list": []} + self._rho_tau_fits = { + "flat_sample_list": [], + "result_list": [], + "q_list": [], + } quantiles = [1 - self.quantile, self.quantile] self._xi_psf_sys = {} @@ -866,7 +914,9 @@ def calculate_rho_tau_fits(self): self.rho_tau_fits["result_list"].append(result) self.rho_tau_fits["q_list"].append(q) - self.psf_fitter.load_rho_stat(f"rho_stats_{self.basename(ver)}.fits") + self.psf_fitter.load_rho_stat( + f"rho_stats_{self.basename(ver)}.fits" + ) nbins = self.psf_fitter.rho_stat_handler._treecorr_config["nbins"] xi_psf_sys_samples = np.array([]).reshape(0, nbins) @@ -902,7 +952,9 @@ def plot_rho_tau_fits(self): show=True, close=True, ) - self.print_done(f"Tau contours plot saved to {os.path.abspath(savefig)}") + self.print_done( + f"Tau contours plot saved to {os.path.abspath(savefig)}" + ) plt.figure(figsize=(15, 6)) for mcmc_result, ver, color, flat_sample in zip( @@ -911,7 +963,9 @@ def plot_rho_tau_fits(self): self.colors, self.rho_tau_fits["flat_sample_list"], ): - self.psf_fitter.load_rho_stat(f"rho_stats_{self.basename(ver)}.fits") + self.psf_fitter.load_rho_stat( + f"rho_stats_{self.basename(ver)}.fits" + ) for i in range(100): self.psf_fitter.plot_xi_psf_sys( flat_sample[-i + 1], ver, color, alpha=0.1 @@ -934,8 +988,12 @@ def plot_rho_tau_fits(self): theta = self.psf_fitter.rho_stat_handler.rho_stats["theta"] xi_psf_sys = 
self.xi_psf_sys[ver] plt.plot(theta, xi_psf_sys["mean"], linestyle=ls, color=color) - plt.plot(theta, xi_psf_sys["quantiles"][0], linestyle=ls, color=color) - plt.plot(theta, xi_psf_sys["quantiles"][1], linestyle=ls, color=color) + plt.plot( + theta, xi_psf_sys["quantiles"][0], linestyle=ls, color=color + ) + plt.plot( + theta, xi_psf_sys["quantiles"][1], linestyle=ls, color=color + ) plt.fill_between( theta, xi_psf_sys["quantiles"][0], @@ -961,7 +1019,9 @@ def plot_rho_tau_fits(self): self.versions, self.rho_tau_fits["flat_sample_list"], ): - self.psf_fitter.load_rho_stat(f"rho_stats_{self.basename(ver)}.fits") + self.psf_fitter.load_rho_stat( + f"rho_stats_{self.basename(ver)}.fits" + ) for yscale in ("linear", "log"): out_path = os.path.abspath( f"{out_dir}/xi_psf_sys_terms_{yscale}_{ver}.png" @@ -1013,20 +1073,28 @@ def calculate_scale_dependent_leakage(self): output_path_ab = f"{output_base_path}_a_b.txt" output_path_aa = f"{output_base_path}_a_a.txt" with self.results[ver].temporarily_read_data(): - if os.path.exists(output_path_ab) and os.path.exists(output_path_aa): + if os.path.exists(output_path_ab) and os.path.exists( + output_path_aa + ): self.print_green( f"Skipping computation, reading {output_path_ab} and " f"{output_path_aa} instead" ) - results.r_corr_gp = treecorr.GGCorrelation(self.treecorr_config) + results.r_corr_gp = treecorr.GGCorrelation( + self.treecorr_config + ) results.r_corr_gp.read(output_path_ab) - results.r_corr_pp = treecorr.GGCorrelation(self.treecorr_config) + results.r_corr_pp = treecorr.GGCorrelation( + self.treecorr_config + ) results.r_corr_pp.read(output_path_aa) else: - results.compute_corr_gp_pp_alpha(output_base_path=output_base_path) + results.compute_corr_gp_pp_alpha( + output_base_path=output_base_path + ) results.do_alpha(fast=True) results.do_xi_sys() @@ -1130,7 +1198,9 @@ def plot_scale_dependent_leakage(self): xlabel = r"$\theta$ [arcmin]" ylabel = r"$\xi^{\rm sys}_+(\theta)$" title = "Cross-correlation leakage" - out_path = os.path.abspath(f"{self.cc['paths']['output']}/xi_sys_p.png") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/xi_sys_p.png" + ) cs_plots.plot_data_1d( theta, y, @@ -1161,7 +1231,9 @@ def plot_scale_dependent_leakage(self): xlabel = r"$\theta$ [arcmin]" ylabel = r"$\xi^{\rm sys}_-(\theta)$" title = "Cross-correlation leakage" - out_path = os.path.abspath(f"{self.cc['paths']['output']}/xi_sys_m.png") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/xi_sys_m.png" + ) cs_plots.plot_data_1d( theta, y, @@ -1197,9 +1269,6 @@ def calculate_objectwise_leakage(self): results_obj.update_params() results_obj.prepare_output() - # Skip read_data() and copy catalogue from scale leakage instance instead - # results_obj._dat = self.results[ver].dat_shear - out_base = results_obj.get_out_base(mix, order) out_path = f"{out_base}.pkl" if os.path.exists(out_path): @@ -1301,12 +1370,46 @@ def plot_objectwise_leakage(self): ) cs_plots.savefig(out_path, close_fig=False) cs_plots.show() - self.print_done(f"Object-wise leakage coefficients plot saved to {out_path}") + self.print_done( + f"Object-wise leakage coefficients plot saved to {out_path}" + ) + + def calculate_objectwise_leakage_aux(self): + + self.print_start("Object-wise leakage auxiliary quantities:") + for ver in self.versions: + self.print_magenta(ver) + + results_obj = self.results_objectwise[ver] + print("MKDEBUG query done") + results_obj.check_params() + results_obj.update_params() + results_obj.prepare_output() + + with 
results_obj.temporarily_read_data(): + results_obj._dat = results_obj.dat_shear + + if not "cols" in results_obj._params: + self.print_green( + "Skipping object-wise leakage (aux quantities), no input columns for regression found" + ) + else: + self.print_cyan( + f"Computing object-wise leakage regression with aux quantities: {results_obj._params['cols']}" + ) + + # Run + results_obj.obs_leakage() + + def plot_objectwise_leakage_aux(self): + self.calculate_objectwise_leakage_aux() def plot_ellipticity(self, nbins=200): out_path = os.path.abspath(f"{self.cc['paths']['output']}/ell_hist.png") if os.path.exists(out_path): - self.print_done(f"Skipping ellipticity histograms, {out_path} exists") + self.print_done( + f"Skipping ellipticity histograms, {out_path} exists" + ) else: self.print_start("Computing ellipticity histograms:") @@ -1317,12 +1420,20 @@ def plot_ellipticity(self, nbins=200): R = self.cc[ver]["shear"]["R"] with self.results[ver].temporarily_read_data(): e1 = ( - self.results[ver].dat_shear[self.cc[ver]["shear"]["e1_col"]] / R + self.results[ver].dat_shear[ + self.cc[ver]["shear"]["e1_col"] + ] + / R ) e2 = ( - self.results[ver].dat_shear[self.cc[ver]["shear"]["e2_col"]] / R + self.results[ver].dat_shear[ + self.cc[ver]["shear"]["e2_col"] + ] + / R ) - w = self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]] + w = self.results[ver].dat_shear[ + self.cc[ver]["shear"]["w_col"] + ] axs[0].hist( e1, @@ -1353,7 +1464,9 @@ def plot_ellipticity(self, nbins=200): self.print_done("Ellipticity histograms saved to " + out_path) def plot_weights(self, nbins=200): - out_path = os.path.abspath(f"{self.cc['paths']['output']}/weight_hist.png") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/weight_hist.png" + ) if os.path.exists(out_path): self.print_done(f"Skipping weight histograms, {out_path} exists") else: @@ -1363,7 +1476,9 @@ def plot_weights(self, nbins=200): for ver in self.versions: self.print_magenta(ver) with self.results[ver].temporarily_read_data(): - w = self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]] + w = self.results[ver].dat_shear[ + self.cc[ver]["shear"]["w_col"] + ] plt.hist( w, @@ -1437,7 +1552,9 @@ def c2(self): self.calculate_additive_bias() return self._c2 - def calculate_2pcf(self, ver, npatch=None, save_fits=False, **treecorr_config): + def calculate_2pcf( + self, ver, npatch=None, save_fits=False, **treecorr_config + ): """ Calculate the two-point correlation function (2PCF) ξ± for a given catalog version with TreeCorr. 
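For orientation, the measurement this method wraps is the standard TreeCorr shear-shear pattern: build a Catalog with jackknife patches, then process a GGCorrelation with the shared binning. A minimal sketch, assuming placeholder arrays and illustrative binning values (none of the names below come from the patch):

    import numpy as np
    import treecorr

    # Placeholder inputs; in the class these come from the version's
    # calibrated shear catalogue and self.treecorr_config.
    ra, dec = np.random.uniform(0.0, 10.0, (2, 10_000))  # degrees
    e1, e2 = 0.02 * np.random.randn(2, 10_000)
    w = np.ones(10_000)

    cat = treecorr.Catalog(
        ra=ra, dec=dec, g1=e1, g2=e2, w=w,
        ra_units="degrees", dec_units="degrees",
        npatch=100,  # patches used for jackknife errors
    )
    gg = treecorr.GGCorrelation(
        min_sep=1.0, max_sep=300.0, nbins=20,
        sep_units="arcmin", bin_type="Log",
        var_method="jackknife",
    )
    gg.process(cat)
    # gg.meanr, gg.xip, gg.xim, gg.varxip, gg.varxim hold the binned 2PCF

The patch-centre caching done below (passing patch_centers when the file exists, writing it otherwise) is what keeps the jackknife patches consistent across catalogue versions.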
@@ -1496,8 +1613,12 @@ def calculate_2pcf(self, ver, npatch=None, save_fits=False, **treecorr_config): else: # Load data and create a catalog with self.results[ver].temporarily_read_data(): - e1 = self.results[ver].dat_shear[self.cc[ver]["shear"]["e1_col"]] - e2 = self.results[ver].dat_shear[self.cc[ver]["shear"]["e2_col"]] + e1 = self.results[ver].dat_shear[ + self.cc[ver]["shear"]["e1_col"] + ] + e2 = self.results[ver].dat_shear[ + self.cc[ver]["shear"]["e2_col"] + ] w = self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]] if ver != "DES": R = self.cc[ver]["shear"]["R"] @@ -1527,7 +1648,9 @@ def calculate_2pcf(self, ver, npatch=None, save_fits=False, **treecorr_config): ra_units=self.treecorr_config["ra_units"], dec_units=self.treecorr_config["dec_units"], npatch=npatch, - patch_centers=patch_file if os.path.exists(patch_file) else None, + patch_centers=( + patch_file if os.path.exists(patch_file) else None + ), ) # If no patch file exists, save the current patches @@ -1547,13 +1670,17 @@ def calculate_2pcf(self, ver, npatch=None, save_fits=False, **treecorr_config): col2 = fits.Column(name="BIN2", format="K", array=np.ones(len(lst))) col3 = fits.Column(name="ANGBIN", format="K", array=lst) col4 = fits.Column(name="VALUE", format="D", array=gg.xip) - col5 = fits.Column(name="ANG", format="D", unit="arcmin", array=gg.meanr) + col5 = fits.Column( + name="ANG", format="D", unit="arcmin", array=gg.meanr + ) coldefs = fits.ColDefs([col1, col2, col3, col4, col5]) xiplus_hdu = fits.BinTableHDU.from_columns(coldefs, name="XI_PLUS") col4 = fits.Column(name="VALUE", format="D", array=gg.xim) coldefs = fits.ColDefs([col1, col2, col3, col4, col5]) - ximinus_hdu = fits.BinTableHDU.from_columns(coldefs, name="XI_MINUS") + ximinus_hdu = fits.BinTableHDU.from_columns( + coldefs, name="XI_MINUS" + ) # append xi_plus header info xiplus_dict = { @@ -1567,19 +1694,27 @@ def calculate_2pcf(self, ver, npatch=None, save_fits=False, **treecorr_config): for key in xiplus_dict: xiplus_hdu.header[key] = xiplus_dict[key] - col1 = fits.Column(name="BIN1", format="K", array=np.ones(len(lst))) - col2 = fits.Column(name="BIN2", format="K", array=np.ones(len(lst))) + col1 = fits.Column( + name="BIN1", format="K", array=np.ones(len(lst)) + ) + col2 = fits.Column( + name="BIN2", format="K", array=np.ones(len(lst)) + ) col3 = fits.Column(name="ANGBIN", format="K", array=lst) col4 = fits.Column(name="VALUE", format="D", array=gg.xip) col5 = fits.Column( name="ANG", format="D", unit="arcmin", array=gg.rnom ) coldefs = fits.ColDefs([col1, col2, col3, col4, col5]) - xiplus_hdu = fits.BinTableHDU.from_columns(coldefs, name="XI_PLUS") + xiplus_hdu = fits.BinTableHDU.from_columns( + coldefs, name="XI_PLUS" + ) col4 = fits.Column(name="VALUE", format="D", array=gg.xim) coldefs = fits.ColDefs([col1, col2, col3, col4, col5]) - ximinus_hdu = fits.BinTableHDU.from_columns(coldefs, name="XI_MINUS") + ximinus_hdu = fits.BinTableHDU.from_columns( + coldefs, name="XI_MINUS" + ) # append xi_plus header info xiplus_dict = { @@ -1689,7 +1824,8 @@ def plot_2pcf(self): plt.errorbar( self.cat_ggs[ver].meanr, self.cat_ggs[ver].xip * self.cat_ggs[ver].meanr, - yerr=np.sqrt(self.cat_ggs[ver].varxip) * self.cat_ggs[ver].meanr, + yerr=np.sqrt(self.cat_ggs[ver].varxip) + * self.cat_ggs[ver].meanr, label=ver, ls=self.cc[ver]["ls"], color=self.cc[ver]["colour"], @@ -1700,7 +1836,9 @@ def plot_2pcf(self): plt.xlabel(rf"$\theta$ [{self.treecorr_config['sep_units']}]") plt.xlim([self.theta_min_plot, self.theta_max_plot]) plt.ylabel(r"$\theta 
\xi_+(\theta)$") - out_path = os.path.abspath(f"{self.cc['paths']['output']}/xi_p_theta.png") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/xi_p_theta.png" + ) cs_plots.savefig(out_path, close_fig=False) cs_plots.show() self.print_done(f"xi_plus_theta plot saved to {out_path}") @@ -1711,7 +1849,8 @@ def plot_2pcf(self): plt.errorbar( self.cat_ggs[ver].meanr * cs_plots.dx(idx, len(ver)), self.cat_ggs[ver].xim * self.cat_ggs[ver].meanr, - yerr=np.sqrt(self.cat_ggs[ver].varxim) * self.cat_ggs[ver].meanr, + yerr=np.sqrt(self.cat_ggs[ver].varxim) + * self.cat_ggs[ver].meanr, label=ver, ls=self.cc[ver]["ls"], color=self.cc[ver]["colour"], @@ -1722,7 +1861,9 @@ def plot_2pcf(self): plt.xlabel(rf"$\theta$ [{self.treecorr_config['sep_units']}]") plt.xlim([self.theta_min_plot, self.theta_max_plot]) plt.ylabel(r"$\theta \xi_-(\theta)$") - out_path = os.path.abspath(f"{self.cc['paths']['output']}/xi_m_theta.png") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/xi_m_theta.png" + ) cs_plots.savefig(out_path, close_fig=False) cs_plots.show() self.print_done(f"xi_minus_theta plot saved to {out_path}") @@ -1772,13 +1913,14 @@ def plot_2pcf(self): ) cs_plots.savefig(out_path, close_fig=False) cs_plots.show() - self.print_done(f"xi_plus_xi_psf_sys {ver} plot saved to {out_path}") + self.print_done( + f"xi_plus_xi_psf_sys {ver} plot saved to {out_path}" + ) def plot_ratio_xi_sys_xi(self, threshold=0.1, offset=0.02): fig, _ = plt.subplots(ncols=1, nrows=1, figsize=(10, 7)) - for idx, ver in enumerate(self.versions): xi_psf_sys = self.xi_psf_sys[ver] gg = self.cat_ggs[ver] @@ -1790,7 +1932,7 @@ def plot_ratio_xi_sys_xi(self, threshold=0.1, offset=0.02): ) theta = gg.meanr - jittered_theta = theta * (1+idx*offset) + jittered_theta = theta * (1 + idx * offset) plt.errorbar( jittered_theta, @@ -1800,13 +1942,13 @@ def plot_ratio_xi_sys_xi(self, threshold=0.1, offset=0.02): ls=self.cc[ver]["ls"], color=self.cc[ver]["colour"], fmt=self.cc[ver].get("marker", None), - capsize=5 + capsize=5, ) plt.fill_between( [self.theta_min_plot, self.theta_max_plot], - - threshold, - + threshold, + -threshold, + +threshold, color="black", alpha=0.1, label=f"{threshold:.0%} threshold", @@ -1815,24 +1957,27 @@ def plot_ratio_xi_sys_xi(self, threshold=0.1, offset=0.02): [self.theta_min_plot, self.theta_max_plot], [threshold, threshold], ls="dashed", - color="black") + color="black", + ) plt.plot( [self.theta_min_plot, self.theta_max_plot], [-threshold, -threshold], ls="dashed", - color="black") + color="black", + ) plt.xscale("log") plt.xlabel(rf"$\theta$ [{self.treecorr_config['sep_units']}]") plt.ylabel(r"$\xi^{\rm psf}_{+, {\rm sys}} / \xi_+$") plt.gca().yaxis.set_major_formatter(mticker.PercentFormatter(xmax=1)) plt.legend() plt.title("Ratio of PSF systematics to cosmic shear signal") - out_path = os.path.abspath(f"{self.cc['paths']['output']}/ratio_xi_sys_xi.png") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/ratio_xi_sys_xi.png" + ) cs_plots.savefig(out_path, close_fig=False) cs_plots.show() print(f"Ratio of xi_psf_sys to xi plot saved to {out_path}") - def calculate_aperture_mass_dispersion( self, theta_min=0.3, theta_max=200, nbins=500, nbins_map=15, npatch=25 ): @@ -1864,11 +2009,15 @@ def calculate_aperture_mass_dispersion( with self.results[ver].temporarily_read_data(): R = self.cc[ver]["shear"]["R"] g1 = ( - self.results[ver].dat_shear[self.cc[ver]["shear"]["e1_col"]] + self.results[ver].dat_shear[ + self.cc[ver]["shear"]["e1_col"] + ] - self.c1[ver] ) / R g2 = ( - 
self.results[ver].dat_shear[self.cc[ver]["shear"]["e2_col"]] + self.results[ver].dat_shear[ + self.cc[ver]["shear"]["e2_col"] + ] - self.c2[ver] ) / R cat_gal = treecorr.Catalog( @@ -1876,7 +2025,9 @@ def calculate_aperture_mass_dispersion( dec=self.results[ver].dat_shear["Dec"], g1=g1, g2=g2, - w=self.results[ver].dat_shear[self.cc[ver]["shear"]["w_col"]], + w=self.results[ver].dat_shear[ + self.cc[ver]["shear"]["w_col"] + ], ra_units=self.treecorr_config["ra_units"], dec_units=self.treecorr_config["dec_units"], npatch=npatch, @@ -1935,7 +2086,9 @@ def plot_aperture_mass_dispersion(self): xlabel = r"$\theta$ [arcmin]" ylabel = "dispersion" title = f"Aperture-mass dispersion {mode}" - out_path = os.path.abspath(f"{self.cc['paths']['output']}/{mode}.png") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/{mode}.png" + ) cs_plots.plot_data_1d( x, y, @@ -1967,7 +2120,9 @@ def plot_aperture_mass_dispersion(self): xlabel = r"$\theta$ [arcmin]" ylabel = "dispersion" title = f"Aperture-mass dispersion mode {mode}" - out_path = os.path.abspath(f"{self.cc['paths']['output']}/{mode}_log.png") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/{mode}_log.png" + ) cs_plots.plot_data_1d( x, y, @@ -2095,7 +2250,9 @@ def calculate_pure_eb( # Calculate correlation functions gg = self.calculate_2pcf(version, npatch=npatch, **treecorr_config) - gg_int = self.calculate_2pcf(version, npatch=npatch, **treecorr_config_int) + gg_int = self.calculate_2pcf( + version, npatch=npatch, **treecorr_config_int + ) # Get redshift distribution if using analytic covariance if cov_path_int is not None: @@ -2112,7 +2269,7 @@ def calculate_pure_eb( cov_path_int=cov_path_int, cosmo_cov=cosmo_cov, n_samples=n_samples, - z_dist=z_dist + z_dist=z_dist, ) return results @@ -2135,7 +2292,7 @@ def plot_pure_eb( cosmo_cov=None, n_samples=1000, results=None, - **kwargs + **kwargs, ): """ Generate comprehensive pure E/B mode analysis plots. 
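Before the full pure-E/B machinery in this method, a quick E/B consistency check can already be read off a processed correlation through TreeCorr's aperture-mass route. A sketch, assuming gg is a processed treecorr.GGCorrelation (for instance from the sketch further above) whose angular range comfortably brackets the aperture radii; the variable names are illustrative:

    import numpy as np

    # <Map^2> integrates xi+/- over roughly [R/2, 2R], so gg should be
    # processed on a wider range than the radii requested here.
    R_ap = np.logspace(0.0, 2.0, 15)  # aperture radii in arcmin
    mapsq, mapsq_im, mxsq, mxsq_im, varmapsq = gg.calculateMapSq(R=R_ap)
    # mapsq is the E-mode dispersion <Map^2>, mxsq the B-mode <Mx^2>;
    # a clean shear signal should leave mxsq consistent with zero.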
@@ -2200,7 +2357,7 @@ def plot_pure_eb( # Use instance defaults for unspecified parameters versions = versions or self.versions - output_dir = output_dir or self.cc['paths']['output'] + output_dir = output_dir or self.cc["paths"]["output"] npatch = npatch or self.npatch # Override var_method to analytic when cov_path_int is provided @@ -2246,26 +2403,26 @@ def plot_pure_eb( # Get or calculate results for this version version_results = results_list[idx] or self.calculate_pure_eb( - version, - min_sep=min_sep, - max_sep=max_sep, - nbins=nbins, - min_sep_int=min_sep_int, - max_sep_int=max_sep_int, - nbins_int=nbins_int, - npatch=npatch, - var_method=var_method, - cov_path_int=cov_path_int, - cosmo_cov=cosmo_cov, - n_samples=n_samples, - ) + version, + min_sep=min_sep, + max_sep=max_sep, + nbins=nbins, + min_sep_int=min_sep_int, + max_sep_int=max_sep_int, + nbins_int=nbins_int, + npatch=npatch, + var_method=var_method, + cov_path_int=cov_path_int, + cosmo_cov=cosmo_cov, + n_samples=n_samples, + ) # Calculate E/B statistics for all bin combinations (only if not provided) version_results = calculate_eb_statistics( version_results, cov_path_int=cov_path_int, n_samples=n_samples, - **kwargs + **kwargs, ) # Generate all plots using specialized plotting functions @@ -2273,9 +2430,7 @@ def plot_pure_eb( # Integration vs Reporting comparison plot plot_integration_vs_reporting( - gg, gg_int, - out_stub + "_integration_vs_reporting.png", - version + gg, gg_int, out_stub + "_integration_vs_reporting.png", version ) # E/B/Ambiguous correlation functions plot @@ -2284,7 +2439,7 @@ def plot_pure_eb( out_stub + "_xis.png", version, fiducial_xip_scale_cut=fiducial_xip_scale_cut, - fiducial_xim_scale_cut=fiducial_xim_scale_cut + fiducial_xim_scale_cut=fiducial_xim_scale_cut, ) # 2D PTE heatmaps plot @@ -2293,7 +2448,7 @@ def plot_pure_eb( version, out_stub + "_ptes.png", fiducial_xip_scale_cut=fiducial_xip_scale_cut, - fiducial_xim_scale_cut=fiducial_xim_scale_cut + fiducial_xim_scale_cut=fiducial_xim_scale_cut, ) # Covariance matrix plot @@ -2301,7 +2456,7 @@ def plot_pure_eb( version_results["cov"], var_method, out_stub + "_covariance.png", - version + version, ) def calculate_cosebis( @@ -2405,7 +2560,7 @@ def calculate_cosebis( scale_cuts = [ (bin_edges[start], bin_edges[stop]) for start in range(nbins) - for stop in range(start+1, nbins+1) + for stop in range(start + 1, nbins + 1) ] print(f"Evaluating {len(scale_cuts)} scale cut combinations") @@ -2428,11 +2583,17 @@ def plot_cosebis( self, version=None, output_dir=None, - min_sep_int=0.5, max_sep_int=500, nbins_int=1000, # Integration binning - npatch=None, nmodes=10, cov_path=None, - evaluate_all_scale_cuts=False, # New parameter - min_sep=None, max_sep=None, nbins=None, # Reporting binning - fiducial_scale_cut=None, # For plotting reference + min_sep_int=0.5, + max_sep_int=500, + nbins_int=1000, # Integration binning + npatch=None, + nmodes=10, + cov_path=None, + evaluate_all_scale_cuts=False, # New parameter + min_sep=None, + max_sep=None, + nbins=None, # Reporting binning + fiducial_scale_cut=None, # For plotting reference results=None, ): """ @@ -2488,8 +2649,8 @@ def plot_cosebis( # Use instance defaults if not specified version = version or self.versions[0] - output_dir = output_dir or self.cc['paths']['output'] - npatch = npatch or self.treecorr_config.get('npatch', 256) + output_dir = output_dir or self.cc["paths"]["output"] + npatch = npatch or self.treecorr_config.get("npatch", 256) # Determine variance method based on whether theoretical 
covariance is used var_method = "analytic" if cov_path is not None else "jackknife" @@ -2503,7 +2664,9 @@ def plot_cosebis( # Add scale cut info if provided if fiducial_scale_cut is not None: - out_stub += f"_scalecut={fiducial_scale_cut[0]}-{fiducial_scale_cut[1]}" + out_stub += ( + f"_scalecut={fiducial_scale_cut[0]}-{fiducial_scale_cut[1]}" + ) # if evaluate_all_scale_cuts: # out_stub += f"_allcuts_minsep={min_sep}_maxsep={max_sep}_nbins={nbins}" @@ -2527,8 +2690,9 @@ def plot_cosebis( # Generate plots using specialized plotting functions # Extract single result for plotting if multiple scale cuts were evaluated - if (isinstance(results, dict) and - all(isinstance(k, tuple) for k in results.keys())): + if isinstance(results, dict) and all( + isinstance(k, tuple) for k in results.keys() + ): # Multiple scale cuts: use fiducial_scale_cut if provided, otherwise use # full range if fiducial_scale_cut is not None: @@ -2547,20 +2711,19 @@ def plot_cosebis( plot_results, version, out_stub + "_cosebis.png", - fiducial_scale_cut=fiducial_scale_cut + fiducial_scale_cut=fiducial_scale_cut, ) plot_cosebis_covariance_matrix( - plot_results, - version, - var_method, - out_stub + "_covariance.png" + plot_results, version, var_method, out_stub + "_covariance.png" ) # Generate scale cut heatmap if we have multiple scale cuts - if (isinstance(results, dict) and - all(isinstance(k, tuple) for k in results.keys()) and - len(results) > 1): + if ( + isinstance(results, dict) + and all(isinstance(k, tuple) for k in results.keys()) + and len(results) > 1 + ): # Create temporary gg object with correct binning for mapping treecorr_config_temp = { **self.treecorr_config, @@ -2577,10 +2740,9 @@ def plot_cosebis( gg_temp, version, out_stub + "_scalecut_ptes.png", - fiducial_scale_cut=fiducial_scale_cut + fiducial_scale_cut=fiducial_scale_cut, ) - def calculate_pseudo_cl_eb_cov(self): """ Compute a theoretical Gaussian covariance of the Pseudo-Cl for EE, EB and BB. 
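The bandpower construction repeated in the pseudo-Cl methods below (and again in get_pseudo_cls_map and get_pseudo_cls_catalog) reduces to a few lines of NaMaster. A sketch of the "powspace" branch, using the class defaults as assumed values:

    import numpy as np
    import pymaster as nmt

    lmin, nside = 8, 1024          # class defaults, assumed here
    lmax = 2 * nside
    n_ell_bins, power = 32, 0.5

    ells = np.arange(lmin, lmax + 1)
    # Band edges equally spaced in ell**power, mapped back to ell
    edges = np.linspace(lmin**power, lmax**power, n_ell_bins + 1) ** (1 / power)
    bpws = np.digitize(ells.astype(float), edges) - 1
    bpws[0], bpws[-1] = 0, n_ell_bins - 1  # clamp the boundary multipoles
    b = nmt.NmtBin(ells=ells, bpws=bpws, lmax=lmax - 1)
    # b.get_effective_ells() gives the band centres plotted later

Spacing the bands uniformly in sqrt(ell) places more bins at low ell, where the per-multipole signal-to-noise of the shear spectrum is highest.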
@@ -2599,14 +2761,20 @@ def calculate_pseudo_cl_eb_cov(self): if ver not in self._pseudo_cls.keys(): self._pseudo_cls[ver] = {} - out_path = os.path.abspath(f"{self.cc['paths']['output']}/pseudo_cl_cov_{ver}.fits") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/pseudo_cl_cov_{ver}.fits" + ) if os.path.exists(out_path): - self.print_done(f"Skipping Pseudo-Cl covariance calculation, {out_path} exists") - self._pseudo_cls[ver]['cov'] = fits.open(out_path) + self.print_done( + f"Skipping Pseudo-Cl covariance calculation, {out_path} exists" + ) + self._pseudo_cls[ver]["cov"] = fits.open(out_path) else: params = get_params_rho_tau(self.cc[ver], survey=ver) - self.print_cyan(f"Extracting the fiducial power spectrum for {ver}") + self.print_cyan( + f"Extracting the fiducial power spectrum for {ver}" + ) lmax = 2 * self.nside z, dndz = self.get_redshift(ver) @@ -2617,7 +2785,7 @@ def calculate_pseudo_cl_eb_cov(self): "Unexpected pixwin length for lmax=" f"{lmax}: got {pw.shape[0]}, expected {len(ell)+1}" ) - pw = pw[1:len(ell)+1] + pw = pw[1 : len(ell) + 1] # Load redshift distribution and calculate theory C_ell fiducial_cl = ( @@ -2631,51 +2799,84 @@ def calculate_pseudo_cl_eb_cov(self): * pw**2 ) - self.print_cyan("Getting a sample of the fiducial Cl's with noise") + self.print_cyan( + "Getting a sample of the fiducial Cl's with noise" + ) lmin = 8 - lmax = 2*self.nside + lmax = 2 * self.nside b_lmax = lmax - 1 - ells = np.arange(lmin, lmax+1) + ells = np.arange(lmin, lmax + 1) - if self.binning == 'linear': + if self.binning == "linear": # Linear bands of width ell_step, respecting actual lmax bpws = (ells - lmin) // self.ell_step - bpws = np.minimum(bpws, bpws[-1]) # Ensure last bin captures all + bpws = np.minimum( + bpws, bpws[-1] + ) # Ensure last bin captures all b = nmt.NmtBin(ells=ells, bpws=bpws, lmax=b_lmax) - elif self.binning == 'powspace': + elif self.binning == "powspace": start = np.power(lmin, self.power) end = np.power(lmax, self.power) - bins_ell = np.power(np.linspace(start, end, self.n_ell_bins+1), 1/self.power) + bins_ell = np.power( + np.linspace(start, end, self.n_ell_bins + 1), + 1 / self.power, + ) - #Get bandpowers + # Get bandpowers bpws = np.digitize(ells.astype(float), bins_ell) - 1 bpws[0] = 0 - bpws[-1] = self.n_ell_bins-1 + bpws[-1] = self.n_ell_bins - 1 b = nmt.NmtBin(ells=ells, bpws=bpws, lmax=b_lmax) - #Load data and create shear and noise maps + # Load data and create shear and noise maps cat_gal = fits.getdata(self.cc[ver]["shear"]["path"]) - n_gal, unique_pix, idx, idx_rep = self.get_n_gal_map(params, nside, cat_gal) + n_gal, unique_pix, idx, idx_rep = self.get_n_gal_map( + params, nside, cat_gal + ) mask = n_gal != 0 - - cl_noise, f, wsp = self.get_sample(params, self.nside, b_lmax, b, cat_gal, n_gal, mask, unique_pix, idx_rep) - - fiducial_cl = np.array([fiducial_cl, 0.*fiducial_cl, 0.*fiducial_cl, 0.*fiducial_cl])+ np.mean(cl_noise, axis=1, keepdims=True) - + + cl_noise, f, wsp = self.get_sample( + params, + self.nside, + b_lmax, + b, + cat_gal, + n_gal, + mask, + unique_pix, + idx_rep, + ) + + fiducial_cl = np.array( + [ + fiducial_cl, + 0.0 * fiducial_cl, + 0.0 * fiducial_cl, + 0.0 * fiducial_cl, + ] + ) + np.mean(cl_noise, axis=1, keepdims=True) + self.print_cyan("Computing the Pseudo-Cl covariance") cw = nmt.NmtCovarianceWorkspace.from_fields(f, f, f, f) - covar_22_22 = nmt.gaussian_covariance(cw, 2, 2, 2, 2, - fiducial_cl, - fiducial_cl, - fiducial_cl, - fiducial_cl, - wsp, wb=wsp).reshape([self.n_ell_bins, 4, self.n_ell_bins, 4]) + 
covar_22_22 = nmt.gaussian_covariance( + cw, + 2, + 2, + 2, + 2, + fiducial_cl, + fiducial_cl, + fiducial_cl, + fiducial_cl, + wsp, + wb=wsp, + ).reshape([self.n_ell_bins, 4, self.n_ell_bins, 4]) covar_EE_EE = covar_22_22[:, 0, :, 0] covar_EE_EB = covar_22_22[:, 0, :, 1] @@ -2717,7 +2918,7 @@ def calculate_pseudo_cl_eb_cov(self): hdu.writeto(out_path, overwrite=True) - self._pseudo_cls[ver]['cov'] = hdu + self._pseudo_cls[ver]["cov"] = hdu self.print_done("Done Pseudo-Cl covariance") @@ -2728,51 +2929,88 @@ def calculate_pseudo_cl_onecovariance(self): self.print_start("Computing Pseudo-Cl covariance with OneCovariance") if self.path_onecovariance is None: - raise ValueError("path_onecovariance must be provided to use OneCovariance") - + raise ValueError( + "path_onecovariance must be provided to use OneCovariance" + ) + if not os.path.exists(self.path_onecovariance): - raise ValueError(f"OneCovariance path {self.path_onecovariance} does not exist") - - template_config = os.path.join(self.path_onecovariance, "config_files", "config_3x2pt_pure_Cell_UNIONS.ini") + raise ValueError( + f"OneCovariance path {self.path_onecovariance} does not exist" + ) + + template_config = os.path.join( + self.path_onecovariance, + "config_files", + "config_3x2pt_pure_Cell_UNIONS.ini", + ) if not os.path.exists(template_config): - raise ValueError(f"Template config file {template_config} does not exist") + raise ValueError( + f"Template config file {template_config} does not exist" + ) self._pseudo_cls_onecov = {} for ver in self.versions: self.print_magenta(ver) - out_dir = os.path.abspath(f"{self.cc['paths']['output']}/pseudo_cl_cov_onecov_{ver}/") + out_dir = os.path.abspath( + f"{self.cc['paths']['output']}/pseudo_cl_cov_onecov_{ver}/" + ) os.makedirs(out_dir, exist_ok=True) - if os.path.exists(os.path.join(out_dir, "covariance_list_3x2pt_pure_Cell.dat")): - self.print_done(f"Skipping OneCovariance calculation, {out_dir} exists") + if os.path.exists( + os.path.join(out_dir, "covariance_list_3x2pt_pure_Cell.dat") + ): + self.print_done( + f"Skipping OneCovariance calculation, {out_dir} exists" + ) self._load_onecovariance_cov(out_dir, ver) else: - mask_path = self.cc[ver]['shear']['mask'] - redshift_distr_path = os.path.join(self.data_base_dir, self.cc[ver]['shear']['redshift_distr']) + mask_path = self.cc[ver]["shear"]["mask"] + redshift_distr_path = os.path.join( + self.data_base_dir, self.cc[ver]["shear"]["redshift_distr"] + ) config_path = os.path.join(out_dir, f"config_onecov_{ver}.ini") - self.print_cyan(f"Modifying OneCovariance config file and saving it to {config_path}") - self._modify_onecov_config(template_config, config_path, out_dir, mask_path, redshift_distr_path, ver) + self.print_cyan( + f"Modifying OneCovariance config file and saving it to {config_path}" + ) + self._modify_onecov_config( + template_config, + config_path, + out_dir, + mask_path, + redshift_distr_path, + ver, + ) self.print_cyan("Running OneCovariance...") cmd = f"python {os.path.join(self.path_onecovariance, 'covariance.py')} {config_path}" self.print_cyan(f"Command: {cmd}") ret = os.system(cmd) if ret != 0: - raise RuntimeError(f"OneCovariance command failed with return code {ret}") + raise RuntimeError( + f"OneCovariance command failed with return code {ret}" + ) self.print_cyan("OneCovariance completed successfully.") self._load_onecovariance_cov(out_dir, ver) - + self.print_done("Done Pseudo-Cl covariance with OneCovariance") - def _modify_onecov_config(self, template_config, config_path, out_dir, mask_path, 
redshift_distr_path, ver): + def _modify_onecov_config( + self, + template_config, + config_path, + out_dir, + mask_path, + redshift_distr_path, + ver, + ): """ - Modify OneCovariance configuration file with correct mask, redshift distribution, + Modify OneCovariance configuration file with correct mask, redshift distribution, and ellipticity dispersion parameters. - + Parameters ---------- template_config : str @@ -2784,30 +3022,38 @@ def _modify_onecov_config(self, template_config, config_path, out_dir, mask_path redshift_distr_path : str Path to the redshift distribution file """ - config = configparser.ConfigParser() + config = configparser.ConfigParser() # Load the template configuration config.read(template_config) - + # Update mask path mask_base = os.path.basename(os.path.abspath(mask_path)) mask_folder = os.path.dirname(os.path.abspath(mask_path)) - config['survey specs']['mask_directory'] = mask_folder - config['survey specs']['mask_file_lensing'] = mask_base - config['survey specs']['survey_area_lensing_in_deg2'] = str(self.area[ver]) - config['survey specs']['ellipticity_dispersion'] = str(self.ellipticity_dispersion[ver]) - config['survey specs']['n_eff_lensing'] = str(self.n_eff_gal[ver]) + config["survey specs"]["mask_directory"] = mask_folder + config["survey specs"]["mask_file_lensing"] = mask_base + config["survey specs"]["survey_area_lensing_in_deg2"] = str( + self.area[ver] + ) + config["survey specs"]["ellipticity_dispersion"] = str( + self.ellipticity_dispersion[ver] + ) + config["survey specs"]["n_eff_lensing"] = str(self.n_eff_gal[ver]) # Update redshift distribution path - redshift_distr_base = os.path.basename(os.path.abspath(redshift_distr_path)) - redshift_distr_folder = os.path.dirname(os.path.abspath(redshift_distr_path)) - config['redshift']['z_directory'] = redshift_distr_folder - config['redshift']['zlens_file'] = redshift_distr_base - - #Update output directory - config['output settings']['directory'] = out_dir - + redshift_distr_base = os.path.basename( + os.path.abspath(redshift_distr_path) + ) + redshift_distr_folder = os.path.dirname( + os.path.abspath(redshift_distr_path) + ) + config["redshift"]["z_directory"] = redshift_distr_folder + config["redshift"]["zlens_file"] = redshift_distr_base + + # Update output directory + config["output settings"]["directory"] = out_dir + # Save the modified configuration - with open(config_path, 'w') as f: + with open(config_path, "w") as f: config.write(f) def get_cov_from_onecov(self, cov_one_cov, gaussian=True): @@ -2818,52 +3064,73 @@ def get_cov_from_onecov(self, cov_one_cov, gaussian=True): for i in range(n_bins): for j in range(n_bins): cov[i, j] = cov_one_cov[i * n_bins + j, index_value] - + return cov def _load_onecovariance_cov(self, out_dir, ver): self.print_cyan(f"Loading OneCovariance results from {out_dir}") - cov_one_cov = np.genfromtxt(os.path.join(out_dir, "covariance_list_3x2pt_pure_Cell.dat")) + cov_one_cov = np.genfromtxt( + os.path.join(out_dir, "covariance_list_3x2pt_pure_Cell.dat") + ) gaussian_one_cov = self.get_cov_from_onecov(cov_one_cov, gaussian=True) all_one_cov = self.get_cov_from_onecov(cov_one_cov, gaussian=False) self._pseudo_cls_onecov[ver] = { - 'gaussian_cov': gaussian_one_cov, - 'all_cov': all_one_cov + "gaussian_cov": gaussian_one_cov, + "all_cov": all_one_cov, } def calculate_pseudo_cl_g_ng_cov(self, gaussian_part="iNKA"): - assert gaussian_part in ["iNKA", "OneCovariance"], "gaussian_part must be 'iNKA' or 'OneCovariance'" - self.print_start(f"Gaussian and Non-Gaussian 
covariance of the Pseudo-Cl's using {gaussian_part} for the Gaussian part") - + assert gaussian_part in [ + "iNKA", + "OneCovariance", + ], "gaussian_part must be 'iNKA' or 'OneCovariance'" + self.print_start( + f"Gaussian and Non-Gaussian covariance of the Pseudo-Cl's using {gaussian_part} for the Gaussian part" + ) + self._pseudo_cls_cov_g_ng = {} for ver in self.versions: self.print_magenta(ver) - out_file = os.path.abspath(f"{self.cc['paths']['output']}/pseudo_cl_cov_g_ng_{gaussian_part}_{ver}.fits") + out_file = os.path.abspath( + f"{self.cc['paths']['output']}/pseudo_cl_cov_g_ng_{gaussian_part}_{ver}.fits" + ) if os.path.exists(out_file): - self.print_done(f"Skipping Gaussian and Non-Gaussian covariance calculation, {out_file} exists") + self.print_done( + f"Skipping Gaussian and Non-Gaussian covariance calculation, {out_file} exists" + ) cov_hdu = fits.open(out_file) self._pseudo_cls_cov_g_ng[ver] = cov_hdu continue if gaussian_part == "iNKA": - gaussian_cov = self.pseudo_cls[ver]['cov']['COVAR_EE_EE'].data - non_gaussian_cov = self.pseudo_cls_onecov[ver]['all_cov'] - self.pseudo_cls_onecov[ver]['gaussian_cov'] + gaussian_cov = self.pseudo_cls[ver]["cov"]["COVAR_EE_EE"].data + non_gaussian_cov = ( + self.pseudo_cls_onecov[ver]["all_cov"] + - self.pseudo_cls_onecov[ver]["gaussian_cov"] + ) full_cov = gaussian_cov + non_gaussian_cov elif gaussian_part == "OneCovariance": - gaussian_cov = self.pseudo_cls_onecov[ver]['gaussian_cov'] - non_gaussian_cov = self.pseudo_cls_onecov[ver]['all_cov'] - self.pseudo_cls_onecov[ver]['gaussian_cov'] - full_cov = self.pseudo_cls_onecov[ver]['all_cov'] + gaussian_cov = self.pseudo_cls_onecov[ver]["gaussian_cov"] + non_gaussian_cov = ( + self.pseudo_cls_onecov[ver]["all_cov"] + - self.pseudo_cls_onecov[ver]["gaussian_cov"] + ) + full_cov = self.pseudo_cls_onecov[ver]["all_cov"] else: raise ValueError(f"Unknown gaussian_part: {gaussian_part}") self.print_cyan("Saving Gaussian and Non-Gaussian covariance...") hdu = fits.HDUList() hdu.append(fits.ImageHDU(gaussian_cov, name="COVAR_GAUSSIAN")) - hdu.append(fits.ImageHDU(non_gaussian_cov, name="COVAR_NON_GAUSSIAN")) + hdu.append( + fits.ImageHDU(non_gaussian_cov, name="COVAR_NON_GAUSSIAN") + ) hdu.append(fits.ImageHDU(full_cov, name="COVAR_FULL")) hdu.writeto(out_file, overwrite=True) self._pseudo_cls_cov_g_ng[ver] = hdu - self.print_done(f"Done Gaussian and Non-Gaussian covariance of the Pseudo-Cl's using {gaussian_part} for the Gaussian part") + self.print_done( + f"Done Gaussian and Non-Gaussian covariance of the Pseudo-Cl's using {gaussian_part} for the Gaussian part" + ) def calculate_pseudo_cl(self): """ @@ -2882,14 +3149,18 @@ def calculate_pseudo_cl(self): self._pseudo_cls[ver] = {} - out_path = os.path.abspath(f"{self.cc['paths']['output']}/pseudo_cl_{ver}.fits") + out_path = os.path.abspath( + f"{self.cc['paths']['output']}/pseudo_cl_{ver}.fits" + ) if os.path.exists(out_path): - self.print_done(f"Skipping Pseudo-Cl's calculation, {out_path} exists") + self.print_done( + f"Skipping Pseudo-Cl's calculation, {out_path} exists" + ) cl_shear = fits.getdata(out_path) - self._pseudo_cls[ver]['pseudo_cl'] = cl_shear - elif self.cell_method == 'map': + self._pseudo_cls[ver]["pseudo_cl"] = cl_shear + elif self.cell_method == "map": self.calculate_pseudo_cl_map(ver, nside, out_path) - elif self.cell_method == 'catalog': + elif self.cell_method == "catalog": self.calculate_pseudo_cl_catalog(ver, out_path) else: raise ValueError(f"Unknown cell method: {self.cell_method}") @@ -2899,35 +3170,37 @@ def 
calculate_pseudo_cl(self): def calculate_pseudo_cl_map(self, ver, nside, out_path): params = get_params_rho_tau(self.cc[ver], survey=ver) - #Load data and create shear and noise maps + # Load data and create shear and noise maps cat_gal = fits.getdata(self.cc[ver]["shear"]["path"]) - w = cat_gal[params['w_col']] + w = cat_gal[params["w_col"]] self.print_cyan("Creating maps and computing Cl's...") - n_gal_map, unique_pix, idx, idx_rep = self.get_n_gal_map(params, nside, cat_gal) + n_gal_map, unique_pix, idx, idx_rep = self.get_n_gal_map( + params, nside, cat_gal + ) mask = n_gal_map != 0 shear_map_e1 = np.zeros(hp.nside2npix(nside)) shear_map_e2 = np.zeros(hp.nside2npix(nside)) - e1 = cat_gal[params['e1_col']] - e2 = cat_gal[params['e2_col']] + e1 = cat_gal[params["e1_col"]] + e2 = cat_gal[params["e2_col"]] del cat_gal - - shear_map_e1[unique_pix] += np.bincount(idx_rep, weights=e1*w) - shear_map_e2[unique_pix] += np.bincount(idx_rep, weights=e2*w) + + shear_map_e1[unique_pix] += np.bincount(idx_rep, weights=e1 * w) + shear_map_e2[unique_pix] += np.bincount(idx_rep, weights=e2 * w) shear_map_e1[mask] /= n_gal_map[mask] shear_map_e2[mask] /= n_gal_map[mask] - shear_map = shear_map_e1 + 1j*shear_map_e2 + shear_map = shear_map_e1 + 1j * shear_map_e2 del shear_map_e1, shear_map_e2 ell_eff, cl_shear, wsp = self.get_pseudo_cls_map(shear_map) cl_noise = np.zeros_like(cl_shear) - + for i in range(self.nrandom_cell): noise_map_e1 = np.zeros(hp.nside2npix(nside)) @@ -2935,64 +3208,67 @@ def calculate_pseudo_cl_map(self, ver, nside, out_path): e1_rot, e2_rot = self.apply_random_rotation(e1, e2) - - noise_map_e1[unique_pix] += np.bincount(idx_rep, weights=e1_rot*w) - noise_map_e2[unique_pix] += np.bincount(idx_rep, weights=e2_rot*w) + noise_map_e1[unique_pix] += np.bincount(idx_rep, weights=e1_rot * w) + noise_map_e2[unique_pix] += np.bincount(idx_rep, weights=e2_rot * w) noise_map_e1[mask] /= n_gal_map[mask] noise_map_e2[mask] /= n_gal_map[mask] - noise_map = noise_map_e1 + 1j*noise_map_e2 + noise_map = noise_map_e1 + 1j * noise_map_e2 del noise_map_e1, noise_map_e2 _, cl_noise_, _ = self.get_pseudo_cls_map(noise_map, wsp) cl_noise += cl_noise_ - + cl_noise /= self.nrandom_cell del e1, e2, w try: del e1_rot, e2_rot - except NameError: #Continue if the random generation has been skipped. + except NameError: # Continue if the random generation has been skipped. pass - del n_gal_map + del n_gal_map - #This is a problem because the measurement depends on the seed. To be fixed. - #cl_shear = cl_shear - np.mean(cl_noise, axis=1, keepdims=True) + # This is a problem because the measurement depends on the seed. To be fixed. 
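+        # A possible fix (a sketch only, not applied in this patch):
+        # draw the rotation angles from a seeded generator, e.g.
+        #     rng = np.random.default_rng(12345)
+        #     rot_angle = rng.uniform(0.0, 2.0 * np.pi, size=len(e1))
+        # so that repeated runs subtract an identical noise realisation.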
+ # cl_shear = cl_shear - np.mean(cl_noise, axis=1, keepdims=True) cl_shear = cl_shear - cl_noise self.print_cyan("Saving pseudo-Cl's...") self.save_pseudo_cl(ell_eff, cl_shear, out_path) cl_shear = fits.getdata(out_path) - self._pseudo_cls[ver]['pseudo_cl'] = cl_shear + self._pseudo_cls[ver]["pseudo_cl"] = cl_shear def calculate_pseudo_cl_catalog(self, ver, out_path): params = get_params_rho_tau(self.cc[ver], survey=ver) - #Load data and create shear and noise maps + # Load data and create shear and noise maps cat_gal = fits.getdata(self.cc[ver]["shear"]["path"]) - ell_eff, cl_shear, wsp = self.get_pseudo_cls_catalog(catalog=cat_gal, params=params) + ell_eff, cl_shear, wsp = self.get_pseudo_cls_catalog( + catalog=cat_gal, params=params + ) self.print_cyan("Saving pseudo-Cl's...") self.save_pseudo_cl(ell_eff, cl_shear, out_path) cl_shear = fits.getdata(out_path) - self._pseudo_cls[ver]['pseudo_cl'] = cl_shear + self._pseudo_cls[ver]["pseudo_cl"] = cl_shear def get_n_gal_map(self, params, nside, cat_gal): """ Compute the galaxy number density map. """ - ra = cat_gal[params['ra_col']] - dec = cat_gal[params['dec_col']] - w = cat_gal[params['w_col']] + ra = cat_gal[params["ra_col"]] + dec = cat_gal[params["dec_col"]] + w = cat_gal[params["w_col"]] - theta = (90. - dec) * np.pi / 180. - phi = ra * np.pi / 180. + theta = (90.0 - dec) * np.pi / 180.0 + phi = ra * np.pi / 180.0 pix = hp.ang2pix(nside, theta, phi) - unique_pix, idx, idx_rep = np.unique(pix, return_index=True, return_inverse=True) + unique_pix, idx, idx_rep = np.unique( + pix, return_index=True, return_inverse=True + ) n_gal = np.zeros(hp.nside2npix(nside)) n_gal[unique_pix] = np.bincount(idx_rep, weights=w) return n_gal, unique_pix, idx, idx_rep @@ -3006,18 +3282,24 @@ def get_gaussian_real( noise_map_e1 = np.zeros(hp.nside2npix(nside)) noise_map_e2 = np.zeros(hp.nside2npix(nside)) - w = cat_gal[params['w_col']] - noise_map_e1[unique_pix] += np.bincount(idx_rep, weights=e1_rot*w) - noise_map_e2[unique_pix] += np.bincount(idx_rep, weights=e2_rot*w) + w = cat_gal[params["w_col"]] + noise_map_e1[unique_pix] += np.bincount(idx_rep, weights=e1_rot * w) + noise_map_e2[unique_pix] += np.bincount(idx_rep, weights=e2_rot * w) noise_map_e1[mask] /= n_gal[mask] noise_map_e2[mask] /= n_gal[mask] - return noise_map_e1 + 1j*noise_map_e2 + return noise_map_e1 + 1j * noise_map_e2 - def get_sample(self, params, nside, lmax, b, cat_gal, n_gal, mask, unique_pix, idx_rep): - noise_map = self.get_gaussian_real(params, nside, lmax, cat_gal, n_gal, mask, unique_pix, idx_rep) + def get_sample( + self, params, nside, lmax, b, cat_gal, n_gal, mask, unique_pix, idx_rep + ): + noise_map = self.get_gaussian_real( + params, nside, lmax, cat_gal, n_gal, mask, unique_pix, idx_rep + ) - f = nmt.NmtField(mask=mask, maps=[noise_map.real, noise_map.imag], lmax=lmax) + f = nmt.NmtField( + mask=mask, maps=[noise_map.real, noise_map.imag], lmax=lmax + ) wsp = nmt.NmtWorkspace.from_fields(f, f, b) @@ -3025,32 +3307,34 @@ def get_sample(self, params, nside, lmax, b, cat_gal, n_gal, mask, unique_pix, i cl_noise = wsp.decouple_cell(cl_noise) return cl_noise, f, wsp - + def get_pseudo_cls_map(self, map, wsp=None): """ Compute the pseudo-cl for a given map. 
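        Multipoles run from lmin = 8 to lmax = 2 * self.nside and are
        binned with the instance's "linear" or "powspace" bandpower
        scheme. Returns the effective multipoles, the decoupled spin-2
        spectra [EE, EB, BE, BB], and the NmtWorkspace used to decouple.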
""" lmin = 8 - lmax = 2*self.nside + lmax = 2 * self.nside b_lmax = lmax - 1 - ells = np.arange(lmin, lmax+1) + ells = np.arange(lmin, lmax + 1) - if self.binning == 'linear': + if self.binning == "linear": # Linear bands of width ell_step, respecting actual lmax bpws = (ells - lmin) // self.ell_step bpws = np.minimum(bpws, bpws[-1]) # Ensure last bin captures all b = nmt.NmtBin(ells=ells, bpws=bpws, lmax=b_lmax) - elif self.binning == 'powspace': + elif self.binning == "powspace": start = np.power(lmin, self.power) end = np.power(lmax, self.power) - bins_ell = np.power(np.linspace(start, end, self.n_ell_bins+1), 1/self.power) + bins_ell = np.power( + np.linspace(start, end, self.n_ell_bins + 1), 1 / self.power + ) - #Get bandpowers + # Get bandpowers bpws = np.digitize(ells.astype(float), bins_ell) - 1 bpws[0] = 0 - bpws[-1] = self.n_ell_bins-1 + bpws[-1] = self.n_ell_bins - 1 b = nmt.NmtBin(ells=ells, bpws=bpws, lmax=b_lmax) @@ -3058,11 +3342,13 @@ def get_pseudo_cls_map(self, map, wsp=None): factor = -1 if self.pol_factor else 1 - f_all = nmt.NmtField(mask=(map!=0), maps=[map.real, factor*map.imag], lmax=b_lmax) + f_all = nmt.NmtField( + mask=(map != 0), maps=[map.real, factor * map.imag], lmax=b_lmax + ) if wsp is None: wsp = nmt.NmtWorkspace.from_fields(f_all, f_all, b) - + cl_coupled = nmt.compute_coupled_cell(f_all, f_all) cl_all = wsp.decouple_cell(cl_coupled) @@ -3074,25 +3360,27 @@ def get_pseudo_cls_catalog(self, catalog, params, wsp=None): """ lmin = 8 - lmax = 2*self.nside + lmax = 2 * self.nside b_lmax = lmax - 1 - ells = np.arange(lmin, lmax+1) + ells = np.arange(lmin, lmax + 1) - if self.binning == 'linear': + if self.binning == "linear": # Linear bands of width ell_step, respecting actual lmax bpws = (ells - lmin) // self.ell_step bpws = np.minimum(bpws, bpws[-1]) # Ensure last bin captures all b = nmt.NmtBin(ells=ells, bpws=bpws, lmax=b_lmax) - elif self.binning == 'powspace': + elif self.binning == "powspace": start = np.power(lmin, self.power) end = np.power(lmax, self.power) - bins_ell = np.power(np.linspace(start, end, self.n_ell_bins+1), 1/self.power) + bins_ell = np.power( + np.linspace(start, end, self.n_ell_bins + 1), 1 / self.power + ) - #Get bandpowers + # Get bandpowers bpws = np.digitize(ells.astype(float), bins_ell) - 1 bpws[0] = 0 - bpws[-1] = self.n_ell_bins-1 + bpws[-1] = self.n_ell_bins - 1 b = nmt.NmtBin(ells=ells, bpws=bpws, lmax=b_lmax) @@ -3100,17 +3388,22 @@ def get_pseudo_cls_catalog(self, catalog, params, wsp=None): factor = -1 if self.pol_factor else 1 - f_all = nmt.NmtFieldCatalog(positions=[catalog[params['ra_col']], catalog[params['dec_col']]], - weights=catalog[params['w_col']], - field=[catalog[params['e1_col']], factor*catalog[params['e2_col']]], - lmax=b_lmax, - lmax_mask=b_lmax, - spin=2, - lonlat=True) - + f_all = nmt.NmtFieldCatalog( + positions=[catalog[params["ra_col"]], catalog[params["dec_col"]]], + weights=catalog[params["w_col"]], + field=[ + catalog[params["e1_col"]], + factor * catalog[params["e2_col"]], + ], + lmax=b_lmax, + lmax_mask=b_lmax, + spin=2, + lonlat=True, + ) + if wsp is None: wsp = nmt.NmtWorkspace.from_fields(f_all, f_all, b) - + cl_coupled = nmt.compute_coupled_cell(f_all, f_all) cl_all = wsp.decouple_cell(cl_coupled) @@ -3135,9 +3428,9 @@ def apply_random_rotation(self, e1, e2): Second component of the rotated ellipticity. 
""" np.random.seed() - rot_angle = np.random.rand(len(e1))*2*np.pi - e1_out = e1*np.cos(rot_angle) + e2*np.sin(rot_angle) - e2_out = -e1*np.sin(rot_angle) + e2*np.cos(rot_angle) + rot_angle = np.random.rand(len(e1)) * 2 * np.pi + e1_out = e1 * np.cos(rot_angle) + e2 * np.sin(rot_angle) + e2_out = -e1 * np.sin(rot_angle) + e2 * np.cos(rot_angle) return e1_out, e2_out def save_pseudo_cl(self, ell_eff, pseudo_cl, out_path): @@ -3151,7 +3444,7 @@ def save_pseudo_cl(self, ell_eff, pseudo_cl, out_path): out_path : str Path to save the pseudo-Cl's to. """ - #Create columns of the fits file + # Create columns of the fits file col1 = fits.Column(name="ELL", format="D", array=ell_eff) col2 = fits.Column(name="EE", format="D", array=pseudo_cl[0]) col3 = fits.Column(name="EB", format="D", array=pseudo_cl[1]) @@ -3174,126 +3467,183 @@ def plot_pseudo_cl(self): """ self.print_cyan("Plotting pseudo-Cl's") - #Plotting EE + # Plotting EE out_path = os.path.abspath(f"{self.cc['paths']['output']}/cell_ee.png") fig, ax = plt.subplots(nrows=2, ncols=1, figsize=(8, 8)) for ver in self.versions: - ell = self.pseudo_cls[ver]['pseudo_cl']["ELL"] - cov = self.pseudo_cls[ver]['cov']["COVAR_EE_EE"].data - ax[0].errorbar(ell, ell*self.pseudo_cls[ver]['pseudo_cl']["EE"], yerr=ell*np.sqrt(np.diag(cov)), fmt=self.cc[ver]["marker"], label=ver+" EE", color=self.cc[ver]["colour"], capsize=2) + ell = self.pseudo_cls[ver]["pseudo_cl"]["ELL"] + cov = self.pseudo_cls[ver]["cov"]["COVAR_EE_EE"].data + ax[0].errorbar( + ell, + ell * self.pseudo_cls[ver]["pseudo_cl"]["EE"], + yerr=ell * np.sqrt(np.diag(cov)), + fmt=self.cc[ver]["marker"], + label=ver + " EE", + color=self.cc[ver]["colour"], + capsize=2, + ) ax[0].set_ylabel(r"$\ell C_\ell$") - ax[0].set_xlim(ell.min()-10, ell.max()+100) - ax[0].set_xscale('squareroot') + ax[0].set_xlim(ell.min() - 10, ell.max() + 100) + ax[0].set_xscale("squareroot") ax[0].set_xticks(np.array([100, 400, 900, 1600])) ax[0].minorticks_on() - ax[0].tick_params(axis='x', which='minor', length=2, width=0.8) - minor_ticks = [i*10 for i in range(1, 10)] + [i*100 for i in range(1, 21)] + ax[0].tick_params(axis="x", which="minor", length=2, width=0.8) + minor_ticks = [i * 10 for i in range(1, 10)] + [ + i * 100 for i in range(1, 21) + ] ax[0].xaxis.set_ticks(minor_ticks, minor=True) for ver in self.versions: - ell = self.pseudo_cls[ver]['pseudo_cl']["ELL"] - cov = self.pseudo_cls[ver]['cov']["COVAR_EE_EE"].data - ax[1].errorbar(ell, self.pseudo_cls[ver]['pseudo_cl']["EE"], yerr=np.sqrt(np.diag(cov)), fmt=self.cc[ver]["marker"], label=ver+" EE", color=self.cc[ver]["colour"]) + ell = self.pseudo_cls[ver]["pseudo_cl"]["ELL"] + cov = self.pseudo_cls[ver]["cov"]["COVAR_EE_EE"].data + ax[1].errorbar( + ell, + self.pseudo_cls[ver]["pseudo_cl"]["EE"], + yerr=np.sqrt(np.diag(cov)), + fmt=self.cc[ver]["marker"], + label=ver + " EE", + color=self.cc[ver]["colour"], + ) ax[1].set_xlabel(r"$\ell$") ax[1].set_ylabel(r"$C_\ell$") - ax[1].set_xlim(ell.min()-10, ell.max()+100) - ax[1].set_xscale('squareroot') - ax[1].set_yscale('log') + ax[1].set_xlim(ell.min() - 10, ell.max() + 100) + ax[1].set_xscale("squareroot") + ax[1].set_yscale("log") ax[1].set_xticks(np.array([100, 400, 900, 1600])) ax[1].minorticks_on() - ax[1].tick_params(axis='x', which='minor', length=2, width=0.8) - minor_ticks = [i*10 for i in range(1, 10)] + [i*100 for i in range(1, 21)] + ax[1].tick_params(axis="x", which="minor", length=2, width=0.8) + minor_ticks = [i * 10 for i in range(1, 10)] + [ + i * 100 for i in range(1, 21) + ] 
 
     def save_pseudo_cl(self, ell_eff, pseudo_cl, out_path):
@@ -3151,7 +3444,7 @@
         out_path : str
             Path to save the pseudo-Cl's to.
         """
-        #Create columns of the fits file
+        # Create columns of the fits file
         col1 = fits.Column(name="ELL", format="D", array=ell_eff)
         col2 = fits.Column(name="EE", format="D", array=pseudo_cl[0])
         col3 = fits.Column(name="EB", format="D", array=pseudo_cl[1])
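`save_pseudo_cl` and the `fits.getdata` calls in the calculate methods form a write-then-reread round trip. A small standalone sketch with dummy arrays; the file name is arbitrary, and taking BB from row 3 assumes NaMaster's EE, EB, BE, BB row ordering for a spin-2 auto-spectrum, since the hunk above only shows the first three columns:

import numpy as np
from astropy.io import fits

ell_eff = np.array([20.0, 60.0, 120.0])
# Rows ordered EE, EB, BE, BB, as decouple_cell returns them
pseudo_cl = np.vstack([np.full(3, 1e-8), np.zeros(3), np.zeros(3), np.full(3, 1e-9)])

cols = [
    fits.Column(name="ELL", format="D", array=ell_eff),
    fits.Column(name="EE", format="D", array=pseudo_cl[0]),
    fits.Column(name="EB", format="D", array=pseudo_cl[1]),
    fits.Column(name="BB", format="D", array=pseudo_cl[3]),
]
fits.BinTableHDU.from_columns(cols).writeto("pseudo_cl_demo.fits", overwrite=True)

# fits.getdata returns a FITS_rec; columns are accessed by name, which is
# exactly how the plotting code indexes "ELL", "EE", etc.
tab = fits.getdata("pseudo_cl_demo.fits")
assert np.allclose(tab["ELL"], ell_eff)
print(tab["EE"])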
@@ -3174,126 +3467,183 @@ def plot_pseudo_cl(self):
         """
         self.print_cyan("Plotting pseudo-Cl's")
 
-        #Plotting EE
+        # Plotting EE
         out_path = os.path.abspath(f"{self.cc['paths']['output']}/cell_ee.png")
         fig, ax = plt.subplots(nrows=2, ncols=1, figsize=(8, 8))
         for ver in self.versions:
-            ell = self.pseudo_cls[ver]['pseudo_cl']["ELL"]
-            cov = self.pseudo_cls[ver]['cov']["COVAR_EE_EE"].data
-            ax[0].errorbar(ell, ell*self.pseudo_cls[ver]['pseudo_cl']["EE"], yerr=ell*np.sqrt(np.diag(cov)), fmt=self.cc[ver]["marker"], label=ver+" EE", color=self.cc[ver]["colour"], capsize=2)
+            ell = self.pseudo_cls[ver]["pseudo_cl"]["ELL"]
+            cov = self.pseudo_cls[ver]["cov"]["COVAR_EE_EE"].data
+            ax[0].errorbar(
+                ell,
+                ell * self.pseudo_cls[ver]["pseudo_cl"]["EE"],
+                yerr=ell * np.sqrt(np.diag(cov)),
+                fmt=self.cc[ver]["marker"],
+                label=ver + " EE",
+                color=self.cc[ver]["colour"],
+                capsize=2,
+            )
         ax[0].set_ylabel(r"$\ell C_\ell$")
-        ax[0].set_xlim(ell.min()-10, ell.max()+100)
-        ax[0].set_xscale('squareroot')
+        ax[0].set_xlim(ell.min() - 10, ell.max() + 100)
+        ax[0].set_xscale("squareroot")
         ax[0].set_xticks(np.array([100, 400, 900, 1600]))
         ax[0].minorticks_on()
-        ax[0].tick_params(axis='x', which='minor', length=2, width=0.8)
-        minor_ticks = [i*10 for i in range(1, 10)] + [i*100 for i in range(1, 21)]
+        ax[0].tick_params(axis="x", which="minor", length=2, width=0.8)
+        minor_ticks = [i * 10 for i in range(1, 10)] + [
+            i * 100 for i in range(1, 21)
+        ]
         ax[0].xaxis.set_ticks(minor_ticks, minor=True)
 
         for ver in self.versions:
-            ell = self.pseudo_cls[ver]['pseudo_cl']["ELL"]
-            cov = self.pseudo_cls[ver]['cov']["COVAR_EE_EE"].data
-            ax[1].errorbar(ell, self.pseudo_cls[ver]['pseudo_cl']["EE"], yerr=np.sqrt(np.diag(cov)), fmt=self.cc[ver]["marker"], label=ver+" EE", color=self.cc[ver]["colour"])
+            ell = self.pseudo_cls[ver]["pseudo_cl"]["ELL"]
+            cov = self.pseudo_cls[ver]["cov"]["COVAR_EE_EE"].data
+            ax[1].errorbar(
+                ell,
+                self.pseudo_cls[ver]["pseudo_cl"]["EE"],
+                yerr=np.sqrt(np.diag(cov)),
+                fmt=self.cc[ver]["marker"],
+                label=ver + " EE",
+                color=self.cc[ver]["colour"],
+            )
         ax[1].set_xlabel(r"$\ell$")
         ax[1].set_ylabel(r"$C_\ell$")
-        ax[1].set_xlim(ell.min()-10, ell.max()+100)
-        ax[1].set_xscale('squareroot')
-        ax[1].set_yscale('log')
+        ax[1].set_xlim(ell.min() - 10, ell.max() + 100)
+        ax[1].set_xscale("squareroot")
+        ax[1].set_yscale("log")
         ax[1].set_xticks(np.array([100, 400, 900, 1600]))
         ax[1].minorticks_on()
-        ax[1].tick_params(axis='x', which='minor', length=2, width=0.8)
-        minor_ticks = [i*10 for i in range(1, 10)] + [i*100 for i in range(1, 21)]
+        ax[1].tick_params(axis="x", which="minor", length=2, width=0.8)
+        minor_ticks = [i * 10 for i in range(1, 10)] + [
+            i * 100 for i in range(1, 21)
+        ]
         ax[1].xaxis.set_ticks(minor_ticks, minor=True)
 
-        plt.suptitle('Pseudo-Cl EE (Gaussian covariance)')
+        plt.suptitle("Pseudo-Cl EE (Gaussian covariance)")
         plt.legend()
         plt.savefig(out_path)
 
-        #Plotting EB
+        # Plotting EB
         out_path = os.path.abspath(f"{self.cc['paths']['output']}/cell_eb.png")
         fig, ax = plt.subplots(nrows=2, ncols=1, figsize=(8, 8))
         for ver in self.versions:
-            ell = self.pseudo_cls[ver]['pseudo_cl']["ELL"]
-            cov = self.pseudo_cls[ver]['cov']["COVAR_EB_EB"].data
-            ax[0].errorbar(ell, ell*self.pseudo_cls[ver]['pseudo_cl']["EB"], yerr=ell*np.sqrt(np.diag(cov)), fmt=self.cc[ver]["marker"], label=ver+" EB", color=self.cc[ver]["colour"], capsize=2)
+            ell = self.pseudo_cls[ver]["pseudo_cl"]["ELL"]
+            cov = self.pseudo_cls[ver]["cov"]["COVAR_EB_EB"].data
+            ax[0].errorbar(
+                ell,
+                ell * self.pseudo_cls[ver]["pseudo_cl"]["EB"],
+                yerr=ell * np.sqrt(np.diag(cov)),
+                fmt=self.cc[ver]["marker"],
+                label=ver + " EB",
+                color=self.cc[ver]["colour"],
+                capsize=2,
+            )
         ax[0].axhline(0, color="black", linestyle="--")
         ax[0].set_ylabel(r"$\ell C_\ell$")
-        ax[0].set_xlim(ell.min()-10, ell.max()+100)
-        ax[0].set_xscale('squareroot')
+        ax[0].set_xlim(ell.min() - 10, ell.max() + 100)
+        ax[0].set_xscale("squareroot")
         ax[0].set_xticks(np.array([100, 400, 900, 1600]))
         ax[0].minorticks_on()
-        ax[0].tick_params(axis='x', which='minor', length=2, width=0.8)
-        minor_ticks = [i*10 for i in range(1, 10)] + [i*100 for i in range(1, 21)]
+        ax[0].tick_params(axis="x", which="minor", length=2, width=0.8)
+        minor_ticks = [i * 10 for i in range(1, 10)] + [
+            i * 100 for i in range(1, 21)
+        ]
         ax[0].xaxis.set_ticks(minor_ticks, minor=True)
 
         for ver in self.versions:
-            ell = self.pseudo_cls[ver]['pseudo_cl']["ELL"]
-            cov = self.pseudo_cls[ver]['cov']["COVAR_EB_EB"].data
-            ax[1].errorbar(ell, self.pseudo_cls[ver]['pseudo_cl']["EB"], yerr=np.sqrt(np.diag(cov)), fmt=self.cc[ver]["marker"], label=ver+" EB", color=self.cc[ver]["colour"])
+            ell = self.pseudo_cls[ver]["pseudo_cl"]["ELL"]
+            cov = self.pseudo_cls[ver]["cov"]["COVAR_EB_EB"].data
+            ax[1].errorbar(
+                ell,
+                self.pseudo_cls[ver]["pseudo_cl"]["EB"],
+                yerr=np.sqrt(np.diag(cov)),
+                fmt=self.cc[ver]["marker"],
+                label=ver + " EB",
+                color=self.cc[ver]["colour"],
+            )
         ax[1].set_xlabel(r"$\ell$")
         ax[1].set_ylabel(r"$C_\ell$")
-        ax[1].set_xlim(ell.min()-10, ell.max()+100)
-        ax[1].set_xscale('squareroot')
-        ax[1].set_yscale('log')
+        ax[1].set_xlim(ell.min() - 10, ell.max() + 100)
+        ax[1].set_xscale("squareroot")
+        ax[1].set_yscale("log")
         ax[1].set_xticks(np.array([100, 400, 900, 1600]))
         ax[1].minorticks_on()
-        ax[1].tick_params(axis='x', which='minor', length=2, width=0.8)
-        minor_ticks = [i*10 for i in range(1, 10)] + [i*100 for i in range(1, 21)]
+        ax[1].tick_params(axis="x", which="minor", length=2, width=0.8)
+        minor_ticks = [i * 10 for i in range(1, 10)] + [
+            i * 100 for i in range(1, 21)
+        ]
         ax[1].xaxis.set_ticks(minor_ticks, minor=True)
 
-        plt.suptitle('Pseudo-Cl EB (Gaussian covariance)')
+        plt.suptitle("Pseudo-Cl EB (Gaussian covariance)")
         plt.legend()
         plt.savefig(out_path)
 
-        #Plotting BB
+        # Plotting BB
         out_path = os.path.abspath(f"{self.cc['paths']['output']}/cell_bb.png")
         fig, ax = plt.subplots(nrows=2, ncols=1, figsize=(8, 8))
         for ver in self.versions:
-            ell = self.pseudo_cls[ver]['pseudo_cl']["ELL"]
-            cov = self.pseudo_cls[ver]['cov']["COVAR_BB_BB"].data
-            ax[0].errorbar(ell, ell*self.pseudo_cls[ver]['pseudo_cl']["BB"], yerr=ell*np.sqrt(np.diag(cov)), fmt=self.cc[ver]["marker"], label=ver+" BB", color=self.cc[ver]["colour"], capsize=2)
+            ell = self.pseudo_cls[ver]["pseudo_cl"]["ELL"]
+            cov = self.pseudo_cls[ver]["cov"]["COVAR_BB_BB"].data
+            ax[0].errorbar(
+                ell,
+                ell * self.pseudo_cls[ver]["pseudo_cl"]["BB"],
+                yerr=ell * np.sqrt(np.diag(cov)),
+                fmt=self.cc[ver]["marker"],
+                label=ver + " BB",
+                color=self.cc[ver]["colour"],
+                capsize=2,
+            )
         ax[0].axhline(0, color="black", linestyle="--")
         ax[0].set_ylabel(r"$\ell C_\ell$")
-        ax[0].set_xlim(ell.min()-10, ell.max()+100)
-        ax[0].set_xscale('squareroot')
+        ax[0].set_xlim(ell.min() - 10, ell.max() + 100)
+        ax[0].set_xscale("squareroot")
         ax[0].set_xticks(np.array([100, 400, 900, 1600]))
         ax[0].minorticks_on()
-        ax[0].tick_params(axis='x', which='minor', length=2, width=0.8)
-        minor_ticks = [i*10 for i in range(1, 10)] + [i*100 for i in range(1, 21)]
+        ax[0].tick_params(axis="x", which="minor", length=2, width=0.8)
+        minor_ticks = [i * 10 for i in range(1, 10)] + [
+            i * 100 for i in range(1, 21)
+        ]
        ax[0].xaxis.set_ticks(minor_ticks, minor=True)
 
         for ver in self.versions:
-            ell = self.pseudo_cls[ver]['pseudo_cl']["ELL"]
-            cov = self.pseudo_cls[ver]['cov']["COVAR_BB_BB"].data
-            ax[1].errorbar(ell, self.pseudo_cls[ver]['pseudo_cl']["BB"], yerr=np.sqrt(np.diag(cov)), fmt=self.cc[ver]["marker"], label=ver+" BB", color=self.cc[ver]["colour"])
+            ell = self.pseudo_cls[ver]["pseudo_cl"]["ELL"]
+            cov = self.pseudo_cls[ver]["cov"]["COVAR_BB_BB"].data
+            ax[1].errorbar(
+                ell,
+                self.pseudo_cls[ver]["pseudo_cl"]["BB"],
+                yerr=np.sqrt(np.diag(cov)),
+                fmt=self.cc[ver]["marker"],
+                label=ver + " BB",
+                color=self.cc[ver]["colour"],
+            )
         ax[1].set_xlabel(r"$\ell$")
         ax[1].set_ylabel(r"$C_\ell$")
-        ax[1].set_xlim(ell.min()-10, ell.max()+100)
-        ax[1].set_xscale('squareroot')
-        ax[1].set_yscale('log')
+        ax[1].set_xlim(ell.min() - 10, ell.max() + 100)
+        ax[1].set_xscale("squareroot")
+        ax[1].set_yscale("log")
         ax[1].set_xticks(np.array([100, 400, 900, 1600]))
         ax[1].minorticks_on()
-        ax[1].tick_params(axis='x', which='minor', length=2, width=0.8)
-        minor_ticks = [i*10 for i in range(1, 10)] + [i*100 for i in range(1, 21)]
+        ax[1].tick_params(axis="x", which="minor", length=2, width=0.8)
+        minor_ticks = [i * 10 for i in range(1, 10)] + [
+            i * 100 for i in range(1, 21)
+        ]
         ax[1].xaxis.set_ticks(minor_ticks, minor=True)
 
-        plt.suptitle('Pseudo-Cl BB (Gaussian covariance)')
+        plt.suptitle("Pseudo-Cl BB (Gaussian covariance)")
         plt.legend()
         plt.savefig(out_path)
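All panels above call `set_xscale("squareroot")`, a custom axis scale that must be registered elsewhere in the package; it compresses the high-ell end so that the chosen major ticks at perfect squares come out equally spaced. A minimal stand-in using matplotlib's built-in `FuncScale`, for readers without that registration:

import numpy as np
import matplotlib.pyplot as plt

ell = np.arange(8, 1601)
cl = 1.0 / ell**2  # toy spectrum, for illustration only

fig, ax = plt.subplots()
ax.plot(ell, ell * cl)

def forward(x):
    # Square-root mapping; clip so the negative xlim padding stays finite
    return np.sqrt(np.maximum(x, 0.0))

def inverse(x):
    return x**2

ax.set_xscale("function", functions=(forward, inverse))

# Under this mapping, ticks at perfect squares are equally spaced
ax.set_xticks([100, 400, 900, 1600])
fig.savefig("cell_sqrt_axis_demo.png")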