Commit 2c45469
Merge 0a18a92 into c45205b
cramirezpe committed Jun 3, 2022
2 parents c45205b + 0a18a92 commit 2c45469
Showing 29 changed files with 1,216 additions and 3,982 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/pylint.yml
@@ -18,7 +18,7 @@ jobs:
         sudo apt-get -y install libbz2-dev
         python -m pip install --upgrade pip
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
-        pip install pylint
+        pip install pylint==2.12.2
     - name: This path
       run: |
         ls
36 changes: 32 additions & 4 deletions bin/picca_export.py
@@ -4,6 +4,7 @@
 import argparse
 import fitsio
 import numpy as np
+import scipy.interpolate
 import scipy.linalg
 import h5py
 import os.path
@@ -267,15 +268,42 @@ def main(cmdargs):
raise ValueError("Blinding strategy 'corr_yshift' requires"
" argument --blind_corr_type.")

# Read the blinding file and get the right template
blinding_filename = ('/global/cfs/projectdirs/desi/science/lya/y1-kp6/'
'blinding/y1_blinding_v1_standard_04_10_2021.h5')
# Check type of correlation and get size and regular binning
if args.blind_corr_type in ['lyaxlya', 'lyaxlyb']:
corr_size = 2500
rp_interp_grid = np.arange(2., 202., 4)
rt_interp_grid = np.arange(2., 202., 4)
elif args.blind_corr_type in ['qsoxlya', 'qsoxlyb']:
corr_size = 5000
rp_interp_grid = np.arange(-197.99, 202.01, 4)
rt_interp_grid = np.arange(2., 202, 4)
else:
raise ValueError("Unknown correlation type: {}".format(args.blind_corr_type))

if corr_size == len(xi):
# Read the blinding file and get the right template
blinding_filename = ('/global/cfs/projectdirs/desi/science/lya/y1-kp6/'
'blinding/y1_blinding_v1.2_standard_29_03_2022.h5')
else:
# Read the regular grid blinding file and get the right template
blinding_filename = ('/global/cfs/projectdirs/desi/science/lya/y1-kp6/'
'blinding/y1_blinding_v1.2_regular_grid_29_03_2022.h5')

if not os.path.isfile(blinding_filename):
raise RuntimeError("Missing blinding file. Make sure you are running at"
" NERSC or contact picca developers")
blinding_file = h5py.File(blinding_filename, 'r')
hex_diff = np.array(blinding_file['blinding'][args.blind_corr_type]).astype(str)
diff = np.array([float.fromhex(x) for x in hex_diff])
diff_grid = np.array([float.fromhex(x) for x in hex_diff])

if corr_size == len(xi):
diff = diff_grid
else:
# Interpolate the blinding template on the regular grid
interp = scipy.interpolate.RectBivariateSpline(
rp_interp_grid, rt_interp_grid,
diff_grid.reshape(len(rp_interp_grid), len(rt_interp_grid)), kx=3, ky=3)
diff = interp.ev(r_par, r_trans)

# Check that the shapes match
if np.shape(xi) != np.shape(diff):
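The new branch above splines the blinding template onto the measured bins whenever the exported correlation is not on the standard grid. The template is stored in the HDF5 file as hexadecimal strings (Python's float.hex format), so float.fromhex recovers the stored doubles exactly. The following is a minimal sketch of the interpolation step, not part of the commit: the template values and the r_par/r_trans bin centers below are made up, whereas in the script xi, r_par and r_trans come from the correlation file read earlier.

import numpy as np
import scipy.interpolate

rp_interp_grid = np.arange(2., 202., 4)  # 50 r_parallel template bin centers
rt_interp_grid = np.arange(2., 202., 4)  # 50 r_transverse template bin centers

# stand-in for the hex-decoded template (50 * 50 = 2500 values)
diff_grid = np.random.default_rng(0).normal(0., 1e-3, 2500)

# hypothetical measured bin centers of the correlation being exported
r_par = np.array([10.3, 57.8, 120.1])
r_trans = np.array([14.9, 30.2, 88.7])

# bicubic spline built on the regular grid, evaluated at the measured centers
interp = scipy.interpolate.RectBivariateSpline(
    rp_interp_grid, rt_interp_grid,
    diff_grid.reshape(len(rp_interp_grid), len(rt_interp_grid)), kx=3, ky=3)
diff = interp.ev(r_par, r_trans)
print(diff.shape)  # (3,): one additive shift per correlation bin

A cubic spline (kx=ky=3) needs more than four points per axis; both template grids here have 50, matching the lyaxlya case above.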
144 changes: 142 additions & 2 deletions py/picca/delta_extraction/expected_flux.py
@@ -2,9 +2,14 @@
 classes computing the mean expected flux must inherit. The mean expected flux
 is the product of the unabsorbed quasar continuum and the mean transmission
 """
+import fitsio
 import multiprocessing
+import numpy as np
+from scipy.interpolate import interp1d
+from picca.delta_extraction.astronomical_objects.forest import Forest
 from picca.delta_extraction.astronomical_objects.pk1d_forest import Pk1dForest
 from picca.delta_extraction.errors import ExpectedFluxError
+from picca.delta_extraction.utils import find_bins

 class ExpectedFlux:
     """Abstract class from which all classes computing the expected flux
@@ -53,6 +58,54 @@ def __init__(self, config):
         if self.num_processors == 0:
             self.num_processors = (multiprocessing.cpu_count() // 2)

+    def compute_delta_stack(self, forests, stack_from_deltas=False):
+        """Compute a stack of the delta field as a function of wavelength
+
+        Arguments
+        ---------
+        forests: List of Forest
+        A list of Forest from which to compute the deltas.
+
+        stack_from_deltas: bool - default: False
+        Flag to determine whether to stack from deltas or compute them
+        """
+        stack_delta = np.zeros_like(Forest.log_lambda_grid)
+        stack_weight = np.zeros_like(Forest.log_lambda_grid)
+
+        for forest in forests:
+            if stack_from_deltas:
+                delta = forest.deltas
+                weights = forest.weights
+            else:
+                # ignore forest if continuum could not be computed
+                if forest.continuum is None:
+                    continue
+                delta = forest.flux / forest.continuum
+                variance = self.compute_forest_variance(
+                    forest, forest.continuum)
+                weights = 1. / variance
+
+            bins = find_bins(forest.log_lambda, Forest.log_lambda_grid,
+                             Forest.wave_solution)
+            rebin = np.bincount(bins, weights=delta * weights)
+            stack_delta[:len(rebin)] += rebin
+            rebin = np.bincount(bins, weights=weights)
+            stack_weight[:len(rebin)] += rebin
+
+        w = stack_weight > 0
+        stack_delta[w] /= stack_weight[w]
+
+        self.get_stack_delta = interp1d(
+            Forest.log_lambda_grid[stack_weight > 0.],
+            stack_delta[stack_weight > 0.],
+            kind="nearest",
+            fill_value="extrapolate")
+        self.get_stack_delta_weights = interp1d(
+            Forest.log_lambda_grid[stack_weight > 0.],
+            stack_weight[stack_weight > 0.],
+            kind="nearest",
+            fill_value=0.0,
+            bounds_error=False)

     # pylint: disable=no-self-use
     # this method should use self in child classes
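The np.bincount calls in compute_delta_stack implement a weighted histogram: find_bins assigns each forest pixel to a bin of the common wavelength grid, the weighted deltas and the weights are summed per bin, and their ratio is the weighted mean delta. A minimal sketch, with find_bins mocked by a uniform-grid lookup and hypothetical pixel values:

import numpy as np
from scipy.interpolate import interp1d

log_lambda_grid = np.linspace(3.55, 3.60, 6)  # common wavelength grid
step = log_lambda_grid[1] - log_lambda_grid[0]

# three pixels from a single forest (hypothetical values)
log_lambda = np.array([3.551, 3.571, 3.589])
delta = np.array([0.10, -0.05, 0.02])
weights = np.array([1.0, 2.0, 1.0])

# stand-in for picca's find_bins: nearest bin on a uniform grid
bins = ((log_lambda - log_lambda_grid[0]) / step + 0.5).astype(int)

stack_delta = np.zeros_like(log_lambda_grid)
stack_weight = np.zeros_like(log_lambda_grid)
rebin = np.bincount(bins, weights=delta * weights)
stack_delta[:len(rebin)] += rebin  # weighted sum of deltas per bin
rebin = np.bincount(bins, weights=weights)
stack_weight[:len(rebin)] += rebin  # total weight per bin

w = stack_weight > 0
stack_delta[w] /= stack_weight[w]  # weighted mean delta per populated bin

# nearest-neighbour lookup over populated bins, as in compute_delta_stack
get_stack_delta = interp1d(log_lambda_grid[w], stack_delta[w],
                           kind="nearest", fill_value="extrapolate")
print(get_stack_delta(3.552))  # value of the nearest populated bin

Building the interpolators only from bins with stack_weight > 0 keeps empty bins out of the lookup; hdu_stack_deltas later evaluates them back on the full grid.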
@@ -83,11 +136,98 @@ def extract_deltas(self, forest):
         A Forest instance to which the continuum is applied
         """
         if self.los_ids.get(forest.los_id) is not None:
-            expected_flux = self.los_ids.get(forest.los_id).get("mean expected flux")
-            forest.deltas = forest.flux/expected_flux - 1
+            expected_flux = self.los_ids.get(
+                forest.los_id).get("mean expected flux")
+            forest.deltas = forest.flux / expected_flux - 1
             forest.weights = self.los_ids.get(forest.los_id).get("weights")
             if isinstance(forest, Pk1dForest):
                 forest.ivar = self.los_ids.get(forest.los_id).get("ivar")
                 forest.exposures_diff /= expected_flux

+            forest.continuum = self.los_ids.get(forest.los_id).get("continuum")
+
+    def hdu_cont(self, results):
+        """Add to the results file an HDU with the continuum information
+
+        Arguments
+        ---------
+        results: fitsio.FITS
+        The open fits file
+        """
+        results.write([
+            Forest.log_lambda_rest_frame_grid,
+            self.get_mean_cont(Forest.log_lambda_rest_frame_grid),
+            self.get_mean_cont_weight(Forest.log_lambda_rest_frame_grid),
+        ],
+                      names=['loglam_rest', 'mean_cont', 'weight'],
+                      extname='CONT')
+
+    def hdu_stack_deltas(self, results):
+        """Add to the results file an HDU with the delta stack
+
+        Arguments
+        ---------
+        results: fitsio.FITS
+        The open fits file
+        """
+        header = {}
+        if hasattr(self, 'order'):
+            header["FITORDER"] = self.order
+
+        results.write([
+            Forest.log_lambda_grid,
+            self.get_stack_delta(Forest.log_lambda_grid),
+            self.get_stack_delta_weights(Forest.log_lambda_grid)
+        ],
+                      names=['loglam', 'stack', 'weight'],
+                      header=header,
+                      extname='STACK_DELTAS')
+
+    def hdu_var_func(self, results):
+        """Add to the results file an HDU with the variance functions
+
+        Arguments
+        ---------
+        results: fitsio.FITS
+        The open fits file
+        """
+        values = [self.log_lambda_var_func_grid]
+        names = ['loglam']
+
+        if hasattr(self, 'get_eta'):
+            values.append(self.get_eta(self.log_lambda_var_func_grid))
+            names.append('eta')
+        if hasattr(self, 'get_var_lss'):
+            values.append(self.get_var_lss(self.log_lambda_var_func_grid))
+            names.append('var_lss')
+        if hasattr(self, 'get_fudge'):
+            values.append(self.get_fudge(self.log_lambda_var_func_grid))
+            names.append('fudge')
+        if hasattr(self, 'get_num_pixels'):
+            values.append(self.get_num_pixels(self.log_lambda_var_func_grid))
+            names.append('num_pixels')
+        if hasattr(self, 'get_valid_fit'):
+            values.append(self.get_valid_fit(self.log_lambda_var_func_grid))
+            names.append('valid_fit')
+
+        results.write(values, names=names, extname='VAR_FUNC')
+
+    def save_iteration_step(self, iteration):
+        """Save the statistical properties of deltas at a given iteration
+        step
+
+        Arguments
+        ---------
+        iteration: int
+        Iteration number. -1 for final iteration
+        """
+        if iteration == -1:
+            iter_out_file = self.iter_out_prefix + ".fits.gz"
+        else:
+            iter_out_file = self.iter_out_prefix + f"_iteration{iteration+1}.fits.gz"
+
+        with fitsio.FITS(self.out_dir + iter_out_file, 'rw',
+                         clobber=True) as results:
+            self.hdu_stack_deltas(results)
+            self.hdu_var_func(results)
+            self.hdu_cont(results)
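save_iteration_step writes one compressed FITS file per iteration, each holding the STACK_DELTAS, VAR_FUNC and CONT binary tables produced by the hdu_* helpers. A minimal sketch of the fitsio pattern, with hypothetical column data and output prefix:

import fitsio
import numpy as np

loglam = np.linspace(3.55, 3.60, 6)
stack = np.zeros_like(loglam)
weight = np.ones_like(loglam)

iteration = -1  # -1 marks the final iteration
suffix = (".fits.gz" if iteration == -1
          else f"_iteration{iteration + 1}.fits.gz")

with fitsio.FITS("delta_attributes" + suffix, 'rw', clobber=True) as results:
    # each hdu_* helper writes one binary-table extension like this one
    results.write([loglam, stack, weight],
                  names=['loglam', 'stack', 'weight'],
                  header={"FITORDER": 1},
                  extname='STACK_DELTAS')

Since hdu_var_func assembles its column list with hasattr checks, the VAR_FUNC extension only contains the variance terms (eta, var_lss, fudge, ...) that the concrete ExpectedFlux subclass actually defines.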