
hfi tests and toast
zonca committed Dec 21, 2012
1 parent 5efc56d commit 22cb296
Showing 8 changed files with 112 additions and 55 deletions.
differences.py (53 additions, 38 deletions)
@@ -41,7 +41,7 @@ def combine_maps(maps_and_weights):
 
     return combined_map
 
-def smooth_combine(maps_and_weights, variance_maps_and_weights, fwhm=np.radians(2.0), degraded_nside=32, spectra=False, smooth_mask=False, spectra_mask=False, base_filename="out", root_folder=".", metadata={}):
+def smooth_combine(maps_and_weights, variance_maps_and_weights=None, fwhm=np.radians(2.0), degraded_nside=32, spectra=False, smooth_mask=False, spectra_mask=False, base_filename="out", root_folder=".", metadata={}, chi2=False):
     """Combine, smooth, take-spectra, write metadata
     The maps (I or IQU) are first combined with their own weights, then smoothed and degraded.
@@ -81,10 +81,13 @@ def smooth_combine(maps_and_weights, variance_maps_and_weights, fwhm=np.radians(
     assert hp.isnpixok(len(maps_and_weights[0][0])), "Input maps must have either 1 or 3 components"
 
     combined_map = combine_maps(maps_and_weights)
-    combined_variance_map = combine_maps(variance_maps_and_weights)
+    all_maps = combined_map
+    if not variance_maps_and_weights is None:
+        combined_variance_map = combine_maps(variance_maps_and_weights)
+        all_maps += combined_variance_map
 
     # apply mask
-    for m in combined_map + combined_variance_map:
+    for m in all_maps:
         m.mask |= smooth_mask
 
     monopole_I, dipole_I = hp.fit_dipole(combined_map[0], gal_cut=30)
@@ -118,12 +121,13 @@ def smooth_combine(maps_and_weights, variance_maps_and_weights, fwhm=np.radians(
             log.error("Write IQU Cls to fits requires more recent version of healpy")
         del cl
 
-        # expected cl from white noise
-        # /4. to have same normalization of cl
-        metadata["whitenoise_cl"] = utils.get_whitenoise_cl(combined_variance_map[0]/4., mask=combined_map[0].mask) / sky_frac
-        if is_IQU:
-            # /2. is the mean, /4. is the half difference in power
-            metadata["whitenoise_cl_P"] = utils.get_whitenoise_cl((combined_variance_map[1] + combined_variance_map[2])/2./4., mask=combined_map[1].mask | combined_map[2].mask) / sky_frac
+        if not variance_maps_and_weights is None:
+            # expected cl from white noise
+            # /4. to have same normalization of cl
+            metadata["whitenoise_cl"] = utils.get_whitenoise_cl(combined_variance_map[0]/4., mask=combined_map[0].mask) / sky_frac
+            if is_IQU:
+                # /2. is the mean, /4. is the half difference in power
+                metadata["whitenoise_cl_P"] = utils.get_whitenoise_cl((combined_variance_map[1] + combined_variance_map[2])/2./4., mask=combined_map[1].mask | combined_map[2].mask) / sky_frac
 
     # restore masks
     for m, mask in zip(combined_map, orig_mask):
@@ -134,19 +138,20 @@ def smooth_combine(maps_and_weights, variance_maps_and_weights, fwhm=np.radians(
 
     smoothed_map = hp.smoothing(combined_map, fwhm=fwhm)
 
-    log.debug("Smooth Variance")
-    if is_IQU:
-        smoothed_variance_map = [utils.smooth_variance_map(var, fwhm=fwhm) for var in combined_variance_map]
-        for comp,m,var in zip("IQU", smoothed_map, smoothed_variance_map):
-            metadata["map_chi2_%s" % comp] = np.mean(m**2 / var)
-        for comp,m,var in zip("IQU", combined_map, combined_variance_map):
-            metadata["map_unsm_chi2_%s" % comp] = np.mean(m**2 / var)
-    else:
-        smoothed_variance_map = utils.smooth_variance_map(combined_variance_map[0], fwhm=fwhm)
-        metadata["map_chi2"] = np.mean(smoothed_map**2 / smoothed_variance_map)
-        metadata["map_unsm_chi2"] = np.mean(combined_map[0]**2 / combined_variance_map[0])
+    if not variance_maps_and_weights is None:
+        log.debug("Smooth Variance")
+        if is_IQU:
+            smoothed_variance_map = [utils.smooth_variance_map(var, fwhm=fwhm) for var in combined_variance_map]
+            for comp,m,var in zip("IQU", smoothed_map, smoothed_variance_map):
+                metadata["map_chi2_%s" % comp] = np.mean(m**2 / var)
+            for comp,m,var in zip("IQU", combined_map, combined_variance_map):
+                metadata["map_unsm_chi2_%s" % comp] = np.mean(m**2 / var)
+        else:
+            smoothed_variance_map = utils.smooth_variance_map(combined_variance_map[0], fwhm=fwhm)
+            metadata["map_chi2"] = np.mean(smoothed_map**2 / smoothed_variance_map)
+            metadata["map_unsm_chi2"] = np.mean(combined_map[0]**2 / combined_variance_map[0])
 
-    del smoothed_variance_map
+        del smoothed_variance_map
     # removed downgrade of variance
     # smoothed_variance_map = hp.ud_grade(smoothed_variance_map, degraded_nside, power=2)
 
@@ -228,12 +233,15 @@ def halfrings(freq, ch, surv, pol='I', smooth_combine_config=None, root_folder="
             title="Halfring difference survey %s ch %s" % (str(surv), chtag),
             )
     log.info("Call smooth_combine")
-    var_pol = 'A' if len(pol) == 1 else 'ADF' # for I only read sigma_II, else read sigma_II, sigma_QQ, sigma_UU
+    variance_maps_and_weights = None
+    if smooth_combine_config["chi2"]:
+        var_pol = 'A' if len(pol) == 1 else 'ADF' # for I only read sigma_II, else read sigma_II, sigma_QQ, sigma_UU
+        variance_maps_and_weights = [(mapreader(freq, surv, ch, halfring=1, pol=var_pol), 1.),
+                                     (mapreader(freq, surv, ch, halfring=2, pol=var_pol), 1.)]
     smooth_combine(
             [(mapreader(freq, surv, ch, halfring=1, pol=pol), 1),
              (mapreader(freq, surv, ch, halfring=2, pol=pol), -1)],
-            [(mapreader(freq, surv, ch, halfring=1, pol=var_pol), 1.),
-             (mapreader(freq, surv, ch, halfring=2, pol=var_pol), 1.)],
+            variance_maps_and_weights,
             base_filename=base_filename,
             metadata=metadata,
             root_folder=root_folder,
@@ -272,11 +280,12 @@ def surveydiff(freq, ch, survlist=[1,2,3,4,5], pol='I', root_folder="out/", smoo
     # read all maps
     maps = dict([(surv, mapreader(freq, surv, ch, halfring=0, pol=pol, bp_corr=bp_corr)) for surv in survlist])
 
-    log.debug("Read variance")
-    var_pol = 'A' if len(pol) == 1 else 'ADF' # for I only read sigma_II, else read sigma_II, sigma_QQ, sigma_UU
-    variance_maps = dict([(surv, mapreader(freq, surv, ch, halfring=0, pol=var_pol, bp_corr=False)) for surv in survlist])
-    for var_m in variance_maps.values():
-        assert np.all(var_m >= 0)
+    if smooth_combine_config["chi2"]:
+        log.debug("Read variance")
+        var_pol = 'A' if len(pol) == 1 else 'ADF' # for I only read sigma_II, else read sigma_II, sigma_QQ, sigma_UU
+        variance_maps = dict([(surv, mapreader(freq, surv, ch, halfring=0, pol=var_pol, bp_corr=False)) for surv in survlist])
+        for var_m in variance_maps.values():
+            assert np.all(var_m >= 0)
 
     log.debug("All maps read")
 
@@ -288,6 +297,7 @@
             channel=chtag,
             )
 
+    variance_maps_and_weights = None
     combs = list(itertools.combinations(survlist, 2))
     for comb in combs:
         metadata["file_type"]="surveydiff_%s" % (reader.type_of_channel_set(ch),)
@@ -302,12 +312,14 @@
             metadata["title"] += " BPCORR"
             base_filename += "_bpcorr"
 
+        if smooth_combine_config["chi2"]:
+            variance_maps_and_weights = [ (variance_maps[comb[0]], 1),
+                                          (variance_maps[comb[1]], 1) ]
         log.debug("Launching smooth_combine")
         smooth_combine(
                 [ (maps[comb[0]], 1),
                   (maps[comb[1]], -1) ],
-                [ (variance_maps[comb[0]], 1),
-                  (variance_maps[comb[1]], 1) ],
+                variance_maps_and_weights,
                 base_filename=base_filename,
                 root_folder=root_folder,
                 metadata=dict(metadata.items() + {"surveys": comb}.items()),
@@ -340,11 +352,12 @@ def chdiff(freq, chlist, surv, pol='I', smooth_combine_config=None, root_folder=
     # read all maps
    maps = dict([(ch, mapreader(freq, surv, ch, halfring=0, pol=pol)) for ch in chlist])
 
-    log.debug("Read variance")
-    var_pol = 'A' if len(pol) == 1 else 'ADF' # for I only read sigma_II, else read sigma_II, sigma_QQ, sigma_UU
-    variance_maps = dict([(ch, mapreader(freq, surv, ch, halfring=0, pol=var_pol, bp_corr=False)) for ch in chlist])
-    for var_m in variance_maps.values():
-        assert np.all(var_m >= 0)
+    if smooth_combine_config["chi2"]:
+        log.debug("Read variance")
+        var_pol = 'A' if len(pol) == 1 else 'ADF' # for I only read sigma_II, else read sigma_II, sigma_QQ, sigma_UU
+        variance_maps = dict([(ch, mapreader(freq, surv, ch, halfring=0, pol=var_pol, bp_corr=False)) for ch in chlist])
+        for var_m in variance_maps.values():
+            assert np.all(var_m >= 0)
 
     ps_mask, gal_mask = mapreader.read_masks(freq)
 
@@ -357,11 +370,13 @@
         metadata["title"]="Channel difference %s-%s SS%s" % (comb[0], comb[1], surv)
         metadata["channel"] = comb
         metadata["file_type"] = "chdiff"
+        if smooth_combine_config["chi2"]:
+            variance_maps_and_weights = [ (variance_maps[comb[0]], 1),
+                                          (variance_maps[comb[1]], 1) ]
         smooth_combine(
                 [ (maps[comb[0]], 1),
                   (maps[comb[1]], -1) ],
-                [ (variance_maps[comb[0]], 1),
-                  (variance_maps[comb[1]], 1) ],
+                variance_maps_and_weights,
                 base_filename=os.path.join("chdiff", "%s-%s_SS%s" % (comb[0], comb[1], surv)),
                 root_folder=root_folder,
                 metadata=metadata,
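The map_chi2* entries written above are a reduced chi-square of the difference map against its white-noise expectation: the mean of m**2 / variance over unmasked pixels, computed for both the raw and the smoothed maps. A self-contained numpy illustration of that statistic (the map size and noise level here are made up, not taken from the pipeline):

import numpy as np

npix = 12 * 64 ** 2                               # a HEALPix map at nside=64
variance = np.full(npix, 1e-6)                    # per-pixel white-noise variance
diff_map = np.sqrt(variance) * np.random.standard_normal(npix)

# same statistic as metadata["map_unsm_chi2"]: mean of m**2 / var
chi2 = np.mean(diff_map ** 2 / variance)
print(chi2)   # ~1 when the difference map is consistent with the noise model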
read_dx9_nersc.conf (4 additions, 1 deletion)
@@ -1,10 +1,13 @@
 [Templates]
 base_dir = /global/project/projectdirs/planck/data/mission/DPC_maps/dx9_delta/lfi
+base_dir_toast = /global/project/projectdirs/planck/user/zonca/issues/dx9_delta_toast/maps/out/map_ecn512s
+suffix =
+map_channel = %(base_dir_toast)s_LFI{channel}_{survey}%(suffix)s.fits
 
 map_frequency = %(base_dir)s/LFI_{frequency}_????_????????_{survey}*.fits
 map_frequency_halfring = %(base_dir)s/JackKnife/LFI_{frequency}_????_????????_ringhalf_{halfring}_{survey}*.fits
 
-map_frequency_survey = %(base_dir)s/Surveys/LFI_{frequency}_????_????????_survey_{survey}*.fits
+map_frequency_survey = %(base_dir)s/LFI_{frequency}_????_????????_survey_{survey}*.fits
 map_frequency_survey_halfring = %(base_dir)s/JackKnife/LFI_{frequency}_????_????????_ringhalf_{halfring}_survey_{survey}*.fits
 map_channel_survey = %(base_dir)s/Single_Radiometer/LFI_{frequency}_????_????????_{channel}_survey_{survey}*.fits
 
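The template values above mix two substitution mechanisms: %(name)s fields are expanded by ConfigParser interpolation when the option is read, while {channel}, {survey}, {frequency} and {halfring} stay in the string, presumably to be filled in later by reader.DXReader (how the reader does that is not shown in this diff). A standalone sketch of the new map_channel template under that assumption, with an illustrative base_dir_toast path:

from ConfigParser import SafeConfigParser
from StringIO import StringIO

parser = SafeConfigParser()
parser.readfp(StringIO("""
[Templates]
base_dir_toast = /path/to/toast/maps/out/map_ecn512s
suffix =
map_channel = %(base_dir_toast)s_LFI{channel}_{survey}%(suffix)s.fits
"""))

template = parser.get("Templates", "map_channel")    # %()s fields already expanded
print(template.format(channel="18M", survey="nominal"))
# /path/to/toast/maps/out/map_ecn512s_LFI18M_nominal.fits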
read_hfi_dx9_nersc.conf (new file, 17 additions)
@@ -0,0 +1,17 @@
+[Templates]
+base_dir = /global/project/projectdirs/planck/data/mission/DPC_maps/dx9/hfi
+
+base_map_frequency = %(base_dir)s/official/HFI_{frequency}_????_????????
+map_frequency = %(base_map_frequency)s_{survey}.fits
+map_frequency_halfring = %(base_map_frequency)s_ringhalf_{halfring}_{survey}.fits
+
+map_frequency_survey = %(base_map_frequency)s_survey_{survey}.fits
+map_channel_survey = %(base_dir)s/Single_Radiometer/LFI_{frequency}_????_????????_{channel}_survey_{survey}*.fits
+
+map_detset = %(base_dir)s/Couple_horn/LFI_{frequency}_????_????????_{channel}_{survey}*.fits
+map_detset_halfring = %(base_dir)s/JackKnife/LFI_{frequency}_????_????????_ringhalf_{halfring}_{channel}_{survey}*.fits
+map_detset_survey = %(base_dir)s/Couple_horn_Surveys/LFI_{frequency}_????_????????_{channel}_survey_{survey}*.fits
+map_detset_survey_halfring = %(base_dir)s/Couple_horn_Surveys/LFI_{frequency}_????_????????ringhalf_{channel}_{halfring}_survey_{survey}*.fits
+
+ps_mask = /global/project/projectdirs/planck/user/zonca/masks/ptsrc_apo.fits
+spectra_mask = /global/project/projectdirs/planck/user/zonca/masks/union_mask_{frequency}.fits
read_toast.conf (1 addition, 1 deletion)
@@ -1,5 +1,5 @@
 [Templates]
-base_dir = /global/project/projectdirs/planck/user/zonca/issues/dx9_delta_toast/maps/out/map_osgn
+base_dir = /global/project/projectdirs/planck/user/zonca/issues/dx9_delta_toast/maps/out/map_ecn512s
 suffix =
 
 map_frequency = %(base_dir)s_0{frequency}_{survey}%(suffix)s.fits
reader.py (3 additions, 1 deletion)
@@ -105,7 +105,9 @@ def __call__(self, freq, surv, chtag='', halfring=0, pol="I", bp_corr=False):
         is_halfring = halfring != 0
 
         # stokes component
-        stokes = stokes_I if channel_type in ["channel", "horn"] else stokes_IQU
+        stokes = stokes_IQU
+        if (channel_type in ["channel", "horn"]) or freq >= 545:
+            stokes = stokes_I
         if isinstance(pol, str):
             components = [stokes.index(p) for p in pol]
             if len(components) == 1:
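The change above makes every channel set at 545 and 857 GHz intensity-only, on top of the existing rule for single radiometers and horns. A standalone sketch of the resulting component selection; stokes_I and stokes_IQU are assumed here to be the strings "I" and "IQU", which this diff does not show:

stokes_I, stokes_IQU = "I", "IQU"    # assumed definitions, not part of the diff

def select_components(channel_type, freq, pol):
    stokes = stokes_IQU
    if (channel_type in ["channel", "horn"]) or freq >= 545:
        stokes = stokes_I
    # map the requested Stokes letters to component indices
    return [stokes.index(p) for p in pol]

print(select_components("frequency", 143, "IQU"))   # [0, 1, 2]
print(select_components("frequency", 545, "I"))     # [0], 545/857 GHz maps are intensity-only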
run_dx9_10deg.conf (6 additions, 3 deletions)
@@ -2,10 +2,13 @@
 nside = 1024
 smoothing = 10
 degraded_nside = 128
+spectra = false
+chi2 = false
 [run]
+frequency = [30]
 paral = false
-reader_conf = read_dx9.conf
-output_folder = ddx9t
+reader_conf = read_toast.conf
+output_folder = ecn512s
 run_halfrings = true
 run_surveydiff = true
-run_chdiff = true
+run_chdiff = false
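The new spectra and chi2 switches and the frequency list are read by run_null.py (shown below) with getboolean and json.loads. A minimal sketch of that parsing against this configuration file:

import json
from ConfigParser import SafeConfigParser

config = SafeConfigParser()
config.read("run_dx9_10deg.conf")

spectra = config.getboolean("smooth_combine", "spectra")   # "false" -> False
chi2 = config.getboolean("smooth_combine", "chi2")         # "false" -> False
freqs = json.loads(config.get("run", "frequency"))         # "[30]"  -> [30]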
run_hfi_dx9_10deg.conf (new file, 12 additions)
@@ -0,0 +1,12 @@
+[smooth_combine]
+nside = 256
+smoothing = 10
+degraded_nside = 128
+[run]
+frequency = [100, 143, 217, 353, 545, 857]
+paral = false
+reader_conf = read_hfi_dx9_nersc.conf
+output_folder = ddx9h
+run_halfrings = true
+run_surveydiff = true
+run_chdiff = false
run_null.py (16 additions, 11 deletions)
@@ -4,6 +4,8 @@
 from differences import halfrings, surveydiff, chdiff
 import reader
 from ConfigParser import SafeConfigParser, NoOptionError
+import exceptions
+import json
 
 import utils
 import sys
@@ -29,42 +31,46 @@
 
 # create map reader
 mapreader = reader.DXReader(config.get("run", "reader_conf"), nside=config.getint("smooth_combine", "nside"))
-smooth_combine_config = dict(fwhm=np.radians(config.getfloat("smooth_combine", "smoothing")), degraded_nside=config.getint("smooth_combine", "degraded_nside"), spectra=True)
+smooth_combine_config = dict(fwhm=np.radians(config.getfloat("smooth_combine", "smoothing")), degraded_nside=config.getint("smooth_combine", "degraded_nside"), spectra=config.getboolean("smooth_combine", "spectra"), chi2=config.getboolean("smooth_combine", "chi2"))
 
 if paral:
     tasks = []
     tc = Client()
     lview = tc.load_balanced_view() # default load-balanced view
 
+# get list of frequencies
+freqs = json.loads(config.get("run", "frequency"))
+
 if config.getboolean("run", "run_halfrings"):
     print "HALFRINGS"
     survs = ["nominal", "full"]
-    freqs = [30, 44, 70]
     for freq in freqs:
         chtags = [""]
+        pol = "IQU"
         if freq == 70:
             chtags += ["18_23", "19_22", "20_21"]
+        if freq >= 545:
+            pol = "I"
         for chtag in chtags:
             for surv in survs:
                 if paral:
                     tasks.append(lview.apply_async(halfrings,freq, chtag,
-                        surv, pol='IQU',
+                        surv, pol=pol,
                         smooth_combine_config=smooth_combine_config,
                         root_folder=root_folder,log_to_file=True,
                         mapreader=mapreader))
                 else:
                     try:
-                        halfrings(freq, chtag, surv, pol='IQU',
+                        halfrings(freq, chtag, surv, pol=pol,
                             smooth_combine_config=smooth_combine_config,
                             root_folder=root_folder,log_to_file=False,
                             mapreader=mapreader)
-                    except NoOptionError as e:
+                    except (NoOptionError, exceptions.IOError) as e:
                         log.error("SKIP TEST: " + e.message)
 
 if config.getboolean("run", "run_surveydiff"):
     print "SURVDIFF"
     survs = [1,2,3,4,5]
-    freqs = [30, 44, 70]
     for bp_corr in [False, True]:
         for freq in freqs:
             chtags = [""]
@@ -76,7 +82,7 @@
             for chtag in chtags:
                 if bp_corr and chtag: # no corr for single ch
                     continue
-                if chtag and chtag.find("_") < 0:
+                if (freq >= 545) or (chtag and chtag.find("_") < 0):
                     pol='I'
                 else:
                     pol="IQU"
@@ -93,13 +99,12 @@
                             smooth_combine_config=smooth_combine_config,
                             root_folder=root_folder,log_to_file=False,
                             bp_corr=bp_corr, mapreader=mapreader)
-                    except NoOptionError as e:
+                    except (NoOptionError, exceptions.IOError) as e:
                         log.error("SKIP TEST: " + e.message)
 
 if config.getboolean("run", "run_chdiff"):
     print "CHDIFF"
     survs = [1,2,3,4,5]
-    freqs = [30, 44, 70]
 
     for freq in freqs:
         for surv in survs:
@@ -119,9 +124,9 @@
                         root_folder=root_folder,
                         log_to_file=False,
                         mapreader=mapreader)
-                except NoOptionError as e:
+                except (NoOptionError, exceptions.IOError) as e:
                     log.error("SKIP TEST: " + e.message)
 
 if paral:
     print("Wait for %d tasks to complete" % len(tasks))
-    tc.wait(tasks)
+    #tc.wait(tasks)

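When paral is true, run_null.py submits each null test to an IPython.parallel load-balanced view instead of running it inline. A hedged sketch of that dispatch pattern, assuming an IPython cluster is already running (for example started with ipcluster); the task function below is a stand-in, not code from this repository:

from IPython.parallel import Client

def null_test(freq, surv):           # placeholder for halfrings/surveydiff/chdiff
    return (freq, surv)

tc = Client()
lview = tc.load_balanced_view()      # default load-balanced view
tasks = [lview.apply_async(null_test, freq, surv)
         for freq in [100, 143] for surv in ["nominal", "full"]]
tc.wait(tasks)                       # block until every submitted task finishes
print([ar.get() for ar in tasks])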