DM-40917: Add description and units to healsparse property map metadata #836

Merged · 3 commits · Oct 4, 2023
Changes from all commits
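For orientation: this change attaches three metadata keys (DESCRIPTION, UNIT, and OPERATION) to every healsparse property map. Below is a minimal sketch, not part of the diff, of how such a map is created and read back using the same healsparse calls that appear in the changes:

```python
import numpy as np
import healsparse as hsp

# The keys this change writes into every property map's metadata.
metadata = {
    "DESCRIPTION": "PSF size",
    "UNIT": "pixel",
    "OPERATION": "weighted mean",
}

# make_empty(..., metadata=...) is the call used in initialize_tract_maps below;
# the nside values here are arbitrary.
psf_size_map = hsp.HealSparseMap.make_empty(32, 4096, np.float64, metadata=metadata)

# Reading the metadata back, as the new test asserts.
print(psf_size_map.metadata["DESCRIPTION"])  # "PSF size"
print(psf_size_map.metadata["UNIT"])         # "pixel"
print(psf_size_map.metadata["OPERATION"])    # "weighted mean"
```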
4 changes: 3 additions & 1 deletion python/lsst/pipe/tasks/healSparseMapping.py
@@ -978,7 +978,9 @@ def consolidate_map(self, sky_map, input_refs):
input_map.nside_sparse,
input_map.dtype,
sentinel=input_map._sentinel,
cov_pixels=cov_pix)
cov_pixels=cov_pix,
metadata=input_map.metadata,
)

# Only use pixels that are properly inside the tract.
vpix, ra, dec = input_map.valid_pixels_pos(return_pixels=True)
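(Note: forwarding metadata=input_map.metadata here means the consolidated survey-level map carries the same DESCRIPTION, UNIT, and OPERATION entries as the per-tract maps it is built from.)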
59 changes: 54 additions & 5 deletions python/lsst/pipe/tasks/healSparseMappingProperties.py
@@ -29,6 +29,7 @@

import numpy as np
import healsparse as hsp
import copy

import lsst.pex.config as pexConfig
import lsst.geom
@@ -209,6 +210,9 @@ class BasePropertyMap:
dtype = np.float64
requires_psf = False

description = ""
unit = ""

ConfigClass = BasePropertyMapConfig

registry = PropertyMapRegistry(BasePropertyMapConfig)
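The empty description and unit defaults above are overridden by each concrete subclass. A sketch of the pattern the maps below follow; the map name, catalog column, and unit here are invented purely for illustration:

```python
@register_property_map("my_quantity")
class MyQuantityPropertyMap(BasePropertyMap):
    """Hypothetical property map, shown only to illustrate the pattern."""
    description = "My quantity"
    unit = "nJy"

    def _compute(self, row, ra, dec, scalings, psf_array=None):
        # Same signature as the concrete _compute methods in this file.
        return scalings*row["myQuantity"]
```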
@@ -229,26 +233,45 @@ def initialize_tract_maps(self, nside_coverage, nside):
nside : `int`
Healpix nside of the property map.
"""
base_metadata = {
"DESCRIPTION": self.description,
"UNIT": self.unit,
}
if self.config.do_min:
metadata = copy.copy(base_metadata)
metadata["OPERATION"] = "minimum"
self.min_map = hsp.HealSparseMap.make_empty(nside_coverage,
nside,
self.dtype)
self.dtype,
metadata=metadata)
if self.config.do_max:
metadata = copy.copy(base_metadata)
metadata["OPERATION"] = "maximum"
self.max_map = hsp.HealSparseMap.make_empty(nside_coverage,
nside,
self.dtype)
self.dtype,
metadata=metadata)
if self.config.do_mean:
metadata = copy.copy(base_metadata)
metadata["OPERATION"] = "mean"
self.mean_map = hsp.HealSparseMap.make_empty(nside_coverage,
nside,
self.dtype)
self.dtype,
metadata=metadata)
if self.config.do_weighted_mean:
metadata = copy.copy(base_metadata)
metadata["OPERATION"] = "weighted mean"
self.weighted_mean_map = hsp.HealSparseMap.make_empty(nside_coverage,
nside,
self.dtype)
self.dtype,
metadata=metadata)
if self.config.do_sum:
metadata = copy.copy(base_metadata)
metadata["OPERATION"] = "sum"
self.sum_map = hsp.HealSparseMap.make_empty(nside_coverage,
nside,
self.dtype)
self.dtype,
metadata=metadata)

def initialize_values(self, n_pixels):
"""Initialize the value arrays for accumulation.
@@ -387,6 +410,8 @@ def _post_process(self, total_weights, total_inputs):
@register_property_map("exposure_time")
class ExposureTimePropertyMap(BasePropertyMap):
"""Exposure time property map."""
description = "Exposure time"
unit = "s"

def _compute(self, row, ra, dec, scalings, psf_array=None):
return row.getVisitInfo().getExposureTime()
@@ -395,6 +420,8 @@ def _compute(self, row, ra, dec, scalings, psf_array=None):
@register_property_map("psf_size")
class PsfSizePropertyMap(BasePropertyMap):
"""PSF size property map."""
description = "PSF size"
unit = "pixel"
requires_psf = True

def _compute(self, row, ra, dec, scalings, psf_array=None):
@@ -404,6 +431,7 @@ def _compute(self, row, ra, dec, scalings, psf_array=None):
@register_property_map("psf_e1")
class PsfE1PropertyMap(BasePropertyMap):
"""PSF shape e1 property map."""
description = "PSF e1"
requires_psf = True

def _compute(self, row, ra, dec, scalings, psf_array=None):
@@ -413,6 +441,7 @@ def _compute(self, row, ra, dec, scalings, psf_array=None):
@register_property_map("psf_e2")
class PsfE2PropertyMap(BasePropertyMap):
"""PSF shape e2 property map."""
description = "PSF e2"
requires_psf = True

def _compute(self, row, ra, dec, scalings, psf_array=None):
@@ -422,6 +451,7 @@ def _compute(self, row, ra, dec, scalings, psf_array=None):
@register_property_map("n_exposure")
class NExposurePropertyMap(BasePropertyMap):
"""Number of exposures property map."""
description = "Number of exposures"
dtype = np.int32

def _compute(self, row, ra, dec, scalings, psf_array=None):
@@ -442,6 +472,8 @@ def validate(self):
@register_property_map("psf_maglim")
class PsfMaglimPropertyMap(BasePropertyMap):
"""PSF magnitude limit property map."""
description = "PSF magnitude limit"
unit = "mag(AB)"
requires_psf = True

ConfigClass = PsfMaglimPropertyMapConfig
@@ -460,20 +492,28 @@ def _post_process(self, total_weights, total_inputs):
@register_property_map("sky_background")
class SkyBackgroundPropertyMap(BasePropertyMap):
"""Sky background property map."""
description = "Sky background"
unit = "nJy"

def _compute(self, row, ra, dec, scalings, psf_array=None):
return scalings*row["skyBg"]


@register_property_map("sky_noise")
class SkyNoisePropertyMap(BasePropertyMap):
"""Sky noise property map."""
description = "Sky noise"
unit = "nJy"

def _compute(self, row, ra, dec, scalings, psf_array=None):
return scalings*row["skyNoise"]


@register_property_map("dcr_dra")
class DcrDraPropertyMap(BasePropertyMap):
"""Effect of DCR on delta-RA property map."""
description = "Effect of DCR on delta-RA"

def _compute(self, row, ra, dec, scalings, psf_array=None):
par_angle = row.getVisitInfo().getBoresightParAngle().asRadians()
return np.tan(np.deg2rad(row["zenithDistance"]))*np.sin(par_angle)
@@ -482,6 +522,8 @@ def _compute(self, row, ra, dec, scalings, psf_array=None):
@register_property_map("dcr_ddec")
class DcrDdecPropertyMap(BasePropertyMap):
"""Effect of DCR on delta-Dec property map."""
description = "Effect of DCR on delta-Dec"

def _compute(self, row, ra, dec, scalings, psf_array=None):
par_angle = row.getVisitInfo().getBoresightParAngle().asRadians()
return np.tan(np.deg2rad(row["zenithDistance"]))*np.cos(par_angle)
@@ -490,6 +532,8 @@ def _compute(self, row, ra, dec, scalings, psf_array=None):
@register_property_map("dcr_e1")
class DcrE1PropertyMap(BasePropertyMap):
"""Effect of DCR on psf shape e1 property map."""
description = "Effect of DCR on PSF shape e1"

def _compute(self, row, ra, dec, scalings, psf_array=None):
par_angle = row.getVisitInfo().getBoresightParAngle().asRadians()
return (np.tan(np.deg2rad(row["zenithDistance"]))**2.)*np.cos(2.*par_angle)
@@ -498,6 +542,8 @@ def _compute(self, row, ra, dec, scalings, psf_array=None):
@register_property_map("dcr_e2")
class DcrE2PropertyMap(BasePropertyMap):
"""Effect of DCR on psf shape e2 property map."""
description = "Effect of DCR on PSF shape e2"

def _compute(self, row, ra, dec, scalings, psf_array=None):
par_angle = row.getVisitInfo().getBoresightParAngle().asRadians()
return (np.tan(np.deg2rad(row["zenithDistance"]))**2.)*np.sin(2.*par_angle)
@@ -506,6 +552,9 @@ def _compute(self, row, ra, dec, scalings, psf_array=None):
@register_property_map("epoch")
class EpochPropertyMap(BasePropertyMap):
"""Observation epoch (mjd) property map."""
description = "Observation epoch"
unit = "day"

def _compute(self, row, ra, dec, scalings, psf_array=None):
date = row.getVisitInfo().getDate()
return date.get(date.MJD)
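The unit strings declared above ("s", "pixel", "mag(AB)", "nJy", "day", plus the empty default) are all meant to be parseable by astropy. A quick sketch of the check the new test performs:

```python
from astropy import units

# Each declared UNIT string should parse with astropy.units.Unit; the empty
# default parses as a dimensionless unit.  Unit() raises ValueError for an
# unrecognized string.
for unit_str in ("s", "pixel", "mag(AB)", "nJy", "day", ""):
    parsed = units.Unit(unit_str)
    print(repr(unit_str), "->", parsed)
```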
28 changes: 28 additions & 0 deletions tests/test_makeSurveyPropertyMaps.py
@@ -27,6 +27,7 @@
import esutil
import warnings
import logging
from astropy import units

import lsst.utils.tests
import lsst.daf.butler
@@ -50,6 +51,8 @@
class DistTimesPsfAreaPropertyMap(BasePropertyMap):
"""Property map to compute the distance from the boresight center
by the psf area. Do not try this at home."""
description = "Distance times psf area"
unit = "deg*pixel**2"
requires_psf = True

def _compute(self, row, ra, dec, scalings, psf_array=None):
@@ -190,26 +193,51 @@ def testPropertyMapCreation(self):
if map_config.do_min:
self.assertTrue(hasattr(property_map, 'min_map'))
np.testing.assert_array_equal(property_map.min_map.valid_pixels, valid_pixels)
metadata = property_map.min_map.metadata
self.assertIsNotNone(metadata["DESCRIPTION"])
self.assertEqual(metadata["OPERATION"], "minimum")
unit = units.Unit(metadata["UNIT"])
self.assertIsNotNone(unit)
else:
self.assertFalse(hasattr(property_map, 'min_map'))
if map_config.do_max:
self.assertTrue(hasattr(property_map, 'max_map'))
np.testing.assert_array_equal(property_map.max_map.valid_pixels, valid_pixels)
metadata = property_map.max_map.metadata
self.assertIsNotNone(metadata["DESCRIPTION"])
self.assertEqual(metadata["OPERATION"], "maximum")
unit = units.Unit(metadata["UNIT"])
self.assertIsNotNone(unit)
else:
self.assertFalse(hasattr(property_map, 'max_map'))
if map_config.do_mean:
self.assertTrue(hasattr(property_map, 'mean_map'))
np.testing.assert_array_equal(property_map.mean_map.valid_pixels, valid_pixels)
metadata = property_map.mean_map.metadata
self.assertIsNotNone(metadata["DESCRIPTION"])
self.assertEqual(metadata["OPERATION"], "mean")
unit = units.Unit(metadata["UNIT"])
self.assertIsNotNone(unit)
else:
self.assertFalse(hasattr(property_map, 'mean_map'))
if map_config.do_weighted_mean:
self.assertTrue(hasattr(property_map, 'weighted_mean_map'))
np.testing.assert_array_equal(property_map.weighted_mean_map.valid_pixels, valid_pixels)
metadata = property_map.weighted_mean_map.metadata
self.assertIsNotNone(metadata["DESCRIPTION"])
self.assertEqual(metadata["OPERATION"], "weighted mean")
unit = units.Unit(metadata["UNIT"])
self.assertIsNotNone(unit)
else:
self.assertFalse(hasattr(property_map, 'weighted_mean_map'))
if map_config.do_sum:
self.assertTrue(hasattr(property_map, 'sum_map'))
np.testing.assert_array_equal(property_map.sum_map.valid_pixels, valid_pixels)
metadata = property_map.sum_map.metadata
self.assertIsNotNone(metadata["DESCRIPTION"])
self.assertEqual(metadata["OPERATION"], "sum")
unit = units.Unit(metadata["UNIT"])
self.assertIsNotNone(unit)
else:
self.assertFalse(hasattr(property_map, 'sum_map'))
