DM-28542: Implement RFC-750 #33

Merged: 3 commits, Mar 17, 2021
32 changes: 22 additions & 10 deletions python/lsst/meas/extensions/scarlet/scarletDeblendTask.py
@@ -414,6 +414,18 @@ class ScarletDeblendConfig(pexConfig.Config):
doc=("If True, catch exceptions thrown by the deblender, log them, "
"and set a flag on the parent, instead of letting them propagate up"))

# Other options
columnInheritance = pexConfig.DictField(
keytype=str, itemtype=str, default={
"deblend_nChild": "deblend_parentNChild",
"deblend_nPeaks": "deblend_parentNPeaks",
"deblend_spectrumInitFlag": "deblend_spectrumInitFlag",
},
doc="Columns to pass from the parent to the child. "
"The key is the name of the column for the parent record, "
"the value is the name of the column to use for the child."
)
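The new columnInheritance option makes parent-to-child column propagation configurable instead of hard-coded. As a minimal sketch (not part of this diff), a downstream override could add another mapping, assuming the usual lsst.pex.config dict-override syntax and that both columns exist in the output schema; the column pair below is hypothetical:

config = ScarletDeblendConfig()
# Hypothetical extra mapping (illustration only): copy the parent's value of
# "deblend_skipped" into a "deblend_parentSkipped" column on each child.
config.columnInheritance["deblend_skipped"] = "deblend_parentSkipped"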


class ScarletDeblendTask(pipeBase.Task):
"""ScarletDeblendTask
@@ -523,8 +535,9 @@ def _addSchemaKeys(self, schema):
"This includes peaks that may have been culled "
"during deblending or failed to deblend")
self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
doc="Same as deblend_n_peaks, but the number of peaks "
"in the parent footprint")
doc="deblend_nPeaks from this records parent.")
self.parentNChildKey = schema.addField("deblend_parentNChild", type=np.int32,
doc="deblend_nChild from this records parent.")
self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
doc="Flux measurement from scarlet")
self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
@@ -680,19 +693,22 @@ def deblend(self, mExposure, sources):
self._skipParent(src, mExposure.mask)
continue

# Calculate the number of children deblended from the parent
nChild = len([k for k in range(len(sources)) if k not in skipped])

# Add the merged source as a parent in the catalog for each band
templateParents = {}
parentId = src.getId()
for f in filters:
templateParents[f] = templateCatalogs[f][pk]
templateParents[f].set(self.nChildKey, nChild)
templateParents[f].set(self.nPeaksKey, len(foot.peaks))
templateParents[f].set(self.runtimeKey, runtime)
templateParents[f].set(self.iterKey, len(blend.loss))
logL = blend.loss[-1]-blend.observations[0].log_norm
templateParents[f].set(self.scarletLogLKey, logL)

# Add each source to the catalogs in each band
nchild = 0
for k, source in enumerate(sources):
# Skip any sources with no flux or that scarlet skipped because
# it could not initialize
Expand All @@ -715,11 +731,6 @@ def deblend(self, mExposure, sources):
if parentId == 0:
child.setId(src.getId())
child.set(self.runtimeKey, runtime)
nchild += 1

# Set the number of children for each parent
for f in filters:
templateParents[f].set(self.nChildKey, nchild)

K = len(list(templateCatalogs.values())[0])
self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
@@ -858,6 +869,7 @@ def _addChild(self, parent, sources, heavy, scarletSource, blend_converged, xy0,
# measurement.
src.set(self.scarletFluxKey, flux)

# Set the spectrum init flag from the parent
src.set(self.scarletSpectrumInitKey, parent.get(self.scarletSpectrumInitKey))
# Propagate columns from the parent to the child
for parentColumn, childColumn in self.config.columnInheritance.items():
src.set(childColumn, parent.get(parentColumn))
return src
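Taken together, _addChild now copies every configured parent column onto the child via the columnInheritance mapping. A self-contained sketch of that propagation logic, with plain dictionaries standing in for afw records (illustration only):

columnInheritance = {
    "deblend_nChild": "deblend_parentNChild",
    "deblend_nPeaks": "deblend_parentNPeaks",
    "deblend_spectrumInitFlag": "deblend_spectrumInitFlag",
}
parent = {"deblend_nChild": 3, "deblend_nPeaks": 5, "deblend_spectrumInitFlag": True}
child = {}
for parentColumn, childColumn in columnInheritance.items():
    child[childColumn] = parent[parentColumn]
# child == {"deblend_parentNChild": 3, "deblend_parentNPeaks": 5,
#           "deblend_spectrumInitFlag": True}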