tickets/DM-27722 #30

Merged (4 commits), Jan 14, 2021
35 changes: 10 additions & 25 deletions python/lsst/meas/extensions/scarlet/scarletDeblendTask.py
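Two things change in this file: the `sourceModel` default flips from "single" to "double" (first hunk below), and the always-`None` `fluxCatalogs` return value is dropped from `run`/`deblend`. For the config change, a minimal sketch of pinning the old model choice; the import path follows this file, but how a given pipeline applies the override is an assumption:

```python
# Sketch only: assumes ScarletDeblendConfig imports from the module changed
# in this diff; apply the override however your pipeline loads configs.
from lsst.meas.extensions.scarlet.scarletDeblendTask import ScarletDeblendConfig

config = ScarletDeblendConfig()
config.sourceModel = "single"  # pin the pre-change behaviour; the new default is "double"
```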
@@ -310,7 +310,7 @@ class ScarletDeblendConfig(pexConfig.Config):
dtype=bool, default=True,
doc="Whether or not to process isolated sources in the deblender")
sourceModel = pexConfig.Field(
- dtype=str, default="single",
+ dtype=str, default="double",
doc=("How to determine which model to use for sources, from\n"
"- 'single': use a single component for all sources\n"
"- 'double': use a bulge disk model for all sources\n"
@@ -421,6 +421,8 @@ def __init__(self, schema, peakSchema=None, **kwargs):
assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
self._addSchemaKeys(schema)
self.schema = schema
+ self.toCopyFromParent = [item.key for item in self.schema
+ if item.field.getName().startswith("merge_footprint")]

def _addSchemaKeys(self, schema):
"""Add deblender specific keys to the schema
@@ -504,20 +506,11 @@ def run(self, mExposure, mergedSources):

Returns
-------
- fluxCatalogs: dict or None
- Keys are the names of the filters and the values are
- `lsst.afw.table.source.source.SourceCatalog`'s.
- These are the flux-conserved catalogs with heavy footprints with
- the image data weighted by the multiband templates.
- If `self.config.conserveFlux` is `False`, then this item will be
- None
- templateCatalogs: dict or None
+ templateCatalogs: dict
Keys are the names of the filters and the values are
`lsst.afw.table.source.source.SourceCatalog`'s.
These are catalogs with heavy footprints that are the templates
created by the multiband templates.
- If `self.config.saveTemplates` is `False`, then this item will be
- None
"""
return self.deblend(mExposure, mergedSources)

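With `fluxCatalogs` gone, `run` returns a single dict of per-filter template catalogs rather than a tuple. A minimal usage sketch, assuming a configured `ScarletDeblendTask` and the variable names used in `tests/test_deblend.py` further down:

```python
# Sketch only: deblendTask, coadds (a MultibandExposure), and catalog (the
# merged detection catalog) are assumed to be set up as in the test below.
templateCatalogs = deblendTask.run(coadds, catalog)
for band, templateCatalog in templateCatalogs.items():
    # one SourceCatalog of heavy-footprint template models per filter
    print(band, len(templateCatalog))
```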
@@ -536,20 +529,11 @@ def deblend(self, mExposure, sources):

Returns
-------
- fluxCatalogs : dict or None
- Keys are the names of the filters and the values are
- `lsst.afw.table.source.source.SourceCatalog`'s.
- These are the flux-conserved catalogs with heavy footprints with
- the image data weighted by the multiband templates.
- If `self.config.conserveFlux` is `False`, then this item will be
- None
templateCatalogs : dict or None
Keys are the names of the filters and the values are
`lsst.afw.table.source.source.SourceCatalog`'s.
These are catalogs with heavy footprints that are the templates
created by the multiband templates.
- If `self.config.saveTemplates` is `False`, then this item will be
- None
"""
import time

@@ -560,7 +544,6 @@ def deblend(self, mExposure, sources):
templateCatalogs = {}
# This must be returned but is not calculated right now, setting it to
# None to be consistent with doc string
- fluxCatalogs = None
for f in filters:
_catalog = afwTable.SourceCatalog(sources.table.clone())
_catalog.extend(sources)
@@ -677,7 +660,7 @@ def deblend(self, mExposure, sources):
err = "Heavy footprint should have a single peak, got {0}"
raise ValueError(err.format(len(models[f].peaks)))
cat = templateCatalogs[f]
- child = self._addChild(parentId, cat, models[f], source, converged,
+ child = self._addChild(src, cat, models[f], source, converged,
xy0=bbox.getMin(), flux=flux[fidx])
if parentId == 0:
child.setId(src.getId())
@@ -691,7 +674,7 @@ def deblend(self, mExposure, sources):
K = len(list(templateCatalogs.values())[0])
self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
% (n0, nparents, K-n0, K))
- return fluxCatalogs, templateCatalogs
+ return templateCatalogs

def _isLargeFootprint(self, footprint):
"""Returns whether a Footprint is large
@@ -766,7 +749,7 @@ def _skipParent(self, source, masks):
# Top level parents also have no parentNPeaks
source.set(self.parentNPeaksKey, 0)

- def _addChild(self, parentId, sources, heavy, scarletSource, blend_converged, xy0, flux):
+ def _addChild(self, parent, sources, heavy, scarletSource, blend_converged, xy0, flux):
"""Add a child to a catalog

This creates a new child in the source catalog,
@@ -776,8 +759,10 @@ def _addChild(self, parentId, sources, heavy, scarletSource, blend_converged, xy
"""
assert len(heavy.getPeaks()) == 1
src = sources.addNew()
+ for key in self.toCopyFromParent:
+ src.set(key, parent.get(key))
src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
- src.setParent(parentId)
+ src.setParent(parent.getId())
src.setFootprint(heavy)
# Set the psf key based on whether or not the source was
# deblended using the PointSource model.
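The two additions above work as a pair: `__init__` collects every schema key whose field name starts with `merge_footprint`, and `_addChild` (now handed the parent record rather than just its id) copies those flags onto each new child before setting its parent and footprint. A standalone sketch of the pattern with `lsst.afw.table`; the flag name here is illustrative, not the real merge schema:

```python
# Sketch of the parent-flag propagation added in this diff; the
# "merge_footprint_r" field is a hypothetical stand-in.
import lsst.afw.table as afwTable

schema = afwTable.SourceTable.makeMinimalSchema()
schema.addField("merge_footprint_r", type="Flag", doc="detected in the r-band merge")
toCopyFromParent = [item.key for item in schema
                    if item.field.getName().startswith("merge_footprint")]

catalog = afwTable.SourceCatalog(schema)
parent = catalog.addNew()
parent.set("merge_footprint_r", True)

child = catalog.addNew()
for key in toCopyFromParent:          # same loop _addChild now runs
    child.set(key, parent.get(key))
child.setParent(parent.getId())
```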
2 changes: 1 addition & 1 deletion tests/test_deblend.py
@@ -70,7 +70,7 @@ def test_deblend_task(self):
detectionResult = detectionTask.run(table, coadds["r"])
catalog = detectionResult.sources
self.assertEqual(len(catalog), 1)
- _, result = deblendTask.run(coadds, catalog)
+ result = deblendTask.run(coadds, catalog)


class MemoryTester(lsst.utils.tests.MemoryTestCase):
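For existing callers, the test update above is the whole migration: drop the tuple unpacking. A hedged before/after sketch using the test's variable names:

```python
# Before this change, run() returned (fluxCatalogs, templateCatalogs) with
# fluxCatalogs always None, so callers discarded the first element:
#     _, result = deblendTask.run(coadds, catalog)
# After this change, the per-band template catalogs come back directly:
result = deblendTask.run(coadds, catalog)
assert isinstance(result, dict)  # keys are filter names, values are SourceCatalogs
```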