Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DM-17023: update to new daf_butler APIs. #312

Merged
merged 3 commits on Sep 24, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
22 changes: 18 additions & 4 deletions python/lsst/pipe/tasks/calibrate.py
Expand Up @@ -84,7 +84,7 @@ class CalibrateConnections(pipeBase.PipelineTaskConnections, dimensions=("instru

astromRefCat = cT.PrerequisiteInput(
doc="Reference catalog to use for astrometry",
name="ref_cat",
name="cal_ref_cat",
storageClass="SimpleCatalog",
dimensions=("skypix",),
deferLoad=True,
Expand All @@ -93,7 +93,7 @@ class CalibrateConnections(pipeBase.PipelineTaskConnections, dimensions=("instru

photoRefCat = cT.PrerequisiteInput(
doc="Reference catalog to use for photometric calibration",
name="ref_cat",
name="cal_ref_cat",
storageClass="SimpleCatalog",
dimensions=("skypix",),
deferLoad=True,
Expand Down Expand Up @@ -286,6 +286,21 @@ class CalibrateConfig(pipeBase.PipelineTaskConfig, pipelineConnections=Calibrate
"normal calexp but as a fakes_calexp."
)

def validate(self):
    """Validate the configuration, checking that the Gen2 and Gen3
    reference catalog names are consistent.

    Raises
    ------
    ValueError
        Raised if a Gen2 loader's ``ref_dataset_name`` differs from the
        dataset type name of the corresponding Gen3 connection.
    """
    super().validate()

    def _checkRefCatConsistency(loader, gen3Name, kind):
        # Gen2 loaders name their reference catalog via ref_dataset_name,
        # while Gen3 uses the connection's dataset type name; the two must
        # agree until Gen2 is retired.  Loaders without a ref_dataset_name
        # attribute (pure Gen3 loaders) are skipped.
        gen2Name = getattr(loader, "ref_dataset_name", None)
        if gen2Name is not None and gen2Name != gen3Name:
            raise ValueError(
                f"Gen2 ({gen2Name}) and Gen3 ({gen3Name}) {kind} reference "
                f"catalogs are different. These options must be kept in sync until Gen2 is retired."
            )

    _checkRefCatConsistency(self.astromRefObjLoader, self.connections.astromRefCat, "astrometry")
    _checkRefCatConsistency(self.photoRefObjLoader, self.connections.photoRefCat, "photometry")


## \addtogroup LSST_task_documentation
## \{
Expand Down Expand Up @@ -575,8 +590,7 @@ def runDataRef(self, dataRef, exposure=None, background=None, icSourceCat=None,

def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
expId, expBits = butlerQC.registry.packDataId("visit_detector",
butlerQC.quantum.dataId,
expId, expBits = butlerQC.quantum.dataId.pack("visit_detector",
returnMaxBits=True)
inputs['exposureIdInfo'] = ExposureIdInfo(expId, expBits)

Expand Down
5 changes: 2 additions & 3 deletions python/lsst/pipe/tasks/characterizeImage.py
Expand Up @@ -305,10 +305,9 @@ def DebugInfo(name):
def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Run the task on a single quantum, synthesizing an exposure ID if
    one was not supplied.

    Parameters
    ----------
    butlerQC : quantum context
        Butler interface used to fetch inputs and store outputs.
    inputRefs, outputRefs : connection reference structs
        Dataset references for this quantum's inputs and outputs.
    """
    # NOTE(review): the original hunk interleaved the removed
    # butlerQC.registry.makeDataIdPacker(...) lines with their
    # replacement; this is the post-change version (2 additions,
    # 3 deletions per the diff header).
    inputs = butlerQC.get(inputRefs)
    if 'exposureIdInfo' not in inputs:
        # Build a unique exposure ID by packing the quantum's
        # visit+detector data ID into an integer.
        exposureIdInfo = ExposureIdInfo()
        exposureIdInfo.expId, exposureIdInfo.expBits = butlerQC.quantum.dataId.pack(
            "visit_detector", returnMaxBits=True)
        inputs['exposureIdInfo'] = exposureIdInfo
    outputs = self.run(**inputs)
    butlerQC.put(outputs, outputRefs)
Expand Down
6 changes: 2 additions & 4 deletions python/lsst/pipe/tasks/deblendCoaddSourcesPipeline.py
Expand Up @@ -155,8 +155,7 @@ def __init__(self, initInputs, **kwargs):

def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Run deblending on a single quantum.

    Parameters
    ----------
    butlerQC : quantum context
        Butler interface used to fetch inputs and store outputs.
    inputRefs, outputRefs : connection reference structs
        Dataset references for this quantum's inputs and outputs.
    """
    # NOTE(review): the original hunk interleaved the removed
    # butlerQC.registry.packDataId(...) call with its replacement;
    # this is the post-change version.
    inputs = butlerQC.get(inputRefs)
    # Seed the source IdFactory from the packed tract+patch data ID so
    # source IDs are unique across patches; the upper (64 - maxBits)
    # bits hold the packed ID, the rest count sources.
    packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
    inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
    outputs = self.run(**inputs)
    butlerQC.put(outputs, outputRefs)
Expand Down Expand Up @@ -196,8 +195,7 @@ def __init__(self, initInputs, **kwargs):

def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
packedId, maxBits = butlerQC.registry.packDataId("tract_patch", inputRefs.mergedDetections.dataId,
returnMaxBits=True)
packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
inputs["filters"] = [dRef.dataId["abstract_filter"] for dRef in inputRefs.coadds]
outputs = self.run(**inputs)
Expand Down
2 changes: 1 addition & 1 deletion python/lsst/pipe/tasks/makeCoaddTempExp.py
Expand Up @@ -670,7 +670,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):
dataIdList = [ref.dataId for ref in inputRefs.calExpList]

# Construct list of packed integer IDs expected by `run`
ccdIdList = [butlerQC.registry.packDataId("visit_detector", dataId) for dataId in dataIdList]
ccdIdList = [dataId.pack("visit_detector") for dataId in dataIdList]

# Extract integer visitId requested by `run`
visits = [dataId['visit'] for dataId in dataIdList]
Expand Down
29 changes: 12 additions & 17 deletions python/lsst/pipe/tasks/makeGen3SkyMap.py
Expand Up @@ -23,6 +23,8 @@
from lsst.daf.butler import DatasetType
from lsst.skymap import skyMapRegistry

from sqlalchemy.exc import IntegrityError


class MakeGen3SkyMapConfig(pexConfig.Config):
"""Config for MakeGen3SkyMapTask
Expand Down Expand Up @@ -76,24 +78,17 @@ def run(self, butler):
skyMap = self.config.skyMap.apply()
skyMap.logSkyMapInfo(self.log)
skyMapHash = skyMap.getSha1()
try:
existing, = butler.registry.query("SELECT skymap FROM skymap WHERE hash=:hash",
hash=skyMapHash)
raise RuntimeError(
(f"SkyMap with name {existing.name} and hash {skyMapHash} already exist in "
f"the butler collection {self.collection}, SkyMaps must be unique within "
"a collection")
)
except ValueError:
self.log.info(f"Inserting SkyMap {self.config.name} with hash={skyMapHash}")
with butler.registry.transaction():
self.log.info(f"Inserting SkyMap {self.config.name} with hash={skyMapHash}")
with butler.registry.transaction():
try:
skyMap.register(self.config.name, butler.registry)
butler.registry.registerDatasetType(DatasetType(name=self.config.datasetTypeName,
dimensions=["skymap"],
storageClass="SkyMap",
universe=butler.registry.dimensions))
butler.put(skyMap, self.config.datasetTypeName, {"skymap": self.config.name})

except IntegrityError as err:
raise RuntimeError("A skymap with the same name or hash already exists.") from err
butler.registry.registerDatasetType(DatasetType(name=self.config.datasetTypeName,
dimensions=["skymap"],
storageClass="SkyMap",
universe=butler.registry.dimensions))
butler.put(skyMap, self.config.datasetTypeName, {"skymap": self.config.name})
return pipeBase.Struct(
skyMap=skyMap
)
3 changes: 1 addition & 2 deletions python/lsst/pipe/tasks/mergeDetections.py
Expand Up @@ -245,8 +245,7 @@ def runDataRef(self, patchRefList):

def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
packedId, maxBits = butlerQC.registry.packDataId("tract_patch", outputRefs.outputCatalog.dataId,
returnMaxBits=True)
packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
inputs["skySeed"] = packedId
inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
catalogDict = {ref.dataId['abstract_filter']: cat for ref, cat in zip(inputRefs.catalogs,
Expand Down
17 changes: 12 additions & 5 deletions python/lsst/pipe/tasks/multiBand.py
Expand Up @@ -285,9 +285,7 @@ def runDataRef(self, patchRef):

def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
packedId, maxBits = butlerQC.registry.packDataId("tract_patch_abstract_filter",
inputRefs.exposure.dataId,
returnMaxBits=True)
packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_abstract_filter", returnMaxBits=True)
inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
inputs["expId"] = packedId
outputs = self.run(**inputs)
Expand Down Expand Up @@ -740,6 +738,16 @@ def setDefaults(self):
self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
'INEXACT_PSF']

def validate(self):
    """Validate the configuration, requiring the Gen2 and Gen3 reference
    catalog names to agree.

    Raises
    ------
    ValueError
        Raised if the Gen2 loader's ``ref_dataset_name`` differs from the
        Gen3 ``refCat`` connection name.
    """
    super().validate()
    gen2RefCat = getattr(self.refObjLoader, "ref_dataset_name", None)
    gen3RefCat = self.connections.refCat
    # Loaders without a ref_dataset_name (pure Gen3) need no check.
    if gen2RefCat is None or gen2RefCat == gen3RefCat:
        return
    raise ValueError(
        f"Gen2 ({gen2RefCat}) and Gen3 ({gen3RefCat}) reference catalogs "
        f"are different. These options must be kept in sync until Gen2 is retired."
    )


## @addtogroup LSST_task_documentation
## @{
## @page MeasureMergedCoaddSourcesTask
Expand Down Expand Up @@ -936,8 +944,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

# Get unique integer ID for IdFactory and RNG seeds
packedId, maxBits = butlerQC.registry.packDataId("tract_patch", outputRefs.outputSources.dataId,
returnMaxBits=True)
packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
inputs['exposureId'] = packedId
idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
# Transform inputCatalog
Expand Down
3 changes: 1 addition & 2 deletions python/lsst/pipe/tasks/processCcdWithFakes.py
Expand Up @@ -229,8 +229,7 @@ def runDataRef(self, dataRef):
def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
if 'exposureIdInfo' not in inputs.keys():
expId, expBits = butlerQC.registry.packDataId("visit_detector", butlerQC.quantum.dataId,
returnMaxBits=True)
expId, expBits = butlerQC.quantum.dataId.pack("visit_detector", returnMaxBits=True)
inputs['exposureIdInfo'] = ExposureIdInfo(expId, expBits)

if inputs["wcs"] is None:
Expand Down