Adapt to daf_butler API changes.
TallJimbo committed Nov 21, 2023
1 parent efd52a1 commit 5c286fe
Showing 3 changed files with 16 additions and 14 deletions.
17 changes: 10 additions & 7 deletions python/lsst/pipe/tasks/hips.py
@@ -41,7 +41,7 @@

from lsst.sphgeom import RangeSet, HealpixPixelization
from lsst.utils.timer import timeMethod
-from lsst.daf.butler import Butler, DataCoordinate, DatasetRef, Quantum, SkyPixDimension
+from lsst.daf.butler import Butler, DataCoordinate, DatasetRef, Quantum
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.afw.geom as afwGeom
@@ -548,16 +548,19 @@ def build_quantum_graph(
output_dataset_type = dataset_types.outputs[task_def.connections.hips_exposures.name]
incidental_output_dataset_types = dataset_types.outputs.copy()
incidental_output_dataset_types.remove(output_dataset_type)
-(hpx_output_dimension,) = (d for d in output_dataset_type.dimensions
-                           if isinstance(d, SkyPixDimension))
+(hpx_output_dimension,) = (
+    registry.dimensions.skypix_dimensions[d] for d in output_dataset_type.dimensions.skypix.names
+)

constraint_hpx_pixelization = registry.dimensions[f"healpix{constraint_order}"].pixelization
common_skypix_name = registry.dimensions.commonSkyPix.name
common_skypix_pixelization = registry.dimensions.commonSkyPix.pixelization

# We will need all the pixels at the quantum resolution as well
-task_dimensions = registry.dimensions.extract(task_def.connections.dimensions)
-(hpx_dimension,) = (d for d in task_dimensions if d.name != "band")
+task_dimensions = registry.dimensions.conform(task_def.connections.dimensions)
+(hpx_dimension,) = (
+    registry.dimensions.skypix_dimensions[d] for d in task_dimensions.names if d != "band"
+)
hpx_pixelization = hpx_dimension.pixelization

if hpx_pixelization.level < constraint_order:
@@ -608,7 +611,7 @@
bind=bind
).expanded()
inputs_by_patch = defaultdict(set)
-patch_dimensions = registry.dimensions.extract(["patch"])
+patch_dimensions = registry.dimensions.conform(["patch"])
for input_ref in input_refs:
inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(input_ref)
if not inputs_by_patch:
@@ -862,7 +865,7 @@ class GenerateHipsTask(pipeBase.PipelineTask):
def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)

-dims = inputRefs.hips_exposure_handles[0].dataId.names
+dims = inputRefs.hips_exposure_handles[0].dataId.dimensions.names
order = None
for dim in dims:
if "healpix" in dim:
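For context, a minimal sketch of the dimension-lookup pattern the hips.py hunks adopt: conform() replaces extract(), and skypix dimensions are found by name through skypix_dimensions instead of isinstance checks. It assumes a Butler repository at a placeholder path "REPO" and a task whose dimensions are a single healpix level plus band; the concrete level 11 is illustrative, not taken from this commit.

    from lsst.daf.butler import Butler

    butler = Butler("REPO")  # placeholder repo path, not part of this commit
    universe = butler.registry.dimensions

    # conform() replaces the old extract() and returns the conformed dimension group.
    task_dimensions = universe.conform(["healpix11", "band"])

    # Skypix dimensions are now looked up by name via skypix_dimensions,
    # rather than filtering on isinstance(d, SkyPixDimension).
    (hpx_dimension,) = (
        universe.skypix_dimensions[name]
        for name in task_dimensions.names
        if name != "band"
    )
    print(hpx_dimension.pixelization.level)  # e.g. 11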
10 changes: 5 additions & 5 deletions python/lsst/pipe/tasks/postprocess.py
@@ -1022,7 +1022,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):
raise ValueError("config.functorFile is None. "
"Must be a valid path to yaml in order to run Task as a PipelineTask.")
result = self.run(handle=inputs['inputCatalog'], funcs=self.funcs,
-dataId=outputRefs.outputCatalog.dataId.full)
+dataId=dict(outputRefs.outputCatalog.dataId.mapping))
outputs = pipeBase.Struct(outputCatalog=result)
butlerQC.put(outputs, outputRefs)

@@ -1071,7 +1071,7 @@ def transform(self, band, handles, funcs, dataId):
if dataId and self.config.columnsFromDataId:
for key in self.config.columnsFromDataId:
if key in dataId:
-df[str(key)] = dataId[key]
+df[key] = dataId[key]
else:
raise ValueError(f"'{key}' in config.columnsFromDataId not found in dataId: {dataId}")

@@ -1400,7 +1400,7 @@ def __init__(self, **kwargs):

def runQuantum(self, butlerQC, inputRefs, outputRefs):
dataRefs = butlerQC.get(inputRefs.calexp)
-visit = dataRefs[0].dataId.byName()['visit']
+visit = dataRefs[0].dataId['visit']

self.log.debug("Concatenating metadata from %d per-detector calexps (visit %d)",
len(dataRefs), visit)
@@ -1770,7 +1770,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):
# Add ccdVisitId to allow joining with CcdVisitTable
idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
inputs['ccdVisitId'] = idGenerator.catalog_id
-inputs['band'] = butlerQC.quantum.dataId.full['band']
+inputs['band'] = butlerQC.quantum.dataId['band']
outputs = self.run(**inputs)
butlerQC.put(outputs, outputRefs)

@@ -1870,7 +1870,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):
raise ValueError("config.functorFile is None. "
"Must be a valid path to yaml in order to run Task as a PipelineTask.")
outputs = self.run(inputs['inputCatalogs'], inputs['referenceCatalog'], funcs=self.funcs,
-dataId=outputRefs.outputCatalog.dataId.full)
+dataId=dict(outputRefs.outputCatalog.dataId.mapping))

butlerQC.put(outputs, outputRefs)

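The postprocess.py hunks all follow the same DataCoordinate access change: the removed .full and .byName() accessors give way to direct indexing and the .mapping attribute. A minimal sketch, assuming data_id is an expanded DataCoordinate (e.g. from a DatasetRef) that carries band and visit; the helper itself is illustrative, not part of the commit.

    def summarize(data_id):
        """Illustrative helper, not part of the commit."""
        # Old API: data_id.full["band"], data_id.byName()["visit"].
        band = data_id["band"]           # index the DataCoordinate directly
        visit = data_id["visit"]
        plain = dict(data_id.mapping)    # plain str -> value dict, replaces .full
        return band, visit, plain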
3 changes: 1 addition & 2 deletions python/lsst/pipe/tasks/skyCorrection.py
@@ -27,7 +27,6 @@
import lsst.afw.math as afwMath
import lsst.pipe.base.connectionTypes as cT
import numpy as np
-from lsst.daf.butler import DimensionGraph
from lsst.pex.config import Config, ConfigField, ConfigurableField, Field, FieldValidationError
from lsst.pipe.base import PipelineTask, PipelineTaskConfig, PipelineTaskConnections, Struct
from lsst.pipe.tasks.background import (
@@ -59,7 +58,7 @@ def _skyFrameLookup(datasetType, registry, quantumDataId, collections):
results : `list` [`lsst.daf.butler.DatasetRef`]
List of datasets that will be used as sky calibration frames.
"""
-newDataId = quantumDataId.subset(DimensionGraph(registry.dimensions, names=["instrument", "visit"]))
+newDataId = quantumDataId.subset(registry.dimensions.conform(["instrument", "visit"]))
skyFrames = []
for dataId in registry.queryDataIds(["visit", "detector"], dataId=newDataId).expanded():
skyFrame = registry.findDataset(
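The skyCorrection.py change drops the explicit DimensionGraph construction in favour of conforming dimension names against the registry's universe. A minimal sketch, assuming registry is a Butler Registry and quantum_data_id is an expanded data ID containing at least instrument and visit; the variable names are illustrative.

    # conform() and subset() are the only calls taken from the diff above.
    visit_dims = registry.dimensions.conform(["instrument", "visit"])
    new_data_id = quantum_data_id.subset(visit_dims)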
