From d0e1dbb1bfc1e48bdd28b3dc1bc7c3213a2a4c67 Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Wed, 26 Nov 2025 12:51:56 -0700
Subject: [PATCH 1/2] Use Astropy time for calibration time headers

This avoids a deprecated API and simplifies the use of the DATE header
in FITS format.
---
 python/lsst/meas/algorithms/simple_curve.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/python/lsst/meas/algorithms/simple_curve.py b/python/lsst/meas/algorithms/simple_curve.py
index ab82e1d54..04bec0202 100755
--- a/python/lsst/meas/algorithms/simple_curve.py
+++ b/python/lsst/meas/algorithms/simple_curve.py
@@ -25,9 +25,9 @@
 
 from scipy.interpolate import interp1d
 from astropy.table import QTable
+from astropy.time import Time
 import astropy.units as u
 from abc import ABC, abstractmethod
-import datetime
 import os
 
 import numpy
@@ -242,12 +242,12 @@ def _check_cols(cols, table):
 
     def _to_table_with_meta(self):
         """Compute standard metadata before writing file out"""
-        now = datetime.datetime.utcnow()
+        now = Time.now()
         table = self.toTable()
         metadata = table.meta
-        metadata["DATE"] = now.isoformat()
+        metadata["DATE"] = now.fits
         metadata["CALIB_CREATION_DATE"] = now.strftime("%Y-%m-%d")
-        metadata["CALIB_CREATION_TIME"] = now.strftime("%T %Z").strip()
+        metadata["CALIB_CREATION_TIME"] = now.strftime("%T")
         return table
 
     def writeText(self, filename):

From a5775204112463428d44b1da66dbaed338e1ccb6 Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Wed, 26 Nov 2025 13:08:24 -0700
Subject: [PATCH 2/2] Release butler resources when no longer needed

---
 tests/convertReferenceCatalogTestBase.py  |  3 ++-
 tests/nopytest_convertReferenceCatalog.py | 28 +++++++++++------------
 tests/test_referenceObjectLoader.py       |  4 +++-
 3 files changed, 19 insertions(+), 16 deletions(-)

diff --git a/tests/convertReferenceCatalogTestBase.py b/tests/convertReferenceCatalogTestBase.py
index dc06e44b7..52cbe9700 100644
--- a/tests/convertReferenceCatalogTestBase.py
+++ b/tests/convertReferenceCatalogTestBase.py
@@ -258,6 +258,7 @@ def setUpClass(cls):
     def setUp(self):
         self.repoPath = tempfile.TemporaryDirectory()  # cleaned up automatically when test ends
         self.butler = self.makeTemporaryRepo(self.repoPath.name, self.depth)
+        self.enterContext(self.butler)
         self.logger = logging.getLogger('lsst.ReferenceObjectLoader')
 
     def tearDown(self):
@@ -283,7 +284,7 @@ def makeTemporaryRepo(rootPath, depth):
         dimensionConfig = lsst.daf.butler.DimensionConfig()
         dimensionConfig['skypix']['common'] = f'htm{depth}'
         lsst.daf.butler.Butler.makeRepo(rootPath, dimensionConfig=dimensionConfig)
-        return lsst.daf.butler.Butler(rootPath, writeable=True)
+        return lsst.daf.butler.Butler.from_config(rootPath, writeable=True)
 
     def checkAllRowsInRefcat(self, refObjLoader, skyCatalog, config):
         """Check that every item in ``skyCatalog`` is in the converted catalog,
diff --git a/tests/nopytest_convertReferenceCatalog.py b/tests/nopytest_convertReferenceCatalog.py
index c8eb67f95..5e0c6fb44 100644
--- a/tests/nopytest_convertReferenceCatalog.py
+++ b/tests/nopytest_convertReferenceCatalog.py
@@ -101,20 +101,20 @@ def testIngestTwoFilesTwoCores(self):
                             transfer="auto")
 
         # Test if we can get back the catalogs, with a new butler.
-        butler = lsst.daf.butler.Butler(repoPath)
-        datasetRefs = list(butler.registry.queryDatasets(config.dataset_config.ref_dataset_name,
-                                                         collections=[run]).expanded())
-        handlers = []
-        for dataRef in datasetRefs:
-            handlers.append(DeferredDatasetHandle(butler=butler, ref=dataRef, parameters=None))
-        loaderConfig = ReferenceObjectLoader.ConfigClass()
-        loader = ReferenceObjectLoader([dataRef.dataId for dataRef in datasetRefs],
-                                       handlers,
-                                       name="testRefCat",
-                                       config=loaderConfig,
-                                       log=self.logger)
-        self.checkAllRowsInRefcat(loader, skyCatalog1, config)
-        self.checkAllRowsInRefcat(loader, skyCatalog2, config)
+        with lsst.daf.butler.Butler.from_config(repoPath) as butler:
+            datasetRefs = list(butler.registry.queryDatasets(config.dataset_config.ref_dataset_name,
+                                                             collections=[run]).expanded())
+            handlers = []
+            for dataRef in datasetRefs:
+                handlers.append(DeferredDatasetHandle(butler=butler, ref=dataRef, parameters=None))
+            loaderConfig = ReferenceObjectLoader.ConfigClass()
+            loader = ReferenceObjectLoader([dataRef.dataId for dataRef in datasetRefs],
+                                           handlers,
+                                           name="testRefCat",
+                                           config=loaderConfig,
+                                           log=self.logger)
+            self.checkAllRowsInRefcat(loader, skyCatalog1, config)
+            self.checkAllRowsInRefcat(loader, skyCatalog2, config)
 
 
 class TestConvertRefcatManager(convertReferenceCatalogTestBase.ConvertReferenceCatalogTestBase,
diff --git a/tests/test_referenceObjectLoader.py b/tests/test_referenceObjectLoader.py
index a56852bff..d8c47ffe3 100644
--- a/tests/test_referenceObjectLoader.py
+++ b/tests/test_referenceObjectLoader.py
@@ -188,6 +188,7 @@ def setUpClass(cls):
 
         # Make a temporary butler to ingest them into.
         butler = cls.makeTemporaryRepo(repoPath, config.dataset_config.indexer.active.depth)
+        cls.enterClassContext(butler)
         dimensions = [f"htm{depth}"]
         datasetType = DatasetType(config.dataset_config.ref_dataset_name,
                                   dimensions,
@@ -206,7 +207,8 @@ def setUpClass(cls):
                                   transfer="auto")
 
         # Test if we can get back the catalogs, with a new butler.
-        butler = lsst.daf.butler.Butler(repoPath)
+        butler = lsst.daf.butler.Butler.from_config(repoPath)
+        cls.enterClassContext(butler)
         datasetRefs = list(butler.registry.queryDatasets(config.dataset_config.ref_dataset_name,
                                                          collections=[run]).expanded())
         handles = []
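
Reviewer note (not part of the patches): a minimal sketch of what the new
header code in simple_curve.py writes, assuming a current astropy; the
printed values are illustrative only.

    from astropy.time import Time

    # Time.now() gives the current UTC time and replaces the
    # deprecated datetime.datetime.utcnow().
    now = Time.now()
    print(now.fits)                  # FITS-format DATE string, e.g. "2025-11-26T19:51:56.000"
    print(now.strftime("%Y-%m-%d"))  # CALIB_CREATION_DATE
    print(now.strftime("%T"))        # CALIB_CREATION_TIME, i.e. HH:MM:SS

The .fits attribute is what simplifies the DATE header: it already conforms
to the FITS date format, with no manual isoformat() handling needed.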
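
Reviewer note (not part of the patches): the cleanup pattern the tests now
rely on, sketched against a hypothetical repository path ("repo/path").
enterContext and enterClassContext are unittest.TestCase additions from
Python 3.11, and the with-block assumes Butler works as a context manager,
as the second patch itself does.

    import unittest

    import lsst.daf.butler

    class ExampleButlerTest(unittest.TestCase):
        """Hypothetical test demonstrating butler resource release."""

        def setUp(self):
            # enterContext registers the butler for closing when the
            # test finishes, even if the test fails part-way through.
            self.butler = lsst.daf.butler.Butler.from_config("repo/path", writeable=True)
            self.enterContext(self.butler)

        def testQuery(self):
            # Short-lived butlers can instead be scoped with a with-block,
            # mirroring the change in nopytest_convertReferenceCatalog.py.
            with lsst.daf.butler.Butler.from_config("repo/path") as butler:
                list(butler.registry.queryCollections())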