Cleanup other errors introduced by the verify merge
parejkoj committed Jul 26, 2018
1 parent 5c0f4d2 commit e51e047
Showing 1 changed file with 4 additions and 39 deletions.
43 changes: 4 additions & 39 deletions python/lsst/validate/drp/matchreduce.py
@@ -25,12 +25,10 @@

import numpy as np
import astropy.units as u
import sqlite3
from sqlalchemy.exc import OperationalError

import lsst.afw.geom as afwGeom
import lsst.afw.image.utils as afwImageUtils
import lsst.afw.image as afwImage
import lsst.daf.persistence as dafPersist
from lsst.afw.table import (SourceCatalog, SchemaMapper, Field,
MultiMatch, SimpleRecord, GroupView,
@@ -46,7 +44,7 @@


def build_matched_dataset(repo, dataIds, matchRadius=None, safeSnr=50.,
useJointCal=False):
useJointCal=False, skipTEx=False):
"""Construct a container for matched star catalogs from multple visits, with filtering,
summary statistics, and modelling.
@@ -111,10 +109,6 @@ def build_matched_dataset(repo, dataIds, matchRadius=None, safeSnr=50.,
*Not serialized.*
"""


def build_matched_dataset(repo, dataIds, matchRadius=None, safeSnr=50.,
useJointCal=False, skipTEx=False):
blob = Blob('MatchedMultiVisitDataset')

if not matchRadius:
@@ -128,7 +122,6 @@ def build_matched_dataset(repo, dataIds, matchRadius=None, safeSnr=50.,
blob['useJointCal'] = Datum(quantity=useJointCal,
description='Whether jointcal/meas_mosaic calibrations were used')


# Match catalogs across visits
blob._catalog, blob._matchedCatalog = \
_loadAndMatchCatalogs(repo, dataIds, matchRadius,
@@ -140,6 +133,7 @@ def build_matched_dataset(repo, dataIds, matchRadius=None, safeSnr=50.,
_reduceStars(blob, blob._matchedCatalog, safeSnr)
return blob


def _loadAndMatchCatalogs(repo, dataIds, matchRadius,
useJointCal=False, skipTEx=False):
"""Load data from specific visit. Match with reference.
@@ -251,45 +245,15 @@ def _loadAndMatchCatalogs(repo, dataIds, matchRadius,
print("Skipping this dataId.")
continue

# We don't want to put this above the first "if useJointCal block"
# because we need to use the first `butler.get` above to quickly
# catch data IDs with no usable outputs.
try:
calexpMetadata = butler.get("calexp_md", vId)
except (FitsError, dafPersist.NoResults) as e:
print(e)
print("Could not open calibrated image file for ", vId)
print("Skipping %s " % repr(vId))
continue
except TypeError as te:
# DECam images that haven't been properly reformatted
# can trigger a TypeError because of a residual FITS header
# LTV2 which is a float instead of the expected integer.
# This generates an error of the form:
#
# lsst::pex::exceptions::TypeError: 'LTV2 has mismatched type'
#
# See, e.g., DM-2957 for details.
print(te)
print("Calibration image header information malformed.")
print("Skipping %s " % repr(vId))
continue

calib = afwImage.Calib(calexpMetadata)

# We don't want to put this above the first "if useJointCal block"
# because we need to use the first `butler.get` above to quickly
# catch data IDs with no usable outputs.
try:
# HSC supports these flags, which dramatically improve I/O
# performance; support for other cameras is DM-6927.
oldSrc = butler.get('src', vId, flags=SOURCE_IO_NO_FOOTPRINTS)
calexp = butler.get("calexp", vId, flags=SOURCE_IO_NO_FOOTPRINTS)
except:
except OperationalError:
oldSrc = butler.get('src', vId)
calexp = butler.get("calexp", vId)

psf = calexp.getPsf()

print(len(oldSrc), "sources in ccd %s visit %s" %
(vId[ccdKeyName], vId["visit"]))
@@ -331,6 +295,7 @@ def _loadAndMatchCatalogs(repo, dataIds, matchRadius,

return srcVis, allMatches


def _reduceStars(blob, allMatches, safeSnr=50.0):
"""Calculate summary statistics for each star. These are persisted
as object attributes.
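A minimal usage sketch of the updated build_matched_dataset call, assuming a hypothetical gen2 Butler repo path and dataId dictionaries (the real keys depend on the camera; this file only relies on "visit" and the ccd key):

from lsst.validate.drp.matchreduce import build_matched_dataset

# Hypothetical repo path and dataIds -- not taken from this commit.
repo = "/path/to/processed/repo"
dataIds = [{"visit": 100, "ccd": 1, "filter": "r"},
           {"visit": 101, "ccd": 1, "filter": "r"}]

# skipTEx is the keyword added in this commit; setting it True presumably
# bypasses the TEx (ellipticity-correlation) reductions downstream of the
# star matching.
blob = build_matched_dataset(repo, dataIds, safeSnr=50.,
                             useJointCal=False, skipTEx=True)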

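The src and calexp reads in _loadAndMatchCatalogs first try the footprint-free fast path and fall back to a plain butler.get if that raises. A generic sketch of the same try-fast-then-fall-back pattern, with illustrative names that are not from the repository:

def get_with_fallback(getter, dataset, data_id, **fast_kwargs):
    # Try the faster read first (e.g. skipping source footprints); any
    # failure just means we pay the full I/O cost on the retry.
    try:
        return getter(dataset, data_id, **fast_kwargs)
    except Exception:
        return getter(dataset, data_id)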