DM-6999: Logging framework migration #9

Merged 2 commits on Sep 2, 2016
python/lsst/meas/deblender/baseline.py (40 additions & 40 deletions)
@@ -267,13 +267,13 @@ def deblend(footprint, maskedImage, psf, psffwhm,
% strayFluxAssignment) + str(validStrayAssign))

if log is None:
- import lsst.pex.logging as pexLogging
+ import lsst.log as lsstLog

component = 'meas_deblender.baseline'
- log = pexLogging.Log(pexLogging.Log.getDefaultLog(), component)
+ log = lsstLog.Log.getLogger(component)

- if verbose:  # pexLogging defines DEBUG in terms of "importance" == -verbosity
-     pexLogging.Trace.setVerbosity(component, -pexLogging.Log.DEBUG)
+ if verbose:
+     log.setLevel(lsstLog.Log.TRACE)

img = maskedImage.getImage()
varimg = maskedImage.getVariance()
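
For reference, the new setup introduced in this hunk can be exercised on its own. The following is a minimal sketch that uses only the lsst.log calls visible in the diff (Log.getLogger, setLevel, Log.TRACE, and trace() with a format string plus arguments); the verbose flag and the sample value are stand-ins for illustration.

```python
# Minimal sketch of the new lsst.log setup shown above.
import lsst.log as lsstLog

log = lsstLog.Log.getLogger('meas_deblender.baseline')

verbose = True  # stand-in for the deblend() argument of the same name
if verbose:
    # lsst.log orders levels conventionally (TRACE below DEBUG below INFO),
    # so extra verbosity is enabled by lowering the threshold to TRACE
    # rather than via pexLogging's negative-"importance" convention.
    log.setLevel(lsstLog.Log.TRACE)

# trace()/debug() take a format string followed by its arguments,
# matching the converted calls in this diff.
log.trace('Estimated sigma1 = %f', 1.0)
```
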
@@ -300,7 +300,7 @@ def deblend(footprint, maskedImage, psf, psffwhm,
# FIXME -- just within the bbox?
stats = afwMath.makeStatistics(varimg, mask, afwMath.MEDIAN)
sigma1 = math.sqrt(stats.getValue(afwMath.MEDIAN))
- log.logdebug('Estimated sigma1 = %f' % sigma1)
+ log.trace('Estimated sigma1 = %f', sigma1)
Contributor:
Why the change in level from debug to trace (here and above on line 276: Log.TRACE)?


# get object that will hold our results
res = PerFootprint(fp, peaks=peaks)
@@ -312,28 +312,28 @@ def deblend(footprint, maskedImage, psf, psffwhm,
tinyFootprintSize=tinyFootprintSize)

# Create templates...
- log.logdebug(('Creating templates for footprint at x0,y0,W,H = ' +
-               '(%i, %i, %i, %i)') % (x0, y0, W, H))
+ log.trace('Creating templates for footprint at x0,y0,W,H = ' +
+           '(%i, %i, %i, %i)', x0, y0, W, H)
for peaki, pkres in enumerate(res.peaks):
- log.logdebug('Deblending peak %i of %i' % (peaki, len(res.peaks)))
+ log.trace('Deblending peak %i of %i', peaki, len(res.peaks))
if pkres.skip or pkres.deblendedAsPsf:
continue
pk = pkres.peak
cx, cy = pk.getIx(), pk.getIy()
if not imbb.contains(afwGeom.Point2I(cx,cy)):
- log.logdebug('Peak center is not inside image; skipping %i' % pkres.pki)
+ log.trace('Peak center is not inside image; skipping %i', pkres.pki)
pkres.setOutOfBounds()
continue
- log.logdebug('computing template for peak %i at (%i, %i)' % (pkres.pki, cx, cy))
+ log.trace('computing template for peak %i at (%i, %i)', pkres.pki, cx, cy)
S, patched = butils.buildSymmetricTemplate(maskedImage, fp, pk, sigma1, True, patchEdges)
# SWIG doesn't know how to unpack a std::pair into a 2-tuple...
# (in this case, a nested pair)
t1, tfoot = S[0], S[1]
del S

if t1 is None:
- log.logdebug(('Peak %i at (%i, %i): failed to build symmetric ' +
-               'template') % (pkres.pki, cx, cy))
+ log.trace('Peak %i at (%i, %i): failed to build symmetric ' +
+           'template', pkres.pki, cx, cy)
pkres.setFailedSymmetricTemplate()
continue

@@ -344,10 +344,10 @@ def deblend(footprint, maskedImage, psf, psffwhm,
pkres.setOrigTemplate(t1, tfoot)

if rampFluxAtEdge:
- log.logdebug('Checking for significant flux at edge: sigma1=%g' % sigma1)
+ log.trace('Checking for significant flux at edge: sigma1=%g', sigma1)
if (rampFluxAtEdge and
butils.hasSignificantFluxAtEdge(t1, tfoot, 3*sigma1)):
log.logdebug("Template %i has significant flux at edge: ramping" % pkres.pki)
log.trace("Template %i has significant flux at edge: ramping", pkres.pki)
try:
(t2, tfoot2, patched) = _handle_flux_at_edge(log, psffwhm, t1, tfoot, fp,
maskedImage, x0, x1, y0, y1,
@@ -366,20 +366,20 @@ def deblend(footprint, maskedImage, psf, psffwhm,
if medianSmoothTemplate:
filtsize = medianFilterHalfsize*2 + 1
if t1.getWidth() >= filtsize and t1.getHeight() >= filtsize:
- log.logdebug('Median filtering template %i' % pkres.pki)
+ log.trace('Median filtering template %i', pkres.pki)
# We want the output to go in "t1", so copy it into
# "inimg" for input
inimg = t1.Factory(t1, True)
butils.medianFilter(inimg, t1, medianFilterHalfsize)
# possible save this median-filtered template
pkres.setMedianFilteredTemplate(t1, tfoot)
else:
- log.logdebug(('Not median-filtering template %i: size '
-               + '%i x %i smaller than required %i x %i') %
-               (pkres.pki, t1.getWidth(), t1.getHeight(), filtsize, filtsize))
+ log.trace('Not median-filtering template %i: size ' +
+           '%i x %i smaller than required %i x %i',
+           pkres.pki, t1.getWidth(), t1.getHeight(), filtsize, filtsize)

if monotonicTemplate:
- log.logdebug('Making template %i monotonic' % pkres.pki)
+ log.trace('Making template %i monotonic', pkres.pki)
butils.makeMonotonic(t1, pk)

if clipFootprintToNonzero:
@@ -444,7 +444,7 @@ def deblend(footprint, maskedImage, psf, psffwhm,
# FIXME -- Remove templates that are too similar (via dot-product test)?

# Now apportion flux according to the templates
- log.logdebug('Apportioning flux among %i templates' % len(tmimgs))
+ log.trace('Apportioning flux among %i templates', len(tmimgs))
sumimg = afwImage.ImageF(bb)
#.getDimensions())
#sumimg.setXY0(bb.getMinX(), bb.getMinY())
@@ -569,7 +569,7 @@ def _fitPsfs(fp, peaks, fpres, log, psf, psffwhm, img, varimg,
peakF = [pk.getF() for pk in peaks]

for pki,(pk,pkres,pkF) in enumerate(zip(peaks, fpres.peaks, peakF)):
- log.logdebug('Peak %i' % pki)
+ log.trace('Peak %i', pki)
_fitPsf(fp, fmask, pk, pkF, pkres, fbb, peaks, peakF, log, cpsf, psffwhm,
img, varimg, psfChisqCut1, psfChisqCut2, psfChisqCut2b,
**kwargs)
@@ -632,15 +632,15 @@ def _fitPsf(fp, fmask, pk, pkF, pkres, fbb, peaks, peaksF, log, psf, psffwhm,
xlo, xhi = stampbb.getMinX(), stampbb.getMaxX()
ylo, yhi = stampbb.getMinY(), stampbb.getMaxY()
if xlo > xhi or ylo > yhi:
- log.logdebug('Skipping this peak: out of bounds')
+ log.trace('Skipping this peak: out of bounds')
pkres.setOutOfBounds()
return

# drop tiny footprints too?
if min(stampbb.getWidth(), stampbb.getHeight()) <= max(tinyFootprintSize, 2):
# Minimum size limit of 2 comes from the "PSF dx" calculation, which involves shifting the PSF
# by one pixel to the left and right.
- log.logdebug('Skipping this peak: tiny footprint / close to edge')
+ log.trace('Skipping this peak: tiny footprint / close to edge')
pkres.setTinyFootprint()
return

@@ -655,7 +655,7 @@ def _fitPsf(fp, fmask, pk, pkF, pkres, fbb, peaks, peaksF, log, psf, psffwhm,
if not opsfimg.getBBox(afwImage.LOCAL).overlaps(stampbb):
continue
otherpeaks.append(opsfimg)
- log.logdebug('%i other peaks within range' % len(otherpeaks))
+ log.trace('%i other peaks within range', len(otherpeaks))

# Now we are going to do a least-squares fit for the flux in this
# PSF, plus a decenter term, a linear sky, and fluxes of nearby
@@ -789,7 +789,7 @@ def _overlap(xlo, xhi, xmin, xmax):
w = np.sqrt(rw[valid]/var_sub[valid])
# save the effective number of pixels
sumr = np.sum(rw[valid])
- log.log(-9, 'sumr = %g' % sumr)
+ log.debug('sumr = %g' % sumr)

del ii

@@ -833,7 +833,7 @@ def _overlap(xlo, xhi, xmin, xmax):
pkres.setPsfFitFailed()
return

- log.log(-9, 'r1 r2 %s %s' % (r1, r2))
+ log.debug('r1 r2 %s %s' % (r1, r2))

# r is weighted chi-squared = sum over pixels: ramp * (model -
# data)**2/sigma**2
@@ -847,17 +847,17 @@ def _overlap(xlo, xhi, xmin, xmax):
chisq2 = 1e30
dof1 = sumr - len(X1)
dof2 = sumr - len(X2)
- log.log(-9, 'dof1, dof2 %g %g' % (dof1, dof2))
+ log.debug('dof1, dof2 %g %g' % (dof1, dof2))

# This can happen if we're very close to the edge (?)
if dof1 <= 0 or dof2 <= 0:
- log.logdebug('Skipping this peak: bad DOF %g, %g' % (dof1, dof2))
+ log.trace('Skipping this peak: bad DOF %g, %g', dof1, dof2)
pkres.setBadPsfDof()
return

q1 = chisq1/dof1
q2 = chisq2/dof2
- log.logdebug('PSF fits: chisq/dof = %g, %g' % (q1, q2))
+ log.trace('PSF fits: chisq/dof = %g, %g', q1, q2)
ispsf1 = (q1 < psfChisqCut1)
ispsf2 = (q2 < psfChisqCut2)

@@ -872,8 +872,8 @@ def _overlap(xlo, xhi, xmin, xmax):
dx = fdx/f0
dy = fdy/f0
ispsf2 = ispsf2 and (abs(dx) < 1. and abs(dy) < 1.)
- log.logdebug('isPSF2 -- checking derivatives: dx,dy = %g, %g -> %s' %
-              (dx, dy, str(ispsf2)))
+ log.trace('isPSF2 -- checking derivatives: dx,dy = %g, %g -> %s',
+           dx, dy, str(ispsf2))
if not ispsf2:
pkres.psfFitBigDecenter = True

@@ -921,8 +921,8 @@ def _overlap(xlo, xhi, xmin, xmax):
ispsf2 = (qb < psfChisqCut2b)
q2 = qb
X2 = Xb
- log.logdebug('shifted PSF: new chisq/dof = %g; good? %s' %
-              (qb, ispsf2))
+ log.trace('shifted PSF: new chisq/dof = %g; good? %s',
+           qb, ispsf2)
pkres.psfFit3 = (chisqb, dofb)

# Which one do we keep?
@@ -931,8 +931,8 @@ def _overlap(xlo, xhi, xmin, xmax):
Xpsf = X2
chisq = chisq2
dof = dof2
- log.log(-9, 'dof %g' % dof)
- log.logdebug('Keeping shifted-PSF model')
+ log.debug('dof %g' % dof)
+ log.trace('Keeping shifted-PSF model')
cx += dx
cy += dy
pkres.psfFitWithDecenter = True
@@ -941,8 +941,8 @@ def _overlap(xlo, xhi, xmin, xmax):
Xpsf = X1
chisq = chisq1
dof = dof1
- log.log(-9, 'dof %g' % dof)
- log.logdebug('Keeping unshifted PSF model')
+ log.debug('dof %g' % dof)
+ log.trace('Keeping unshifted PSF model')

ispsf = (ispsf1 or ispsf2)

@@ -988,7 +988,7 @@ def _overlap(xlo, xhi, xmin, xmax):
pkres.psfFitR1 = R1
pkres.psfFitStampExtent = (xlo, xhi, ylo, yhi)
pkres.psfFitCenter = (cx, cy)
- log.log(-9, 'saving chisq,dof %g %g' % (chisq, dof))
+ log.debug('saving chisq,dof %g %g' % (chisq, dof))
pkres.psfFitBest = (chisq, dof)
pkres.psfFitParams = Xpsf
pkres.psfFitFlux = Xpsf[I_psf]
@@ -999,7 +999,7 @@ def _overlap(xlo, xhi, xmin, xmax):

# replace the template image by the PSF + derivatives
# image.
- log.logdebug('Deblending as PSF; setting template to PSF model')
+ log.trace('Deblending as PSF; setting template to PSF model')

# Instantiate the PSF model and clip it to the footprint
psfimg = psf.computeImage(cx, cy)
@@ -1031,7 +1031,7 @@ def _handle_flux_at_edge(log, psffwhm, t1, tfoot, fp, maskedImage,
import lsst.meas.deblender as deb
butils = deb.BaselineUtilsF

- log.logdebug('Found significant flux at template edge.')
+ log.trace('Found significant flux at template edge.')
# Compute the max of:
# -symmetric-template-clipped image * PSF
# -footprint-clipped image
python/lsst/meas/deblender/deblend.py (7 additions & 7 deletions)
@@ -207,7 +207,7 @@ def addSchemaKeys(self, schema):
'deblend_hasStrayFlux', type='Flag',
doc=('This source was assigned some stray flux'))

- self.log.logdebug('Added keys to schema: %s' % ", ".join(str(x) for x in (
+ self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in (
self.nChildKey, self.psfKey, self.psfCenterKey, self.psfFluxKey,
Contributor:
Why the change in level from debug to trace?

Author:
It was discussed previously here (and noted in the commit message). For this logger named "meas_deblender.baseline", two tracing levels are currently used in master: a less verbose log.log(-9, ...) and a more verbose log.logdebug(...). So the former maps to log.debug() and the latter maps to log.trace() in the conversion.

self.tooManyPeaksKey, self.tooBigKey)))
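
In code form, the mapping described in the author's reply reads roughly as follows. This is an illustrative sketch only: the variable values are placeholders, and only calls that appear elsewhere in this PR are used.

```python
import lsst.log as lsstLog

log = lsstLog.Log.getLogger('meas_deblender.baseline')
dof, pki = 42.0, 3  # placeholder values, for illustration only

# master: log.log(-9, 'dof %g' % dof)    -- the less verbose level, now debug()
log.debug('dof %g' % dof)

# master: log.logdebug('Peak %i' % pki)  -- the more verbose level, now trace()
log.trace('Peak %i', pki)
```
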

@@ -250,7 +250,7 @@ def deblend(self, exposure, srcs, psf):
statsCtrl.setAndMask(mi.getMask().getPlaneBitMask(self.config.maskPlanes))
stats = afwMath.makeStatistics(mi.getVariance(), mi.getMask(), afwMath.MEDIAN, statsCtrl)
sigma1 = math.sqrt(stats.getValue(afwMath.MEDIAN))
- self.log.logdebug('sigma1: %g' % sigma1)
+ self.log.trace('sigma1: %g', sigma1)

n0 = len(srcs)
nparents = 0
@@ -270,19 +270,19 @@ def deblend(self, exposure, srcs, psf):
if self.isLargeFootprint(fp):
src.set(self.tooBigKey, True)
self.skipParent(src, mi.getMask())
- self.log.logdebug('Parent %i: skipping large footprint' % (int(src.getId()),))
+ self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
continue
if self.isMasked(fp, exposure.getMaskedImage().getMask()):
src.set(self.maskedKey, True)
self.skipParent(src, mi.getMask())
- self.log.logdebug('Parent %i: skipping masked footprint' % (int(src.getId()),))
+ self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
continue

nparents += 1
bb = fp.getBBox()
psf_fwhm = self._getPsfFwhm(psf, bb)

- self.log.logdebug('Parent %i: deblending %i peaks' % (int(src.getId()), len(pks)))
+ self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(pks))

self.preSingleDeblendHook(exposure, srcs, i, fp, psf, psf_fwhm, sigma1)
npre = len(srcs)
@@ -328,8 +328,8 @@ def deblend(self, exposure, srcs, psf):
# Don't care
continue
# We need to preserve the peak: make sure we have enough info to create a minimal child src
self.log.logdebug("Peak at (%i,%i) failed. Using minimal default info for child." %
(pks[j].getIx(), pks[j].getIy()))
self.log.trace("Peak at (%i,%i) failed. Using minimal default info for child.",
pks[j].getIx(), pks[j].getIy())
if heavy is None:
# copy the full footprint and strip out extra peaks
foot = afwDet.Footprint(src.getFootprint())
tests/testFitPsf.py (5 additions & 4 deletions)
@@ -28,7 +28,7 @@
import lsst.afw.detection as afwDet
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
- import lsst.pex.logging as pexLogging
+ from lsst.log import Log
import lsst.meas.algorithms as measAlg
from lsst.meas.deblender.baseline import _fitPsf, CachingPsf, PerPeak

@@ -129,10 +129,11 @@ def makePeak(x, y):

pkres = PerPeak()

- loglvl = pexLogging.Log.INFO
+ loglvl = Log.INFO
# if verbose:
- #     loglvl = pexLogging.Log.DEBUG
- log = pexLogging.Log(pexLogging.Log.getDefaultLog(), 'tests.fit_psf', loglvl)
+ #     loglvl = Log.DEBUG
+ log = Log.getLogger('tests.fit_psf')
+ log.setLevel(loglvl)

cpsf = CachingPsf(psf1)

ups/meas_deblender.table (1 addition & 0 deletions)
@@ -1,4 +1,5 @@
setupRequired(afw)
+ setupRequired(log)
setupRequired(meas_algorithms)
setupRequired(numpy)
setupRequired(scons)