Commit

change to logging (#169) [skip ci]
* change to logging

* checking coverage this time!

* test logger correctly

* remove extra space
arjunsavel committed Feb 22, 2024
1 parent 78bae11 commit 313a025
Showing 10 changed files with 158 additions and 114 deletions.
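The reduction modules now report through a shared logging.getLogger('simmer') logger instead of print, so an application only sees these messages once a handler is attached to that logger. A minimal sketch of one way to do that (the handler and format below are illustrative choices, not part of this commit):

import logging

# Route the package logger to the console; the verbose=True code paths
# in this commit raise the level of this same logger to DEBUG.
logger = logging.getLogger('simmer')
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(levelname)s %(name)s: %(message)s'))
logger.addHandler(handler)
logger.setLevel(logging.INFO)
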
52 changes: 32 additions & 20 deletions src/simmer/analyze_image.py
@@ -27,6 +27,9 @@

import simmer.contrast as sim_con_curve

+import logging
+logger = logging.getLogger('simmer')

#Test file
startname = '/Users/courtney/Documents/data/shaneAO/data-'
datestr = '2019-07-19'
@@ -39,6 +42,8 @@
outdir = startname + datestr + midname + datestr + '/'+starname+'/'+filt+'/'

def analyze(filename=filename, maxiter = 10, postol=1, fwhmtol = 0.5, inst = 'ShARCS', outdir='', verbose=True):
+if verbose:
+    logger.setLevel(logging.DEBUG)

#set defaults
if inst == 'PHARO':
@@ -58,38 +63,41 @@ def analyze(filename=filename, maxiter = 10, postol=1, fwhmtol = 0.5, inst = 'Sh

#Determine FWHM using that center
fwhm = find_FWHM(im, [xcen,ycen])
-if verbose == True:
-    print('Estimated FWHM: ', fwhm)
+logger.debug('Estimated FWHM: %s', fwhm)


#Iterate until coordinates and FWHM agree to within tolerance
#or until maximum number of iterations is reached
posdiff = 5
fwhmdiff = 2
niter = 1
while np.logical_and(niter < maxiter, np.logical_or(posdiff > postol, fwhmdiff > fwhmtol)):
-if verbose == True:
-    print('Beginning iteration ', niter)
+logger.debug('Beginning iteration %s', niter)


#Find sources again
updated_sources = find_sources(im, fwhm=fwhm)
-if verbose == True:
-    print('Updated sources')
+logger.debug('Updated sources')

if verbose:
print(updated_sources)

#Find brightest peak again using updated list of stars
updated_xcen, updated_ycen = find_center(updated_sources, verbose=verbose)

#Determine FWHM using that center
updated_fwhm = find_FWHM(im, [updated_xcen,updated_ycen])
-if verbose == True:
-    print('Estimated FWHM: ', updated_fwhm)
+logger.debug('Estimated FWHM: %s', updated_fwhm)


#Compute differences
posdiff = np.sqrt((updated_xcen - xcen)**2. + (updated_ycen - ycen)**2.)
fwhmdiff = np.sqrt((updated_fwhm - fwhm)**2.)
-if verbose == True:
-    print('Current posdiff: ', posdiff)
-    print('Current fwhmdiff: ', fwhmdiff)
+logger.debug('posdiff: %s', posdiff)
+logger.debug('fwhmdiff: %s', fwhmdiff)


#Update reference values
xcen = updated_xcen
@@ -117,21 +125,23 @@ def find_sources(im, sigma=5, fwhm=5, tscale=10, verbose=False, plot=True, **kwa
"""
Determines sources in an image. Based on astropy tutorial here: https://photutils.readthedocs.io/en/stable/detection.html
"""
+if verbose:
+    logger.setLevel(logging.DEBUG)
mean, median, std = sigma_clipped_stats(im, sigma=sigma)
-if verbose == True:
-    print((mean, median, std))
+logger.debug('mean, median, std: %s, %s, %s', mean, median, std)


sources = None
while np.logical_and(type(sources) == type(None), fwhm < 200):
-if verbose == True:
-    print('trying FWHM: ', fwhm)
+logger.debug('trying FWHM: %s', fwhm)

#Detect stars >threshold above the background. Needed to adjust FWHM and threshold
daofind = DAOStarFinder(fwhm=fwhm, threshold=tscale*std, exclude_border=True, **kwargs)
sources = daofind(im - median)
# for col in sources.colnames:
# sources[col].info.format = '%.8g' # for consistent table output
-if verbose == True:
-    print(sources)
+logger.debug('sources: %s', sources)

fwhm += 1

if plot:
@@ -145,7 +155,6 @@ def find_sources(im, sigma=5, fwhm=5, tscale=10, verbose=False, plot=True, **kwa

#Convert sources to a dataframe
df = sources.to_pandas()
-# print(len(sources))
return df

def find_FWHM(image, center, min_fwhm = 2, verbose=False):
@@ -208,9 +217,12 @@ def aperture_photometry(im, df, fwhm):

def find_center(df, verbose=False):
'''Returns coordinates of star with highest peak'''
+if verbose:
+    logger.setLevel(logging.DEBUG)

ww = np.where(df['peak'] == np.max(df['peak']))
xcen = int(np.round(float(df.iloc[ww]['xcentroid'])))
ycen = int(np.round(float(df.iloc[ww]['ycentroid'])))
-if verbose == True:
-    print('Closest pixels to center: ', xcen, ycen)
+logger.debug('Closest pixels to center: %s %s', xcen, ycen)

return xcen, ycen
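One way the new logger calls can be exercised in tests is pytest's built-in caplog fixture, which captures records propagated from the 'simmer' logger. The sketch below is only illustrative and is not the test added by this commit; it assumes the package is importable as simmer, and the DataFrame values and expected centre (21, 21) are made up for the example:

import logging
import pandas as pd
from simmer.analyze_image import find_center

def test_find_center_logs_debug(caplog):
    # Capture DEBUG records emitted on the shared 'simmer' logger.
    caplog.set_level(logging.DEBUG, logger='simmer')
    df = pd.DataFrame({'peak': [1.0, 5.0],
                       'xcentroid': [10.2, 20.7],
                       'ycentroid': [11.9, 21.4]})
    xcen, ycen = find_center(df, verbose=True)
    assert (xcen, ycen) == (21, 21)
    assert 'Closest pixels to center' in caplog.text
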
32 changes: 18 additions & 14 deletions src/simmer/check_logsheet.py
@@ -11,6 +11,8 @@
import pandas as pd

from . import add_dark_exp as ad
+import logging
+logger = logging.getLogger('simmer')


def check_logsheet(inst, log_name, tab=None, add_dark_times=False):
@@ -57,58 +59,60 @@ def check_tab(inst, add_dark_times, tab=None):
if not np.isin(col, frame_cols):
missing.append(col)
if len(missing) != 0:
print(f"Missing columns for {missing}.")
logger.error(f"Missing columns for {missing}.")
failed += 1

objects = log_frame["Object"].dropna().values
exptimes = log_frame["ExpTime"].dropna().values
if len(exptimes) != len(objects):
print("Missing an exposure time.")
logger.error("Missing an exposure time.")
failed += 1

filters = log_frame["Filter"].dropna().values
if len(filters) != len(log_frame[log_frame["Object"] != "dark"]):
print("Missing a filter.")
logger.error("Missing a filter.")
failed += 1

starts = log_frame["Start"].dropna().values
if len(starts) != len(objects):
print("Missing a start exposure.")
logger.error("Missing a start exposure.")
failed += 1

ends = log_frame["End"].dropna().values
if len(ends) != len(objects):
print("Missing an end exposure.")
logger.error("Missing an end exposure.")
failed += 1

coadds = log_frame["Coadds"].dropna().values
if len(coadds) != len(objects):
print("Missing a coadd.")
logger.error("Missing a coadd.")

failed += 1

if not np.all(exptimes > 0):
print("There are negative or 0 exposure times.")
logger.error("There are negative or 0 exposure times.")

failed += 1
try:
inter = ends - starts
if not np.all(inter >= 0):
print("Check the start and end exposures.")
logger.error("There are negative exposure times.")

failed += 1
except ValueError:
print("Check the start and end exposures.")
logger.error("Check the start and end exposures.")

failed += 1

exposes = log_frame["Expose"].dropna().values
try:
if not np.all(exposes == inter + 1):
-print(
-    "Incorrect number of exposures for start and end exposure."
-)
+logger.error('Incorrect number of exposures for start and end exposure.')
failed += 1
except UnboundLocalError:
print("Incorrect number of exposures for start and end exposure.")
logger.error('Incorrect number of exposures for start and end exposure.')
failed += 1
print(f"{9-failed}/9 logsheet checks passed.")
logger.info(f"{9-failed}/9 logsheet checks passed.")
return failed

failed = 0
8 changes: 6 additions & 2 deletions src/simmer/drivers.py
@@ -15,6 +15,10 @@
from . import sky
from . import summarize as summarize

+import logging
+
+logger = logging.getLogger('simmer')

def all_driver(

inst, config_file, raw_dir, reddir, sep_skies = False, plotting_yml=None, searchsize=10, just_images=False, verbose=True
@@ -77,8 +81,8 @@ def all_driver(
leave=True,
)
):
-print('searchsize: ', searchsize)
-print('s_dir: ', s_dir)
+logger.info(f"Running registration for {s_dir}")
+logger.info(f"searchsize: {searchsize}")
image.create_im(s_dir, searchsize, method=methods[i], verbose=verbose)

#make summary plot showing reduced images of all stars observed
17 changes: 9 additions & 8 deletions src/simmer/image.py
@@ -17,6 +17,9 @@
from . import utils as u
from . import contrast as contrast

+import logging
+logger = logging.getLogger('simmer')


class FlatOpeningError(ValueError):
pass
@@ -254,12 +257,10 @@ def create_im(s_dir, ssize1, plotting_yml=None, fdirs=None, method="quick_look",
#Only register star images, not sky images
dirparts = sf_dir.split('/')
if 'sky' in dirparts[len(dirparts)-3]:
-if verbose == True:
-    print('this is a sky directory: ', sf_dir)
+logger.debug('this is a sky directory: %s', sf_dir)
continue

-if verbose == True:
-    print('working on sf_dir ', sf_dir)
+logger.debug('working on sf_dir %s', sf_dir)

files = glob(
sf_dir + f"sh*.fits"
@@ -300,7 +301,7 @@ def create_im(s_dir, ssize1, plotting_yml=None, fdirs=None, method="quick_look",
image[image < 0.0] = 0.0
image_centered = reg.register_bruteforce(image)
if len(image_centered) == 0:
print("Resorting to saturated mode.")
logger.info("Resorting to saturated mode.")
image_centered, rot, newshifts1 = reg.register_saturated(
image, ssize1, newshifts1
)
@@ -329,9 +330,9 @@ def create_im(s_dir, ssize1, plotting_yml=None, fdirs=None, method="quick_look",
aend = astart+cutsize
bend = bstart+cutsize
if np.logical_or(aend > final_im.shape[0],bend > final_im.shape[1]):
-print('ERROR: Requested cutout is too large. Using full image instead.')
-print('Current image dimensions: ', final_im.shape)
-print('Desired cuts: ', astart, aend, bstart, bend)
+logger.error('ERROR: Requested cutout is too large. Using full image instead.')
+logger.info('Current image dimensions: %s', final_im.shape)
+logger.info('Desired cuts: %s %s %s %s', astart, aend, bstart, bend)
else:
final_im = final_im[astart:astart+cutsize,bstart:bstart+cutsize] #extract central cutsize x cutsize pixel region from larger image

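Every module shares the 'simmer' logger but none of them attach a handler, which is the usual division of labour for library logging. The standard companion pattern is to register a NullHandler on the package logger at import time so that applications with no logging configuration see nothing rather than "no handlers" warnings; whether simmer already does this is not shown in the diff, so the line below is only the conventional sketch:

import logging

# Library-side safety net: a do-nothing handler on the package logger.
logging.getLogger('simmer').addHandler(logging.NullHandler())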
