Skip to content

Commit

Permalink
filter updates + minor updates
Browse files Browse the repository at this point in the history
  • Loading branch information
mommermi committed Aug 7, 2018
1 parent 17156e8 commit b70bf75
Show file tree
Hide file tree
Showing 3 changed files with 58 additions and 76 deletions.
4 changes: 2 additions & 2 deletions pp_prepare.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,8 +307,8 @@ def prepare(filenames, obsparam, header_update, keep_wcs=False,
except KeyError:
logging.warning('cannot translate filter keyword \"' +
header[obsparam['filter']] +
'\"; assume clear filter')
header[obsparam['filter']] = 'clear'
'\"')
#header[obsparam['filter']] = 'clear'
header['FILTER'] = (header[obsparam['filter']], 'PP:copied')

# perform header update
Expand Down
123 changes: 52 additions & 71 deletions pp_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,20 +34,21 @@
import shutil
import logging
import subprocess
import argparse, shlex
import argparse
import shlex
import time
try:
from astropy.io import fits
except ImportError:
print('Module astropy not found. Please install with: pip install astropy')
sys.exit()

# only import if Python3 is used
if sys.version_info > (3,0):
if sys.version_info > (3, 0):
from builtins import str
from builtins import range

### pipeline-specific modules
# pipeline-specific modules
import _pp_conf
from catalog import *
import pp_prepare
Expand All @@ -59,10 +60,10 @@
import diagnostics as diag

# setup logging
logging.basicConfig(filename = _pp_conf.log_filename,
level = _pp_conf.log_level,
format = _pp_conf.log_formatline,
datefmt = _pp_conf.log_datefmt)
logging.basicConfig(filename=_pp_conf.log_filename,
level=_pp_conf.log_level,
format=_pp_conf.log_formatline,
datefmt=_pp_conf.log_datefmt)


def run_the_pipeline(filenames, man_targetname, man_filtername,
Expand All @@ -76,19 +77,19 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,

# reset diagnostics for this data set
_pp_conf.dataroot, _pp_conf.diagroot, \
_pp_conf.index_filename, _pp_conf.reg_filename, _pp_conf.cal_filename, \
_pp_conf.res_filename = _pp_conf.setup_diagnostics()
_pp_conf.index_filename, _pp_conf.reg_filename, _pp_conf.cal_filename, \
_pp_conf.res_filename = _pp_conf.setup_diagnostics()

# setup logging again (might be a different directory)
logging.basicConfig(filename = _pp_conf.log_filename,
level = _pp_conf.log_level,
format = _pp_conf.log_formatline,
datefmt = _pp_conf.log_datefmt)
logging.basicConfig(filename=_pp_conf.log_filename,
level=_pp_conf.log_level,
format=_pp_conf.log_formatline,
datefmt=_pp_conf.log_datefmt)

### read telescope information from fits headers
# read telescope information from fits headers
# check that they are the same for all images
logging.info('##### new pipeline process in %s #####' % _pp_conf.dataroot)
logging.info(('check for same telescope/instrument for %d ' + \
logging.info(('check for same telescope/instrument for %d ' +
'frames') % len(filenames))
instruments = []
for idx, filename in enumerate(filenames):
Expand All @@ -110,14 +111,13 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
raise IOError('cannot find any data...')

if len(instruments) == 0:
raise KeyError('cannot identify telescope/instrument; please update' + \
raise KeyError('cannot identify telescope/instrument; please update' +
' _pp_conf.instrument_keys accordingly')


# check if there is only one unique instrument
if len(set(instruments)) > 1:
print('ERROR: multiple instruments used in dataset: %s' % \
str(set(instruemnts)))
print('ERROR: multiple instruments used in dataset: %s' %
str(set(instruments)))
logging.error('multiple instruments used in dataset: %s' %
str(set(instruments)))
for i in range(len(filenames)):
Expand All @@ -128,10 +128,9 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
obsparam = _pp_conf.telescope_parameters[telescope]
logging.info('%d %s frames identified' % (len(filenames), telescope))


### read filter information from fits headers
# read filter information from fits headers
# check that they are the same for all images
logging.info(('check for same filter for %d ' + \
logging.info(('check for same filter for %d ' +
'frames') % len(filenames))
filters = []
for idx, filename in enumerate(filenames):
Expand All @@ -147,7 +146,7 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
filters.append(header[obsparam['filter']])

if len(filters) == 0:
raise KeyError('cannot identify filter; please update' + \
raise KeyError('cannot identify filter; please update' +
' setup/telescopes.py accordingly')

if len(set(filters)) > 1:
Expand All @@ -162,31 +161,30 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
try:
filtername = obsparam['filter_translations'][filters[0]]
except KeyError:
print(('Cannot translate filter name (%s); please adjust ' + \
'keyword "filter_translations" for %s in ' + \
'setup/telescopes.py') % (filters[0], telescope))
logging.error(('Cannot translate filter name (%s); please adjust '+\
'keyword "filter_translations" for %s in ' + \
print(('Cannot translate filter name (%s); please adjust ' +
'keyword "filter_translations" for %s in ' +
'setup/telescopes.py') % (filters[0], telescope))
logging.error(('Cannot translate filter name (%s); please adjust ' +
'keyword "filter_translations" for %s in ' +
'setup/telescopes.py') % (filters[0], telescope))
return None
else:
filtername = man_filtername
logging.info('%d %s frames identified' % (len(filenames), filtername))

print('run photometry pipeline on %d %s %s frames' % \
print('run photometry pipeline on %d %s %s frames' %
(len(filenames), telescope, filtername))

change_header = {}
if man_targetname is not None:
change_header['OBJECT'] = man_targetname

### prepare fits files for photometry pipeline
# prepare fits files for photometry pipeline
preparation = pp_prepare.prepare(filenames, obsparam,
change_header,
diagnostics=True, display=True)


### run wcs registration
# run wcs registration

# default sextractor/scamp parameters
snr, source_minarea = obsparam['source_snr'], obsparam['source_minarea']
Expand All @@ -200,24 +198,21 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
display=True,
diagnostics=True)


if len(registration['badfits']) == len(filenames):
summary_message = "<FONT COLOR=\"red\">registration failed</FONT>"
elif len(registration['goodfits']) == len(filenames):
summary_message = "<FONT COLOR=\"green\">all images registered" + \
"</FONT>; "
"</FONT>; "
else:
summary_message = "<FONT COLOR=\"orange\">registration failed for " + \
("%d/%d images</FONT>; " %
(len(registration['badfits']),
len(filenames)))
("%d/%d images</FONT>; " %
(len(registration['badfits']),
len(filenames)))

# add information to summary website, if requested
if _pp_conf.use_diagnostics_summary:
diag.insert_into_summary(summary_message)



# in case not all image were registered successfully
filenames = registration['goodfits']

Expand All @@ -235,14 +230,14 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
diag.abort('pp_registration')
return None

### run photometry (curve-of-growth analysis)
# run photometry (curve-of-growth analysis)
snr, source_minarea = 1.5, obsparam['source_minarea']
background_only = False
target_only = False
if fixed_aprad == 0:
aprad = None # force curve-of-growth analysis
aprad = None # force curve-of-growth analysis
else:
aprad = fixed_aprad # skip curve_of_growth analysis
aprad = fixed_aprad # skip curve_of_growth analysis

print('\n----- derive optimum photometry aperture\n')
phot = pp_photometry.photometry(filenames, snr, source_minarea, aprad,
Expand All @@ -253,7 +248,7 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,

# data went through curve-of-growth analysis
if phot is not None:
summary_message = ("<FONT COLOR=\"green\">aprad = %5.1f px, " + \
summary_message = ("<FONT COLOR=\"green\">aprad = %5.1f px, " +
"</FONT>") % phot['optimum_aprad']
if phot['n_target'] > 0:
summary_message += "<FONT COLOR=\"green\">based on target and " + \
Expand All @@ -266,14 +261,11 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
if _pp_conf.photmode == 'APER':
summary_message += "using a fixed aperture radius of %.1f px;" % aprad


# add information to summary website, if requested
if _pp_conf.use_diagnostics_summary:
diag.insert_into_summary(summary_message)



### run photometric calibration
# run photometric calibration
minstars = _pp_conf.minstars
manualcatalog = None

Expand All @@ -299,22 +291,21 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
else:
refcatname = 'instrumental magnitudes'
summary_message = "<FONT COLOR=\"green\">average zeropoint = " + \
("%5.2f+-%5.2f using %s</FONT>; " %
(numpy.average(zps),
numpy.average(zp_errs),
refcatname))
("%5.2f+-%5.2f using %s</FONT>; " %
(numpy.average(zps),
numpy.average(zp_errs),
refcatname))
except TypeError:
summary_message = "<FONT COLOR=\"red\">no phot. calibration</FONT>; "

# add information to summary website, if requested
if _pp_conf.use_diagnostics_summary:
diag.insert_into_summary(summary_message)


### distill photometry results
# distill photometry results
print('\n----- distill photometry results\n')
distillate = pp_distill.distill(calibration['catalogs'],
man_targetname, [0,0],
man_targetname, [0, 0],
None, None,
display=True, diagnostics=True)

Expand All @@ -328,15 +319,14 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
except IndexError:
summary_message = "no primary target extracted"


# add information to summary website, if requested
if _pp_conf.use_diagnostics_summary:
diag.insert_into_summary(summary_message)

print('\nDone!\n')
logging.info('----- successfully done with this process ----')

gc.collect() # collect garbage; just in case, you never know...
gc.collect() # collect garbage; just in case, you never know...


if __name__ == '__main__':
Expand All @@ -357,7 +347,7 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
default='high')
parser.add_argument('-solar',
help='restrict to solar-color stars',
action="store_true", default=False)
action="store_true", default=False)
parser.add_argument('images', help='images to process or \'all\'',
nargs='+')

Expand All @@ -368,15 +358,13 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
fixed_aprad = float(args.fixed_aprad)
source_tolerance = args.source_tolerance
solar = args.solar
filenames = args.images

filenames = sorted(args.images)

##### if filenames = ['all'], walk through directories and run pipeline
# if filenames = ['all'], walk through directories and run pipeline
# each dataset
_masterroot_directory = os.getcwd()


if len(filenames) == 1 and filenames[0]=='all':
if len(filenames) == 1 and filenames[0] == 'all':

# dump data set information into summary file
_pp_conf.use_diagnostics_summary = True
Expand All @@ -393,7 +381,7 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
# ignore .diagnostics directories
if '.diagnostics' in root:
continue

# identify data frames
filenames = sorted([s for s in files if re.match(regex, s)])

Expand All @@ -408,15 +396,8 @@ def run_the_pipeline(filenames, man_targetname, man_filtername,
else:
print('\n NOTHING TO DO IN %s' % root)


else:
# call run_the_pipeline only on filenames
run_the_pipeline(filenames, man_targetname, man_filtername,
fixed_aprad, source_tolerance, solar)
pass






7 changes: 4 additions & 3 deletions setup/telescopes.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@
# registration settings (Scamp)
'scamp-config-file': rootpath+'/setup/dctlmi.scamp',
'reg_max_mag': 19,
'reg_search_radius': 0.2, # deg
'reg_search_radius': 0.5, # deg
'source_tolerance': 'high',

# swarp settings
Expand Down Expand Up @@ -1287,7 +1287,8 @@
# pp_prepare
'object': 'OBJECT', # object name keyword
'filter': 'FILTER', # filter keyword
'filter_translations': {'Rc': 'R', 'V': 'V', 'B': 'B', 'u': 'u',
'filter_translations': {'Rc': 'R', 'R-Bessel': 'R',
'V': 'V', 'B': 'B', 'u': 'u',
'g-SDSS': 'g', 'r-SDSS': 'r', 'i-SDSS': 'i',
'z-SDSS': 'z', 'VR': None},
# filtername translation dictionary
Expand Down Expand Up @@ -2749,7 +2750,7 @@
# pp_prepare
'object': 'OBJECT', # object name keyword
'filter': 'FILTER2', # filter keyword
'filter_translations': {'i_G0302': 'i'},
'filter_translations': {'i_G0302': 'i', 'clear': None},
# filtername translation dictionary
'exptime': 'EXPTIME', # exposure time keyword (s)
'airmass': 'AIRMASS', # airmass keyword
Expand Down

0 comments on commit b70bf75

Please sign in to comment.