Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Merging more of Scott's changes and committing a beta version of GBNCC_search.py.

  • Loading branch information...
commit 19cc148ddf5c9dd8c420a95f41381f5d07653df5 1 parent 22e785c
Kevin Stovall authored
View
175 bin/GBNCC_search.py
@@ -2,17 +2,16 @@
import glob, os, os.path, shutil, socket, struct, tarfile, stat
import numpy, sys, presto, time, sigproc, sifting
import psr_utils as pu
+import pyfits
-institution = "NRAOCV"
+institution = "UTB"
base_tmp_dir = "/dev/shm/"
-base_output_dir = "/home/sransom/results/GBT/GBNCC"
+base_output_dir = "/home/kstovall/data/GBNCC/results/"
#-------------------------------------------------------------------
# Tunable parameters for searching and folding
# (you probably don't need to tune any of them)
-orig_N = 1440000 # Number of samples to analyze at a time (~118 sec)
-raw_N = 1900000 # Number of samples to step through .fits files
-overlap_factor = 0.5 # Overlap each orig_N samples by this fraction
+raw_N = 1440000 # Number of samples to analyze (~118 secs)
rfifind_chunk_time = 25600 * 0.00008192 # ~2.1 sec
singlepulse_threshold = 5.0 # threshold SNR for candidate determination
singlepulse_plot_SNR = 5.5 # threshold SNR for singlepulse plot
@@ -38,6 +37,7 @@
sifting.short_period = 0.0005 # Shortest period candidates to consider (s)
sifting.long_period = 15.0 # Longest period candidates to consider (s)
sifting.harm_pow_cutoff = 8.0 # Power required in at least one harmonic
+foldnsubs = 128 # Number of subbands to use when folding
#-------------------------------------------------------------------
def get_baryv(ra, dec, mjd, T, obs="GB"):
@@ -52,6 +52,7 @@ def get_baryv(ra, dec, mjd, T, obs="GB"):
nn = len(tts)
bts = numpy.zeros(nn, dtype=numpy.float64)
vel = numpy.zeros(nn, dtype=numpy.float64)
+
presto.barycenter(tts, bts, vel, nn, ra, dec, obs, "DE200")
return vel.mean()
@@ -108,9 +109,9 @@ def get_folding_command(cand, obs, ddplans):
downsamp = dfact
break
if downsamp==1:
- filfile = obs.fil_filenm
+ fitsfile = obs.fits_filenm
else:
- filfile = obs.basefilenm+"_DS%d.fil"%downsamp
+ fitsfile = obs.dsbasefilenm+"_DS%d%s"%(downsamp,obs.fits_filenm[obs.fits_filenm.rfind("_"):])
p = 1.0 / cand.f
if (p < 0.002):
Mp, Mdm, N = 2, 2, 24
@@ -124,36 +125,36 @@ def get_folding_command(cand, obs, ddplans):
else:
Mp, Mdm, N = 1, 1, 200
otheropts = "-npart 30 -nopdsearch -pstep 1 -pdstep 2 -dmstep 1"
- return "prepfold -noxwin -accelcand %d -accelfile %s.cand -dm %.2f -o %s %s -n %d -npfact %d -ndmfact %d %s" % \
+ return "prepfold -noxwin -accelcand %d -accelfile %s.cand -dm %.2f -o %s %s -n %d -npfact %d -ndmfact %d -nsub %d %s" % \
(cand.candnum, cand.filename, cand.DM, outfilenm,
- otheropts, N, Mp, Mdm, filfile)
+ otheropts, N, Mp, Mdm, foldnsubs, fitsfile)
class obs_info:
"""
- class obs_info(fil_filenm)
+ class obs_info(fits_filenm)
A class describing the observation and the analysis.
"""
- def __init__(self, fil_filenm):
- self.fil_filenm = fil_filenm
- self.basefilenm = fil_filenm[:fil_filenm.find(".fil")]
- filhdr, hdrlen = sigproc.read_header(fil_filenm)
- self.MJD = filhdr['tstart']
- self.nchans = filhdr['nchans']
- self.ra_rad = sigproc.ra2radians(filhdr['src_raj'])
- self.ra_string = pu.coord_to_string(*pu.rad_to_hms(self.ra_rad))
- self.dec_rad = sigproc.dec2radians(filhdr['src_dej'])
- self.dec_string = pu.coord_to_string(*pu.rad_to_dms(self.dec_rad))
+ def __init__(self, fits_filenm):
+ self.fits_filenm = fits_filenm
+ self.basefilenm = fits_filenm[:fits_filenm.find(".fits")]
+ self.dsbasefilenm = fits_filenm[:fits_filenm.rfind("_")]
+ fitshandle=pyfits.open(fits_filenm)
+ self.MJD = fitshandle[0].header['STT_IMJD']+fitshandle[0].header['STT_SMJD']/86400.0+fitshandle[0].header['STT_OFFS']/86400.0
+ self.nchans = fitshandle[0].header['OBSNCHAN']
+ self.ra_string = fitshandle[0].header['RA']
+ self.dec_string = fitshandle[0].header['DEC']
self.str_coords = "J"+"".join(self.ra_string.split(":")[:2])
- if self.dec_rad >= 0.0: self.str_coords += "+"
self.str_coords += "".join(self.dec_string.split(":")[:2])
- self.az = filhdr['az_start']
- self.el = 90.0-filhdr['za_start']
- fillen = os.stat(fil_filenm)[6]
- self.raw_N = (fillen-hdrlen)/(filhdr['nbits']/8)/filhdr['nchans']
- self.dt = filhdr['tsamp']
+ self.nbits=fitshandle[0].header['BITPIX']
+
+ self.raw_N=fitshandle[1].header['NAXIS2']*fitshandle[1].header['NSBLK']
+ self.dt=fitshandle[1].header['TBIN']*1000000
self.raw_T = self.raw_N * self.dt
- self.N = orig_N
+ self.N = raw_N
+ if self.dt == 163.84:
+ self.N=self.N/2
self.T = self.N * self.dt
+ self.srcname=fitshandle[0].header['SRC_NAME']
# Determine the average barycentric velocity of the observation
self.baryv = get_baryv(self.ra_string, self.dec_string,
self.MJD, self.T, obs="GB")
@@ -162,7 +163,7 @@ def __init__(self, fil_filenm):
# according to base/MJD/filenmbase/beam
self.outputdir = os.path.join(base_output_dir,
str(int(self.MJD)),
- self.str_coords)
+ self.srcname)
# Figure out which host we are processing on
self.hostname = socket.gethostname()
# The fraction of the data recommended to be masked by rfifind
@@ -186,7 +187,7 @@ def __init__(self, fil_filenm):
def write_report(self, filenm):
report_file = open(filenm, "w")
report_file.write("---------------------------------------------------------\n")
- report_file.write("%s was processed on %s\n"%(self.fil_filenm, self.hostname))
+ report_file.write("%s was processed on %s\n"%(self.fits_filenm, self.hostname))
report_file.write("Ending UTC time: %s\n"%(time.asctime(time.gmtime())))
report_file.write("Total wall time: %.1f s (%.2f hrs)\n"%\
(self.total_time, self.total_time/3600.0))
@@ -234,18 +235,57 @@ def __init__(self, lodm, dmstep, dmsperpass, numpasses, numsub, downsamp):
numpy.arange(self.dmsperpass)*self.dmstep + lodm]
self.dmlist.append(dmlist)
-def main(fil_filenm, workdir, ddplans):
+def remove_crosslist_duplicate_candidates(candlist1,candlist2):
+    # Cross-compare two candidate lists (e.g. the low- and high-acceleration
+    # search results) and, for every pair whose Fourier bins agree to within
+    # sifting.r_err, flag the lower-significance member for removal.
+    # Both lists are modified in place and returned sorted by significance.
+    n1 = len(candlist1)
+    n2 = len(candlist2)
+    removelist1 = []
+    removelist2 = []
+    # NOTE(review): the frequency sort is not actually exploited below --
+    # the comparison is still a full O(n1*n2) sweep.
+    candlist2.sort(sifting.cmp_freq)
+    candlist1.sort(sifting.cmp_freq)
+    print " Searching for crosslist dupes..."
+    ii = 0
+    while ii < n1:
+        jj=0
+        while jj < n2:
+            # Same Fourier bin (within tolerance) => crosslist duplicate;
+            # keep the higher-sigma candidate, flag the other one.
+            if numpy.fabs(candlist1[ii].r-candlist2[jj].r) < sifting.r_err:
+                if sifting.cmp_sigma(candlist1[ii],candlist2[jj])<0:
+                    print "Crosslist remove from candlist 2, %f > %f, %d:%f~%f" % (candlist1[ii].sigma,candlist2[jj].sigma,jj,candlist1[ii].r,candlist2[jj].r)
+                    if jj not in removelist2:
+                        removelist2.append(jj)
+                else:
+                    print "Crosslist remove from candlist 1, %f > %f, %d:%f~%f" % (candlist2[jj].sigma,candlist1[ii].sigma,ii,candlist1[ii].r,candlist2[jj].r)
+                    if ii not in removelist1:
+                        removelist1.append(ii)
+            jj += 1
+        ii += 1
+    # Delete from the highest index down so earlier indices stay valid.
+    for ii in range(len(removelist2)-1,-1,-1):
+        print "Removing %d from candlist2" % removelist2[ii]
+        del(candlist2[removelist2[ii]])
+    for ii in range(len(removelist1)-1,-1,-1):
+        print "Removing %d from candlist1" % removelist1[ii]
+        del(candlist1[removelist1[ii]])
+    print "Removed %d crosslist candidates\n" % (len(removelist1)+len(removelist2))
+    print "Found %d candidates. Sorting them by significance...\n" % (len(candlist1)+len(candlist2))
+    candlist1.sort(sifting.cmp_sigma)
+    candlist2.sort(sifting.cmp_sigma)
+    return candlist1,candlist2
+
+
+def main(fits_filenm, workdir, ddplans):
# Change to the specified working directory
os.chdir(workdir)
# Get information on the observation and the job
- job = obs_info(fil_filenm)
+ job = obs_info(fits_filenm)
if job.raw_T < low_T_to_search:
print "The observation is too short (%.2f s) to search."%job.raw_T
sys.exit()
job.total_time = time.time()
- ddplans = ddplans[job.nchans]
+ if job.dt == 163.84:
+ ddplans = ddplans[str(job.nchans)+"slow"]
+ else:
+ ddplans = ddplans[str(job.nchans)+"fast"]
# Use whatever .zaplist is found in the current directory
default_zaplist = glob.glob("*.zaplist")[0]
@@ -262,14 +302,19 @@ def main(fil_filenm, workdir, ddplans):
os.makedirs(tmpdir)
except: pass
- print "\nBeginning GBNCC search of '%s'"%job.fil_filenm
+ print "\nBeginning GBNCC search of '%s'"%job.fits_filenm
print "UTC time is: %s"%(time.asctime(time.gmtime()))
- # rfifind the filterbank file
- cmd = "rfifind -time %.17g -o %s %s > %s_rfifind.out"%\
- (rfifind_chunk_time, job.basefilenm,
- job.fil_filenm, job.basefilenm)
- job.rfifind_time += timed_execute(cmd)
+ rfifindout=job.basefilenm+"_rfifind.out"
+ rfifindmask=job.basefilenm+"_rfifind.mask"
+
+ if not os.path.exists(rfifindout) or not os.path.exists(rfifindmask):
+
+ # rfifind the filterbank file
+ cmd = "rfifind -time %.17g -o %s %s > %s_rfifind.out"%\
+ (rfifind_chunk_time, job.basefilenm,
+ job.fits_filenm, job.basefilenm)
+ job.rfifind_time += timed_execute(cmd)
maskfilenm = job.basefilenm + "_rfifind.mask"
# Find the fraction that was suggested to be masked
# Note: Should we stop processing if the fraction is
@@ -278,26 +323,28 @@ def main(fil_filenm, workdir, ddplans):
# Iterate over the stages of the overall de-dispersion plan
dmstrs = []
+
for ddplan in ddplans:
# Make a downsampled filterbank file
if ddplan.downsamp > 1:
- cmd = "downsample_filterbank.py %d %s"%(ddplan.downsamp, job.fil_filenm)
+ cmd = "psrfits_subband -dstime %d -nsub %d -o %s_DS%d %s"%\
+ (ddplan.downsamp, job.nchans, job.dsbasefilenm, ddplan.downsamp, job.dsbasefilenm )
job.downsample_time += timed_execute(cmd)
- fil_filenm = job.fil_filenm[:job.fil_filenm.find(".fil")] + \
- "_DS%d.fil"%ddplan.downsamp
+ fits_filenm = job.dsbasefilenm + "_DS%d%s"%\
+ (ddplan.downsamp,job.fits_filenm[job.fits_filenm.rfind("_"):])
else:
- fil_filenm = job.fil_filenm
-
+ fits_filenm = job.fits_filenm
# Iterate over the individual passes through the .fil file
for passnum in range(ddplan.numpasses):
subbasenm = "%s_DM%s"%(job.basefilenm, ddplan.subdmlist[passnum])
# Now de-disperse
cmd = "prepsubband -mask %s -lodm %.2f -dmstep %.2f -nsub %d -numdms %d -numout %d -o %s/%s %s"%\
- (maskfilenm, ddplan.lodm+passnum*ddplan.sub_dmstep, ddplan.dmstep,
- ddplan.numsub, ddplan.dmsperpass, job.N/ddplan.downsamp,
- tmpdir, job.basefilenm, fil_filenm)
+ (maskfilenm, ddplan.lodm+passnum*ddplan.sub_dmstep,
+ ddplan.dmstep, ddplan.numsub,
+ ddplan.dmsperpass, job.N/ddplan.downsamp,
+ tmpdir, job.basefilenm, fits_filenm)
job.dedispersing_time += timed_execute(cmd)
# Iterate over all the new DMs
@@ -329,7 +376,7 @@ def main(fil_filenm, workdir, ddplans):
except: pass
# Do the low-acceleration search
- cmd = "accelsearch -numharm %d -sigma %f -zmax %d -flo %f %s"%\
+ cmd = "accelsearch -harmpolish -numharm %d -sigma %f -zmax %d -flo %f %s"%\
(lo_accel_numharm, lo_accel_sigma, lo_accel_zmax, lo_accel_flo, fftnm)
job.lo_accelsearch_time += timed_execute(cmd)
try:
@@ -341,7 +388,7 @@ def main(fil_filenm, workdir, ddplans):
except: pass
# Do the high-acceleration search
- cmd = "accelsearch -numharm %d -sigma %f -zmax %d -flo %f %s"%\
+ cmd = "accelsearch -harmpolish -numharm %d -sigma %f -zmax %d -flo %f %s"%\
(hi_accel_numharm, hi_accel_sigma, hi_accel_zmax, hi_accel_flo, fftnm)
job.hi_accelsearch_time += timed_execute(cmd)
try:
@@ -398,10 +445,6 @@ def main(fil_filenm, workdir, ddplans):
if len(lo_accel_cands):
lo_accel_cands = sifting.remove_DM_problems(lo_accel_cands, numhits_to_fold,
dmstrs, low_DM_cutoff)
- if len(lo_accel_cands):
- lo_accel_cands.sort(sifting.cmp_sigma)
- sifting.write_candlist(lo_accel_cands,
- job.basefilenm+".accelcands_Z%d"%lo_accel_zmax)
hi_accel_cands = sifting.read_candidates(glob.glob("*ACCEL_%d"%hi_accel_zmax))
if len(hi_accel_cands):
@@ -409,6 +452,14 @@ def main(fil_filenm, workdir, ddplans):
if len(hi_accel_cands):
hi_accel_cands = sifting.remove_DM_problems(hi_accel_cands, numhits_to_fold,
dmstrs, low_DM_cutoff)
+
+ if len(lo_accel_cands) and len(hi_accel_cands):
+ lo_accel_cands, hi_accel_cands = remove_crosslist_duplicate_candidates(lo_accel_cands, hi_accel_cands)
+
+ if len(lo_accel_cands):
+ lo_accel_cands.sort(sifting.cmp_sigma)
+ sifting.write_candlist(lo_accel_cands,
+ job.basefilenm+".accelcands_Z%d"%lo_accel_zmax)
if len(hi_accel_cands):
hi_accel_cands.sort(sifting.cmp_sigma)
sifting.write_candlist(hi_accel_cands,
@@ -480,11 +531,11 @@ def main(fil_filenm, workdir, ddplans):
os.remove(infile)
tf.close()
- # Remove all the downsampled .fil files
+ # Remove all the downsampled .fits files
- filfiles = glob.glob("*_DS?.fil") + glob.glob("*_DS??.fil")
- for filfile in filfiles:
- os.remove(filfile)
+ fitsfiles = glob.glob("*_DS?*.fits") + glob.glob("*_DS??*.fits")
+ for fitsfile in fitsfiles:
+ os.remove(fitsfile)
# Remove the tmp directory (in a tmpfs mount)
try:
@@ -516,7 +567,7 @@ def main(fil_filenm, workdir, ddplans):
#
# If there is <=1GB of RAM per CPU core, the following are preferred
#
- # For 4096slow chan data: lodm dmstep dms/call #calls #subs downsamp
+ # For 4096slow chan data: lodm dmstep dms/call #calls #subs downsamp
ddplans['4096slow'].append(dedisp_plan( 0.0, 0.02, 86, 81, 128, 1))
ddplans['4096slow'].append(dedisp_plan(139.32, 0.03, 102, 27, 128, 2))
ddplans['4096slow'].append(dedisp_plan(221.94, 0.05, 102, 33, 128, 4))
@@ -547,10 +598,10 @@ def main(fil_filenm, workdir, ddplans):
# sys.argv[2] = working directory name
if len(sys.argv) >= 3:
workdir = sys.argv[2]
- fil_filenm = sys.argv[1]
- main(fil_filenm, workdir, ddplans)
+ fits_filenm = sys.argv[1]
+ main(fits_filenm, workdir, ddplans)
elif len(sys.argv) == 2:
- fil_filenm = sys.argv[1]
- main(fil_filenm, '.', ddplans)
+ fits_filenm = sys.argv[1]
+ main(fits_filenm, '.', ddplans)
else:
- print "GBT350_drift_search.py fil_filenm [workdir]"
+ print "GBNCC_search.py fits_filenm [workdir]"
View
649 bin/pyplotres.py
@@ -0,0 +1,649 @@
+#!/usr/bin/env python
+
+# A simple command line version of plotres written in python
+# using matplotlib and numpy
+#
+# Patrick Lazarus, Feb 26th, 2009
+
+import optparse
+import sys
+import re
+import os
+import types
+import warnings
+
+import matplotlib
+import matplotlib.pyplot as plt
+import numpy as np
+
+import pyslalib.slalib as slalib
+import binary_psr
+import parfile as par
+import residuals
+
+# Available x-axis types
+xvals = ['mjd', 'year', 'numtoa', 'orbitphase']
+# Index into xvals of the currently selected x-axis type
+# (module-level state; cycled by the [Space] hotkey in keypress()).
+xind = 0
+# Available y-axis types
+yvals = ['phase', 'usec', 'sec']
+# Index into yvals of the currently selected y-axis type
+# (cycled by the 't' hotkey in keypress()).
+yind = 0
+
+class TempoResults:
+    """Results of one TEMPO run: the input/output par file names, the
+    .tim file name, and the residuals grouped by frequency band into
+    Resids objects (keyed by get_freq_label()).
+    """
+    # NOTE(review): freqbands uses a mutable default list; harmless here
+    # because it is never mutated, but worth keeping in mind.
+    def __init__(self, freqbands=[[0, 'inf']]):
+        """Read TEMPO results (resid2.tmp, tempo.lis, timfile and parfiles)
+        freqbands is a list of frequency pairs to display.
+        """
+        # Open tempo.lis. Parse it and find input .tim and .par files. Also find output .par file.
+        inputfiles_re = re.compile(r"Input data from (.*\.tim.*), Parameters from (.*\.par.*)")
+        outputfile_re = re.compile(r"Assumed parameters -- PSR (.*)$")
+        tempolisfile = open("tempo.lis")
+        intimfn, inparfn, outparfn = None, None, None
+        for line in tempolisfile:
+            match = inputfiles_re.search(line)
+            if match:
+                intimfn = match.group(1).strip()
+                inparfn = match.group(2).strip()
+            else:
+                match = outputfile_re.search(line)
+                if match:
+                    outparfn = "%s.par" % match.group(1).strip()
+            if (intimfn != None) and (inparfn != None) and (outparfn != None):
+                # Found what we're looking for no need to continue parsing the file
+                break
+        tempolisfile.close()
+
+        # Record filename
+        self.inparfn = inparfn
+        self.outparfn = outparfn
+        self.intimfn = intimfn
+
+        # Read parfiles
+        self.inpar = par.psr_par(inparfn)
+        self.outpar = par.psr_par(outparfn)
+
+        # Read residuals
+        # Need to check if on 32-bit or 64-bit computer
+        #(the following is a hack to find out if we're on a borg or borgii node)
+#PATRICKS QUICK FIX 10/14 - 3/39/11 read_residuals_64bit does not work on nimrod or my machine, but 32bit does...
+    #print os.uname()[4]
+        if True: #os.uname()[4]=='i686':
+            # 32-bit computer
+            r = residuals.read_residuals()
+        else:
+            # 64-bit computer
+            r = residuals.read_residuals_64bit()
+
+        self.max_TOA = r.bary_TOA.max()
+        self.min_TOA = r.bary_TOA.min()
+        self.freqbands = freqbands
+        self.residuals = {}
+        # Slice the residual arrays into the requested frequency bands;
+        # each band [lo, hi) becomes one Resids object.
+        for lo,hi in self.freqbands:
+            indices = (r.bary_freq>=lo) & (r.bary_freq<hi)
+            self.residuals[get_freq_label(lo, hi)] = \
+                    Resids(r.bary_TOA[indices], r.bary_freq[indices], \
+                        np.arange(r.numTOAs)[indices], r.orbit_phs[indices], \
+                        r.postfit_phs[indices], r.postfit_sec[indices], \
+                        r.prefit_phs[indices], r.prefit_sec[indices], \
+                        r.uncertainty[indices], r.weight[indices], \
+                        self.inpar, self.outpar)
+
+    def get_info(self, freq_label, index, postfit=True):
+        """Given a freq_label and index return formatted text
+        describing the TOA residual.
+
+        Assume postfit period for calculating residual in phase,
+        unless otherwise indicated.
+        """
+        # NOTE(review): 'index' is fancy-indexed and then [0]-indexed below,
+        # so it is expected to be a sequence of indices (e.g. event.ind from
+        # a matplotlib pick event), not a scalar.
+        r = self.residuals[freq_label]
+        description = []
+        description.append("TOA Selected:")
+        description.append("\tNumber: %s" % r.TOA_index[index][0])
+        description.append("\tEpoch (MJD): %s" % r.bary_TOA[index][0])
+        if yvals[yind] == "phase":
+            description.append("\tPre-fit residual (phase): %s" % r.prefit_phs[index][0])
+            description.append("\tPost-fit residual (phase): %s" % r.postfit_phs[index][0])
+            if postfit:
+                description.append("\tUncertainty (phase): %s" % (r.uncertainty[index][0]/r.outpar.P0))
+            else:
+                description.append("\tUncertainty (phase): %s" % (r.uncertainty[index][0]/r.inpar.P0))
+        elif yvals[yind] == "usec":
+            description.append("\tPre-fit residual (usec): %s" % (r.prefit_sec[index][0]*1e6))
+            description.append("\tPost-fit residual (usec): %s" % (r.postfit_sec[index][0]*1e6))
+            description.append("\tUncertainty (usec): %s" % (r.uncertainty[index][0]*1e6))
+        elif yvals[yind] == "sec":
+            description.append("\tPre-fit residual (sec): %s" % r.prefit_sec[index][0])
+            description.append("\tPost-fit residual (sec): %s" % r.postfit_sec[index][0])
+            description.append("\tUncertainty (sec): %s" % r.uncertainty[index][0])
+        description.append("\tFrequency (MHz): %s" % r.bary_freq[index][0])
+        return description
+
+
+
+class Resids:
+    """The Resids object contains the following information
+    about TEMPO residuals:
+        bary_TOA
+        bary_freq
+        numTOAs
+        orbit_phs
+        postfit_phs
+        postfit_sec
+        prefit_phs
+        prefit_sec
+        uncertainty
+        weight
+    """
+    # Plain record: per-band residual arrays plus the input/output par files.
+    def __init__(self, bary_TOA, bary_freq, TOA_index, orbit_phs, \
+                    postfit_phs, postfit_sec, prefit_phs, prefit_sec, \
+                    uncertainty, weight, inpar, outpar):
+        self.bary_TOA = bary_TOA
+        self.bary_freq = bary_freq
+        self.TOA_index = TOA_index
+        self.orbit_phs = orbit_phs
+        self.postfit_phs = postfit_phs
+        self.postfit_sec = postfit_sec
+        self.prefit_phs = prefit_phs
+        self.prefit_sec = prefit_sec
+        self.uncertainty = uncertainty
+        self.weight = weight
+        self.inpar = inpar
+        self.outpar = outpar
+
+
+    def get_xdata(self, key):
+        """Return label describing xaxis and the corresponding
+        data given keyword 'key'.
+        """
+        # NOTE(review): types.StringType is Python-2 only (str on Python 3).
+        if not isinstance(key, types.StringType):
+            raise ValueError("key must be of type string.")
+        xopt = key.lower()
+        if xopt == 'numtoa':
+            xdata = self.TOA_index
+            xlabel = "TOA Number"
+        elif xopt == 'mjd':
+            xdata = self.bary_TOA
+            xlabel = "MJD"
+        elif xopt == 'orbitphase':
+            xdata = self.orbit_phs
+            xlabel = "Orbital Phase"
+        elif xopt == 'year':
+            xdata = mjd_to_year(self.bary_TOA)
+            xlabel = "Year"
+        else:
+            raise ValueError("Unknown xaxis type (%s)." % xopt)
+        return (xlabel, xdata)
+
+
+    def get_ydata(self, key, postfit=True):
+        """Return label describing yaxis and the corresponding
+        data/errors given keyword 'key'.
+        'postfit' is a boolean argument that determines if
+        postfit, or prefit data is to be returned.
+        """
+        # NOTE(review): types.StringType is Python-2 only (str on Python 3).
+        if not isinstance(key, types.StringType):
+            raise ValueError("key must be of type string.")
+        yopt = key.lower()
+        if postfit:
+            if yopt == 'phase':
+                ydata = self.postfit_phs
+                #
+                # NOTE: Should use P at TOA not at PEPOCH
+                #
+                yerror = self.uncertainty/self.outpar.P0
+                ylabel = "Residuals (Phase)"
+            elif yopt == 'usec':
+                ydata = self.postfit_sec*1e6
+                yerror = self.uncertainty*1e6
+                ylabel = "Residuals (uSeconds)"
+            elif yopt == 'sec':
+                ydata = self.postfit_sec
+                yerror = self.uncertainty
+                ylabel = "Residuals (Seconds)"
+            else:
+                raise ValueError("Unknown yaxis type (%s)." % yopt)
+        else:
+            if yopt=='phase':
+                ydata = self.prefit_phs
+                #
+                # NOTE: Should use P at TOA not at PEPOCH
+                #
+                yerror = self.uncertainty/self.inpar.P0
+                ylabel = "Residuals (Phase)"
+            elif yopt=='usec':
+                ydata = self.prefit_sec*1e6
+                yerror = self.uncertainty*1e6
+                ylabel = "Residuals (uSeconds)"
+            elif yopt=='sec':
+                ydata = self.prefit_sec
+                yerror = self.uncertainty
+                ylabel = "Residuals (Seconds)"
+            else:
+                raise ValueError("Unknown yaxis type (%s)." % yopt)
+        return (ylabel, ydata, yerror)
+
+
+def plot_data(tempo_results, xkey, ykey, postfit=True, prefit=False, \
+ interactive=True, mark_peri=False, show_legend=True):
+ # figure out what should be plotted
+ # True means to plot postfit
+ # False means to plot prefit
+ if postfit and prefit:
+ to_plot_postfit = [False, True]
+ elif postfit and not prefit:
+ to_plot_postfit = [True]
+ elif not postfit and prefit:
+ to_plot_postfit = [False]
+ else:
+ raise EmptyPlotValueError("At least one of prefit and postfit must be True.")
+ subplot = 1
+ numsubplots = len(to_plot_postfit)
+ global axes
+ axes = []
+ handles = []
+ labels = []
+ for usepostfit in to_plot_postfit:
+ TOAcount = 0
+ # All subplots are in a single column
+ if subplot == 1:
+ axes.append(plt.subplot(numsubplots, 1, subplot))
+ else:
+ axes.append(plt.subplot(numsubplots, 1, subplot, sharex=axes[0]))
+
+ # set tick formatter to not use scientific notation or an offset
+ tick_formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)
+ tick_formatter.set_scientific(False)
+ axes[-1].xaxis.set_major_formatter(tick_formatter)
+
+ for lo,hi in tempo_results.freqbands:
+ freq_label = get_freq_label(lo, hi)
+ resids = tempo_results.residuals[freq_label]
+ xlabel, xdata = resids.get_xdata(xkey)
+ ylabel, ydata, yerr = resids.get_ydata(ykey, usepostfit)
+ if len(xdata):
+ # Plot the residuals
+ handle = plt.errorbar(xdata, ydata, yerr=yerr, fmt='.', \
+ label=freq_label, picker=5)
+ if subplot == 1:
+ handles.append(handle[0])
+ labels.append(freq_label)
+ TOAcount += xdata.size
+ # Finish off the plot
+ plt.axhline(0, ls='--', label="_nolegend_", c='k', lw=0.5)
+ axes[-1].ticklabel_format(style='plain', axis='x')
+
+ if mark_peri and hasattr(tempo_results.outpar, 'BINARY'):
+ # Be sure to check if pulsar is in a binary
+ # Cannot mark passage of periastron if not a binary
+ if usepostfit:
+ binpsr = binary_psr.binary_psr(tempo_results.outpar.FILE)
+ else:
+ binpsr = binary_psr.binary_psr(tempo_results.inpar.FILE)
+ xmin, xmax = plt.xlim()
+ mjd_min = tempo_results.min_TOA
+ mjd_max = tempo_results.max_TOA
+ guess_mjds = np.arange(mjd_max + binpsr.par.PB, \
+ mjd_min - binpsr.par.PB, -binpsr.par.PB)
+ for mjd in guess_mjds:
+ peri_mjd = binpsr.most_recent_peri(float(mjd))
+ if xkey == 'mjd':
+ plt.axvline(peri_mjd, ls=':', label='_nolegend_', c='k', lw=0.5)
+ elif xkey == 'year':
+ print "plotting peri passage"
+ plt.axvline(mjd_to_year(peri_mjd), ls=':', label='_nolegend_', c='k', lw=0.5)
+ plt.xlim((xmin, xmax))
+ plt.xlabel(xlabel)
+ plt.ylabel(ylabel)
+ if usepostfit:
+ plt.title("Postfit Redisuals (Number of TOAs: %d)" % TOAcount)
+ else:
+ plt.title("Prefit Redisuals (Number of TOAs: %d)" % TOAcount)
+ subplot += 1
+
+ if numsubplots > 1:
+ # Increase spacing between subplots.
+ plt.subplots_adjust(hspace=0.25)
+
+ # Write name of input files used for timing on figure
+ if interactive:
+ fntext = "TOA file: %s, Parameter file: %s" % \
+ (tempo_results.intimfn, tempo_results.inparfn)
+ figure_text = plt.figtext(0.01, 0.01, fntext, verticalalignment='bottom', \
+ horizontalalignment='left')
+
+ # Make the legend and set its visibility state
+ leg = plt.figlegend(handles, labels, 'upper right')
+ leg.set_visible(show_legend)
+ leg.legendPatch.set_alpha(0.5)
+
+
+def create_plot():
+    # Create the letter-sized matplotlib figure.  The handle is not
+    # returned; later code retrieves it via pyplot's global state
+    # (plt.gcf()), so the unused local is harmless.
+    # Set up the plot
+    fig = plt.figure(figsize=(11,8.5))
+
+
def get_freq_label(lo, hi):
    """Return the display label for the band [lo, hi), e.g. '400 - 600 MHz'.

    Used both as the residuals-dictionary key and as the plot legend text.
    """
    endpoints = (lo, hi)
    return "%s - %s MHz" % endpoints
+
+
+def savefigure(savefn='./resid2.tmp.ps'):
+    # Save the current pyplot figure as a landscape, letter-sized
+    # PostScript file at 'savefn'.
+    print "Saving plot to %s" % savefn
+    plt.savefig(savefn, orientation='landscape', papertype='letter')
+
+def reloadplot():
+    # Reload residuals and replot
+    # Hides the figure while rebuilding it, then shows and redraws it.
+    print "Plotting..."
+    fig = plt.gcf()
+    fig.set_visible(False)
+    plt.clf() # clear figure
+    # NOTE(review): this 'tempo_results' is a local variable; the
+    # module-level global used by pick() (set in main()) is NOT refreshed
+    # here, so TOA info printed after pressing 'r' may be stale -- confirm.
+    tempo_results = TempoResults(options.freqbands)
+    try:
+        plot_data(tempo_results, options.xaxis, options.yaxis, \
+                postfit=options.postfit, prefit=options.prefit, \
+                interactive=options.interactive, \
+                mark_peri=options.mark_peri, show_legend=options.legend)
+    except EmptyPlotValueError, msg:
+        # Nothing selected to plot: show the hint both in the terminal
+        # and in the (otherwise empty) figure.
+        print msg
+        print "Press 'p'/'P' to add prefit/postfit plot."
+        plt.figtext(0.5, 0.5, (str(msg) + "\n" + \
+                    "Press 'p'/'P' to add prefit/postfit plot."), \
+                    horizontalalignment='center', \
+                    verticalalignment='center', \
+                    bbox=dict(facecolor='white', alpha=0.75))
+    fig.set_visible(True)
+    redrawplot()
+
+
+def redrawplot():
+    # Re-render the current figure without blocking.
+    plt.draw() #plt.show is keeping the plot open on nimrod, as opposed to plt.draw
+    #plt.show()
+
+def quit():
+ print "Quiting..."
+ sys.exit(0)
+
+
+def pick(event):
+    # Matplotlib 'pick_event' callback: print the details of the clicked
+    # TOA to the terminal (registered in main()).
+    global tempo_results
+    index = event.ind
+    axes = event.mouseevent.inaxes
+    if axes:
+        title = axes.get_title()
+        # Which fit stage was clicked -- plot_data() puts "Postfit" in the
+        # postfit subplot's title.
+        postfit = ("Postfit" in title)
+    # NOTE(review): if the click lands outside any axes, 'postfit' is
+    # unbound here and the get_info() call below raises NameError -- confirm
+    # whether matplotlib can deliver such an event.
+    if len(index) == 1:
+        freq_label = event.artist.get_label()
+        info = tempo_results.get_info(freq_label, index, postfit)
+        print_text(info)
+    else:
+        print "Multiple TOAs selected. Zoom in and try again."
+
+
+def print_text(lines, *args, **kwargs):
+    """Print lines of text (in a list) in the terminal."""
+    # Extra positional/keyword arguments are accepted but ignored --
+    # presumably to keep the signature interchangeable with other display
+    # helpers; confirm before removing them.
+    print '\n'.join(lines)
+
+
+def print_help():
+ # Display help
+ print "Helping..."
+ print "-"*80
+ print "Help - Hotkeys definitions:"
+ print "\th - Display this help"
+ print "\tq - Quit"
+ print "\ts - Save current plot(s) to PostScript file"
+ print "\tp - Toggle prefit display on/off"
+ print "\tP - Toggle postfit display on/off"
+ print "\tz - Toggle Zoom-mode on/off"
+ print "\tL - Toggle legend on/off"
+ print "\to - Go to original view"
+ print "\t< - Go to previous view"
+ print "\t> - Go to next view"
+ print "\tx - Set x-axis limits (terminal input required)"
+ print "\ty - Sey y-axis limits (terminal input required)"
+ print "\tr - Reload residuals"
+ print "\tt - Cycle through y-axis types ('phase', 'usec', 'sec')"
+ print "\t[Space] - Cycle through x-axis types ('MJD', 'year', 'numTOA', 'orbitphase')"
+ print "\t[Left mouse] - Select TOA (display info in terminal)"
+ print "\t - Select zoom region (if Zoom-mode is on)"
+ print "-"*80
+
+
+def keypress(event):
+    # Hotkey dispatcher registered as the matplotlib 'key_press_event'
+    # callback (see main()).  The hotkeys are documented in print_help().
+    global tempo_results
+    global options
+    global xind, xvals
+    global yind, yvals
+    # NOTE(review): types.StringType is Python-2 only (str on Python 3).
+    if type(event.key) == types.StringType:
+        if event.key.lower() == 'q':
+            quit()
+        elif event.key.lower() == 's':
+            savefigure()
+        elif event.key.lower() == 'r':
+            reloadplot()
+        elif event.key.upper() == 'L':
+            leg = plt.gcf().legends[0]
+            options.legend = not options.legend
+            leg.set_visible(options.legend)
+            redrawplot()
+        elif event.key.lower() == 'z':
+            # Turn on zoom mode
+            print "Toggling zoom mode..."
+            event.canvas.toolbar.zoom()
+        elif event.key.lower() == 'o':
+            # Restore plot to original view
+            print "Restoring plot..."
+            event.canvas.toolbar.home()
+        elif event.key.lower() == ',' or event.key.lower() == '<':
+            # Go back to previous plot view
+            print "Going back..."
+            event.canvas.toolbar.back()
+        elif event.key.lower() == '.' or event.key.lower() == '>':
+            # Go forward to next plot view
+            print "Going forward..."
+            event.canvas.toolbar.forward()
+        elif event.key.lower() == ' ':
+            # Cycle x-axis type and rebuild the plot.
+            xind = (xind + 1) % len(xvals)
+            print "Toggling plot type...[%s]"%xvals[xind], xind
+            options.xaxis = xvals[xind]
+            reloadplot()
+        elif event.key.lower() == 't':
+            # Cycle y-axis type and rebuild the plot.
+            yind = (yind + 1) % len(yvals)
+            print "Toggling plot scale...[%s]"%yvals[yind], yind
+            options.yaxis = yvals[yind]
+            reloadplot()
+        # 'p'/'P' are deliberately case-sensitive: lowercase toggles the
+        # prefit panel, uppercase the postfit panel.
+        elif event.key == 'p':
+            options.prefit = not options.prefit
+            print "Toggling prefit-residuals display to: %s" % \
+                    ((options.prefit and "ON") or "OFF")
+            reloadplot()
+        elif event.key == 'P':
+            options.postfit = not options.postfit
+            print "Toggling postfit-residuals display to: %s" % \
+                    ((options.postfit and "ON") or "OFF")
+            reloadplot()
+        elif event.key.lower() == 'x':
+            # Set x-axis limits
+            print "Setting x-axis limits. User input required..."
+            xmin = raw_input("X-axis minimum: ")
+            xmax = raw_input("X-axis maximum: ")
+            try:
+                xmin = float(xmin)
+                xmax = float(xmax)
+                if xmax <= xmin:
+                    raise ValueError
+            except ValueError:
+                print "Bad values provided!"
+                return
+            plt.xlim(xmin, xmax)
+        elif event.key.lower() == 'y':
+            global axes
+            # Set y-axis limits
+            print "Setting y-axis limits. User input required..."
+            # With both prefit and postfit panels shown, ask which one to
+            # adjust before reading the limits.
+            if len(axes) == 2:
+                axes_to_adjust = raw_input("Axes to adjust (pre/post): ")
+                if axes_to_adjust.lower().startswith('pre'):
+                    plt.axes(axes[0])
+                elif axes_to_adjust.lower().startswith('post'):
+                    plt.axes(axes[1])
+                else:
+                    raise ValueError
+            ymin = raw_input("Y-axis minimum: ")
+            ymax = raw_input("Y-axis maximum: ")
+            try:
+                ymin = float(ymin)
+                ymax = float(ymax)
+                if ymax <= ymin:
+                    raise ValueError
+            except ValueError:
+                print "Bad values provided!"
+                return
+            plt.ylim(ymin, ymax)
+        elif event.key.lower() == 'h':
+            print_help()
+
+
+def mjd_to_year(mjds):
+    # Convert MJD value(s) to fractional calendar years using SLALIB.
+    # Accepts a scalar, list, or ndarray; empty input is returned as-is.
+    mjds = np.asarray(mjds)
+    if mjds.size < 1:
+        return mjds
+    old_shape = mjds.shape # Remember original shape
+    # NOTE(review): np.asarray does not copy an ndarray argument, so this
+    # reshape temporarily mutates the caller's array (restored below).
+    mjds.shape = (mjds.size, 1)
+    years, months, days, fracs, stats = np.apply_along_axis(slalib.sla_djcl, 1, mjds).transpose()
+    # Take into account leap years
+    daysperyear = (((years % 4) == 0) & (((years % 100) != 0) | ((years % 400) == 0))) * 1 + 365.0
+    # sla_clyd converts (year, month, day) -> (year, day-of-year); the
+    # returned status values ('stats') are not checked.
+    years, days, stats = np.array([slalib.sla_clyd(*ymd) for ymd in np.vstack((years, months, days)).transpose()]).transpose()
+    mjds.shape = old_shape # Change back to original shape
+    return (years + (days + fracs) / daysperyear)
+
+
+def parse_options():
+    # Parse the command line (using the module-level 'parser' global built
+    # in the __main__ block), normalise the frequency-band selection into
+    # a sorted float array on options.freqbands, and validate axis choices.
+    (options, sys.argv) = parser.parse_args()
+    if sys.argv==[]:
+        sys.argv = ['pyplotres.py']
+    if not options.freqs:
+        # Default frequency bands
+        freqbands = [['0', '400'],
+                     ['400', '600'],
+                     ['600', '1000'],
+                     ['1000', '1600'],
+                     ['1600', '2400'],
+                     ['2400', 'inf']]
+        #freqbands = [['0', 'inf']]
+    else:
+        freqbands = []
+        for fopt in options.freqs:
+            # "lo:hi" pairs; empty endpoints default to 0 and inf, and
+            # "a:b:c" expands to consecutive bands [a,b], [b,c].
+            f = fopt.split(':')
+            if f[0]=='':
+                f[0] = '0'
+            if f[-1]=='':
+                f[-1] = 'inf'
+            if len(f) > 2:
+                for i in range(0, len(f)-1):
+                    freqbands.append(f[i:i+2])
+            else:
+                freqbands.append(f)
+    freqbands = np.array(freqbands).astype(float)
+    # NOTE(review): the result of this fancy-indexing is discarded --
+    # presumably 'freqbands = freqbands[...]' was intended to sort the
+    # bands.  As written the line is a no-op and out-of-order bands are
+    # instead rejected by the check below.  Confirm intent before fixing.
+    freqbands[freqbands.argsort(axis=0).transpose()[0]]
+    if np.any(freqbands.flat != sorted(freqbands.flat)):
+        raise ValueError("Frequency bands have overlaps or are inverted.")
+    options.freqbands = freqbands
+
+    # NOTE(review): unconditionally disables periastron marking, overriding
+    # the --mark-peri command-line flag -- confirm this is deliberate.
+    options.mark_peri = False
+
+    if not options.prefit and not options.postfit:
+        # If neither prefit or postfit are selected
+        # show postfit
+        options.postfit = True
+
+    if options.xaxis.lower() not in xvals:
+        raise BadOptionValueError("Option to -x/--x-axis (%s) is not permitted." % \
+                                        options.xaxis)
+    if options.yaxis.lower() not in yvals:
+        raise BadOptionValueError("Option to -y/--y-axis (%s) is not permitted." % \
+                                        options.yaxis)
+    return options
+
+
+def main():
+    # Entry point: parse options, load the TEMPO results, build the figure
+    # and either run the interactive event loop or save-and-exit.
+    global tempo_results
+    global options
+    options = parse_options()
+    tempo_results = TempoResults(options.freqbands)
+    create_plot()
+    reloadplot()
+
+    if options.interactive:
+        fig = plt.gcf() # current figure
+
+        # Before setting up our own event handlers delete matplotlib's
+        # default 'key_press_event' handler.
+        defcids = fig.canvas.callbacks.callbacks['key_press_event'].keys()
+        for cid in defcids:
+            fig.canvas.callbacks.disconnect(cid)
+
+        # Now, register our event callback functions
+        cid_keypress = fig.canvas.mpl_connect('key_press_event', keypress)
+        cid_pick = fig.canvas.mpl_connect('pick_event', pick)
+
+        # Finally, let the show begin!
+        #plt.ion()
+        plt.show()
+    else:
+        # Save figure and quit
+        savefigure()
+        quit()
+
+
class BadOptionValueError(ValueError):
    """Raised when an unsupported value is passed to a command-line option."""
+
+
class EmptyPlotValueError(ValueError):
    """Raised when there is nothing to plot (neither prefit nor postfit requested)."""
+
+
+if __name__=='__main__':
+    # Build the command-line interface and launch.  'parser' must live at
+    # module level: parse_options() reads it as a global.
+    parser = optparse.OptionParser(prog="pyplotres.py", \
+                        version="v1.2 Patrick Lazarus (Mar. 29, 2010)")
+    parser.add_option('-f', '--freq', dest='freqs', action='append', \
+                        help="Band of frequencies, in MHz, to be plotted " \
+                            "(format xxx:yyy). Each band will have a " \
+                            " different colour. Multiple -f/--freq options " \
+                            " are allowed. (Default: Plot all frequencies " \
+                            "in single colour.)", \
+                        default=[])
+    parser.add_option('-x', '--x-axis', dest='xaxis', type='string', \
+                        help="Values to plot on x-axis. Must be one of " \
+                            "%s. (Default: '%s')" % (str(xvals), xvals[xind]),
+                        default=xvals[xind])
+    parser.add_option('-y', '--y-axis', dest='yaxis', type='string', \
+                        help="Values to plot on y-axis. Must be one of "
+                            "%s. (Default: '%s')" % (str(yvals), yvals[yind]), \
+                        default=yvals[yind])
+    parser.add_option('--post', dest='postfit', action='store_true', \
+                        help="Show postfit residuals. (Default: Don't show " \
+                            "postfit.)", \
+                        default=False)
+    parser.add_option('--pre', dest='prefit', action='store_true', \
+                        help="Show prefit residuals. (Default: Don't show " \
+                            "prefit.)", \
+                        default=False)
+    parser.add_option('-l', '--legend', dest='legend', action='store_true', \
+                        help="Show legend of frequencies. (Default: Do not " \
+                            "show legend.)", \
+                        default=False)
+    parser.add_option('--mark-peri', dest='mark_peri', action='store_true', \
+                        help="Mark passage of periastron. (Default: don't " \
+                            "mark periastron.)", \
+                        default=False)
+    parser.add_option('--non-interactive', dest='interactive', \
+                        action='store_false', default=True, \
+                        help="Save figure and exit. (Default: Show plot, " \
+                            "only save if requested.)")
+    main()
View
BIN  docs/PRESTO_search_tutorial.odp
Binary file not shown
View
BIN  docs/PRESTO_search_tutorial.pdf
Binary file not shown
View
80 python/dedisp.py
@@ -0,0 +1,80 @@
+import os
+
+# To use this script to help you dedisperse a bunch of time series, first
+# run DDplan.py with appropriate values for your data to generate a
+# dedispersion plan:
+#
+# sransom@foops:~$ DDplan.py -d 200 -t 0.000072 -s 32 -n 96 -b 48.0 -f 820.0
+#
+# Minimum total smearing : 0.102 ms
+# --------------------------------------------
+# Minimum channel smearing : 3.76e-05 ms
+# Minimum smearing across BW : 0.00361 ms
+# Minimum sample time : 0.072 ms
+#
+# Setting the new 'best' resolution to : 0.072 ms
+# Best guess for optimal initial dDM is 0.199
+#
+# Low DM High DM dDM DownSamp dsubDM #DMs DMs/call calls WorkFract
+# 0.000 38.400 0.20 1 4.80 192 24 8 0.7273
+# 38.400 60.000 0.30 2 7.20 72 24 3 0.1364
+# 60.000 108.000 0.50 4 12.00 96 24 4 0.09091
+# 108.000 204.000 1.00 8 24.00 96 24 4 0.04545
+#
+#
+# Now with that plan, fill in the lists below and appropriate variables
+# for your data and you can then generate the subbands and time series
+# using "python dedisp.py"
+#
+
+
def myexecute(cmd):
    """Echo *cmd* (single-quoted) to stdout, then run it through the shell.

    Returns the exit status reported by os.system() so callers can check
    for failures; previously the status was silently discarded.
    """
    # Parenthesized single-argument print behaves identically under
    # Python 2 (statement) and Python 3 (function), unlike the original
    # bare `print "..."` statement which is a SyntaxError on Python 3.
    print("'%s'" % cmd)
    return os.system(cmd)
+
+
# Dedispersion plan transcribed from the DDplan.py output above,
# one list entry per pass:
# dDM steps from DDplan.py
dDMs = [0.2, 0.3, 0.5, 1.0]
# dsubDM steps
dsubDMs = [4.8, 7.2, 12.0, 24.0]
# downsample factors
downsamps = [1, 2, 4, 8]
# number of calls per set of subbands
subcalls = [8, 3, 4, 4]
# The low DM for each set of DMs
startDMs = [0.0, 38.4, 60.0, 108.0]
# DMs/call
dmspercall = 24
# Number of subbands
nsub = 32
# The number of points in the least-downsampled time series
numout = 500000
# The basename of the output files you want to use
basename = "S0062004000"
# The name of the raw data file (or files if you use wildcards) to use
rawfiles = basename+"*.bcpm2"
# The name of the maskfile to apply (if no mask, use None)
maskfile = basename+"_rfifind.mask"

# Walk the plan pass by pass: build each set of subbands from the raw
# data, then dedisperse those subbands into the final time series.
for passDM, passSubDM, passDS, ncalls, passLoDM in \
        zip(dDMs, dsubDMs, downsamps, subcalls, startDMs):
    # Split the total downsampling between the subband step and the
    # time-series step (all of it in the subbands except a factor of 2).
    if passDS < 2:
        subdownsamp = datdownsamp = 1
    else:
        subdownsamp = passDS / 2
        datdownsamp = 2
    # Optional rfifind mask, applied only while making the subbands
    maskopt = "-mask %s " % maskfile if maskfile else ""
    for callnum in range(ncalls):
        # Subbands are dedispersed to the middle DM of this call's range
        subDM = passLoDM + (callnum + 0.5) * passSubDM
        myexecute("prepsubband %s-sub -subdm %.2f -nsub %d -downsamp %d -o %s %s" %
                  (maskopt, subDM, nsub, subdownsamp, basename, rawfiles))
        # Now turn those subbands into dmspercall time series
        loDM = passLoDM + callnum * passSubDM
        subnames = basename + "_DM%.2f.sub[0-9]*" % subDM
        myexecute("prepsubband -numout %d -lodm %.2f -dmstep %.2f -numdms %d -downsamp %d -o %s %s" %
                  (numout / passDS, loDM, passDM, dmspercall, datdownsamp, basename, subnames))
Please sign in to comment.
Something went wrong with that request. Please try again.