
Commit

working on data checker.
Kiyoshi Masui committed Aug 16, 2011
1 parent 507c6f7 commit aaaefb8
Showing 2 changed files with 69 additions and 68 deletions.
8 changes: 4 additions & 4 deletions input/km/psrfits.ini
@@ -1,11 +1,11 @@
"""Input file for scripts/psrfits_tosdfits.py"""

guppi_input_root = "/data1/tvoytek/AGBT10B_036/20110417/guppi_55668_wigglez15hrst_"
guppi_input_roots = ["/data1/tvoytek/AGBT10B_036/20110811/guppi_55784_wigglez1hr_centre_"]
guppi_input_end = "_0001.fits"
fits_log_dir = "/home/gbtdata/AGBT10B_036_50/"
fits_log_dir = "/home/gbtdata/AGBT10B_036_88/"
partition_cal = True
combine_map_scans = True
scans = tuple(range(218, 329, 8))
scans = (30,)
#scans = tuple(range(10, 153, 8))
output_root = '/home/scratch/kmasui/converted_fits/'
output_root = '/home/scratch/kmasui/tmp/'
time_bins_to_average = 128
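
Since the parameter file above is itself Python, a quick way to inspect it is to exec it into a dictionary. This is only a sketch (Python 2, to match the script below); the converter presumably loads it through its own parameter machinery rather than a bare execfile, and the spot-checked values are simply the ones appearing in this file.

    # Sketch only: read the parameter file into a dict and spot-check a few values.
    params = {}
    execfile("input/km/psrfits.ini", {}, params)
    print params["guppi_input_roots"]     # list with the 20110811 wigglez1hr root
    print params["scans"]                 # (30,)
    print params["output_root"]           # '/home/scratch/kmasui/tmp/'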
129 changes: 65 additions & 64 deletions scripts/psrfits_to_sdfits.py
@@ -94,32 +94,23 @@ def execute(self, nprocesses=1) :
# Open the go fits file.
scan_log_files = scan_log.field('FILEPATH')[
scan_log.field('SCAN')==initial_scan]
# Find the one that's a GO fits file.
found_file = False
for file_name in scan_log_files :
if 'GO' in file_name :
go_file = (log_dir + "/GO" + file_name.split("GO")[1])
found_file = True
break
if not found_file :
raise ce.DataError("GO file not in scan log file: " + log_dir
+ "/ScanLog.fits. Scan: "
+ str(initial_scan))
go_file = log_dir + get_filename_from_key(scan_log_files, "GO")
go_hdu = pyfits.open(go_file)[0].header

# From the go information get the source and the scan type.
object = go_hdu["OBJECT"].strip()
self.proceedure = go_hdu["PROCNAME"].strip().lower()

if params["combine_map_scans"] :
# Read the go file and figure out all the scans in the same map.
# Read the go file and figure out all the scans in the same
# map.
# Check the go files for all scans make sure everything is
# consistant.
self.n_scans_proc = go_hdu["PROCSIZE"]
# Which scan this is of the sequence (1 indexed).
self.initial_scan_ind = go_hdu["PROCSEQN"]
scans_this_file = (sp.arange(self.n_scans_proc, dtype=int) + 1 -
self.initial_scan_ind + initial_scan)
scans_this_file = (sp.arange(self.n_scans_proc, dtype=int) + 1
- self.initial_scan_ind + initial_scan)
scans_this_file = list(scans_this_file)

else :
@@ -178,8 +169,8 @@ def execute(self, nprocesses=1) :
out_file = (params["output_root"] + session + '_' + object + '_' +
self.proceedure + '_' + str_scan_range + '.fits')

# Output data is pretty large so we'd better protect the pyfits part
# in a process lest memory leaks kill us.
# Output data is pretty large so we'd better protect the
# pyfits part in a process lest memory leaks kill us.
if len(Block_list) > 0 :
p = mp.Process(target=out_write, args=(Block_list, out_file))
p.start()
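
The comment above explains the design choice: pyfits can leak memory, so the write happens in a short-lived child process and the leak dies with the child. A minimal, self-contained sketch of that pattern follows; write_blocks is a made-up stand-in for the script's out_write.

    # Sketch of the process-isolation pattern, not the script's actual writer.
    import multiprocessing as mp

    def write_blocks(block_list, out_file):
        # Stand-in for out_write: build the output HDUs and write them to disk.
        pass

    p = mp.Process(target=write_blocks, args=([], "example_output.fits"))
    p.start()
    p.join()   # block until the write finishes; leaked memory is freed with the child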
@@ -202,16 +193,7 @@ def processfile(self, scan, Pipe) :
# First find the GO fits file.
scan_log_files = scan_log.field('FILEPATH')[
scan_log.field('SCAN')==scan]
# Find the one that's a GO fits file.
found_file = False
for file_name in scan_log_files :
if 'GO' in file_name :
go_file = (log_dir + "/GO" + file_name.split("GO")[1])
found_file = True
break
if not found_file :
raise ce.DataError("GO file not in scan log file: " + log_dir
+ "/ScanLog.fits. Scan: " + str(scan))
go_file = log_dir + get_filename_from_key(scan_log_files, "GO")
# If we are combining multiple scans from one proceedure, make sure
# that they all came from the same one (ie that the proceedure
# wasn't aborted).
@@ -224,34 +206,10 @@ def processfile(self, scan, Pipe) :
Pipe.send(-1)
return

# Now find the Antenna fits file.
found_file = False
for file_name in scan_log_files :
if 'Antenna' in file_name :
antenna_file = (log_dir + "/Antenna" +
file_name.split("Antenna")[1])
found_file = True
break
if not found_file :
raise ce.DataError("Antenna file not in Scan Log")
# Open the antenna fits file.
antenna_hdu_list = pyfits.open(antenna_file, 'readonly')
ant_main = antenna_hdu_list[0].header
ant_header = antenna_hdu_list[2].header
ant_data = antenna_hdu_list[2].data
# Get the time of the scan midpoint in seconds since 00:00 UTC.
# Times in Julian Days.
ant_times = ant_data.field("DMJD")
# Times in days since midnight UTC on day of scan start. Time in this
# array become greater than one if scan spans midnight UTC.
ant_times -= (ant_times[0] - ant_times[0]%1.0)
# Seconds.
ant_times *= 24.0 * 3600.0
# Get the az and el out of the antenna.
ant_az = ant_data.field("OBSC_AZ")
ant_el = ant_data.field("OBSC_EL")
az_interp = interp.interp1d(ant_times, ant_az)
el_interp = interp.interp1d(ant_times, ant_el)
# Now get the pointing data from the antenna fits file.
antenna_file = (log_dir + get_filename_from_key(scan_log_files,
"/Antenna"))
az_interp, el_interp, ant_time_range = get_antenna_data(antenna_file)

# Get the guppi file name. We check all of the guppi input roots to
# see if there is a corresponding file.
@@ -330,7 +288,7 @@ def processfile(self, scan, Pipe) :
# fits file as a good (but scan strategy specific) check.
if self.proceedure == 'ralongmap' :
zenith_angle = psrdata[0]["TEL_ZEN"]
if not sp.allclose(90.0 - zenith_angle, ant_el, atol=0.1) :
if not sp.allclose(90.0 - zenith_angle, el_interp.y, atol=0.1) :
raise ce.DataError("Antenna el disagrees with guppi el.")
# Occationally, the guppi integrates longer than the scan and the
# antenna fits file won't cover the first or last few records.
@@ -343,10 +301,10 @@ def processfile(self, scan, Pipe) :
- sample_time*ntime/2)
latest = (start_seconds + record_offsets[n_records-1]
+ sample_time*ntime/2)
if earliest < min(ant_times) :
if earliest < ant_time_range[0] :
start_record -= 1
print "Discarded first record due to antenna time mismatch."
if latest > max(ant_times) :
if latest > ant_time_range[1] :
n_records -= 1
print "Discarded last record due to antenna time mismatch."

@@ -377,7 +335,7 @@ def processfile(self, scan, Pipe) :
if psrheader["POL_TYPE"].strip() == 'IQUV' :
Data.set_field('CRVAL4', [1,2,3,4], ('pol',), '1I')
else :
raise ce.DataError("Unrecognized polarizations.")
raise ce.DataError("Unsupported polarizations.")
Data.set_field('OBSERVER', psrmain['OBSERVER'], (), '32A')
if get_cal :
Data.set_field('CAL', ['T', 'F'], ('cal',), '1A')
@@ -443,11 +401,11 @@ def processfile(self, scan, Pipe) :
* resampled_time)
# Make sure that our pointing data covers the guppi data time
# range within the 0.1 second accuracy.
if ((max(time) - max(ant_times) > 0.) or
(min(time) - min(ant_times) < 0.)) :
if ((max(time) - ant_time_range[1] > 0.) or
(min(time) - ant_time_range[0] < 0.)) :
message = ("Guppi data time range not covered by antenna."
" Antenna: file " + antenna_file + " range "
+ str((min(ant_times), max(ant_times)))
+ str((ant_time_range[0], ant_time_range[1]))
+ " guppi: file " + guppi_file + " range "
+ str((min(time), max(time))))
raise ce.DataError(message)
@@ -481,8 +439,50 @@ def processfile(self, scan, Pipe) :
if self.feedback > 1 :
print
# Send Data back on the pipe.
#XXX
print
print Data.data[0, 0, 0, 0], Data.data[20, 0, 1, 400],
print Data.data[40, 2, 1, 300], Data.data[170, 1, 0, 200]
Pipe.send(Data)

# These functions not unit tested, simply made functions for convienience.

def get_antenna_data(ant_filename) :

# Open the antenna fits file.
antenna_hdu_list = pyfits.open(ant_filename, 'readonly')
ant_main = antenna_hdu_list[0].header
ant_header = antenna_hdu_list[2].header
ant_data = antenna_hdu_list[2].data
# Get the time of the scan midpoint in seconds since 00:00 UTC.
# Times in Julian Days.
ant_times = ant_data.field("DMJD")
# Times in days since midnight UTC on day of scan start. Time in this
# array become greater than one if scan spans midnight UTC.
ant_times -= (ant_times[0] - ant_times[0]%1.0)
# Seconds.
ant_times *= 24.0 * 3600.0
# Get the az and el out of the antenna.
ant_az = ant_data.field("OBSC_AZ")
ant_el = ant_data.field("OBSC_EL")
az_interp = interp.interp1d(ant_times, ant_az)
el_interp = interp.interp1d(ant_times, ant_el)
return az_interp, el_interp, (min(ant_times), max(ant_times))
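
A short usage sketch for the helper above; the Antenna file name is invented. The time handling is worth one worked number: if the first DMJD value is 55784.75 (the 55784 matches the guppi file root in the parameter file), the code subtracts the whole day 55784 and multiplies the remaining 0.75 day by 24*3600, giving 64800 seconds after 00:00 UTC. The returned interp1d objects raise if evaluated outside the antenna's time span, which is why the (min, max) range is passed back alongside them.

    # Hypothetical usage (the Antenna file name below is made up).
    antenna_file = "/home/gbtdata/AGBT10B_036_88/Antenna/2011_08_11_00:00:00.fits"
    az_interp, el_interp, ant_time_range = get_antenna_data(antenna_file)
    t_mid = 0.5 * (ant_time_range[0] + ant_time_range[1])   # scan midpoint, seconds since 00:00 UTC
    print "azimuth, elevation at scan midpoint:", az_interp(t_mid), el_interp(t_mid)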


def get_filename_from_key(file_list, key) :
found_file = False
for file_name in file_list :
if key in file_name :
file = ("/" + key + file_name.split(key)[1])
found_file = True
break
if not found_file :
raise ce.DataError(key + " file not in scan log.")
else :
return file
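
Since the comment above notes these helpers are not unit tested, here is a hedged sanity check, not the project's tests: the scan-log entries are invented but follow the GO/Antenna naming the script relies on. The key is matched as a substring and everything after it is kept, with "/" and the key prepended.

    # Illustrative check only -- file names are invented.
    example_log_files = ["2011_08_11_00:00:00/GO/2011_08_11_00:00:00.fits",
                         "2011_08_11_00:00:00/Antenna/2011_08_11_00:00:00.fits"]
    assert get_filename_from_key(example_log_files, "GO") == "/GO/2011_08_11_00:00:00.fits"
    assert get_filename_from_key(example_log_files, "Antenna") == \
           "/Antenna/2011_08_11_00:00:00.fits"
    # A key that matches no entry raises ce.DataError:
    # get_filename_from_key(example_log_files, "Receiver")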


# Functions for the most error prone parts of the above script. Unit testing
# applied to these.

Expand Down Expand Up @@ -522,9 +522,10 @@ def get_cal_mask(data, n_bins_cal) :
ntime = data.shape[0]

if ntime%n_bins_cal != 0 :
raise ValueError("Number of bins in a cal period must devide the number"
" of time bins in a subintegration (generally number"
" of bins in a cal period should be a power of 2).")
msg = ("Number of bins in a cal period must devide the number"
" of time bins in a subintegration (generally number"
" of bins in a cal period should be a power of 2).")
raise ValueError(msg)

# Fold the Stokes I data on the cal period to figure out the phase.
# Sum over the frequencies.
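
A worked example of the divisibility check above, with made-up numbers: 2048 time bins per subintegration fold cleanly on a 64-bin cal period, while a 100-bin period leaves a remainder and would hit the ValueError.

    # Illustrative numbers only, not taken from the data.
    ntime = 2048
    print ntime % 64     # 0  -> a 64 bin cal period divides evenly, check passes
    print ntime % 100    # 48 -> a 100 bin cal period would raise the ValueError above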