diff --git a/nuclockutils/barycorr.py b/nuclockutils/barycorr.py
index 367ee37..6a0f053 100644
--- a/nuclockutils/barycorr.py
+++ b/nuclockutils/barycorr.py
@@ -108,7 +108,8 @@ def get_barycentric_correction(orbfile, parfile, dt=5, ephem='DE421'):
     )
     bats = modelin.get_barycentric_toas(ts)
     return interp1d(mets, (bats.value - mjds) * 86400,
-        assume_sorted=True, bounds_error=False, fill_value='extrapolate')
+        assume_sorted=True, bounds_error=False, fill_value='extrapolate',
+        kind='quadratic')
 
 
 def correct_times(times, bary_fun, clock_fun=None):
@@ -128,6 +129,7 @@ def apply_clock_correction(
     bary_fun = get_barycentric_correction(orbfile, parfile, ephem=ephem)
     with fits.open(fname, memmap=True) as hdul:
         times = hdul[1].data['TIME']
+        unique_times = np.unique(times)
         clock_fun = None
         if clockfile is not None and os.path.exists(clockfile):
             hduname = 'NU_FINE_CLOCK'
@@ -135,8 +137,9 @@
                 hduname = 'NU_FINE_CLOCK_NODETREND'
             log.info(f"Read extension {hduname}")
             clocktable = Table.read(clockfile, hdu=hduname)
-            clock_corr, _ = interpolate_clock_function(clocktable, times)
-            clock_fun = interp1d(times, clock_corr,
+            clock_corr, _ = \
+                interpolate_clock_function(clocktable, unique_times)
+            clock_fun = interp1d(unique_times, clock_corr,
                 assume_sorted=True, bounds_error=False, fill_value='extrapolate')
         elif clockfile is not None and not os.path.exists(clockfile):
             raise FileNotFoundError(f"Clock file {clockfile} not found")
@@ -147,11 +150,20 @@
         if hdu.data is not None and keyname in hdu.data.names:
             log.info(f"Updating column {keyname}")
             hdu.data[keyname] = \
-                correct_times(hdu.data[keyname] + shift_times, bary_fun, clock_fun)
+                correct_times(hdu.data[keyname] + shift_times,
+                              bary_fun, clock_fun)
         if keyname in hdu.header:
             log.info(f"Updating header keyword {keyname}")
-            hdu.header[keyname] = \
-                correct_times(hdu.header[keyname] + shift_times, bary_fun, clock_fun)
+            corrected_time = \
+                correct_times(hdu.header[keyname] + shift_times,
+                              bary_fun, clock_fun)
+            if not np.isfinite(corrected_time):
+                log.error(
+                    f"Bad value when updating header keyword {keyname}: "
+                    f"{hdu.header[keyname]}->{corrected_time}")
+            else:
+                hdu.header[keyname] = corrected_time
+
         hdu.header['CREATOR'] = f'NuSTAR Clock Utils - v. {version}'
         hdu.header['DATE'] = Time.now().fits
 
diff --git a/nuclockutils/clean_clock/app.py b/nuclockutils/clean_clock/app.py
index d04b9cb..eec6124 100644
--- a/nuclockutils/clean_clock/app.py
+++ b/nuclockutils/clean_clock/app.py
@@ -153,7 +153,7 @@ def plot_dash(all_data, table_new, gti, all_nustar_obs,
         else:
             all_nustar_obs[col][-1] *= 0
 
-    idx = np.searchsorted(all_nustar_obs['MET'][:-1], table_new['met'])
+    idx = np.searchsorted(all_nustar_obs['MET'][1:], table_new['met'])
 
     all_nustar_obs_reindex = all_nustar_obs[idx]
diff --git a/nuclockutils/diagnostics/bary_and_fold_all.py b/nuclockutils/diagnostics/bary_and_fold_all.py
index d66c936..e3a4acc 100644
--- a/nuclockutils/diagnostics/bary_and_fold_all.py
+++ b/nuclockutils/diagnostics/bary_and_fold_all.py
@@ -2,6 +2,7 @@
 import glob
 import re
 import traceback
+import subprocess as sp
 
 import numpy as np
 import matplotlib.pyplot as plt
@@ -41,12 +42,14 @@ def get_observing_mjd(fname):
     return np.array([tstart, tstop]) / 86400 + mjdref
 
 
-def fold_file_to_ephemeris(fname, parfile, emin=None, emax=None, nbin=128, outroot="out"):
+def fold_file_to_ephemeris(fname, parfile, emin=None, emax=None,
+                           nbin=128, outroot="out"):
     mjdstart, mjdstop = get_observing_mjd(fname)
     mjdmean = (mjdstart + mjdstop) / 2
     ephem = get_ephemeris_from_parfile(parfile)
 
-    log.info(f"Calculating correction function between {mjdstart} and {mjdstop}...")
+    log.info(f"Calculating correction function between "
+             f"{mjdstart} and {mjdstop}...")
 
     correction_fun = get_phase_from_ephemeris_file(mjdstart, mjdstop, parfile)
     events = get_events_from_fits(fname)
@@ -90,6 +93,9 @@ def main(args=None):
     parser.add_argument('-c', '--clockfile', required=True, type=str,
                         help="Clock correction file")
     parser.add_argument('--bary-suffix', default='_bary')
+    parser.add_argument('--plot-phaseogram', default=False,
+                        action='store_true',
+                        help='Plot the phaseogram (requires PINT)')
 
     args = parser.parse_args(args)
 
@@ -99,7 +105,9 @@
     emax = args.emax
     nbin = args.nbin
 
-    outdir = os.path.basename(args.clockfile).replace('.gz', '').replace('.fits', '')
+    outdir = \
+        os.path.basename(args.clockfile
+                         ).replace('.gz', '').replace('.fits', '')
     for fname in args.fnames:
         basename = os.path.basename(fname)
         try:
@@ -125,16 +133,25 @@
             bary_file = outroot + args.bary_suffix + '.evt.gz'
             mkdir(outdir)
+
             if not os.path.exists(bary_file):
-                cmd = f'{fname} {attorb_file} -p {parfile} -c {args.clockfile} -o {bary_file}'
+                cmd = f'{fname} {attorb_file} -p {parfile} ' \
+                      f'-c {args.clockfile} -o {bary_file}'
                 nubarycorr(cmd.split())
+
+            if args.plot_phaseogram:
+                cmd = f'photonphase {bary_file} {parfile} ' \
+                      f'--plotfile {outroot}_phaseogram.jpg'
+                sp.check_call(cmd.split())
+
             if emin is not None or emax is not None:
                 outroot += f'_{emin}-{emax}keV'
 
             fold_file_to_ephemeris(
                 bary_file, parfile, emin=emin, emax=emax, nbin=nbin,
                 outroot=outroot)
+
         except Exception as e:
             log.error(f"Error processing {fname}")
             tb = traceback.format_exc()
@@ -144,7 +161,8 @@
 
     cmd = glob.glob(os.path.join(outdir, '*.ecsv'))
     if len(cmd) > 0:
-        cmd += ['--outfile', outdir + '.jpg', '--template', 'dead_time_crab_template.ecsv']
+        cmd += ['--outfile', outdir + '.jpg',
+                '--template', 'profile_template.ecsv']
         main_compare_pulses(cmd)
 
diff --git a/nuclockutils/diagnostics/compare_pulses.py b/nuclockutils/diagnostics/compare_pulses.py
index 26f0444..dcfaa66 100644
--- a/nuclockutils/diagnostics/compare_pulses.py
+++ b/nuclockutils/diagnostics/compare_pulses.py
@@ -1,9 +1,11 @@
 import sys
+import os
+import numpy as np
 from astropy.time import Time
 from astropy.table import Table
 import matplotlib.pyplot as plt
 from uncertainties import ufloat
-import numpy as np
+from astropy import log
 from statsmodels.robust import mad
 
 from .fftfit import fftfit
@@ -28,7 +30,9 @@ def format_profile_and_get_phase(file, template=None):
     freq = table.meta['F0']
     fdot = table.meta['F1']
-    fddot = table.meta['F2']
+    fddot = 0
+    if 'F2' in table.meta:
+        fddot = table.meta['F2']
 
     delta_t = (mjd - pepoch) * 86400
     f0 = freq + fdot * delta_t + 0.5 * fddot * delta_t ** 2
@@ -74,7 +78,7 @@ def main(args=None):
     mjds = []
     plt.figure(figsize=(6, 9))
     ref_profile = None
-    if args.template is not None:
+    if args.template is not None and os.path.exists(args.template):
         mjd, phase, prof, _, _, period_ms = \
             format_profile_and_get_phase(args.template, template=None)
         phase_time_plot, prof_plot = format_for_plotting(phase, prof, period_ms)
@@ -87,6 +91,7 @@
     if ref_profile is None:
         log.warning("No template provided; using maxima for phase calculation")
 
+
     for i, f in enumerate(files):
         mjd, phase, prof, phase_res, phase_res_err, period_ms = \
             format_profile_and_get_phase(f, template=ref_profile)
@@ -100,9 +105,13 @@
         mjds.append(mjd)
         maxs.append(local_max)
 
-        plt.plot(phase_time_plot, (i + 1) * 0.2 + prof_plot, drawstyle='steps-mid', label=f, alpha=0.5, color='grey')
+        plt.plot(phase_time_plot, (i + 1) * 0.2 + prof_plot,
+                 drawstyle='steps-mid', label=f, alpha=0.5, color='grey')
+        if len(files) < 2:
+            continue
         for plot_shift in [0, period_ms, 2 * period_ms]:
-            plt.scatter(plot_shift + phase_res * period_ms, (i + 1) * 0.2, s=10, color='b')
+            plt.scatter(plot_shift + phase_res * period_ms, (i + 1) * 0.2,
+                        s=10, color='b')
 
     mjds = np.array(mjds)
     maxs = np.array(maxs) - ref_max
@@ -125,12 +134,18 @@
     fit_shift = ufloat(fit_max, tot_err)
     # print(f"Fitted Mean shift = {fit_shift}" + " ms")
     # print(f"Mean shift = {shift}" + " ms")
-    plt.title(f"Mean shift = {fit_shift}" + " ms")
+    if len(files) >= 2:
+        plt.title(f"Mean shift = {fit_shift}" + " ms")
+    else:
+        plt.title(files[0])
+
     plt.xlim([0, period_ms * 2])
     plt.ylim([-0.1, None])
     plt.axvline(ref_max, alpha=0.5, color='b')
-    for t0 in [0, period_ms, 2 * period_ms]:
-        plt.axvspan(t0 + fit_max - tot_err, t0 + fit_max + tot_err, alpha=0.5, color='red')
+    if len(files) >= 2:
+        for t0 in [0, period_ms, 2 * period_ms]:
+            plt.axvspan(t0 + fit_max - tot_err, t0 + fit_max + tot_err,
+                        alpha=0.5, color='red')
 
     if len(maxs) <= 5:
         plt.legend()
diff --git a/nuclockutils/nustarclock.py b/nuclockutils/nustarclock.py
index 4e0045c..57f8218 100644
--- a/nuclockutils/nustarclock.py
+++ b/nuclockutils/nustarclock.py
@@ -138,7 +138,7 @@ def spline_detrending(clock_offset_table, temptable, outlier_cuts=None):
         clock_residuals = clock_residuals[better_points]
 
     detrend_fun = spline_through_data(clock_offset_table['met'],
-                                      clock_residuals, downsample=4)
+                                      clock_residuals, downsample=20)
 
     r_std = residual_roll_std(
         clock_residuals - detrend_fun(clock_offset_table['met']))
@@ -202,7 +202,7 @@ def eliminate_trends_in_residuals(temp_table, clock_offset_table,
 #
 #             if p_new is not None:
 #                 p = p_new
-        poly_order = min(met.size // 300 + 1, 10)
+        poly_order = min(met.size // 300 + 1, 2)
         p0 = np.zeros(poly_order + 1)
         p0[0] = q
         if p0.size > 1:
@@ -1317,16 +1317,13 @@ def clock_ppm_model(nustar_met, temperature, craig_fit=False):
     # offset = 13.8874536353 - 4.095179312091239e-4
     offset = 13.91877 -0.02020554  # sum the "e" parameter from long term
     ppm_vs_T_pars = [-0.07413, 0.00158]
-    # ppm_vs_T_pars = [-0.073795, 0.0015002]
-    # ppm_vs_time_pars = [0.008276, 256., -220.043,
-    #     3.408586903702425e-05]
+    ppm_vs_time_pars = [0.00874492067, 100.255995, -0.251789345, 0.0334934847]
 
     if craig_fit:
         offset = 1.3847529679329989e+01
         ppm_vs_T_pars = [-7.3964896025586133e-02, 1.5055740907563737e-03]
-
     temp = (temperature - T0)
 
     ftemp = offset + ppm_vs_T_pars[0] * temp + \
         ppm_vs_T_pars[1] * temp ** 2  # Temperature dependence
diff --git a/nuclockutils/utils.py b/nuclockutils/utils.py
index f5ad790..82de500 100644
--- a/nuclockutils/utils.py
+++ b/nuclockutils/utils.py
@@ -142,7 +142,7 @@ def get_obsid_list_from_heasarc(cache_file='heasarc.hdf5'):
     try:
         heasarc = Heasarc()
         all_nustar_obs = heasarc.query_object(
-            '*', 'numaster', resultmax=10000,
+            '*', 'numaster', resultmax=100000,
            fields='OBSID,TIME,END_TIME,NAME,OBSERVATION_MODE,OBS_TYPE')
     except Exception:
         return Table({
@@ -242,8 +242,8 @@
     return rolling_stat(np.std, a, window, pad, axis=-1)
 
 
-def spline_through_data(x, y, k=2, grace_intv=1000., smoothing_factor=0.0001,
-                        downsample=5):
+def spline_through_data(x, y, k=2, grace_intv=1000., smoothing_factor=0.001,
+                        downsample=10):
     """Pass a spline through the data
 
     Examples