Better detrending #17

Merged 9 commits on Aug 12, 2020
24 changes: 18 additions & 6 deletions nuclockutils/barycorr.py
@@ -108,7 +108,8 @@ def get_barycentric_correction(orbfile, parfile, dt=5, ephem='DE421'):
)
bats = modelin.get_barycentric_toas(ts)
return interp1d(mets, (bats.value - mjds) * 86400,
assume_sorted=True, bounds_error=False, fill_value='extrapolate')
assume_sorted=True, bounds_error=False, fill_value='extrapolate',
kind='quadratic')


def correct_times(times, bary_fun, clock_fun=None):
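Note on the change above: switching `interp1d` to `kind='quadratic'` makes the barycentric correction smooth between the `dt`-spaced sample points instead of piecewise linear. A minimal sketch of the resulting interpolant, using a toy correction curve in place of the real PINT output:

```python
import numpy as np
from scipy.interpolate import interp1d

# Toy stand-in for the (MET, barycentric correction) samples computed every dt=5 s.
mets = np.arange(0.0, 100.0, 5.0)
corr = 1e-3 * np.sin(mets / 30.0)  # smooth, slowly varying correction in seconds

# Quadratic interpolation follows the curvature of the correction;
# 'extrapolate' keeps the function usable just outside the sampled range.
bary_fun = interp1d(mets, corr, kind='quadratic', assume_sorted=True,
                    bounds_error=False, fill_value='extrapolate')

print(bary_fun(12.5))  # evaluate between grid points
```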
@@ -128,15 +129,17 @@ def apply_clock_correction(
bary_fun = get_barycentric_correction(orbfile, parfile, ephem=ephem)
with fits.open(fname, memmap=True) as hdul:
times = hdul[1].data['TIME']
unique_times = np.unique(times)
clock_fun = None
if clockfile is not None and os.path.exists(clockfile):
hduname = 'NU_FINE_CLOCK'
if nodetrend:
hduname = 'NU_FINE_CLOCK_NODETREND'
log.info(f"Read extension {hduname}")
clocktable = Table.read(clockfile, hdu=hduname)
clock_corr, _ = interpolate_clock_function(clocktable, times)
clock_fun = interp1d(times, clock_corr,
clock_corr, _ = \
interpolate_clock_function(clocktable, unique_times)
clock_fun = interp1d(unique_times, clock_corr,
assume_sorted=True, bounds_error=False, fill_value='extrapolate')
elif clockfile is not None and not os.path.exists(clockfile):
raise FileNotFoundError(f"Clock file {clockfile} not found")
@@ -147,11 +150,20 @@
if hdu.data is not None and keyname in hdu.data.names:
log.info(f"Updating column {keyname}")
hdu.data[keyname] = \
correct_times(hdu.data[keyname] + shift_times, bary_fun, clock_fun)
correct_times(hdu.data[keyname] + shift_times,
bary_fun, clock_fun)
if keyname in hdu.header:
log.info(f"Updating header keyword {keyname}")
hdu.header[keyname] = \
correct_times(hdu.header[keyname] + shift_times, bary_fun, clock_fun)
corrected_time = \
correct_times(hdu.header[keyname] + shift_times,
bary_fun, clock_fun)
if not np.isfinite(corrected_time):
log.error(
f"Bad value when updating header keyword {keyname}: "
f"{hdu.header[keyname]}->{corrected_time}")
else:
hdu.header[keyname] = corrected_time


hdu.header['CREATOR'] = f'NuSTAR Clock Utils - v. {version}'
hdu.header['DATE'] = Time.now().fits
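Another change in this file evaluates the clock correction only on the unique event times and then re-interpolates, which avoids passing repeated x values to `interp1d` and skips redundant work when many photons share a timestamp. A rough sketch of the pattern, with a dummy correction in place of `interpolate_clock_function`:

```python
import numpy as np
from scipy.interpolate import interp1d

# Event times straight from the TIME column often contain duplicates.
times = np.array([10.0, 10.0, 12.0, 15.0, 15.0, 20.0])
unique_times = np.unique(times)  # sorted, duplicate-free

# Dummy stand-in for interpolate_clock_function(clocktable, unique_times).
clock_corr = 1e-3 * unique_times

# Strictly increasing abscissas are safe for interp1d; repeated ones are not.
clock_fun = interp1d(unique_times, clock_corr, assume_sorted=True,
                     bounds_error=False, fill_value='extrapolate')

per_event_corr = clock_fun(times)  # duplicates handled transparently
```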
2 changes: 1 addition & 1 deletion nuclockutils/clean_clock/app.py
@@ -153,7 +153,7 @@ def plot_dash(all_data, table_new, gti, all_nustar_obs,
else:
all_nustar_obs[col][-1] *= 0

idx = np.searchsorted(all_nustar_obs['MET'][:-1], table_new['met'])
idx = np.searchsorted(all_nustar_obs['MET'][1:], table_new['met'])

all_nustar_obs_reindex = all_nustar_obs[idx]

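The `searchsorted` change above fixes an off-by-one in mapping each `met` to its observation: searching the start times shifted by one returns the row whose interval actually contains the value. A small illustration with hypothetical start times:

```python
import numpy as np

obs_start_met = np.array([0.0, 100.0, 200.0, 300.0])  # hypothetical observation starts
met = np.array([50.0, 150.0, 250.0])

# New behaviour: each met maps to the observation that starts before it.
print(np.searchsorted(obs_start_met[1:], met))   # [0 1 2]

# Old behaviour pointed one row too far.
print(np.searchsorted(obs_start_met[:-1], met))  # [1 2 3]
```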
28 changes: 23 additions & 5 deletions nuclockutils/diagnostics/bary_and_fold_all.py
@@ -2,6 +2,7 @@
import glob
import re
import traceback
import subprocess as sp

import numpy as np
import matplotlib.pyplot as plt
@@ -41,12 +42,14 @@ def get_observing_mjd(fname):
return np.array([tstart, tstop]) / 86400 + mjdref


def fold_file_to_ephemeris(fname, parfile, emin=None, emax=None, nbin=128, outroot="out"):
def fold_file_to_ephemeris(fname, parfile, emin=None, emax=None,
nbin=128, outroot="out"):
mjdstart, mjdstop = get_observing_mjd(fname)
mjdmean = (mjdstart + mjdstop) / 2
ephem = get_ephemeris_from_parfile(parfile)

log.info(f"Calculating correction function between {mjdstart} and {mjdstop}...")
log.info(f"Calculating correction function between "
f"{mjdstart} and {mjdstop}...")
correction_fun = get_phase_from_ephemeris_file(mjdstart, mjdstop, parfile)

events = get_events_from_fits(fname)
@@ -90,6 +93,9 @@ def main(args=None):
parser.add_argument('-c', '--clockfile', required=True, type=str,
help="Clock correction file")
parser.add_argument('--bary-suffix', default='_bary')
parser.add_argument('--plot-phaseogram', default=False,
action='store_true',
help='Plot the phaseogram (requires PINT)')

args = parser.parse_args(args)

@@ -99,7 +105,9 @@
emax = args.emax
nbin = args.nbin

outdir = os.path.basename(args.clockfile).replace('.gz', '').replace('.fits', '')
outdir = \
os.path.basename(args.clockfile
).replace('.gz', '').replace('.fits', '')
for fname in args.fnames:
basename = os.path.basename(fname)
try:
@@ -125,16 +133,25 @@

bary_file = outroot + args.bary_suffix + '.evt.gz'
mkdir(outdir)

if not os.path.exists(bary_file):
cmd = f'{fname} {attorb_file} -p {parfile} -c {args.clockfile} -o {bary_file}'
cmd = f'{fname} {attorb_file} -p {parfile} ' \
f'-c {args.clockfile} -o {bary_file}'

nubarycorr(cmd.split())

if args.plot_phaseogram:
cmd = f'photonphase {bary_file} {parfile} ' \
f'--plotfile {outroot}_phaseogram.jpg'
sp.check_call(cmd.split())

if emin is not None or emax is not None:
outroot += f'_{emin}-{emax}keV'

fold_file_to_ephemeris(
bary_file, parfile, emin=emin, emax=emax, nbin=nbin,
outroot=outroot)

except Exception as e:
log.error(f"Error processing {fname}")
tb = traceback.format_exc()
@@ -144,7 +161,8 @@

cmd = glob.glob(os.path.join(outdir, '*.ecsv'))
if len(cmd) > 0:
cmd += ['--outfile', outdir + '.jpg', '--template', 'dead_time_crab_template.ecsv']
cmd += ['--outfile', outdir + '.jpg',
'--template', 'profile_template.ecsv']
main_compare_pulses(cmd)


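The new `--plot-phaseogram` option shells out to PINT's `photonphase` script, which may not be installed. A slightly more defensive variant of the same call (a sketch only; `bary_file`, `parfile` and `outroot` are assumed to be defined as in the loop above):

```python
import shutil
import subprocess as sp


def plot_phaseogram(bary_file, parfile, outroot):
    """Run PINT's photonphase on a barycentered event file, if available."""
    if shutil.which('photonphase') is None:
        # PINT is an optional dependency here; skip the plot rather than crash.
        print("photonphase not found, skipping phaseogram")
        return
    cmd = ['photonphase', bary_file, parfile,
           '--plotfile', f'{outroot}_phaseogram.jpg']
    sp.check_call(cmd)
```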
31 changes: 23 additions & 8 deletions nuclockutils/diagnostics/compare_pulses.py
@@ -1,9 +1,11 @@
import sys
import os
import numpy as np
from astropy.time import Time
from astropy.table import Table
import matplotlib.pyplot as plt
from uncertainties import ufloat
import numpy as np
from astropy import log
from statsmodels.robust import mad

from .fftfit import fftfit
@@ -28,7 +30,9 @@ def format_profile_and_get_phase(file, template=None):

freq = table.meta['F0']
fdot = table.meta['F1']
fddot = table.meta['F2']
fddot = 0
if 'F2' in table.meta:
fddot = table.meta['F2']
delta_t = (mjd - pepoch) * 86400
f0 = freq + fdot * delta_t + 0.5 * fddot * delta_t ** 2
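Making `F2` optional means the local frequency falls back to a first-order (F0, F1) expansion when the par file carries no frequency second derivative; the line above evaluates the Taylor expansion f(dt) = F0 + F1*dt + 0.5*F2*dt^2. A quick worked example with made-up, roughly Crab-like numbers:

```python
# Illustrative spin parameters only, not taken from a real par file.
F0 = 29.6      # Hz
F1 = -3.7e-10  # Hz/s
F2 = 0.0       # Hz/s^2 -- the fallback used when 'F2' is missing from the metadata

delta_t = (58600.0 - 58500.0) * 86400  # seconds between PEPOCH and the profile epoch

f0_local = F0 + F1 * delta_t + 0.5 * F2 * delta_t ** 2
period_ms = 1000.0 / f0_local
print(f0_local, period_ms)
```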

@@ -74,7 +78,7 @@ def main(args=None):
mjds = []
plt.figure(figsize=(6, 9))
ref_profile = None
if args.template is not None:
if args.template is not None and os.path.exists(args.template):
mjd, phase, prof, _, _, period_ms = \
format_profile_and_get_phase(args.template, template=None)
phase_time_plot, prof_plot = format_for_plotting(phase, prof, period_ms)
@@ -87,6 +91,7 @@

if ref_profile is None:
log.warning("No template provided; using maxima for phase calculation")

for i, f in enumerate(files):
mjd, phase, prof, phase_res, phase_res_err, period_ms = \
format_profile_and_get_phase(f, template=ref_profile)
@@ -100,9 +105,13 @@
mjds.append(mjd)
maxs.append(local_max)

plt.plot(phase_time_plot, (i + 1) * 0.2 + prof_plot, drawstyle='steps-mid', label=f, alpha=0.5, color='grey')
plt.plot(phase_time_plot, (i + 1) * 0.2 + prof_plot,
drawstyle='steps-mid', label=f, alpha=0.5, color='grey')
if len(files) < 2:
continue
for plot_shift in [0, period_ms, 2 * period_ms]:
plt.scatter(plot_shift + phase_res * period_ms, (i + 1) * 0.2, s=10, color='b')
plt.scatter(plot_shift + phase_res * period_ms, (i + 1) * 0.2,
s=10, color='b')

mjds = np.array(mjds)
maxs = np.array(maxs) - ref_max
@@ -125,12 +134,18 @@
fit_shift = ufloat(fit_max, tot_err)
# print(f"Fitted Mean shift = {fit_shift}" + " ms")
# print(f"Mean shift = {shift}" + " ms")
plt.title(f"Mean shift = {fit_shift}" + " ms")
if len(files) >= 2:
plt.title(f"Mean shift = {fit_shift}" + " ms")
else:
plt.title(files[0])

plt.xlim([0, period_ms * 2])
plt.ylim([-0.1, None])
plt.axvline(ref_max, alpha=0.5, color='b')
for t0 in [0, period_ms, 2 * period_ms]:
plt.axvspan(t0 + fit_max - tot_err, t0 + fit_max + tot_err, alpha=0.5, color='red')
if len(files) >= 2:
for t0 in [0, period_ms, 2 * period_ms]:
plt.axvspan(t0 + fit_max - tot_err, t0 + fit_max + tot_err,
alpha=0.5, color='red')

if len(maxs) <= 5:
plt.legend()
9 changes: 3 additions & 6 deletions nuclockutils/nustarclock.py
@@ -138,7 +138,7 @@ def spline_detrending(clock_offset_table, temptable, outlier_cuts=None):
clock_residuals = clock_residuals[better_points]

detrend_fun = spline_through_data(clock_offset_table['met'],
clock_residuals, downsample=4)
clock_residuals, downsample=20)

r_std = residual_roll_std(
clock_residuals - detrend_fun(clock_offset_table['met']))
@@ -202,7 +202,7 @@ def eliminate_trends_in_residuals(temp_table, clock_offset_table,
#
# if p_new is not None:
# p = p_new
poly_order = min(met.size // 300 + 1, 10)
poly_order = min(met.size // 300 + 1, 2)
p0 = np.zeros(poly_order + 1)
p0[0] = q
if p0.size > 1:
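Capping the polynomial order at 2 keeps the de-trending model a parabola at most, no matter how many clock-offset points are available; previously the order could climb as high as 10 on long baselines. The scaling rule in one line:

```python
# One extra degree per ~300 points, but never beyond a parabola (order 2).
for npts in (100, 600, 3000, 100000):
    print(npts, min(npts // 300 + 1, 2))
# 100 -> 1, 600 -> 2, 3000 -> 2, 100000 -> 2
```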
@@ -1317,16 +1317,13 @@ def clock_ppm_model(nustar_met, temperature, craig_fit=False):
# offset = 13.8874536353 - 4.095179312091239e-4
offset = 13.91877 -0.02020554 # sum the "e" parameter from long term
ppm_vs_T_pars = [-0.07413, 0.00158]
# ppm_vs_T_pars = [-0.073795, 0.0015002]
# ppm_vs_time_pars = [0.008276, 256., -220.043,
# 3.408586903702425e-05]

ppm_vs_time_pars = [0.00874492067, 100.255995, -0.251789345,
0.0334934847]
if craig_fit:
offset = 1.3847529679329989e+01
ppm_vs_T_pars = [-7.3964896025586133e-02, 1.5055740907563737e-03]


temp = (temperature - T0)
ftemp = offset + ppm_vs_T_pars[0] * temp + \
ppm_vs_T_pars[1] * temp ** 2 # Temperature dependence
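For reference, the temperature branch of `clock_ppm_model` shown above is a simple quadratic in (temperature - T0). A worked evaluation with the default coefficients; T0 is defined earlier in the function, and both its value and the input temperature below are placeholders for illustration only:

```python
# Minimal sketch of the temperature term only; T0 and the input temperature
# are placeholder values, not the ones used in nuclockutils.
T0 = 13.5                      # placeholder reference temperature (deg C)
offset = 13.91877 - 0.02020554
ppm_vs_T_pars = [-0.07413, 0.00158]

temperature = 16.0             # placeholder temperature reading
temp = temperature - T0
ftemp = offset + ppm_vs_T_pars[0] * temp + ppm_vs_T_pars[1] * temp ** 2
print(ftemp)  # clock rate deviation, in ppm as the function name suggests
```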
6 changes: 3 additions & 3 deletions nuclockutils/utils.py
@@ -142,7 +142,7 @@ def get_obsid_list_from_heasarc(cache_file='heasarc.hdf5'):
try:
heasarc = Heasarc()
all_nustar_obs = heasarc.query_object(
'*', 'numaster', resultmax=10000,
'*', 'numaster', resultmax=100000,
fields='OBSID,TIME,END_TIME,NAME,OBSERVATION_MODE,OBS_TYPE')
except Exception:
return Table({
@@ -242,8 +242,8 @@ def rolling_std(a, window, pad='center'):
return rolling_stat(np.std, a, window, pad, axis=-1)


def spline_through_data(x, y, k=2, grace_intv=1000., smoothing_factor=0.0001,
downsample=5):
def spline_through_data(x, y, k=2, grace_intv=1000., smoothing_factor=0.001,
downsample=10):
"""Pass a spline through the data

Examples
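The retuned defaults of `spline_through_data` (coarser downsampling, larger smoothing factor) both push the fit toward a stiffer trend, matching the `downsample=20` call in `spline_detrending`. A generic sketch of the idea (downsample, then fit a smoothing spline), assuming scipy's `UnivariateSpline` purely for illustration; it is not necessarily what `spline_through_data` uses internally:

```python
import numpy as np
from scipy.interpolate import UnivariateSpline

rng = np.random.default_rng(0)
x = np.linspace(0, 1e6, 5000)                              # MET-like abscissa
y = 1e-3 * np.sin(x / 2e5) + rng.normal(0, 1e-5, x.size)   # slow trend + noise

downsample = 10
xd, yd = x[::downsample], y[::downsample]

# A larger smoothing condition and coarser downsampling both stiffen the fit,
# which is the direction the defaults move in this PR.
spl = UnivariateSpline(xd, yd, k=2, s=0.001 * yd.size * np.var(yd))
detrended = y - spl(x)
```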