Commit

Merge branch 'master' of https://github.com/JarronL/pynrc
JarronL committed Feb 10, 2019
2 parents 47314b2 + 3f6d7a4 commit 422c19e
Showing 3 changed files with 86 additions and 34 deletions.
88 changes: 64 additions & 24 deletions pynrc/WFE_interpolation.py
@@ -1,26 +1,47 @@
from astropy.table import Table
from astropy.table import Table, join
from astropy.io import fits
from scipy.interpolate import griddata
from scipy.interpolate import griddata, RegularGridInterpolator

import pynrc, webbpsf, os
#inst = webbpsf.NIRCam()

outdir = pynrc.conf.path
outdir = pynrc.conf.path + 'opd_mod/'

# Read in measured SI Zernike data
data_dir = webbpsf.utils.get_webbpsf_data_path() + '/'
zernike_file = data_dir + 'si_zernikes_isim_cv3.fits'

# Read in Zemax Zernike data to remove edge effects
# zemax_file = outdir + 'si_zernikes_Zemax_wfe.csv'

# Coordinate limits (oversized) for each FoV
v2v3_limits = {}
v2v3_limits['SW'] = {'V2':[-160, 160], 'V3':[-570, -420]}
v2v3_limits['LW'] = v2v3_limits['SW']
v2v3_limits['SWA'] = {'V2':[0, 160], 'V3':[-570, -420]}
v2v3_limits['LWA'] = v2v3_limits['SWA']
v2v3_limits['SWB'] = {'V2':[-160, 0], 'V3':[-570, -420]}
v2v3_limits['LWB'] = v2v3_limits['SWB']

if not os.path.exists(zernike_file):
print('Zernike file does not exist:')
print(' {}'.format(zernike_file))
else:
ztable_full = Table.read(zernike_file)
# zemax_full = Table.read(zemax_file, format='ascii.csv')
# zemax_full.rename_column('\ufeffinstrument', 'instrument')
#
# for k in zemax_full.keys():
# zemax_full[k] = zemax_full[k].astype(ztable_full[k].dtype)

# ztable_full = join(ztable_full, zemax_full, join_type='outer')

keys = np.array(ztable_full.keys())
ind_z = ['Zernike' in k for k in keys]
zkeys = keys[ind_z]

for mod in ['SW', 'LW', 'SWA', 'LWA', 'SWB', 'LWB']:
# for mod in ['SW', 'LW', 'SWA', 'LWA', 'SWB', 'LWB']:
for mod in ['SWA', 'LWA', 'SWB', 'LWB']:
ind_nrc = ['NIRCam'+mod in row['instrument'] for row in ztable_full]
ind_nrc = np.where(ind_nrc)

@@ -30,11 +51,15 @@
v3 = ztable_full[ind_nrc]['V3']

# Create finer mesh grid
v2_lims = np.array(v2v3_limits[mod]['V2']) / 60.
v3_lims = np.array(v2v3_limits[mod]['V3']) / 60.
dstep = 1. / 60. # 1" steps
xgrid = np.arange(v2.min()-dstep/2, v2.max()+dstep/2, dstep)
ygrid = np.arange(v3.min()-dstep/2, v3.max()+dstep/2, dstep)
xgrid = np.arange(v2_lims[0], v2_lims[1]+dstep, dstep)
ygrid = np.arange(v3_lims[0], v3_lims[1]+dstep, dstep)
X, Y = np.meshgrid(xgrid,ygrid)

extent = [X.min(), X.max(), Y.min(), Y.max()]

# Create a Zernike cube
zcube = []
for k in zkeys:
@@ -44,16 +69,27 @@
# There will be some NaNs along the outer borders
zgrid = griddata((v2, v3), z, (X, Y), method='cubic')
ind_nan = np.isnan(zgrid)

# Replace NaNs with nearest neighbors
zgrid2 = griddata((X[~ind_nan], Y[~ind_nan]), zgrid[~ind_nan], (X,Y), method='nearest')
zgrid[ind_nan] = zgrid2[ind_nan]

#ind_min = zgrid<z.min()
#ind_max = zgrid>z.max()
#zgrid[ind_min] = z.min()
#zgrid[ind_max] = z.max()


# Cut out a square region whose values are not NaNs
xnans = ind_nan.sum(axis=0)
ynans = ind_nan.sum(axis=1)
x_ind = xnans < ygrid.size
y_ind = ynans < xgrid.size
zgrid2 = zgrid[y_ind, :][:, x_ind]
ygrid2 = ygrid[y_ind]
xgrid2 = xgrid[x_ind]
# Remove rows/cols 1 by 1 until no NaNs
while np.isnan(zgrid2.sum()):
zgrid2 = zgrid2[1:-1,1:-1]
ygrid2 = ygrid2[1:-1]
xgrid2 = xgrid2[1:-1]

# Create regular grid interpolator function for extrapolation at NaN's
func = RegularGridInterpolator((ygrid2,xgrid2), zgrid2, method='linear',
bounds_error=False, fill_value=None)

pts = np.array([Y[ind_nan], X[ind_nan]]).transpose()
zgrid[ind_nan] = func(pts)
zcube.append(zgrid)

zcube = np.array(zcube)
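
The hunk above replaces the old nearest-neighbor NaN fill with linear extrapolation from a trimmed, NaN-free sub-grid. A minimal sketch of that pattern on synthetic scattered data, assuming illustrative coordinate ranges and sample counts (numpy is imported explicitly here since the excerpt does not show it):

import numpy as np
from scipy.interpolate import griddata, RegularGridInterpolator

# Synthetic scattered samples standing in for the measured SI Zernike points
rng = np.random.default_rng(0)
v2 = rng.uniform(0.0, 2.5, 200)        # arcmin
v3 = rng.uniform(-9.5, -7.0, 200)
z = 0.1*v2 + 0.05*v3**2                # smooth toy surface

# Oversized regular grid, so cubic interpolation leaves NaNs near the edges
dstep = 1. / 60.
xgrid = np.arange(-0.2, 2.7, dstep)
ygrid = np.arange(-9.7, -6.8, dstep)
X, Y = np.meshgrid(xgrid, ygrid)

zgrid = griddata((v2, v3), z, (X, Y), method='cubic')
ind_nan = np.isnan(zgrid)

# Keep only rows/cols that contain at least one valid value ...
x_ind = ind_nan.sum(axis=0) < ygrid.size
y_ind = ind_nan.sum(axis=1) < xgrid.size
zgrid2 = zgrid[y_ind, :][:, x_ind]
ygrid2, xgrid2 = ygrid[y_ind], xgrid[x_ind]
# ... then shave the border until the interior block is NaN-free
while np.isnan(zgrid2.sum()):
    zgrid2 = zgrid2[1:-1, 1:-1]
    ygrid2 = ygrid2[1:-1]
    xgrid2 = xgrid2[1:-1]

# Linear interpolator over the clean block; fill_value=None enables extrapolation
func = RegularGridInterpolator((ygrid2, xgrid2), zgrid2, method='linear',
                               bounds_error=False, fill_value=None)
zgrid[ind_nan] = func(np.column_stack([Y[ind_nan], X[ind_nan]]))
assert not np.isnan(zgrid).any()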
@@ -71,20 +107,24 @@
hdr['ymax'] = Y.max()
hdr['ydel'] = dstep

#hdr['wave'] =
hdr['wave'] = 2.10 if 'SW' in mod else 3.23

hdr['comment'] = 'X and Y values correspond to V2 and V3 coordinates (arcmin).'
hdr['comment'] = 'Slices in the cube correspond to Zernikes 1 to 36.'
hdr['comment'] = 'Zernike values calculated using 2D cubic interpolation.'
hdr['comment'] = 'Zernike values calculated using 2D cubic interpolation'
hdr['comment'] = 'and linear extrapolation outside gridded data.'

fname = 'NIRCam{}_zernikes_isim_cv3.fits'.format(mod)
hdu.writeto(outdir + fname, overwrite=True)


#plt.clf()
#plt.contourf(xgrid,ygrid,zcube[i],20)
#plt.scatter(v2,v3,marker='o',c='k',s=5)
#plt.xlim([X.min(),X.max()])
#plt.ylim([Y.min(),Y.max()])
#plt.axes().set_aspect('equal', 'datalim')
# plt.clf()
# plt.contourf(xgrid,ygrid,zcube[i],20)
# #plt.imshow(zcube[i], extent = [X.min(), X.max(), Y.min(), Y.max()])
# plt.scatter(v2,v3,marker='o',c='r',s=5)
# plt.xlim([X.min(),X.max()])
# plt.ylim([Y.min(),Y.max()])
# plt.axes().set_aspect('equal', 'datalim')

# names = ['Guider1', 'Guider2', 'MIRI', 'NIRCamLWA', 'NIRCamLWB',
# 'NIRCamSWA', 'NIRCamSWB', 'NIRISS', 'NIRSpec']
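
The header keywords and file naming above are what a downstream reader needs to reconstruct the grid. A hedged sketch of reading one of the written cubes back and sampling a Zernike coefficient at a field point, assuming the X-axis keywords mirror the Y-axis ones shown; the module, slice index, and V2/V3 values are illustrative only:

import numpy as np
from astropy.io import fits
from scipy.interpolate import RegularGridInterpolator

# File name pattern comes from the script above; adjust the path as needed
fname = 'NIRCamSWA_zernikes_isim_cv3.fits'

hdul = fits.open(fname)
zcube = hdul[0].data              # (36, ny, nx); slices are Zernikes 1 to 36
hdr = hdul[0].header
hdul.close()

# Rebuild the V2/V3 axes (arcmin) from the header keywords written above
xgrid = np.linspace(hdr['XMIN'], hdr['XMAX'], zcube.shape[2])
ygrid = np.linspace(hdr['YMIN'], hdr['YMAX'], zcube.shape[1])

# Sample Zernike 4 (slice index 3; defocus in Noll ordering) at an assumed point
func = RegularGridInterpolator((ygrid, xgrid), zcube[3], method='linear')
v2, v3 = 1.4, -8.2                # arcmin, illustrative only
print(func([[v3, v2]])[0])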
7 changes: 5 additions & 2 deletions pynrc/nrc_utils.py
@@ -844,7 +844,7 @@ def _wrap_coeff_for_mp(args):
# current exception being handled.
traceback.print_exc()

print()
print('')
#raise e
poppy.conf.use_multiprocessing = mp_prev
return None
@@ -1206,7 +1206,10 @@ def psf_coeff(filter_or_bp, pupil=None, mask=None, module='A',
# Pass arguments to the helper function
hdu_arr = []
for wa in worker_arguments:
hdu_arr.append(_wrap_coeff_for_mp(wa))
hdu = _wrap_coeff_for_mp(wa)
if hdu is None:
raise RuntimeError('Returned None values. Issue with multiprocess or WebbPSF??')
hdu_arr.append(hdu)
t1 = time.time()

# Reset to original log levels
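
The new guard in the serial path raises as soon as the helper returns None rather than letting the bad value surface later. A generic sketch of the same fail-fast pattern applied to a pool of workers; the worker function and argument list are placeholders, not pynrc's actual API:

from multiprocessing import Pool

def _worker(args):
    """Stand-in for a helper like _wrap_coeff_for_mp: returns None on failure."""
    try:
        x, = args
        return x * x
    except Exception:
        return None

if __name__ == '__main__':
    worker_arguments = [(i,) for i in range(8)]
    with Pool(4) as pool:
        results = pool.map(_worker, worker_arguments)
    # Fail fast instead of letting a stray None propagate into later math
    if any(r is None for r in results):
        raise RuntimeError('One or more workers returned None; check the logs above.')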
25 changes: 17 additions & 8 deletions pynrc/pynrc_core.py
@@ -942,6 +942,8 @@ class NIRCam(object):
Updates :attr:`wfe_drift` attribute.
opd : tuple or HDUList
Tuple (file, slice) or filename or HDUList specifying OPD.
include_si_wfe : bool
Include SI WFE measurements? Default=True.
tel_pupil : str
File name or HDUList specifying telescope entrance pupil.
jitter : str or None
@@ -1517,7 +1519,7 @@ def bar_offset(self, value):

def update_psf_coeff(self, fov_pix=None, oversample=None,
offset_r=None, offset_theta=None, tel_pupil=None, opd=None,
wfe_drift=None, jitter='gaussian', jitter_sigma=0.007,
wfe_drift=None, include_si_wfe=None, jitter='gaussian', jitter_sigma=0.007,
bar_offset=None, save=None, force=False, **kwargs):
"""Create new PSF coefficients.
@@ -1562,10 +1564,12 @@ def update_psf_coeff(self, fov_pix=None, oversample=None,
Updates :attr:`wfe_drift` attribute and coefficients appropriately.
opd : tuple, str, or HDUList
Tuple (file, slice) or filename or HDUList specifying OPD.
tel_pupil : str
include_si_wfe : bool
Include SI WFE measurements? Default=True.
tel_pupil : str or HDUList
File name or HDUList specifying telescope entrance pupil.
jitter : str or None
Currently either 'gaussian' or None.
jitter : str
Either 'gaussian' or 'none'.
jitter_sigma : float
If ``jitter = 'gaussian'``, then this is the size of the blurring effect.
save : bool
@@ -1626,9 +1630,12 @@ def update_psf_coeff(self, fov_pix=None, oversample=None,
if tel_pupil is None:
try: tel_pupil = self._psf_info['tel_pupil']
except (AttributeError, KeyError): tel_pupil = None
if include_si_wfe is None:
try: include_si_wfe = self._psf_info['include_si_wfe']
except (AttributeError, KeyError): include_si_wfe = True
if jitter is None:
try: jitter = self._psf_info['jitter']
except (AttributeError, KeyError): jitter = None
except (AttributeError, KeyError): jitter = 'gaussian'
if jitter_sigma is None:
try: jitter_sigma = self._psf_info['jitter_sigma']
except (AttributeError, KeyError): jitter_sigma = 0.007
@@ -1638,12 +1645,15 @@
if save is None:
try: save = self._psf_info['save']
except (AttributeError, KeyError): save = True

if jitter=='none':
jitter = None

#print(opd)
self._psf_info={'fov_pix':fov_pix, 'oversample':oversample,
'offset_r':offset_r, 'offset_theta':offset_theta,
'tel_pupil':tel_pupil, 'save':save, 'force':force,
'opd':opd, 'jitter':jitter, 'jitter_sigma':jitter_sigma}
'include_si_wfe':include_si_wfe, 'opd':opd, 'jitter':jitter, 'jitter_sigma':jitter_sigma}
self._psf_coeff = psf_coeff(self.bandpass, self.pupil, self.mask, self.module,
**self._psf_info)

@@ -1674,7 +1684,6 @@ def update_psf_coeff(self, fov_pix=None, oversample=None,
self._psf_coeff += cf_mod
else:
self._bar_offset = 0


# WFE Drift is handled differently than the rest of the parameters
# This is because we use wfed_coeff() to determine the resid values
@@ -1711,7 +1720,7 @@ def update_psf_coeff(self, fov_pix=None, oversample=None,
self._psf_info_bg = {'fov_pix':self._fov_pix_bg, 'oversample':oversample,
'offset_r':0, 'offset_theta':0, 'tel_pupil':tel_pupil,
'opd':opd, 'jitter':jitter, 'jitter_sigma':jitter_sigma,
'save':True, 'force':False}
'include_si_wfe':include_si_wfe, 'save':True, 'force':False}
self._psf_coeff_bg = psf_coeff(self.bandpass, self.pupil, None, self.module,
**self._psf_info_bg)

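
The new include_si_wfe keyword is stored in _psf_info (and _psf_info_bg) and passed through to psf_coeff along with the jitter settings. A hypothetical usage sketch; the filter choice and keyword values are illustrative, not defaults set by this commit:

import pynrc

nrc = pynrc.NIRCam(filter='F430M')

# Regenerate PSF coefficients with SI WFE measurements included and a
# Gaussian jitter kernel; pass jitter='none' to disable jitter entirely.
nrc.update_psf_coeff(include_si_wfe=True, jitter='gaussian', jitter_sigma=0.007)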
