Skip to content

Commit

Permalink
JP-3463: Fix poisson variance calculation (#255)
Browse files Browse the repository at this point in the history
* Fix variance calculations

* Bugfix

* Clean up special case of all zero variances

* Add changelog entry

* Add unit test for average dark

* Add more info to change log
  • Loading branch information
drlaw1558 committed Apr 23, 2024
1 parent 18993bb commit fa14c81
Show file tree
Hide file tree
Showing 3 changed files with 52 additions and 9 deletions.
10 changes: 9 additions & 1 deletion CHANGES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,15 @@ Changes to API
Bug Fixes
---------

-
ramp_fitting
~~~~~~~~~~~~

- Fix a bug in the Poisson variance calculation that was triggered when an
  average dark current value was provided: for pixels with negative slopes,
  the specified dark current was not converted to the appropriate units,
  resulting in incorrect SCI, ERR, and VAR_POISSON values. This fix also
  required revising the approach for catching all-zero variance cases when
  an average dark current was not specified. [#255]

1.7.0 (2024-03-25)
==================
Expand Down
18 changes: 10 additions & 8 deletions src/stcal/ramp_fitting/ols_fit.py
Original file line number Diff line number Diff line change
Expand Up @@ -1140,7 +1140,7 @@ def ramp_fit_compute_variances(ramp_data, gain_2d, readnoise_2d, fit_slopes_ans)
# Suppress harmless arithmetic warnings for now
warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning)
warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning)
var_p4[num_int, :, rlo:rhi, :] = den_p3 * (med_rates[rlo:rhi, :] +
var_p4[num_int, :, rlo:rhi, :] = den_p3 * (np.maximum(med_rates[rlo:rhi, :], 0) +
ramp_data.average_dark_current[rlo:rhi, :])

# Find the segment variance due to read noise and convert back to DN
Expand All @@ -1157,7 +1157,7 @@ def ramp_fit_compute_variances(ramp_data, gain_2d, readnoise_2d, fit_slopes_ans)
# set the variances for segments having negative slopes (the segment
# variance is proportional to the median estimated slope) to
# outrageously large values so that they will have negligible
# contributions.
# contributions to the inverse variance summed across segments.

# Suppress, then re-enable harmless arithmetic warnings
warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning)
Expand All @@ -1176,13 +1176,13 @@ def ramp_fit_compute_variances(ramp_data, gain_2d, readnoise_2d, fit_slopes_ans)
var_r3[num_int, :, :] = 1.0 / s_inv_var_r3[num_int, :, :]

# Huge variances correspond to non-existing segments, so are reset to 0
# to nullify their contribution.
# to nullify their contribution now that computation of var_p3 and var_r3 is done.
var_p3[var_p3 > utils.LARGE_VARIANCE_THRESHOLD] = 0.0
med_rate_mask = med_rates <= 0.0
var_p3[:, med_rate_mask] = 0.0
var_p4[var_p4 > utils.LARGE_VARIANCE_THRESHOLD] = 0.0
# Deal with the special case where poisson variance for all segments was zero
var_p3[:,np.sum(np.sum(var_p4,axis=0),axis=0) == 0] = 0.0
warnings.resetwarnings()

var_p4[num_int, :, med_rate_mask] = ramp_data.average_dark_current[med_rate_mask][..., np.newaxis]
var_both4[num_int, :, :, :] = var_r4[num_int, :, :, :] + var_p4[num_int, :, :, :]
inv_var_both4[num_int, :, :, :] = 1.0 / var_both4[num_int, :, :, :]

Expand Down Expand Up @@ -1448,7 +1448,6 @@ def ramp_fit_overall(

if slope_int is not None:
del slope_int
del var_p3
del var_r3
del var_both3

Expand Down Expand Up @@ -1494,11 +1493,14 @@ def ramp_fit_overall(
warnings.filterwarnings("ignore", "invalid value.*", RuntimeWarning)
var_p2[var_p2 > utils.LARGE_VARIANCE_THRESHOLD] = 0.0
var_r2[var_r2 > utils.LARGE_VARIANCE_THRESHOLD] = 0.0
# Deal with the special case where poisson variance for all integrations was zero
var_p2[np.sum(var_p3,axis=0) == 0] = 0.0

del var_p3

# Some contributions to these vars may be NaN as they are from ramps
# having PIXELDQ=DO_NOT_USE
var_p2[np.isnan(var_p2)] = 0.0
var_p2[med_rates <= 0.0] = 0.0
var_r2[np.isnan(var_r2)] = 0.0

# Suppress, then re-enable, harmless arithmetic warning
Expand Down
33 changes: 33 additions & 0 deletions tests/test_ramp_fitting.py
Original file line number Diff line number Diff line change
Expand Up @@ -252,6 +252,39 @@ def test_neg_med_rates_single_integration_multi_segment_optional():
np.testing.assert_allclose(neg_ramp_poisson, np.zeros(3), tol)


def test_neg_with_avgdark():
    """
    Fit a negative ramp with an average dark current supplied.

    Verifies that the fitted slope is negative, the Poisson variance matches
    the expected value, the read-noise variance is non-zero, and the ERR
    array combines the two variance terms in quadrature.
    """
    nints, ngroups, nrows, ncols = 1, 10, 1, 1
    dims = (nints, ngroups, nrows, ncols)
    var = (10.0, 1.0)  # (rnoise_val, gain_val)
    tm = (1, 1.0, 1)  # (nframes, gtime, dtime)
    ramp_data, rnoise, gain = setup_inputs(dims, var, tm)

    # Build a ramp decreasing by 0.5 per group, and set a non-zero average
    # dark current so the Poisson variance does not collapse to zero.
    ramp_data.data[0, :, 0, 0] = np.arange(1, ngroups + 1) * -0.5
    ramp_data.average_dark_current[:] = 1.0

    # Run the OLS ramp fit with optimal weighting on a single core.
    slopes, cube, optional, gls_dummy = ramp_fit_data(
        ramp_data, 512, True, rnoise, gain, "OLS", "optimal", "none", dqflags
    )

    sdata, sdq, svp, svr, serr = slopes
    assert sdata[0, 0] < 0.0
    np.testing.assert_almost_equal(svp[0, 0], 0.11, 2)
    assert svr[0, 0] != 0.0
    np.testing.assert_almost_equal(np.sqrt(svp[0, 0] + svr[0, 0]), serr[0, 0], 2)


def test_utils_dq_compress_final():
"""
If there is any integration that has usable data, the DO_NOT_USE flag
Expand Down

0 comments on commit fa14c81

Please sign in to comment.