Fixing small bug in how min_iter is checked and handled
shz9 committed Jun 10, 2024
1 parent a1285cb commit 8530ef0
Showing 4 changed files with 30 additions and 34 deletions.
9 changes: 2 additions & 7 deletions CHANGELOG.md
@@ -5,13 +5,6 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

-## [0.1.3] - TBD
-
-### Changed
-
-- Fixed bug in `psuedo_metrics` when extracting summary statistics data.
--
-
## [0.1.2] - 2024-06-03

### Changed
@@ -25,13 +18,15 @@ object wasn't refreshed.
- Refactored parts of `VIPRS` to cache some recurring computations.
- Updated `VIPRSBMA` & `VIPRSGridSearch` to only consider models that
successfully converged.
+- Fixed bug in `psuedo_metrics` when extracting summary statistics data.

### Added

- Added SNP position to output table from VIPRS objects.
- Added measure of time taken to prepare data in `viprs_fit`.
- Added option to keep long-range LD regions in `viprs_fit`.
- Added convergence check based on parameter values.
+- Added `min_iter` parameter to `.fit` methods to ensure CAVI is run for at least `min_iter` iterations.
- Added separate method for initializing optimization-related objects.

## [0.1.1] - 2024-04-24
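The `min_iter` entry above has a simple contract: no convergence criterion may stop CAVI before `min_iter` iterations have run. A standalone toy loop illustrating that contract (a sketch with made-up numbers, not the library's actual CAVI loop):

```python
# Toy illustration of the `min_iter` contract: the tolerance check is
# guarded by `i > min_iter`, so early iterations can never trigger a stop.
min_iter, max_iter, f_abs_tol = 3, 100, 1e-6

prev_obj = float('-inf')
for i in range(1, max_iter + 1):
    obj = 0.0  # stand-in objective that is flat from the first iteration
    if i > min_iter and abs(obj - prev_obj) < f_abs_tol:
        print(f"stopped at iteration {i}")  # prints 4; without the guard, 2
        break
    prev_obj = obj
```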
4 changes: 3 additions & 1 deletion benchmarks/benchmark_e_step.py
@@ -68,7 +68,9 @@ def exec_func():
# Here, we roughly repeat until the total time is at least ~1 second:
# Note that the minimum number of calls is 5. If this takes too long,
# then set the number of calls manually?
-time_iter = math.ceil(1. / np.mean(timeit.repeat(exec_func, number=5, repeat=5)))
+time_iter = math.ceil(1. / np.median(
+    timeit.repeat(exec_func, repeat=5 + warm_up, number=5)[warm_up:]
+))
n_calls = 5 * int(time_iter)

with ResourceProfiler(dt=0.1) as rprof:
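The calibration change above swaps the mean for a median and discards warm-up repeats, so one-off slow repeats (imports, cache warming) no longer inflate the estimate of how many calls fill roughly one second. A self-contained sketch of the same pattern with a stand-in workload; `warm_up` is defined elsewhere in the real script, so the value here is an assumption:

```python
import math
import timeit

import numpy as np


def exec_func():
    # Stand-in workload; the benchmark script times the real E-step here.
    np.dot(np.ones(1000), np.ones(1000))


warm_up = 2  # assumed value; the benchmark defines this elsewhere

# Time 5 calls per repeat, drop the first `warm_up` repeats, and take the
# median so a single outlier repeat cannot skew the calibration:
timings = timeit.repeat(exec_func, repeat=5 + warm_up, number=5)[warm_up:]
time_iter = math.ceil(1. / np.median(timings))

# Total number of calls needed so the profiled run lasts roughly ~1 second:
n_calls = 5 * int(time_iter)
print(n_calls)
```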
25 changes: 12 additions & 13 deletions viprs/model/VIPRS.py
@@ -730,7 +730,7 @@ def fit(self,
theta_0=None,
param_0=None,
continued=False,
-min_iter=5,
+min_iter=3,
f_abs_tol=1e-6,
x_abs_tol=1e-7,
drop_r_tol=0.01,
@@ -801,18 +801,17 @@
curr_elbo = self.history['ELBO'][-1]
prev_elbo = self.history['ELBO'][-2]

-if i > min_iter:
-    # Check for convergence in the objective + parameters:
-    if np.isclose(prev_elbo, curr_elbo, atol=f_abs_tol, rtol=0.):
-        self.optim_result.update(curr_elbo,
-                                 stop_iteration=True,
-                                 success=True,
-                                 message='Objective (ELBO) converged successfully.')
-    elif max([np.max(np.abs(diff)) for diff in self.eta_diff.values()]) < x_abs_tol:
-        self.optim_result.update(curr_elbo,
-                                 stop_iteration=True,
-                                 success=True,
-                                 message='Variational parameters converged successfully.')
+# Check for convergence in the objective + parameters:
+if (i > min_iter) and np.isclose(prev_elbo, curr_elbo, atol=f_abs_tol, rtol=0.):
+    self.optim_result.update(curr_elbo,
+                             stop_iteration=True,
+                             success=True,
+                             message='Objective (ELBO) converged successfully.')
+elif (i > min_iter) and max([np.max(np.abs(diff)) for diff in self.eta_diff.values()]) < x_abs_tol:
+    self.optim_result.update(curr_elbo,
+                             stop_iteration=True,
+                             success=True,
+                             message='Variational parameters converged successfully.')

# Check to see if the objective drops due to numerical instabilities:
elif curr_elbo < prev_elbo and not np.isclose(curr_elbo, prev_elbo, atol=0., rtol=drop_r_tol):
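Two details in this hunk are worth spelling out. First, in the old structure the divergence check (the `elif curr_elbo < prev_elbo ...` context line above) was chained to `if i > min_iter:`, so it could never run once the iteration count passed `min_iter`; flattening the conditions makes it reachable at every iteration. Second, the guards should be joined with `and` rather than `&`, because comparisons bind more loosely than `&` in Python, so `(i > min_iter) & x < tol` parses as `((i > min_iter) & x) < tol` and raises a `TypeError` when `x` is a float. A standalone toy showing both points (made-up numbers, not the library code):

```python
i, min_iter = 10, 3
prev_elbo, curr_elbo = -1000.0, -1050.0  # the objective *dropped*
converged = False

# Old shape: once i > min_iter, the first branch is always taken, even when
# nothing converged, so the divergence `elif` below is unreachable.
if i > min_iter:
    if converged:
        print("converged")
elif curr_elbo < prev_elbo:
    print("divergence detected")  # never printed when i > min_iter

# New shape: the min_iter guard sits inside each convergence test, so the
# divergence check is evaluated at every iteration.
if (i > min_iter) and converged:
    print("converged")
elif curr_elbo < prev_elbo:
    print("divergence detected")  # printed

# Why `and`, not `&`: comparisons bind looser than `&`, so the following
# would evaluate `True & 0.02` first and raise a TypeError:
#     (i > min_iter) & 0.02 < 1e-7
```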
26 changes: 13 additions & 13 deletions viprs/model/gridsearch/VIPRSGrid.py
@@ -334,19 +334,19 @@ def fit(self,

for m in np.where(self.active_models)[0]:

-if i > min_iter:
-    if np.isclose(prev_elbo[m], curr_elbo[m], atol=f_abs_tol, rtol=0.):
-        self.active_models[m] = False
-        self.optim_results[m].update(curr_elbo[m],
-                                     stop_iteration=True,
-                                     success=True,
-                                     message='Objective (ELBO) converged successfully.')
-    elif max([np.max(np.abs(diff[:, m])) for diff in self.eta_diff.values()]) < x_abs_tol:
-        self.active_models[m] = False
-        self.optim_results[m].update(curr_elbo[m],
-                                     stop_iteration=True,
-                                     success=True,
-                                     message='Variational parameters converged successfully.')
+if (i > min_iter) and np.isclose(prev_elbo[m], curr_elbo[m], atol=f_abs_tol, rtol=0.):
+    self.active_models[m] = False
+    self.optim_results[m].update(curr_elbo[m],
+                                 stop_iteration=True,
+                                 success=True,
+                                 message='Objective (ELBO) converged successfully.')
+elif (i > min_iter) and max([np.max(np.abs(diff[:, m]))
+                             for diff in self.eta_diff.values()]) < x_abs_tol:
+    self.active_models[m] = False
+    self.optim_results[m].update(curr_elbo[m],
+                                 stop_iteration=True,
+                                 success=True,
+                                 message='Variational parameters converged successfully.')

# Check to see if the objective drops due to numerical instabilities:
elif curr_elbo[m] < prev_elbo[m] and not np.isclose(curr_elbo[m], prev_elbo[m], atol=0., rtol=drop_r_tol):
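The grid variant applies the same fix per model: each model in the grid has its own entry in `active_models`, and a converged model is switched off while the others keep iterating. A standalone sketch of that bookkeeping pattern with toy numbers (not the library's objects):

```python
import numpy as np

x_abs_tol = 1e-7
active_models = np.array([True, True, True, True])
# Largest absolute change in the variational parameters of each model
# during the current iteration (toy values):
eta_diff_max = np.array([3e-2, 5e-8, 1e-3, 9e-9])

for m in np.where(active_models)[0]:
    if eta_diff_max[m] < x_abs_tol:
        active_models[m] = False  # model m converged; skip it from now on

print(active_models)  # [ True False  True False]
```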
