diff --git a/CHANGELOG.md b/CHANGELOG.md
index fe33eee..a40ba3b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,13 +5,6 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-## [0.1.3] - TBD
-
-### Changed
-
-- Fixed bug in `psuedo_metrics` when extracting summary statistics data.
--
-
 ## [0.1.2] - 2024-06-03
 
 ### Changed
@@ -25,6 +18,7 @@ object wasn't refreshed.
 - Refactored parts of `VIPRS` to cache some recurring computations.
 - Updated `VIPRSBMA` & `VIPRSGridSearch` to only consider models that
 successfully converged.
+- Fixed bug in `psuedo_metrics` when extracting summary statistics data.
 
 ### Added
 
@@ -32,6 +26,7 @@ successfully converged.
 - Added measure of time taken to prepare data in `viprs_fit`.
 - Added option to keep long-range LD regions in `viprs_fit`.
 - Added convergence check based on parameter values.
+- Added `min_iter` parameter to `.fit` methods to ensure CAVI is run for at least `min_iter` iterations.
 - Added separate method for initializing optimization-related objects.
 
 ## [0.1.1] - 2024-04-24
diff --git a/benchmarks/benchmark_e_step.py b/benchmarks/benchmark_e_step.py
index b42a6ec..5ff4e6e 100644
--- a/benchmarks/benchmark_e_step.py
+++ b/benchmarks/benchmark_e_step.py
@@ -68,7 +68,9 @@ def exec_func():
     # Here, we roughly repeat until the total time is at least ~1 second:
     # Note that the minimum number of calls is 5. If this takes too long,
     # then set the number of calls manually?
-    time_iter = math.ceil(1. / np.mean(timeit.repeat(exec_func, number=5, repeat=5)))
+    time_iter = math.ceil(1. / np.median(
+        timeit.repeat(exec_func, repeat=5 + warm_up, number=5)[warm_up:]
+    ))
     n_calls = 5 * int(time_iter)
 
     with ResourceProfiler(dt=0.1) as rprof:
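Note: the revised timing heuristic above discards a few warm-up repeats and takes the median of the remaining timings, which is more robust to one-off slow runs than the mean. A minimal, self-contained sketch of the pattern (here the value of `warm_up` and the body of `exec_func` are illustrative stand-ins for the benchmark's own definitions):

    import math
    import timeit

    import numpy as np

    warm_up = 2  # assumed: number of initial repeats to discard

    def exec_func():
        sum(i * i for i in range(10_000))  # stand-in workload

    # Run (5 + warm_up) repeats of 5 calls each, drop the warm-up repeats,
    # then size the profiling run so that it lasts roughly ~1 second:
    timings = timeit.repeat(exec_func, repeat=5 + warm_up, number=5)[warm_up:]
    time_iter = math.ceil(1. / np.median(timings))
    n_calls = 5 * int(time_iter)

With these numbers, a median repeat time of one second or more yields `time_iter = 1`, preserving the minimum of 5 calls mentioned in the comment.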
diff --git a/viprs/model/VIPRS.py b/viprs/model/VIPRS.py
index 7820a2a..e7672bc 100644
--- a/viprs/model/VIPRS.py
+++ b/viprs/model/VIPRS.py
@@ -730,7 +730,7 @@ def fit(self,
             theta_0=None,
             param_0=None,
             continued=False,
-            min_iter=5,
+            min_iter=3,
             f_abs_tol=1e-6,
             x_abs_tol=1e-7,
             drop_r_tol=0.01,
@@ -801,18 +801,17 @@ def fit(self,
                 curr_elbo = self.history['ELBO'][-1]
                 prev_elbo = self.history['ELBO'][-2]
 
-                if i > min_iter:
-                    # Check for convergence in the objective + parameters:
-                    if np.isclose(prev_elbo, curr_elbo, atol=f_abs_tol, rtol=0.):
-                        self.optim_result.update(curr_elbo,
-                                                 stop_iteration=True,
-                                                 success=True,
-                                                 message='Objective (ELBO) converged successfully.')
-                    elif max([np.max(np.abs(diff)) for diff in self.eta_diff.values()]) < x_abs_tol:
-                        self.optim_result.update(curr_elbo,
-                                                 stop_iteration=True,
-                                                 success=True,
-                                                 message='Variational parameters converged successfully.')
+                # Check for convergence in the objective + parameters:
+                if (i > min_iter) and np.isclose(prev_elbo, curr_elbo, atol=f_abs_tol, rtol=0.):
+                    self.optim_result.update(curr_elbo,
+                                             stop_iteration=True,
+                                             success=True,
+                                             message='Objective (ELBO) converged successfully.')
+                elif (i > min_iter) and max([np.max(np.abs(diff)) for diff in self.eta_diff.values()]) < x_abs_tol:
+                    self.optim_result.update(curr_elbo,
+                                             stop_iteration=True,
+                                             success=True,
+                                             message='Variational parameters converged successfully.')
 
                 # Check to see if the objective drops due to numerical instabilities:
                 elif curr_elbo < prev_elbo and not np.isclose(curr_elbo, prev_elbo, atol=0., rtol=drop_r_tol):
diff --git a/viprs/model/gridsearch/VIPRSGrid.py b/viprs/model/gridsearch/VIPRSGrid.py
index 620f788..821e0bd 100644
--- a/viprs/model/gridsearch/VIPRSGrid.py
+++ b/viprs/model/gridsearch/VIPRSGrid.py
@@ -334,19 +334,19 @@ def fit(self,
 
                     for m in np.where(self.active_models)[0]:
 
-                        if i > min_iter:
-                            if np.isclose(prev_elbo[m], curr_elbo[m], atol=f_abs_tol, rtol=0.):
-                                self.active_models[m] = False
-                                self.optim_results[m].update(curr_elbo[m],
-                                                             stop_iteration=True,
-                                                             success=True,
-                                                             message='Objective (ELBO) converged successfully.')
-                            elif max([np.max(np.abs(diff[:, m])) for diff in self.eta_diff.values()]) < x_abs_tol:
-                                self.active_models[m] = False
-                                self.optim_results[m].update(curr_elbo[m],
-                                                             stop_iteration=True,
-                                                             success=True,
-                                                             message='Variational parameters converged successfully.')
+                        if (i > min_iter) and np.isclose(prev_elbo[m], curr_elbo[m], atol=f_abs_tol, rtol=0.):
+                            self.active_models[m] = False
+                            self.optim_results[m].update(curr_elbo[m],
+                                                         stop_iteration=True,
+                                                         success=True,
+                                                         message='Objective (ELBO) converged successfully.')
+                        elif (i > min_iter) and max([np.max(np.abs(diff[:, m]))
+                                                     for diff in self.eta_diff.values()]) < x_abs_tol:
+                            self.active_models[m] = False
+                            self.optim_results[m].update(curr_elbo[m],
+                                                         stop_iteration=True,
+                                                         success=True,
+                                                         message='Variational parameters converged successfully.')
 
                         # Check to see if the objective drops due to numerical instabilities:
                         elif curr_elbo[m] < prev_elbo[m] and not np.isclose(curr_elbo[m],
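Note: in both hunks above, each convergence criterion is now gated on `i > min_iter` individually, so the numerical-instability `elif` joins the same chain and is evaluated on every iteration where neither criterion fires. A minimal standalone sketch of the gated check (a hypothetical helper, not part of the library's API; `eta_diff` stands in for the model's dictionary of per-chromosome parameter updates):

    import numpy as np

    def check_convergence(i, min_iter, prev_elbo, curr_elbo, eta_diff,
                          f_abs_tol=1e-6, x_abs_tol=1e-7):
        # `and` is used rather than `&`: it short-circuits on the
        # `i > min_iter` guard, and `a & b < c` would otherwise parse as
        # `(a & b) < c`, since `&` binds tighter than comparison operators.
        if (i > min_iter) and np.isclose(prev_elbo, curr_elbo, atol=f_abs_tol, rtol=0.):
            return 'Objective (ELBO) converged successfully.'
        elif (i > min_iter) and max(np.max(np.abs(d)) for d in eta_diff.values()) < x_abs_tol:
            return 'Variational parameters converged successfully.'
        return None  # not converged yet; caller may run instability checks

For instance, `check_convergence(4, 3, -1250.0, -1250.0, {'chr1': np.zeros(5)})` reports ELBO convergence, while any call with `i <= min_iter` returns `None`, which is how `min_iter` guarantees a minimum number of CAVI iterations.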