From 05534833f6252b469b81b16d38f5e490e69c9f2f Mon Sep 17 00:00:00 2001
From: CamDavidsonPilon
Date: Wed, 14 Aug 2019 13:10:23 -0400
Subject: [PATCH] need to set the stop tol a bit higher to get equal or better
 accuracy

---
 lifelines/fitters/log_logistic_aft_fitter.py                 | 1 +
 lifelines/fitters/piecewise_exponential_regression_fitter.py | 1 +
 lifelines/fitters/weibull_aft_fitter.py                      | 1 +
 3 files changed, 3 insertions(+)

diff --git a/lifelines/fitters/log_logistic_aft_fitter.py b/lifelines/fitters/log_logistic_aft_fitter.py
index 8f806b883..a475082c9 100644
--- a/lifelines/fitters/log_logistic_aft_fitter.py
+++ b/lifelines/fitters/log_logistic_aft_fitter.py
@@ -64,6 +64,7 @@ class LogLogisticAFTFitter(ParametericAFTRegressionFitter):
 
     # about 25% faster than BFGS
     _scipy_fit_method = "SLSQP"
+    _scipy_fit_options = {"ftol": 1e-6, "maxiter": 200}
 
     def __init__(self, alpha=0.05, penalizer=0.0, l1_ratio=0.0, fit_intercept=True, model_ancillary=False):
         self._ancillary_parameter_name = "beta_"
diff --git a/lifelines/fitters/piecewise_exponential_regression_fitter.py b/lifelines/fitters/piecewise_exponential_regression_fitter.py
index 281202a52..9f90852d3 100644
--- a/lifelines/fitters/piecewise_exponential_regression_fitter.py
+++ b/lifelines/fitters/piecewise_exponential_regression_fitter.py
@@ -22,6 +22,7 @@ class PiecewiseExponentialRegressionFitter(ParametricRegressionFitter):
 
     # about 50% faster than BFGS
     _scipy_fit_method = "SLSQP"
+    _scipy_fit_options = {"ftol": 1e-6, "maxiter": 200}
 
     def __init__(self, breakpoints, alpha=0.05, penalizer=0.0):
         super(PiecewiseExponentialRegressionFitter, self).__init__(alpha=alpha)
diff --git a/lifelines/fitters/weibull_aft_fitter.py b/lifelines/fitters/weibull_aft_fitter.py
index 6483ec25b..3a86b16f5 100644
--- a/lifelines/fitters/weibull_aft_fitter.py
+++ b/lifelines/fitters/weibull_aft_fitter.py
@@ -65,6 +65,7 @@ class WeibullAFTFitter(ParametericAFTRegressionFitter):
 
     # about 25% faster than BFGS
     _scipy_fit_method = "SLSQP"
+    _scipy_fit_options = {"ftol": 1e-10, "maxiter": 200}
 
    def __init__(self, alpha=0.05, penalizer=0.0, l1_ratio=0.0, fit_intercept=True, model_ancillary=False):
        self._ancillary_parameter_name = "rho_"
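
For context, a minimal sketch of how a class-level _scipy_fit_method / _scipy_fit_options pair is typically forwarded to scipy.optimize.minimize. This is an assumption about the base fitter's behavior, not lifelines' actual _fit implementation; the SketchFitter class, the placeholder objective, and the starting point below are hypothetical stand-ins.

    # Sketch only: assumes the base fitter passes these class attributes through
    # to scipy.optimize.minimize; the objective is a toy quadratic, not the
    # model's negative log-likelihood.
    import numpy as np
    from scipy.optimize import minimize


    class SketchFitter:
        _scipy_fit_method = "SLSQP"
        _scipy_fit_options = {"ftol": 1e-6, "maxiter": 200}

        def _fit(self, negative_log_likelihood, initial_point):
            # A tighter ftol makes SLSQP take more iterations before declaring
            # convergence, trading a little speed for accuracy; maxiter caps
            # the total number of iterations.
            return minimize(
                negative_log_likelihood,
                initial_point,
                method=self._scipy_fit_method,
                options=self._scipy_fit_options,
            )


    # Usage with the toy objective standing in for the model's log-likelihood:
    result = SketchFitter()._fit(lambda x: np.sum((x - 3.0) ** 2), np.zeros(2))
    print(result.x, result.nit)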