From 386baf138300292f31e5e92175ac067c9769457f Mon Sep 17 00:00:00 2001
From: Erik Bernhardsson
Date: Mon, 22 Jul 2019 23:28:56 -0400
Subject: [PATCH] add linear model

---
 convoys/regression.py | 28 ++++++++++++++++++----------
 test_convoys.py       |  6 ++++++
 2 files changed, 24 insertions(+), 10 deletions(-)

diff --git a/convoys/regression.py b/convoys/regression.py
index 2bd6c49..7dc3f4e 100644
--- a/convoys/regression.py
+++ b/convoys/regression.py
@@ -17,8 +17,8 @@
     'GeneralizedGamma']
 
 
-def generalized_gamma_LL(x, X, B, T, W, fix_k, fix_p,
-                         hierarchical, callback=None):
+def generalized_gamma_loss(x, X, B, T, W, fix_k, fix_p,
+                           hierarchical, flavor, callback=None):
     k = exp(x[0]) if fix_k is None else fix_k
     p = exp(x[1]) if fix_p is None else fix_p
     log_sigma_alpha = x[2]
@@ -29,15 +29,20 @@ def generalized_gamma_LL(x, X, B, T, W, fix_k, fix_p,
     alpha = x[6:6+n_features]
     beta = x[6+n_features:6+2*n_features]
     lambd = exp(dot(X, alpha)+a)
-    c = expit(dot(X, beta)+b)
 
     # PDF: p*lambda^(k*p) / gamma(k) * t^(k*p-1) * exp(-(x*lambda)^p)
     log_pdf = log(p) + (k*p) * log(lambd) - gammaln(k) \
         + (k*p-1) * log(T) - (T*lambd)**p
     cdf = gammainc(k, (T*lambd)**p)
 
-    LL_observed = log(c) + log_pdf
-    LL_censored = log((1-c) + c * (1 - cdf))
+    if flavor == 'logistic':  # Log-likelihood with sigmoid
+        c = expit(dot(X, beta)+b)
+        LL_observed = log(c) + log_pdf
+        LL_censored = log((1 - c) + c * (1 - cdf))
+    elif flavor == 'linear':  # L2 loss, linear
+        c = dot(X, beta)+b
+        LL_observed = -(1 - c)**2 + log_pdf
+        LL_censored = -c**2 * cdf - (1 - c)**2 * (1 - cdf)
 
     LL_data = sum(
         W * B * LL_observed +
@@ -147,11 +152,13 @@ class GeneralizedGamma(RegressionModel):
     to sample from the full posterior in order to generate uncertainty
     estimates for all parameters.
     '''
-    def __init__(self, ci=False, fix_k=None, fix_p=None, hierarchical=True):
+    def __init__(self, ci=False, fix_k=None, fix_p=None, hierarchical=True,
+                 flavor='logistic'):
         self._ci = ci
         self._fix_k = fix_k
         self._fix_p = fix_p
         self._hierarchical = hierarchical
+        self._flavor = flavor
 
     def fit(self, X, B, T, W=None):
         '''Fits the model.
@@ -181,7 +188,8 @@ def fit(self, X, B, T, W=None):
         x0 = numpy.zeros(6+2*n_features)
         x0[0] = +1 if self._fix_k is None else log(self._fix_k)
         x0[1] = -1 if self._fix_p is None else log(self._fix_p)
-        args = (X, B, T, W, self._fix_k, self._fix_p, self._hierarchical)
+        args = (X, B, T, W, self._fix_k, self._fix_p,
+                self._hierarchical, self._flavor)
 
         # Callback for progress to stdout
         sys.stdout.write('\n')
@@ -193,8 +201,8 @@ def callback(LL, value_history=[]):
             sys.stdout.flush()
 
         # Define objective and use automatic differentiation
-        f = lambda x: -generalized_gamma_LL(x, *args, callback=callback)
-        jac = autograd.grad(lambda x: -generalized_gamma_LL(x, *args))
+        f = lambda x: -generalized_gamma_loss(x, *args, callback=callback)
+        jac = autograd.grad(lambda x: -generalized_gamma_loss(x, *args))
 
         # Find the maximum a posteriori of the distribution
         res = scipy.optimize.minimize(f, x0, jac=jac, method='SLSQP',
@@ -225,7 +233,7 @@ def callback(LL, value_history=[]):
             sampler = emcee.EnsembleSampler(
                 nwalkers=n_walkers,
                 dim=dim,
-                lnpostfn=generalized_gamma_LL,
+                lnpostfn=generalized_gamma_loss,
                 args=args,
             )
             mcmc_initial_noise = 1e-3
diff --git a/test_convoys.py b/test_convoys.py
index b140033..bc39824 100644
--- a/test_convoys.py
+++ b/test_convoys.py
@@ -94,6 +94,12 @@ def test_exponential_regression_model(c=0.3, lambd=0.1, n=10000):
     assert model.cdf([1], 0).shape == ()
     assert model.cdf([1], [0, 1, 2, 3]).shape == (4,)
 
+    # Fit a linear model
+    model = convoys.regression.Exponential(ci=False, flavor='linear')
+    model.fit(X, B, T)
+    model_c = model.params['map']['b'] + model.params['map']['beta'][0]
+    assert 0.9*c < model_c < 1.1*c
+
 
 @flaky.flaky
 def test_weibull_regression_model(cs=[0.3, 0.5, 0.7],
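
A usage sketch, not part of the patch: fitting the new flavor='linear'
option on synthetic exponential data, mirroring the added test. The data
generation below is an illustrative assumption, not code from the
repository; only the Exponential(ci=False, flavor='linear') call and the
params['map'] access come from the diff above.

import numpy
import scipy.stats

import convoys.regression

# Synthetic cohort (assumed setup): true conversion rate c, exponential
# conversion delays with rate lambd, and a uniform censoring horizon.
n, c, lambd = 10000, 0.3, 0.1
X = numpy.ones((n, 1))                               # single constant feature
converts = scipy.stats.bernoulli.rvs(c, size=(n,))   # who eventually converts
horizon = scipy.stats.uniform.rvs(scale=5./lambd, size=(n,))  # censoring time
delay = scipy.stats.expon.rvs(scale=1./lambd, size=(n,))      # time to convert
B = (converts == 1) & (delay < horizon)              # conversion observed?
T = numpy.where(B, delay, horizon)                   # observed duration

model = convoys.regression.Exponential(ci=False, flavor='linear')
model.fit(X, B, T)

# With flavor='linear' the conversion rate is modeled as dot(X, beta) + b
# rather than expit(dot(X, beta) + b), so the recovered rate should land
# near the true c:
print(model.params['map']['b'] + model.params['map']['beta'][0])

Design-wise, the linear flavor swaps the sigmoid link and Bernoulli
log-likelihood for an identity link with an L2 penalty; both branches share
the same generalized-gamma time-to-conversion terms (log_pdf and cdf), which
is why the change stays confined to the c-dependent part of the loss.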