Commit: add linear model
erikbern committed Jul 23, 2019
1 parent ff45812 commit 386baf1
Showing 2 changed files with 24 additions and 10 deletions.
28 changes: 18 additions & 10 deletions convoys/regression.py
@@ -17,8 +17,8 @@
     'GeneralizedGamma']
 
 
-def generalized_gamma_LL(x, X, B, T, W, fix_k, fix_p,
-                         hierarchical, callback=None):
+def generalized_gamma_loss(x, X, B, T, W, fix_k, fix_p,
+                           hierarchical, flavor, callback=None):
     k = exp(x[0]) if fix_k is None else fix_k
     p = exp(x[1]) if fix_p is None else fix_p
     log_sigma_alpha = x[2]
@@ -29,15 +29,20 @@ def generalized_gamma_LL(x, X, B, T, W, fix_k, fix_p,
     alpha = x[6:6+n_features]
     beta = x[6+n_features:6+2*n_features]
     lambd = exp(dot(X, alpha)+a)
-    c = expit(dot(X, beta)+b)
 
     # PDF: p*lambda^(k*p) / gamma(k) * t^(k*p-1) * exp(-(t*lambda)^p)
     log_pdf = log(p) + (k*p) * log(lambd) - gammaln(k) \
         + (k*p-1) * log(T) - (T*lambd)**p
     cdf = gammainc(k, (T*lambd)**p)
 
-    LL_observed = log(c) + log_pdf
-    LL_censored = log((1-c) + c * (1 - cdf))
+    if flavor == 'logistic':  # Log-likelihood with sigmoid
+        c = expit(dot(X, beta)+b)
+        LL_observed = log(c) + log_pdf
+        LL_censored = log((1 - c) + c * (1 - cdf))
+    elif flavor == 'linear':  # L2 loss, linear
+        c = dot(X, beta)+b
+        LL_observed = -(1 - c)**2 + log_pdf
+        LL_censored = -c**2 * cdf - (1 - c)**2 * (1 - cdf)
 
     LL_data = sum(
         W * B * LL_observed +
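
The two flavors differ only in how the conversion probability c enters the censored-data objective: 'logistic' maps the linear predictor through a sigmoid and keeps a proper Bernoulli log-likelihood, while 'linear' uses the raw predictor with an L2 penalty in place of the log terms. A minimal standalone sketch of that branch (hypothetical helper using scipy.special, not part of this commit):

import numpy as np
from scipy.special import expit

def conversion_terms(z, log_pdf, cdf, flavor):
    # z is the linear predictor dot(X, beta) + b; log_pdf and cdf are the
    # generalized gamma density and CDF evaluated at each observed time T.
    if flavor == 'logistic':
        c = expit(z)  # squashed into (0, 1), so the logs are well defined
        ll_observed = np.log(c) + log_pdf
        ll_censored = np.log((1 - c) + c * (1 - cdf))
    else:  # 'linear': raw predictor, squared-error stand-in for the logs
        c = z
        ll_observed = -(1 - c)**2 + log_pdf
        ll_censored = -c**2 * cdf - (1 - c)**2 * (1 - cdf)
    return ll_observed, ll_censored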
@@ -147,11 +152,13 @@ class GeneralizedGamma(RegressionModel):
     to sample from the full posterior in order to generate uncertainty
     estimates for all parameters.
     '''
-    def __init__(self, ci=False, fix_k=None, fix_p=None, hierarchical=True):
+    def __init__(self, ci=False, fix_k=None, fix_p=None, hierarchical=True,
+                 flavor='logistic'):
         self._ci = ci
         self._fix_k = fix_k
         self._fix_p = fix_p
         self._hierarchical = hierarchical
+        self._flavor = flavor
 
     def fit(self, X, B, T, W=None):
         '''Fits the model.
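
A usage sketch for the new kwarg (hypothetical call; X, B, T are the covariate, conversion, and time arrays that fit expects):

import convoys.regression

# 'logistic' remains the default; 'linear' switches the conversion term
# to the L2 objective shown above.
model = convoys.regression.GeneralizedGamma(ci=False, flavor='linear')
# model.fit(X, B, T)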
Expand Down Expand Up @@ -181,7 +188,8 @@ def fit(self, X, B, T, W=None):
x0 = numpy.zeros(6+2*n_features)
x0[0] = +1 if self._fix_k is None else log(self._fix_k)
x0[1] = -1 if self._fix_p is None else log(self._fix_p)
args = (X, B, T, W, self._fix_k, self._fix_p, self._hierarchical)
args = (X, B, T, W, self._fix_k, self._fix_p,
self._hierarchical, self._flavor)

# Callback for progress to stdout
sys.stdout.write('\n')
@@ -193,8 +201,8 @@ def callback(LL, value_history=[]):
             sys.stdout.flush()
 
         # Define objective and use automatic differentiation
-        f = lambda x: -generalized_gamma_LL(x, *args, callback=callback)
-        jac = autograd.grad(lambda x: -generalized_gamma_LL(x, *args))
+        f = lambda x: -generalized_gamma_loss(x, *args, callback=callback)
+        jac = autograd.grad(lambda x: -generalized_gamma_loss(x, *args))
 
         # Find the maximum a posteriori of the distribution
         res = scipy.optimize.minimize(f, x0, jac=jac, method='SLSQP',
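
The pattern in this hunk — differentiate the negated objective with autograd and hand both function and gradient to SciPy — is self-contained; a toy version (illustrative stand-in, not the model's objective):

import autograd
import autograd.numpy as np
import scipy.optimize

def neg_objective(x):
    return np.sum((x - 3.0)**2)  # toy stand-in for the negated loss

jac = autograd.grad(neg_objective)  # exact gradient via autodiff
res = scipy.optimize.minimize(neg_objective, np.zeros(2),
                              jac=jac, method='SLSQP')
# res.x converges to the minimizer [3.0, 3.0]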
@@ -225,7 +233,7 @@ def callback(LL, value_history=[]):
         sampler = emcee.EnsembleSampler(
             nwalkers=n_walkers,
             dim=dim,
-            lnpostfn=generalized_gamma_LL,
+            lnpostfn=generalized_gamma_loss,
             args=args,
         )
         mcmc_initial_noise = 1e-3
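
For context, the sampler construction matches the emcee 2.x keyword API (dim and lnpostfn were renamed in emcee 3). A toy run of the same pattern, with walkers jittered around a starting point much like the mcmc_initial_noise above:

import emcee
import numpy as np

def log_post(x):
    return -0.5 * np.sum(x**2)  # toy log-posterior

dim, n_walkers = 4, 16
sampler = emcee.EnsembleSampler(nwalkers=n_walkers, dim=dim,
                                lnpostfn=log_post)
p0 = 1e-3 * np.random.randn(n_walkers, dim)  # walkers seeded near a point
sampler.run_mcmc(p0, 100)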
6 changes: 6 additions & 0 deletions test_convoys.py
@@ -94,6 +94,12 @@ def test_exponential_regression_model(c=0.3, lambd=0.1, n=10000):
     assert model.cdf([1], 0).shape == ()
     assert model.cdf([1], [0, 1, 2, 3]).shape == (4,)
 
+    # Fit a linear model
+    model = convoys.regression.Exponential(ci=False, flavor='linear')
+    model.fit(X, B, T)
+    model_c = model.params['map']['b'] + model.params['map']['beta'][0]
+    assert 0.9*c < model_c < 1.1*c
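
A note on the new assertion: assuming X in this test is a single all-ones column (which the b + beta[0] check implies), the linear flavor's conversion estimate is the same scalar for every row, so intercept plus coefficient should land near the true rate c:

import numpy as np

# With one constant feature, dot(X, beta) + b == beta[0] + b for every row.
X = np.ones((5, 1))
beta, b = np.array([0.2]), 0.1
assert np.allclose(X @ beta + b, beta[0] + b)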


 @flaky.flaky
 def test_weibull_regression_model(cs=[0.3, 0.5, 0.7],
