Commit

Merge a8f6bf8 into 6611e87
josef-pkt committed May 27, 2018
2 parents 6611e87 + a8f6bf8 commit cd2bb15
Showing 5 changed files with 789 additions and 9 deletions.
2 changes: 2 additions & 0 deletions statsmodels/base/_penalized.py
@@ -51,6 +51,8 @@ def __init__(self, *args, **kwds):
             self.pen_weight = 0
 
         self._init_keys.extend(['penal', 'pen_weight'])
+        self._null_drop_keys = getattr(self, '_null_drop_keys', [])
+        self._null_drop_keys.extend(['penal'])
 
     def loglike(self, params, pen_weight=None, **kwds):
         if pen_weight is None:
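The two added lines register `penal` in `_null_drop_keys`, the list of init keywords to drop when the null (constant-only) model is re-created: the null model must be fit unpenalized, since a penalty sized for the full parameter vector has no meaning for a single constant. A minimal sketch of how such a list can be consumed, following the pattern in DiscreteModels; the helper name and fit call here are assumptions, not code from this commit:

import numpy as np

def _fit_null_model(model):
    # hypothetical helper: rebuild the model with only a constant,
    # dropping init kwds (e.g. 'penal') listed in _null_drop_keys
    kwds = model._get_init_kwds()
    for key in getattr(model, '_null_drop_keys', []):
        kwds.pop(key, None)
    mod_null = model.__class__(model.endog, np.ones(len(model.endog)), **kwds)
    return mod_null.fit()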
44 changes: 35 additions & 9 deletions statsmodels/base/_penalties.py
@@ -91,6 +91,20 @@ def grad(self, params):
                       DeprecationWarning)
         return self.deriv(params)
 
+    def _null_weights(self, params):
+        """workaround for Null model
+
+        This will not be needed anymore when we can use `self._null_drop_keys`
+        as in DiscreteModels.
+        TODO: check other models
+        """
+        if np.size(self.weights) > 1:
+            if len(params) == 1:
+                raise  # raise to identify models where this would be needed
+                return 0.
+
+        return self.weights
+
 
 class NonePenalty(Penalty):
     """
@@ -339,22 +353,29 @@ def __init__(self, tau, c=3.7, c0=None, weights=None, restriction=None):
 
         # get coefficients for quadratic approximation
         c0 = self.c0
+        # need to temporarily override weights for call to super
+        weights = self.weights
+        self.weights = 1.
         deriv_c0 = super(SCADSmoothed, self).grad(c0)
         value_c0 = super(SCADSmoothed, self).func(c0)
+        self.weights = weights
 
         self.aq1 = value_c0 - 0.5 * deriv_c0 * c0
         self.aq2 = 0.5 * deriv_c0 / c0
         self.restriction = restriction
 
     def func(self, params):
+        # workaround for Null model
+        weights = self._null_weights(params)
         # TODO: `and np.size(params) > 1` is hack for llnull, need better solution
         if self.restriction is not None and np.size(params) > 1:
             params = self.restriction.dot(params)
         # need to temporarily override weights for call to super
+        # Note: we have the same problem with `restriction`
-        weights = self.weights
+        self_weights = self.weights
         self.weights = 1.
         value = super(SCADSmoothed, self).func(params[None, ...])
-        self.weights = weights
+        self.weights = self_weights
 
         # shift down so func(0) == 0
         value -= self.aq1
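For reference, the coefficients set in `__init__` make the quadratic segment `aq1 + aq2 * x**2` match SCAD in both value and slope at the cutoff `c0`: requiring `aq1 + aq2*c0**2 = f(c0)` and `2*aq2*c0 = f'(c0)` gives exactly `aq1 = f(c0) - 0.5*f'(c0)*c0` and `aq2 = 0.5*f'(c0)/c0`, and `value -= self.aq1` then shifts the penalty so that `func(0) == 0`. A small numeric check under assumed scalar weights (an editor's sketch, not a test from the commit):

import numpy as np
from statsmodels.base._penalties import SCAD, SCADSmoothed

tau, c0 = 0.1, 0.0001
pen = SCADSmoothed(tau, c0=c0, weights=1.)
scad = SCAD(tau, weights=1.)
x = np.array([c0])
# continuity of value and derivative at the cutoff c0
assert np.allclose(pen.aq1 + pen.aq2 * c0**2, scad.func(x))
assert np.allclose(2 * pen.aq2 * c0, scad.deriv(x))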
@@ -364,16 +385,18 @@ def func(self, params):
         p_abs_masked = p_abs[mask]
         value[mask] = self.aq2 * p_abs_masked**2
 
-        return (self.weights * value).sum(0)
+        return (weights * value).sum(0)
 
     def grad(self, params):
+        # workaround for Null model
+        weights = self._null_weights(params)
         if self.restriction is not None and np.size(params) > 1:
             params = self.restriction.dot(params)
         # need to temporarily override weights for call to super
-        weights = self.weights
+        self_weights = self.weights
         self.weights = 1.
         value = super(SCADSmoothed, self).grad(params)
-        self.weights = weights
+        self.weights = self_weights
 
         # change the segment corresponding to quadratic approximation
         p = np.atleast_1d(params)
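The renaming from `weights` to `self_weights` untangles two quantities that previously shared one name: the saved attribute that must be restored after the `super` call (the base class applies `self.weights` internally, so the subclass evaluates unweighted and applies weights once at the end), and the effective weights returned by `_null_weights`. One way to make the save/restore pattern robust to exceptions, offered as an editor's sketch rather than anything in the commit:

self_weights = self.weights
self.weights = 1.
try:
    value = super(SCADSmoothed, self).grad(params)
finally:
    # restore even if the super call raises
    self.weights = self_weights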
@@ -386,13 +409,15 @@ def grad(self, params):
         return weights * value
 
     def deriv2(self, params):
+        # workaround for Null model
+        weights = self._null_weights(params)
         if self.restriction is not None and np.size(params) > 1:
             params = self.restriction.dot(params)
         # need to temporarily override weights for call to super
-        weights = self.weights
+        self_weights = self.weights
         self.weights = 1.
         value = super(SCADSmoothed, self).deriv2(params)
-        self.weights = weights
+        self.weights = self_weights
 
         # change the segment corresponding to quadratic approximation
         p = np.atleast_1d(params)
@@ -402,9 +427,10 @@ def deriv2(self, params):
         if self.restriction is not None and np.size(params) > 1:
             # note: super returns 1d array for diag, i.e. hessian_diag
             # TODO: weights are missing
-            return (self.restriction.T * value).dot(self.restriction)
+            return (self.restriction.T * (weights * value)
+                    ).dot(self.restriction)
         else:
-            return value
+            return weights * value
 
 
 class ConstraintsPenalty(object):
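The last change completes the chain rule for restricted penalties: with `q = R.dot(p)`, the Hessian of `sum(w * f(q))` with respect to `p` is `R.T @ diag(w * f''(q)) @ R`, and since `super().deriv2` returns `f''(q)` as a 1d diagonal, the two-line return computes exactly that while now including the previously missing weights. A self-contained numeric check (the matrices are illustrative assumptions):

import numpy as np

R = np.array([[1., 0., -1.],
              [0., 1., -1.]])      # example restriction matrix
w = np.array([1., 2.])             # per-restriction penalty weights
fpp = np.array([0.5, 0.25])        # stand-in for super().deriv2(R @ p)

hess = (R.T * (w * fpp)).dot(R)    # the diff's return, expanded
assert np.allclose(hess, R.T @ np.diag(w * fpp) @ R)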
