Skip to content

Commit

Permalink
REF: norms _set_tuning_param default to inplace=False
Browse files Browse the repository at this point in the history
  • Loading branch information
josef-pkt committed May 2, 2024
1 parent bb0a83c commit 530be61
Show file tree
Hide file tree
Showing 3 changed files with 59 additions and 21 deletions.
68 changes: 53 additions & 15 deletions statsmodels/robust/norms.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,12 +202,16 @@ class HuberT(RobustNorm):
def __init__(self, t=1.345):
    """Initialize the norm with tuning parameter ``t`` (default 1.345)."""
    self.t = t

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.
Warning: this needs to wipe cached attributes that depend on the param.
"""
self.t = c
if inplace:
self.t = c
return self
else:
return self.__class__(t=c)

Check warning on line 214 in statsmodels/robust/norms.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/norms.py#L214

Added line #L214 was not covered by tests

def max_rho(self):
    """Return the supremum of the rho function (infinite for this norm)."""
    return np.inf
Expand Down Expand Up @@ -325,6 +329,18 @@ class RamsayE(RobustNorm):
def __init__(self, a=.3):
    """Initialize the norm with tuning parameter ``a`` (default 0.3)."""
    self.a = a

def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.
Warning: this needs to wipe cached attributes that depend on the param.
"""
# todo : change default to inplace=False, when tools are fixed
if inplace:
self.a = c
return self

Check warning on line 340 in statsmodels/robust/norms.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/norms.py#L339-L340

Added lines #L339 - L340 were not covered by tests
else:
return self.__class__(a=c)

Check warning on line 342 in statsmodels/robust/norms.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/norms.py#L342

Added line #L342 was not covered by tests

def max_rho(self):
    """Return the supremum of the rho function (infinite for this norm)."""
    return np.inf

Expand Down Expand Up @@ -422,12 +438,16 @@ class AndrewWave(RobustNorm):
def __init__(self, a=1.339):
    """Initialize the norm with tuning parameter ``a`` (default 1.339)."""
    self.a = a

def _set_tuning_param(self, a):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.
Warning: this needs to wipe cached attributes that depend on the param.
"""
self.a = a
if inplace:
self.a = c
return self
else:
return self.__class__(a=c)

Check warning on line 450 in statsmodels/robust/norms.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/norms.py#L450

Added line #L450 was not covered by tests

def max_rho(self):
    """Return the maximum of the rho function, ``2 * a**2``."""
    return 2 * self.a**2
Expand Down Expand Up @@ -556,12 +576,16 @@ class TrimmedMean(RobustNorm):
def __init__(self, c=2.):
    """Initialize the norm with tuning parameter ``c`` (default 2.0)."""
    self.c = c

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.
Warning: this needs to wipe cached attributes that depend on the param.
"""
self.c = c
if inplace:
self.c = c
return self
else:
return self.__class__(c=c)

Check warning on line 588 in statsmodels/robust/norms.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/norms.py#L588

Added line #L588 was not covered by tests

def max_rho(self):
    """Return the maximum of the rho function, its value at the
    tuning parameter ``c``."""
    return self.rho(self.c)
Expand Down Expand Up @@ -677,14 +701,20 @@ def __init__(self, a=2., b=4., c=8.):
self.b = b
self.c = c

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.
Warning: this needs to wipe cached attributes that depend on the param.
"""
self.c = c
self.a = c / 4
self.b = c / 2
a = c / 4
b = c / 2
if inplace:
self.c = c
self.a = a
self.b = b
return self
else:
return self.__class__(a=a, b=b, c=c)

Check warning on line 717 in statsmodels/robust/norms.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/norms.py#L717

Added line #L717 was not covered by tests

def max_rho(self):
    """Return the maximum of the rho function, its value at the
    tuning parameter ``c``."""
    return self.rho(self.c)
Expand Down Expand Up @@ -897,7 +927,7 @@ def get_tuning(cls, bp=None, eff=None):
elif eff is not None:
return rtools.tukeybiweight_eff[eff]

Check warning on line 928 in statsmodels/robust/norms.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/norms.py#L928

Added line #L928 was not covered by tests

def _set_tuning_param(self, c, inplace=True):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.
Warning: this needs to wipe cached attributes that depend on the param.
Expand Down Expand Up @@ -1023,12 +1053,16 @@ def __init__(self, c=3.61752, k=4):
self.c = c
self.k = k

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.
Warning: this needs to wipe cached attributes that depend on the param.
"""
self.c = c
if inplace:
self.c = c
return self
else:
return self.__class__(c=c, k=self.k)

Check warning on line 1065 in statsmodels/robust/norms.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/norms.py#L1065

Added line #L1065 was not covered by tests

def max_rho(self):
    """Return the maximum of the rho function, its value at the
    tuning parameter ``c``."""
    return self.rho(self.c)
Expand Down Expand Up @@ -1161,12 +1195,16 @@ def __init__(self, c=2.3849, df=4):
self.c = c
self.df = df

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.
Warning: this needs to wipe cached attributes that depend on the param.
"""
self.c = c
if inplace:
self.c = c
return self
else:
return self.__class__(c=c, df=self.df)

Check warning on line 1207 in statsmodels/robust/norms.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/norms.py#L1207

Added line #L1207 was not covered by tests

def max_rho(self):
    """Return the supremum of the rho function (infinite for this norm)."""
    return np.inf
Expand Down
4 changes: 2 additions & 2 deletions statsmodels/robust/tests/test_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,14 +57,14 @@ def test_eff(case):

res_eff = []
for c in res2:
norm._set_tuning_param(c)
norm._set_tuning_param(c, inplace=True)
res_eff.append(1 / var_func(norm))

assert_allclose(res_eff, effs, atol=0.0005)

for c in res2:
# bp = stats.norm.expect(lambda x : norm.rho(x)) / norm.rho(norm.c)
norm._set_tuning_param(c)
norm._set_tuning_param(c, inplace=True)
eff = 1 / _var_normal(norm)
tune = _get_tuning_param(norm, eff)
assert_allclose(tune, c, rtol=1e-6, atol=5e-4)
Expand Down
8 changes: 4 additions & 4 deletions statsmodels/robust/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,11 +150,11 @@ def _get_tuning_param(norm, eff, kwd="c", kwargs=None, use_jump=False,
def func(c):
# kwds.update({kwd: c})
# return _var_normal(norm(**kwds)) - 1 / eff
norm._set_tuning_param(c)
norm._set_tuning_param(c, inplace=True)
return _var_normal(norm) - 1 / eff
else:
def func(c):
norm._set_tuning_param(c)
norm._set_tuning_param(c, inplace=True)

Check warning on line 157 in statsmodels/robust/tools.py

View check run for this annotation

Codecov / codecov/patch

statsmodels/robust/tools.py#L157

Added line #L157 was not covered by tests
return _var_normal_jump(norm(**kwds) - 1 / eff)

res = optimize.brentq(func, *bracket)
Expand Down Expand Up @@ -215,14 +215,14 @@ def tuning_s_estimator_mean(norm, breakdown=None):

def func(c):
norm_ = norm
norm_._set_tuning_param(c)
norm_._set_tuning_param(c, inplace=True)
bp = stats.norm.expect(lambda x : norm_.rho(x)) / norm_.max_rho()
return bp

res = []
for bp in bps:
c_bp = optimize.brentq(lambda c0: func(c0) - bp, 0.1, 10)
norm._set_tuning_param(c_bp) # inplace modification
norm._set_tuning_param(c_bp, inplace=True) # inplace modification
eff = 1 / _var_normal(norm)
b = stats.norm.expect(lambda x : norm.rho(x))
res.append([bp, eff, c_bp, b])
Expand Down

0 comments on commit 530be61

Please sign in to comment.