Fix for corner cases where ppf and isf methods fail #463

Closed · wants to merge 5 commits
60 changes: 35 additions & 25 deletions scipy/stats/distributions.py
@@ -492,7 +492,8 @@ def interval(self, alpha):
 def valarray(shape,value=nan,typecode=None):
     """Return an array of all value.
     """
-    out = reshape(repeat([value],product(shape,axis=0),axis=0),shape)
+
+    out = ones(shape, dtype=bool) * value
Member: A lot more understandable than the old code. This is not really performance-critical, so there is no need for using empty and .fill, which would probably be faster.

     if typecode is not None:
         out = out.astype(typecode)
     if not isinstance(out, ndarray):
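Not part of this PR: a rough sketch of the empty-and-fill variant mentioned in the review comment above, assuming NumPy is imported as np (the helper name valarray_fill is made up here for illustration):

    import numpy as np

    def valarray_fill(shape, value=np.nan, typecode=None):
        # Allocate without initializing, then fill in place; this avoids the
        # temporary boolean array created by ones(shape, dtype=bool) * value.
        out = np.empty(shape, dtype=float)
        out.fill(value)
        if typecode is not None:
            out = out.astype(typecode)
        return out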
@@ -1454,26 +1455,31 @@ def ppf(self,q,*args,**kwds):
             quantile corresponding to the lower tail probability q.
 
         """
-        loc,scale=map(kwds.get,['loc','scale'])
+        loc, scale = map(kwds.get,['loc', 'scale'])
         args, loc, scale = self._fix_loc_scale(args, loc, scale)
-        q,loc,scale = map(asarray,(q,loc,scale))
-        args = tuple(map(asarray,args))
+        q, loc, scale = map(asarray,(q, loc, scale))
+        args = tuple(map(asarray, args))
         cond0 = self._argcheck(*args) & (scale > 0) & (loc==loc)
-        cond1 = (q > 0) & (q < 1)
-        cond2 = (q==1) & cond0
-        cond = cond0 & cond1
-        output = valarray(shape(cond),value=self.a*scale + loc)
-        place(output,(1-cond0)+(1-cond1)*(q!=0.0), self.badvalue)
-        place(output,cond2,self.b*scale + loc)
+        cond1 = (0 < q) & (q < 1)
+        cond2 = cond0 & (q==0)
+        cond3 = cond0 & (q==1)
+        cond = cond0 & cond1
+        output = valarray(shape(cond), value=self.badvalue)
+
+        lower_bound = self.a * scale + loc
+        upper_bound = self.b * scale + loc
+        place(output, cond2, argsreduce(cond2, lower_bound)[0])
+        place(output, cond3, argsreduce(cond3, upper_bound)[0])
+
         if any(cond): #call only if at least 1 entry
             goodargs = argsreduce(cond, *((q,)+args+(scale,loc)))
             scale, loc, goodargs = goodargs[-2], goodargs[-1], goodargs[:-2]
-            place(output,cond,self._ppf(*goodargs)*scale + loc)
+            place(output, cond, self._ppf(*goodargs) * scale + loc)
         if output.ndim == 0:
             return output[()]
         return output
 
-    def isf(self,q,*args,**kwds):
+    def isf(self, q, *args, **kwds):
         """
         Inverse survival function at q of the given RV.
 
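To make the corner cases concrete, here is roughly what the patched ppf is meant to do; the use of norm below is my own illustration, not taken from the diff:

    from scipy import stats

    # q == 0 maps to the lower support bound (self.a*scale + loc), q == 1 to the
    # upper bound (self.b*scale + loc), and q outside [0, 1] yields badvalue (nan).
    print(stats.norm.ppf([0.0, 0.5, 1.0], loc=3, scale=2))   # [-inf   3.  inf]
    print(stats.norm.ppf([-0.1, 1.1]))                       # [nan nan]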
@@ -1495,22 +1501,26 @@ def isf(self,q,*args,**kwds):
             Quantile corresponding to the upper tail probability q.
 
         """
-        loc,scale=map(kwds.get,['loc','scale'])
+        loc, scale = map(kwds.get,['loc', 'scale'])
         args, loc, scale = self._fix_loc_scale(args, loc, scale)
-        q,loc,scale = map(asarray,(q,loc,scale))
-        args = tuple(map(asarray,args))
+        q, loc, scale = map(asarray,(q, loc, scale))
+        args = tuple(map(asarray, args))
         cond0 = self._argcheck(*args) & (scale > 0) & (loc==loc)
-        cond1 = (q > 0) & (q < 1)
-        cond2 = (q==1) & cond0
+        cond1 = (0 < q) & (q < 1)
+        cond2 = cond0 & (q==1)
+        cond3 = cond0 & (q==0)
         cond = cond0 & cond1
-        output = valarray(shape(cond),value=self.b)
-        #place(output,(1-cond0)*(cond1==cond1), self.badvalue)
-        place(output,(1-cond0)*(cond1==cond1)+(1-cond1)*(q!=0.0), self.badvalue)
-        place(output,cond2,self.a)
-        if any(cond): #call only if at least 1 entry
-            goodargs = argsreduce(cond, *((q,)+args+(scale,loc))) #PB replace 1-q by q
+        output = valarray(shape(cond), value=self.badvalue)
+
+        lower_bound = self.a * scale + loc
+        upper_bound = self.b * scale + loc
+        place(output, cond2, argsreduce(cond2, lower_bound)[0])
+        place(output, cond3, argsreduce(cond3, upper_bound)[0])
+
+        if any(cond):
+            goodargs = argsreduce(cond, *((q,)+args+(scale,loc)))
             scale, loc, goodargs = goodargs[-2], goodargs[-1], goodargs[:-2]
-            place(output,cond,self._isf(*goodargs)*scale + loc) #PB use _isf instead of _ppf
+            place(output, cond, self._isf(*goodargs) * scale + loc)
         if output.ndim == 0:
             return output[()]
         return output
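The same illustration for isf, which mirrors ppf (again using norm purely as an example, not from the diff):

    from scipy import stats

    # For isf the roles flip: q == 1 maps to the lower support bound, q == 0 to
    # the upper bound, and q outside [0, 1] again yields nan.
    print(stats.norm.isf([0.0, 0.5, 1.0], loc=3, scale=2))   # [ inf   3. -inf]
    print(stats.norm.isf([-0.1, 1.1]))                       # [nan nan]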
@@ -2436,7 +2446,7 @@ def _stats(self):
         return inf, inf, nan, nan
     def _entropy(self):
         return log(4*pi)
-    def _fitstart(data, args=None):
+    def _fitstart(self, data, args=None):
Member: Hmm, that was quite a blatant bug.

         return (0, 1)
 cauchy = cauchy_gen(name='cauchy')

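For readers wondering why the one-word _fitstart change matters, a minimal standalone sketch (plain Python, not the scipy class): omitting self shifts every argument by one, so the instance lands in data and the caller's data in args:

    class Demo:
        def broken(data, args=None):
            # 'data' silently receives the instance; the real data lands in 'args'
            return data, args

        def fixed(self, data, args=None):
            return data, args

    d = Demo()
    print(d.broken([1, 2, 3]))   # (<__main__.Demo object at ...>, [1, 2, 3])
    print(d.fixed([1, 2, 3]))    # ([1, 2, 3], None)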
16 changes: 16 additions & 0 deletions scipy/stats/tests/test_distributions.py
@@ -197,6 +197,22 @@ def test_cdf_sf(self):
         assert_array_almost_equal(vals,expected)
         assert_array_almost_equal(vals_sf,1-expected)
 
+class TestTruncnorm(TestCase):
+    def test_ppf_ticket1131(self):
+        vals = stats.truncnorm.ppf([-0.5,0,1e-4,0.5, 1-1e-4,1,2],-1., 1.,
+                                   loc=[3]*7,scale=2)
+        NaN = np.NaN
+        expected = np.array([ NaN, 1. , 1.00056419, 3.
+                            , 4.99943581, 5. , NaN])
Member: These 3 lines could fit on a single line, will change that.

+        assert_array_almost_equal(vals, expected)
+
+    def test_isf_ticket1131(self):
+        NaN = np.NaN
+        vals = stats.truncnorm.isf([-0.5,0,1e-4,0.5, 1-1e-4,1,2],-1., 1.,
+                                   loc=[3]*7,scale=2)
+        expected = np.array([ NaN, 5. , 4.99943581, 3.,
+                             1.00056419, 1. , NaN])
+        assert_array_almost_equal(vals, expected)

 class TestHypergeom(TestCase):
     def test_rvs(self):
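A quick sanity check of the expected values in the new tests (my own arithmetic, not part of the PR): truncnorm with a=-1, b=1, loc=3, scale=2 has support [a*scale + loc, b*scale + loc] = [1, 5], so ppf must map q=0 to 1 and q=1 to 5, isf must do the reverse, and q outside [0, 1] must give nan; the q=1e-4 and q=1-1e-4 entries fall just inside those bounds.

    a, b, loc, scale = -1.0, 1.0, 3.0, 2.0
    lower = a * scale + loc   # 1.0 -> truncnorm.ppf(0, ...) and truncnorm.isf(1, ...)
    upper = b * scale + loc   # 5.0 -> truncnorm.ppf(1, ...) and truncnorm.isf(0, ...)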