Stats: Added a standard function mean to compute arithmetic average #16314

Closed · wants to merge 1 commit
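For orientation, here is a minimal usage sketch of the function this PR adds, assuming a SymPy build that includes this branch; the expected value matches the doctest and tests in the diff below.

    from sympy import S
    from sympy.stats import Die, mean

    X = Die('X', 6)           # a fair six-sided die
    assert mean(X) == S(7)/2  # arithmetic average of the outcomes, 7/2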
sympy/stats/__init__.py (6 changes: 5 additions & 1 deletion)
@@ -17,12 +17,13 @@
``density(expression)`` Probability Density Function
``sample(expression)`` Produce a realization
``where(condition)`` Where the condition is true
+ ``mean(expression)`` Arithmetic average value
========================= =============================

Examples
========

- >>> from sympy.stats import P, E, variance, Die, Normal
+ >>> from sympy.stats import P, E, variance, mean, Die, Normal
>>> from sympy import Eq, simplify
>>> X, Y = Die('X', 6), Die('Y', 6) # Define two six sided dice
>>> Z = Normal('Z', 0, 1) # Declare a Normal random variable with mean 0, std 1
@@ -34,6 +35,8 @@
35/6
>>> simplify(P(Z>1)) # Probability of Z being greater than 1
1/2 - erf(sqrt(2)/2)/2
+ >>> mean(X) # Average value of outcome of dice
Member

I would merge this with the E(X + Y) example, as:

E(X + Y)  # or mean(X + Y), the expected average of two dice
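The suggestion above, written out as a runnable snippet (a sketch assuming this branch; the value 7 matches the existing E(X + Y) assertion in test_finite_rv.py):

    from sympy.stats import Die, E, mean

    X, Y = Die('X', 6), Die('Y', 6)
    assert E(X + Y) == 7            # expected value of the sum of two fair dice
    assert mean(X + Y) == E(X + Y)  # mean is bound to the same function, so results agree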

Contributor Author

Yes, this can be done, but I think a specific example should also be present.

Contributor Author

Let me know if it is better this way.

+ 7/2
"""

__all__ = []
@@ -43,6 +46,7 @@
cdf, characteristic_function, covariance, density, dependent, E, given, independent, P, pspace,
random_symbols, sample, sample_iter, skewness, std, variance, where,
correlation, moment, cmoment, smoment, sampling_density, moment_generating_function,
+ mean
)
__all__.extend(rv_interface.__all__)

sympy/stats/rv_interface.py (3 changes: 2 additions & 1 deletion)
@@ -8,7 +8,7 @@
__all__ = ['P', 'E', 'density', 'where', 'given', 'sample', 'cdf', 'characteristic_function', 'pspace',
'sample_iter', 'variance', 'std', 'skewness', 'covariance',
'dependent', 'independent', 'random_symbols', 'correlation',
- 'moment', 'cmoment', 'sampling_density', 'moment_generating_function']
+ 'moment', 'cmoment', 'sampling_density', 'moment_generating_function', 'mean']



@@ -211,3 +211,4 @@ def skewness(X, condition=None, **kwargs):

P = probability
E = expectation
+ mean = expectation
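Since mean is simply bound to the existing expectation function, the two names give identical results for any random expression. A quick sketch (assuming this branch) that mirrors the exponential test further down; the rate symbol is defined locally here for self-containment:

    from sympy import Symbol
    from sympy.stats import Exponential, E, mean

    rate = Symbol('lambda', positive=True)
    X = Exponential('x', rate)
    assert E(X) == 1/rate   # expectation of an exponential random variable
    assert mean(X) == E(X)  # mean is an alias of expectation, so the results agree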
sympy/stats/tests/test_continuous_rv.py (3 changes: 2 additions & 1 deletion)
@@ -15,7 +15,7 @@
QuadraticU, RaisedCosine, Rayleigh, ShiftedGompertz,
StudentT, Trapezoidal, Triangular, Uniform, UniformSum,
VonMises, Weibull, WignerSemicircle, correlation,
- moment, cmoment, smoment)
+ moment, cmoment, smoment, mean)
from sympy.stats.crv_types import NormalDistribution
from sympy.stats.joint_rv import JointPSpace
from sympy.utilities.pytest import raises, XFAIL, slow, skip
@@ -343,6 +343,7 @@ def test_exponential():
X = Exponential('x', rate)

assert E(X) == 1/rate
+ assert mean(X) == 1/rate
assert variance(X) == 1/rate**2
assert skewness(X) == 2
assert skewness(X) == smoment(X, 3)
sympy/stats/tests/test_finite_rv.py (4 changes: 3 additions & 1 deletion)
@@ -4,7 +4,7 @@
from sympy.core.compatibility import range
from sympy.matrices import Matrix
from sympy.stats import (DiscreteUniform, Die, Bernoulli, Coin, Binomial,
- Hypergeometric, Rademacher, P, E, variance, covariance, skewness, sample,
+ Hypergeometric, Rademacher, P, E, mean, variance, covariance, skewness, sample,
density, where, FiniteRV, pspace, cdf,
correlation, moment, cmoment, smoment, characteristic_function, moment_generating_function)
from sympy.stats.frv_types import DieDistribution
@@ -51,6 +51,7 @@ def test_dice():
a, b, t = symbols('a b t')

assert E(X) == 3 + S.Half
+ assert mean(X) == S(7)/2
assert variance(X) == S(35)/12
assert E(X + Y) == 7
assert E(X + X) == 7
@@ -156,6 +157,7 @@ def test_bernoulli():
assert density(X)[b] == 1 - p
assert characteristic_function(X)(t) == p * exp(I * a * t) + (-p + 1) * exp(I * b * t)
assert moment_generating_function(X)(t) == p * exp(a * t) + (-p + 1) * exp(b * t)
+ assert mean(X) == a*p + b*(-p + 1)

X = Bernoulli('B', p, 1, 0)
