
Commit

black formatting
Andrew McCluskey committed Apr 23, 2020
1 parent 10dc169 commit c67d591
Showing 7 changed files with 44 additions and 154 deletions.
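The diff below repeats one pattern: black joins a wrapped argument list onto a single line wherever the result fits within its line-length limit (88 characters by default). A small, hypothetical sketch of that transformation, not taken verbatim from the commit; the exact black invocation (e.g. `black uravu/`) and options used are not recorded here and are an assumption:

import string  # no external dependencies needed for this illustration


def describe_before(name, size):
    # Pre-black layout: arguments wrapped onto their own line.
    return "Distribution: {}\nSize: {}\n".format(
        name, size
    )


def describe_after(name, size):
    # Post-black layout: the identical call joined onto one line, since it fits.
    return "Distribution: {}\nSize: {}\n".format(name, size)


# The two layouts are equivalent; only the formatting changes.
assert describe_before("gradient", 1000) == describe_after("gradient", 1000)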
18 changes: 4 additions & 14 deletions uravu/distribution.py
@@ -34,11 +34,7 @@ class Distribution:
     """

     def __init__(
-        self,
-        samples,
-        name="Distribution",
-        ci_points=None,
-        unit=UREG.dimensionless,
+        self, samples, name="Distribution", ci_points=None, unit=UREG.dimensionless,
     ):
         """
         Initialisation function for a :py:class:`~uravu.distribution.Distribution` object.
@@ -132,18 +128,12 @@ def __str__(self):
         Returns:
             :py:attr:`str`: Description of the distribution.
         """
-        representation = "Distribution: {}\nSize: {}\n".format(
-            self.name, self.size
-        )
+        representation = "Distribution: {}\nSize: {}\n".format(self.name, self.size)
         representation += "Samples: "
         representation += "["
-        representation += " ".join(
-            ["{:.2e}".format(i) for i in self.samples[:2]]
-        )
+        representation += " ".join(["{:.2e}".format(i) for i in self.samples[:2]])
         representation += " ... "
-        representation += " ".join(
-            ["{:.2e}".format(i) for i in self.samples[-2:]]
-        )
+        representation += " ".join(["{:.2e}".format(i) for i in self.samples[-2:]])
         representation += "]\n"
         representation += "Median: {:.2e}\n".format(self.n)
         if self.normal:
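For orientation, a hedged usage sketch of the class reformatted above; the constructor signature and the `__str__` layout come from the diff, while the sample data and the `name` value are illustrative:

import numpy as np

from uravu.distribution import Distribution

# Hypothetical samples standing in for, e.g., an MCMC chain of one variable.
samples = np.random.normal(loc=1.0, scale=0.1, size=2000)

# Signature as reformatted above: samples, then optional name, ci_points, unit.
dist = Distribution(samples, name="gradient")

# __str__ (also reformatted above) reports the name, the size, the first and
# last two samples, and the median.
print(dist)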
30 changes: 7 additions & 23 deletions uravu/plotting.py
@@ -24,9 +24,7 @@
 )


-def plot_relationship(
-    relationship, axes=None, figsize=(10, 6)
-):  # pragma: no cover
+def plot_relationship(relationship, axes=None, figsize=(10, 6)):  # pragma: no cover
     """
     Plot the relationship. Additional plots will be included on this if the MCMC sampling has been used to find distributions.
@@ -43,9 +41,7 @@ def plot_relationship(
     variables = relationship.variables
     if relationship.unaccounted_uncertainty:
         variables = relationship.variables[:-1]
-    axes.plot(
-        relationship.x_n, relationship.y_n, c=list(_fig_params.TABLEAU)[0]
-    )
+    axes.plot(relationship.x_n, relationship.y_n, c=list(_fig_params.TABLEAU)[0])
     x_label = "{}".format(relationship.abscissa_name)
     if relationship.x_u != UREG.dimensionless:
         x_label += "/${:~L}$".format(relationship.x_u)
@@ -54,9 +50,7 @@ def plot_relationship(
     if relationship.y_u != UREG.dimensionless:
         y_label += "/${:~L}$".format(relationship.y_u)
     axes.set_ylabel(y_label)
-    if isinstance(
-        relationship.ordinate.m.any(), uncertainties.core.AffineScalarFunc
-    ):
+    if isinstance(relationship.ordinate.m.any(), uncertainties.core.AffineScalarFunc):
         axes.fill_between(
             relationship.x_n,
             relationship.y_n - relationship.y_s,
@@ -72,9 +66,7 @@ def plot_relationship(
             color=list(_fig_params.TABLEAU)[1],
         )
     else:
-        plot_samples = np.random.randint(
-            0, variables[0].samples.size, size=100
-        )
+        plot_samples = np.random.randint(0, variables[0].samples.size, size=100)
         for i in plot_samples:
             float_variables = [var.samples[i] for var in variables]
             axes.plot(
@@ -161,9 +153,7 @@ def plot_corner(relationship, figsize=(8, 8)):  # pragma: no cover
         - :py:class:`matplotlib.axes.Axes`: The axes with new plots.
     """
     n = len(relationship.variables)
-    if not all(
-        [isinstance(relationship.variables[i], Distribution) for i in range(n)]
-    ):
+    if not all([isinstance(relationship.variables[i], Distribution) for i in range(n)]):
         raise ValueError(
             "In order to use the corner plot functionality, all relationship "
             "variables must be Distributions. Please run MCMC before "
@@ -177,8 +167,7 @@ def plot_corner(relationship, figsize=(8, 8)):  # pragma: no cover
         else:
             var_labels.append(
                 "{}/${:L}$".format(
-                    relationship.variable_names[i],
-                    relationship.variable_units[i],
+                    relationship.variable_names[i], relationship.variable_units[i],
                 )
             )
     corner(
@@ -199,12 +188,7 @@ def plot_corner(relationship, figsize=(8, 8)):  # pragma: no cover
             ]
         )
         ax[n - 1, j].set_xlim(
-            [
-                i
-                for i in np.percentile(
-                    relationship.variables[j].samples, [0.5, 99.5]
-                )
-            ]
+            [i for i in np.percentile(relationship.variables[j].samples, [0.5, 99.5])]
         )
     for j in range(n - 1):
         ax[j + 1, 0].set_yticks(
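A hedged sketch of how the two plotting helpers touched above are typically driven. The `plot_relationship` signature and the requirement that corner plots need Distribution variables (i.e. a prior MCMC run) come from the diff; the synthetic data and the `Relationship.mcmc()` call are assumptions about the wider uravu API:

import matplotlib.pyplot as plt
import numpy as np

from uravu import plotting, utils
from uravu.relationship import Relationship

# Synthetic straight-line data with a constant uncertainty (illustrative only).
x = np.linspace(0, 9, 10)
y = 2.4 * x + 1.2 + np.random.normal(0, 0.2, size=10)
modeller = Relationship(utils.straight_line, x, y, np.full(10, 0.2))

modeller.mcmc()  # assumed entry point for the MCMC sampling mentioned in the docstring

plotting.plot_relationship(modeller)  # signature as reformatted above
plotting.plot_corner(modeller)  # raises ValueError unless every variable is a Distribution
plt.show()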
38 changes: 10 additions & 28 deletions uravu/relationship.py
@@ -143,29 +143,19 @@ def __str__(self):
             for i in self.y_n:
                 string += "{:.2e} ".format(i)
             string += "] \n"
-            if isinstance(
-                self.ordinate.m.any(), uncertainties.core.AffineScalarFunc
-            ):
+            if isinstance(self.ordinate.m.any(), uncertainties.core.AffineScalarFunc):
                 string += "Ordinate uncertainty: [ "
                 for i in self.y_s:
                     string += "{:.2e} ".format(i)
                 string += "] \n"
         else:
-            string += (
-                "Abscissa: "
-                "[ {:.2e} {:.2e} ... {:.2e} {:.2e} ] \n".format(
-                    *self.x_n[:2], *self.x_n[-2:]
-                )
+            string += "Abscissa: " "[ {:.2e} {:.2e} ... {:.2e} {:.2e} ] \n".format(
+                *self.x_n[:2], *self.x_n[-2:]
             )
-            string += (
-                "Ordinate: "
-                "[ {:.2e} {:.2e} ... {:.2e} {:.2e} ] \n".format(
-                    *self.y_n[:2], *self.y_n[-2:]
-                )
+            string += "Ordinate: " "[ {:.2e} {:.2e} ... {:.2e} {:.2e} ] \n".format(
+                *self.y_n[:2], *self.y_n[-2:]
             )
-            if isinstance(
-                self.ordinate.m.any(), uncertainties.core.AffineScalarFunc
-            ):
+            if isinstance(self.ordinate.m.any(), uncertainties.core.AffineScalarFunc):
                 string += (
                     "Ordinate uncertainty: "
                     "[ {:.2e} {:.2e} ... {:.2e} {:.2e} ]\n".format(
@@ -180,9 +170,7 @@ def __str__(self):
         for var in self.variables:
             if isinstance(var, Distribution):
                 if var.normal:
-                    string += "{:.2e}+/-{:.2e} ".format(
-                        var.n, var.n - var.con_int[0]
-                    )
+                    string += "{:.2e}+/-{:.2e} ".format(var.n, var.n - var.con_int[0])
                 else:
                     string += "{:.2e}+{:.2e}-{:.2e} ".format(
                         var.n, var.con_int[1] - var.n, var.n - var.con_int[0]
@@ -192,13 +180,9 @@ def __str__(self):
         string += "] \n"
         if self.nested_sampling_done:
             string += "ln(evidence): {:.2e} \n".format(self.ln_evidence)
-        string += "Unaccounted uncertainty: {} \n".format(
-            self.unaccounted_uncertainty
-        )
+        string += "Unaccounted uncertainty: {} \n".format(self.unaccounted_uncertainty)
         string += "MCMC performed: {} \n".format(self.mcmc_done)
-        string += "Nested sampling performed: {} \n".format(
-            self.nested_sampling_done
-        )
+        string += "Nested sampling performed: {} \n".format(self.nested_sampling_done)
         return string

     def __repr__(self):
@@ -298,9 +282,7 @@ def y_s(self):
         Returns:
             :py:attr:`array_like`: Ordinate uncertainties.
         """
-        if isinstance(
-            self.ordinate.m.any(), uncertainties.core.AffineScalarFunc
-        ):
+        if isinstance(self.ordinate.m.any(), uncertainties.core.AffineScalarFunc):
             return unp.std_devs(self.ordinate.m)
         else:
             return None
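The `y_s` property reformatted above returns the ordinate standard deviations only when the y values carry `uncertainties` objects. A hedged sketch of the two cases; the data are illustrative, and the two constructor calls mirror the test file further below:

import numpy as np

from uravu import utils
from uravu.relationship import Relationship

x = np.linspace(0, 9, 10)
y = np.linspace(1, 10, 10)

# With an ordinate uncertainty the magnitudes become AffineScalarFunc objects,
# so y_s returns their standard deviations via unp.std_devs.
with_err = Relationship(utils.straight_line, x, y, np.full(10, 0.1))
print(with_err.y_s)  # -> array of 0.1 values

# Without an uncertainty the isinstance check fails and y_s is None.
no_err = Relationship(utils.straight_line, x, y)
print(no_err.y_s)  # -> None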
4 changes: 1 addition & 3 deletions uravu/sampling.py
@@ -107,9 +107,7 @@ def ln_probability(
     )


-def nested_sampling(
-    relationship, prior_function=None, progress=True, **kwargs
-):
+def nested_sampling(relationship, prior_function=None, progress=True, **kwargs):
     """
     Perform the nested sampling in order to determine the Bayesian natural log evidence. See the :py:func:`dynesty.NestedSampler.run_nested()` documentation.
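A hedged sketch of calling the module-level function whose signature was joined above. `modeller` is assumed to be a Relationship built as in the earlier sketches, and treating the return value as the dynesty results is an assumption guided by the docstring's reference to dynesty.NestedSampler.run_nested():

from uravu import sampling

# Signature as reformatted above: the relationship, an optional prior function,
# a progress flag, and keyword arguments that the docstring points at dynesty.
results = sampling.nested_sampling(modeller, progress=False)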
10 changes: 2 additions & 8 deletions uravu/tests/test_optimize.py
@@ -39,9 +39,7 @@ def test_ln_likelihood_b(self):
         """
         test_x = np.linspace(0, 99, 10)
         test_y = np.ones(10)
-        test_rel = relationship.Relationship(
-            utils.straight_line, test_x, test_y
-        )
+        test_rel = relationship.Relationship(utils.straight_line, test_x, test_y)
         expected_lnl = -45.21054955719477
         actual_lnl = optimize.ln_likelihood(
             test_rel.variables,
@@ -76,11 +74,7 @@ def test_negative_lnl_b(self):
         test_y = np.ones(10)
         test_y_e = np.ones(10) * 0.1
         test_rel = relationship.Relationship(
-            utils.straight_line,
-            test_x,
-            test_y,
-            test_y_e,
-            unaccounted_uncertainty=True,
+            utils.straight_line, test_x, test_y, test_y_e, unaccounted_uncertainty=True,
         )
         expected_negtive_lnl = 45.21123241122563
         actual_negative_lnl = optimize.negative_lnl(
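Finally, a sketch of the construction pattern exercised by the test reformatted above. Only the Relationship call and the unaccounted_uncertainty flag are taken from the diff; the printed variable count is an assumption that the extra uncertainty term appears as an additional entry in relationship.variables, consistent with the [:-1] slice in plot_relationship:

import numpy as np

from uravu import relationship, utils

test_x = np.linspace(0, 99, 10)
test_y = np.ones(10)
test_y_e = np.ones(10) * 0.1

test_rel = relationship.Relationship(
    utils.straight_line, test_x, test_y, test_y_e, unaccounted_uncertainty=True,
)

# straight_line has two parameters; with unaccounted_uncertainty=True one extra
# variable is expected, giving three entries in total (an assumption).
print(len(test_rel.variables))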
