Skip to content

Commit

Permalink
log_prior -> log_prior_density (#1329)
Browse files Browse the repository at this point in the history
  • Loading branch information
st-- committed Mar 17, 2020
1 parent 516f97c commit e680397
Show file tree
Hide file tree
Showing 4 changed files with 17 additions and 15 deletions.
4 changes: 2 additions & 2 deletions gpflow/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,8 @@ def __init__(
unconstrained_value, dtype=dtype, name=name, trainable=trainable
)

def log_prior(self):
""" Prior probability density of the constrained variable. """
def log_prior_density(self):
""" Log of the prior probability density of the constrained variable. """

if self.prior is None:
return tf.convert_to_tensor(0.0, dtype=self.dtype)
Expand Down
16 changes: 9 additions & 7 deletions gpflow/models/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
from ..kernels import Kernel
from ..likelihoods import Likelihood
from ..mean_functions import MeanFunction, Zero
from ..utilities import ops
from ..utilities import ops, to_default_float

Data = TypeVar("Data", Tuple[tf.Tensor, tf.Tensor], tf.Tensor)
DataPoint = tf.Tensor
Expand All @@ -45,14 +45,16 @@ def neg_log_marginal_likelihood(self, *args, **kwargs) -> tf.Tensor:
return -self.log_marginal_likelihood(*args, **kwargs)

def log_marginal_likelihood(self, *args, **kwargs) -> tf.Tensor:
    """
    Log marginal likelihood of the model: the data log-likelihood plus the
    sum of the log prior densities of all trainable (constrained) parameters.

    *args/**kwargs are forwarded unchanged to ``log_likelihood`` (e.g. the
    data for models that do not store it).
    """
    # NOTE: the prior term uses log_prior_density(), the renamed form of the
    # old log_prior() method (renamed in this commit).
    return self.log_likelihood(*args, **kwargs) + self.log_prior_density()

def log_prior_density(self) -> tf.Tensor:
    """
    Sum of the log prior probability densities of all (constrained) variables
    in this model.

    :returns: a scalar tensor; zero (in the default float dtype) when the
        model has no trainable parameters.
    """
    if self.trainable_parameters:
        # Each Parameter exposes log_prior_density(); parameters without a
        # prior contribute 0.0, so tf.add_n over all of them is safe.
        return tf.add_n([p.log_prior_density() for p in self.trainable_parameters])
    else:
        # No parameters at all: return a well-typed zero so callers can
        # always add this term to the log likelihood.
        return to_default_float(0.0)

@abc.abstractmethod
def log_likelihood(self, *args, **kwargs) -> tf.Tensor:
Expand Down
2 changes: 1 addition & 1 deletion tests/gpflow/models/test_gpr.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,4 +44,4 @@ def test_non_trainable_model_objective():
set_trainable(model, False)

_ = model.log_marginal_likelihood()
assert model.log_prior() == 0.0
assert model.log_prior_density() == 0.0
10 changes: 5 additions & 5 deletions tests/gpflow/test_base_prior.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def test_log_prior_with_no_prior():
even if it has a transform to constrain it.
"""
param = gpflow.Parameter(5.3, transform=gpflow.utilities.positive())
assert param.log_prior().numpy() == 0.0
assert param.log_prior_density().numpy() == 0.0


def test_log_prior_for_uniform_prior():
Expand All @@ -59,9 +59,9 @@ def test_log_prior_for_uniform_prior():

uniform_prior = Uniform(low=np.float64(0), high=np.float64(100))
param = gpflow.Parameter(1.0, transform=gpflow.utilities.positive(), prior=uniform_prior)
low_value = param.log_prior().numpy()
low_value = param.log_prior_density().numpy()
param.assign(10.0)
high_value = param.log_prior().numpy()
high_value = param.log_prior_density().numpy()

assert np.isclose(low_value, high_value)

Expand All @@ -78,9 +78,9 @@ def test_log_prior_on_unconstrained():
param = gpflow.Parameter(
initial_value, transform=Exp(), prior=uniform_prior, prior_on=PriorOn.UNCONSTRAINED,
)
low_value = param.log_prior().numpy()
low_value = param.log_prior_density().numpy()
param.assign(scale_factor * initial_value)
high_value = param.log_prior().numpy()
high_value = param.log_prior_density().numpy()

assert np.isclose(low_value, high_value + np.log(scale_factor))

Expand Down

0 comments on commit e680397

Please sign in to comment.