HeteroskedasticTFPConditional should construct tensors at class-construction, not at module-import time (#1598)
polaschwoebel committed Oct 15, 2020
1 parent 8562503 commit 282d1cd
Showing 1 changed file with 6 additions and 3 deletions.
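For context, here is a toy sketch (not part of the commit page) of the Python behaviour the fix relies on: a default argument is evaluated once, when its enclosing `def` is executed at module-import time, whereas a `None` sentinel defers that work until the function is actually called. The bijector names below are illustrative stand-ins for GPflow's `positive(base="exp")` helper.

# Toy illustration only: default arguments run at import time,
# a None sentinel runs at call time.
import tensorflow_probability as tfp

def eager_default(bijector=tfp.bijectors.Exp()):
    # tfp.bijectors.Exp() is constructed as soon as this module is imported,
    # even if eager_default is never called.
    return bijector

def lazy_default(bijector=None):
    # Nothing is constructed until the function is called without an argument.
    if bijector is None:
        bijector = tfp.bijectors.Exp()
    return bijector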
9 changes: 6 additions & 3 deletions gpflow/likelihoods/multilatent.py
@@ -97,21 +97,24 @@ class HeteroskedasticTFPConditional(MultiLatentTFPConditional):
     def __init__(
         self,
         distribution_class: Type[tfp.distributions.Distribution] = tfp.distributions.Normal,
-        scale_transform: tfp.bijectors.Bijector = positive(base="exp"),
+        scale_transform: Optional[tfp.bijectors.Bijector] = None,
         **kwargs,
     ):
         """
         :param distribution_class: distribution class parameterized by `loc` and `scale`
             as first and second argument, respectively.
         :param scale_transform: callable/bijector applied to the latent
             function modelling the scale to ensure its positivity.
-            Typically, `tf.exp` or `tf.softplus`, but can be any function f: R -> R^+.
+            Typically, `tf.exp` or `tf.softplus`, but can be any function f: R -> R^+. Defaults to exp if not explicitly specified.
         """
+        if scale_transform is None:
+            scale_transform = positive(base="exp")
+        self.scale_transform = scale_transform
 
         def conditional_distribution(Fs) -> tfp.distributions.Distribution:
             tf.debugging.assert_equal(tf.shape(Fs)[-1], 2)
             loc = Fs[..., :1]
-            scale = scale_transform(Fs[..., 1:])
+            scale = self.scale_transform(Fs[..., 1:])
             return distribution_class(loc, scale)
 
         super().__init__(

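A minimal usage sketch of the changed constructor (not part of the diff), assuming GPflow 2.x exports HeteroskedasticTFPConditional from gpflow.likelihoods:

import tensorflow_probability as tfp
from gpflow.likelihoods import HeteroskedasticTFPConditional

# With the new signature, omitting scale_transform resolves it to
# positive(base="exp") inside __init__, so no TensorFlow tensors are
# created merely by importing gpflow.
likelihood = HeteroskedasticTFPConditional()

# An explicit bijector can still be supplied, e.g. a softplus transform.
likelihood_softplus = HeteroskedasticTFPConditional(
    scale_transform=tfp.bijectors.Softplus()
)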