Update and pin black version
brandonwillard committed Aug 31, 2020
1 parent 35839dc commit f2f8c6c
Showing 4 changed files with 28 additions and 8 deletions.
2 changes: 1 addition & 1 deletion requirements.txt
@@ -9,7 +9,7 @@ pytest>=5.0.0
 pytest-cov>=2.6.1
 pytest-html>=1.20.0
 pylint>=2.3.1
-black>=19.3b0; platform.python_implementation!='PyPy'
+black==20.8b1; platform.python_implementation!='PyPy'
 diff-cover
 ipython
 versioneer
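The environment marker on the black requirement means pip installs black only on CPython; under PyPy the requirement is skipped. The marker is compared against the running interpreter's implementation name, which can be inspected from the standard library (a minimal illustration, not part of this repository):

import platform

# pip evaluates the requirement marker itself at install time; this only shows
# the value the marker is compared against, e.g. "CPython" or "PyPy".
print(platform.python_implementation())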
21 changes: 18 additions & 3 deletions symbolic_pymc/theano/random_variables.py
@@ -115,7 +115,12 @@ class MvNormalRVType(RandomVariable):
 
     def __init__(self):
         super().__init__(
-            "multivariate_normal", theano.config.floatX, 1, [1, 2], self._smpl_fn, inplace=True,
+            "multivariate_normal",
+            theano.config.floatX,
+            1,
+            [1, 2],
+            self._smpl_fn,
+            inplace=True,
         )
 
     @classmethod
@@ -376,7 +381,12 @@ class CategoricalRVType(RandomVariable):
 
     def __init__(self):
         super().__init__(
-            "categorical", "int64", 0, [1], sample_categorical, inplace=True,
+            "categorical",
+            "int64",
+            0,
+            [1],
+            sample_categorical,
+            inplace=True,
         )
 
     def make_node(self, pvals, size=None, rng=None, name=None):
@@ -397,7 +407,12 @@ class PolyaGammaRVType(RandomVariable):
 
     def __init__(self):
         super().__init__(
-            "polya-gamma", theano.config.floatX, 0, [0, 0], self._smpl_fn, inplace=True,
+            "polya-gamma",
+            theano.config.floatX,
+            0,
+            [0, 0],
+            self._smpl_fn,
+            inplace=True,
         )
 
     def make_node(self, b, c, size=None, rng=None, name=None):
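The three hunks above are pure formatting changes that follow from the version pin in requirements.txt: black 20.8b1 honors a trailing comma inside brackets by keeping one element per line (the "magic trailing comma"), so the single-line super().__init__(...) calls, which already ended in a comma, are expanded. A minimal sketch of that behavior, assuming black==20.8b1 is installed (the input string and its names are illustrative, not taken from this repository):

import black

# A call that fits on one line but ends with a trailing comma.
src = 'obj.configure("categorical", "int64", 0, [1], sampler, inplace=True,)\n'

# Under black==20.8b1 the trailing comma forces one argument per line; the
# previously pinned 19.3b0 left such calls on a single line, as the removed
# lines above show.
print(black.format_str(src, mode=black.FileMode()))

The same expansion accounts for the theano.scan call changes in the two test files below.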
7 changes: 4 additions & 3 deletions tests/theano/test_opt.py
@@ -31,8 +31,7 @@
 
 @theano.change_flags(compute_test_value="ignore", cxx="", mode="FAST_COMPILE")
 def test_kanren_opt():
-    """Make sure we can run miniKanren "optimizations" over a graph until a fixed-point/normal-form is reached.
-    """
+    """Make sure we can run miniKanren "optimizations" over a graph until a fixed-point/normal-form is reached."""
     x_tt = tt.vector("x")
     c_tt = tt.vector("c")
     d_tt = tt.vector("c")
@@ -740,7 +739,9 @@ def input_step_fn(y_tm1, y_tm2, rng):
 
     Y_rv, _ = theano.scan(
         fn=input_step_fn,
-        outputs_info=[{"initial": tt.as_tensor_variable(np.r_[-1.0, 0.0]), "taps": [-1, -2]},],
+        outputs_info=[
+            {"initial": tt.as_tensor_variable(np.r_[-1.0, 0.0]), "taps": [-1, -2]},
+        ],
         non_sequences=[rng_tt],
         n_steps=10,
     )
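For context on the scan hunk just above: "taps": [-1, -2] tells theano.scan to feed the step function the two previous outputs, which is why "initial" supplies two starting values. A self-contained sketch of the same pattern, assuming Theano is installed (the Fibonacci-style step function is made up for illustration and is not the test's input_step_fn):

import numpy as np
import theano
import theano.tensor as tt


def step(y_tm1, y_tm2):
    # The step function receives the two previous outputs (the taps below).
    return y_tm1 + y_tm2


y, _ = theano.scan(
    fn=step,
    outputs_info=[{"initial": tt.as_tensor_variable(np.r_[0.0, 1.0]), "taps": [-1, -2]}],
    n_steps=10,
)

print(theano.function([], y)())  # ten values built from the two seed entries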
6 changes: 5 additions & 1 deletion tests/theano/test_utils.py
@@ -15,7 +15,11 @@ def scan_fn():
         Y_t = NormalRV(0, 1, name="Y_t")
         return Y_t
 
-    Y_rv, scan_updates = theano.scan(fn=scan_fn, outputs_info=[{}], n_steps=10,)
+    Y_rv, scan_updates = theano.scan(
+        fn=scan_fn,
+        outputs_info=[{}],
+        n_steps=10,
+    )
 
     res = is_random_variable(Y_rv)
     assert res == (Y_rv, Y_rv.owner.op.outputs[0])
