Skip to content
This repository has been archived by the owner on Jul 10, 2021. It is now read-only.

Commit

Permalink
Further tweaking of test thresholds.
Browse files Browse the repository at this point in the history
  • Loading branch information
alexjc committed Nov 26, 2015
1 parent 032d871 commit bc01c88
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion sknn/tests/test_data.py
Expand Up @@ -100,7 +100,7 @@ def check(self, a_in, a_out, a_mask):

# Make sure the examples weighted 1.0 have low error, 0.0 high error.
print(abs(a_out - v_out).T * a_mask)
assert_true((abs(a_out - v_out).T * a_mask < 5E-2).all())
assert_true((abs(a_out - v_out).T * a_mask < 1E-1).all())
assert_true((abs(a_out - v_out).T * (1.0 - a_mask) > 2.5E-1).any())

def test_SingleOutputOne(self):
Expand Down
2 changes: 1 addition & 1 deletion sknn/tests/test_rules.py
Expand Up @@ -53,7 +53,7 @@ def test_adagrad(self):
n_iter=1))

def test_AdaDelta(self):
self._run(MLPR(layers=[L("Linear")],
self._run(MLPR(layers=[L("Softmax")],
learning_rule='adadelta',
n_iter=1))

Expand Down

0 comments on commit bc01c88

Please sign in to comment.