test_dist and test_transform - use central difference instead of forward difference for numerical derivative calculations

Ignore-this: 59436a5ea8082201aa38fc172ca8325e

darcs-hash:20130914192017-3a4db-ac626391fdcb406db4f60b344b6e6c48326f6324
1 parent 6acd5dd commit b8845d9fc6aba45720be54a9821071d51cca73ea @MattShannon committed Sep 14, 2013
Showing with 42 additions and 13 deletions.
  1. +20 −5 armspeech/modelling/test_dist.py
  2. +22 −8 armspeech/modelling/test_transform.py
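Not part of the commit itself, but as context for the diff below: the change replaces forward differences of the form (f(x + eps) - f(x)) / eps with central differences (f(x + eps) - f(x - eps)) / (2 * eps) in the tests that compare numerical and analytic derivatives. A minimal standalone sketch (plain numpy; the function names here are illustrative, not from this repo) of why the central difference is preferable:

import numpy as np

def forward_diff(f, x, eps):
    # one-sided difference: truncation error is O(eps)
    return (f(x + eps) - f(x)) / eps

def central_diff(f, x, eps):
    # symmetric difference: truncation error is O(eps ** 2)
    return (f(x + eps) - f(x - eps)) / (eps * 2.0)

if __name__ == '__main__':
    f = np.exp          # d/dx exp(x) = exp(x), so the exact answer is known
    x, eps = 1.0, 1e-4
    exact = np.exp(x)
    print('forward error:', abs(forward_diff(f, x, eps) - exact))   # roughly 1e-4
    print('central error:', abs(central_diff(f, x, eps) - exact))   # roughly 5e-9

The smaller truncation error means the atol / rtol tolerances passed to assert_allclose in the tests below are less likely to be consumed by discretisation error, so a failure is more likely to indicate a genuinely wrong analytic derivative.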
25 armspeech/modelling/test_dist.py
@@ -556,28 +556,40 @@ def reparse(dist, ps):
@codeDeps(assert_allclose)
def check_logProbDerivInput(dist, input, output, eps):
    inputDirection = randn(*np.shape(input))
-    numericDeriv = (dist.logProb(input + inputDirection * eps, output) - dist.logProb(input, output)) / eps
+    numericDeriv = (
+        dist.logProb(input + inputDirection * eps, output) -
+        dist.logProb(input - inputDirection * eps, output)
+    ) / (eps * 2.0)
    analyticDeriv = np.sum(inputDirection * dist.logProbDerivInput(input, output))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
@codeDeps(assert_allclose)
def check_logProbDerivInput_hasDiscrete(dist, (disc, input), output, eps):
    inputDirection = randn(*np.shape(input))
-    numericDeriv = (dist.logProb((disc, input + inputDirection * eps), output) - dist.logProb((disc, input), output)) / eps
+    numericDeriv = (
+        dist.logProb((disc, input + inputDirection * eps), output) -
+        dist.logProb((disc, input - inputDirection * eps), output)
+    ) / (eps * 2.0)
    analyticDeriv = np.sum(inputDirection * dist.logProbDerivInput((disc, input), output))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
@codeDeps(assert_allclose)
def check_logProbDerivOutput(dist, input, output, eps):
    outputDirection = randn(*np.shape(output))
-    numericDeriv = (dist.logProb(input, output + outputDirection * eps) - dist.logProb(input, output)) / eps
+    numericDeriv = (
+        dist.logProb(input, output + outputDirection * eps) -
+        dist.logProb(input, output - outputDirection * eps)
+    ) / (eps * 2.0)
    analyticDeriv = np.sum(outputDirection * dist.logProbDerivOutput(input, output))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
@codeDeps(assert_allclose)
def check_logProbDerivOutput_hasDiscrete(dist, input, (disc, output), eps):
    outputDirection = randn(*np.shape(output))
-    numericDeriv = (dist.logProb(input, (disc, output + outputDirection * eps)) - dist.logProb(input, (disc, output))) / eps
+    numericDeriv = (
+        dist.logProb(input, (disc, output + outputDirection * eps)) -
+        dist.logProb(input, (disc, output - outputDirection * eps))
+    ) / (eps * 2.0)
    analyticDeriv = np.sum(outputDirection * dist.logProbDerivOutput(input, (disc, output)))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
@@ -627,8 +639,11 @@ def check_derivParams(dist, training, ps, eps):
    distNew = ps.parseAll(dist, params + paramsDirection * eps)
    logLikeNew = trainedAccG(distNew, training, ps = ps).logLike()
    assert_allclose(ps.params(distNew), params + paramsDirection * eps)
+    distNew2 = ps.parseAll(dist, params - paramsDirection * eps)
+    logLikeNew2 = trainedAccG(distNew2, training, ps = ps).logLike()
+    assert_allclose(ps.params(distNew2), params - paramsDirection * eps)
-    numericDeriv = (logLikeNew - logLike) / eps
+    numericDeriv = (logLikeNew - logLikeNew2) / (eps * 2.0)
    analyticDeriv = np.dot(derivParams, paramsDirection)
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-4, rtol = 1e-4)
30 armspeech/modelling/test_transform.py
@@ -152,28 +152,34 @@ def gen_SumOfTanh1D(numTanh = 3):
@codeDeps(assert_allclose)
def check_deriv(transform, x, eps):
    direction = randn(*np.shape(x))
-    numericDeriv = (transform(x + direction * eps) - transform(x)) / eps
+    numericDeriv = (transform(x + direction * eps) -
+                    transform(x - direction * eps)) / (eps * 2.0)
    analyticDeriv = np.dot(direction, transform.deriv(x))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
@codeDeps(assert_allclose)
def check_derivDeriv(transform, x, eps):
    assert np.shape(x) == ()
    direction = randn()
-    numericDeriv = (transform.deriv(x + direction * eps) - transform.deriv(x)) / eps
+    numericDeriv = (transform.deriv(x + direction * eps) -
+                    transform.deriv(x - direction * eps)) / (eps * 2.0)
    analyticDeriv = np.dot(direction, transform.derivDeriv(x))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
@codeDeps(assert_allclose)
def check_derivParams(transform, x, eps):
    params = transform.params
    paramsDirection = randn(*np.shape(params))
-    numericDeriv = (transform.parseAll(params + paramsDirection * eps)(x) - transform(x)) / eps
+    numericDeriv = (
+        transform.parseAll(params + paramsDirection * eps)(x) -
+        transform.parseAll(params - paramsDirection * eps)(x)
+    ) / (eps * 2.0)
    analyticDeriv = np.dot(paramsDirection, transform.derivParams(x))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
@codeDeps(assert_allclose)
def check_derivParamsDeriv(transform, x, eps):
    assert np.shape(x) == ()
    direction = randn()
-    numericDeriv = (transform.derivParams(x + direction * eps) - transform.derivParams(x)) / eps
+    numericDeriv = (transform.derivParams(x + direction * eps) -
+                    transform.derivParams(x - direction * eps)) / (eps * 2.0)
    analyticDeriv = np.dot(direction, transform.derivParamsDeriv(x))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
@codeDeps(logDet)
@@ -195,14 +201,18 @@ def check_logJac(transform, x):
@codeDeps(assert_allclose)
def check_logJacDeriv(transform, x, eps):
    direction = randn(*np.shape(x))
-    numericDeriv = (transform.logJac(x + direction * eps) - transform.logJac(x)) / eps
+    numericDeriv = (transform.logJac(x + direction * eps) -
+                    transform.logJac(x - direction * eps)) / (eps * 2.0)
    analyticDeriv = np.dot(direction, transform.logJacDeriv(x))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-5, rtol = 1e-4)
@codeDeps(assert_allclose)
def check_logJacDerivParams(transform, x, eps):
    params = transform.params
    paramsDirection = randn(*np.shape(params))
-    numericDerivLJ = (transform.parseAll(params + paramsDirection * eps).logJac(x) - transform.logJac(x)) / eps
+    numericDerivLJ = (
+        transform.parseAll(params + paramsDirection * eps).logJac(x) -
+        transform.parseAll(params - paramsDirection * eps).logJac(x)
+    ) / (eps * 2.0)
    analyticDerivLJ = np.dot(transform.logJacDerivParams(x), paramsDirection)
    assert_allclose(numericDerivLJ, analyticDerivLJ, atol = 1e-6, rtol = 1e-4)
@codeDeps(assert_allclose)
@@ -389,13 +399,17 @@ def gen_ShiftOutputTransform(shapeInput, shapeOutput):
@codeDeps(assert_allclose)
def check_derivInput(outputTransform, input, x, eps):
    direction = randn(*np.shape(input))
-    numericDeriv = (outputTransform(input + direction * eps, x) - outputTransform(input, x)) / eps
+    numericDeriv = (outputTransform(input + direction * eps, x) -
+                    outputTransform(input - direction * eps, x)) / (eps * 2.0)
    analyticDeriv = np.dot(direction, outputTransform.derivInput(input, x))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
@codeDeps(assert_allclose)
def check_logJacDerivInput(outputTransform, input, x, eps):
    direction = randn(*np.shape(input))
-    numericDeriv = (outputTransform.logJac(input + direction * eps, x) - outputTransform.logJac(input, x)) / eps
+    numericDeriv = (
+        outputTransform.logJac(input + direction * eps, x) -
+        outputTransform.logJac(input - direction * eps, x)
+    ) / (eps * 2.0)
    analyticDeriv = np.dot(direction, outputTransform.logJacDerivInput(input, x))
    assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)
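All of the hunks above follow the same pattern: draw a random direction, step the input (or the parameter vector) a small distance eps along it in both directions, and compare the central-difference estimate of the directional derivative with the analytic derivative projected onto that direction. A standalone sketch of that pattern (plain numpy; check_grad and its arguments are hypothetical names, not this repo's API):

import numpy as np
from numpy.random import randn

def check_grad(f, grad_f, x, eps = 1e-4):
    # random direction with the same shape as x
    direction = randn(*np.shape(x))
    # central-difference estimate of the directional derivative of f at x
    numericDeriv = (f(x + direction * eps) - f(x - direction * eps)) / (eps * 2.0)
    # analytic directional derivative: gradient dotted with the direction
    analyticDeriv = np.dot(direction, grad_f(x))
    np.testing.assert_allclose(numericDeriv, analyticDeriv, atol = 1e-6, rtol = 1e-4)

if __name__ == '__main__':
    # example: f(x) = 0.5 * ||x||^2 has gradient x
    f = lambda x: 0.5 * np.dot(x, x)
    grad_f = lambda x: x
    check_grad(f, grad_f, randn(5))

Checking along a single random direction keeps each test cheap while still catching most errors in an analytic derivative; the tests above apply the same idea to log probabilities, transforms, Jacobian log-determinants, and parameter vectors.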
