Merge pull request #3183 from unnonouno/new-embed_id
New EmbedID
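
For context, chainer.functions.embed_id(x, W, ignore_label) looks up rows of
the weight matrix W by the integer ids in x, writing a zero vector wherever x
equals ignore_label. A minimal sketch of the function under test (shapes
chosen only for illustration):

    import numpy
    import chainer.functions as F

    W = numpy.random.uniform(-1, 1, (3, 2)).astype('f')  # 3 ids, 2-dim rows
    x = numpy.array([0, 1, -1], dtype=numpy.int32)       # -1 marks padding
    y = F.embed_id(x, W, ignore_label=-1)
    # y.data[0] == W[0], y.data[1] == W[1], y.data[2] == [0, 0]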
Showing 2 changed files with 174 additions and 8 deletions.
tests/chainer_tests/functions_tests/connection_tests/test_embed_id.py (130 additions, 0 deletions)
@@ -0,0 +1,130 @@
import unittest

import numpy

import chainer
from chainer import cuda
from chainer.functions.connection import embed_id
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition


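# Each dict passed to testing.parameterize below becomes one generated test
# class; its keys (x_data, ignore_label) are set as attributes on the case.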
@testing.parameterize(
    {'x_data': [0, 1, 0], 'ignore_label': None},
    {'x_data': [[0, 1, 0], [1, 0, 1]], 'ignore_label': None},
    {'x_data': [0, 1, -1], 'ignore_label': -1},
    {'x_data': [[0, 1, -1], [-1, 0, 1]], 'ignore_label': -1},
)
class TestEmbedID(unittest.TestCase):

    def setUp(self):
        self.x = numpy.array(self.x_data, dtype=numpy.int32)
        # Weight matrix: 3 ids in the vocabulary, 2-dimensional embeddings.
        self.W = numpy.random.uniform(-1, 1, (3, 2)).astype('f')
        y_shape = self.x.shape + (2,)
        self.gy = numpy.random.uniform(-1, 1, y_shape).astype(numpy.float32)
        self.ggW = numpy.random.uniform(-1, 1, (3, 2)).astype('f')

    def check_forward(self, x_data, W_data):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        y = chainer.functions.embed_id(x, W, self.ignore_label)
        self.assertEqual(y.data.dtype, numpy.float32)

        # Build the expected output by plain indexing: each id selects a row
        # of W, and ids equal to the ignore label (-1 in these parameter
        # sets) map to a zero vector.
        y_expect = numpy.empty_like(self.gy)
        for i in numpy.ndindex(self.x.shape):
            if self.x[i] == -1:
                y_expect[i] = 0
            else:
                y_expect[i] = self.W[int(self.x[i])]

        # embed_id is a pure row lookup, so the result must match exactly.
        testing.assert_allclose(y_expect, y.data, atol=0, rtol=0)

    @condition.retry(3)
    def test_forward_cpu(self):
        self.check_forward(self.x, self.W)

    @attr.gpu
    @condition.retry(3)
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.W))

    def check_backward(self, x_data, W_data, y_grad):
        def f(x, W):
            return chainer.functions.embed_id(x, W, self.ignore_label)

        # Compare the analytic gradient of embed_id against a numerical one.
        gradient_check.check_backward(f, (x_data, W_data), y_grad)

    @condition.retry(3)
    def test_backward_cpu(self):
        self.check_backward(self.x, self.W, self.gy)

    @attr.gpu
    @condition.retry(3)
    def test_backward_gpu(self):
        self.check_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.W), cuda.to_gpu(self.gy))

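    # The squared output below makes the gradient with respect to W depend on
    # W itself; embed_id alone is linear in W, which would leave nothing for
    # the second-order check to verify.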
    def check_double_backward(self, x_data, W_data, gy_data, ggW_data):
        def f(W):
            y = chainer.functions.embed_id(
                x_data, W, self.ignore_label)
            return y * y

        gradient_check.check_double_backward(
            f, W_data, gy_data, ggW_data)

    @condition.retry(3)
    def test_double_backward_cpu(self):
        self.check_double_backward(self.x, self.W, self.gy, self.ggW)

    @attr.gpu
    @condition.retry(3)
    def test_double_backward_gpu(self):
        self.check_double_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.W), cuda.to_gpu(self.gy),
            cuda.to_gpu(self.ggW))


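# This class drives EmbedIDGrad, the backward function, directly; the extra
# parameter sets use ignore labels (2 and 1) that are also valid row indices.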
@testing.parameterize(
    {'x_data': [0, 1, 0], 'ignore_label': None},
    {'x_data': [[0, 1, 0], [1, 0, 1]], 'ignore_label': None},
    {'x_data': [0, 1, -1], 'ignore_label': -1},
    {'x_data': [[0, 1, -1], [-1, 0, 1]], 'ignore_label': -1},
    {'x_data': [0, 1, 2], 'ignore_label': 2},
    {'x_data': [[0, 1, 0], [1, 0, 1]], 'ignore_label': 1},
)
class TestEmbedIdGrad(unittest.TestCase):

    n_unit = (4,)
    w_shape = (4, 2)

    def setUp(self):
        self.x = numpy.array(self.x_data, dtype='i')
        self.gy = numpy.random.uniform(
            -1, 1, self.x.shape + (2,)).astype('f')
        self.ggW = numpy.random.uniform(-1, 1, self.w_shape).astype('f')

    def check_backward(self, x, gy, ggW):
        def f(x, gy):
            emb = embed_id.EmbedIDGrad(
                self.w_shape, self.ignore_label)
            # numpy.zeros(()) is passed as a dummy weight input; the weight
            # shape is supplied through the constructor's w_shape instead.
            return emb.apply((x, numpy.zeros(()), gy))[0]

        gradient_check.check_backward(f, (x, gy), (ggW,))

    @condition.retry(3)
    def test_backward_cpu(self):
        self.check_backward(self.x, self.gy, self.ggW)

    @attr.gpu
    @condition.retry(3)
    def test_backward_gpu(self):
        self.check_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.gy), cuda.to_gpu(self.ggW))


testing.run_module(__name__, __file__)
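
The run_module hook at the bottom lets the file be executed on its own
(python test_embed_id.py); within a full checkout the same tests are picked
up by the regular test runner.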