Skip to content

Commit

Permalink
Run tests with cumo with env var RED_CHAINER_GPU=ID
Browse files Browse the repository at this point in the history
  • Loading branch information
sonots committed Nov 12, 2018
1 parent 6a31a47 commit 5fe88b4
Show file tree
Hide file tree
Showing 29 changed files with 292 additions and 260 deletions.
18 changes: 18 additions & 0 deletions README.md
Expand Up @@ -40,6 +40,24 @@ $ bundle exec ruby examples/mnist/mnist.rb
$ ruby examples/mnist/mnist.rb
```

## Development

### Run tests

```
$ bundle exec ruby test/run_test.rb
```

### Run tests with Cumo

On a GPU machine, add `gem 'cumo'` to your Gemfile and run `bundle install`.

Run the tests with the `RED_CHAINER_GPU` environment variable set to the GPU device ID to use, for example:

```
$ bundle exec env RED_CHAINER_GPU=0 ruby test/run_test.rb
```

## License

The MIT license. See [LICENSE.txt](./LICENSE.txt) for details.
Expand Down
22 changes: 6 additions & 16 deletions test/backend_test.rb
Expand Up @@ -3,23 +3,13 @@
require 'chainer'

class TestBackend < Test::Unit::TestCase
def test_get_array_module_for_numo()
assert Numo == Chainer.get_array_module(Numo::NArray[])
assert Numo == Chainer.get_array_module(Chainer::Variable.new(Numo::NArray[]))
def test_get_array_module
assert xm == Chainer.get_array_module(xm::NArray[])
assert xm == Chainer.get_array_module(Chainer::Variable.new(xm::NArray[]))
end

def test_get_array_module_for_cumo()
assert_equal(Cumo, Chainer.get_array_module(Cumo::NArray[]))
assert_equal(Cumo, Chainer.get_array_module(Chainer::Variable.new(Cumo::NArray[])))
end if Chainer::CUDA.available?

def test_array_p_for_numo()
assert_equal(Numo, Chainer.get_array_module(Numo::NArray[]))
assert_equal(Numo, Chainer.get_array_module(Chainer::Variable.new(Numo::NArray[])))
def test_array_p
assert_equal(xm, Chainer.get_array_module(xm::NArray[]))
assert_equal(xm, Chainer.get_array_module(Chainer::Variable.new(xm::NArray[])))
end

def test_array_p_for_cumo()
assert_equal(Cumo, Chainer.get_array_module(Cumo::NArray[]))
assert_equal(Cumo, Chainer.get_array_module(Chainer::Variable.new(Cumo::NArray[])))
end if Chainer::CUDA.available?
end
16 changes: 8 additions & 8 deletions test/dataset/convert_test.rb
Expand Up @@ -24,7 +24,7 @@ def check_concat_arrays(arrays, device: nil)
end

def test_concat_arrays()
arrays = get_arrays_to_concat(Numo)
arrays = get_arrays_to_concat(xm)
check_concat_arrays(arrays)
end

Expand All @@ -46,7 +46,7 @@ def check_concat_tuples(tuples, device: nil)
end

def test_concat_tuples()
tuples = get_tuple_arrays_to_concat(Numo)
tuples = get_tuple_arrays_to_concat(xm)
check_concat_tuples(tuples)
end
end
Expand All @@ -69,7 +69,7 @@ def check_concat_arrays_padding(xumo)
end

def test_concat_arrays_padding()
check_concat_arrays_padding(Numo)
check_concat_arrays_padding(xm)
end

def check_concat_tuples_padding(xumo)
Expand Down Expand Up @@ -106,7 +106,7 @@ def check_concat_tuples_padding(xumo)
end

def test_concat_tuples_padding()
check_concat_tuples_padding(Numo)
check_concat_tuples_padding(xm)
end
end

Expand All @@ -122,7 +122,7 @@ def check_device(array, device)
if device && device >= 0
# T.B.I (GPU Check)
else
assert_true array.is_a?(Numo::NArray)
assert_true array.is_a?(xm::NArray)
end
end

Expand All @@ -132,7 +132,7 @@ def check_concat_arrays(arrays, device:, expected_type:)
check_device(array, device)

array.to_a.zip(arrays.to_a).each do |x, y|
assert_true Numo::NArray.cast(y).nearly_eq(Numo::NArray.cast(x)).all?
assert_true xm::NArray.cast(y).nearly_eq(xm::NArray.cast(x)).all?
end
end

Expand All @@ -141,8 +141,8 @@ def test_concat_arrays(data)
@padding = data[:padding]

[-1, nil].each do |device|
check_concat_arrays(@@int_arrays, device: device, expected_type: Numo::Int64)
check_concat_arrays(@@float_arrays, device: device, expected_type: Numo::DFloat)
check_concat_arrays(@@int_arrays, device: device, expected_type: xm::Int64)
check_concat_arrays(@@float_arrays, device: device, expected_type: xm::DFloat)
end
end
end
23 changes: 17 additions & 6 deletions test/device_test.rb
Expand Up @@ -18,6 +18,10 @@ def test_use
end

class TestGpuDevice < Test::Unit::TestCase
def setup
require_gpu
end

def test_xm
assert Chainer::GpuDevice.new.xm == Cumo
end
Expand All @@ -35,16 +39,22 @@ def test_use
begin
Chainer::GpuDevice.new(0).use
assert Cumo::CUDA::Runtime.cudaGetDevice == 0
ensure
Cumo::CUDA::Runtime.cudaSetDevice(orig_device_id)
end
end

if Chainer::CUDA.available?(1)
Chainer::GpuDevice.new(1).use
assert Cumo::CUDA::Runtime.cudaGetDevice == 1
end
def test_use_1
require_gpu(1)
orig_device_id = Cumo::CUDA::Runtime.cudaGetDevice
begin
Chainer::GpuDevice.new(1).use
assert Cumo::CUDA::Runtime.cudaGetDevice == 1
ensure
Cumo::CUDA::Runtime.cudaSetDevice(orig_device_id)
end
end
end if Chainer::CUDA.available?
end

class TestGetDevice < Test::Unit::TestCase
def test_device
Expand All @@ -57,8 +67,9 @@ def test_negative_integer
end

def test_non_negative_integer
require_gpu
assert Chainer.get_device(0) == Chainer::GpuDevice.new(0)
end if Chainer::CUDA.available?
end
end

def test_set_get_default_device
Expand Down
10 changes: 5 additions & 5 deletions test/functions/activation/leaky_relu_test.rb
Expand Up @@ -4,10 +4,10 @@

class Chainer::Functions::Activation::LeakyReLUTest < Test::Unit::TestCase
data = {
'test1' => {shape: [3, 2], dtype: Numo::SFloat},
'test2' => {shape: [], dtype: Numo::SFloat},
'test3' => {shape: [3, 2], dtype: Numo::DFloat},
'test4' => {shape: [], dtype: Numo::DFloat}}
'test1' => {shape: [3, 2], dtype: xm::SFloat},
'test2' => {shape: [], dtype: xm::SFloat},
'test3' => {shape: [3, 2], dtype: xm::DFloat},
'test4' => {shape: [], dtype: xm::DFloat}}

def _setup(data)
# Avoid unstability of numerical grad
Expand All @@ -26,7 +26,7 @@ def _setup(data)
@gy = @dtype.new(@shape).rand(2) - 1
@slope = Random.rand
@check_forward_options = {}
@check_backward_options_dtype = Numo::DFloat
@check_backward_options_dtype = xm::DFloat
end

def check_forward(x_data)
Expand Down
20 changes: 10 additions & 10 deletions test/functions/activation/log_softmax_test.rb
Expand Up @@ -6,15 +6,15 @@

class Chainer::Functions::Activation::LogSoftmaxTest < Test::Unit::TestCase
data = {
# Not Support test1 case. See Numo::NArray issue #78.
#'test1' => {shape: nil, dtype: Numo::SFloat},
'test2' => {shape: [2, 3], dtype: Numo::SFloat},
'test3' => {shape: [2, 2, 3], dtype: Numo::SFloat},
'test4' => {shape: [2, 2, 2, 3], dtype: Numo::SFloat},
'test5' => {shape: nil, dtype: Numo::DFloat},
'test6' => {shape: [2, 3], dtype: Numo::DFloat},
'test7' => {shape: [2, 2, 3], dtype: Numo::DFloat},
'test8' => {shape: [2, 2, 2, 3], dtype: Numo::DFloat}}
# Not Support test1 case. See xm::NArray issue #78.
#'test1' => {shape: nil, dtype: xm::SFloat},
'test2' => {shape: [2, 3], dtype: xm::SFloat},
'test3' => {shape: [2, 2, 3], dtype: xm::SFloat},
'test4' => {shape: [2, 2, 2, 3], dtype: xm::SFloat},
'test5' => {shape: nil, dtype: xm::DFloat},
'test6' => {shape: [2, 3], dtype: xm::DFloat},
'test7' => {shape: [2, 2, 3], dtype: xm::DFloat},
'test8' => {shape: [2, 2, 2, 3], dtype: xm::DFloat}}

def _setup(data)
@shape = data[:shape]
Expand All @@ -28,7 +28,7 @@ def _setup(data)
end
@gy = @dtype.new(@x.shape).rand(2) - 1
@check_forward_options = {}
@check_backward_options = {dtype: Numo::DFloat}
@check_backward_options = {dtype: xm::DFloat}
end

def check_forward(x_data, use_cudnn: "always")
Expand Down
8 changes: 4 additions & 4 deletions test/functions/activation/relu_test.rb
Expand Up @@ -4,10 +4,10 @@

class Chainer::Functions::Activation::ReLUTest < Test::Unit::TestCase
data = {
'test1' => {shape: [3, 2], dtype: Numo::SFloat},
'test2' => {shape: [], dtype: Numo::SFloat},
'test3' => {shape: [3, 2], dtype: Numo::DFloat},
'test4' => {shape: [], dtype: Numo::DFloat}}
'test1' => {shape: [3, 2], dtype: xm::SFloat},
'test2' => {shape: [], dtype: xm::SFloat},
'test3' => {shape: [3, 2], dtype: xm::DFloat},
'test4' => {shape: [], dtype: xm::DFloat}}

def _setup(data)
# Avoid unstability of numerical grad
Expand Down
8 changes: 4 additions & 4 deletions test/functions/activation/sigmoid_test.rb
Expand Up @@ -4,10 +4,10 @@

class Chainer::Functions::Activation::SigmoidTest < Test::Unit::TestCase
data = {
'test1' => {shape: [3, 2], dtype: Numo::SFloat},
'test2' => {shape: [], dtype: Numo::SFloat},
'test3' => {shape: [3, 2], dtype: Numo::DFloat},
'test4' => {shape: [], dtype: Numo::DFloat}}
'test1' => {shape: [3, 2], dtype: xm::SFloat},
'test2' => {shape: [], dtype: xm::SFloat},
'test3' => {shape: [3, 2], dtype: xm::DFloat},
'test4' => {shape: [], dtype: xm::DFloat}}

def _setup(data)
@shape = data[:shape]
Expand Down
8 changes: 4 additions & 4 deletions test/functions/activation/tanh_test.rb
Expand Up @@ -4,10 +4,10 @@

class Chainer::Functions::Activation::TanhTest < Test::Unit::TestCase
data = {
'test1' => {shape: [3, 2], dtype: Numo::SFloat},
'test2' => {shape: [], dtype: Numo::SFloat},
'test3' => {shape: [3, 2], dtype: Numo::DFloat},
'test4' => {shape: [], dtype: Numo::DFloat}}
'test1' => {shape: [3, 2], dtype: xm::SFloat},
'test2' => {shape: [], dtype: xm::SFloat},
'test3' => {shape: [3, 2], dtype: xm::DFloat},
'test4' => {shape: [], dtype: xm::DFloat}}

def _setup(data)
@shape = data[:shape]
Expand Down
6 changes: 3 additions & 3 deletions test/functions/array/broadcast_to_test.rb
Expand Up @@ -18,7 +18,7 @@ class Chainer::Functions::Array::BroadcastToTest < Test::Unit::TestCase
}
]

dtypes = [ Numo::SFloat, Numo::DFloat ]
dtypes = [ xm::SFloat, xm::DFloat ]

data = shapes.map.with_index {|shape, i|
dtypes.map do |dtype|
Expand All @@ -40,8 +40,8 @@ def test_backward(data)
in_data = data[:dtype].new(data[:in_shape]).rand
grads = data[:dtype].new(data[:out_shape]).rand
check_backward_options = {}
if data[:dtype] == Numo::SFloat
check_backward_options = { eps: 2 ** -5, atol: 1e-3, rtol: 1e-2 }
if data[:dtype] == xm::SFloat
check_backward_options = { eps: 2 ** -5, atol: 1e-3, rtol: 1e-2 }
end

func = Chainer::Functions::Array::BroadcastTo.new(data[:out_shape])
Expand Down
4 changes: 3 additions & 1 deletion test/functions/array/reshape_test.rb
Expand Up @@ -3,9 +3,11 @@
require 'chainer/functions/array/reshape'

class Chainer::Functions::Array::ReshapeTest < Test::Unit::TestCase
xm = Chainer.get_default_device.xm

in_shape = [4, 3, 2]
out_shape = [2, 2, 6]
dtypes = [ Numo::SFloat, Numo::DFloat ]
dtypes = [ xm::SFloat, xm::DFloat ]

data = dtypes.reduce({}) {|hash, dtype|
hash[dtype.to_s] = {in_shape: in_shape, out_shape: out_shape, dtype: dtype}
Expand Down
48 changes: 24 additions & 24 deletions test/functions/connection/convolution_2d_test.rb
Expand Up @@ -3,28 +3,28 @@
class Chainer::Functions::Connection::Convolution2DTest < Test::Unit::TestCase
data({
test1: {
case: { x: Numo::DFloat.new(1, 1, 4, 4).seq, w: Numo::DFloat.new(2, 1, 3, 3).seq, options: {} },
expected: Numo::DFloat[[[[258.0, 294.0], [402.0, 438.0]], [[663.0, 780.0], [1131.0, 1248.0]]]]
case: { x: xm::DFloat.new(1, 1, 4, 4).seq, w: xm::DFloat.new(2, 1, 3, 3).seq, options: {} },
expected: xm::DFloat[[[[258.0, 294.0], [402.0, 438.0]], [[663.0, 780.0], [1131.0, 1248.0]]]]
},
test2: {
case: { x: Numo::DFloat.new(1, 2, 4, 4).seq, w: Numo::DFloat.new(2, 2, 3, 3).seq, options: {} },
expected: Numo::DFloat[[[[2793.0, 2946.0], [3405.0, 3558.0]], [[7005.0, 7482.0], [8913.0, 9390.0]]]]
case: { x: xm::DFloat.new(1, 2, 4, 4).seq, w: xm::DFloat.new(2, 2, 3, 3).seq, options: {} },
expected: xm::DFloat[[[[2793.0, 2946.0], [3405.0, 3558.0]], [[7005.0, 7482.0], [8913.0, 9390.0]]]]
},
test3: {
case: { x: Numo::DFloat.new(2, 2, 4, 4).seq, w: Numo::DFloat.new(2, 2, 3, 3).seq, options: {} },
expected: Numo::DFloat[[[[2793.0, 2946.0], [3405.0, 3558.0]], [[7005.0, 7482.0], [8913.0, 9390.0]]], [[[7689.0, 7842.0], [8301.0, 8454.0]], [[22269.0, 22746.0], [24177.0, 24654.0]]]]
case: { x: xm::DFloat.new(2, 2, 4, 4).seq, w: xm::DFloat.new(2, 2, 3, 3).seq, options: {} },
expected: xm::DFloat[[[[2793.0, 2946.0], [3405.0, 3558.0]], [[7005.0, 7482.0], [8913.0, 9390.0]]], [[[7689.0, 7842.0], [8301.0, 8454.0]], [[22269.0, 22746.0], [24177.0, 24654.0]]]]
},
test4: {
case: { x: Numo::DFloat.new(2, 2, 4, 4).seq, w: Numo::DFloat.new(2, 2, 3, 3).seq, options: { stride: 2 } },
expected: Numo::DFloat[[[[2793.0]], [[7005.0]]], [[[7689.0]], [[22269.0]]]]
case: { x: xm::DFloat.new(2, 2, 4, 4).seq, w: xm::DFloat.new(2, 2, 3, 3).seq, options: { stride: 2 } },
expected: xm::DFloat[[[[2793.0]], [[7005.0]]], [[[7689.0]], [[22269.0]]]]
},
test5: {
case: { x: Numo::DFloat.new(2, 2, 4, 4).seq, w: Numo::DFloat.new(2, 2, 3, 3).seq, options: { b: Numo::DFloat[10, 33] } },
expected: Numo::DFloat[[[[2803.0, 2956.0], [3415.0, 3568.0]], [[7038.0, 7515.0], [8946.0, 9423.0]]], [[[7699.0, 7852.0], [8311.0, 8464.0]], [[22302.0, 22779.0], [24210.0, 24687.0]]]]
case: { x: xm::DFloat.new(2, 2, 4, 4).seq, w: xm::DFloat.new(2, 2, 3, 3).seq, options: { b: xm::DFloat[10, 33] } },
expected: xm::DFloat[[[[2803.0, 2956.0], [3415.0, 3568.0]], [[7038.0, 7515.0], [8946.0, 9423.0]]], [[[7699.0, 7852.0], [8311.0, 8464.0]], [[22302.0, 22779.0], [24210.0, 24687.0]]]]
},
test6: {
case: { x: Numo::DFloat.new(2, 2, 4, 4).seq, w: Numo::DFloat.new(2, 2, 3, 3).seq, options: { b: Numo::DFloat[3, 5], pad: 1 } },
expected: Numo::DFloat[[[[1199.0, 1799.0, 1919.0, 1267.0], [1884.0, 2796.0, 2949.0, 1926.0], [2316.0, 3408.0, 3561.0, 2310.0], [1427.0, 2075.0, 2159.0, 1383.0]], [[2713.0, 4177.0, 4513.0, 3069.0], [4586.0, 7010.0, 7487.0, 5060.0], [5882.0, 8918.0, 9395.0, 6308.0], [4093.0, 6181.0, 6481.0, 4337.0]]], [[[3887.0, 5639.0, 5759.0, 3699.0], [5340.0, 7692.0, 7845.0, 4998.0], [5772.0, 8304.0, 8457.0, 5382.0], [3347.0, 4763.0, 4847.0, 3047.0]], [[10009.0, 14929.0, 15265.0, 10109.0], [14954.0, 22274.0, 22751.0, 15044.0], [16250.0, 24182.0, 24659.0, 16292.0], [10621.0, 15781.0, 16081.0, 10609.0]]]]
case: { x: xm::DFloat.new(2, 2, 4, 4).seq, w: xm::DFloat.new(2, 2, 3, 3).seq, options: { b: xm::DFloat[3, 5], pad: 1 } },
expected: xm::DFloat[[[[1199.0, 1799.0, 1919.0, 1267.0], [1884.0, 2796.0, 2949.0, 1926.0], [2316.0, 3408.0, 3561.0, 2310.0], [1427.0, 2075.0, 2159.0, 1383.0]], [[2713.0, 4177.0, 4513.0, 3069.0], [4586.0, 7010.0, 7487.0, 5060.0], [5882.0, 8918.0, 9395.0, 6308.0], [4093.0, 6181.0, 6481.0, 4337.0]]], [[[3887.0, 5639.0, 5759.0, 3699.0], [5340.0, 7692.0, 7845.0, 4998.0], [5772.0, 8304.0, 8457.0, 5382.0], [3347.0, 4763.0, 4847.0, 3047.0]], [[10009.0, 14929.0, 15265.0, 10109.0], [14954.0, 22274.0, 22751.0, 15044.0], [16250.0, 24182.0, 24659.0, 16292.0], [10621.0, 15781.0, 16081.0, 10609.0]]]]
},

})
Expand All @@ -37,28 +37,28 @@ def test_convolution_2d(data)
data({
test1: {
case: {
x: Numo::DFloat.new(2, 1, 4, 3).seq,
w: Numo::DFloat.new(2, 1, 3, 3).seq,
x: xm::DFloat.new(2, 1, 4, 3).seq,
w: xm::DFloat.new(2, 1, 3, 3).seq,
b: nil,
gy: Numo::DFloat.new(2, 2, 3, 2).seq
gy: xm::DFloat.new(2, 2, 3, 2).seq
},
expected: {
gx: Numo::DFloat[[[[78.0, 171.0, 95.0], [178.0, 386.0, 212.0], [112.0, 239.0, 129.0], [246.0, 522.0, 280.0]]], [[[282.0, 579.0, 299.0], [586.0, 1202.0, 620.0], [316.0, 647.0, 333.0], [654.0, 1338.0, 688.0]]]],
gw: Numo::DFloat[[[[676.0, 1304.0, 624.0], [476.0, 916.0, 436.0], [572.0, 1096.0, 520.0]]], [[[988.0, 1928.0, 936.0], [716.0, 1396.0, 676.0], [884.0, 1720.0, 832.0]]]],
gx: xm::DFloat[[[[78.0, 171.0, 95.0], [178.0, 386.0, 212.0], [112.0, 239.0, 129.0], [246.0, 522.0, 280.0]]], [[[282.0, 579.0, 299.0], [586.0, 1202.0, 620.0], [316.0, 647.0, 333.0], [654.0, 1338.0, 688.0]]]],
gw: xm::DFloat[[[[676.0, 1304.0, 624.0], [476.0, 916.0, 436.0], [572.0, 1096.0, 520.0]]], [[[988.0, 1928.0, 936.0], [716.0, 1396.0, 676.0], [884.0, 1720.0, 832.0]]]],
gb: nil
}
},
test2: {
case: {
x: Numo::DFloat.new(2, 1, 4, 3).seq,
w: Numo::DFloat.new(2, 1, 3, 3).seq,
b: Numo::DFloat.new(2).seq,
gy: Numo::DFloat.new(2, 2, 3, 2).seq
x: xm::DFloat.new(2, 1, 4, 3).seq,
w: xm::DFloat.new(2, 1, 3, 3).seq,
b: xm::DFloat.new(2).seq,
gy: xm::DFloat.new(2, 2, 3, 2).seq
},
expected: {
gx: Numo::DFloat[[[[78.0, 171.0, 95.0], [178.0, 386.0, 212.0], [112.0, 239.0, 129.0], [246.0, 522.0, 280.0]]], [[[282.0, 579.0, 299.0], [586.0, 1202.0, 620.0], [316.0, 647.0, 333.0], [654.0, 1338.0, 688.0]]]],
gw: Numo::DFloat[[[[676.0, 1304.0, 624.0], [476.0, 916.0, 436.0], [572.0, 1096.0, 520.0]]], [[[988.0, 1928.0, 936.0], [716.0, 1396.0, 676.0], [884.0, 1720.0, 832.0]]]],
gb: Numo::DFloat[102.0, 174.0]
gx: xm::DFloat[[[[78.0, 171.0, 95.0], [178.0, 386.0, 212.0], [112.0, 239.0, 129.0], [246.0, 522.0, 280.0]]], [[[282.0, 579.0, 299.0], [586.0, 1202.0, 620.0], [316.0, 647.0, 333.0], [654.0, 1338.0, 688.0]]]],
gw: xm::DFloat[[[[676.0, 1304.0, 624.0], [476.0, 916.0, 436.0], [572.0, 1096.0, 520.0]]], [[[988.0, 1928.0, 936.0], [716.0, 1396.0, 676.0], [884.0, 1720.0, 832.0]]]],
gb: xm::DFloat[102.0, 174.0]
}
}
})
Expand Down

0 comments on commit 5fe88b4

Please sign in to comment.