from common_utils import TestCase, run_tests, TEST_NUMPY, load_tests

# load_tests from common_utils is used to automatically filter tests for
# sharding on sandcastle. This line silences flake warnings.
load_tests = load_tests

import torch
import unittest

if TEST_NUMPY:
    import numpy as np


class TestDTypeInfo(TestCase):

    def test_invalid_input(self):
        # iinfo is only defined for integer dtypes and finfo only for
        # floating-point dtypes; passing the wrong kind should raise TypeError.
        for dtype in [torch.float32, torch.float64]:
            with self.assertRaises(TypeError):
                _ = torch.iinfo(dtype)

        for dtype in [torch.int64, torch.int32, torch.int16, torch.uint8]:
            with self.assertRaises(TypeError):
                _ = torch.finfo(dtype)

    @unittest.skipIf(not TEST_NUMPY, "Numpy not found")
    def test_iinfo(self):
        # torch.iinfo for each integer dtype should agree with numpy.iinfo
        # on bits, max, and min.
        for dtype in [torch.int64, torch.int32, torch.int16, torch.uint8]:
            x = torch.zeros((2, 2), dtype=dtype)
            xinfo = torch.iinfo(x.dtype)
            xn = x.cpu().numpy()
            xninfo = np.iinfo(xn.dtype)
            self.assertEqual(xinfo.bits, xninfo.bits)
            self.assertEqual(xinfo.max, xninfo.max)
            self.assertEqual(xinfo.min, xninfo.min)

    @unittest.skipIf(not TEST_NUMPY, "Numpy not found")
    def test_finfo(self):
        # torch.finfo for each floating-point dtype should agree with
        # numpy.finfo, and torch.finfo() with no argument should describe
        # the current default dtype.
        initial_default_type = torch.get_default_dtype()
        for dtype in [torch.float32, torch.float64]:
            x = torch.zeros((2, 2), dtype=dtype)
            xinfo = torch.finfo(x.dtype)
            xn = x.cpu().numpy()
            xninfo = np.finfo(xn.dtype)
            self.assertEqual(xinfo.bits, xninfo.bits)
            self.assertEqual(xinfo.max, xninfo.max)
            self.assertEqual(xinfo.min, xninfo.min)
            self.assertEqual(xinfo.eps, xninfo.eps)
            self.assertEqual(xinfo.tiny, xninfo.tiny)
            torch.set_default_dtype(dtype)
            self.assertEqual(torch.finfo(dtype), torch.finfo())

        # Restore the default dtype so the test has no side effect on other tests.
        torch.set_default_dtype(initial_default_type)
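

# A minimal illustrative sketch, not part of the upstream test file: the
# helper name _print_dtype_info is hypothetical and nothing calls it; it just
# prints the same iinfo/finfo attributes that the tests above compare against
# NumPy, for manual inspection.
def _print_dtype_info():
    for dtype in [torch.int32, torch.int64]:
        info = torch.iinfo(dtype)
        print(dtype, info.bits, info.min, info.max)
    for dtype in [torch.float32, torch.float64]:
        info = torch.finfo(dtype)
        print(dtype, info.bits, info.eps, info.tiny)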


if __name__ == '__main__':
    run_tests()