forked from xtensor-stack/xtensor-python
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtest_pyarray_traits.cpp
166 lines (140 loc) · 6.1 KB
/
test_pyarray_traits.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
/***************************************************************************
* Copyright (c) Wolf Vollprecht, Johan Mabille and Sylvain Corlay *
* Copyright (c) QuantStack *
* *
* Distributed under the terms of the BSD 3-Clause License. *
* *
* The full license is in the file LICENSE, distributed with this software. *
****************************************************************************/
#include "gtest/gtest.h"
#include "xtensor-python/pyarray.hpp"
namespace xt
{
namespace testing
{
// Test fixture probing the assignment/layout trait machinery of
// xt::pyarray for the three layout flavors (dynamic, row major,
// column major). The helpers build small lazy expressions and report
// what the xtensor assignment traits decide for them.
class pyarray_traits: public ::testing::Test
{
protected:

    using dynamic_type = xt::pyarray<double>;
    using row_major_type = xt::pyarray<double, xt::layout_type::row_major>;
    using column_major_type = xt::pyarray<double, xt::layout_type::column_major>;

    // Same-shaped operand pairs, one pair per layout flavor.
    dynamic_type d1 = {{0., 1.}, {0., 10.}, {0., 100.}};
    dynamic_type d2 = {{0., 2.}, {0., 20.}, {0., 200.}};

    row_major_type r1 = {{0., 1.}, {0., 10.}, {0., 100.}};
    row_major_type r2 = {{0., 2.}, {0., 20.}, {0., 200.}};

    column_major_type c1 = {{0., 1.}, {0., 10.}, {0., 100.}};
    column_major_type c2 = {{0., 2.}, {0., 20.}, {0., 200.}};

    // Whether T models the xt::has_strides concept.
    template <class T>
    bool test_has_strides(T const&)
    {
        return xt::has_strides<T>::value;
    }

    // Layout reported by a compound lazy expression built from a1 and a2.
    template <class T>
    xt::layout_type test_result_layout(T const& a1, T const& a2)
    {
        auto tmp1 = pow(sin((a2 - a1) / 2.), 2.);
        auto tmp2 = cos(a1);
        return (tmp1 + tmp2).layout();
    }

    // Whether assigning the compound expression into a T may take the
    // linear (flat) assignment path, as reported by xassign_traits.
    template <class T>
    bool test_linear_assign(T const& a1, T const& a2)
    {
        auto tmp1 = pow(sin((a2 - a1) / 2.), 2.);
        auto tmp2 = cos(a1);
        T res = tmp1 + tmp2;
        return xt::xassign_traits<T, decltype(tmp1 + tmp2)>::linear_assign(res, tmp1 + tmp2, true);
    }

    // Whether SIMD linear assignment of cos(a1) into a T can be decided
    // statically (at compile time). Only the unary cos expression is
    // needed; the second operand is intentionally unused (the original
    // code also built an unused pow/sin temporary — removed here).
    template <class T>
    bool test_static_simd_linear_assign(T const& a1, T const&)
    {
        auto tmp2 = cos(a1);
        return xt::xassign_traits<T, decltype(tmp2)>::simd_linear_assign();
    }

    // Whether SIMD linear assignment of cos(a1) into a1 is allowed when
    // decided dynamically from the actual operands. Second operand is
    // unused, kept for a uniform helper signature.
    template <class T>
    bool test_dynamic_simd_linear_assign(T const& a1, T const&)
    {
        auto tmp2 = cos(a1);
        return xt::xassign_traits<T, decltype(tmp2)>::simd_linear_assign(a1, tmp2);
    }

    // Whether the combined layout of the two sub-expressions is linear
    // and known statically.
    template <class T>
    bool test_linear_static_layout(T const& a1, T const& a2)
    {
        auto tmp1 = pow(sin((a2 - a1) / 2.), 2.);
        auto tmp2 = cos(a1);
        return xt::detail::linear_static_layout<decltype(tmp1), decltype(tmp2)>();
    }

    // Whether both sub-expressions advertise a contiguous layout.
    template <class T>
    bool test_contiguous_layout(T const& a1, T const& a2)
    {
        auto tmp1 = pow(sin((a2 - a1) / 2.), 2.);
        auto tmp2 = cos(a1);
        return decltype(tmp1)::contiguous_layout && decltype(tmp2)::contiguous_layout;
    }
};
TEST_F(pyarray_traits, result_layout)
{
    // An expression over same-layout operands keeps that layout;
    // dynamic pyarrays report row major here.
    EXPECT_EQ(d1.layout(), layout_type::row_major);
    EXPECT_EQ(test_result_layout(d1, d2), layout_type::row_major);

    EXPECT_EQ(r1.layout(), layout_type::row_major);
    EXPECT_EQ(test_result_layout(r1, r2), layout_type::row_major);

    EXPECT_EQ(c1.layout(), layout_type::column_major);
    EXPECT_EQ(test_result_layout(c1, c2), layout_type::column_major);
}
TEST_F(pyarray_traits, has_strides)
{
    // Every pyarray specialization must model has_strides.
    const bool dynamic_ok = test_has_strides(d1);
    const bool row_ok = test_has_strides(r1);
    const bool col_ok = test_has_strides(c1);
    EXPECT_TRUE(dynamic_ok);
    EXPECT_TRUE(row_ok);
    EXPECT_TRUE(col_ok);
}
TEST_F(pyarray_traits, has_linear_assign)
{
    // Arrays sharing shape and layout expose matching strides, so
    // linear assignment between them must be available.
    const auto& dynamic_strides = d1.strides();
    const auto& row_strides = r1.strides();
    const auto& col_strides = c1.strides();
    EXPECT_TRUE(d2.has_linear_assign(dynamic_strides));
    EXPECT_TRUE(r2.has_linear_assign(row_strides));
    EXPECT_TRUE(c2.has_linear_assign(col_strides));
}
TEST_F(pyarray_traits, linear_assign)
{
    // The compound expression is linearly assignable for all layouts.
    const bool dynamic_linear = test_linear_assign(d1, d2);
    const bool row_linear = test_linear_assign(r1, r2);
    const bool col_linear = test_linear_assign(c1, c2);
    EXPECT_TRUE(dynamic_linear);
    EXPECT_TRUE(row_linear);
    EXPECT_TRUE(col_linear);
}
TEST_F(pyarray_traits, static_simd_linear_assign)
{
#ifdef XTENSOR_USE_XSIMD
    constexpr bool simd_enabled = true;
#else
    constexpr bool simd_enabled = false;
#endif
    // A dynamic layout can never be proven linear at compile time.
    EXPECT_FALSE(test_static_simd_linear_assign(d1, d2));
    // Statically laid out arrays qualify exactly when xsimd is active.
    EXPECT_EQ(test_static_simd_linear_assign(r1, r2), simd_enabled);
    EXPECT_EQ(test_static_simd_linear_assign(c1, c2), simd_enabled);
}
TEST_F(pyarray_traits, dynamic_simd_linear_assign)
{
#ifdef XTENSOR_USE_XSIMD
    constexpr bool simd_enabled = true;
#else
    constexpr bool simd_enabled = false;
#endif
    // With run-time operand inspection, every layout qualifies when
    // xsimd is compiled in, and none does otherwise.
    EXPECT_EQ(test_dynamic_simd_linear_assign(d1, d2), simd_enabled);
    EXPECT_EQ(test_dynamic_simd_linear_assign(r1, r2), simd_enabled);
    EXPECT_EQ(test_dynamic_simd_linear_assign(c1, c2), simd_enabled);
}
TEST_F(pyarray_traits, linear_static_layout)
{
    // Only statically known layouts combine to a linear static layout;
    // the dynamic flavor cannot.
    const bool dynamic_static = test_linear_static_layout(d1, d2);
    const bool row_static = test_linear_static_layout(r1, r2);
    const bool col_static = test_linear_static_layout(c1, c2);
    EXPECT_FALSE(dynamic_static);
    EXPECT_TRUE(row_static);
    EXPECT_TRUE(col_static);
}
TEST_F(pyarray_traits, contiguous_layout)
{
    // Contiguity is advertised only for the fixed-layout flavors.
    const bool dynamic_contig = test_contiguous_layout(d1, d2);
    const bool row_contig = test_contiguous_layout(r1, r2);
    const bool col_contig = test_contiguous_layout(c1, c2);
    EXPECT_FALSE(dynamic_contig);
    EXPECT_TRUE(row_contig);
    EXPECT_TRUE(col_contig);
}
}
}