Merge pull request #68 from JohanMabille/xtensor_0160
Upgraded to xtensor 0.16.0
JohanMabille committed May 16, 2018
2 parents c1ce74e + 26840b4 commit ff749ec
Showing 6 changed files with 110 additions and 110 deletions.
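Apart from the two CI pins below, the diff is a mechanical follow-up to the xtensor 0.16.0 API renames: raw_data() is now data(), raw_data_offset() is now data_offset(), the has_raw_data_interface trait is now has_data_interface, and the DEFAULT_LAYOUT macro is now XTENSOR_DEFAULT_LAYOUT. A minimal sketch of the new accessor pattern, assuming xtensor >= 0.16.0 (the helper raw_pointer below is hypothetical, not part of this commit):

    // Sketch only: obtaining a raw pointer from an xtensor container after
    // the 0.16.0 rename. `raw_pointer` is a made-up helper for illustration.
    #include <xtensor/xtensor.hpp>

    template <class E>
    auto raw_pointer(E& e)
    {
        // xtensor < 0.16.0:  e.raw_data() + e.raw_data_offset()
        // xtensor >= 0.16.0: e.data()     + e.data_offset()
        return e.data() + e.data_offset();
    }

    // Usage:
    // xt::xtensor<double, 1> a = {1.0, 2.0, 3.0};
    // double* p = raw_pointer(a);   // pointer to the first element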
2 changes: 1 addition & 1 deletion .appveyor.yml
@@ -21,7 +21,7 @@ install:
   - conda config --set always_yes yes --set changeps1 no
   - conda update -q conda
   - conda info -a
-  - conda install xtensor=0.15.4 -c conda-forge
+  - conda install xtensor=0.16.0 -c conda-forge
   - conda install gtest cmake -c conda-forge
   - conda install m2w64-openblas -c msys2
   # Patch OpenBLASConfig.cmake
2 changes: 1 addition & 1 deletion .travis.yml
@@ -124,7 +124,7 @@ install:
   - conda info -a
   - conda install gtest cmake -c conda-forge
   # Install xtensor and BLAS
-  - conda install xtensor=0.15.4 -c conda-forge
+  - conda install xtensor=0.16.0 -c conda-forge
   - if [[ "$BLAS" == "OpenBLAS" ]]; then
       conda install openblas -c conda-forge;
     elif [[ "$BLAS" == "mkl" ]]; then
32 changes: 16 additions & 16 deletions include/xtensor-blas/xblas.hpp
@@ -41,7 +41,7 @@ namespace blas
 
         cxxblas::asum<blas_index_t>(
             static_cast<blas_index_t>(ad.shape()[0]),
-            ad.raw_data() + ad.raw_data_offset(),
+            ad.data() + ad.data_offset(),
             static_cast<blas_index_t>(ad.strides().front()),
             result
         );
@@ -61,7 +61,7 @@ namespace blas
 
         cxxblas::nrm2<blas_index_t>(
             static_cast<blas_index_t>(ad.shape()[0]),
-            ad.raw_data() + ad.raw_data_offset(),
+            ad.data() + ad.data_offset(),
             static_cast<blas_index_t>(ad.strides().front()),
             result
         );
@@ -85,9 +85,9 @@ namespace blas
 
         cxxblas::dot<blas_index_t>(
             static_cast<blas_index_t>(ad.shape()[0]),
-            ad.raw_data() + ad.raw_data_offset(),
+            ad.data() + ad.data_offset(),
             static_cast<blas_index_t>(ad.strides().front()),
-            bd.raw_data() + bd.raw_data_offset(),
+            bd.data() + bd.data_offset(),
             static_cast<blas_index_t>(bd.strides().front()),
             result
         );
@@ -110,9 +110,9 @@ namespace blas
 
         cxxblas::dotu<blas_index_t>(
             static_cast<blas_index_t>(ad.shape()[0]),
-            ad.raw_data() + ad.raw_data_offset(),
+            ad.data() + ad.data_offset(),
             static_cast<blas_index_t>(ad.strides().front()),
-            bd.raw_data() + bd.raw_data_offset(),
+            bd.data() + bd.data_offset(),
             static_cast<blas_index_t>(bd.strides().front()),
             result
         );
@@ -144,12 +144,12 @@ namespace blas
             static_cast<blas_index_t>(dA.shape()[0]),
             static_cast<blas_index_t>(dA.shape()[1]),
             alpha,
-            dA.raw_data() + dA.raw_data_offset(),
+            dA.data() + dA.data_offset(),
             get_leading_stride(dA),
-            dx.raw_data() + dx.raw_data_offset(),
+            dx.data() + dx.data_offset(),
             get_leading_stride(dx),
             beta,
-            result.raw_data() + result.raw_data_offset(),
+            result.data() + result.data_offset(),
             get_leading_stride(result)
         );
     }
@@ -190,12 +190,12 @@ namespace blas
             static_cast<blas_index_t>(transpose_B ? dB.shape()[0] : dB.shape()[1]),
             static_cast<blas_index_t>(transpose_B ? dB.shape()[1] : dB.shape()[0]),
             alpha,
-            dA.raw_data() + dA.raw_data_offset(),
+            dA.data() + dA.data_offset(),
             get_leading_stride(dA),
-            dB.raw_data() + dB.raw_data_offset(),
+            dB.data() + dB.data_offset(),
             get_leading_stride(dB),
             beta,
-            result.raw_data() + result.raw_data_offset(),
+            result.data() + result.data_offset(),
             get_leading_stride(result)
         );
     }
@@ -225,14 +225,14 @@ namespace blas
             static_cast<blas_index_t>(dx.shape()[0]),
             static_cast<blas_index_t>(dy.shape()[0]),
             alpha,
-            dx.raw_data() + dx.raw_data_offset(),
+            dx.data() + dx.data_offset(),
             static_cast<blas_index_t>(dx.strides().front()),
-            dy.raw_data() + dy.raw_data_offset(),
+            dy.data() + dy.data_offset(),
             static_cast<blas_index_t>(dy.strides().front()),
-            result.raw_data() + result.raw_data_offset(),
+            result.data() + result.data_offset(),
             static_cast<blas_index_t>(result.strides().front())
         );
     }
 }
 }
 #endif
 #endif
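Every cxxblas call site above now builds its pointer arguments as data() + data_offset(); the semantics are unchanged. A quick way to exercise the patched wrappers end to end is a small program against the public xt::linalg interface. This is a sketch, not part of the PR, and it assumes xtensor 0.16.0, xtensor-blas and a BLAS library are installed and linked:

    // Smoke test (sketch): exercises the dot() wrapper patched above
    // through the public xt::linalg API.
    #include <iostream>
    #include <xtensor/xtensor.hpp>
    #include <xtensor-blas/xlinalg.hpp>

    int main()
    {
        xt::xtensor<double, 1> a = {1.0, 2.0, 3.0};
        xt::xtensor<double, 1> b = {4.0, 5.0, 6.0};

        // dot() eventually reaches the cxxblas::dot wrapper shown in the diff.
        auto d = xt::linalg::dot(a, b);
        std::cout << d() << std::endl;   // expected output: 32
    }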
8 changes: 4 additions & 4 deletions include/xtensor-blas/xblas_utils.hpp
@@ -28,7 +28,7 @@ namespace xt
 {
     template <layout_type L = layout_type::row_major, class T>
     inline auto view_eval(T&& t)
-        -> std::enable_if_t<has_raw_data_interface<std::decay_t<T>>::value && std::decay_t<T>::static_layout == L, T&&>
+        -> std::enable_if_t<has_data_interface<std::decay_t<T>>::value && std::decay_t<T>::static_layout == L, T&&>
     {
         return std::forward<T>(t);
     }
@@ -37,13 +37,13 @@ namespace xt
     {
         constexpr layout_type layout_remove_any(const layout_type layout)
         {
-            return layout == layout_type::any ? DEFAULT_LAYOUT : layout;
+            return layout == layout_type::any ? XTENSOR_DEFAULT_LAYOUT : layout;
         }
     }
 
     template <layout_type L = layout_type::row_major, class T, class I = std::decay_t<T>>
     inline auto view_eval(T&& t)
-        -> std::enable_if_t<(!has_raw_data_interface<std::decay_t<T>>::value || I::static_layout != L)
+        -> std::enable_if_t<(!has_data_interface<std::decay_t<T>>::value || I::static_layout != L)
                             && detail::is_array<typename I::shape_type>::value,
                             xtensor<typename I::value_type,
                                     std::tuple_size<typename I::shape_type>::value,
@@ -54,7 +54,7 @@
 
     template <layout_type L = layout_type::row_major, class T, class I = std::decay_t<T>>
     inline auto view_eval(T&& t)
-        -> std::enable_if_t<(!has_raw_data_interface<std::decay_t<T>>::value || I::static_layout != L) &&
+        -> std::enable_if_t<(!has_data_interface<std::decay_t<T>>::value || I::static_layout != L) &&
                             !detail::is_array<typename I::shape_type>::value,
                             xarray<typename I::value_type, detail::layout_remove_any(L)>>
     {
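Only the trait and macro names change in xblas_utils.hpp; the view_eval dispatch itself is untouched. An expression that models has_data_interface in the requested layout is forwarded as-is, and anything else is evaluated into a temporary whose layout falls back to XTENSOR_DEFAULT_LAYOUT when layout_type::any is requested. A hedged usage sketch (the function and variable names below are illustrative, not from the repository):

    // Sketch: the two branches of view_eval after the rename.
    #include <xtensor/xtensor.hpp>
    #include <xtensor-blas/xblas_utils.hpp>

    void example()
    {
        xt::xtensor<double, 2> a = {{1.0, 2.0}, {3.0, 4.0}};

        // A row-major container satisfies has_data_interface with a matching
        // layout, so the first overload forwards it without copying.
        auto&& kept = xt::view_eval<xt::layout_type::row_major>(a);

        // A lazy expression exposes no data interface, so it is evaluated
        // into a temporary container before its buffer can reach BLAS.
        auto evaluated = xt::view_eval<xt::layout_type::row_major>(a + a);

        (void)kept;
        (void)evaluated;
    }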
