From b158a6aa39a519fc371ad6e6bd8c2e3e41b3a189 Mon Sep 17 00:00:00 2001
From: Alex Guteniev <gutenev@gmail.com>
Date: Wed, 25 Mar 2026 08:32:22 +0200
Subject: [PATCH] Use _WIN64 test where applicable

---
 stl/inc/__msvc_bit_utils.hpp |  48 ++++++------
 stl/inc/atomic               | 133 +++++++++++++++++------
 2 files changed, 90 insertions(+), 91 deletions(-)

diff --git a/stl/inc/__msvc_bit_utils.hpp b/stl/inc/__msvc_bit_utils.hpp
index df65c62107..5b4ae34d97 100644
--- a/stl/inc/__msvc_bit_utils.hpp
+++ b/stl/inc/__msvc_bit_utils.hpp
@@ -68,7 +68,9 @@ _NODISCARD int _Countl_zero_lzcnt(const _Ty _Val) noexcept {
     } else if constexpr (_Digits == 32) {
         return static_cast<int>(__lzcnt(_Val));
     } else {
-#ifdef _M_IX86
+#ifdef _WIN64
+        return static_cast<int>(__lzcnt64(_Val));
+#else // ^^^ 64-bit / 32-bit vvv
         const unsigned int _High = _Val >> 32;
         const auto _Low = static_cast<unsigned int>(_Val);
         if (_High == 0) {
@@ -76,9 +78,7 @@ _NODISCARD int _Countl_zero_lzcnt(const _Ty _Val) noexcept {
         } else {
             return _Countl_zero_lzcnt(_High);
         }
-#else // ^^^ defined(_M_IX86) / !defined(_M_IX86) vvv
-        return static_cast<int>(__lzcnt64(_Val));
-#endif // ^^^ !defined(_M_IX86) ^^^
+#endif // ^^^ 32-bit ^^^
     }
 }
 
@@ -92,7 +92,11 @@ _NODISCARD int _Countl_zero_bsr(const _Ty _Val) noexcept {
             return _Digits;
         }
     } else {
-#ifdef _M_IX86
+#ifdef _WIN64
+        if (!_BitScanReverse64(&_Result, _Val)) {
+            return _Digits;
+        }
+#else // ^^^ 64-bit / 32-bit vvv
         const unsigned int _High = _Val >> 32;
         if (_BitScanReverse(&_Result, _High)) {
             return static_cast<int>(31 - _Result);
@@ -102,11 +106,7 @@ _NODISCARD int _Countl_zero_bsr(const _Ty _Val) noexcept {
         if (!_BitScanReverse(&_Result, _Low)) {
             return _Digits;
         }
-#else // ^^^ defined(_M_IX86) / !defined(_M_IX86) vvv
-        if (!_BitScanReverse64(&_Result, _Val)) {
-            return _Digits;
-        }
-#endif // ^^^ !defined(_M_IX86) ^^^
+#endif // ^^^ 32-bit ^^^
     }
     return static_cast<int>(_Digits - 1 - _Result);
 }
@@ -202,7 +202,9 @@ _NODISCARD int _Countr_zero_tzcnt(const _Ty _Val) noexcept {
         // of the wider type.
         return static_cast<int>(_TZCNT_U32(static_cast<unsigned int>(~_Max | _Val)));
     } else {
-#ifdef _M_IX86
+#ifdef _WIN64
+        return static_cast<int>(_TZCNT_U64(_Val));
+#else // ^^^ 64-bit / 32-bit vvv
         const auto _Low = static_cast<unsigned int>(_Val);
         if (_Low == 0) {
             const unsigned int _High = _Val >> 32;
@@ -210,9 +212,7 @@ _NODISCARD int _Countr_zero_tzcnt(const _Ty _Val) noexcept {
         } else {
             return static_cast<int>(_TZCNT_U32(_Low));
         }
-#else // ^^^ defined(_M_IX86) / !defined(_M_IX86) vvv
-        return static_cast<int>(_TZCNT_U64(_Val));
-#endif // ^^^ !defined(_M_IX86) ^^^
+#endif // ^^^ 32-bit ^^^
     }
 }
 
@@ -233,7 +233,11 @@ _NODISCARD int _Countr_zero_bsf(const _Ty _Val) noexcept {
             return _Digits;
         }
     } else {
-#ifdef _M_IX86
+#ifdef _WIN64
+        if (!_BitScanForward64(&_Result, _Val)) {
+            return _Digits;
+        }
+#else // ^^^ 64-bit / 32-bit vvv
         const auto _Low = static_cast<unsigned int>(_Val);
         if (_BitScanForward(&_Result, _Low)) {
             return static_cast<int>(_Result);
@@ -245,11 +249,7 @@ _NODISCARD int _Countr_zero_bsf(const _Ty _Val) noexcept {
         } else {
             return static_cast<int>(_Result + 32);
         }
-#else // ^^^ defined(_M_IX86) / !defined(_M_IX86) vvv
-        if (!_BitScanForward64(&_Result, _Val)) {
-            return _Digits;
-        }
-#endif // ^^^ !defined(_M_IX86) ^^^
+#endif // ^^^ 32-bit ^^^
     }
     return static_cast<int>(_Result);
 }
@@ -288,11 +288,11 @@ _NODISCARD int _Unchecked_popcount(const _Ty _Val) noexcept {
     } else if constexpr (_Digits == 32) {
         return static_cast<int>(__popcnt(_Val));
     } else {
-#ifdef _M_IX86
-        return static_cast<int>(__popcnt(_Val >> 32) + __popcnt(static_cast<unsigned int>(_Val)));
-#else // ^^^ defined(_M_IX86) / !defined(_M_IX86) vvv
+#ifdef _WIN64
         return static_cast<int>(__popcnt64(_Val));
-#endif // ^^^ !defined(_M_IX86) ^^^
+#else // ^^^ 64-bit / 32-bit vvv
+        return static_cast<int>(__popcnt(_Val >> 32) + __popcnt(static_cast<unsigned int>(_Val)));
+#endif // ^^^ 32-bit ^^^
     }
 }
 
diff --git a/stl/inc/atomic b/stl/inc/atomic
index 5c5bc596f5..20f3a76236 100644
--- a/stl/inc/atomic
+++ b/stl/inc/atomic
@@ -171,11 +171,11 @@ extern "C" inline void _Check_memory_order(const unsigned int _Order) noexcept {
 #elif defined(_M_IX86) || defined(_M_X64) // ^^^ ARM64/ARM64EC/HYBRID_X86_ARM64 / x86/x64 vvv
 #define _ATOMIC_STORE_SEQ_CST(_Width, _Ptr, _Desired) _ATOMIC_STORE_SEQ_CST_X86_X64(_Width, (_Ptr), (_Desired))
 #define _ATOMIC_STORE_32_SEQ_CST(_Ptr, _Desired) _ATOMIC_STORE_32_SEQ_CST_X86_X64((_Ptr), (_Desired))
-#ifdef _M_IX86
-#define _ATOMIC_STORE_64_SEQ_CST(_Ptr, _Desired) _ATOMIC_STORE_64_SEQ_CST_IX86((_Ptr), (_Desired))
-#else // ^^^ x86 / x64 vvv
+#ifdef _WIN64
 #define _ATOMIC_STORE_64_SEQ_CST(_Ptr, _Desired) _ATOMIC_STORE_SEQ_CST_X86_X64(64, (_Ptr), (_Desired))
-#endif // ^^^ x64 ^^^
+#else // ^^^ 64-bit / 32-bit vvv
+#define _ATOMIC_STORE_64_SEQ_CST(_Ptr, _Desired) _ATOMIC_STORE_64_SEQ_CST_IX86((_Ptr), (_Desired))
+#endif // ^^^ 32-bit ^^^
 #else // ^^^ x86/x64 / unknown architecture vvv
 #error Unknown architecture
 #endif // ^^^ unknown architecture ^^^
@@ -1042,7 +1042,15 @@ struct _Atomic_storage<_Ty, 8> { // lock-free using 8-byte intrinsics
         return reinterpret_cast<_TVal&>(_As_bytes);
     }
 
-#ifdef _M_IX86
+#ifdef _WIN64
+    _TVal exchange(const _TVal _Value, const memory_order _Order = memory_order_seq_cst) noexcept {
+        // exchange with given memory order
+        long long _As_bytes;
+        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _As_bytes, _InterlockedExchange64,
+            _STD _Atomic_address_as<long long>(_Storage), _STD _Bit_cast<long long>(_Value));
+        return reinterpret_cast<_TVal&>(_As_bytes);
+    }
+#else // ^^^ 64-bit / 32-bit vvv
     _TVal exchange(const _TVal _Value, const memory_order _Order = memory_order_seq_cst) noexcept {
         // exchange with (effectively) sequential consistency
         _TVal _Temp{load()};
@@ -1051,15 +1059,7 @@ struct _Atomic_storage<_Ty, 8> { // lock-free using 8-byte intrinsics
 
         return _Temp;
     }
-#else // ^^^ defined(_M_IX86) / !defined(_M_IX86) vvv
-    _TVal exchange(const _TVal _Value, const memory_order _Order = memory_order_seq_cst) noexcept {
-        // exchange with given memory order
-        long long _As_bytes;
-        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _As_bytes, _InterlockedExchange64,
-            _STD _Atomic_address_as<long long>(_Storage), _STD _Bit_cast<long long>(_Value));
-        return reinterpret_cast<_TVal&>(_As_bytes);
-    }
-#endif // ^^^ !defined(_M_IX86) ^^^
+#endif // ^^^ 32-bit ^^^
 
     bool compare_exchange_strong(_TVal& _Expected, const _TVal _Desired,
         const memory_order _Order = memory_order_seq_cst) noexcept { // CAS with given memory order
@@ -1481,7 +1481,57 @@ struct _Atomic_integral<_Ty, 8> : _Atomic_storage<_Ty> { // atomic integral oper
 
     using _Base::_Base;
 
-#ifdef _M_IX86
+#ifdef _WIN64
+    _TVal fetch_add(const _TVal _Operand, const memory_order _Order = memory_order_seq_cst) noexcept {
+        long long _Result;
+        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _Result, _InterlockedExchangeAdd64,
+            _STD _Atomic_address_as<long long>(this->_Storage), static_cast<long long>(_Operand));
+        return static_cast<_TVal>(_Result);
+    }
+
+    _TVal fetch_and(const _TVal _Operand, const memory_order _Order = memory_order_seq_cst) noexcept {
+        long long _Result;
+        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _Result, _InterlockedAnd64,
+            _STD _Atomic_address_as<long long>(this->_Storage), static_cast<long long>(_Operand));
+        return static_cast<_TVal>(_Result);
+    }
+
+    _TVal fetch_or(const _TVal _Operand, const memory_order _Order = memory_order_seq_cst) noexcept {
+        long long _Result;
+        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _Result, _InterlockedOr64,
+            _STD _Atomic_address_as<long long>(this->_Storage), static_cast<long long>(_Operand));
+        return static_cast<_TVal>(_Result);
+    }
+
+    _TVal fetch_xor(const _TVal _Operand, const memory_order _Order = memory_order_seq_cst) noexcept {
+        long long _Result;
+        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _Result, _InterlockedXor64,
+            _STD _Atomic_address_as<long long>(this->_Storage), static_cast<long long>(_Operand));
+        return static_cast<_TVal>(_Result);
+    }
+
+    _TVal operator++(int) noexcept {
+        unsigned long long _After = static_cast<unsigned long long>(
+            _InterlockedIncrement64(_STD _Atomic_address_as<long long>(this->_Storage)));
+        --_After;
+        return static_cast<_TVal>(_After);
+    }
+
+    _TVal operator++() noexcept {
+        return static_cast<_TVal>(_InterlockedIncrement64(_STD _Atomic_address_as<long long>(this->_Storage)));
+    }
+
+    _TVal operator--(int) noexcept {
+        unsigned long long _After = static_cast<unsigned long long>(
+            _InterlockedDecrement64(_STD _Atomic_address_as<long long>(this->_Storage)));
+        ++_After;
+        return static_cast<_TVal>(_After);
+    }
+
+    _TVal operator--() noexcept {
+        return static_cast<_TVal>(_InterlockedDecrement64(_STD _Atomic_address_as<long long>(this->_Storage)));
+    }
+#else // ^^^ 64-bit / 32-bit vvv
     _TVal fetch_add(const _TVal _Operand, const memory_order _Order = memory_order_seq_cst) noexcept {
         // effectively sequential consistency
         _TVal _Temp{this->load()};
@@ -1533,58 +1583,7 @@ struct _Atomic_integral<_Ty, 8> : _Atomic_storage<_Ty> { // atomic integral oper
     _TVal operator--() noexcept {
         return fetch_add(static_cast<_TVal>(-1)) - static_cast<_TVal>(1);
     }
-
-#else // ^^^ defined(_M_IX86) / !defined(_M_IX86) vvv
-    _TVal fetch_add(const _TVal _Operand, const memory_order _Order = memory_order_seq_cst) noexcept {
-        long long _Result;
-        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _Result, _InterlockedExchangeAdd64,
-            _STD _Atomic_address_as<long long>(this->_Storage), static_cast<long long>(_Operand));
-        return static_cast<_TVal>(_Result);
-    }
-
-    _TVal fetch_and(const _TVal _Operand, const memory_order _Order = memory_order_seq_cst) noexcept {
-        long long _Result;
-        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _Result, _InterlockedAnd64,
-            _STD _Atomic_address_as<long long>(this->_Storage), static_cast<long long>(_Operand));
-        return static_cast<_TVal>(_Result);
-    }
-
-    _TVal fetch_or(const _TVal _Operand, const memory_order _Order = memory_order_seq_cst) noexcept {
-        long long _Result;
-        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _Result, _InterlockedOr64,
-            _STD _Atomic_address_as<long long>(this->_Storage), static_cast<long long>(_Operand));
-        return static_cast<_TVal>(_Result);
-    }
-
-    _TVal fetch_xor(const _TVal _Operand, const memory_order _Order = memory_order_seq_cst) noexcept {
-        long long _Result;
-        _ATOMIC_CHOOSE_INTRINSIC(static_cast<unsigned int>(_Order), _Result, _InterlockedXor64,
-            _STD _Atomic_address_as<long long>(this->_Storage), static_cast<long long>(_Operand));
-        return static_cast<_TVal>(_Result);
-    }
-
-    _TVal operator++(int) noexcept {
-        unsigned long long _After = static_cast<unsigned long long>(
-            _InterlockedIncrement64(_STD _Atomic_address_as<long long>(this->_Storage)));
-        --_After;
-        return static_cast<_TVal>(_After);
-    }
-
-    _TVal operator++() noexcept {
-        return static_cast<_TVal>(_InterlockedIncrement64(_STD _Atomic_address_as<long long>(this->_Storage)));
-    }
-
-    _TVal operator--(int) noexcept {
-        unsigned long long _After = static_cast<unsigned long long>(
-            _InterlockedDecrement64(_STD _Atomic_address_as<long long>(this->_Storage)));
-        ++_After;
-        return static_cast<_TVal>(_After);
-    }
-
-    _TVal operator--() noexcept {
-        return static_cast<_TVal>(_InterlockedDecrement64(_STD _Atomic_address_as<long long>(this->_Storage)));
-    }
-#endif // ^^^ !defined(_M_IX86) ^^^
+#endif // ^^^ 32-bit ^^^
 };
 
 #if 1 // TRANSITION, ABI