@@ -239,10 +239,11 @@ class Atomic : AllStatic {
   // bytes and (if different) pointer size bytes are required. The
   // class must be default constructable, with these requirements:
   //
-  // - dest is of type D*, an integral or pointer type.
+  // - dest is of type D*, where D is an integral or pointer type.
   // - add_value is of type I, an integral type.
   // - sizeof(I) == sizeof(D).
   // - if D is an integral type, I == D.
+  // - if D is a pointer type P*, sizeof(P) == 1.
   // - order is of type atomic_memory_order.
   // - platform_add is an object of type PlatformAdd<sizeof(D)>.
   //
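Illustrative note (not part of the patch): the requirement list above is the contract each platform's PlatformAdd specialization has to satisfy. Below is a minimal sketch of such a specialization, assuming a platform where GCC-style __atomic builtins are acceptable; real os_cpu ports handle atomic_memory_order more carefully than this.

// Hypothetical sketch only -- not any actual platform's definition.
template<size_t byte_size>
struct Atomic::PlatformAdd {
  // Atomically add add_value to *dest and return the new value.
  template<typename D, typename I>
  D add_and_fetch(D volatile* dest, I add_value, atomic_memory_order order) const {
    STATIC_ASSERT(byte_size == sizeof(I));
    STATIC_ASSERT(byte_size == sizeof(D));
    // For pointer D the shared code has already reduced the pointee to size 1
    // and pre-scaled add_value, so a plain unscaled add is correct here.
    return __atomic_add_fetch(dest, add_value, __ATOMIC_SEQ_CST);
  }

  // Atomically add add_value to *dest and return the old value.
  template<typename D, typename I>
  D fetch_and_add(D volatile* dest, I add_value, atomic_memory_order order) const {
    return add_and_fetch(dest, add_value, order) - add_value;
  }
};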
@@ -257,9 +258,17 @@ class Atomic : AllStatic {
   // fetch_and_add atomically adds add_value to the value of dest,
   // returning the old value.
   //
-  // When D is a pointer type P*, both add_and_fetch and fetch_and_add
-  // treat it as if it were an uintptr_t; they do not perform any
-  // scaling of add_value, as that has already been done by the caller.
+  // When the destination type D of the Atomic operation is a pointer type P*,
+  // the addition must scale the add_value by sizeof(P) to add that many bytes
+  // to the destination value. Rather than requiring each platform deal with
+  // this, the shared part of the implementation performs some adjustments
+  // before and after calling the platform operation. It ensures the pointee
+  // type of the destination value passed to the platform operation has size
+  // 1, casting if needed. It also scales add_value by sizeof(P). The result
+  // of the platform operation is cast back to P*. This means the platform
+  // operation does not need to account for the scaling. It also makes it
+  // easy for the platform to implement one of add_and_fetch or fetch_and_add
+  // in terms of the other (which is a common approach).
   //
   // No definition is provided; all platforms must explicitly define
   // this class and any needed specializations.
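Illustrative note (not part of the patch): from the caller's side the net effect of the adjustments described above is that Atomic pointer arithmetic behaves like ordinary C++ pointer arithmetic. A hedged usage sketch, assuming the public Atomic::add entry point (which returns the updated value) and a hypothetical cursor variable:

static int* volatile _cursor;   // hypothetical shared cursor into an int array

void bump_cursor() {
  // The shared layer scales the addend: this advances _cursor by
  // 2 * sizeof(int) bytes and returns the correctly typed new value.
  int* new_cursor = Atomic::add(&_cursor, 2);
  (void)new_cursor;
}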
@@ -689,21 +698,43 @@ struct Atomic::AddImpl<
 {
   STATIC_ASSERT(sizeof(intptr_t) == sizeof(P*));
   STATIC_ASSERT(sizeof(uintptr_t) == sizeof(P*));
-  typedef typename Conditional<IsSigned<I>::value,
-                               intptr_t,
-                               uintptr_t>::type CI;
 
-  static CI scale_addend(CI add_value) {
-    return add_value * sizeof(P);
+  // Type of the scaled addend. An integral type of the same size as a
+  // pointer, and the same signedness as I.
+  using SI = typename Conditional<IsSigned<I>::value, intptr_t, uintptr_t>::type;
+
+  // Type of the unscaled destination. A pointer type with pointee size == 1.
+  using UP = const char*;
+
+  // Scale add_value by the size of the pointee.
+  static SI scale_addend(SI add_value) {
+    return add_value * SI(sizeof(P));
+  }
+
+  // Casting between P* and UP* here intentionally uses C-style casts,
+  // because reinterpret_cast can't cast away cv qualifiers. Using copy_cv
+  // would be an alternative if it existed.
+
+  // Unscale dest to a char* pointee for consistency with scaled addend.
+  static UP volatile* unscale_dest(P* volatile* dest) {
+    return (UP volatile*) dest;
+  }
+
+  // Convert the unscaled char* result to a P*.
+  static P* scale_result(UP result) {
+    return (P*) result;
   }
 
-  static P* add_and_fetch(P* volatile* dest, I add_value, atomic_memory_order order) {
-    CI addend = add_value;
-    return PlatformAdd<sizeof(P*)>().add_and_fetch(dest, scale_addend(addend), order);
+  static P* add_and_fetch(P* volatile* dest, I addend, atomic_memory_order order) {
+    return scale_result(PlatformAdd<sizeof(P*)>().add_and_fetch(unscale_dest(dest),
+                                                                scale_addend(addend),
+                                                                order));
   }
-  static P* fetch_and_add(P* volatile* dest, I add_value, atomic_memory_order order) {
-    CI addend = add_value;
-    return PlatformAdd<sizeof(P*)>().fetch_and_add(dest, scale_addend(addend), order);
+
+  static P* fetch_and_add(P* volatile* dest, I addend, atomic_memory_order order) {
+    return scale_result(PlatformAdd<sizeof(P*)>().fetch_and_add(unscale_dest(dest),
+                                                                scale_addend(addend),
+                                                                order));
   }
 };
 
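Illustrative note (not part of the patch): a concrete walk through the rewritten AddImpl, using hypothetical values, showing why the platform operation never needs to know sizeof(P). Assume P = HeapWord on a 64-bit platform (so sizeof(P) == 8) and an addend of 3:

// Hypothetical trace of Atomic::add(&_top, 3) where _top is HeapWord* volatile:
//   unscale_dest(&_top)                  -> const char* volatile*  (pointee size 1)
//   scale_addend(3)                      -> 3 * sizeof(HeapWord) == 24
//   PlatformAdd<8>().add_and_fetch(...)  -> adds 24 bytes, returns the new const char* value
//   scale_result(...)                    -> cast back to HeapWord*
// Net effect: _top advances by 3 HeapWords. This is also why a platform can
// derive fetch_and_add from add_and_fetch by simply subtracting the (already
// scaled) addend from the result.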