Skip to content

Commit c0df108

Browse files
mrutland-arm authored and Ingo Molnar committed
arm64, locking/atomics: Use instrumented atomics
Now that the generic atomic headers provide instrumented wrappers of all the atomics implemented by arm64, let's migrate arm64 over to these. The additional instrumentation will help to find bugs (e.g. when fuzzing with Syzkaller). Mostly this change involves adding an arch_ prefix to a number of function names and macro definitions. When LSE atomics are used, the out-of-line LL/SC atomics will be named __ll_sc_arch_atomic_${OP}. Adding the arch_ prefix requires some whitespace fixups to keep things aligned. Some other unusual whitespace is fixed up at the same time (e.g. in the cmpxchg wrappers). Signed-off-by: Mark Rutland <mark.rutland@arm.com> Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org> Acked-by: Will Deacon <will.deacon@arm.com> Cc: linux-arm-kernel@lists.infradead.org Cc: Catalin Marinas <catalin.marinas@arm.com> Cc: linuxdrivers@attotech.com Cc: dvyukov@google.com Cc: boqun.feng@gmail.com Cc: arnd@arndb.de Cc: aryabinin@virtuozzo.com Cc: glider@google.com Link: http://lkml.kernel.org/r/20180904104830.2975-7-mark.rutland@arm.com Signed-off-by: Ingo Molnar <mingo@kernel.org>
1 parent 8d32588 commit c0df108

File tree

5 files changed

+193
-186
lines changed

5 files changed

+193
-186
lines changed

arch/arm64/include/asm/atomic.h

Lines changed: 122 additions & 115 deletions
Original file line numberDiff line numberDiff line change
@@ -42,124 +42,131 @@
4242

4343
#define ATOMIC_INIT(i) { (i) }
4444

45-
#define atomic_read(v) READ_ONCE((v)->counter)
46-
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
47-
48-
#define atomic_add_return_relaxed atomic_add_return_relaxed
49-
#define atomic_add_return_acquire atomic_add_return_acquire
50-
#define atomic_add_return_release atomic_add_return_release
51-
#define atomic_add_return atomic_add_return
52-
53-
#define atomic_sub_return_relaxed atomic_sub_return_relaxed
54-
#define atomic_sub_return_acquire atomic_sub_return_acquire
55-
#define atomic_sub_return_release atomic_sub_return_release
56-
#define atomic_sub_return atomic_sub_return
57-
58-
#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
59-
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
60-
#define atomic_fetch_add_release atomic_fetch_add_release
61-
#define atomic_fetch_add atomic_fetch_add
62-
63-
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
64-
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
65-
#define atomic_fetch_sub_release atomic_fetch_sub_release
66-
#define atomic_fetch_sub atomic_fetch_sub
67-
68-
#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
69-
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
70-
#define atomic_fetch_and_release atomic_fetch_and_release
71-
#define atomic_fetch_and atomic_fetch_and
72-
73-
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
74-
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
75-
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
76-
#define atomic_fetch_andnot atomic_fetch_andnot
77-
78-
#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
79-
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
80-
#define atomic_fetch_or_release atomic_fetch_or_release
81-
#define atomic_fetch_or atomic_fetch_or
82-
83-
#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
84-
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
85-
#define atomic_fetch_xor_release atomic_fetch_xor_release
86-
#define atomic_fetch_xor atomic_fetch_xor
87-
88-
#define atomic_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
89-
#define atomic_xchg_acquire(v, new) xchg_acquire(&((v)->counter), (new))
90-
#define atomic_xchg_release(v, new) xchg_release(&((v)->counter), (new))
91-
#define atomic_xchg(v, new) xchg(&((v)->counter), (new))
92-
93-
#define atomic_cmpxchg_relaxed(v, old, new) \
94-
cmpxchg_relaxed(&((v)->counter), (old), (new))
95-
#define atomic_cmpxchg_acquire(v, old, new) \
96-
cmpxchg_acquire(&((v)->counter), (old), (new))
97-
#define atomic_cmpxchg_release(v, old, new) \
98-
cmpxchg_release(&((v)->counter), (old), (new))
99-
#define atomic_cmpxchg(v, old, new) cmpxchg(&((v)->counter), (old), (new))
100-
101-
#define atomic_andnot atomic_andnot
45+
#define arch_atomic_read(v) READ_ONCE((v)->counter)
46+
#define arch_atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
47+
48+
#define arch_atomic_add_return_relaxed arch_atomic_add_return_relaxed
49+
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
50+
#define arch_atomic_add_return_release arch_atomic_add_return_release
51+
#define arch_atomic_add_return arch_atomic_add_return
52+
53+
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed
54+
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
55+
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
56+
#define arch_atomic_sub_return arch_atomic_sub_return
57+
58+
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add_relaxed
59+
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
60+
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
61+
#define arch_atomic_fetch_add arch_atomic_fetch_add
62+
63+
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub_relaxed
64+
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
65+
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
66+
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
67+
68+
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and_relaxed
69+
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
70+
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
71+
#define arch_atomic_fetch_and arch_atomic_fetch_and
72+
73+
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
74+
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
75+
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
76+
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
77+
78+
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed
79+
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
80+
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
81+
#define arch_atomic_fetch_or arch_atomic_fetch_or
82+
83+
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor_relaxed
84+
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
85+
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
86+
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
87+
88+
#define arch_atomic_xchg_relaxed(v, new) \
89+
arch_xchg_relaxed(&((v)->counter), (new))
90+
#define arch_atomic_xchg_acquire(v, new) \
91+
arch_xchg_acquire(&((v)->counter), (new))
92+
#define arch_atomic_xchg_release(v, new) \
93+
arch_xchg_release(&((v)->counter), (new))
94+
#define arch_atomic_xchg(v, new) \
95+
arch_xchg(&((v)->counter), (new))
96+
97+
#define arch_atomic_cmpxchg_relaxed(v, old, new) \
98+
arch_cmpxchg_relaxed(&((v)->counter), (old), (new))
99+
#define arch_atomic_cmpxchg_acquire(v, old, new) \
100+
arch_cmpxchg_acquire(&((v)->counter), (old), (new))
101+
#define arch_atomic_cmpxchg_release(v, old, new) \
102+
arch_cmpxchg_release(&((v)->counter), (old), (new))
103+
#define arch_atomic_cmpxchg(v, old, new) \
104+
arch_cmpxchg(&((v)->counter), (old), (new))
105+
106+
#define arch_atomic_andnot arch_atomic_andnot
102107

103108
/*
104-
* 64-bit atomic operations.
109+
* 64-bit arch_atomic operations.
105110
*/
106-
#define ATOMIC64_INIT ATOMIC_INIT
107-
#define atomic64_read atomic_read
108-
#define atomic64_set atomic_set
109-
110-
#define atomic64_add_return_relaxed atomic64_add_return_relaxed
111-
#define atomic64_add_return_acquire atomic64_add_return_acquire
112-
#define atomic64_add_return_release atomic64_add_return_release
113-
#define atomic64_add_return atomic64_add_return
114-
115-
#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
116-
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
117-
#define atomic64_sub_return_release atomic64_sub_return_release
118-
#define atomic64_sub_return atomic64_sub_return
119-
120-
#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
121-
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
122-
#define atomic64_fetch_add_release atomic64_fetch_add_release
123-
#define atomic64_fetch_add atomic64_fetch_add
124-
125-
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
126-
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
127-
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
128-
#define atomic64_fetch_sub atomic64_fetch_sub
129-
130-
#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
131-
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
132-
#define atomic64_fetch_and_release atomic64_fetch_and_release
133-
#define atomic64_fetch_and atomic64_fetch_and
134-
135-
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
136-
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
137-
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
138-
#define atomic64_fetch_andnot atomic64_fetch_andnot
139-
140-
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
141-
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
142-
#define atomic64_fetch_or_release atomic64_fetch_or_release
143-
#define atomic64_fetch_or atomic64_fetch_or
144-
145-
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
146-
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
147-
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
148-
#define atomic64_fetch_xor atomic64_fetch_xor
149-
150-
#define atomic64_xchg_relaxed atomic_xchg_relaxed
151-
#define atomic64_xchg_acquire atomic_xchg_acquire
152-
#define atomic64_xchg_release atomic_xchg_release
153-
#define atomic64_xchg atomic_xchg
154-
155-
#define atomic64_cmpxchg_relaxed atomic_cmpxchg_relaxed
156-
#define atomic64_cmpxchg_acquire atomic_cmpxchg_acquire
157-
#define atomic64_cmpxchg_release atomic_cmpxchg_release
158-
#define atomic64_cmpxchg atomic_cmpxchg
159-
160-
#define atomic64_andnot atomic64_andnot
161-
162-
#define atomic64_dec_if_positive atomic64_dec_if_positive
111+
#define ATOMIC64_INIT ATOMIC_INIT
112+
#define arch_atomic64_read arch_atomic_read
113+
#define arch_atomic64_set arch_atomic_set
114+
115+
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return_relaxed
116+
#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
117+
#define arch_atomic64_add_return_release arch_atomic64_add_return_release
118+
#define arch_atomic64_add_return arch_atomic64_add_return
119+
120+
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return_relaxed
121+
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
122+
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
123+
#define arch_atomic64_sub_return arch_atomic64_sub_return
124+
125+
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add_relaxed
126+
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
127+
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
128+
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
129+
130+
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub_relaxed
131+
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
132+
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
133+
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
134+
135+
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and_relaxed
136+
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
137+
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
138+
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
139+
140+
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
141+
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
142+
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
143+
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
144+
145+
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or_relaxed
146+
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
147+
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
148+
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
149+
150+
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor_relaxed
151+
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
152+
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
153+
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
154+
155+
#define arch_atomic64_xchg_relaxed arch_atomic_xchg_relaxed
156+
#define arch_atomic64_xchg_acquire arch_atomic_xchg_acquire
157+
#define arch_atomic64_xchg_release arch_atomic_xchg_release
158+
#define arch_atomic64_xchg arch_atomic_xchg
159+
160+
#define arch_atomic64_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
161+
#define arch_atomic64_cmpxchg_acquire arch_atomic_cmpxchg_acquire
162+
#define arch_atomic64_cmpxchg_release arch_atomic_cmpxchg_release
163+
#define arch_atomic64_cmpxchg arch_atomic_cmpxchg
164+
165+
#define arch_atomic64_andnot arch_atomic64_andnot
166+
167+
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
168+
169+
#include <asm-generic/atomic-instrumented.h>
163170

164171
#endif
165172
#endif

arch/arm64/include/asm/atomic_ll_sc.h

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@
3939

4040
#define ATOMIC_OP(op, asm_op) \
4141
__LL_SC_INLINE void \
42-
__LL_SC_PREFIX(atomic_##op(int i, atomic_t *v)) \
42+
__LL_SC_PREFIX(arch_atomic_##op(int i, atomic_t *v)) \
4343
{ \
4444
unsigned long tmp; \
4545
int result; \
@@ -53,11 +53,11 @@ __LL_SC_PREFIX(atomic_##op(int i, atomic_t *v)) \
5353
: "=&r" (result), "=&r" (tmp), "+Q" (v->counter) \
5454
: "Ir" (i)); \
5555
} \
56-
__LL_SC_EXPORT(atomic_##op);
56+
__LL_SC_EXPORT(arch_atomic_##op);
5757

5858
#define ATOMIC_OP_RETURN(name, mb, acq, rel, cl, op, asm_op) \
5959
__LL_SC_INLINE int \
60-
__LL_SC_PREFIX(atomic_##op##_return##name(int i, atomic_t *v)) \
60+
__LL_SC_PREFIX(arch_atomic_##op##_return##name(int i, atomic_t *v)) \
6161
{ \
6262
unsigned long tmp; \
6363
int result; \
@@ -75,11 +75,11 @@ __LL_SC_PREFIX(atomic_##op##_return##name(int i, atomic_t *v)) \
7575
\
7676
return result; \
7777
} \
78-
__LL_SC_EXPORT(atomic_##op##_return##name);
78+
__LL_SC_EXPORT(arch_atomic_##op##_return##name);
7979

8080
#define ATOMIC_FETCH_OP(name, mb, acq, rel, cl, op, asm_op) \
8181
__LL_SC_INLINE int \
82-
__LL_SC_PREFIX(atomic_fetch_##op##name(int i, atomic_t *v)) \
82+
__LL_SC_PREFIX(arch_atomic_fetch_##op##name(int i, atomic_t *v)) \
8383
{ \
8484
unsigned long tmp; \
8585
int val, result; \
@@ -97,7 +97,7 @@ __LL_SC_PREFIX(atomic_fetch_##op##name(int i, atomic_t *v)) \
9797
\
9898
return result; \
9999
} \
100-
__LL_SC_EXPORT(atomic_fetch_##op##name);
100+
__LL_SC_EXPORT(arch_atomic_fetch_##op##name);
101101

102102
#define ATOMIC_OPS(...) \
103103
ATOMIC_OP(__VA_ARGS__) \
@@ -133,7 +133,7 @@ ATOMIC_OPS(xor, eor)
133133

134134
#define ATOMIC64_OP(op, asm_op) \
135135
__LL_SC_INLINE void \
136-
__LL_SC_PREFIX(atomic64_##op(long i, atomic64_t *v)) \
136+
__LL_SC_PREFIX(arch_atomic64_##op(long i, atomic64_t *v)) \
137137
{ \
138138
long result; \
139139
unsigned long tmp; \
@@ -147,11 +147,11 @@ __LL_SC_PREFIX(atomic64_##op(long i, atomic64_t *v)) \
147147
: "=&r" (result), "=&r" (tmp), "+Q" (v->counter) \
148148
: "Ir" (i)); \
149149
} \
150-
__LL_SC_EXPORT(atomic64_##op);
150+
__LL_SC_EXPORT(arch_atomic64_##op);
151151

152152
#define ATOMIC64_OP_RETURN(name, mb, acq, rel, cl, op, asm_op) \
153153
__LL_SC_INLINE long \
154-
__LL_SC_PREFIX(atomic64_##op##_return##name(long i, atomic64_t *v)) \
154+
__LL_SC_PREFIX(arch_atomic64_##op##_return##name(long i, atomic64_t *v))\
155155
{ \
156156
long result; \
157157
unsigned long tmp; \
@@ -169,11 +169,11 @@ __LL_SC_PREFIX(atomic64_##op##_return##name(long i, atomic64_t *v)) \
169169
\
170170
return result; \
171171
} \
172-
__LL_SC_EXPORT(atomic64_##op##_return##name);
172+
__LL_SC_EXPORT(arch_atomic64_##op##_return##name);
173173

174174
#define ATOMIC64_FETCH_OP(name, mb, acq, rel, cl, op, asm_op) \
175175
__LL_SC_INLINE long \
176-
__LL_SC_PREFIX(atomic64_fetch_##op##name(long i, atomic64_t *v)) \
176+
__LL_SC_PREFIX(arch_atomic64_fetch_##op##name(long i, atomic64_t *v)) \
177177
{ \
178178
long result, val; \
179179
unsigned long tmp; \
@@ -191,7 +191,7 @@ __LL_SC_PREFIX(atomic64_fetch_##op##name(long i, atomic64_t *v)) \
191191
\
192192
return result; \
193193
} \
194-
__LL_SC_EXPORT(atomic64_fetch_##op##name);
194+
__LL_SC_EXPORT(arch_atomic64_fetch_##op##name);
195195

196196
#define ATOMIC64_OPS(...) \
197197
ATOMIC64_OP(__VA_ARGS__) \
@@ -226,7 +226,7 @@ ATOMIC64_OPS(xor, eor)
226226
#undef ATOMIC64_OP
227227

228228
__LL_SC_INLINE long
229-
__LL_SC_PREFIX(atomic64_dec_if_positive(atomic64_t *v))
229+
__LL_SC_PREFIX(arch_atomic64_dec_if_positive(atomic64_t *v))
230230
{
231231
long result;
232232
unsigned long tmp;
@@ -246,7 +246,7 @@ __LL_SC_PREFIX(atomic64_dec_if_positive(atomic64_t *v))
246246

247247
return result;
248248
}
249-
__LL_SC_EXPORT(atomic64_dec_if_positive);
249+
__LL_SC_EXPORT(arch_atomic64_dec_if_positive);
250250

251251
#define __CMPXCHG_CASE(w, sz, name, mb, acq, rel, cl) \
252252
__LL_SC_INLINE unsigned long \

0 commit comments

Comments (0)