4 changes: 4 additions & 0 deletions benchmarks/lockhammer/Makefile
@@ -13,6 +13,10 @@ endif
ifneq ($(LSE_ENABLE),)
CFLAGS+=-march=armv8-a+lse -DUSE_LSE
endif
# Use builtin atomics instead of arch specific, if available
ifneq ($(USE_BUILTIN),)
CFLAGS+=-DUSE_BUILTIN
endif


LDFLAGS=-lpthread -lm
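Note (not part of the diff): because the Makefile tests `ifneq ($(USE_BUILTIN),)`, any non-empty value enables the flag, for example `make USE_BUILTIN=1`. Below is a minimal, self-contained sketch of the dispatch pattern this flag drives in `atomics.h`; the `sketch_fetchadd64` name and the small harness are illustrative only, not code from this change.

```c
/* Illustrative sketch, not part of this diff: USE_BUILTIN steers a wrapper
 * away from the architecture-specific inline assembly and onto the
 * GCC/Clang __atomic builtins. */
#include <stdio.h>

static inline unsigned long sketch_fetchadd64(unsigned long *ptr, unsigned long val)
{
#if defined(__x86_64__) && !defined(USE_BUILTIN)
    /* x86_64 fast path: lock xaddq, as in atomics.h */
    __asm__ volatile ("lock xaddq %q0, %1"
                      : "+r" (val), "+m" (*ptr)
                      : : "memory", "cc");
    return val;
#else
    /* USE_BUILTIN defined, or any other architecture: compiler builtin */
    return __atomic_fetch_add(ptr, val, __ATOMIC_RELAXED);
#endif
}

int main(void)
{
    unsigned long counter = 40;
    unsigned long old = sketch_fetchadd64(&counter, 2);
    printf("old=%lu new=%lu\n", old, counter);  /* old=40 new=42 */
    return 0;
}
```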
67 changes: 36 additions & 31 deletions benchmarks/lockhammer/include/atomics.h
@@ -1,4 +1,4 @@
/*
/*
* Copyright (c) 2017, The Linux Foundation. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
@@ -30,6 +30,7 @@
*/

#include <stdint.h>
#include <stdbool.h>

#ifndef __LH_ATOMICS_H_
#define __LH_ATOMICS_H_
@@ -99,11 +100,11 @@ static inline void prefetch64 (unsigned long *ptr) {
}

static inline unsigned long fetchadd64_acquire_release (unsigned long *ptr, unsigned long val) {
#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
asm volatile ("lock xaddq %q0, %1\n"
: "+r" (val), "+m" (*(ptr))
: : "memory", "cc");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
unsigned long old;

@@ -130,18 +131,18 @@ static inline unsigned long fetchadd64_acquire_release (unsigned long *ptr, unsi
val = old;
#endif
#else
/* TODO: builtin atomic call */
val = __atomic_fetch_add(ptr, val, __ATOMIC_ACQ_REL);
#endif

return val;
}
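Note (not part of the diff): every memory-order variant keeps the contract of the assembly it replaces, returning the value the location held before the addition; only the `__ATOMIC_*` order differs between this acquire-release variant and the `_acquire`, `_release`, relaxed, and `fetchsub64` variants that follow. A small check under the assumption of GCC/Clang builtins:

```c
/* Small check, assuming GCC/Clang __atomic builtins: __atomic_fetch_add
 * returns the old value, matching the lock xaddq and ldxr/stxr paths. */
#include <assert.h>

int main(void)
{
    unsigned long x = 5;
    unsigned long prev = __atomic_fetch_add(&x, 3, __ATOMIC_ACQ_REL);
    assert(prev == 5 && x == 8);
    return 0;
}
```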

static inline unsigned long fetchadd64_acquire (unsigned long *ptr, unsigned long val) {
#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
asm volatile ("lock xaddq %q0, %1\n"
: "+r" (val), "+m" (*(ptr))
: : "memory", "cc");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
unsigned long old;

@@ -168,18 +169,18 @@ static inline unsigned long fetchadd64_acquire (unsigned long *ptr, unsigned lon
val = old;
#endif
#else
/* TODO: builtin atomic call */
val = __atomic_fetch_add(ptr, val, __ATOMIC_ACQUIRE);
#endif

return val;
}

static inline unsigned long fetchadd64_release (unsigned long *ptr, unsigned long val) {
#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
asm volatile ("lock xaddq %q0, %1\n"
: "+r" (val), "+m" (*(ptr))
: : "memory", "cc");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
unsigned long old;

@@ -206,18 +207,18 @@ static inline unsigned long fetchadd64_release (unsigned long *ptr, unsigned lon
#endif
val = old;
#else
/* TODO: builtin atomic call */
val = __atomic_fetch_add(ptr, val, __ATOMIC_RELEASE);
#endif

return val;
}

static inline unsigned long fetchadd64 (unsigned long *ptr, unsigned long val) {
#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
asm volatile ("lock xaddq %q0, %1\n"
: "+r" (val), "+m" (*(ptr))
: : "memory", "cc");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
unsigned long old;

@@ -244,20 +245,20 @@ static inline unsigned long fetchadd64 (unsigned long *ptr, unsigned long val) {
val = old;
#endif
#else
/* TODO: builtin atomic call */
val = __atomic_fetch_add(ptr, val, __ATOMIC_RELAXED);
#endif

return val;
}

static inline unsigned long fetchsub64 (unsigned long *ptr, unsigned long val) {
#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
val = (unsigned long) (-(long) val);

asm volatile ("lock xaddq %q0, %1\n"
: "+r" (val), "+m" (*(ptr))
: : "memory", "cc");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
unsigned long old;
val = (unsigned long) (-(long) val);
@@ -285,18 +286,18 @@ static inline unsigned long fetchsub64 (unsigned long *ptr, unsigned long val) {
val = old;
#endif
#else
/* TODO: builtin atomic call */
val = __atomic_fetch_sub(ptr, val, __ATOMIC_RELAXED);
#endif

return val;
}

static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
asm volatile ("xchgq %q0, %1\n"
: "+r" (val), "+m" (*(ptr))
: : "memory", "cc");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
unsigned long old;

@@ -322,7 +323,7 @@ static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
val = old;
#endif
#else
/* TODO: builtin atomic call */
val = __atomic_exchange_n(ptr, val, __ATOMIC_ACQ_REL);
#endif

return val;
@@ -331,12 +332,12 @@ static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
static inline unsigned long cas64 (unsigned long *ptr, unsigned long val, unsigned long exp) {
unsigned long old;

#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
asm volatile ("lock cmpxchgq %2, %1\n"
: "=a" (old), "+m" (*(ptr))
: "r" (val), "0" (exp)
: "memory");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
asm volatile(
" mov %[old], %[exp]\n"
@@ -360,7 +361,8 @@ static inline unsigned long cas64 (unsigned long *ptr, unsigned long val, unsign
: );
#endif
#else
/* TODO: builtin atomic call */
old = exp;
__atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
#endif

return old;
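Note (not part of the diff): seeding `old` with `exp` is what preserves the `lock cmpxchgq` contract of returning the previous value: on success `old` keeps `exp`, which was the previous value, and on failure the builtin overwrites `old` with the value it observed. The builtin's failure order may not be `__ATOMIC_RELEASE` or `__ATOMIC_ACQ_REL`, which is why the release and acquire-release variants further down fall back to a weaker failure order. The diff passes `true` for the weak form, which may fail spuriously on LL/SC targets; the sketch below uses the strong form so its success assertion cannot fire spuriously, and `sketch_cas64` is an illustrative name only.

```c
/* Illustrative sketch, not part of this diff: reproducing the cas64
 * return-previous-value contract with the compare-exchange builtin. */
#include <assert.h>
#include <stdbool.h>

static unsigned long sketch_cas64(unsigned long *ptr, unsigned long val, unsigned long exp)
{
    unsigned long old = exp;
    /* Strong form (weak = false) here; the diff uses the weak form. */
    __atomic_compare_exchange_n(ptr, &old, val, false,
                                __ATOMIC_RELAXED, __ATOMIC_RELAXED);
    return old;  /* exp on success, the observed value on failure */
}

int main(void)
{
    unsigned long lock = 0;
    assert(sketch_cas64(&lock, 1, 0) == 0 && lock == 1);  /* CAS succeeds */
    assert(sketch_cas64(&lock, 2, 0) == 1 && lock == 1);  /* CAS fails, returns 1 */
    return 0;
}
```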
@@ -369,12 +371,12 @@ static inline unsigned long cas64 (unsigned long *ptr, unsigned long val, unsign
static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val, unsigned long exp) {
unsigned long old;

#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
asm volatile ("lock cmpxchgq %2, %1\n"
: "=a" (old), "+m" (*(ptr))
: "r" (val), "0" (exp)
: "memory");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
asm volatile(
" mov %[old], %[exp]\n"
@@ -398,7 +400,8 @@ static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val
: );
#endif
#else
/* TODO: builtin atomic call */
old = exp;
__atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
#endif

return old;
@@ -407,12 +410,12 @@ static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val
static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val, unsigned long exp) {
unsigned long old;

#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
asm volatile ("lock cmpxchgq %2, %1\n"
: "=a" (old), "+m" (*(ptr))
: "r" (val), "0" (exp)
: "memory");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
asm volatile(
" mov %[old], %[exp]\n"
@@ -436,7 +439,8 @@ static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val
: );
#endif
#else
/* TODO: builtin atomic call */
old = exp;
__atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
#endif

return old;
@@ -445,12 +449,12 @@ static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val
static inline unsigned long cas64_acquire_release (unsigned long *ptr, unsigned long val, unsigned long exp) {
unsigned long old;

#if defined(__x86_64__)
#if defined(__x86_64__) && !defined(USE_BUILTIN)
asm volatile ("lock cmpxchgq %2, %1\n"
: "=a" (old), "+m" (*(ptr))
: "r" (val), "0" (exp)
: "memory");
#elif defined(__aarch64__)
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
#if defined(USE_LSE)
asm volatile(
" mov %[old], %[exp]\n"
@@ -474,7 +478,8 @@ static inline unsigned long cas64_acquire_release (unsigned long *ptr, unsigned
: );
#endif
#else
/* TODO: builtin atomic call */
old = exp;
__atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE);
#endif

return old;