From 5f1d226d97f3d3a9a024c49012be50b34e7d5dab Mon Sep 17 00:00:00 2001
From: Robert Golshan
Date: Fri, 23 Aug 2019 12:42:53 -0500
Subject: [PATCH 1/2] lockhammer: Add builtin atomics

Add builtin atomic functions for atomics.h

Change-Id: I830492b20193e8b13b9d3cd637487c7f8ed0f614
Signed-off-by: Robert Golshan
---
 benchmarks/lockhammer/include/atomics.h | 27 +++++++++++++++----------
 1 file changed, 16 insertions(+), 11 deletions(-)

diff --git a/benchmarks/lockhammer/include/atomics.h b/benchmarks/lockhammer/include/atomics.h
index 6dda573..fbc6755 100644
--- a/benchmarks/lockhammer/include/atomics.h
+++ b/benchmarks/lockhammer/include/atomics.h
@@ -1,4 +1,4 @@
-/* 
+/*
  * Copyright (c) 2017, The Linux Foundation. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
@@ -30,6 +30,7 @@
  */

 #include
+#include

 #ifndef __LH_ATOMICS_H_
 #define __LH_ATOMICS_H_
@@ -130,7 +131,7 @@ static inline unsigned long fetchadd64_acquire_release (unsigned long *ptr, unsi
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_add(ptr, val, __ATOMIC_ACQ_REL);
 #endif

     return val;
@@ -168,7 +169,7 @@ static inline unsigned long fetchadd64_acquire (unsigned long *ptr, unsigned lon
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_add(ptr, val, __ATOMIC_ACQUIRE);
 #endif

     return val;
@@ -206,7 +207,7 @@ static inline unsigned long fetchadd64_release (unsigned long *ptr, unsigned lon
 #endif
     val = old;
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_add(ptr, val, __ATOMIC_RELEASE);
 #endif

     return val;
@@ -244,7 +245,7 @@ static inline unsigned long fetchadd64 (unsigned long *ptr, unsigned long val) {
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_add(ptr, val, __ATOMIC_RELAXED);
 #endif

     return val;
@@ -285,7 +286,7 @@ static inline unsigned long fetchsub64 (unsigned long *ptr, unsigned long val) {
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_sub(ptr, val, __ATOMIC_RELAXED);
 #endif

     return val;
@@ -322,7 +323,7 @@ static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_exchange_n(ptr, val, __ATOMIC_ACQ_REL);
 #endif

     return val;
@@ -360,7 +361,8 @@ static inline unsigned long cas64 (unsigned long *ptr, unsigned long val, unsign
           : );
 #endif
 #else
-    /* TODO: builtin atomic call */
+    old = exp;
+    __atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
 #endif

     return old;
@@ -398,7 +400,8 @@ static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val
           : );
 #endif
 #else
-    /* TODO: builtin atomic call */
+    old = exp;
+    __atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
 #endif

     return old;
@@ -436,7 +439,8 @@ static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val
           : );
 #endif
 #else
-    /* TODO: builtin atomic call */
+    old = exp;
+    __atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_RELEASE, __ATOMIC_RELEASE);
 #endif

     return old;
@@ -474,7 +478,8 @@ static inline unsigned long cas64_acquire_release (unsigned long *ptr, unsigned
           : );
 #endif
 #else
-    /* TODO: builtin atomic call */
+    old = exp;
+    __atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_ACQ_REL, __ATOMIC_ACQ_REL);
 #endif

     return old;
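
For reference, the branches added above map directly onto the GCC/Clang __atomic builtins. Below is a minimal standalone sketch, not part of the patch, of how the new fetchadd64/cas64 paths behave; the demo_* names and the harness are illustrative only:

/*
 * Standalone sketch (not part of the patch): exercises the same __atomic
 * builtins the new #else branches rely on.  Build: gcc -O2 builtin_demo.c
 */
#include <stdbool.h>
#include <stdio.h>

/* Mirrors the builtin branch of fetchadd64(): returns the value *ptr held
 * before the addition. */
static unsigned long demo_fetchadd64 (unsigned long *ptr, unsigned long val) {
    return __atomic_fetch_add(ptr, val, __ATOMIC_RELAXED);
}

/* Mirrors the builtin branch of cas64(): returns the value observed in *ptr;
 * the swap took place iff that value equals exp.  The 'true' argument is the
 * weak flag, which permits spurious failure on LL/SC targets. */
static unsigned long demo_cas64 (unsigned long *ptr, unsigned long val, unsigned long exp) {
    unsigned long old = exp;
    __atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
    return old;
}

int main (void) {
    unsigned long lock = 0;
    unsigned long before = demo_fetchadd64(&lock, 5);  /* before == 0, lock == 5 */
    unsigned long taken  = demo_cas64(&lock, 9, 5);    /* typically 5: CAS succeeds, lock == 9 */
    unsigned long missed = demo_cas64(&lock, 1, 5);    /* 9: CAS fails, lock unchanged */

    printf("%lu %lu %lu -> lock = %lu\n", before, taken, missed, lock);
    return 0;
}

Two behavioural notes on the builtin CAS path, offered as observations rather than corrections: the wrappers pass true for the weak flag, so on LL/SC targets the builtin CAS may fail spuriously where the hand-written casal/ldxr-stxr sequence would retry; and GCC's documentation states the failure memory order may not be __ATOMIC_RELEASE or __ATOMIC_ACQ_REL, which may be worth double-checking for the release and acq_rel variants.
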
From 04844daf6fbfe5f4b5aede485afb6f48b0449d5a Mon Sep 17 00:00:00 2001
From: Robert Golshan
Date: Fri, 23 Aug 2019 12:43:04 -0500
Subject: [PATCH 2/2] lockhammer: Enable switch for builtin atomics

Add USE_BUILTIN in the makefile to force using builtin atomics instead
of arch specific assembly. This only affects atomics that have the
builtin defined, which is currently only in lockhammer/include/atomics.h

Change-Id: I784c03c186dee912be65ad56a95b102ae40cb0ad
Signed-off-by: Robert Golshan
---
 benchmarks/lockhammer/Makefile          |  4 +++
 benchmarks/lockhammer/include/atomics.h | 40 ++++++++++++-------------
 2 files changed, 24 insertions(+), 20 deletions(-)

diff --git a/benchmarks/lockhammer/Makefile b/benchmarks/lockhammer/Makefile
index 0dda8d0..ca5f18e 100644
--- a/benchmarks/lockhammer/Makefile
+++ b/benchmarks/lockhammer/Makefile
@@ -13,6 +13,10 @@ endif
 ifneq ($(LSE_ENABLE),)
 CFLAGS+=-march=armv8-a+lse -DUSE_LSE
 endif
+# Use builtin atomics instead of arch specific, if available
+ifneq ($(USE_BUILTIN),)
+CFLAGS+=-DUSE_BUILTIN
+endif

 LDFLAGS=-lpthread -lm

diff --git a/benchmarks/lockhammer/include/atomics.h b/benchmarks/lockhammer/include/atomics.h
index fbc6755..e7ed75c 100644
--- a/benchmarks/lockhammer/include/atomics.h
+++ b/benchmarks/lockhammer/include/atomics.h
@@ -100,11 +100,11 @@ static inline void prefetch64 (unsigned long *ptr) {
 }

 static inline unsigned long fetchadd64_acquire_release (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;

@@ -138,11 +138,11 @@ static inline unsigned long fetchadd64_acquire_release (unsigned long *ptr, unsi
 }

 static inline unsigned long fetchadd64_acquire (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;

@@ -176,11 +176,11 @@ static inline unsigned long fetchadd64_acquire (unsigned long *ptr, unsigned lon
 }

 static inline unsigned long fetchadd64_release (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;

@@ -214,11 +214,11 @@ static inline unsigned long fetchadd64_release (unsigned long *ptr, unsigned lon
 }

 static inline unsigned long fetchadd64 (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;

@@ -252,13 +252,13 @@ static inline unsigned long fetchadd64 (unsigned long *ptr, unsigned long val) {
 }

 static inline unsigned long fetchsub64 (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     val = (unsigned long) (-(long) val);

     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;
     val = (unsigned long) (-(long) val);
@@ -293,11 +293,11 @@ static inline unsigned long fetchsub64 (unsigned long *ptr, unsigned long val) {
 }

 static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("xchgq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;

@@ -332,12 +332,12 @@ static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
 static inline unsigned long cas64 (unsigned long *ptr, unsigned long val, unsigned long exp) {
     unsigned long old;

-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock cmpxchgq %2, %1\n"
         : "=a" (old), "+m" (*(ptr))
         : "r" (val), "0" (exp)
         : "memory");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     asm volatile(
         " mov %[old], %[exp]\n"
@@ -371,12 +371,12 @@ static inline unsigned long cas64 (unsigned long *ptr, unsigned long val, unsign
 static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val, unsigned long exp) {
     unsigned long old;

-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock cmpxchgq %2, %1\n"
         : "=a" (old), "+m" (*(ptr))
         : "r" (val), "0" (exp)
         : "memory");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     asm volatile(
         " mov %[old], %[exp]\n"
@@ -410,12 +410,12 @@ static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val
 static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val, unsigned long exp) {
     unsigned long old;

-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock cmpxchgq %2, %1\n"
         : "=a" (old), "+m" (*(ptr))
         : "r" (val), "0" (exp)
         : "memory");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     asm volatile(
         " mov %[old], %[exp]\n"
@@ -449,12 +449,12 @@ static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val
 static inline unsigned long cas64_acquire_release (unsigned long *ptr, unsigned long val, unsigned long exp) {
     unsigned long old;

-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock cmpxchgq %2, %1\n"
         : "=a" (old), "+m" (*(ptr))
         : "r" (val), "0" (exp)
         : "memory");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     asm volatile(
         " mov %[old], %[exp]\n"
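
With both patches applied, USE_BUILTIN is a plain preprocessor switch: the Makefile turns any non-empty value of USE_BUILTIN into -DUSE_BUILTIN (for example `make USE_BUILTIN=1`, mirroring the existing LSE_ENABLE switch), which disables the x86-64 and AArch64 assembly branches and drops every wrapper into its builtin #else path. A compile-only sketch of that effect, outside the lockhammer tree (the condensed swap64 body and the check() caller are illustrative only):

/*
 * Compile-only sketch (not part of the patches): shows how -DUSE_BUILTIN
 * routes a wrapper to the builtin branch on every architecture.
 * Build: gcc -O2 -DUSE_BUILTIN -c use_builtin_demo.c
 */
static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
#if defined(__x86_64__) && !defined(USE_BUILTIN)
    /* hand-written x86-64 xchgq (elided) */
#elif defined(__aarch64__) && !defined(USE_BUILTIN)
    /* hand-written AArch64 LSE / LL-SC sequence (elided) */
#else
    val = __atomic_exchange_n(ptr, val, __ATOMIC_ACQ_REL);  /* builtin path */
#endif
    return val;
}

unsigned long check (unsigned long *p) {
    return swap64(p, 1UL);
}
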