Skip to content

Commit

Permalink
Miscellaneous rwmutex bug fixes and improvements
Browse files Browse the repository at this point in the history
The ERTS internal rwlock implementation could get
into an inconsistent state. This bug was very seldom
triggered, but could be during heavy contention. The
bug was introduced in R14B (erts-5.8.1).

The bug was most likely to be triggered when using the
read_concurrency option on an ETS table that
was frequently accessed from multiple processes doing
lots of writes and reads. That is, in a situation where
you typically don't want to use the read_concurrency
option in the first place.
  • Loading branch information
rickard-green committed Dec 1, 2010
1 parent f0fae4b commit 8057051
Show file tree
Hide file tree
Showing 4 changed files with 588 additions and 272 deletions.
145 changes: 143 additions & 2 deletions erts/include/internal/ethr_mutex.h
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,13 @@
# define ETHR_MTX_HARD_DEBUG
#endif

/* Manual toggle for lock-ownership sanity checking: change the outer
 * `#if 0' to `#if 1' to enable exclusive-ownership checks; the nested
 * `#if 1' then also enables non-exclusive (reader) checks. */
#if 0
# define ETHR_MTX_CHK_EXCL
#if 1
# define ETHR_MTX_CHK_NON_EXCL
#endif
#endif

#ifdef ETHR_MTX_HARD_DEBUG
# ifdef __GNUC__
# warning ETHR_MTX_HARD_DEBUG
Expand All @@ -49,6 +56,15 @@

#if defined(ETHR_USE_OWN_RWMTX_IMPL__) || defined(ETHR_USE_OWN_MTX_IMPL__)

/* Debug builds always get both ownership checks (only when the own
 * mutex/rwmutex implementations are in use -- see the surrounding
 * ETHR_USE_OWN_*_IMPL__ condition). */
#ifdef ETHR_DEBUG
# ifndef ETHR_MTX_CHK_EXCL
# define ETHR_MTX_CHK_EXCL
# endif
# ifndef ETHR_MTX_CHK_NON_EXCL
# define ETHR_MTX_CHK_NON_EXCL
# endif
#endif

#if 0
# define ETHR_MTX_Q_LOCK_SPINLOCK__
# define ETHR_MTX_QLOCK_TYPE__ ethr_spinlock_t
Expand All @@ -68,8 +84,8 @@

/* frequent read kind */
#define ETHR_RWMTX_R_FLG__ (((long) 1) << 28)
#define ETHR_RWMTX_R_MASK__ (ETHR_RWMTX_R_WAIT_FLG__ - 1)
/* Set while a reader is aborting an unlock operation; the pending
 * unlock count lives in the bits below this flag. */
#define ETHR_RWMTX_R_ABRT_UNLCK_FLG__ (((long) 1) << 27)
#define ETHR_RWMTX_R_PEND_UNLCK_MASK__ (ETHR_RWMTX_R_ABRT_UNLCK_FLG__ - 1)

/* normal kind */
#define ETHR_RWMTX_RS_MASK__ (ETHR_RWMTX_R_WAIT_FLG__ - 1)
Expand All @@ -91,6 +107,12 @@ struct ethr_mutex_base_ {
#ifdef ETHR_MTX_HARD_DEBUG_WSQ
int ws;
#endif
#ifdef ETHR_MTX_CHK_EXCL
ethr_atomic_t exclusive;
#endif
#ifdef ETHR_MTX_CHK_NON_EXCL
ethr_atomic_t non_exclusive;
#endif
#ifdef ETHR_MTX_HARD_DEBUG_LFS
ethr_atomic_t hdbg_lfs;
#endif
Expand Down Expand Up @@ -344,6 +366,116 @@ do { \
#define ETHR_MTX_HARD_DEBUG_FENCE_INIT(X)
#endif

#ifdef ETHR_MTX_CHK_EXCL

#if !defined(ETHR_DEBUG) && defined(__GNUC__)
#warning "check exclusive is enabled"
#endif

# define ETHR_MTX_CHK_EXCL_INIT__(MTXB) \
ethr_atomic_init(&(MTXB)->exclusive, 0)

# define ETHR_MTX_CHK_EXCL_IS_EXCL(MTXB) \
do { \
ETHR_COMPILER_BARRIER; \
if (!ethr_atomic_read(&(MTXB)->exclusive)) \
ethr_assert_failed(__FILE__, __LINE__, __func__,\
"is exclusive"); \
ETHR_COMPILER_BARRIER; \
} while (0)
# define ETHR_MTX_CHK_EXCL_IS_NOT_EXCL(MTXB) \
do { \
ETHR_COMPILER_BARRIER; \
if (ethr_atomic_read(&(MTXB)->exclusive)) \
ethr_assert_failed(__FILE__, __LINE__, __func__,\
"is not exclusive"); \
ETHR_COMPILER_BARRIER; \
} while (0)
# define ETHR_MTX_CHK_EXCL_SET_EXCL(MTXB) \
do { \
ETHR_MTX_CHK_EXCL_IS_NOT_EXCL((MTXB)); \
ethr_atomic_set(&(MTXB)->exclusive, 1); \
ETHR_COMPILER_BARRIER; \
} while (0)
# define ETHR_MTX_CHK_EXCL_UNSET_EXCL(MTXB) \
do { \
ETHR_MTX_CHK_EXCL_IS_EXCL((MTXB)); \
ethr_atomic_set(&(MTXB)->exclusive, 0); \
ETHR_COMPILER_BARRIER; \
} while (0)

#ifdef ETHR_MTX_CHK_NON_EXCL

#if !defined(ETHR_DEBUG) && defined(__GNUC__)
#warning "check non-exclusive is enabled"
#endif

# define ETHR_MTX_CHK_NON_EXCL_INIT__(MTXB) \
ethr_atomic_init(&(MTXB)->non_exclusive, 0)
# define ETHR_MTX_CHK_EXCL_IS_NON_EXCL(MTXB) \
do { \
ETHR_COMPILER_BARRIER; \
if (!ethr_atomic_read(&(MTXB)->non_exclusive)) \
ethr_assert_failed(__FILE__, __LINE__, __func__,\
"is non-exclusive"); \
ETHR_COMPILER_BARRIER; \
} while (0)
# define ETHR_MTX_CHK_EXCL_IS_NOT_NON_EXCL(MTXB) \
do { \
ETHR_COMPILER_BARRIER; \
if (ethr_atomic_read(&(MTXB)->non_exclusive)) \
ethr_assert_failed(__FILE__, __LINE__, __func__,\
"is not non-exclusive"); \
ETHR_COMPILER_BARRIER; \
} while (0)
# define ETHR_MTX_CHK_EXCL_SET_NON_EXCL(MTXB) \
do { \
ETHR_COMPILER_BARRIER; \
ethr_atomic_inc(&(MTXB)->non_exclusive); \
ETHR_COMPILER_BARRIER; \
} while (0)
# define ETHR_MTX_CHK_EXCL_SET_NON_EXCL_NO(MTXB, NO) \
do { \
ETHR_COMPILER_BARRIER; \
ethr_atomic_add(&(MTXB)->non_exclusive, (NO)); \
ETHR_COMPILER_BARRIER; \
} while (0)
# define ETHR_MTX_CHK_EXCL_UNSET_NON_EXCL(MTXB) \
do { \
ETHR_COMPILER_BARRIER; \
ethr_atomic_dec(&(MTXB)->non_exclusive); \
ETHR_COMPILER_BARRIER; \
} while (0)
#else
# define ETHR_MTX_CHK_NON_EXCL_INIT__(MTXB)
# define ETHR_MTX_CHK_EXCL_IS_NON_EXCL(MTXB)
# define ETHR_MTX_CHK_EXCL_IS_NOT_NON_EXCL(MTXB)
# define ETHR_MTX_CHK_EXCL_SET_NON_EXCL_NO(MTXB, NO)
# define ETHR_MTX_CHK_EXCL_SET_NON_EXCL(MTXB)
# define ETHR_MTX_CHK_EXCL_UNSET_NON_EXCL(MTXB)
#endif

#else
# define ETHR_MTX_CHK_EXCL_INIT__(MTXB)
# define ETHR_MTX_CHK_EXCL_IS_EXCL(MTXB)
# define ETHR_MTX_CHK_EXCL_IS_NOT_EXCL(MTXB)
# define ETHR_MTX_CHK_EXCL_SET_EXCL(MTXB)
# define ETHR_MTX_CHK_EXCL_UNSET_EXCL(MTXB)
# define ETHR_MTX_CHK_NON_EXCL_INIT__(MTXB)
# define ETHR_MTX_CHK_EXCL_IS_NON_EXCL(MTXB)
# define ETHR_MTX_CHK_EXCL_IS_NOT_NON_EXCL(MTXB)
# define ETHR_MTX_CHK_EXCL_SET_NON_EXCL_NO(MTXB, NO)
# define ETHR_MTX_CHK_EXCL_SET_NON_EXCL(MTXB)
# define ETHR_MTX_CHK_EXCL_UNSET_NON_EXCL(MTXB)
#endif

# define ETHR_MTX_CHK_EXCL_INIT(MTXB) \
do { \
ETHR_MTX_CHK_EXCL_INIT__((MTXB)); \
ETHR_MTX_CHK_NON_EXCL_INIT__((MTXB)); \
} while (0)


#ifdef ETHR_USE_OWN_MTX_IMPL__

#define ETHR_MTX_DEFAULT_MAIN_SPINCOUNT_MAX 2000
Expand All @@ -369,6 +501,11 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
act = ethr_atomic_cmpxchg_acqb(&mtx->mtxb.flgs, ETHR_RWMTX_W_FLG__, 0);
res = (act == 0) ? 0 : EBUSY;

#ifdef ETHR_MTX_CHK_EXCL
if (res == 0)
ETHR_MTX_CHK_EXCL_SET_EXCL(&mtx->mtxb);
#endif

ETHR_MTX_HARD_DEBUG_LFS_TRYRWLOCK(&mtx->mtxb, res);
ETHR_MTX_HARD_DEBUG_FENCE_CHK(mtx);

Expand All @@ -386,6 +523,8 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
if (act != 0)
ethr_mutex_lock_wait__(mtx, act);

ETHR_MTX_CHK_EXCL_SET_EXCL(&mtx->mtxb);

ETHR_MTX_HARD_DEBUG_LFS_RWLOCK(&mtx->mtxb);
ETHR_MTX_HARD_DEBUG_FENCE_CHK(mtx);

Expand All @@ -400,6 +539,8 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
ETHR_MTX_HARD_DEBUG_FENCE_CHK(mtx);
ETHR_MTX_HARD_DEBUG_LFS_RWUNLOCK(&mtx->mtxb);

ETHR_MTX_CHK_EXCL_UNSET_EXCL(&mtx->mtxb);

act = ethr_atomic_cmpxchg_relb(&mtx->mtxb.flgs, 0, ETHR_RWMTX_W_FLG__);
if (act != ETHR_RWMTX_W_FLG__)
ethr_mutex_unlock_wake__(mtx, act);
Expand Down
49 changes: 43 additions & 6 deletions erts/include/internal/i386/atomic.h
Original file line number Diff line number Diff line change
Expand Up @@ -167,15 +167,52 @@ ethr_native_atomic_xchg(ethr_native_atomic_t *var, long val)
* Atomic ops with at least specified barriers.
*/

#define ethr_native_atomic_read_acqb ethr_native_atomic_read
#define ethr_native_atomic_inc_return_acqb ethr_native_atomic_inc_return
static ETHR_INLINE long
ethr_native_atomic_read_acqb(ethr_native_atomic_t *var)
{
long val;
#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
val = var->counter;
#else
val = ethr_native_atomic_add_return(var, 0);
#endif
__asm__ __volatile__("" : : : "memory");
return val;
}

/*
 * Atomic set with release barrier (i386/x86_64).
 *
 * The compiler barrier before the store keeps the compiler from
 * sinking earlier memory operations below it.  On x86_64 and
 * non-ETHR_PRE_PENTIUM4_COMPAT builds a plain store is used; the
 * pre-Pentium4-compatible build uses xchg instead -- presumably to
 * get an implicitly locked store on older SMP systems (confirm
 * against the ethread port notes).
 *
 * NOTE: stale `#define ethr_native_atomic_set_relb ...' and
 * `#define ethr_native_atomic_dec*_relb ...' residue lines were
 * removed from the function body -- they conflicted with this and
 * the following function definitions.
 */
static ETHR_INLINE void
ethr_native_atomic_set_relb(ethr_native_atomic_t *var, long i)
{
    __asm__ __volatile__("" : : : "memory");
#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
    var->counter = i;
#else
    (void) ethr_native_atomic_xchg(var, i);
#endif
}

/*
 * Atomic increment returning the new value, with acquire barrier
 * (i386/x86_64).  Only a compiler barrier is emitted after the op;
 * presumably the increment is a LOCK-prefixed instruction, which is
 * already a full hardware barrier on x86 -- confirm in the base
 * ethr_native_atomic_inc_return implementation.
 */
static ETHR_INLINE long
ethr_native_atomic_inc_return_acqb(ethr_native_atomic_t *var)
{
long res = ethr_native_atomic_inc_return(var);
__asm__ __volatile__("" : : : "memory");
return res;
}

/*
 * Atomic decrement with release barrier (i386/x86_64).  The compiler
 * barrier before the decrement keeps prior memory operations from
 * being sunk below it; the decrement itself is presumably
 * LOCK-prefixed (full hardware barrier on x86) -- confirm in the base
 * implementation.
 */
static ETHR_INLINE void
ethr_native_atomic_dec_relb(ethr_native_atomic_t *var)
{
__asm__ __volatile__("" : : : "memory");
ethr_native_atomic_dec(var);
}

/*
 * Atomic decrement returning the new value, with release barrier
 * (i386/x86_64).  Same barrier reasoning as ethr_native_atomic_dec_relb
 * above: compiler barrier first, hardware ordering presumably provided
 * by the locked decrement itself.
 */
static ETHR_INLINE long
ethr_native_atomic_dec_return_relb(ethr_native_atomic_t *var)
{
__asm__ __volatile__("" : : : "memory");
return ethr_native_atomic_dec_return(var);
}

/* A LOCK-prefixed cmpxchg is a full memory barrier on x86, so the
 * plain cmpxchg presumably satisfies both the acquire and the release
 * variants -- confirm in the base implementation. */
#define ethr_native_atomic_cmpxchg_acqb ethr_native_atomic_cmpxchg
#define ethr_native_atomic_cmpxchg_relb ethr_native_atomic_cmpxchg

Expand Down
35 changes: 28 additions & 7 deletions erts/include/internal/sparc32/atomic.h
Original file line number Diff line number Diff line change
Expand Up @@ -176,38 +176,59 @@ ethr_native_atomic_cmpxchg(ethr_native_atomic_t *var, long new, long old)
* Atomic ops with at least specified barriers.
*/

/* TODO: relax acquire barriers */

/*
 * Atomic read with acquire barrier (SPARC).
 *
 * The membar after the load orders it before all subsequent memory
 * operations; the "memory" clobber stops compiler reordering.
 * (The duplicated weaker membar line left over from the diff was
 * removed; only the strengthened barrier is kept.)
 */
static ETHR_INLINE long
ethr_native_atomic_read_acqb(ethr_native_atomic_t *var)
{
    long res = ethr_native_atomic_read(var);
    __asm__ __volatile__("membar #LoadLoad|#LoadStore|#StoreLoad|#StoreStore" : : : "memory");
    return res;
}

/*
 * Atomic set with release barrier (SPARC).
 *
 * The membar before the store orders all prior loads and stores
 * before it (#LoadStore|#StoreStore); the "memory" clobber stops
 * compiler reordering.  (The duplicated membar line without the
 * clobber, left over from the diff, was removed.)
 */
static ETHR_INLINE void
ethr_native_atomic_set_relb(ethr_native_atomic_t *var, long i)
{
    __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
    ethr_native_atomic_set(var, i);
}

/*
 * Atomic increment returning the new value, with acquire barrier
 * (SPARC): the membar after the operation orders it before all
 * subsequent loads and stores (#LoadLoad|#LoadStore).
 */
static ETHR_INLINE long
ethr_native_atomic_inc_return_acqb(ethr_native_atomic_t *var)
{
long res = ethr_native_atomic_inc_return(var);
__asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
return res;
}

/*
 * Atomic decrement with release barrier (SPARC): the membar before
 * the decrement orders all prior loads and stores before it.
 * (The duplicated membar line without the "memory" clobber, left
 * over from the diff, was removed.)
 */
static ETHR_INLINE void
ethr_native_atomic_dec_relb(ethr_native_atomic_t *var)
{
    __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
    ethr_native_atomic_dec(var);
}

/*
 * Atomic decrement returning the new value, with release barrier
 * (SPARC).  Same barrier placement as ethr_native_atomic_dec_relb.
 * (The duplicated membar line without the "memory" clobber, left
 * over from the diff, was removed.)
 */
static ETHR_INLINE long
ethr_native_atomic_dec_return_relb(ethr_native_atomic_t *var)
{
    __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
    return ethr_native_atomic_dec_return(var);
}

/*
 * Compare-and-exchange with acquire barrier (SPARC): the membar after
 * the exchange orders it before all subsequent loads and stores.
 *
 * NOTE: the stale `#define ethr_native_atomic_inc_return_acqb ...'
 * and `#define ethr_native_atomic_cmpxchg_{acqb,relb} ...' residue
 * lines were removed -- macros with the same names as the function
 * definitions in this file would mangle those definitions.
 */
static ETHR_INLINE long
ethr_native_atomic_cmpxchg_acqb(ethr_native_atomic_t *var, long new, long old)
{
    long res = ethr_native_atomic_cmpxchg(var, new, old);
    __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
    return res;
}

/*
 * Compare-and-exchange with release barrier (SPARC): the membar
 * before the exchange orders all prior loads and stores before it
 * (#LoadStore|#StoreStore).
 */
static ETHR_INLINE long
ethr_native_atomic_cmpxchg_relb(ethr_native_atomic_t *var, long new, long old)
{
__asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
return ethr_native_atomic_cmpxchg(var, new, old);
}

#endif /* ETHR_TRY_INLINE_FUNCS */

Expand Down
Loading

0 comments on commit 8057051

Please sign in to comment.