parisc: Fix lightweight spinlock checks to not break futexes

commit a0f4b78 upstream.

The lightweight spinlock checks verify that a spinlock holds either the
value 0 (spinlock locked) or __ARCH_SPIN_LOCK_UNLOCKED_VAL, i.e. that
no bits outside __ARCH_SPIN_LOCK_UNLOCKED_VAL are set in the lock word.
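
In rough C terms, the check accepts only lock words whose set bits are a
subset of __ARCH_SPIN_LOCK_UNLOCKED_VAL. A minimal sketch (the real
check is inline assembly built from the same constants; spin_val_check
and __builtin_trap here are illustrative stand-ins):

    #define __ARCH_SPIN_LOCK_UNLOCKED_VAL	0x1a46

    /* Sketch of the lightweight check: 0 (locked) and the unlocked
     * value pass; any other bit pattern indicates corruption. */
    static void spin_val_check(unsigned int lock_val)
    {
    	if (lock_val & ~__ARCH_SPIN_LOCK_UNLOCKED_VAL)
    		__builtin_trap();	/* stands in for "break 6,6" */
    }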

This breaks the current LWS code, which unlocks a spinlock by writing
the address of the lock into the lock word, an optimization that saved
one assembler instruction.

Fix it by making spinlock_types.h accessible to asm code, changing the
LWS spinlock-unlocking code to write __ARCH_SPIN_LOCK_UNLOCKED_VAL into
the lock word, and adding some missing lightweight spinlock checks to
the LWS path. Finally, make the spinlock checks dependent on
DEBUG_KERNEL.
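
As a sketch of the unlock change in C (the real code is PA-RISC
assembly in syscall.S; the pointer cast is for illustration only):

    volatile unsigned int *lock;	/* one of the LWS lock words */

    /* Before: unlock by storing the lock's own address. Any such
     * address has bits outside __ARCH_SPIN_LOCK_UNLOCKED_VAL set,
     * so the lightweight checks would trap on the next acquire. */
    *lock = (unsigned int)(unsigned long)lock;

    /* After: store the canonical unlocked value instead. */
    *lock = __ARCH_SPIN_LOCK_UNLOCKED_VAL;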

Noticed-by: John David Anglin <dave.anglin@bell.net>
Signed-off-by: Helge Deller <deller@gmx.de>
Tested-by: John David Anglin <dave.anglin@bell.net>
Cc: stable@vger.kernel.org # v6.4+
Fixes: 15e64ef ("parisc: Add lightweight spinlock checks")
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
hdeller authored and gregkh committed Aug 16, 2023
1 parent 7dc5a1f commit 24555c3
Showing 4 changed files with 27 additions and 6 deletions.
2 changes: 1 addition & 1 deletion arch/parisc/Kconfig.debug
@@ -2,7 +2,7 @@
 #
 config LIGHTWEIGHT_SPINLOCK_CHECK
 	bool "Enable lightweight spinlock checks"
-	depends on SMP && !DEBUG_SPINLOCK
+	depends on DEBUG_KERNEL && SMP && !DEBUG_SPINLOCK
 	default y
 	help
 	  Add checks with low performance impact to the spinlock functions
2 changes: 0 additions & 2 deletions arch/parisc/include/asm/spinlock.h
@@ -7,8 +7,6 @@
 #include <asm/processor.h>
 #include <asm/spinlock_types.h>
 
-#define SPINLOCK_BREAK_INSN	0x0000c006	/* break 6,6 */
-
 static inline void arch_spin_val_check(int lock_val)
 {
 	if (IS_ENABLED(CONFIG_LIGHTWEIGHT_SPINLOCK_CHECK))
6 changes: 6 additions & 0 deletions arch/parisc/include/asm/spinlock_types.h
@@ -4,6 +4,10 @@
 
 #define __ARCH_SPIN_LOCK_UNLOCKED_VAL	0x1a46
 
+#define SPINLOCK_BREAK_INSN	0x0000c006	/* break 6,6 */
+
+#ifndef __ASSEMBLY__
+
 typedef struct {
 #ifdef CONFIG_PA20
 	volatile unsigned int slock;
@@ -27,6 +31,8 @@ typedef struct {
 	volatile unsigned int counter;
 } arch_rwlock_t;
 
+#endif /* __ASSEMBLY__ */
+
 #define __ARCH_RW_LOCK_UNLOCKED__       0x01000000
 #define __ARCH_RW_LOCK_UNLOCKED { .lock_mutex = __ARCH_SPIN_LOCK_UNLOCKED, \
 				  .counter = __ARCH_RW_LOCK_UNLOCKED__ }
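
The #ifndef __ASSEMBLY__ guard is what makes this header usable from
syscall.S: constants shared with assembly sit outside the guard, while
C-only typedefs stay inside, hidden from the assembler. A generic
sketch of the pattern (names here are hypothetical):

    #define SHARED_CONSTANT	0x1234	/* visible to both C and .S files */

    #ifndef __ASSEMBLY__		/* the assembler stops reading here */
    typedef struct {
    	volatile unsigned int val;	/* C-only declarations */
    } example_lock_t;
    #endif /* __ASSEMBLY__ */
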
23 changes: 20 additions & 3 deletions arch/parisc/kernel/syscall.S
@@ -39,6 +39,7 @@ registers).
 #include <asm/assembly.h>
 #include <asm/processor.h>
 #include <asm/cache.h>
+#include <asm/spinlock_types.h>
 
 #include <linux/linkage.h>

@@ -66,6 +67,16 @@ registers).
 	stw	\reg1, 0(%sr2,\reg2)
 	.endm
 
+	/* raise exception if spinlock content is not zero or
+	 * __ARCH_SPIN_LOCK_UNLOCKED_VAL */
+	.macro	spinlock_check spin_val,tmpreg
+#ifdef CONFIG_LIGHTWEIGHT_SPINLOCK_CHECK
+	ldi	__ARCH_SPIN_LOCK_UNLOCKED_VAL, \tmpreg
+	andcm,=	\spin_val, \tmpreg, %r0
+	.word	SPINLOCK_BREAK_INSN
+#endif
+	.endm
+
 	.text
 
 	.import syscall_exit,code
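
For reference, the macro works as follows (standard PA-RISC semantics,
stated here as a reading aid): ldi loads __ARCH_SPIN_LOCK_UNLOCKED_VAL
into \tmpreg; andcm,= computes \spin_val AND NOT \tmpreg and nullifies
the next instruction when the result is zero; otherwise the .word,
which encodes "break 6,6" (SPINLOCK_BREAK_INSN), executes and raises
the trap. In C terms: trap when
(spin_val & ~__ARCH_SPIN_LOCK_UNLOCKED_VAL) != 0.
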
@@ -508,7 +519,8 @@ lws_start:
 
 lws_exit_noerror:
 	lws_pagefault_enable	%r1,%r21
-	stw,ma	%r20, 0(%sr2,%r20)
+	ldi	__ARCH_SPIN_LOCK_UNLOCKED_VAL, %r21
+	stw,ma	%r21, 0(%sr2,%r20)
 	ssm	PSW_SM_I, %r0
 	b	lws_exit
 	copy	%r0, %r21
@@ -521,7 +533,8 @@ lws_wouldblock:
 
 lws_pagefault:
 	lws_pagefault_enable	%r1,%r21
-	stw,ma	%r20, 0(%sr2,%r20)
+	ldi	__ARCH_SPIN_LOCK_UNLOCKED_VAL, %r21
+	stw,ma	%r21, 0(%sr2,%r20)
 	ssm	PSW_SM_I, %r0
 	ldo	3(%r0),%r28
 	b	lws_exit
@@ -619,6 +632,7 @@ lws_compare_and_swap:
 
 	/* Try to acquire the lock */
 	LDCW	0(%sr2,%r20), %r28
+	spinlock_check	%r28, %r21
 	comclr,<>	%r0, %r28, %r0
 	b,n	lws_wouldblock

@@ -772,6 +786,7 @@ cas2_lock_start:
 
 	/* Try to acquire the lock */
 	LDCW	0(%sr2,%r20), %r28
+	spinlock_check	%r28, %r21
 	comclr,<>	%r0, %r28, %r0
 	b,n	lws_wouldblock

@@ -1001,6 +1016,7 @@ atomic_xchg_start:
 
 	/* Try to acquire the lock */
 	LDCW	0(%sr2,%r20), %r28
+	spinlock_check	%r28, %r21
 	comclr,<>	%r0, %r28, %r0
 	b,n	lws_wouldblock

@@ -1199,6 +1215,7 @@ atomic_store_start:
 
 	/* Try to acquire the lock */
 	LDCW	0(%sr2,%r20), %r28
+	spinlock_check	%r28, %r21
 	comclr,<>	%r0, %r28, %r0
 	b,n	lws_wouldblock

@@ -1330,7 +1347,7 @@ ENTRY(lws_lock_start)
 	/* lws locks */
 	.rept	256
 	/* Keep locks aligned at 16-bytes */
-	.word	1
+	.word	__ARCH_SPIN_LOCK_UNLOCKED_VAL
 	.word	0
 	.word	0
 	.word	0
