parisc: Switch to more fine grained lws locks

Increase the number of lws locks to 256 entries (instead of 16) and
choose the lock entry based on bits 3-11 (instead of 4-7) of the
relevant address. With this change we achieve more fine-grained locking
in futex syscalls and thus reduce the number of possible stalls.

Signed-off-by: John David Anglin <dave.anglin@bell.net>
Signed-off-by: Helge Deller <deller@gmx.de>

commit 53a42b6324
parent 2a7d4eed57
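As a quick illustration of the new hashing, here is a minimal user-space
sketch (not kernel code; the lock_for() helper and the stand-in
lws_lock_start array are made up for this example). It mirrors the index
computation from the futex.h hunks below: the table holds 256 locks of
16 bytes each, and low-order address bits pick the slot.

    #include <stdio.h>
    #include <stdint.h>

    /* Stand-in for the kernel's lws_lock_start: 256 locks, 16 bytes
     * each, viewed as an array of u32 words (4 words per lock). */
    static uint32_t lws_lock_start[256 * 4];

    /* Hypothetical helper mirroring the indexing in
     * _futex_spin_lock_irqsave(): the mask keeps low address bits and
     * the >> 1 turns the byte offset into a u32 index, so each hash
     * bucket lands on a 16-byte lock slot. */
    static uint32_t *lock_for(const void *uaddr)
    {
        long index = ((long)(uintptr_t)uaddr & 0x3f8) >> 1;
        return &lws_lock_start[index];
    }

    int main(void)
    {
        uint32_t futex_word;
        printf("lock slot (word index): %ld\n",
               (long)(lock_for(&futex_word) - lws_lock_start));
        return 0;
    }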
@@ -16,7 +16,7 @@ static inline void
 _futex_spin_lock_irqsave(u32 __user *uaddr, unsigned long int *flags)
 {
 	extern u32 lws_lock_start[];
-	long index = ((long)uaddr & 0xf0) >> 2;
+	long index = ((long)uaddr & 0x3f8) >> 1;
 	arch_spinlock_t *s = (arch_spinlock_t *)&lws_lock_start[index];
 	local_irq_save(*flags);
 	arch_spin_lock(s);
@@ -26,7 +26,7 @@ static inline void
 _futex_spin_unlock_irqrestore(u32 __user *uaddr, unsigned long int *flags)
 {
 	extern u32 lws_lock_start[];
-	long index = ((long)uaddr & 0xf0) >> 2;
+	long index = ((long)uaddr & 0x3f8) >> 1;
 	arch_spinlock_t *s = (arch_spinlock_t *)&lws_lock_start[index];
 	arch_spin_unlock(s);
 	local_irq_restore(*flags);
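To see why the wider hash reduces stalls, consider two futex words 256
bytes apart: the old mask (0xf0) discards bit 8, so they always hash to
the same lock, while the new mask (0x3f8) keeps it. A small sketch
(old_index()/new_index() are hypothetical names for the two
computations above):

    #include <stdio.h>

    /* Old and new hash-index computations, lifted from the hunks above. */
    static long old_index(long uaddr) { return (uaddr & 0xf0) >> 2; }
    static long new_index(long uaddr) { return (uaddr & 0x3f8) >> 1; }

    int main(void)
    {
        long a = 0x1000, b = 0x1100;	/* 256 bytes apart */

        /* Old hash ignores bit 8, so both map to the same lock... */
        printf("old: %ld vs %ld\n", old_index(a), old_index(b));
        /* ...while the new hash keeps bit 8 and separates them. */
        printf("new: %ld vs %ld\n", new_index(a), new_index(b));
        return 0;
    }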
@@ -571,8 +571,8 @@ lws_compare_and_swap:
 	ldil	L%lws_lock_start, %r20
 	ldo	R%lws_lock_start(%r20), %r28
 
-	/* Extract four bits from r26 and hash lock (Bits 4-7) */
-	extru	%r26, 27, 4, %r20
+	/* Extract eight bits from r26 and hash lock (Bits 3-11) */
+	extru	%r26, 28, 8, %r20
 
 	/* Find lock to use, the hash is either one of 0 to
 	   15, multiplied by 16 (keep it 16-byte aligned)
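The extru change is the core of this hunk. On PA-RISC, extru r,p,len,t
right-justifies the len-bit field of r whose rightmost bit sits at
big-endian position p (bit 0 is the MSB, bit 31 the LSB). A C model of
the instruction, as a sketch, shows that the new operands select the
eight address bits starting at bit 3, giving 256 buckets instead of 16:

    #include <stdio.h>
    #include <stdint.h>

    /* C model of PA-RISC "extru r, p, len, t": extract the len-bit
     * field of r ending at big-endian bit p (bit 0 = MSB, bit 31 =
     * LSB), zero-extended. */
    static uint32_t extru(uint32_t r, int p, int len)
    {
        return (r >> (31 - p)) & ((1u << len) - 1u);
    }

    int main(void)
    {
        uint32_t uaddr = 0x12345678;

        /* Old: 4 bits ending at bit 27 -> 16 buckets. */
        printf("old hash: %u\n", extru(uaddr, 27, 4));
        /* New: 8 bits ending at bit 28 -> 256 buckets. */
        printf("new hash: %u\n", extru(uaddr, 28, 8));
        return 0;
    }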
@@ -761,8 +761,8 @@ cas2_lock_start:
 	ldil	L%lws_lock_start, %r20
 	ldo	R%lws_lock_start(%r20), %r28
 
-	/* Extract four bits from r26 and hash lock (Bits 4-7) */
-	extru	%r26, 27, 4, %r20
+	/* Extract eight bits from r26 and hash lock (Bits 3-11) */
+	extru	%r26, 28, 8, %r20
 
 	/* Find lock to use, the hash is either one of 0 to
 	   15, multiplied by 16 (keep it 16-byte aligned)
@@ -950,7 +950,7 @@ END(sys_call_table64)
 	.align	L1_CACHE_BYTES
 ENTRY(lws_lock_start)
 	/* lws locks */
-	.rept	16
+	.rept	256
 	/* Keep locks aligned at 16-bytes */
 	.word	1
 	.word	0
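For reference, here is a rough C picture of the table those directives
assemble (a sketch: the struct and array names are invented, and the
hunk's trailing context cuts off before the remaining padding words,
which I assume pad each entry to 16 bytes). The first word of each
entry is 1 because the PA-RISC ldcw-based spinlock treats 1 as unlocked
and 0 as locked:

    #include <stdint.h>

    /* Sketch of the lock table the .rept block assembles: 256 entries,
     * each 16 bytes (one lock word plus padding), first word = 1,
     * i.e. unlocked in the ldcw spinlock convention. */
    struct lws_lock {
        uint32_t lock;		/* .word 1 */
        uint32_t pad[3];	/* assumed .word 0 padding */
    };

    _Static_assert(sizeof(struct lws_lock) == 16,
                   "each lock entry stays 16 bytes");

    /* The [0 ... 255] range initializer is a GCC extension, as used
     * in the kernel itself. */
    static struct lws_lock lws_locks[256] = {
        [0 ... 255] = { .lock = 1 },
    };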