+2003-10-09 Kaz Kojima <kkojima@rr.iij4u.or.jp>
+
+ * sysdeps/unix/sysv/linux/sh/syscalls.list: Add waitpid.
+
2003-10-10 Carlos O'Donell <carlos@baldric.uwo.ca>
* sysdeps/hppa/Makefile (CFLAGS-malloc.c): Variable removed.
+2003-10-09 Kaz Kojima <kkojima@rr.iij4u.or.jp>
+
+ * sysdeps/unix/sysv/linux/sh/lowlevellock.h: Completely revamp the
+ locking macros. No distinction between normal and mutex locking
+ anymore.
+ * sysdeps/unix/sysv/linux/sh/lowlevellock.S: Rewrite mutex locking.
+ Merge bits from lowlevelmutex.S we still need.
+ * sysdeps/unix/sysv/linux/sh/libc-lowlevelmutex.S: Remove.
+ * sysdeps/unix/sysv/linux/sh/lowlevelmutex.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/not-cancel.h: New file.
+ * sysdeps/unix/sysv/linux/sh/pthread_barrier_wait.S: Adjust for
+ new mutex implementation.
+ * sysdeps/unix/sysv/linux/sh/pthread_cond_broadcast.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/pthread_cond_signal.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/pthread_cond_timedwait.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/pthread_cond_wait.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/pthread_rwlock_rdlock.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/pthread_rwlock_timedrdlock.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/pthread_rwlock_timedwrlock.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/pthread_rwlock_unlock.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/pthread_rwlock_wrlock.S: Likewise.
+ * sysdeps/unix/sysv/linux/sh/sysdep-cancel.h (PSEUDO): Also define
+ symbol for entry point to avoid cancellation.
+
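The revamped SH lock follows the three-state futex protocol spelled out in the new lowlevellock.h: 0 untaken, 1 taken by one user, 2 taken by more users. A minimal C sketch of that protocol (illustrative only; GCC __sync builtins and a raw futex syscall stand in for the gUSA/CMPXCHG assembly below):

#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

static void futex_wait (int *futex, int val)
{
  /* Sleep only while *futex still equals VAL.  */
  syscall (SYS_futex, futex, FUTEX_WAIT, val, NULL);
}

static void futex_wake (int *futex, int nr)
{
  syscall (SYS_futex, futex, FUTEX_WAKE, nr);
}

static void sketch_lock (int *futex)
{
  /* Fast path (inline lll_mutex_lock): 0 -> 1.  */
  int val = __sync_val_compare_and_swap (futex, 0, 1);
  while (val != 0)
    {
      /* Slow path (__lll_mutex_lock_wait): mark the lock contended (2)
         and sleep; on wake-up try to take it in the contended state.  */
      if (val == 2 || __sync_val_compare_and_swap (futex, 1, 2) != 0)
        futex_wait (futex, 2);
      val = __sync_val_compare_and_swap (futex, 0, 2);
    }
}

static void sketch_unlock (int *futex)
{
  /* If the old value was not 1, waiters may exist: reset the futex and
     wake one of them, which is what __lll_mutex_unlock_wake does.  */
  if (__sync_fetch_and_sub (futex, 1) != 1)
    {
      *futex = 0;
      futex_wake (futex, 1);
    }
}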
2003-10-07 Jakub Jelinek <jakub@redhat.com>
* sysdeps/unix/sysv/linux/i386/sysdep-cancel.h: Backout 2003-10-02
+++ /dev/null
-/* Copyright (C) 2003 Free Software Foundation, Inc.
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, write to the Free
- Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
- 02111-1307 USA. */
-
-#include "lowlevelmutex.S"
#define FUTEX_WAKE 1
- .globl __lll_lock_wait
- .type __lll_lock_wait,@function
- .hidden __lll_lock_wait
+ .globl __lll_mutex_lock_wait
+ .type __lll_mutex_lock_wait,@function
+ .hidden __lll_mutex_lock_wait
.align 5
-__lll_lock_wait:
+__lll_mutex_lock_wait:
+ mov.l r8, @-r15
mov r4, r6
- mov r5, r4
+ mov r5, r8
mov #0, r7 /* No timeout. */
mov #FUTEX_WAIT, r5
-2:
- add #-1, r6 /* account for the preceeded xadd. */
+1:
+ mov #2, r4
+ cmp/eq r4, r6
+ bt 3f
+
+ mov #1, r3
+ CMPXCHG (r3, @r8, r4, r2)
+ tst r2, r2
+ bt 2f
+
+3:
+ mov r8, r4
mov #SYS_futex, r3
extu.b r3, r3
trapa #0x14
SYSCALL_INST_PAD
- mov #-1, r3
- XADD (r3, @r4, r2)
- tst r3, r3
- bf/s 2b
- mov r2, r6
+2:
+ mov #0, r3
+ mov #2, r4
+ CMPXCHG (r3, @r8, r4, r2)
+ bf 1b
- mov #-1, r1
- mov.l r1, @r4
- rts
+ mov.l @r15+, r8
+ ret
mov r2, r0
- .size __lll_lock_wait,.-__lll_lock_wait
+ .size __lll_mutex_lock_wait,.-__lll_mutex_lock_wait
#ifdef NOT_IN_libc
- .globl lll_unlock_wake_cb
- .type lll_unlock_wake_cb,@function
- .hidden lll_unlock_wake_cb
+ .globl __lll_mutex_timedlock_wait
+ .type __lll_mutex_timedlock_wait,@function
+ .hidden __lll_mutex_timedlock_wait
.align 5
-lll_unlock_wake_cb:
+__lll_mutex_timedlock_wait:
+ /* Check for a valid timeout value. */
+ mov.l @(4,r6), r1
+ mov.l .L1g, r0
+ cmp/hs r0, r1
+ bt 3f
+
+ mov.l r10, @-r15
+ mov.l r9, @-r15
+ mov.l r8, @-r15
+ mov r4, r10
+ mov r6, r9
+ mov r5, r8
+
+ /* Stack frame for the timespec and timeval structs. */
+ add #-8, r15
- .align 2
- mova 1f, r0
- mov r15, r1
- mov #-6, r15
-0:
- mov.l @r4, r2
- add #1, r2
- mov.l r2, @r4
1:
- mov r1, r15
- cmp/pl r2
- bf 2f
+ /* Get current time. */
+ mov r15, r4
+ mov #0, r5
+ mov #SYS_gettimeofday, r3
+ trapa #0x12
+ SYSCALL_INST_PAD
+
+ /* Compute relative timeout. */
+ mov.l @(4,r15), r0
+ mov.w .L1k, r1
+ dmulu.l r0, r1 /* Micro seconds to nano seconds. */
+ mov.l @r9, r2
+ mov.l @(4,r9), r3
+ mov.l @r15, r0
+ sts macl, r1
+ sub r0, r2
+ clrt
+ subc r1, r3
+ bf 4f
+ mov.l .L1g, r1
+ add r1, r3
+ add #-1, r2
+4:
+ cmp/pz r2
+ bf 5f /* Time is already up. */
+
+ mov.l r2, @r15 /* Store relative timeout. */
+ mov.l r3, @(4,r15)
+
+ mov #1, r3
+ mov #2, r4
+ CMPXCHG (r3, @r8, r4, r2)
+ bt 8f
+
+ mov r8, r4
+ mov #FUTEX_WAIT, r5
+ mov r10, r6
+ mov r15, r7
+ mov #SYS_futex, r3
+ extu.b r3, r3
+ trapa #0x14
+ SYSCALL_INST_PAD
+ mov r0, r4
+
+8:
+ mov #0, r3
+ mov #2, r4
+ CMPXCHG (r3, @r8, r4, r2)
+ bf 7f
+
+6:
+ add #8, r15
+ mov.l @r15+, r8
+ mov.l @r15+, r9
rts
+ mov.l @r15+, r10
+7:
+ /* Check whether the time expired. */
+ mov #-ETIMEDOUT, r1
+ cmp/eq r4, r1
+ bt 5f
+ bra 1b
nop
- .size lll_unlock_wake_cb,.-lll_unlock_wake_cb
+3:
+ rts
+ mov #EINVAL, r0
+5:
+ bra 6b
+ mov #ETIMEDOUT, r0
+
+.L1k:
+ .word 1000
+ .align 2
+.L1g:
+ .long 1000000000
+
+ .size __lll_mutex_timedlock_wait,.-__lll_mutex_timedlock_wait
#endif
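The loop above rebuilds a relative timeout from the caller's absolute timespec on every iteration. A C sketch of that computation (illustrative, not the glibc sources):

#include <errno.h>
#include <sys/time.h>
#include <time.h>

static int relative_timeout (const struct timespec *abstime,
                             struct timespec *rel)
{
  if (abstime->tv_nsec >= 1000000000)
    return EINVAL;                    /* Invalid timeout value (.L1g check).  */

  struct timeval now;
  gettimeofday (&now, NULL);          /* The SYS_gettimeofday trap above.  */

  rel->tv_sec = abstime->tv_sec - now.tv_sec;
  rel->tv_nsec = abstime->tv_nsec - now.tv_usec * 1000;  /* .L1k scaling.  */
  if (rel->tv_nsec < 0)
    {
      rel->tv_nsec += 1000000000;     /* Borrow from the seconds.  */
      --rel->tv_sec;
    }
  if (rel->tv_sec < 0)
    return ETIMEDOUT;                 /* Time is already up.  */
  return 0;
}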
- .globl __lll_unlock_wake
- .type __lll_unlock_wake,@function
- .hidden __lll_unlock_wake
-__lll_unlock_wake:
-2:
+#ifdef NOT_IN_libc
+ .globl lll_unlock_wake_cb
+ .type lll_unlock_wake_cb,@function
+ .hidden lll_unlock_wake_cb
+ .align 5
+lll_unlock_wake_cb:
+ DEC (@r4, r2)
+ tst r2, r2
+ bt 1f
+
mov #FUTEX_WAKE, r5
mov #1, r6 /* Wake one thread. */
mov #0, r7
- mov.l r6, @r4 /* Stores 1. */
+ mov.l r7, @r4 /* Stores 0. */
mov #SYS_futex, r3
extu.b r3, r3
trapa #0x14
SYSCALL_INST_PAD
+
+1:
rts
nop
- .size __lll_unlock_wake,.-__lll_unlock_wake
+ .size lll_unlock_wake_cb,.-lll_unlock_wake_cb
+#endif
-#ifdef NOT_IN_libc
- .globl __lll_wait_tid
- .type __lll_wait_tid,@function
- .hidden __lll_wait_tid
-__lll_wait_tid:
- mov.l @r4, r6
-1:
- mov #FUTEX_WAIT, r5
+ .globl __lll_mutex_unlock_wake
+ .type __lll_mutex_unlock_wake,@function
+ .hidden __lll_mutex_unlock_wake
+ .align 5
+__lll_mutex_unlock_wake:
+ mov #FUTEX_WAKE, r5
+ mov #1, r6 /* Wake one thread. */
mov #0, r7
+ mov.l r7, @r4 /* Stores 0. */
mov #SYS_futex, r3
extu.b r3, r3
trapa #0x14
SYSCALL_INST_PAD
-
- mov r0, r1
-
- mov.l @r4, r0
- tst r0, r0
- bf/s 1b
- mov r0, r6
rts
nop
- .size __lll_wait_tid,.-__lll_wait_tid
+ .size __lll_mutex_unlock_wake,.-__lll_mutex_unlock_wake
+#ifdef NOT_IN_libc
.globl __lll_timedwait_tid
.type __lll_timedwait_tid,@function
.hidden __lll_timedwait_tid
+ .align 5
__lll_timedwait_tid:
mov.l r9, @-r15
mov.l r8, @-r15
mov r4, r8
mov r5, r9
+
+ /* Stack frame for the timespec and timeval structs. */
add #-8, r15
2:
/* Compute relative timeout. */
mov.l @(4,r15), r0
- mov.w .L1k, r1
- dmulu.l r0, r1 /* Milli seconds to nano seconds. */
+ mov.w .L1k2, r1
+ dmulu.l r0, r1 /* Micro seconds to nano seconds. */
mov.l @r9, r2
mov.l @(4,r9), r3
mov.l @r15, r0
clrt
subc r1, r3
bf 5f
- mov.l .L1g, r1
+ mov.l .L1g2, r1
add r1, r3
add #-1, r2
5:
mov.l r2, @r15 /* Store relative timeout. */
mov.l r3, @(4,r15)
- mov.l @r8, r6
- tst r6, r6
+ mov.l @r8, r2
+ tst r2, r2
bt 4f
mov r8, r4
mov #FUTEX_WAIT, r5
+ mov r2, r6
mov r15, r7
mov #SYS_futex, r3
extu.b r3, r3
trapa #0x14
SYSCALL_INST_PAD
- mov.l @r8, r0
- tst r0, r0
+ mov.l @r8, r2
+ tst r2, r2
bf 1f
4:
mov #0, r0
mov.l @r15+, r8
rts
mov.l @r15+, r9
-
1:
+ /* Check whether the time expired. */
mov #-ETIMEDOUT, r1
cmp/eq r0, r1
bf 2b
bra 3b
mov #ETIMEDOUT, r0
-.L1k:
+.L1k2:
.word 1000
.align 2
-.L1g:
+.L1g2:
.long 1000000000
-
.size __lll_timedwait_tid,.-__lll_timedwait_tid
#endif
/* Initializer for compatibility lock. */
-#define LLL_MUTEX_LOCK_INITIALIZER (0)
+#define LLL_MUTEX_LOCK_INITIALIZER (0)
+#define LLL_MUTEX_LOCK_INITIALIZER_LOCKED (1)
extern int __lll_mutex_lock_wait (int val, int *__futex) attribute_hidden;
extern int __lll_mutex_timedlock_wait (int val, int *__futex,
1: mov r1,r15\n\
mov #-1,%0\n\
negc %0,%0"\
- : "=r" (__result) : "r" (&(futex)), "r" (1), "r" (0) \
+ : "=r" (__result) \
+ : "r" (&(futex)), \
+ "r" (LLL_MUTEX_LOCK_INITIALIZER_LOCKED), \
+ "r" (LLL_MUTEX_LOCK_INITIALIZER) \
: "r0", "r1", "r2", "t", "memory"); \
__result; })
.align 2\n\
mova 1f,r0\n\
mov r15,r1\n\
- mov #-6,r15\n\
+ mov #-8,r15\n\
0: mov.l @%2,%0\n\
- add %0,%1\n\
+ tst %0,%0\n\
+ bf 1f\n\
mov.l %1,@%2\n\
1: mov r1,r15"\
- : "=&r" (__result), "=&r" (val) : "r" (__futex), "1" (1) \
- : "r0", "r1", "memory"); \
+ : "=&r" (__result) : "r" (1), "r" (__futex) \
+ : "r0", "r1", "t", "memory"); \
if (__result) \
__lll_mutex_lock_wait (__result, __futex); })
.align 2\n\
mova 1f,r0\n\
mov r15,r1\n\
- mov #-6,r15\n\
+ mov #-8,r15\n\
0: mov.l @%2,%0\n\
- add %0,%1\n\
+ tst %0,%0\n\
+ bf 1f\n\
mov.l %1,@%2\n\
1: mov r1,r15"\
- : "=&r" (__result), "=&r" (val) : "r" (__futex), "1" (2) \
- : "r0", "r1", "memory"); \
+ : "=&r" (__result) : "r" (2), "r" (__futex) \
+ : "r0", "r1", "t", "memory"); \
if (__result) \
__lll_mutex_lock_wait (__result, __futex); })
.align 2\n\
mova 1f,r0\n\
mov r15,r1\n\
- mov #-6,r15\n\
+ mov #-8,r15\n\
0: mov.l @%2,%0\n\
- add %0,%1\n\
+ tst %0,%0\n\
+ bf 1f\n\
mov.l %1,@%2\n\
1: mov r1,r15"\
- : "=&r" (__result), "=&r" (val) : "r" (__futex), "1" (1) \
- : "r0", "r1", "memory"); \
+ : "=&r" (__result) : "r" (1), "r" (__futex) \
+ : "r0", "r1", "t", "memory"); \
if (__result) \
__result = __lll_mutex_timedlock_wait (__result, __futex, timeout); \
__result; })
typedef int lll_lock_t;
/* Initializers for lock. */
-#define LLL_LOCK_INITIALIZER (1)
-#define LLL_LOCK_INITIALIZER_LOCKED (0)
+#define LLL_LOCK_INITIALIZER (0)
+#define LLL_LOCK_INITIALIZER_LOCKED (1)
# ifdef NEED_SYSCALL_INST_PAD
} while (0)
-extern int __lll_lock_wait (int val, int *__futex) attribute_hidden;
-extern int __lll_unlock_wake (int *__futex) attribute_hidden;
extern int lll_unlock_wake_cb (int *__futex) attribute_hidden;
/* The states of a lock are:
- 1 - untaken
- 0 - taken by one user
- <0 - taken by more users */
-
-
-#define lll_trylock(futex) \
- ({ unsigned char __result; \
- __asm __volatile ("\
- .align 2\n\
- mova 1f,r0\n\
- nop\n\
- mov r15,r1\n\
- mov #-8,r15\n\
- 0: mov.l @%1,r2\n\
- cmp/eq r2,%3\n\
- bf 1f\n\
- mov.l %2,@%1\n\
- 1: mov r1,r15\n\
- mov #-1,%0\n\
- negc %0,%0"\
- : "=r" (__result) : "r" (&(futex)), "r" (0), "r" (1) \
- : "r0", "r1", "r2", "t", "memory"); \
- __result; })
-
-#define lll_lock(futex) \
- (void) ({ int __result, val, *__futex = &(futex); \
- __asm __volatile ("\
- .align 2\n\
- mova 1f,r0\n\
- mov r15,r1\n\
- mov #-6,r15\n\
- 0: mov.l @%2,%0\n\
- add %0,%1\n\
- mov.l %1,@%2\n\
- 1: mov r1,r15"\
- : "=&r" (__result), "=&r" (val) : "r" (__futex), "1" (-1) \
- : "r0", "r1", "memory"); \
- if (val < 0) \
- __lll_lock_wait (__result, __futex); })
+ 0 - untaken
+ 1 - taken by one user
+ 2 - taken by more users */
-#define lll_unlock(futex) \
- (void) ({ int __result, *__futex = &(futex); \
- __asm __volatile ("\
- .align 2\n\
- mova 1f,r0\n\
- mov r15,r1\n\
- mov #-6,r15\n\
- 0: mov.l @%1,%0\n\
- add #1,%0\n\
- mov.l %0,@%1\n\
- 1: mov r1,r15"\
- : "=&r" (__result) : "r" (__futex) \
- : "r0", "r1", "memory"); \
- if (__result <= 0) \
- __lll_unlock_wake (__futex); })
+#define lll_trylock(futex) lll_mutex_trylock (futex)
+#define lll_lock(futex) lll_mutex_lock (futex)
+#define lll_unlock(futex) lll_mutex_unlock (futex)
#define lll_islocked(futex) \
- (futex <= 0)
+ (futex != LLL_LOCK_INITIALIZER)
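With LLL_LOCK_INITIALIZER now 0, the generic operations can alias the mutex ones directly. The semantics the inline CMPXCHG trylock sequence provides amount to the following (sketch only, not the actual gUSA asm):

static int sketch_trylock (int *futex)
{
  /* Succeed only when the lock is untaken (0), leaving it in state 1
     (taken by one user); return nonzero when it is already held.  */
  return __sync_val_compare_and_swap (futex, 0, 1) != 0;
}

static int sketch_islocked (int *futex)
{
  return *futex != 0;                 /* != LLL_LOCK_INITIALIZER */
}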
/* The kernel notifies a process which uses CLONE_CLEARTID via futex
#define lll_wait_tid(tid) \
do { \
__typeof (tid) *__tid = &(tid); \
- if (*__tid != 0) \
- __lll_wait_tid (__tid); \
+ while (*__tid != 0) \
+ lll_futex_wait (__tid, *__tid); \
} while (0)
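The kernel clears the TID word and issues a FUTEX_WAKE on it when a thread created with the CLONE_CLEARTID mechanism exits, so lll_wait_tid above is just a FUTEX_WAIT loop. A standalone C sketch of the same wait (illustrative):

#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

static void sketch_wait_tid (volatile int *tidp)
{
  int tid;
  while ((tid = *tidp) != 0)
    /* Returns at once if *tidp no longer equals tid, so a wake-up
       between the load and the syscall is not lost.  */
    syscall (SYS_futex, (int *) tidp, FUTEX_WAIT, tid, NULL);
}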
extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
+++ /dev/null
-/* Copyright (C) 2003 Free Software Foundation, Inc.
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, write to the Free
- Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
- 02111-1307 USA. */
-
-#include <sysdep.h>
-#include <pthread-errnos.h>
-#include "lowlevel-atomic.h"
-
- .text
-
-#define SYS_gettimeofday __NR_gettimeofday
-#define SYS_futex 240
-#define FUTEX_WAIT 0
-#define FUTEX_WAKE 1
-
-
- .globl __lll_mutex_lock_wait
- .type __lll_mutex_lock_wait,@function
- .hidden __lll_mutex_lock_wait
- .align 5
-__lll_mutex_lock_wait:
- mov r4, r6
- mov r5, r4
- mov #0, r7 /* No timeout. */
- mov #FUTEX_WAIT, r5
-1:
- add #1, r6 /* account for the preceeded xadd. */
- mov #SYS_futex, r3
- extu.b r3, r3
- trapa #0x14
- SYSCALL_INST_PAD
-
- mov #1, r3
- XADD (r3, @r4, r6)
- tst r6, r6
- bf 1b
- mov #2, r1
- mov.l r1, @r4
- ret
- mov #0, r0
- .size __lll_mutex_lock_wait,.-__lll_mutex_lock_wait
-
-
-#ifdef NOT_IN_libc
- .globl __lll_mutex_timedlock_wait
- .type __lll_mutex_timedlock_wait,@function
- .hidden __lll_mutex_timedlock_wait
- .align 5
-__lll_mutex_timedlock_wait:
- /* Check for a valid timeout value. */
- mov.l @(4,r6), r1
- mov.l .L1g, r0
- cmp/hs r0, r1
- bt 3f
-
- mov.l r10, @-r15
- mov.l r9, @-r15
- mov.l r8, @-r15
- mov r5, r8
- mov r6, r9
- mov r4, r10
-
- /* Stack frame for the timespec and timeval structs. */
- add #-8, r15
-
-1:
- add #1, r10
-
- /* Get current time. */
- mov r15, r4
- mov #0, r5
- mov #SYS_gettimeofday, r3
- trapa #0x12
- SYSCALL_INST_PAD
-
- /* Compute relative timeout. */
- mov.l @(4,r15), r0
- mov.w .L1k, r1
- dmulu.l r0, r1 /* Micro seconds to nano seconds. */
- mov.l @r9, r2
- mov.l @(4,r9), r3
- mov.l @r15, r0
- sts macl, r1
- sub r0, r2
- clrt
- subc r1, r3
- bf 4f
- mov.l .L1g, r1
- add r1, r3
- add #-1, r2
-4:
- cmp/pz r2
- bf 5f /* Time is already up. */
-
- mov.l r2, @r15 /* Store relative timeout. */
- mov.l r3, @(4,r15)
-
- mov r8, r4
- mov #FUTEX_WAIT, r5
- mov r10, r6
- mov r15, r7
- mov #SYS_futex, r3
- extu.b r3, r3
- trapa #0x14
- SYSCALL_INST_PAD
- mov r0, r4
-
- mov #1, r10
- XADD (r10, @r8, r3)
- tst r3, r3
- bf 7f
-
- mov #2, r1
- mov.l r1, @r8
- mov #0, r0
-6:
- add #8, r15
- mov.l @r15+, r8
- mov.l @r15+, r9
- rts
- mov.l @r15+, r10
-7:
- /* Check whether the time expired. */
- mov #-ETIMEDOUT, r1
- cmp/eq r4, r1
- bt 5f
- bra 1b
- nop
-3:
- rts
- mov #EINVAL, r0
-5:
- bra 6b
- mov #ETIMEDOUT, r0
-
-.L1k:
- .word 1000
- .align 2
-.L1g:
- .long 1000000000
-
- .size __lll_mutex_timedlock_wait,.-__lll_mutex_timedlock_wait
-#endif
-
-
- .globl __lll_mutex_unlock_wake
- .type __lll_mutex_unlock_wake,@function
- .hidden __lll_mutex_unlock_wake
- .align 5
-__lll_mutex_unlock_wake:
- mov #FUTEX_WAKE, r5
- mov #1, r6 /* Wake one thread. */
- mov #0, r7
- mov.l r7, @r4 /* Stores 0. */
- mov #SYS_futex, r3
- extu.b r3, r3
- trapa #0x14
- SYSCALL_INST_PAD
- rts
- nop
- .size __lll_mutex_unlock_wake,.-__lll_mutex_unlock_wake
--- /dev/null
+#include "../i386/not-cancel.h"
mov r4, r8
/* Get the mutex. */
- mov #-1, r3
- XADD (r3, @(MUTEX,r8), r2)
- tst r3, r3
+ mov #0, r3
+ mov #1, r4
+ CMPXCHG (r3, @(MUTEX,r8), r4, r2)
bf 1f
/* One less waiter. If this was the last one needed wake
mov.l @(CURR_EVENT,r8), r6
/* Release the mutex. */
- INC (@(MUTEX,r8), r2)
- cmp/pl r2
+ DEC (@(MUTEX,r8), r2)
+ tst r2, r2
bf 6f
7:
/* Wait for the remaining threads. The call will return immediately
SYSCALL_INST_PAD
/* Release the mutex. */
- INC (@(MUTEX,r8), r2)
- cmp/pl r2
+ DEC (@(MUTEX,r8), r2)
+ tst r2, r2
bf 4f
5:
mov #-1, r0 /* == PTHREAD_BARRIER_SERIAL_THREAD */
.Lall:
.long 0x7fffffff
.Lwait0:
- .long __lll_lock_wait-.Lwait0b
+ .long __lll_mutex_lock_wait-.Lwait0b
.Lwake0:
- .long __lll_unlock_wake-.Lwake0b
+ .long __lll_mutex_unlock_wake-.Lwake0b
.Lwake1:
- .long __lll_unlock_wake-.Lwake1b
+ .long __lll_mutex_unlock_wake-.Lwake1b
.size pthread_barrier_wait,.-pthread_barrier_wait
mov r4, r8
/* Get internal lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if cond_lock != 0
- XADD (r3, @(cond_lock,r8), r2)
+ CMPXCHG (r3, @(cond_lock,r8), r4, r2)
#else
- XADD (r3, @r8, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#endif
- tst r2, r2
bf 1f
2:
mov.l @(total_seq+4,r8),r0
mov r4, r8
/* Get internal lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if cond_lock != 0
- XADD (r3, @(cond_lock,r8), r2)
+ CMPXCHG (r3, @(cond_lock,r8), r4, r2)
#else
- XADD (r3, @r8, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#endif
- tst r2, r2
bf 1f
2:
mov.l @(total_seq+4,r8),r0
mov.l r0,@(wakeup_seq,r8)
mov.l r1,@(wakeup_seq+4,r8)
+ /* Wake up one thread. */
mov r8, r4
add #wakeup_seq, r4
-#ifdef __ASSUME_FUTEX_REQUEUE
- /* Wake up one thread by moving it to the internal lock futex. */
- mov #FUTEX_REQUEUE, r5
- mov #0, r6
- mov #1, r7
- mov r8, r0
-# if cond_lock != 0
- add #cond_lock, r0
-# endif
- mov #SYS_futex, r3
- extu.b r3, r3
- trapa #0x15
-#else
mov #FUTEX_WAKE, r5
mov #1, r6
mov #0, r7
mov #SYS_futex, r3
extu.b r3, r3
trapa #0x14
-#endif
SYSCALL_INST_PAD
4:
mov r6, r13
/* Get internal lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if cond_lock != 0
- XADD (r3, @(cond_lock,r8), r2)
+ CMPXCHG (r3, @(cond_lock,r8), r4, r2)
#else
- XADD (r3, @r8, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#endif
- tst r2, r2
bt 2f
bra 1f
nop
.Ldisable1b:
/* Lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if cond_lock != 0
- XADD (r3, @(cond_lock,r8), r2)
+ CMPXCHG (r3, @(cond_lock,r8), r4, r2)
#else
- XADD (r3, @r8, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#endif
- tst r2, r2
bf 5f
6:
mov.l @(woken_seq,r8), r0
mov.l @(4,r9), r8
/* Get internal lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if cond_lock != 0
- XADD (r3, @(cond_lock,r8), r2)
+ CMPXCHG (r3, @(cond_lock,r8), r4, r2)
#else
- XADD (r3, @r8, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#endif
- tst r2, r2
bt 1f
mov r8, r5
#if cond_lock != 0
add #cond_lock, r5
#endif
- mov r2, r4
mov.l .Lwait0, r1
bsrf r1
- nop
+ mov r2, r4
.Lwait0b:
1:
mov #1, r2
mov r5, r9
/* Get internal lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if cond_lock != 0
- XADD (r3, @(cond_lock,r8), r2)
+ CMPXCHG (r3, @(cond_lock,r8), r4, r2)
#else
- XADD (r3, @r8, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#endif
- tst r2, r2
bt 2f
bra 1f
nop
.Ldisable0b:
/* Lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if cond_lock != 0
- XADD (r3, @(cond_lock,r8), r2)
+ CMPXCHG (r3, @(cond_lock,r8), r4, r2)
#else
- XADD (r3, @r8, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#endif
- tst r2, r2
bf 5f
6:
mov.l @(woken_seq,r8), r0
mov r4, r8
/* Get the lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if MUTEX == 0
- XADD (r3, @r4, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#else
- XADD (r3, @(MUTEX,r4), r2)
+ CMPXCHG (r3, @(MUTEX,r8), r4, r2)
#endif
- tst r2, r2
bf 1f
2:
mov.l @(WRITER,r8), r0
SYSCALL_INST_PAD
/* Reget the lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if MUTEX == 0
- XADD (r3, @r4, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#else
- XADD (r3, @(MUTEX,r4), r2)
+ CMPXCHG (r3, @(MUTEX,r8), r4, r2)
#endif
- tst r2, r2
bf 12f
13:
mov.l @(READERS_QUEUED,r8), r0
mov r5, r9
/* Get the lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if MUTEX == 0
- XADD (r3, @r4, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#else
- XADD (r3, @(MUTEX,r4), r2)
+ CMPXCHG (r3, @(MUTEX,r8), r4, r2)
#endif
- tst r2, r2
bf 1f
2:
mov.l @(WRITER,r8), r0
17:
/* Reget the lock. */
- mov r8, r4
- mov #1, r5
+ mov #0, r5
+ mov #1, r4
#if MUTEX == 0
- XADD (r5, @r4, r2)
+ CMPXCHG (r5, @r8, r4, r2)
#else
- XADD (r5, @(MUTEX,r4), r2)
+ CMPXCHG (r5, @(MUTEX,r8), r4, r2)
#endif
- tst r2, r2
bf 12f
13:
mov r5, r9
/* Get the lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if MUTEX == 0
- XADD (r3, @r4, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#else
- XADD (r3, @(MUTEX,r4), r2)
+ CMPXCHG (r3, @(MUTEX,r8), r4, r2)
#endif
- tst r2, r2
bf 1f
2:
mov.l @(WRITER,r8), r0
17:
/* Reget the lock. */
- mov r8, r4
- mov #1, r5
+ mov #0, r5
+ mov #1, r4
#if MUTEX == 0
- XADD (r5, @r4, r2)
+ CMPXCHG (r5, @r8, r4, r2)
#else
- XADD (r5, @(MUTEX,r4), r2)
+ CMPXCHG (r5, @(MUTEX,r8), r4, r2)
#endif
- tst r2, r2
bf 12f
13:
mov r4, r8
/* Get the lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if MUTEX == 0
- XADD (r3, @r4, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#else
- XADD (r3, @(MUTEX,r4), r2)
+ CMPXCHG (r3, @(MUTEX,r8), r4, r2)
#endif
- tst r2, r2
bf 1f
2:
mov.l @(WRITER,r8), r0
mov r4, r8
/* Get the lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if MUTEX == 0
- XADD (r3, @r4, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#else
- XADD (r3, @(MUTEX,r4), r2)
+ CMPXCHG (r3, @(MUTEX,r8), r4, r2)
#endif
- tst r2, r2
bf 1f
2:
mov.l @(WRITER,r8), r0
SYSCALL_INST_PAD
/* Reget the lock. */
- mov #1, r3
+ mov #0, r3
+ mov #1, r4
#if MUTEX == 0
- XADD (r3, @r4, r2)
+ CMPXCHG (r3, @r8, r4, r2)
#else
- XADD (r3, @(MUTEX,r4), r2)
+ CMPXCHG (r3, @(MUTEX,r8), r4, r2)
#endif
- tst r2, r2
bf 12f
13:
mov.l @(WRITERS_QUEUED,r8), r0
ENTRY (name); \
SINGLE_THREAD_P; \
bf .Lpseudo_cancel; \
+ .type __##syscall_name##_nocancel,@function; \
+ .globl __##syscall_name##_nocancel; \
+ __##syscall_name##_nocancel: \
DO_CALL (syscall_name, args); \
mov r0,r1; \
mov _IMM12,r2; \
bt .Lsyscall_error; \
bra .Lpseudo_end; \
nop; \
+ .size __##syscall_name##_nocancel,.-__##syscall_name##_nocancel; \
.Lpseudo_cancel: \
sts.l pr,@-r15; \
add _IMM16,r15; \
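The extra __<syscall>_nocancel symbol gives internal callers (via the new not-cancel.h) an entry that skips the cancellation handling in .Lpseudo_cancel. A conceptual C analogue of the two entry points, with hypothetical names (illustrative only):

#include <pthread.h>
#include <unistd.h>

/* Stands in for the DO_CALL body shared by both entries.  */
static ssize_t raw_write (int fd, const void *buf, size_t n)
{
  return write (fd, buf, n);
}

/* _nocancel-style entry: straight to the syscall, never a cancellation
   point, safe to use from cleanup code.  */
ssize_t my_write_nocancel (int fd, const void *buf, size_t n)
{
  return raw_write (fd, buf, n);
}

/* Public entry: makes the call a cancellation point, roughly what the
   cancel-handling path around the syscall arranges.  */
ssize_t my_write (int fd, const void *buf, size_t n)
{
  int oldtype;
  pthread_setcanceltype (PTHREAD_CANCEL_ASYNCHRONOUS, &oldtype);
  ssize_t ret = raw_write (fd, buf, n);
  pthread_setcanceltype (oldtype, NULL);
  return ret;
}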