* sysdeps/sparc/sparc32/fpu/libm-test-ulps: Update.
* sysdeps/sparc/fpu/fraiseexcpt.c (__feraiseexcept): Use inline asm to make sure the compiler doesn't optimize insns out.
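
For the fraiseexcpt.c entry, here is a minimal sketch of the technique it names, not the glibc code itself (the helper name is made up for illustration): the operands and the result of the exception-raising arithmetic are routed through empty inline asm, so the compiler can neither fold the operation at compile time nor drop it as dead code.

/* Hypothetical helper, for illustration only.  */
static void
raise_divbyzero_sketch (void)
{
  double num = 1.0, den = 0.0, res;

  /* Hide the constants: the compiler must assume NUM and DEN may have been
     changed here, so the division cannot be constant-folded.  */
  __asm__ __volatile__ ("" : "+m" (num), "+m" (den));

  res = num / den;	/* executed at run time; raises the divide-by-zero flag */

  /* Mark the result as used so the division is not removed as dead code.  */
  __asm__ __volatile__ ("" : : "m" (res));
}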
@@ -78,6 +78,15 @@
     INTERNAL_SYSCALL_ERROR_P (__ret, __err); \
  })

#define lll_robust_mutex_dead(futexv) \
  do \
    { \
      int *__futexp = &(futexv); \
      atomic_or (__futexp, FUTEX_OWNER_DIED); \
      lll_futex_wake (__futexp, 1); \
    } \
  while (0)

/* Returns non-zero if error happened, zero if success. */
#ifdef __sparc32_atomic_do_lock
/* Avoid FUTEX_WAKE_OP if supporting pre-v9 CPUs. */
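
As an aside on the hunk above: lll_robust_mutex_dead sets the FUTEX_OWNER_DIED bit in the lock word and wakes one waiter, and the next acquirer is expected to notice that bit and treat the protected data as possibly inconsistent. A small sketch of that consumer side, assuming the <linux/futex.h> bit definitions and C11 atomics (glibc itself reports this state to applications as EOWNERDEAD from pthread_mutex_lock):

#include <linux/futex.h>	/* FUTEX_OWNER_DIED, FUTEX_TID_MASK */
#include <stdatomic.h>
#include <stdio.h>

/* Hypothetical check a new owner could run after acquiring the lock word.  */
static void
check_previous_owner (atomic_int *lock_word)
{
  int val = atomic_load_explicit (lock_word, memory_order_relaxed);
  if (val & FUTEX_OWNER_DIED)
    /* The previous owner exited without unlocking; whatever the mutex
       protects may be inconsistent and needs recovery before reuse.  */
    printf ("owner (tid %d) died while holding the lock\n",
            val & FUTEX_TID_MASK);
}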
@@ -112,9 +121,18 @@ __lll_mutex_cond_trylock (int *futex)
}
#define lll_mutex_cond_trylock(futex) __lll_mutex_cond_trylock (&(futex))

static inline int
__attribute__ ((always_inline))
__lll_robust_mutex_trylock (int *futex, int id)
{
  return atomic_compare_and_exchange_val_acq (futex, id, 0) != 0;
}
#define lll_robust_mutex_trylock(futex, id) \
  __lll_robust_mutex_trylock (&(futex), id)


extern void __lll_lock_wait (int *futex) attribute_hidden;

extern int __lll_robust_lock_wait (int *futex) attribute_hidden;

static inline void
__attribute__ ((always_inline))
@@ -127,6 +145,17 @@ __lll_mutex_lock (int *futex)
}
#define lll_mutex_lock(futex) __lll_mutex_lock (&(futex))

static inline int
__attribute__ ((always_inline))
__lll_robust_mutex_lock (int *futex, int id)
{
  int result = 0;
  if (atomic_compare_and_exchange_bool_acq (futex, id, 0) != 0)
    result = __lll_robust_lock_wait (futex);
  return result;
}
#define lll_robust_mutex_lock(futex, id) \
  __lll_robust_mutex_lock (&(futex), id)

static inline void
__attribute__ ((always_inline))
@@ -139,10 +168,14 @@ __lll_mutex_cond_lock (int *futex)
}
#define lll_mutex_cond_lock(futex) __lll_mutex_cond_lock (&(futex))

#define lll_robust_mutex_cond_lock(futex, id) \
  __lll_robust_mutex_lock (&(futex), (id) | FUTEX_WAITERS)


extern int __lll_timedlock_wait (int *futex, const struct timespec *)
     attribute_hidden;

extern int __lll_robust_timedlock_wait (int *futex, const struct timespec *)
     attribute_hidden;

static inline int
__attribute__ ((always_inline))
@@ -158,6 +191,19 @@ __lll_mutex_timedlock (int *futex, const struct timespec *abstime)
#define lll_mutex_timedlock(futex, abstime) \
  __lll_mutex_timedlock (&(futex), abstime)

static inline int
__attribute__ ((always_inline))
__lll_robust_mutex_timedlock (int *futex, const struct timespec *abstime,
                              int id)
{
  int result = 0;
  if (atomic_compare_and_exchange_bool_acq (futex, id, 0) != 0)
    result = __lll_robust_timedlock_wait (futex, abstime);
  return result;
}
#define lll_robust_mutex_timedlock(futex, abstime, id) \
  __lll_robust_mutex_timedlock (&(futex), abstime, id)

#define lll_mutex_unlock(lock) \
  ((void) ({ \
    int *__futex = &(lock); \
@@ -166,6 +212,14 @@ __lll_mutex_timedlock (int *futex, const struct timespec *abstime)
    lll_futex_wake (__futex, 1); \
  }))

#define lll_robust_mutex_unlock(lock) \
  ((void) ({ \
    int *__futex = &(lock); \
    int __val = atomic_exchange_rel (__futex, 0); \
    if (__builtin_expect (__val & FUTEX_WAITERS, 0)) \
      lll_futex_wake (__futex, 1); \
  }))

#define lll_mutex_unlock_force(lock) \
  ((void) ({ \
    int *__futex = &(lock); \
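
Taken together, the macros above follow the usual futex fast path: compare-and-swap the lock word from 0 to the owner's id, sleep in the kernel under contention, and on unlock wake one sleeper only when the FUTEX_WAITERS bit is set. The following stand-alone sketch shows that shape using GCC builtins and a raw futex syscall; it is an illustration under those assumptions, not the glibc implementation, and it deliberately omits the owner-died handling and retry details that __lll_robust_lock_wait takes care of.

#include <linux/futex.h>	/* FUTEX_WAIT, FUTEX_WAKE, FUTEX_WAITERS, FUTEX_TID_MASK */
#include <sys/syscall.h>
#include <unistd.h>

static long
futex_call (int *uaddr, int op, int val)
{
  /* Raw futex(2) wrapper; glibc uses its lll_futex_* macros instead.  */
  return syscall (SYS_futex, uaddr, op, val, NULL, NULL, 0);
}

static void
sketch_lock (int *futex, int id)
{
  int expected = 0;

  /* Fast path: an uncontended acquisition is one CAS of 0 -> our id.  */
  if (__atomic_compare_exchange_n (futex, &expected, id, 0,
                                   __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
    return;

  for (;;)
    {
      /* Make sure the current owner will see that somebody is waiting.  */
      int val = __atomic_fetch_or (futex, FUTEX_WAITERS, __ATOMIC_RELAXED)
                | FUTEX_WAITERS;

      if ((val & FUTEX_TID_MASK) == 0)
        {
          /* Released meanwhile: try to grab it, keeping WAITERS set.  */
          expected = val;
          if (__atomic_compare_exchange_n (futex, &expected,
                                           id | FUTEX_WAITERS, 0,
                                           __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
            return;
          continue;
        }

      /* Still owned: sleep until the unlocker wakes us or the word changes.  */
      futex_call (futex, FUTEX_WAIT, val);
    }
}

static void
sketch_unlock (int *futex)
{
  int old = __atomic_exchange_n (futex, 0, __ATOMIC_RELEASE);
  if (old & FUTEX_WAITERS)
    futex_call (futex, FUTEX_WAKE, 1);
}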