
powerpc: Enforce compiler barriers on hardware transactions

Work around a GCC behavior with hardware transactional memory built-ins.
GCC does not treat the PowerPC transactional built-ins as compiler
barriers, so it may move instructions past the transaction boundaries,
breaking the atomicity of the transactions.
Author: Tulio Magno Quites Machado Filho
Date:   2015-12-28 12:24:43 -02:00
Commit: 42bf1c8971 (parent bc49a7afd3)
7 changed files with 58 additions and 15 deletions
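The fix wraps each built-in between two empty __asm__ volatile statements
with a "memory" clobber, which forbids the compiler from moving memory
accesses across them. As a rough sketch (not the glibc sources; my_tbegin
is a hypothetical wrapper, assuming a PowerPC compiler with -mhtm), the
pattern looks like this:

/* Hypothetical illustration of the barrier pattern applied below.  */
static inline unsigned int
my_tbegin (void)
{
  unsigned int ret;
  __asm__ volatile ("" ::: "memory");	/* compiler barrier */
  ret = __builtin_tbegin (0);		/* start a transaction */
  __asm__ volatile ("" ::: "memory");	/* compiler barrier */
  return ret;
}

The empty asm emits no instructions; the "memory" clobber alone is enough
to keep every load and store on its own side of the built-in.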

sysdeps/unix/sysv/linux/powerpc/elision-lock.c

@@ -52,12 +52,12 @@ __lll_lock_elision (int *lock, short *adapt_count, EXTRAARG int pshared)
   for (int i = aconf.try_tbegin; i > 0; i--)
     {
-      if (__builtin_tbegin (0))
+      if (__libc_tbegin (0))
	{
	  if (*lock == 0)
	    return 0;
	  /* Lock was busy.  Fall back to normal locking.  */
-	  __builtin_tabort (_ABORT_LOCK_BUSY);
+	  __libc_tabort (_ABORT_LOCK_BUSY);
	}
      else
	{

sysdeps/unix/sysv/linux/powerpc/elision-trylock.c

@@ -31,7 +31,7 @@ int
 __lll_trylock_elision (int *futex, short *adapt_count)
 {
   /* Implement POSIX semantics by forbidding nesting elided trylocks.  */
-  __builtin_tabort (_ABORT_NESTED_TRYLOCK);
+  __libc_tabort (_ABORT_NESTED_TRYLOCK);

   /* Only try a transaction if it's worth it.  */
   if (*adapt_count > 0)
@@ -39,14 +39,14 @@ __lll_trylock_elision (int *futex, short *adapt_count)
       goto use_lock;
     }

-  if (__builtin_tbegin (0))
+  if (__libc_tbegin (0))
    {
      if (*futex == 0)
	return 0;
      /* Lock was busy.  This is never a nested transaction.
	 End it, and set the adapt count.  */
-      __builtin_tend (0);
+      __libc_tend (0);
      if (aconf.skip_lock_busy > 0)
	*adapt_count = aconf.skip_lock_busy;

sysdeps/unix/sysv/linux/powerpc/elision-unlock.c

@@ -25,7 +25,7 @@ __lll_unlock_elision (int *lock, short *adapt_count, int pshared)
 {
   /* When the lock was free we're in a transaction.  */
   if (*lock == 0)
-    __builtin_tend (0);
+    __libc_tend (0);
   else
     {
       lll_unlock ((*lock), pshared);

sysdeps/unix/sysv/linux/powerpc/htm.h

@@ -118,13 +118,44 @@
      __ret; \
   })
-#define __builtin_tbegin(tdb) _tbegin ()
-#define __builtin_tend(nested) _tend ()
-#define __builtin_tabort(abortcode) _tabort (abortcode)
-#define __builtin_get_texasru() _texasru ()
+#define __libc_tbegin(tdb) _tbegin ()
+#define __libc_tend(nested) _tend ()
+#define __libc_tabort(abortcode) _tabort (abortcode)
+#define __builtin_get_texasru() _texasru ()
 #else
 # include <htmintrin.h>
+
+# ifdef __TM_FENCE__
+   /* New GCC behavior.  */
+#  define __libc_tbegin(R)  __builtin_tbegin (R)
+#  define __libc_tend(R)    __builtin_tend (R)
+#  define __libc_tabort(R)  __builtin_tabort (R)
+# else
+   /* Work around an old GCC behavior: earlier releases of GCC 4.9 and
+      5.0 did not treat __builtin_tbegin, __builtin_tend and
+      __builtin_tabort as compiler barriers, moving instructions into
+      and out of the transaction.
+      Remove this when glibc drops support for GCC 5.0.  */
+#  define __libc_tbegin(R)				\
+   ({ __asm__ volatile("" ::: "memory");		\
+      unsigned int __ret = __builtin_tbegin (R);	\
+      __asm__ volatile("" ::: "memory");		\
+      __ret;						\
+   })
+#  define __libc_tabort(R)				\
+   ({ __asm__ volatile("" ::: "memory");		\
+      unsigned int __ret = __builtin_tabort (R);	\
+      __asm__ volatile("" ::: "memory");		\
+      __ret;						\
+   })
+#  define __libc_tend(R)				\
+   ({ __asm__ volatile("" ::: "memory");		\
+      unsigned int __ret = __builtin_tend (R);		\
+      __asm__ volatile("" ::: "memory");		\
+      __ret;						\
+   })
+# endif /* __TM_FENCE__ */
 #endif /* __HTM__ */
 #endif /* __ASSEMBLER__ */
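
To see why the barriers matter, consider a hedged example (not part of
this commit) that uses the wrappers above, again assuming a PowerPC
compiler with -mhtm:

/* Hypothetical example, not from glibc.  */
static int counter;

static void
increment_transactionally (void)
{
  if (__libc_tbegin (0))
    {
      /* This store must stay inside the transaction.  Without the
	 compiler barrier in __libc_tend, GCC could sink it past the
	 transaction end, so an abort would no longer roll it back.  */
      counter++;
      __libc_tend (0);
    }
}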