mirror of
https://sourceware.org/git/glibc.git
synced 2025-07-29 11:41:21 +03:00
powerpc: Enforce compiler barriers on hardware transactions
Work around a GCC behavior with hardware transactional memory built-ins. GCC doesn't treat the PowerPC transactional built-ins as compiler barriers, moving instructions past the transaction boundaries and altering their atomicity.
This commit is contained in:
12
ChangeLog
12
ChangeLog
@@ -1,3 +1,15 @@
|
|||||||
|
2016-01-08 Tulio Magno Quites Machado Filho <tuliom@linux.vnet.ibm.com>
|
||||||
|
|
||||||
|
* sysdeps/unix/sysv/linux/powerpc/htm.h (__libc_tbegin,
|
||||||
|
__libc_tabort, __libc_tend): New wrappers that enforce compiler
|
||||||
|
barriers to their respective compiler built-ins.
|
||||||
|
* sysdeps/powerpc/nptl/elide.h (__get_new_count, ELIDE_LOCK,
|
||||||
|
ELIDE_TRYLOCK, __elide_unlock): Use the new wrappers.
|
||||||
|
* sysdeps/powerpc/sysdep.h: Likewise.
|
||||||
|
* sysdeps/unix/sysv/linux/powerpc/elision-lock.c: Likewise.
|
||||||
|
* sysdeps/unix/sysv/linux/powerpc/elision-trylock.c: Likewise.
|
||||||
|
* sysdeps/unix/sysv/linux/powerpc/elision-unlock.c: Likewise.
|
||||||
|
|
||||||
2016-01-08 Marko Myllynen <myllynen@redhat.com>
|
2016-01-08 Marko Myllynen <myllynen@redhat.com>
|
||||||
|
|
||||||
* scripts/config.guess: Revert previous shebang change.
|
* scripts/config.guess: Revert previous shebang change.
|
||||||
|
@@ -68,14 +68,14 @@ __get_new_count (uint8_t *adapt_count, int attempt)
|
|||||||
else \
|
else \
|
||||||
for (int i = __elision_aconf.try_tbegin; i > 0; i--) \
|
for (int i = __elision_aconf.try_tbegin; i > 0; i--) \
|
||||||
{ \
|
{ \
|
||||||
if (__builtin_tbegin (0)) \
|
if (__libc_tbegin (0)) \
|
||||||
{ \
|
{ \
|
||||||
if (is_lock_free) \
|
if (is_lock_free) \
|
||||||
{ \
|
{ \
|
||||||
ret = 1; \
|
ret = 1; \
|
||||||
break; \
|
break; \
|
||||||
} \
|
} \
|
||||||
__builtin_tabort (_ABORT_LOCK_BUSY); \
|
__libc_tabort (_ABORT_LOCK_BUSY); \
|
||||||
} \
|
} \
|
||||||
else \
|
else \
|
||||||
if (!__get_new_count (&adapt_count,i)) \
|
if (!__get_new_count (&adapt_count,i)) \
|
||||||
@@ -90,7 +90,7 @@ __get_new_count (uint8_t *adapt_count, int attempt)
|
|||||||
if (__elision_aconf.try_tbegin > 0) \
|
if (__elision_aconf.try_tbegin > 0) \
|
||||||
{ \
|
{ \
|
||||||
if (write) \
|
if (write) \
|
||||||
__builtin_tabort (_ABORT_NESTED_TRYLOCK); \
|
__libc_tabort (_ABORT_NESTED_TRYLOCK); \
|
||||||
ret = ELIDE_LOCK (adapt_count, is_lock_free); \
|
ret = ELIDE_LOCK (adapt_count, is_lock_free); \
|
||||||
} \
|
} \
|
||||||
ret; \
|
ret; \
|
||||||
@@ -102,7 +102,7 @@ __elide_unlock (int is_lock_free)
|
|||||||
{
|
{
|
||||||
if (is_lock_free)
|
if (is_lock_free)
|
||||||
{
|
{
|
||||||
__builtin_tend (0);
|
__libc_tend (0);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
|
@@ -180,7 +180,7 @@
|
|||||||
# define ABORT_TRANSACTION \
|
# define ABORT_TRANSACTION \
|
||||||
({ \
|
({ \
|
||||||
if (THREAD_GET_TM_CAPABLE ()) \
|
if (THREAD_GET_TM_CAPABLE ()) \
|
||||||
__builtin_tabort (_ABORT_SYSCALL); \
|
__libc_tabort (_ABORT_SYSCALL); \
|
||||||
})
|
})
|
||||||
#else
|
#else
|
||||||
# define ABORT_TRANSACTION
|
# define ABORT_TRANSACTION
|
||||||
|
@@ -52,12 +52,12 @@ __lll_lock_elision (int *lock, short *adapt_count, EXTRAARG int pshared)
|
|||||||
|
|
||||||
for (int i = aconf.try_tbegin; i > 0; i--)
|
for (int i = aconf.try_tbegin; i > 0; i--)
|
||||||
{
|
{
|
||||||
if (__builtin_tbegin (0))
|
if (__libc_tbegin (0))
|
||||||
{
|
{
|
||||||
if (*lock == 0)
|
if (*lock == 0)
|
||||||
return 0;
|
return 0;
|
||||||
/* Lock was busy. Fall back to normal locking. */
|
/* Lock was busy. Fall back to normal locking. */
|
||||||
__builtin_tabort (_ABORT_LOCK_BUSY);
|
__libc_tabort (_ABORT_LOCK_BUSY);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
|
@@ -31,7 +31,7 @@ int
|
|||||||
__lll_trylock_elision (int *futex, short *adapt_count)
|
__lll_trylock_elision (int *futex, short *adapt_count)
|
||||||
{
|
{
|
||||||
/* Implement POSIX semantics by forbidding nesting elided trylocks. */
|
/* Implement POSIX semantics by forbidding nesting elided trylocks. */
|
||||||
__builtin_tabort (_ABORT_NESTED_TRYLOCK);
|
__libc_tabort (_ABORT_NESTED_TRYLOCK);
|
||||||
|
|
||||||
/* Only try a transaction if it's worth it. */
|
/* Only try a transaction if it's worth it. */
|
||||||
if (*adapt_count > 0)
|
if (*adapt_count > 0)
|
||||||
@@ -39,14 +39,14 @@ __lll_trylock_elision (int *futex, short *adapt_count)
|
|||||||
goto use_lock;
|
goto use_lock;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (__builtin_tbegin (0))
|
if (__libc_tbegin (0))
|
||||||
{
|
{
|
||||||
if (*futex == 0)
|
if (*futex == 0)
|
||||||
return 0;
|
return 0;
|
||||||
|
|
||||||
/* Lock was busy. This is never a nested transaction.
|
/* Lock was busy. This is never a nested transaction.
|
||||||
End it, and set the adapt count. */
|
End it, and set the adapt count. */
|
||||||
__builtin_tend (0);
|
__libc_tend (0);
|
||||||
|
|
||||||
if (aconf.skip_lock_busy > 0)
|
if (aconf.skip_lock_busy > 0)
|
||||||
*adapt_count = aconf.skip_lock_busy;
|
*adapt_count = aconf.skip_lock_busy;
|
||||||
|
@@ -25,7 +25,7 @@ __lll_unlock_elision (int *lock, short *adapt_count, int pshared)
|
|||||||
{
|
{
|
||||||
/* When the lock was free we're in a transaction. */
|
/* When the lock was free we're in a transaction. */
|
||||||
if (*lock == 0)
|
if (*lock == 0)
|
||||||
__builtin_tend (0);
|
__libc_tend (0);
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
lll_unlock ((*lock), pshared);
|
lll_unlock ((*lock), pshared);
|
||||||
|
@@ -118,13 +118,44 @@
|
|||||||
__ret; \
|
__ret; \
|
||||||
})
|
})
|
||||||
|
|
||||||
#define __builtin_tbegin(tdb) _tbegin ()
|
#define __libc_tbegin(tdb) _tbegin ()
|
||||||
#define __builtin_tend(nested) _tend ()
|
#define __libc_tend(nested) _tend ()
|
||||||
#define __builtin_tabort(abortcode) _tabort (abortcode)
|
#define __libc_tabort(abortcode) _tabort (abortcode)
|
||||||
#define __builtin_get_texasru() _texasru ()
|
#define __builtin_get_texasru() _texasru ()
|
||||||
|
|
||||||
#else
|
#else
|
||||||
# include <htmintrin.h>
|
# include <htmintrin.h>
|
||||||
|
|
||||||
|
# ifdef __TM_FENCE__
|
||||||
|
/* New GCC behavior. */
|
||||||
|
# define __libc_tbegin(R) __builtin_tbegin (R);
|
||||||
|
# define __libc_tend(R) __builtin_tend (R);
|
||||||
|
# define __libc_tabort(R) __builtin_tabort (R);
|
||||||
|
# else
|
||||||
|
/* Work around an old GCC behavior. Earlier releases of GCC 4.9 and 5.0
|
||||||
|
did not treat __builtin_tbegin, __builtin_tend and
|
||||||
|
__builtin_tabort as compiler barriers, moving instructions into and
|
||||||
|
out of the transaction.
|
||||||
|
Remove this when glibc drops support for GCC 5.0. */
|
||||||
|
# define __libc_tbegin(R) \
|
||||||
|
({ __asm__ volatile("" ::: "memory"); \
|
||||||
|
unsigned int __ret = __builtin_tbegin (R); \
|
||||||
|
__asm__ volatile("" ::: "memory"); \
|
||||||
|
__ret; \
|
||||||
|
})
|
||||||
|
# define __libc_tabort(R) \
|
||||||
|
({ __asm__ volatile("" ::: "memory"); \
|
||||||
|
unsigned int __ret = __builtin_tabort (R); \
|
||||||
|
__asm__ volatile("" ::: "memory"); \
|
||||||
|
__ret; \
|
||||||
|
})
|
||||||
|
# define __libc_tend(R) \
|
||||||
|
({ __asm__ volatile("" ::: "memory"); \
|
||||||
|
unsigned int __ret = __builtin_tend (R); \
|
||||||
|
__asm__ volatile("" ::: "memory"); \
|
||||||
|
__ret; \
|
||||||
|
})
|
||||||
|
# endif /* __TM_FENCE__ */
|
||||||
#endif /* __HTM__ */
|
#endif /* __HTM__ */
|
||||||
|
|
||||||
#endif /* __ASSEMBLER__ */
|
#endif /* __ASSEMBLER__ */
|
||||||
|
Reference in New Issue
Block a user