1
0
mirror of https://sourceware.org/git/glibc.git synced 2025-10-27 12:15:39 +03:00

atomics: Remove unused atomics

Remove all unused atomics.  Replace uses of catomic_increment and
catomic_decrement with atomic_fetch_add_relaxed, which maps to a standard
compiler builtin. Relaxed memory ordering is correct for simple counters
since they only need atomicity.

Reviewed-by: Adhemerval Zanella <adhemerval.zanella@linaro.org>
This commit is contained in:
Wilco Dijkstra
2025-09-10 09:07:39 +00:00
parent 245ea60b0e
commit 210ee29503
7 changed files with 13 additions and 830 deletions

View File

@@ -553,7 +553,7 @@ _dl_mcount (ElfW(Addr) frompc, ElfW(Addr) selfpc)
froms[newfromidx].here = &data[narcs];
froms[newfromidx].link = tos[to_index];
tos[to_index] = newfromidx;
catomic_increment (&narcs);
atomic_fetch_add_relaxed (&narcs, 1);
}
/* If we still have no entry stop searching and insert. */

View File

@@ -19,18 +19,11 @@
#ifndef _ATOMIC_H
#define _ATOMIC_H 1
/* This header defines three types of macros:
/* This header defines two types of macros:
- atomic arithmetic and logic operation on memory. They all
have the prefix "atomic_".
- conditionally atomic operations of the same kinds. These
always behave identical but can be faster when atomicity
is not really needed since only one thread has access to
the memory location. In that case the code is slower in
the multi-thread case. The interfaces have the prefix
"catomic_".
- support functions like barriers. They also have the prefix
"atomic_".
@@ -48,41 +41,6 @@
#include <atomic-machine.h>
/* Wrapper macros to call pre_NN_post (mem, ...) where NN is the
bit width of *MEM. The calling macro puts parens around MEM
and following args. */
#define __atomic_val_bysize(pre, post, mem, ...) \
({ \
__typeof ((__typeof (*(mem))) *(mem)) __atg1_result; \
if (sizeof (*mem) == 1) \
__atg1_result = pre##_8_##post (mem, __VA_ARGS__); \
else if (sizeof (*mem) == 2) \
__atg1_result = pre##_16_##post (mem, __VA_ARGS__); \
else if (sizeof (*mem) == 4) \
__atg1_result = pre##_32_##post (mem, __VA_ARGS__); \
else if (sizeof (*mem) == 8) \
__atg1_result = pre##_64_##post (mem, __VA_ARGS__); \
else \
abort (); \
__atg1_result; \
})
#define __atomic_bool_bysize(pre, post, mem, ...) \
({ \
int __atg2_result; \
if (sizeof (*mem) == 1) \
__atg2_result = pre##_8_##post (mem, __VA_ARGS__); \
else if (sizeof (*mem) == 2) \
__atg2_result = pre##_16_##post (mem, __VA_ARGS__); \
else if (sizeof (*mem) == 4) \
__atg2_result = pre##_32_##post (mem, __VA_ARGS__); \
else if (sizeof (*mem) == 8) \
__atg2_result = pre##_64_##post (mem, __VA_ARGS__); \
else \
abort (); \
__atg2_result; \
})
#if USE_ATOMIC_COMPILER_BUILTINS
# undef atomic_compare_and_exchange_val_acq
# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
@@ -107,159 +65,6 @@
!atomic_compare_exchange_acquire (mem, (void*)&__atg3_old, newval); \
})
# undef atomic_exchange_and_add
# define atomic_exchange_and_add(mem, val) atomic_fetch_add_relaxed(mem,val)
#endif
/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
Return the old *MEM value. */
#if !defined atomic_compare_and_exchange_val_acq \
&& defined __arch_compare_and_exchange_val_32_acq
# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
__atomic_val_bysize (__arch_compare_and_exchange_val,acq, \
mem, newval, oldval)
#endif
#ifndef catomic_compare_and_exchange_val_acq
# ifdef __arch_c_compare_and_exchange_val_32_acq
# define catomic_compare_and_exchange_val_acq(mem, newval, oldval) \
__atomic_val_bysize (__arch_c_compare_and_exchange_val,acq, \
mem, newval, oldval)
# else
# define catomic_compare_and_exchange_val_acq(mem, newval, oldval) \
atomic_compare_and_exchange_val_acq (mem, newval, oldval)
# endif
#endif
#ifndef catomic_compare_and_exchange_val_rel
# ifndef atomic_compare_and_exchange_val_rel
# define catomic_compare_and_exchange_val_rel(mem, newval, oldval) \
catomic_compare_and_exchange_val_acq (mem, newval, oldval)
# else
# define catomic_compare_and_exchange_val_rel(mem, newval, oldval) \
atomic_compare_and_exchange_val_rel (mem, newval, oldval)
# endif
#endif
#ifndef atomic_compare_and_exchange_val_rel
# define atomic_compare_and_exchange_val_rel(mem, newval, oldval) \
atomic_compare_and_exchange_val_acq (mem, newval, oldval)
#endif
/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
Return zero if *MEM was changed or non-zero if no exchange happened. */
#ifndef atomic_compare_and_exchange_bool_acq
# ifdef __arch_compare_and_exchange_bool_32_acq
# define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
__atomic_bool_bysize (__arch_compare_and_exchange_bool,acq, \
mem, newval, oldval)
# else
# define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
({ /* Cannot use __oldval here, because macros later in this file might \
call this macro with __oldval argument. */ \
__typeof (oldval) __atg3_old = (oldval); \
atomic_compare_and_exchange_val_acq (mem, newval, __atg3_old) \
!= __atg3_old; \
})
# endif
#endif
#ifndef catomic_compare_and_exchange_bool_acq
# ifdef __arch_c_compare_and_exchange_bool_32_acq
# define catomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
__atomic_bool_bysize (__arch_c_compare_and_exchange_bool,acq, \
mem, newval, oldval)
# else
# define catomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
({ /* Cannot use __oldval here, because macros later in this file might \
call this macro with __oldval argument. */ \
__typeof (oldval) __atg4_old = (oldval); \
catomic_compare_and_exchange_val_acq (mem, newval, __atg4_old) \
!= __atg4_old; \
})
# endif
#endif
/* Store NEWVALUE in *MEM and return the old value. */
#ifndef atomic_exchange_acq
# define atomic_exchange_acq(mem, newvalue) \
({ __typeof ((__typeof (*(mem))) *(mem)) __atg5_oldval; \
__typeof (mem) __atg5_memp = (mem); \
__typeof ((__typeof (*(mem))) *(mem)) __atg5_value = (newvalue); \
\
do \
__atg5_oldval = *__atg5_memp; \
while (__builtin_expect \
(atomic_compare_and_exchange_bool_acq (__atg5_memp, __atg5_value, \
__atg5_oldval), 0)); \
\
__atg5_oldval; })
#endif
#ifndef atomic_exchange_rel
# define atomic_exchange_rel(mem, newvalue) atomic_exchange_acq (mem, newvalue)
#endif
/* Add VALUE to *MEM and return the old value of *MEM. */
#ifndef atomic_exchange_and_add_acq
# ifdef atomic_exchange_and_add
# define atomic_exchange_and_add_acq(mem, value) \
atomic_exchange_and_add (mem, value)
# else
# define atomic_exchange_and_add_acq(mem, value) \
({ __typeof (*(mem)) __atg6_oldval; \
__typeof (mem) __atg6_memp = (mem); \
__typeof (*(mem)) __atg6_value = (value); \
\
do \
__atg6_oldval = *__atg6_memp; \
while (__builtin_expect \
(atomic_compare_and_exchange_bool_acq (__atg6_memp, \
__atg6_oldval \
+ __atg6_value, \
__atg6_oldval), 0)); \
\
__atg6_oldval; })
# endif
#endif
#ifndef atomic_exchange_and_add_rel
# define atomic_exchange_and_add_rel(mem, value) \
atomic_exchange_and_add_acq(mem, value)
#endif
#ifndef atomic_exchange_and_add
# define atomic_exchange_and_add(mem, value) \
atomic_exchange_and_add_acq(mem, value)
#endif
#ifndef catomic_exchange_and_add
# define catomic_exchange_and_add(mem, value) \
({ __typeof (*(mem)) __atg7_oldv; \
__typeof (mem) __atg7_memp = (mem); \
__typeof (*(mem)) __atg7_value = (value); \
\
do \
__atg7_oldv = *__atg7_memp; \
while (__builtin_expect \
(catomic_compare_and_exchange_bool_acq (__atg7_memp, \
__atg7_oldv \
+ __atg7_value, \
__atg7_oldv), 0)); \
\
__atg7_oldv; })
#endif
#ifndef atomic_max
# define atomic_max(mem, value) \
do { \
@@ -277,107 +82,6 @@
#endif
#ifndef catomic_max
# define catomic_max(mem, value) \
do { \
__typeof (*(mem)) __atg9_oldv; \
__typeof (mem) __atg9_memp = (mem); \
__typeof (*(mem)) __atg9_value = (value); \
do { \
__atg9_oldv = *__atg9_memp; \
if (__atg9_oldv >= __atg9_value) \
break; \
} while (__builtin_expect \
(catomic_compare_and_exchange_bool_acq (__atg9_memp, \
__atg9_value, \
__atg9_oldv), 0)); \
} while (0)
#endif
#ifndef atomic_min
# define atomic_min(mem, value) \
do { \
__typeof (*(mem)) __atg10_oldval; \
__typeof (mem) __atg10_memp = (mem); \
__typeof (*(mem)) __atg10_value = (value); \
do { \
__atg10_oldval = *__atg10_memp; \
if (__atg10_oldval <= __atg10_value) \
break; \
} while (__builtin_expect \
(atomic_compare_and_exchange_bool_acq (__atg10_memp, \
__atg10_value, \
__atg10_oldval), 0)); \
} while (0)
#endif
#ifndef atomic_add
# define atomic_add(mem, value) (void) atomic_exchange_and_add ((mem), (value))
#endif
#ifndef catomic_add
# define catomic_add(mem, value) \
(void) catomic_exchange_and_add ((mem), (value))
#endif
#ifndef atomic_increment
# define atomic_increment(mem) atomic_add ((mem), 1)
#endif
#ifndef catomic_increment
# define catomic_increment(mem) catomic_add ((mem), 1)
#endif
#ifndef atomic_increment_val
# define atomic_increment_val(mem) (atomic_exchange_and_add ((mem), 1) + 1)
#endif
#ifndef catomic_increment_val
# define catomic_increment_val(mem) (catomic_exchange_and_add ((mem), 1) + 1)
#endif
/* Add one to *MEM and return true iff it's now zero. */
#ifndef atomic_increment_and_test
# define atomic_increment_and_test(mem) \
(atomic_exchange_and_add ((mem), 1) + 1 == 0)
#endif
#ifndef atomic_decrement
# define atomic_decrement(mem) atomic_add ((mem), -1)
#endif
#ifndef catomic_decrement
# define catomic_decrement(mem) catomic_add ((mem), -1)
#endif
#ifndef atomic_decrement_val
# define atomic_decrement_val(mem) (atomic_exchange_and_add ((mem), -1) - 1)
#endif
#ifndef catomic_decrement_val
# define catomic_decrement_val(mem) (catomic_exchange_and_add ((mem), -1) - 1)
#endif
/* Subtract 1 from *MEM and return true iff it's now zero. */
#ifndef atomic_decrement_and_test
# define atomic_decrement_and_test(mem) \
(atomic_exchange_and_add ((mem), -1) == 1)
#endif
/* Decrement *MEM if it is > 0, and return the old value. */
#ifndef atomic_decrement_if_positive
# define atomic_decrement_if_positive(mem) \
@@ -398,142 +102,6 @@
#endif
#ifndef atomic_add_negative
# define atomic_add_negative(mem, value) \
({ __typeof (value) __atg12_value = (value); \
atomic_exchange_and_add (mem, __atg12_value) < -__atg12_value; })
#endif
#ifndef atomic_add_zero
# define atomic_add_zero(mem, value) \
({ __typeof (value) __atg13_value = (value); \
atomic_exchange_and_add (mem, __atg13_value) == -__atg13_value; })
#endif
#ifndef atomic_bit_set
# define atomic_bit_set(mem, bit) \
(void) atomic_bit_test_set(mem, bit)
#endif
#ifndef atomic_bit_test_set
# define atomic_bit_test_set(mem, bit) \
({ __typeof (*(mem)) __atg14_old; \
__typeof (mem) __atg14_memp = (mem); \
__typeof (*(mem)) __atg14_mask = ((__typeof (*(mem))) 1 << (bit)); \
\
do \
__atg14_old = (*__atg14_memp); \
while (__builtin_expect \
(atomic_compare_and_exchange_bool_acq (__atg14_memp, \
__atg14_old | __atg14_mask,\
__atg14_old), 0)); \
\
__atg14_old & __atg14_mask; })
#endif
/* Atomically *mem &= mask. */
#ifndef atomic_and
# define atomic_and(mem, mask) \
do { \
__typeof (*(mem)) __atg15_old; \
__typeof (mem) __atg15_memp = (mem); \
__typeof (*(mem)) __atg15_mask = (mask); \
\
do \
__atg15_old = (*__atg15_memp); \
while (__builtin_expect \
(atomic_compare_and_exchange_bool_acq (__atg15_memp, \
__atg15_old & __atg15_mask, \
__atg15_old), 0)); \
} while (0)
#endif
#ifndef catomic_and
# define catomic_and(mem, mask) \
do { \
__typeof (*(mem)) __atg20_old; \
__typeof (mem) __atg20_memp = (mem); \
__typeof (*(mem)) __atg20_mask = (mask); \
\
do \
__atg20_old = (*__atg20_memp); \
while (__builtin_expect \
(catomic_compare_and_exchange_bool_acq (__atg20_memp, \
__atg20_old & __atg20_mask,\
__atg20_old), 0)); \
} while (0)
#endif
/* Atomically *mem &= mask and return the old value of *mem. */
#ifndef atomic_and_val
# define atomic_and_val(mem, mask) \
({ __typeof (*(mem)) __atg16_old; \
__typeof (mem) __atg16_memp = (mem); \
__typeof (*(mem)) __atg16_mask = (mask); \
\
do \
__atg16_old = (*__atg16_memp); \
while (__builtin_expect \
(atomic_compare_and_exchange_bool_acq (__atg16_memp, \
__atg16_old & __atg16_mask,\
__atg16_old), 0)); \
\
__atg16_old; })
#endif
/* Atomically *mem |= mask and return the old value of *mem. */
#ifndef atomic_or
# define atomic_or(mem, mask) \
do { \
__typeof (*(mem)) __atg17_old; \
__typeof (mem) __atg17_memp = (mem); \
__typeof (*(mem)) __atg17_mask = (mask); \
\
do \
__atg17_old = (*__atg17_memp); \
while (__builtin_expect \
(atomic_compare_and_exchange_bool_acq (__atg17_memp, \
__atg17_old | __atg17_mask, \
__atg17_old), 0)); \
} while (0)
#endif
#ifndef catomic_or
# define catomic_or(mem, mask) \
do { \
__typeof (*(mem)) __atg18_old; \
__typeof (mem) __atg18_memp = (mem); \
__typeof (*(mem)) __atg18_mask = (mask); \
\
do \
__atg18_old = (*__atg18_memp); \
while (__builtin_expect \
(catomic_compare_and_exchange_bool_acq (__atg18_memp, \
__atg18_old | __atg18_mask,\
__atg18_old), 0)); \
} while (0)
#endif
/* Atomically *mem |= mask and return the old value of *mem. */
#ifndef atomic_or_val
# define atomic_or_val(mem, mask) \
({ __typeof (*(mem)) __atg19_old; \
__typeof (mem) __atg19_memp = (mem); \
__typeof (*(mem)) __atg19_mask = (mask); \
\
do \
__atg19_old = (*__atg19_memp); \
while (__builtin_expect \
(atomic_compare_and_exchange_bool_acq (__atg19_memp, \
__atg19_old | __atg19_mask,\
__atg19_old), 0)); \
\
__atg19_old; })
#endif
#ifndef atomic_full_barrier
# define atomic_full_barrier() __asm ("" ::: "memory")
#endif

View File

@@ -841,11 +841,11 @@ arena_get2 (size_t size, mstate avoid_arena)
enough address space to create that many arenas. */
if (__glibc_unlikely (n <= narenas_limit - 1))
{
if (catomic_compare_and_exchange_bool_acq (&narenas, n + 1, n))
if (atomic_compare_and_exchange_bool_acq (&narenas, n + 1, n))
goto repeat;
a = _int_new_arena (size);
if (__glibc_unlikely (a == NULL))
catomic_decrement (&narenas);
atomic_fetch_add_relaxed (&narenas, -1);
}
else
a = reused_arena (avoid_arena);

View File

@@ -4008,7 +4008,7 @@ _int_malloc (mstate av, size_t bytes)
if (__glibc_unlikely (pp != NULL && misaligned_chunk (pp))) \
malloc_printerr ("malloc(): unaligned fastbin chunk detected"); \
} \
while ((pp = catomic_compare_and_exchange_val_acq (fb, pp, victim)) \
while ((pp = atomic_compare_and_exchange_val_acq (fb, pp, victim)) \
!= victim); \
if ((unsigned long) (nb) <= (unsigned long) (get_max_fast ()))
@@ -4667,7 +4667,7 @@ _int_free_chunk (mstate av, mchunkptr p, INTERNAL_SIZE_T size, int have_lock)
old2 = old;
p->fd = PROTECT_PTR (&p->fd, old);
}
while ((old = catomic_compare_and_exchange_val_rel (fb, p, old2))
while ((old = atomic_compare_and_exchange_val_rel (fb, p, old2))
!= old2);
/* Check that size of fastbin chunk at the top is the same as

View File

@@ -354,7 +354,7 @@ this function is in @file{stdlib.h}.
@c that's protected by list_lock; next_free is only modified while
@c list_lock is held too. All other data members of an arena, as well
@c as the metadata of the memory areas assigned to it, are only modified
@c while holding the arena's mutex (fastbin pointers use catomic ops
@c while holding the arena's mutex (fastbin pointers use atomic ops
@c because they may be modified by free without taking the arena's
@c lock). Some reassurance was needed for fastbins, for it wasn't clear
@c how they were initialized. It turns out they are always
@@ -383,7 +383,7 @@ this function is in @file{stdlib.h}.
@c mutex_lock (arena lock) dup @asulock @aculock [returns locked]
@c __get_nprocs ext ok @acsfd
@c NARENAS_FROM_NCORES ok
@c catomic_compare_and_exchange_bool_acq ok
@c atomic_compare_and_exchange_bool_acq ok
@c _int_new_arena ok @asulock @aculock @acsmem
@c new_heap ok @acsmem
@c mmap ok @acsmem
@@ -397,7 +397,7 @@ this function is in @file{stdlib.h}.
@c mutex_lock (list_lock) dup @asulock @aculock
@c atomic_write_barrier ok
@c mutex_unlock (list_lock) @aculock
@c catomic_decrement ok
@c atomic_fetch_add ok
@c reused_arena @asulock @aculock
@c reads&writes next_to_use and iterates over arena next without guards
@c those are harmless as long as we don't drop arenas from the
@@ -414,7 +414,7 @@ this function is in @file{stdlib.h}.
@c get_max_fast ok
@c fastbin_index ok
@c fastbin ok
@c catomic_compare_and_exhange_val_acq ok
@c atomic_compare_and_exchange_val_acq ok
@c malloc_printerr dup @mtsenv
@c if we get to it, we're toast already, undefined behavior must have
@c been invoked before
@@ -521,10 +521,9 @@ this function is in @file{stdlib.h}.
@c chunk2mem dup ok
@c free_perturb ok
@c set_fastchunks ok
@c catomic_and ok
@c fastbin_index dup ok
@c fastbin dup ok
@c catomic_compare_and_exchange_val_rel ok
@c atomic_compare_and_exchange_val_rel ok
@c chunk_is_mmapped ok
@c contiguous dup ok
@c prev_inuse ok
@@ -706,7 +705,7 @@ The prototype for this function is in @file{stdlib.h}.
@safety{@prelim{}@mtsafe{}@asunsafe{@asulock{}}@acunsafe{@aculock{} @acsfd{} @acsmem{}}}
@c __libc_free @asulock @aculock @acsfd @acsmem
@c releasing memory into fastbins modifies the arena without taking
@c its mutex, but catomic operations ensure safety. If two (or more)
@c its mutex, but atomic operations ensure safety. If two (or more)
@c threads are running malloc and have their own arenas locked when
@c each gets a signal whose handler free()s large (non-fastbin-able)
@c blocks from each other's arena, we deadlock; this is a more general

View File

@@ -97,123 +97,6 @@ do_test (void)
ret = 1;
}
mem = 64;
if (atomic_exchange_acq (&mem, 31) != 64
|| mem != 31)
{
puts ("atomic_exchange_acq test failed");
ret = 1;
}
mem = 2;
if (atomic_exchange_and_add (&mem, 11) != 2
|| mem != 13)
{
puts ("atomic_exchange_and_add test failed");
ret = 1;
}
mem = 2;
if (atomic_exchange_and_add_acq (&mem, 11) != 2
|| mem != 13)
{
puts ("atomic_exchange_and_add test failed");
ret = 1;
}
mem = 2;
if (atomic_exchange_and_add_rel (&mem, 11) != 2
|| mem != 13)
{
puts ("atomic_exchange_and_add test failed");
ret = 1;
}
mem = -21;
atomic_add (&mem, 22);
if (mem != 1)
{
puts ("atomic_add test failed");
ret = 1;
}
mem = -1;
atomic_increment (&mem);
if (mem != 0)
{
puts ("atomic_increment test failed");
ret = 1;
}
mem = 2;
if (atomic_increment_val (&mem) != 3)
{
puts ("atomic_increment_val test failed");
ret = 1;
}
mem = 0;
if (atomic_increment_and_test (&mem)
|| mem != 1)
{
puts ("atomic_increment_and_test test 1 failed");
ret = 1;
}
mem = 35;
if (atomic_increment_and_test (&mem)
|| mem != 36)
{
puts ("atomic_increment_and_test test 2 failed");
ret = 1;
}
mem = -1;
if (! atomic_increment_and_test (&mem)
|| mem != 0)
{
puts ("atomic_increment_and_test test 3 failed");
ret = 1;
}
mem = 17;
atomic_decrement (&mem);
if (mem != 16)
{
puts ("atomic_decrement test failed");
ret = 1;
}
if (atomic_decrement_val (&mem) != 15)
{
puts ("atomic_decrement_val test failed");
ret = 1;
}
mem = 0;
if (atomic_decrement_and_test (&mem)
|| mem != -1)
{
puts ("atomic_decrement_and_test test 1 failed");
ret = 1;
}
mem = 15;
if (atomic_decrement_and_test (&mem)
|| mem != 14)
{
puts ("atomic_decrement_and_test test 2 failed");
ret = 1;
}
mem = 1;
if (! atomic_decrement_and_test (&mem)
|| mem != 0)
{
puts ("atomic_decrement_and_test test 3 failed");
ret = 1;
}
mem = 1;
if (atomic_decrement_if_positive (&mem) != 1
|| mem != 0)
@@ -238,273 +121,6 @@ do_test (void)
ret = 1;
}
mem = -12;
if (! atomic_add_negative (&mem, 10)
|| mem != -2)
{
puts ("atomic_add_negative test 1 failed");
ret = 1;
}
mem = 0;
if (atomic_add_negative (&mem, 100)
|| mem != 100)
{
puts ("atomic_add_negative test 2 failed");
ret = 1;
}
mem = 15;
if (atomic_add_negative (&mem, -10)
|| mem != 5)
{
puts ("atomic_add_negative test 3 failed");
ret = 1;
}
mem = -12;
if (atomic_add_negative (&mem, 14)
|| mem != 2)
{
puts ("atomic_add_negative test 4 failed");
ret = 1;
}
mem = 0;
if (! atomic_add_negative (&mem, -1)
|| mem != -1)
{
puts ("atomic_add_negative test 5 failed");
ret = 1;
}
mem = -31;
if (atomic_add_negative (&mem, 31)
|| mem != 0)
{
puts ("atomic_add_negative test 6 failed");
ret = 1;
}
mem = -34;
if (atomic_add_zero (&mem, 31)
|| mem != -3)
{
puts ("atomic_add_zero test 1 failed");
ret = 1;
}
mem = -36;
if (! atomic_add_zero (&mem, 36)
|| mem != 0)
{
puts ("atomic_add_zero test 2 failed");
ret = 1;
}
mem = 113;
if (atomic_add_zero (&mem, -13)
|| mem != 100)
{
puts ("atomic_add_zero test 3 failed");
ret = 1;
}
mem = -18;
if (atomic_add_zero (&mem, 20)
|| mem != 2)
{
puts ("atomic_add_zero test 4 failed");
ret = 1;
}
mem = 10;
if (atomic_add_zero (&mem, -20)
|| mem != -10)
{
puts ("atomic_add_zero test 5 failed");
ret = 1;
}
mem = 10;
if (! atomic_add_zero (&mem, -10)
|| mem != 0)
{
puts ("atomic_add_zero test 6 failed");
ret = 1;
}
mem = 0;
atomic_bit_set (&mem, 1);
if (mem != 2)
{
puts ("atomic_bit_set test 1 failed");
ret = 1;
}
mem = 8;
atomic_bit_set (&mem, 3);
if (mem != 8)
{
puts ("atomic_bit_set test 2 failed");
ret = 1;
}
#ifdef TEST_ATOMIC64
mem = 16;
atomic_bit_set (&mem, 35);
if (mem != 0x800000010LL)
{
puts ("atomic_bit_set test 3 failed");
ret = 1;
}
#endif
mem = 0;
if (atomic_bit_test_set (&mem, 1)
|| mem != 2)
{
puts ("atomic_bit_test_set test 1 failed");
ret = 1;
}
mem = 8;
if (! atomic_bit_test_set (&mem, 3)
|| mem != 8)
{
puts ("atomic_bit_test_set test 2 failed");
ret = 1;
}
#ifdef TEST_ATOMIC64
mem = 16;
if (atomic_bit_test_set (&mem, 35)
|| mem != 0x800000010LL)
{
puts ("atomic_bit_test_set test 3 failed");
ret = 1;
}
mem = 0x100000000LL;
if (! atomic_bit_test_set (&mem, 32)
|| mem != 0x100000000LL)
{
puts ("atomic_bit_test_set test 4 failed");
ret = 1;
}
#endif
#ifdef catomic_compare_and_exchange_val_acq
mem = 24;
if (catomic_compare_and_exchange_val_acq (&mem, 35, 24) != 24
|| mem != 35)
{
puts ("catomic_compare_and_exchange_val_acq test 1 failed");
ret = 1;
}
mem = 12;
if (catomic_compare_and_exchange_val_acq (&mem, 10, 15) != 12
|| mem != 12)
{
puts ("catomic_compare_and_exchange_val_acq test 2 failed");
ret = 1;
}
mem = -15;
if (catomic_compare_and_exchange_val_acq (&mem, -56, -15) != -15
|| mem != -56)
{
puts ("catomic_compare_and_exchange_val_acq test 3 failed");
ret = 1;
}
mem = -1;
if (catomic_compare_and_exchange_val_acq (&mem, 17, 0) != -1
|| mem != -1)
{
puts ("catomic_compare_and_exchange_val_acq test 4 failed");
ret = 1;
}
#endif
mem = 24;
if (catomic_compare_and_exchange_bool_acq (&mem, 35, 24)
|| mem != 35)
{
puts ("catomic_compare_and_exchange_bool_acq test 1 failed");
ret = 1;
}
mem = 12;
if (! catomic_compare_and_exchange_bool_acq (&mem, 10, 15)
|| mem != 12)
{
puts ("catomic_compare_and_exchange_bool_acq test 2 failed");
ret = 1;
}
mem = -15;
if (catomic_compare_and_exchange_bool_acq (&mem, -56, -15)
|| mem != -56)
{
puts ("catomic_compare_and_exchange_bool_acq test 3 failed");
ret = 1;
}
mem = -1;
if (! catomic_compare_and_exchange_bool_acq (&mem, 17, 0)
|| mem != -1)
{
puts ("catomic_compare_and_exchange_bool_acq test 4 failed");
ret = 1;
}
mem = 2;
if (catomic_exchange_and_add (&mem, 11) != 2
|| mem != 13)
{
puts ("catomic_exchange_and_add test failed");
ret = 1;
}
mem = -21;
catomic_add (&mem, 22);
if (mem != 1)
{
puts ("catomic_add test failed");
ret = 1;
}
mem = -1;
catomic_increment (&mem);
if (mem != 0)
{
puts ("catomic_increment test failed");
ret = 1;
}
mem = 2;
if (catomic_increment_val (&mem) != 3)
{
puts ("catomic_increment_val test failed");
ret = 1;
}
mem = 17;
catomic_decrement (&mem);
if (mem != 16)
{
puts ("catomic_decrement test failed");
ret = 1;
}
if (catomic_decrement_val (&mem) != 15)
{
puts ("catomic_decrement_val test failed");
ret = 1;
}
/* Tests for C11-like atomics. */
mem = 11;
if (atomic_load_relaxed (&mem) != 11 || atomic_load_acquire (&mem) != 11)

View File

@@ -43,7 +43,7 @@
#ifndef COMPARE_AND_SWAP
# define COMPARE_AND_SWAP(ptr, old, new) \
(catomic_compare_and_exchange_bool_acq (ptr, new, old) == 0)
(atomic_compare_and_exchange_bool_acq (ptr, new, old) == 0)
#endif
ElfW(Addr) _dl_boot_fptr_table [ELF_MACHINE_BOOT_FPTR_TABLE_LEN];