
Remove bogus assertion in pg_atomic_monotonic_advance_u64

This code wanted to ensure that the 'expected' variable passed to
pg_atomic_compare_exchange_u64 has suitable alignment, but apparently
platforms don't actually require anything beyond its natural alignment.
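
A minimal standalone C sketch of that point (not taken from the PostgreSQL
tree, and compiler-dependent): a local 64-bit variable only gets whatever
alignment the platform ABI gives it, which need not be 8 bytes, so asserting
8-byte alignment on its address is not portable.

#include <stdint.h>
#include <stdio.h>

int
main(void)
{
	uint64_t	local = 0;

	/* Natural alignment of a 64-bit integer; can be 4 on some 32-bit ABIs. */
	printf("_Alignof(uint64_t) = %zu\n", _Alignof(uint64_t));

	/* Whether &local happens to be 8-byte aligned depends on the ABI and stack. */
	printf("(uintptr_t) &local %% 8 = %zu\n",
		   (size_t) ((uintptr_t) &local % 8));
	return 0;
}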

While messing with pg_atomic_monotonic_advance_u64: instead of using
Max() to determine the value to return, just use
pg_atomic_compare_exchange_u64()'s return value to decide; also, use
pg_atomic_compare_exchange_u64 instead of the _impl version; also remove
the unnecessary underscore at the end of the variable name "target_".
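
A commented sketch of the resulting loop (the function name is made up;
the body mirrors the code added to atomics.h in the diff below, assumes it
is compiled inside the PostgreSQL tree, and relies on the usual
compare-and-exchange contract: true on success, and on failure the current
value of *ptr is written back into "currval"):

#include "port/atomics.h"

/* Illustrative sketch only; mirrors pg_atomic_monotonic_advance_u64 as changed below. */
static inline uint64
monotonic_advance_sketch(volatile pg_atomic_uint64 *ptr, uint64 target)
{
	uint64		currval;

	currval = pg_atomic_read_u64_impl(ptr);
	if (currval >= target)
	{
		pg_memory_barrier();
		return currval;			/* already at or past target */
	}

	while (currval < target)
	{
		/* On success, the value stored in *ptr is exactly "target". */
		if (pg_atomic_compare_exchange_u64(ptr, &currval, target))
			return target;
		/* On failure, "currval" now holds the newer value found in *ptr. */
	}

	/*
	 * The loop ended because currval caught up to or passed target, so
	 * currval is already the larger of the two and Max() is unnecessary.
	 */
	return currval;
}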

Backpatch to 17, where this code was introduced by commit bf3ff7bf83bc.

Reported-by: Alexander Lakhin <exclusion@gmail.com>
Discussion: https://postgr.es/m/36796438-a718-cf9b-2071-b2c1b947c1b5@gmail.com
Alvaro Herrera 2024-07-04 13:25:31 +02:00
parent 1c9acb14ae
commit 3a9d0d774d
5 changed files with 14 additions and 11 deletions

View File

@@ -509,7 +509,6 @@ pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr,
 {
 #ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
-	AssertPointerAlignment(expected, 8);
 #endif
 	return pg_atomic_compare_exchange_u64_impl(ptr, expected, newval);
 }
@@ -578,7 +577,7 @@ pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
  * Full barrier semantics (even when value is unchanged).
  */
 static inline uint64
-pg_atomic_monotonic_advance_u64(volatile pg_atomic_uint64 *ptr, uint64 target_)
+pg_atomic_monotonic_advance_u64(volatile pg_atomic_uint64 *ptr, uint64 target)
 {
 	uint64		currval;
@@ -587,23 +586,19 @@ pg_atomic_monotonic_advance_u64(volatile pg_atomic_uint64 *ptr, uint64 target_)
 #endif
 	currval = pg_atomic_read_u64_impl(ptr);
-	if (currval >= target_)
+	if (currval >= target)
 	{
 		pg_memory_barrier();
 		return currval;
 	}
-#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
-	AssertPointerAlignment(&currval, 8);
-#endif
-	while (currval < target_)
+	while (currval < target)
 	{
-		if (pg_atomic_compare_exchange_u64_impl(ptr, &currval, target_))
-			break;
+		if (pg_atomic_compare_exchange_u64(ptr, &currval, target))
+			return target;
 	}
-	return Max(target_, currval);
+	return currval;
 }
 #undef INSIDE_ATOMICS_H

View File

@@ -173,6 +173,8 @@ pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
 	uint32		condition_register;
 	bool		ret;
+	AssertPointerAlignment(expected, 8);
+
 	/* Like u32, but s/lwarx/ldarx/; s/stwcx/stdcx/; s/cmpw/cmpd/ */
 #ifdef HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
 	if (__builtin_constant_p(*expected) &&

View File

@@ -207,6 +207,8 @@ pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
 {
 	char		ret;
+	AssertPointerAlignment(expected, 8);
+
 	/*
 	 * Perform cmpxchg and use the zero flag which it implicitly sets when
 	 * equal to measure the success.

View File

@@ -240,6 +240,7 @@ static inline bool
 pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
 									uint64 *expected, uint64 newval)
 {
+	AssertPointerAlignment(expected, 8);
 	return __atomic_compare_exchange_n(&ptr->value, expected, newval, false,
 									   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
 }
@@ -253,6 +254,8 @@ pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
 {
 	bool		ret;
 	uint64		current;
+	AssertPointerAlignment(expected, 8);
+
 	current = __sync_val_compare_and_swap(&ptr->value, *expected, newval);
 	ret = current == *expected;
 	*expected = current;

View File

@@ -102,6 +102,7 @@ pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
 	bool		ret;
 	uint64		current;
+	AssertPointerAlignment(expected, 8);
 	current = atomic_cas_64(&ptr->value, *expected, newval);
 	ret = current == *expected;
 	*expected = current;