Fix issues in e8fdbd58fe.

When the 64bit atomics simulation is in use, we can't necessarily guarantee the correct alignment of the atomics due to lack of compiler support for doing so. That's fine from a safety perspective, because everything is protected by a lock, but we asserted the alignment in all cases. Weaken those assertions. Per complaint from Alvaro Herrera.

My #ifdefery for PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY wasn't sufficient. Fix that. Per complaint from Alexander Korotkov.
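For context: when a platform has no native 64-bit atomics, the operations are simulated by embedding a spinlock in each atomic variable and taking that lock around every access. The sketch below is a simplified illustration of that scheme, not the actual layout in the tree (the struct and function names are made up for the example); because the value is only ever touched while the lock is held, 8-byte alignment is unnecessary for correctness, which is why the assertions can be skipped when PG_HAVE_ATOMIC_U64_SIMULATION is defined.

#include "c.h"					/* uint64, int64 */
#include "storage/spin.h"		/* slock_t, SpinLockAcquire/SpinLockRelease */

/* Simplified sketch of the spinlock-based simulation (hypothetical names). */
typedef struct sim_atomic_uint64
{
	slock_t		sema;			/* protects value */
	volatile uint64 value;		/* may not be 8-byte aligned - harmless here */
} sim_atomic_uint64;

static uint64
sim_fetch_add_u64(volatile sim_atomic_uint64 *ptr, int64 add_)
{
	uint64		old;

	SpinLockAcquire((slock_t *) &ptr->sema);	/* every access is serialized */
	old = ptr->value;
	ptr->value += add_;
	SpinLockRelease((slock_t *) &ptr->sema);

	return old;
}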
@@ -425,30 +425,41 @@ pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
 static inline void
 pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
+	/*
+	 * Can't necessarily enforce alignment - and don't need it - when using
+	 * the spinlock based fallback implementation. Therefore only assert when
+	 * not using it.
+	 */
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	pg_atomic_init_u64_impl(ptr, val);
 }
 
 static inline uint64
 pg_atomic_read_u64(volatile pg_atomic_uint64 *ptr)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	return pg_atomic_read_u64_impl(ptr);
 }
 
 static inline void
 pg_atomic_write_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	pg_atomic_write_u64_impl(ptr, val);
 }
 
 static inline uint64
 pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	return pg_atomic_exchange_u64_impl(ptr, newval);
 }
 
@@ -456,22 +467,28 @@ static inline bool
 pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr,
 							   uint64 *expected, uint64 newval)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
 	AssertPointerAlignment(expected, 8);
+#endif
 	return pg_atomic_compare_exchange_u64_impl(ptr, expected, newval);
 }
 
 static inline uint64
 pg_atomic_fetch_add_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	return pg_atomic_fetch_add_u64_impl(ptr, add_);
 }
 
 static inline uint64
 pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	Assert(sub_ != PG_INT64_MIN);
 	return pg_atomic_fetch_sub_u64_impl(ptr, sub_);
 }
@@ -479,28 +496,36 @@ pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 static inline uint64
 pg_atomic_fetch_and_u64(volatile pg_atomic_uint64 *ptr, uint64 and_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	return pg_atomic_fetch_and_u64_impl(ptr, and_);
 }
 
 static inline uint64
 pg_atomic_fetch_or_u64(volatile pg_atomic_uint64 *ptr, uint64 or_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	return pg_atomic_fetch_or_u64_impl(ptr, or_);
 }
 
 static inline uint64
 pg_atomic_add_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	return pg_atomic_add_fetch_u64_impl(ptr, add_);
 }
 
 static inline uint64
 pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
 	AssertPointerAlignment(ptr, 8);
+#endif
 	Assert(sub_ != PG_INT64_MIN);
 	return pg_atomic_sub_fetch_u64_impl(ptr, sub_);
 }
 
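The hunks above all give the public 64-bit wrappers the same shape: assert 8-byte alignment only when a real atomics implementation is in use, then delegate to the corresponding _impl function. A minimal caller-side sketch of that API (the struct and variable names here are illustrative only, not taken from the tree):

#include "port/atomics.h"

typedef struct DemoCounter
{
	pg_atomic_uint64 ops;		/* shared 64bit counter (illustrative) */
} DemoCounter;

static void
demo_counter_use(DemoCounter *dc)
{
	/* each wrapper asserts alignment unless the simulation is in use */
	pg_atomic_init_u64(&dc->ops, 0);
	pg_atomic_fetch_add_u64(&dc->ops, 1);
	(void) pg_atomic_read_u64(&dc->ops);
}

The remaining hunks are in the generic fallback header, where the _impl variants of 64-bit read and write are selected.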
@@ -271,26 +271,26 @@ pg_atomic_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 xchg_)
 }
 #endif
 
-#ifndef PG_HAVE_ATOMIC_READ_U64
-#define PG_HAVE_ATOMIC_READ_U64
-static inline uint64
-pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
-{
-	return *(&ptr->value);
-}
-#endif
-
 #ifndef PG_HAVE_ATOMIC_WRITE_U64
 #define PG_HAVE_ATOMIC_WRITE_U64
+
+#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
+	!defined(PG_HAVE_ATOMIC_U64_SIMULATION)
+
 static inline void
 pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
+	/*
+	 * On this platform aligned 64bit writes are guaranteed to be atomic,
+	 * except if using the fallback implementation, where can't guarantee the
+	 * required alignment.
+	 */
+	AssertPointerAlignment(ptr, 8);
 	ptr->value = val;
 }
-#endif
 
-#ifndef PG_HAVE_ATOMIC_WRITE_U64
-#define PG_HAVE_ATOMIC_WRITE_U64
+#else
+
 static inline void
 pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
@@ -300,10 +300,30 @@ pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
 	 */
 	pg_atomic_exchange_u64_impl(ptr, val);
 }
-#endif
+
+#endif	/* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
+#endif	/* PG_HAVE_ATOMIC_WRITE_U64 */
 
 #ifndef PG_HAVE_ATOMIC_READ_U64
 #define PG_HAVE_ATOMIC_READ_U64
+
+#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
+	!defined(PG_HAVE_ATOMIC_U64_SIMULATION)
+
+static inline uint64
+pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
+{
+	/*
+	 * On this platform aligned 64bit reads are guaranteed to be atomic,
+	 * except if using the fallback implementation, where can't guarantee the
+	 * required alignment.
+	 */
+	AssertPointerAlignment(ptr, 8);
+	return *(&ptr->value);
+}
+
+#else
+
 static inline uint64
 pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
 {
@@ -319,7 +339,8 @@ pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
 
 	return old;
 }
-#endif
+#endif	/* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
+#endif	/* PG_HAVE_ATOMIC_READ_U64 */
 
 #ifndef PG_HAVE_ATOMIC_INIT_U64
 #define PG_HAVE_ATOMIC_INIT_U64
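Collapsing the preprocessor structure makes the intent of the generic-header hunks easier to see. Roughly (a paraphrase of the resulting code, comments trimmed), the write path now reads as below, and the read path is selected in the same way:

#ifndef PG_HAVE_ATOMIC_WRITE_U64
#define PG_HAVE_ATOMIC_WRITE_U64

#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
	!defined(PG_HAVE_ATOMIC_U64_SIMULATION)
/* aligned plain stores are atomic on this platform */
static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
	AssertPointerAlignment(ptr, 8);
	ptr->value = val;
}
#else
/* no single-copy atomicity, or simulation in use: route through exchange */
static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
	pg_atomic_exchange_u64_impl(ptr, val);
}
#endif							/* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && ... */
#endif							/* PG_HAVE_ATOMIC_WRITE_U64 */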