/*
 * Guard: this header provides implementation details only and must be pulled
 * in via the umbrella atomics.h, which defines INSIDE_ATOMICS_H first.
 */
23#ifndef INSIDE_ATOMICS_H
24#error "should be included via atomics.h"
30#define pg_compiler_barrier_impl() __asm__ __volatile__("" ::: "memory")
/*
 * Full memory barrier fallback: prefer the C11-style __atomic builtin when
 * configure detected it, else fall back to the legacy __sync builtin on GCC.
 */
37#if !defined(pg_memory_barrier_impl)
38# if defined(HAVE_GCC__ATOMIC_INT32_CAS)
/* sequentially-consistent fence via the modern __atomic interface */
39# define pg_memory_barrier_impl() __atomic_thread_fence(__ATOMIC_SEQ_CST)
40# elif defined(__GNUC__)
/* legacy full barrier; __sync_synchronize() is available on all GCCs */
41# define pg_memory_barrier_impl() __sync_synchronize()
/*
 * Read (acquire) barrier: compiler barrier plus an ACQUIRE fence, so loads
 * after the barrier cannot be hoisted before it.  (Macro body is
 * backslash-continued; do not insert lines inside it.)
 */
45#if !defined(pg_read_barrier_impl) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
47# define pg_read_barrier_impl() do \
49 pg_compiler_barrier_impl(); \
50 __atomic_thread_fence(__ATOMIC_ACQUIRE); \
/*
 * Write (release) barrier: compiler barrier plus a RELEASE fence, so stores
 * before the barrier cannot sink past it.
 */
54#if !defined(pg_write_barrier_impl) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
56# define pg_write_barrier_impl() do \
58 pg_compiler_barrier_impl(); \
59 __atomic_thread_fence(__ATOMIC_RELEASE); \
/*
 * Provide pg_atomic_flag when either the int32 or char test-and-set builtin
 * is available and no platform-specific definition exists yet.
 * (The #if condition below is backslash-continued across two lines.)
 */
65#if !defined(PG_HAVE_ATOMIC_FLAG_SUPPORT) \
66 && (defined(HAVE_GCC__SYNC_INT32_TAS) || defined(HAVE_GCC__SYNC_CHAR_TAS))
68#define PG_HAVE_ATOMIC_FLAG_SUPPORT
/* struct body not visible in this excerpt; member `value` is referenced below */
69typedef struct pg_atomic_flag
/* the member type is chosen by which TAS builtin is available */
76#ifdef HAVE_GCC__SYNC_INT32_TAS
/*
 * 32-bit atomics are supported if either the __atomic or __sync 32-bit CAS
 * builtin was detected by configure.
 */
86#if !defined(PG_HAVE_ATOMIC_U32_SUPPORT) \
87 && (defined(HAVE_GCC__ATOMIC_INT32_CAS) || defined(HAVE_GCC__SYNC_INT32_CAS))
89#define PG_HAVE_ATOMIC_U32_SUPPORT
/*
 * 64-bit atomics: as above, but additionally suppressible via
 * PG_DISABLE_64_BIT_ATOMICS (e.g. when the builtins exist but are emulated).
 */
98#if !defined(PG_HAVE_ATOMIC_U64_SUPPORT) \
99 && !defined(PG_DISABLE_64_BIT_ATOMICS) \
100 && (defined(HAVE_GCC__ATOMIC_INT64_CAS) || defined(HAVE_GCC__SYNC_INT64_CAS))
102#define PG_HAVE_ATOMIC_U64_SUPPORT
/* ---- flag operation implementations (only if the flag type exists) ---- */
110#ifdef PG_HAVE_ATOMIC_FLAG_SUPPORT
112#if defined(HAVE_GCC__SYNC_CHAR_TAS) || defined(HAVE_GCC__SYNC_INT32_TAS)
114#ifndef PG_HAVE_ATOMIC_TEST_SET_FLAG
115#define PG_HAVE_ATOMIC_TEST_SET_FLAG
/*
 * Try to acquire the flag.  (Return-type line elided in this excerpt.)
 */
117pg_atomic_test_set_flag_impl(
volatile pg_atomic_flag *ptr)
/* TAS returns the previous value; 0 means the flag was clear, i.e. we won */
121 return __sync_lock_test_and_set(&ptr->value, 1) == 0;
127#ifndef PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
128#define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
/*
 * Plain (non-atomic, unlocked) peek at the flag: true iff it is currently
 * clear.  No barrier semantics are implied by this read.
 */
130pg_atomic_unlocked_test_flag_impl(
volatile pg_atomic_flag *ptr)
132 return ptr->
value == 0;
136#ifndef PG_HAVE_ATOMIC_CLEAR_FLAG
137#define PG_HAVE_ATOMIC_CLEAR_FLAG
/*
 * Release the flag; __sync_lock_release writes 0 with release semantics,
 * pairing with the acquire in __sync_lock_test_and_set above.
 */
139pg_atomic_clear_flag_impl(
volatile pg_atomic_flag *ptr)
141 __sync_lock_release(&ptr->value);
145#ifndef PG_HAVE_ATOMIC_INIT_FLAG
146#define PG_HAVE_ATOMIC_INIT_FLAG
/* Initialize a flag to the clear state by reusing the clear operation. */
148pg_atomic_init_flag_impl(
volatile pg_atomic_flag *ptr)
150 pg_atomic_clear_flag_impl(ptr);
/*
 * 32-bit CAS via the modern __atomic builtin.  On failure the builtin stores
 * the observed value back into *expected; returns true on success.  The
 * `false` argument selects the strong (non-spurious-failure) variant, and
 * both success and failure orderings are SEQ_CST.
 */
157#if !defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
158#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
164 return __atomic_compare_exchange_n(&ptr->
value, expected,
newval,
false,
165 __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
/*
 * 32-bit CAS fallback via the legacy __sync builtin, which only returns the
 * old value — so success must be computed by comparing against *expected.
 * (The writeback of `current` into *expected on failure is elided from
 * this excerpt.)
 */
169#if !defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
170#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
177 current = __sync_val_compare_and_swap(&ptr->
value, *expected,
newval);
178 ret = current == *expected;
/*
 * Atomic 32-bit exchange: store newval, return the previous value, with
 * sequentially-consistent ordering.
 */
192#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U32) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
193#define PG_HAVE_ATOMIC_EXCHANGE_U32
197 return __atomic_exchange_n(&ptr->
value,
newval, __ATOMIC_SEQ_CST);
/* Atomic 32-bit fetch-and-add; returns the value prior to the addition. */
203#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
204#define PG_HAVE_ATOMIC_FETCH_ADD_U32
208 return __sync_fetch_and_add(&ptr->
value, add_);
/* Atomic 32-bit fetch-and-subtract; returns the value prior to the subtraction. */
212#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
213#define PG_HAVE_ATOMIC_FETCH_SUB_U32
217 return __sync_fetch_and_sub(&ptr->
value, sub_);
/* Atomic 32-bit fetch-and-AND; returns the value prior to the AND. */
221#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
222#define PG_HAVE_ATOMIC_FETCH_AND_U32
226 return __sync_fetch_and_and(&ptr->
value, and_);
/* Atomic 32-bit fetch-and-OR; returns the value prior to the OR. */
230#if !defined(PG_HAVE_ATOMIC_FETCH_OR_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
231#define PG_HAVE_ATOMIC_FETCH_OR_U32
235 return __sync_fetch_and_or(&ptr->
value, or_);
/* ---- 64-bit operations; skipped entirely when 64-bit atomics are disabled ---- */
240#if !defined(PG_DISABLE_64_BIT_ATOMICS)
/*
 * 64-bit CAS via __atomic: same contract as the 32-bit variant — strong CAS,
 * SEQ_CST orderings, failure writes the observed value into *expected.
 */
242#if !defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64) && defined(HAVE_GCC__ATOMIC_INT64_CAS)
243#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64
249 return __atomic_compare_exchange_n(&ptr->
value, expected,
newval,
false,
250 __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
/*
 * 64-bit CAS fallback via __sync: success is inferred by comparing the
 * returned old value against *expected (writeback on failure elided here).
 */
254#if !defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
255#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64
264 current = __sync_val_compare_and_swap(&ptr->
value, *expected,
newval);
265 ret = current == *expected;
/* Atomic 64-bit exchange: store newval, return the previous value (SEQ_CST). */
279#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U64) && defined(HAVE_GCC__ATOMIC_INT64_CAS)
280#define PG_HAVE_ATOMIC_EXCHANGE_U64
284 return __atomic_exchange_n(&ptr->
value,
newval, __ATOMIC_SEQ_CST);
/* Atomic 64-bit fetch-and-add; returns the value prior to the addition. */
290#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
291#define PG_HAVE_ATOMIC_FETCH_ADD_U64
295 return __sync_fetch_and_add(&ptr->
value, add_);
/* Atomic 64-bit fetch-and-subtract; returns the value prior to the subtraction. */
299#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
300#define PG_HAVE_ATOMIC_FETCH_SUB_U64
304 return __sync_fetch_and_sub(&ptr->
value, sub_);
/* Atomic 64-bit fetch-and-AND; returns the value prior to the AND. */
308#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
309#define PG_HAVE_ATOMIC_FETCH_AND_U64
313 return __sync_fetch_and_and(&ptr->
value, and_);
/* Atomic 64-bit fetch-and-OR; returns the value prior to the OR. */
317#if !defined(PG_HAVE_ATOMIC_FETCH_OR_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
318#define PG_HAVE_ATOMIC_FETCH_OR_U64
322 return __sync_fetch_and_or(&ptr->
value, or_);
/*
 * NOTE(review): the lines below look like extraction residue — detached
 * signature/type summaries (no bodies, no semicolons), not compilable
 * declarations.  Presumably they mirror definitions elsewhere in the real
 * file; verify against the original header before relying on them.
 */
static bool pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 *expected, uint32 newval)
static uint32 pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
struct pg_atomic_uint32 pg_atomic_uint32
uint64 pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
bool pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 *expected, uint64 newval)
#define AssertPointerAlignment(ptr, bndr)
struct pg_atomic_uint64 pg_atomic_uint64
static uint32 pg_atomic_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 newval)