/* Atomic helper macros.
 *
 * When C11 <stdatomic.h> is usable (and the compiler is not the Intel or
 * NVIDIA compiler, which are excluded here), map the DLB_ATOMIC_* helpers
 * onto the standard atomic operations.  Otherwise emulate them with the
 * legacy GCC __sync builtins and full memory barriers.
 *
 * NOTE(review): the #else/#endif of this conditional were lost in the
 * extraction of this file; they are restored here (the second group of
 * definitions redefines the same names, so it must be the #else branch).
 */
#if defined(HAVE_STDATOMIC_H) && !(defined(__INTEL_COMPILER) || defined(__NVCOMPILER))

/* Fetch-then-op: return the PREVIOUS value. */
#define DLB_ATOMIC_ADD(ptr, val)     atomic_fetch_add((ptr), (val))
#define DLB_ATOMIC_ADD_RLX(ptr, val) atomic_fetch_add_explicit((ptr), (val), memory_order_relaxed)
#define DLB_ATOMIC_SUB(ptr, val)     atomic_fetch_sub((ptr), (val))
#define DLB_ATOMIC_SUB_RLX(ptr, val) atomic_fetch_sub_explicit((ptr), (val), memory_order_relaxed)

/* Op-then-fetch: return the NEW value.  `val` is captured once in a
 * statement expression, so these are single expressions that do not
 * double-evaluate side effects and are precedence-safe. */
#define DLB_ATOMIC_ADD_FETCH(ptr, val) \
    ({ __typeof__(val) dlb_val_ = (val); \
       atomic_fetch_add((ptr), dlb_val_) + dlb_val_; })
#define DLB_ATOMIC_ADD_FETCH_RLX(ptr, val) \
    ({ __typeof__(val) dlb_val_ = (val); \
       atomic_fetch_add_explicit((ptr), dlb_val_, memory_order_relaxed) + dlb_val_; })
#define DLB_ATOMIC_SUB_FETCH(ptr, val) \
    ({ __typeof__(val) dlb_val_ = (val); \
       atomic_fetch_sub((ptr), dlb_val_) - dlb_val_; })
#define DLB_ATOMIC_SUB_FETCH_RLX(ptr, val) \
    ({ __typeof__(val) dlb_val_ = (val); \
       atomic_fetch_sub_explicit((ptr), dlb_val_, memory_order_relaxed) - dlb_val_; })

/* Loads and stores (default seq_cst, plus relaxed / acquire / release). */
#define DLB_ATOMIC_LD(ptr)          atomic_load(ptr)
#define DLB_ATOMIC_LD_RLX(ptr)      atomic_load_explicit((ptr), memory_order_relaxed)
#define DLB_ATOMIC_LD_ACQ(ptr)      atomic_load_explicit((ptr), memory_order_acquire)
#define DLB_ATOMIC_ST(ptr, val)     atomic_store((ptr), (val))
#define DLB_ATOMIC_ST_RLX(ptr, val) atomic_store_explicit((ptr), (val), memory_order_relaxed)
#define DLB_ATOMIC_ST_REL(ptr, val) atomic_store_explicit((ptr), (val), memory_order_release)

#define DLB_ATOMIC_EXCH(ptr, val)     atomic_exchange((ptr), (val))
#define DLB_ATOMIC_EXCH_RLX(ptr, val) atomic_exchange_explicit((ptr), (val), memory_order_relaxed)

/* `expected` must be an lvalue; per C11 it is updated with the current
 * value when the exchange fails. */
#define DLB_ATOMIC_CMP_EXCH_WEAK(ptr, expected, desired) \
    atomic_compare_exchange_weak((ptr), &(expected), (desired))

#else /* No usable <stdatomic.h>: emulate with GCC __sync builtins. */

/* Minimal stand-ins for the C11 atomic type names (plain volatile objects;
 * atomicity is provided by the __sync builtins below, not by the type). */
#define _Atomic(T) volatile __typeof__(T)
#define atomic_int volatile int
#define atomic_uint volatile unsigned int
#define atomic_int_least64_t volatile int64_t
#define atomic_uint_least64_t volatile uint64_t
#define atomic_bool volatile bool

#define DLB_ATOMIC_ADD(ptr, val)           __sync_fetch_and_add((ptr), (val))
#define DLB_ATOMIC_ADD_RLX(ptr, val)       DLB_ATOMIC_ADD(ptr, val)
#define DLB_ATOMIC_ADD_FETCH(ptr, val)     __sync_add_and_fetch((ptr), (val))
#define DLB_ATOMIC_ADD_FETCH_RLX(ptr, val) DLB_ATOMIC_ADD_FETCH(ptr, val)
#define DLB_ATOMIC_SUB(ptr, val)           __sync_fetch_and_sub((ptr), (val))
#define DLB_ATOMIC_SUB_RLX(ptr, val)       DLB_ATOMIC_SUB(ptr, val)
#define DLB_ATOMIC_SUB_FETCH(ptr, val)     __sync_sub_and_fetch((ptr), (val))
#define DLB_ATOMIC_SUB_FETCH_RLX(ptr, val) DLB_ATOMIC_SUB_FETCH(ptr, val)

/* Seq-cst load emulation: full barrier on both sides of the load.
 * (__typeof__ instead of typeof: plain `typeof` is rejected under
 * -std=c11 before C23.) */
#define DLB_ATOMIC_LD(ptr) \
    ({ __typeof__(*(ptr)) dlb_ld_; \
       __sync_synchronize(); dlb_ld_ = *(ptr); __sync_synchronize(); \
       dlb_ld_; })
#define DLB_ATOMIC_LD_RLX(ptr) (*(ptr))
/* Acquire load: the barrier must come AFTER the load so that later
 * accesses cannot be reordered before it. */
#define DLB_ATOMIC_LD_ACQ(ptr) \
    ({ __typeof__(*(ptr)) dlb_ld_ = *(ptr); __sync_synchronize(); dlb_ld_; })

/* Stores are wrapped in do/while(0) so they behave as a single statement
 * inside unbraced if/else bodies. */
#define DLB_ATOMIC_ST(ptr, val) \
    do { __sync_synchronize(); *(ptr) = (val); __sync_synchronize(); } while (0)
#define DLB_ATOMIC_ST_RLX(ptr, val) (*(ptr) = (val))
/* Release store: the barrier must come BEFORE the store so that earlier
 * writes are visible before the store is observed. */
#define DLB_ATOMIC_ST_REL(ptr, val) \
    do { __sync_synchronize(); *(ptr) = (val); } while (0)

/* __sync_lock_test_and_set is only an acquire barrier; the preceding full
 * barrier upgrades the exchange towards seq-cst.  Statement expression so
 * the result is usable as a value. */
#define DLB_ATOMIC_EXCH(ptr, val) \
    ({ __sync_synchronize(); __sync_lock_test_and_set((ptr), (val)); })
#define DLB_ATOMIC_EXCH_RLX(ptr, val) __sync_lock_test_and_set((ptr), (val))

/* Unlike the C11 branch, `oldval` is taken by value and is NOT updated on
 * failure; callers must reload it before retrying. */
#define DLB_ATOMIC_CMP_EXCH_WEAK(ptr, oldval, newval) \
    __sync_bool_compare_and_swap((ptr), (oldval), (newval))

#endif /* HAVE_STDATOMIC_H */
/* Assumed size in bytes of one CPU cache line.
 * NOTE(review): hard-coded to 64, which matches most current x86/ARM
 * parts but is not universally true — confirm for target platforms. */
#define DLB_CACHE_LINE 64

/* Align an object to a cache-line boundary (GCC/Clang attribute), used to
 * avoid false sharing between concurrently accessed objects. */
#define DLB_ALIGN_CACHE __attribute__((aligned(DLB_CACHE_LINE)))
101static inline bool set_bit(
atomic_int *flags,
int bit) {
102 if (!bit)
return false;
110 newval = oldval | bit;
121static inline bool clear_bit(
atomic_int *flags,
int bit) {
122 if (!bit)
return false;
126 if (!(oldval & bit)) {
130 newval = oldval & ~bit;
142static inline bool cas_bit(
atomic_int *flags,
int expected,
int desired) {
146 if (!(oldval & expected)
147 && !(oldval == 0 && expected == 0)) {
165static inline bool test_set_clear_bit(
atomic_int *flags,
int set,
int clear) {
170 || (oldval == 0 && set == 0)) {
/* (Doxygen cross-reference residue from the documentation extraction,
 * commented out so it cannot be compiled — the bare `#define atomic_int`
 * would otherwise redefine atomic_int to nothing:
 *   atomic_int               — defined at atomic.h:57
 *   DLB_ATOMIC_CMP_EXCH_WEAK — defined at atomic.h:80) */