GCC Code Coverage Report


Directory: src/
File: src/support/atomic.h
Date: 2024-11-22 17:07:10
            Exec   Total   Coverage
Lines:        34      36      94.4%
Functions:     4       4     100.0%
Branches:     19      28      67.9%

Line Branch Exec Source
1 /*********************************************************************************/
2 /* Copyright 2009-2021 Barcelona Supercomputing Center */
3 /* */
4 /* This file is part of the DLB library. */
5 /* */
6 /* DLB is free software: you can redistribute it and/or modify */
7 /* it under the terms of the GNU Lesser General Public License as published by */
8 /* the Free Software Foundation, either version 3 of the License, or */
9 /* (at your option) any later version. */
10 /* */
11 /* DLB is distributed in the hope that it will be useful, */
12 /* but WITHOUT ANY WARRANTY; without even the implied warranty of */
13 /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
14 /* GNU Lesser General Public License for more details. */
15 /* */
16 /* You should have received a copy of the GNU Lesser General Public License */
17 /* along with DLB. If not, see <https://www.gnu.org/licenses/>. */
18 /*********************************************************************************/
19
20 #ifndef ATOMIC_H
21 #define ATOMIC_H
22
23 #ifdef HAVE_CONFIG_H
24 #include <config.h>
25 #endif
26
27 #include <stdbool.h>
28 #include <stdint.h>
29
30 /* Atomic operations */
31
32 #if defined(HAVE_STDATOMIC_H) && !defined(__INTEL_COMPILER)
33 #include <stdatomic.h>
34
35 #define DLB_ATOMIC_ADD(ptr, val) atomic_fetch_add(ptr, val)
36 #define DLB_ATOMIC_ADD_RLX(ptr, val) atomic_fetch_add_explicit(ptr, val, memory_order_relaxed)
37 #define DLB_ATOMIC_ADD_FETCH(ptr, val) (atomic_fetch_add(ptr, val) + (val))
38 #define DLB_ATOMIC_ADD_FETCH_RLX(ptr, val) (atomic_fetch_add_explicit(ptr, val, memory_order_relaxed) + (val))
39 #define DLB_ATOMIC_SUB(ptr, val) atomic_fetch_sub(ptr, val)
40 #define DLB_ATOMIC_SUB_RLX(ptr, val) atomic_fetch_sub_explicit(ptr, val, memory_order_relaxed)
41 #define DLB_ATOMIC_SUB_FETCH(ptr, val) (atomic_fetch_sub(ptr, val) - (val))
42 #define DLB_ATOMIC_SUB_FETCH_RLX(ptr, val) (atomic_fetch_sub_explicit(ptr, val, memory_order_relaxed) - (val))
43 #define DLB_ATOMIC_LD(ptr) atomic_load(ptr)
44 #define DLB_ATOMIC_LD_RLX(ptr) atomic_load_explicit(ptr, memory_order_relaxed)
45 #define DLB_ATOMIC_LD_ACQ(ptr) atomic_load_explicit(ptr, memory_order_acquire)
46 #define DLB_ATOMIC_ST(ptr, val) atomic_store(ptr, val)
47 #define DLB_ATOMIC_ST_RLX(ptr, val) atomic_store_explicit(ptr, val, memory_order_relaxed)
48 #define DLB_ATOMIC_ST_REL(ptr, val) atomic_store_explicit(ptr, val, memory_order_release)
49 #define DLB_ATOMIC_EXCH(ptr, val) atomic_exchange(ptr, val)
50 #define DLB_ATOMIC_EXCH_RLX(ptr, val) atomic_exchange_explicit(ptr, val, memory_order_relaxed)
51 #define DLB_ATOMIC_CMP_EXCH_WEAK(ptr, expected, desired) \
52 atomic_compare_exchange_weak(ptr, &expected, desired)
53
54 #else /* not HAVE_STDATOMIC_H */
55
56 #define _Atomic(T) volatile __typeof__(T)
57 #define atomic_int volatile int
58 #define atomic_uint volatile unsigned int
59 #define atomic_int_least64_t volatile int64_t
60 #define atomic_uint_least64_t volatile uint64_t
61 #define atomic_bool volatile bool
62
63 #define DLB_ATOMIC_ADD(ptr, val) __sync_fetch_and_add(ptr, val)
64 #define DLB_ATOMIC_ADD_RLX(ptr, val) DLB_ATOMIC_ADD(ptr, val)
65 #define DLB_ATOMIC_ADD_FETCH(ptr, val) __sync_add_and_fetch(ptr, val)
66 #define DLB_ATOMIC_ADD_FETCH_RLX(ptr, val) DLB_ATOMIC_ADD_FETCH(ptr, val)
67 #define DLB_ATOMIC_SUB(ptr, val) __sync_fetch_and_sub(ptr, val)
68 #define DLB_ATOMIC_SUB_RLX(ptr, val) DLB_ATOMIC_SUB(ptr, val)
69 #define DLB_ATOMIC_SUB_FETCH(ptr, val) __sync_sub_and_fetch(ptr, val)
70 #define DLB_ATOMIC_SUB_FETCH_RLX(ptr, val) __sync_sub_and_fetch(ptr, val)
71 #define DLB_ATOMIC_LD(ptr) \
72 ({ typeof (*ptr) value; __sync_synchronize(); value = (*ptr); __sync_synchronize(); value; })
73 #define DLB_ATOMIC_LD_RLX(ptr) (*ptr)
74 #define DLB_ATOMIC_LD_ACQ(ptr) ({ __typeof__(*ptr) value = (*ptr); __sync_synchronize(); value; })
75 #define DLB_ATOMIC_ST(ptr, val) do { __sync_synchronize(); (*ptr) = (val); __sync_synchronize(); } while (0)
76 #define DLB_ATOMIC_ST_RLX(ptr, val) (*ptr) = (val)
77 #define DLB_ATOMIC_ST_REL(ptr, val) do { __sync_synchronize(); (*ptr) = (val); } while (0)
78 #define DLB_ATOMIC_EXCH(ptr, val) ({ __sync_synchronize(); __sync_lock_test_and_set(ptr, val); })
79 #define DLB_ATOMIC_EXCH_RLX(ptr, val) __sync_lock_test_and_set(ptr, val)
80 #define DLB_ATOMIC_CMP_EXCH_WEAK(ptr, oldval, newval) \
81 __sync_bool_compare_and_swap(ptr, oldval, newval)
82
83 #endif
84
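Note: DLB_ATOMIC_ADD/SUB return the value *before* the operation, while the
*_FETCH variants return the value *after* it (hence the "+ val" correction in
the stdatomic branch). A minimal usage sketch, assuming this header is
included; the names "counter" and "bump" and the include path are
illustrative, not part of DLB:

    #include <stdio.h>
    #include "support/atomic.h"   /* illustrative include path */

    static atomic_int counter = 0;

    static void bump(void) {
        /* relaxed ordering is enough for a pure statistic */
        int after = DLB_ATOMIC_ADD_FETCH_RLX(&counter, 1);   /* post-add value */
        printf("counter is now %d\n", after);
    }

    int main(void) {
        bump();
        printf("final value: %d\n", DLB_ATOMIC_LD(&counter));
        return 0;
    }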
85
86 /* Support for cache alignment, padding, etc. */
87
88 #ifndef DLB_CACHE_LINE
89 #define DLB_CACHE_LINE 64
90 #endif
91
92 #define DLB_ALIGN_CACHE __attribute__((aligned(DLB_CACHE_LINE)))
93
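DLB_ALIGN_CACHE is intended to keep independently updated fields on separate
cache lines so that concurrent writers do not false-share. A sketch under that
assumption; the struct and field names are illustrative:

    typedef struct {
        DLB_ALIGN_CACHE atomic_int produced;   /* written by producer threads */
        DLB_ALIGN_CACHE atomic_int consumed;   /* written by consumer threads */
    } counters_t;   /* each field starts on its own DLB_CACHE_LINE boundary */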
94
95 /* If flags does not contain 'bit', atomically:
96 * - set 'bit'
97 * - return true
98 * Otherwise:
99 * - return false
100 */
101 7 static inline bool set_bit(atomic_int *flags, int bit) {
102 1/2 7 if (!bit) return false;
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 7 times.
103 7 int oldval = *flags;
104 int newval;
105 do {
106 2/2 7 if (oldval & bit) {
        ✓ Branch 0 taken 1 times.
        ✓ Branch 1 taken 6 times.
107 /* flag already contains bit */
108 1 return false;
109 }
110 6 newval = oldval | bit;
111 1/2 6 } while (!DLB_ATOMIC_CMP_EXCH_WEAK(flags, oldval, newval));
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 6 times.
112 6 return true;
113 }
114
115 /* If flags contains 'bit', atomically:
116 * - clear 'bit'
117 * - return true
118 * Otherwise:
119 * - return false
120 */
121 10 static inline bool clear_bit(atomic_int *flags, int bit) {
122 1/2 10 if (!bit) return false;
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 10 times.
123 10 int oldval = *flags;
124 int newval;
125 do {
126 1/2 10 if (!(oldval & bit)) {
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 10 times.
127 /* flag does not contain bit */
128 return false;
129 }
130 10 newval = oldval & ~bit;
131 1/2 10 } while (!DLB_ATOMIC_CMP_EXCH_WEAK(flags, oldval, newval));
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 10 times.
132 10 return true;
133 }
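set_bit and clear_bit pair naturally into a lock-free claim/release protocol
on a shared flag word. A minimal sketch, assuming this header is included; the
bit and function names are illustrative:

    enum { SLOT_BUSY = 1 << 0 };
    static atomic_int slot_flags = 0;

    static void try_exclusive_work(void) {
        if (set_bit(&slot_flags, SLOT_BUSY)) {    /* claim; false if already busy */
            /* ... work that must not run concurrently ... */
            clear_bit(&slot_flags, SLOT_BUSY);    /* release the claim */
        }
    }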
134
135 /* If flags contains 'expected' (or both flags and 'expected' are zero), atomically:
136 * - clear 'expected'
137 * - set 'desired'
138 * - return true
139 * Otherwise:
140 * - return false
141 */
142 57 static inline bool cas_bit(atomic_int *flags, int expected, int desired) {
143 57 int oldval = *flags;
144 int newval;
145 do {
146 2/2 57 if (!(oldval & expected)
        ✓ Branch 0 taken 33 times.
        ✓ Branch 1 taken 24 times.
147 4/4 33 && !(oldval == 0 && expected == 0)) {
        ✓ Branch 0 taken 13 times.
        ✓ Branch 1 taken 20 times.
        ✓ Branch 2 taken 10 times.
        ✓ Branch 3 taken 3 times.
148 /* flag does not contain expected */
149 30 return false;
150 }
151 27 newval = oldval;
152 27 newval &= ~expected;
153 27 newval |= desired;
154 1/2 27 } while (!DLB_ATOMIC_CMP_EXCH_WEAK(flags, oldval, newval));
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 27 times.
155 27 return true;
156 }
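Because cas_bit replaces one bit with another in a single atomic step, it can
drive a small state machine in which each state is a distinct bit. A sketch
under that assumption; the state names are illustrative:

    enum { ST_IDLE = 1 << 0, ST_RUNNING = 1 << 1 };
    static atomic_int state = ST_IDLE;

    /* IDLE -> RUNNING; returns false if another thread already started */
    static bool start(void) {
        return cas_bit(&state, ST_IDLE, ST_RUNNING);
    }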
157
158 /* If flags does not contain 'set' (and flags and 'set' are not both zero), atomically:
159 * - set 'set'
160 * - clear 'clear'
161 * - return true
162 * Otherwise:
163 * - return false
164 */
165 13 static inline bool test_set_clear_bit(atomic_int *flags, int set, int clear) {
166 13 int oldval = *flags;
167 int newval;
168 do {
169 1/2 13 if (oldval & set
        ✓ Branch 0 taken 13 times.
        ✗ Branch 1 not taken.
170 3/4 13 || (oldval == 0 && set == 0)) {
        ✓ Branch 0 taken 4 times.
        ✓ Branch 1 taken 9 times.
        ✗ Branch 2 not taken.
        ✓ Branch 3 taken 4 times.
171 /* flag is already set */
172 return false;
173 }
174 13 newval = oldval;
175 13 newval &= ~clear;
176 13 newval |= set;
177 1/2 13 } while (!DLB_ATOMIC_CMP_EXCH_WEAK(flags, oldval, newval));
        ✗ Branch 0 not taken.
        ✓ Branch 1 taken 13 times.
178 13 return true;
179 }
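test_set_clear_bit combines the two previous operations: it refuses if 'set'
is already present and otherwise sets it while clearing 'clear' in the same
atomic step, e.g. to post a request while discarding a stale completion mark.
A sketch; the bit names are illustrative:

    enum { REQ_PENDING = 1 << 0, REQ_DONE = 1 << 1 };
    static atomic_int request = 0;

    /* Post a request, clearing any stale DONE; fails if one is still pending */
    static bool post_request(void) {
        return test_set_clear_bit(&request, REQ_PENDING, REQ_DONE);
    }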
180
181
182 #endif /* ATOMIC_H */
183