#ifndef JEMALLOC_INTERNAL_ATOMIC_GCC_ATOMIC_H
#define JEMALLOC_INTERNAL_ATOMIC_GCC_ATOMIC_H

#include "jemalloc/internal/assert.h"

#define ATOMIC_INIT(...) {__VA_ARGS__}

typedef enum {
	atomic_memory_order_relaxed,
	atomic_memory_order_acquire,
	atomic_memory_order_release,
	atomic_memory_order_acq_rel,
	atomic_memory_order_seq_cst
} atomic_memory_order_t;

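/*
 * Map the memory order enum above onto the __ATOMIC_* constants consumed by
 * the GCC/Clang __atomic_* builtins.
 */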
ATOMIC_INLINE int
atomic_enum_to_builtin(atomic_memory_order_t mo) {
	switch (mo) {
	case atomic_memory_order_relaxed:
		return __ATOMIC_RELAXED;
	case atomic_memory_order_acquire:
		return __ATOMIC_ACQUIRE;
	case atomic_memory_order_release:
		return __ATOMIC_RELEASE;
	case atomic_memory_order_acq_rel:
		return __ATOMIC_ACQ_REL;
	case atomic_memory_order_seq_cst:
		return __ATOMIC_SEQ_CST;
	}
	/* Can't happen; the switch is exhaustive. */
	not_reached();
}

ATOMIC_INLINE void
atomic_fence(atomic_memory_order_t mo) {
	__atomic_thread_fence(atomic_enum_to_builtin(mo));
}
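/*
 * Illustrative use of atomic_fence() (not part of this header): a
 * sequentially consistent fence can be requested with
 *
 *	atomic_fence(atomic_memory_order_seq_cst);
 *
 * which lowers to __atomic_thread_fence(__ATOMIC_SEQ_CST).
 */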

#define JEMALLOC_GENERATE_ATOMICS(type, short_type, \
    /* unused */ lg_size) \
typedef struct { \
	type repr; \
} atomic_##short_type##_t; \
 \
ATOMIC_INLINE type \
atomic_load_##short_type(const atomic_##short_type##_t *a, \
    atomic_memory_order_t mo) { \
	type result; \
	__atomic_load(&a->repr, &result, atomic_enum_to_builtin(mo)); \
	return result; \
} \
 \
ATOMIC_INLINE void \
atomic_store_##short_type(atomic_##short_type##_t *a, type val, \
    atomic_memory_order_t mo) { \
	__atomic_store(&a->repr, &val, atomic_enum_to_builtin(mo)); \
} \
 \
ATOMIC_INLINE type \
atomic_exchange_##short_type(atomic_##short_type##_t *a, type val, \
    atomic_memory_order_t mo) { \
	type result; \
	__atomic_exchange(&a->repr, &val, &result, \
	    atomic_enum_to_builtin(mo)); \
	return result; \
} \
 \
ATOMIC_INLINE bool \
atomic_compare_exchange_weak_##short_type(atomic_##short_type##_t *a, \
    UNUSED type *expected, type desired, \
    atomic_memory_order_t success_mo, \
    atomic_memory_order_t failure_mo) { \
	return __atomic_compare_exchange(&a->repr, expected, &desired, \
	    true, atomic_enum_to_builtin(success_mo), \
	    atomic_enum_to_builtin(failure_mo)); \
} \
 \
ATOMIC_INLINE bool \
atomic_compare_exchange_strong_##short_type(atomic_##short_type##_t *a, \
    UNUSED type *expected, type desired, \
    atomic_memory_order_t success_mo, \
    atomic_memory_order_t failure_mo) { \
	return __atomic_compare_exchange(&a->repr, expected, &desired, \
	    false, \
	    atomic_enum_to_builtin(success_mo), \
	    atomic_enum_to_builtin(failure_mo)); \
}
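/*
 * Illustrative expansion (the actual instantiations are expected to live in
 * the including header, e.g. atomic.h): an invocation such as
 *
 *	JEMALLOC_GENERATE_ATOMICS(bool, b, 0)
 *
 * would define atomic_b_t along with atomic_load_b(), atomic_store_b(),
 * atomic_exchange_b(), atomic_compare_exchange_weak_b(), and
 * atomic_compare_exchange_strong_b(), each taking explicit
 * atomic_memory_order_t arguments.
 */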

#define JEMALLOC_GENERATE_INT_ATOMICS(type, short_type, \
    /* unused */ lg_size) \
JEMALLOC_GENERATE_ATOMICS(type, short_type, /* unused */ lg_size) \
 \
ATOMIC_INLINE type \
atomic_fetch_add_##short_type(atomic_##short_type##_t *a, type val, \
    atomic_memory_order_t mo) { \
	return __atomic_fetch_add(&a->repr, val, \
	    atomic_enum_to_builtin(mo)); \
} \
 \
ATOMIC_INLINE type \
atomic_fetch_sub_##short_type(atomic_##short_type##_t *a, type val, \
    atomic_memory_order_t mo) { \
	return __atomic_fetch_sub(&a->repr, val, \
	    atomic_enum_to_builtin(mo)); \
} \
 \
ATOMIC_INLINE type \
atomic_fetch_and_##short_type(atomic_##short_type##_t *a, type val, \
    atomic_memory_order_t mo) { \
	return __atomic_fetch_and(&a->repr, val, \
	    atomic_enum_to_builtin(mo)); \
} \
 \
ATOMIC_INLINE type \
atomic_fetch_or_##short_type(atomic_##short_type##_t *a, type val, \
    atomic_memory_order_t mo) { \
	return __atomic_fetch_or(&a->repr, val, \
	    atomic_enum_to_builtin(mo)); \
} \
 \
ATOMIC_INLINE type \
atomic_fetch_xor_##short_type(atomic_##short_type##_t *a, type val, \
    atomic_memory_order_t mo) { \
	return __atomic_fetch_xor(&a->repr, val, \
	    atomic_enum_to_builtin(mo)); \
}
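/*
 * Usage sketch (assumes the including header instantiates, e.g.,
 * JEMALLOC_GENERATE_INT_ATOMICS(uint32_t, u32, 2); the counter below is
 * purely illustrative):
 *
 *	static atomic_u32_t nrequests = ATOMIC_INIT(0);
 *
 *	static inline void
 *	count_request(void) {
 *		atomic_fetch_add_u32(&nrequests, 1,
 *		    atomic_memory_order_relaxed);
 *	}
 */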

#endif /* JEMALLOC_INTERNAL_ATOMIC_GCC_ATOMIC_H */