#include <atomic.h>
inline bool __sync_fetch_and_add(volatile bool* ptr, bool val)
30
(val == true) ? atomic_inc_8((volatile uint8_t *)ptr) : atomic_add_8((volatile uint8_t *)ptr, (int8_t)val);
34
inline int8_t __sync_fetch_and_add(volatile int8_t* ptr, int8_t val)
37
(val == 1) ? atomic_inc_8((volatile uint8_t*)ptr) : atomic_add_8((volatile uint8_t*)ptr, val);
41
inline int16_t __sync_fetch_and_add(volatile int16_t* ptr, int16_t val)
44
(val == 1) ? atomic_inc_16((volatile uint16_t*)ptr) : atomic_add_16((volatile uint16_t*)ptr, val);
48
inline int32_t __sync_fetch_and_add(volatile int32_t* ptr, int32_t val)
51
(val == 1) ? atomic_inc_32((volatile uint32_t*)ptr) : atomic_add_32((volatile uint32_t*)ptr, val);
55
inline uint8_t __sync_fetch_and_add(volatile uint8_t* ptr, uint8_t val)
58
(val == 1) ? atomic_inc_8(ptr) : atomic_add_8(ptr, (int8_t)val);
62
inline uint16_t __sync_fetch_and_add(volatile uint16_t* ptr, uint16_t val)
65
(val == 1) ? atomic_inc_16(ptr) : atomic_add_16(ptr, (int16_t)val);
69
inline uint32_t __sync_fetch_and_add(volatile uint32_t* ptr, uint32_t val)
72
(val == 1) ? atomic_inc_32(ptr) : atomic_add_32(ptr, (int32_t)val);
76
# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_fetch_and_add(volatile uint64_t* ptr, uint64_t val)
80
(val == 1) ? atomic_inc_64(ptr) : atomic_add_64(ptr, (int64_t)val);
84
inline int64_t __sync_fetch_and_add(volatile int64_t* ptr, int64_t val)
87
(val == 1) ? atomic_inc_64((volatile uint64_t*)ptr) : atomic_add_64((volatile uint64_t*)ptr, val);
90
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
inline uint8_t __sync_fetch_and_sub(volatile uint8_t* ptr, uint8_t val)
95
(val == 1) ? atomic_dec_8(ptr) : atomic_add_8(ptr, 0-(int8_t)val);
99
inline uint16_t __sync_fetch_and_sub(volatile uint16_t* ptr, uint16_t val)
102
(val == 1) ? atomic_dec_16(ptr) : atomic_add_16(ptr, 0-(int16_t)val);
106
inline uint32_t __sync_fetch_and_sub(volatile uint32_t* ptr, uint32_t val)
109
(val == 1) ? atomic_dec_32(ptr) : atomic_add_32(ptr, 0-(int32_t)val);
113
# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_fetch_and_sub(volatile uint64_t* ptr, uint64_t val)
117
(val == 1) ? atomic_dec_64(ptr) : atomic_add_64(ptr, 0-(int64_t)val);
120
inline int64_t __sync_fetch_and_sub(volatile int64_t* ptr, uint64_t val)
123
(val == 1) ? atomic_dec_64((volatile uint64_t *) ptr) : atomic_add_64((volatile uint64_t *) ptr, 0-(int64_t)val);
126
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
27
128
inline bool __sync_add_and_fetch(volatile bool* ptr, bool val)
29
(val == true) ? atomic_inc_8((volatile uint8_t *)ptr) : atomic_add_8((volatile uint8_t *)ptr, (int8_t)val);
130
return (val == true) ? atomic_inc_8_nv((volatile uint8_t *)ptr) : atomic_add_8_nv((volatile uint8_t *)ptr, (int8_t)val);
33
133
inline int8_t __sync_add_and_fetch(volatile int8_t* ptr, int8_t val)
35
(val == 1) ? atomic_inc_8((volatile uint8_t*)ptr) : atomic_add_8((volatile uint8_t*)ptr, val);
135
return (val == 1) ? atomic_inc_8_nv((volatile uint8_t*)ptr) : atomic_add_8_nv((volatile uint8_t*)ptr, val);
39
138
inline int16_t __sync_add_and_fetch(volatile int16_t* ptr, int16_t val)
41
(val == 1) ? atomic_inc_16((volatile uint16_t*)ptr) : atomic_add_16((volatile uint16_t*)ptr, val);
140
return (val == 1) ? atomic_inc_16_nv((volatile uint16_t*)ptr) : atomic_add_16_nv((volatile uint16_t*)ptr, val);
45
143
inline int32_t __sync_add_and_fetch(volatile int32_t* ptr, int32_t val)
47
(val == 1) ? atomic_inc_32((volatile uint32_t*)ptr) : atomic_add_32((volatile uint32_t*)ptr, val);
145
return (val == 1) ? atomic_inc_32_nv((volatile uint32_t*)ptr) : atomic_add_32_nv((volatile uint32_t*)ptr, val);
51
148
inline uint8_t __sync_add_and_fetch(volatile uint8_t* ptr, uint8_t val)
53
(val == 1) ? atomic_inc_8(ptr) : atomic_add_8(ptr, (int8_t)val);
150
return (val == 1) ? atomic_inc_8_nv(ptr) : atomic_add_8_nv(ptr, (int8_t)val);
57
153
inline uint16_t __sync_add_and_fetch(volatile uint16_t* ptr, uint16_t val)
59
(val == 1) ? atomic_inc_16(ptr) : atomic_add_16(ptr, (int16_t)val);
155
return (val == 1) ? atomic_inc_16_nv(ptr) : atomic_add_16_nv(ptr, (int16_t)val);
63
158
inline uint32_t __sync_add_and_fetch(volatile uint32_t* ptr, uint32_t val)
65
(val == 1) ? atomic_inc_32(ptr) : atomic_add_32(ptr, (int32_t)val);
160
return (val == 1) ? atomic_inc_32_nv(ptr) : atomic_add_32_nv(ptr, (int32_t)val);
69
163
# if defined(_KERNEL) || defined(_INT64_TYPE)
70
164
inline uint64_t __sync_add_and_fetch(volatile uint64_t* ptr, uint64_t val)
72
(val == 1) ? atomic_inc_64(ptr) : atomic_add_64(ptr, (int64_t)val);
166
return (val == 1) ? atomic_inc_64_nv(ptr) : atomic_add_64_nv(ptr, (int64_t)val);
76
169
inline int64_t __sync_add_and_fetch(volatile int64_t* ptr, int64_t val)
78
(val == 1) ? atomic_inc_64((volatile uint64_t*)ptr) : atomic_add_64((volatile uint64_t*)ptr, val);
171
return (val == 1) ? atomic_inc_64_nv((volatile uint64_t*)ptr) : atomic_add_64_nv((volatile uint64_t*)ptr, val);
81
173
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
inline uint8_t __sync_sub_and_fetch(volatile uint8_t* ptr, uint8_t val)
86
(val == 1) ? atomic_dec_8(ptr) : atomic_add_8(ptr, 0-(int8_t)val);
177
return (val == 1) ? atomic_dec_8_nv(ptr) : atomic_add_8_nv(ptr, 0-(int8_t)val);
90
180
inline uint16_t __sync_sub_and_fetch(volatile uint16_t* ptr, uint16_t val)
92
(val == 1) ? atomic_dec_16(ptr) : atomic_add_16(ptr, 0-(int16_t)val);
182
return (val == 1) ? atomic_dec_16_nv(ptr) : atomic_add_16_nv(ptr, 0-(int16_t)val);
96
185
inline uint32_t __sync_sub_and_fetch(volatile uint32_t* ptr, uint32_t val)
98
(val == 1) ? atomic_dec_32(ptr) : atomic_add_32(ptr, 0-(int32_t)val);
187
return (val == 1) ? atomic_dec_32_nv(ptr) : atomic_add_32_nv(ptr, 0-(int32_t)val);
102
190
# if defined(_KERNEL) || defined(_INT64_TYPE)
103
191
inline uint64_t __sync_sub_and_fetch(volatile uint64_t* ptr, uint64_t val)
105
(val == 1) ? atomic_dec_64(ptr) : atomic_add_64(ptr, 0-(int64_t)val);
193
return (val == 1) ? atomic_dec_64_nv(ptr) : atomic_add_64_nv(ptr, 0-(int64_t)val);
108
195
inline int64_t __sync_sub_and_fetch(volatile int64_t* ptr, uint64_t val)
110
(val == 1) ? atomic_dec_64((volatile uint64_t *) ptr) : atomic_add_64((volatile uint64_t *) ptr, 0-(int64_t)val);
197
return (val == 1) ? atomic_dec_64_nv((volatile uint64_t *) ptr) : atomic_add_64_nv((volatile uint64_t *) ptr, 0-(int64_t)val);
113
199
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
#endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
inline int8_t __sync_bool_compare_and_swap(volatile int8_t* ptr,
265
int8_t old_val, int8_t val)
268
return orig == atomic_cas_8((volatile uint8_t *)ptr, old_val, val);
271
inline uint8_t __sync_bool_compare_and_swap(volatile uint8_t* ptr,
272
uint8_t old_val, uint8_t val)
275
return orig == atomic_cas_8(ptr, old_val, val);
278
inline uint16_t __sync_bool_compare_and_swap(volatile uint16_t* ptr,
279
uint16_t old_val, uint16_t val)
282
return orig == atomic_cas_16(ptr, old_val, val);
285
inline uint32_t __sync_bool_compare_and_swap(volatile uint32_t* ptr,
286
uint32_t old_val, uint32_t val)
289
return orig == atomic_cas_32(ptr, old_val, val);
292
# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_bool_compare_and_swap(volatile uint64_t* ptr,
294
uint64_t old_val, uint64_t val)
297
return orig == atomic_cas_64(ptr, old_val, val);
299
#endif /* defined(_KERNEL) || defined(_INT64_TYPE) */

#endif /* DRIZZLED_ATOMIC_SUN_STUDIO_H */