#include <atomic.h>
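
/*
 * GCC-style __sync_* builtins implemented with the Solaris atomic_ops(3C)
 * primitives from <atomic.h>, for builds whose compiler (e.g. Sun Studio)
 * does not supply the builtins itself.  The *_nv ("new value") primitives
 * return the value after the operation: the op-and-fetch shims return that
 * result directly, and the fetch-and-op shims derive the previous value
 * from it, so no separate (racy) read of *ptr is needed.
 */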

inline bool __sync_fetch_and_add(volatile bool* ptr, bool val)
{
  /* Use the *_nv variant so the previous value can be derived from the
     atomically returned new value instead of a racy separate read. */
  uint8_t new_val = (val == true) ? atomic_inc_8_nv((volatile uint8_t *)ptr) : atomic_add_8_nv((volatile uint8_t *)ptr, (int8_t)val);
  return (bool)(new_val - (uint8_t)val);
}

inline int8_t __sync_fetch_and_add(volatile int8_t* ptr, int8_t val)
{
  uint8_t new_val = (val == 1) ? atomic_inc_8_nv((volatile uint8_t*)ptr) : atomic_add_8_nv((volatile uint8_t*)ptr, val);
  return (int8_t)(new_val - (uint8_t)val);  /* previous value */
}

inline int16_t __sync_fetch_and_add(volatile int16_t* ptr, int16_t val)
{
  uint16_t new_val = (val == 1) ? atomic_inc_16_nv((volatile uint16_t*)ptr) : atomic_add_16_nv((volatile uint16_t*)ptr, val);
  return (int16_t)(new_val - (uint16_t)val);
}

inline int32_t __sync_fetch_and_add(volatile int32_t* ptr, int32_t val)
{
  uint32_t new_val = (val == 1) ? atomic_inc_32_nv((volatile uint32_t*)ptr) : atomic_add_32_nv((volatile uint32_t*)ptr, val);
  return (int32_t)(new_val - (uint32_t)val);
}

inline uint8_t __sync_fetch_and_add(volatile uint8_t* ptr, uint8_t val)
{
  uint8_t new_val = (val == 1) ? atomic_inc_8_nv(ptr) : atomic_add_8_nv(ptr, (int8_t)val);
  return new_val - val;
}

inline uint16_t __sync_fetch_and_add(volatile uint16_t* ptr, uint16_t val)
{
  uint16_t new_val = (val == 1) ? atomic_inc_16_nv(ptr) : atomic_add_16_nv(ptr, (int16_t)val);
  return new_val - val;
}

inline uint32_t __sync_fetch_and_add(volatile uint32_t* ptr, uint32_t val)
{
  uint32_t new_val = (val == 1) ? atomic_inc_32_nv(ptr) : atomic_add_32_nv(ptr, (int32_t)val);
  return new_val - val;
}
# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_fetch_and_add(volatile uint64_t* ptr, uint64_t val)
{
  uint64_t new_val = (val == 1) ? atomic_inc_64_nv(ptr) : atomic_add_64_nv(ptr, (int64_t)val);
  return new_val - val;
}

inline int64_t __sync_fetch_and_add(volatile int64_t* ptr, int64_t val)
{
  uint64_t new_val = (val == 1) ? atomic_inc_64_nv((volatile uint64_t*)ptr) : atomic_add_64_nv((volatile uint64_t*)ptr, val);
  return (int64_t)(new_val - (uint64_t)val);
}
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
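
/*
 * Worked example of the old-value derivation above: if *ptr is 10, then
 * __sync_fetch_and_add(ptr, 5) makes atomic_add_32_nv() return 15 and the
 * shim returns 15 - 5 == 10, the value before the addition.  The
 * fetch-and-sub shims below recover it the same way, adding val back.
 */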

inline uint8_t __sync_fetch_and_sub(volatile uint8_t* ptr, uint8_t val)
{
  uint8_t new_val = (val == 1) ? atomic_dec_8_nv(ptr) : atomic_add_8_nv(ptr, 0-(int8_t)val);
  return new_val + val;  /* previous value */
}

inline uint16_t __sync_fetch_and_sub(volatile uint16_t* ptr, uint16_t val)
{
  uint16_t new_val = (val == 1) ? atomic_dec_16_nv(ptr) : atomic_add_16_nv(ptr, 0-(int16_t)val);
  return new_val + val;
}

inline uint32_t __sync_fetch_and_sub(volatile uint32_t* ptr, uint32_t val)
{
  uint32_t new_val = (val == 1) ? atomic_dec_32_nv(ptr) : atomic_add_32_nv(ptr, 0-(int32_t)val);
  return new_val + val;
}
# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_fetch_and_sub(volatile uint64_t* ptr, uint64_t val)
{
  uint64_t new_val = (val == 1) ? atomic_dec_64_nv(ptr) : atomic_add_64_nv(ptr, 0-(int64_t)val);
  return new_val + val;
}

inline int64_t __sync_fetch_and_sub(volatile int64_t* ptr, int64_t val)
{
  uint64_t new_val = (val == 1) ? atomic_dec_64_nv((volatile uint64_t *) ptr) : atomic_add_64_nv((volatile uint64_t *) ptr, 0-val);
  return (int64_t)(new_val + (uint64_t)val);
}
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
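
/*
 * The *_and_fetch shims below return the post-operation value, which is
 * exactly what the Solaris *_nv primitives produce, so they forward the
 * result unchanged.
 */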

inline bool __sync_add_and_fetch(volatile bool* ptr, bool val)
{
  return (val == true) ? atomic_inc_8_nv((volatile uint8_t *)ptr) : atomic_add_8_nv((volatile uint8_t *)ptr, (int8_t)val);
}

inline int8_t __sync_add_and_fetch(volatile int8_t* ptr, int8_t val)
{
  return (val == 1) ? atomic_inc_8_nv((volatile uint8_t*)ptr) : atomic_add_8_nv((volatile uint8_t*)ptr, val);
}

inline int16_t __sync_add_and_fetch(volatile int16_t* ptr, int16_t val)
{
  return (val == 1) ? atomic_inc_16_nv((volatile uint16_t*)ptr) : atomic_add_16_nv((volatile uint16_t*)ptr, val);
}

inline int32_t __sync_add_and_fetch(volatile int32_t* ptr, int32_t val)
{
  return (val == 1) ? atomic_inc_32_nv((volatile uint32_t*)ptr) : atomic_add_32_nv((volatile uint32_t*)ptr, val);
}

inline uint8_t __sync_add_and_fetch(volatile uint8_t* ptr, uint8_t val)
{
  return (val == 1) ? atomic_inc_8_nv(ptr) : atomic_add_8_nv(ptr, (int8_t)val);
}

inline uint16_t __sync_add_and_fetch(volatile uint16_t* ptr, uint16_t val)
{
  return (val == 1) ? atomic_inc_16_nv(ptr) : atomic_add_16_nv(ptr, (int16_t)val);
}

inline uint32_t __sync_add_and_fetch(volatile uint32_t* ptr, uint32_t val)
{
  return (val == 1) ? atomic_inc_32_nv(ptr) : atomic_add_32_nv(ptr, (int32_t)val);
}
# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_add_and_fetch(volatile uint64_t* ptr, uint64_t val)
{
  return (val == 1) ? atomic_inc_64_nv(ptr) : atomic_add_64_nv(ptr, (int64_t)val);
}

inline int64_t __sync_add_and_fetch(volatile int64_t* ptr, int64_t val)
{
  return (val == 1) ? atomic_inc_64_nv((volatile uint64_t*)ptr) : atomic_add_64_nv((volatile uint64_t*)ptr, val);
}
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */

inline uint8_t __sync_sub_and_fetch(volatile uint8_t* ptr, uint8_t val)
{
  return (val == 1) ? atomic_dec_8_nv(ptr) : atomic_add_8_nv(ptr, 0-(int8_t)val);
}

inline uint16_t __sync_sub_and_fetch(volatile uint16_t* ptr, uint16_t val)
{
  return (val == 1) ? atomic_dec_16_nv(ptr) : atomic_add_16_nv(ptr, 0-(int16_t)val);
}

inline uint32_t __sync_sub_and_fetch(volatile uint32_t* ptr, uint32_t val)
{
  return (val == 1) ? atomic_dec_32_nv(ptr) : atomic_add_32_nv(ptr, 0-(int32_t)val);
}
# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_sub_and_fetch(volatile uint64_t* ptr, uint64_t val)
{
  return (val == 1) ? atomic_dec_64_nv(ptr) : atomic_add_64_nv(ptr, 0-(int64_t)val);
}

inline int64_t __sync_sub_and_fetch(volatile int64_t* ptr, int64_t val)
{
  return (val == 1) ? atomic_dec_64_nv((volatile uint64_t *) ptr) : atomic_add_64_nv((volatile uint64_t *) ptr, 0-val);
}
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
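
/*
 * Spinlock-style primitives.  In GCC's definition, __sync_lock_test_and_set
 * is an atomic exchange that returns the previous value; only the 8-bit
 * variant is provided here.  __sync_bool_compare_and_swap reports whether
 * the swap took place.
 */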
inline uint8_t __sync_lock_test_and_set(volatile uint8_t* ptr, uint8_t val)
{
  /* atomic_swap_8 returns the previous contents of *ptr, matching the
     builtin's atomic-exchange semantics. */
  return atomic_swap_8(ptr, val);
}

inline int8_t __sync_bool_compare_and_swap(volatile int8_t* ptr,
                                           int8_t old_val, int8_t val)
{
  /* atomic_cas_8 returns the previous value; the swap took place iff
     that value equals old_val. */
  return (uint8_t)old_val == atomic_cas_8((volatile uint8_t *)ptr, old_val, val);
}

inline uint8_t __sync_bool_compare_and_swap(volatile uint8_t* ptr,
                                            uint8_t old_val, uint8_t val)
{
  return old_val == atomic_cas_8(ptr, old_val, val);
}

inline uint16_t __sync_bool_compare_and_swap(volatile uint16_t* ptr,
                                             uint16_t old_val, uint16_t val)
{
  return old_val == atomic_cas_16(ptr, old_val, val);
}

inline uint32_t __sync_bool_compare_and_swap(volatile uint32_t* ptr,
                                             uint32_t old_val, uint32_t val)
{
  return old_val == atomic_cas_32(ptr, old_val, val);
}
# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_bool_compare_and_swap(volatile uint64_t* ptr,
                                             uint64_t old_val, uint64_t val)
{
  return old_val == atomic_cas_64(ptr, old_val, val);
}
#endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
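
#if 0
/*
 * Usage sketch (hypothetical caller, not part of this header): with the
 * shims above, GCC-style atomic code compiles unchanged under Sun Studio.
 * `counter' and `try_claim' are illustrative names only.
 */
static volatile uint32_t counter = 0;

static uint32_t bump_counter()
{
  return __sync_add_and_fetch(&counter, (uint32_t)1);  /* atomic ++counter */
}

static bool try_claim(volatile uint32_t* flag)
{
  /* Nonzero when *flag was 0 and is now 1. */
  return __sync_bool_compare_and_swap(flag, (uint32_t)0, (uint32_t)1);
}
#endif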
#endif /* DRIZZLED_ATOMIC_SOLARIS_H */