/* -*- mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; -*-
 *  vim:expandtab:shiftwidth=2:tabstop=2:smarttab:
 *
 *  Copyright (C) 2009 Sun Microsystems, Inc.
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; version 2 of the License.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

#ifndef DRIZZLED_ATOMIC_SUN_STUDIO_H
#define DRIZZLED_ATOMIC_SUN_STUDIO_H

/* Sun Studio has no GCC __sync_* builtins; emulate them with the Solaris
 * atomic_ops(3C) primitives.  NOTE(review): the original file presumably
 * included the Solaris header here — confirm on a Solaris build. */
#include <atomic.h>
inline bool __sync_fetch_and_add(volatile bool* ptr, bool val)
30
(val == true) ? atomic_inc_8((volatile uint8_t *)ptr) : atomic_add_8((volatile uint8_t *)ptr, (int8_t)val);
34
inline int8_t __sync_fetch_and_add(volatile int8_t* ptr, int8_t val)
37
(val == 1) ? atomic_inc_8((volatile uint8_t*)ptr) : atomic_add_8((volatile uint8_t*)ptr, val);
41
inline int16_t __sync_fetch_and_add(volatile int16_t* ptr, int16_t val)
44
(val == 1) ? atomic_inc_16((volatile uint16_t*)ptr) : atomic_add_16((volatile uint16_t*)ptr, val);
48
inline int32_t __sync_fetch_and_add(volatile int32_t* ptr, int32_t val)
51
(val == 1) ? atomic_inc_32((volatile uint32_t*)ptr) : atomic_add_32((volatile uint32_t*)ptr, val);
55
inline uint8_t __sync_fetch_and_add(volatile uint8_t* ptr, uint8_t val)
58
(val == 1) ? atomic_inc_8(ptr) : atomic_add_8(ptr, (int8_t)val);
62
inline uint16_t __sync_fetch_and_add(volatile uint16_t* ptr, uint16_t val)
65
(val == 1) ? atomic_inc_16(ptr) : atomic_add_16(ptr, (int16_t)val);
69
inline uint32_t __sync_fetch_and_add(volatile uint32_t* ptr, uint32_t val)
72
(val == 1) ? atomic_inc_32(ptr) : atomic_add_32(ptr, (int32_t)val);
76
# if defined(_KERNEL) || defined(_INT64_TYPE)
/* 64-bit variants, only when the platform provides 64-bit atomics. */

/* Atomically adds val to *ptr; returns the previous value (GCC semantics). */
inline uint64_t __sync_fetch_and_add(volatile uint64_t* ptr, uint64_t val)
{
  return atomic_add_64_nv(ptr, (int64_t)val) - val;
}

/* Atomically adds val to *ptr; returns the previous value (GCC semantics). */
inline int64_t __sync_fetch_and_add(volatile int64_t* ptr, int64_t val)
{
  return (int64_t)(atomic_add_64_nv((volatile uint64_t*)ptr, val) - (uint64_t)val);
}
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
inline uint8_t __sync_fetch_and_sub(volatile uint8_t* ptr, uint8_t val)
95
(val == 1) ? atomic_dec_8(ptr) : atomic_add_8(ptr, 0-(int8_t)val);
99
inline uint16_t __sync_fetch_and_sub(volatile uint16_t* ptr, uint16_t val)
102
(val == 1) ? atomic_dec_16(ptr) : atomic_add_16(ptr, 0-(int16_t)val);
106
inline uint32_t __sync_fetch_and_sub(volatile uint32_t* ptr, uint32_t val)
109
(val == 1) ? atomic_dec_32(ptr) : atomic_add_32(ptr, 0-(int32_t)val);
113
# if defined(_KERNEL) || defined(_INT64_TYPE)
/* Atomically subtracts val from *ptr; returns the previous value. */
inline uint64_t __sync_fetch_and_sub(volatile uint64_t* ptr, uint64_t val)
{
  return atomic_add_64_nv(ptr, 0-(int64_t)val) + val;
}

/* Signed 64-bit variant.  NOTE(review): val is uint64_t here, matching the
 * original overload set — confirm against callers before changing. */
inline int64_t __sync_fetch_and_sub(volatile int64_t* ptr, uint64_t val)
{
  return (int64_t)(atomic_add_64_nv((volatile uint64_t *) ptr, 0-(int64_t)val) + val);
}
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
inline bool __sync_add_and_fetch(volatile bool* ptr, bool val)
130
return (val == true) ? atomic_inc_8_nv((volatile uint8_t *)ptr) : atomic_add_8_nv((volatile uint8_t *)ptr, (int8_t)val);
133
inline int8_t __sync_add_and_fetch(volatile int8_t* ptr, int8_t val)
135
return (val == 1) ? atomic_inc_8_nv((volatile uint8_t*)ptr) : atomic_add_8_nv((volatile uint8_t*)ptr, val);
138
inline int16_t __sync_add_and_fetch(volatile int16_t* ptr, int16_t val)
140
return (val == 1) ? atomic_inc_16_nv((volatile uint16_t*)ptr) : atomic_add_16_nv((volatile uint16_t*)ptr, val);
143
inline int32_t __sync_add_and_fetch(volatile int32_t* ptr, int32_t val)
145
return (val == 1) ? atomic_inc_32_nv((volatile uint32_t*)ptr) : atomic_add_32_nv((volatile uint32_t*)ptr, val);
148
inline uint8_t __sync_add_and_fetch(volatile uint8_t* ptr, uint8_t val)
150
return (val == 1) ? atomic_inc_8_nv(ptr) : atomic_add_8_nv(ptr, (int8_t)val);
153
inline uint16_t __sync_add_and_fetch(volatile uint16_t* ptr, uint16_t val)
155
return (val == 1) ? atomic_inc_16_nv(ptr) : atomic_add_16_nv(ptr, (int16_t)val);
158
inline uint32_t __sync_add_and_fetch(volatile uint32_t* ptr, uint32_t val)
160
return (val == 1) ? atomic_inc_32_nv(ptr) : atomic_add_32_nv(ptr, (int32_t)val);
163
# if defined(_KERNEL) || defined(_INT64_TYPE)
/* Atomically adds val to *ptr; returns the new value (GCC semantics). */
inline uint64_t __sync_add_and_fetch(volatile uint64_t* ptr, uint64_t val)
{
  return (val == 1) ? atomic_inc_64_nv(ptr) : atomic_add_64_nv(ptr, (int64_t)val);
}

/* Atomically adds val to *ptr; returns the new value (GCC semantics). */
inline int64_t __sync_add_and_fetch(volatile int64_t* ptr, int64_t val)
{
  return (val == 1) ? atomic_inc_64_nv((volatile uint64_t*)ptr) : atomic_add_64_nv((volatile uint64_t*)ptr, val);
}
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
inline uint8_t __sync_sub_and_fetch(volatile uint8_t* ptr, uint8_t val)
177
return (val == 1) ? atomic_dec_8_nv(ptr) : atomic_add_8_nv(ptr, 0-(int8_t)val);
180
inline uint16_t __sync_sub_and_fetch(volatile uint16_t* ptr, uint16_t val)
182
return (val == 1) ? atomic_dec_16_nv(ptr) : atomic_add_16_nv(ptr, 0-(int16_t)val);
185
inline uint32_t __sync_sub_and_fetch(volatile uint32_t* ptr, uint32_t val)
187
return (val == 1) ? atomic_dec_32_nv(ptr) : atomic_add_32_nv(ptr, 0-(int32_t)val);
190
# if defined(_KERNEL) || defined(_INT64_TYPE)
/* Atomically subtracts val from *ptr; returns the new value. */
inline uint64_t __sync_sub_and_fetch(volatile uint64_t* ptr, uint64_t val)
{
  return (val == 1) ? atomic_dec_64_nv(ptr) : atomic_add_64_nv(ptr, 0-(int64_t)val);
}

/* Signed 64-bit variant.  NOTE(review): val is uint64_t here, matching the
 * original overload set — confirm against callers before changing. */
inline int64_t __sync_sub_and_fetch(volatile int64_t* ptr, uint64_t val)
{
  return (val == 1) ? atomic_dec_64_nv((volatile uint64_t *) ptr) : atomic_add_64_nv((volatile uint64_t *) ptr, 0-(int64_t)val);
}
# endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
inline uint8_t __sync_lock_test_and_set(volatile uint8_t* ptr, uint8_t val)
203
atomic_swap_8(ptr, val);
207
inline uint16_t __sync_lock_test_and_set(volatile uint16_t* ptr, uint16_t val)
209
atomic_swap_16(ptr, val);
213
inline uint32_t __sync_lock_test_and_set(volatile uint32_t* ptr, uint32_t val)
215
atomic_swap_32(ptr, val);
219
# if defined(_KERNEL) || defined(_INT64_TYPE)
/* Atomically stores val into *ptr; returns the previous value. */
inline uint64_t __sync_lock_test_and_set(volatile uint64_t* ptr, uint64_t val)
{
  return atomic_swap_64(ptr, val);
}
#endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
inline int8_t __sync_val_compare_and_swap(volatile int8_t* ptr,
228
int8_t old_val, int8_t val)
230
atomic_cas_8((volatile uint8_t *)ptr, old_val, val);
234
inline uint8_t __sync_val_compare_and_swap(volatile uint8_t* ptr,
235
uint8_t old_val, uint8_t val)
237
atomic_cas_8(ptr, old_val, val);
241
inline uint16_t __sync_val_compare_and_swap(volatile uint16_t* ptr,
242
uint16_t old_val, uint16_t val)
244
atomic_cas_16(ptr, old_val, val);
248
inline uint32_t __sync_val_compare_and_swap(volatile uint32_t* ptr,
249
uint32_t old_val, uint32_t val)
251
atomic_cas_32(ptr, old_val, val);
255
# if defined(_KERNEL) || defined(_INT64_TYPE)
/* CAS: returns the value of *ptr before the operation. */
inline uint64_t __sync_val_compare_and_swap(volatile uint64_t* ptr,
                                            uint64_t old_val, uint64_t val)
{
  return atomic_cas_64(ptr, old_val, val);
}
#endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
inline int8_t __sync_bool_compare_and_swap(volatile int8_t* ptr,
265
int8_t old_val, int8_t val)
268
return orig == atomic_cas_8((volatile uint8_t *)ptr, old_val, val);
271
inline uint8_t __sync_bool_compare_and_swap(volatile uint8_t* ptr,
272
uint8_t old_val, uint8_t val)
275
return orig == atomic_cas_8(ptr, old_val, val);
278
inline uint16_t __sync_bool_compare_and_swap(volatile uint16_t* ptr,
279
uint16_t old_val, uint16_t val)
282
return orig == atomic_cas_16(ptr, old_val, val);
285
inline uint32_t __sync_bool_compare_and_swap(volatile uint32_t* ptr,
286
uint32_t old_val, uint32_t val)
289
return orig == atomic_cas_32(ptr, old_val, val);
292
# if defined(_KERNEL) || defined(_INT64_TYPE)
/* Boolean CAS: returns nonzero iff the swap happened. */
inline uint64_t __sync_bool_compare_and_swap(volatile uint64_t* ptr,
                                             uint64_t old_val, uint64_t val)
{
  return atomic_cas_64(ptr, old_val, val) == old_val;
}
#endif /* defined(_KERNEL) || defined(_INT64_TYPE) */
#endif /* DRIZZLED_ATOMIC_SUN_STUDIO_H */