16 OF_ASSUME_NONNULL_BEGIN
/*
 * NOTE(review): fragmentary extraction — asm operand/constraint strings,
 * the size-dispatch `if`, and the return statement are not visible here.
 * Presumably an atomic fetch-add on *p (name suggests so) — confirm against
 * the full source.
 */
19 of_atomic_int_add(
volatile int *_Nonnull p,
int i)
/* Inline assembly path; __volatile__ keeps it from being optimized away. */
22 __asm__ __volatile__ (
/* Alternate variant taken when sizeof(int) == 8. */
30 else if (
sizeof(
int) == 8)
31 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm body and return not visible.
 * Fixed-width 32-bit counterpart of of_atomic_int_add; returns int32_t.
 */
45 static OF_INLINE int32_t
46 of_atomic_int32_add(
volatile int32_t *_Nonnull p, int32_t i)
48 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm bodies not visible. Pointer variant that
 * offsets *p by i bytes (intptr_t delta), selecting an implementation per
 * architecture via the preprocessor.
 */
59 static OF_INLINE
void *_Nullable
60 of_atomic_ptr_add(
void *
volatile _Nullable *_Nonnull p, intptr_t i)
/* 64-bit x86 implementation. */
62 #if defined(OF_X86_64)
63 __asm__ __volatile__ (
/* Other-architecture implementation (guard not visible in this extract). */
73 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm operands and return not visible.
 * Presumably the atomic-subtract counterpart of of_atomic_int_add, with the
 * same sizeof(int) dispatch — confirm against the full source.
 */
86 of_atomic_int_sub(
volatile int *_Nonnull p,
int i)
89 __asm__ __volatile__ (
/* 64-bit-int variant. */
98 else if (
sizeof(
int) == 8)
99 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm body not visible.
 * Fixed-width 32-bit counterpart of of_atomic_int_sub.
 */
114 static OF_INLINE int32_t
115 of_atomic_int32_sub(
volatile int32_t *_Nonnull p, int32_t i)
117 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm bodies not visible. Pointer variant that
 * subtracts a byte offset i from *p, with separate x86-64 and x86 code paths.
 */
129 static OF_INLINE
void *_Nullable
130 of_atomic_ptr_sub(
void *
volatile _Nullable *_Nonnull p, intptr_t i)
/* 64-bit x86 implementation. */
132 #if defined(OF_X86_64)
133 __asm__ __volatile__ (
/* 32-bit x86 implementation. */
143 #elif defined(OF_X86)
144 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm operands and return not visible.
 * Presumably atomically increments *p and returns the new value (name
 * suggests so). Dispatches on sizeof(int) at compile time; both branches are
 * constant-folded, so only one asm block survives.
 */
158 of_atomic_int_inc(
volatile int *_Nonnull p)
/* 4-byte-int path. */
162 if (
sizeof(
int) == 4)
163 __asm__ __volatile__ (
/* 8-byte-int path. */
173 else if (
sizeof(
int) == 8)
174 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm body not visible.
 * Fixed-width 32-bit counterpart of of_atomic_int_inc.
 */
190 static OF_INLINE int32_t
191 of_atomic_int32_inc(
volatile int32_t *_Nonnull p)
195 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm operands and return not visible.
 * Presumably atomically decrements *p and returns the new value; mirrors
 * of_atomic_int_inc including the sizeof(int) dispatch.
 */
209 of_atomic_int_dec(
volatile int *_Nonnull p)
/* 4-byte-int path. */
213 if (
sizeof(
int) == 4)
214 __asm__ __volatile__ (
/* 8-byte-int path. */
224 else if (
sizeof(
int) == 8)
225 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm body not visible.
 * Fixed-width 32-bit counterpart of of_atomic_int_dec.
 */
241 static OF_INLINE int32_t
242 of_atomic_int32_dec(
volatile int32_t *_Nonnull p)
246 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm operands and return not visible.
 * Presumably atomically ORs i into *p and returns the new value (unsigned
 * operands, so no signed-overflow concerns). Same sizeof(int) dispatch as
 * the arithmetic helpers above.
 */
259 static OF_INLINE
unsigned int
260 of_atomic_int_or(
volatile unsigned int *_Nonnull p,
unsigned int i)
/* 4-byte-int path. */
262 if (
sizeof(
int) == 4)
263 __asm__ __volatile__ (
/* 8-byte-int path. */
276 else if (
sizeof(
int) == 8)
277 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm body not visible.
 * Fixed-width 32-bit counterpart of of_atomic_int_or.
 */
296 static OF_INLINE uint32_t
297 of_atomic_int32_or(
volatile uint32_t *_Nonnull p, uint32_t i)
299 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm operands and return not visible.
 * Presumably atomically ANDs i into *p and returns the new value; mirrors
 * of_atomic_int_or including the sizeof(int) dispatch.
 */
315 static OF_INLINE
unsigned int
316 of_atomic_int_and(
volatile unsigned int *_Nonnull p,
unsigned int i)
/* 4-byte-int path. */
318 if (
sizeof(
int) == 4)
319 __asm__ __volatile__ (
/* 8-byte-int path. */
332 else if (
sizeof(
int) == 8)
333 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm body not visible.
 * Fixed-width 32-bit counterpart of of_atomic_int_and.
 */
352 static OF_INLINE uint32_t
353 of_atomic_int32_and(
volatile uint32_t *_Nonnull p, uint32_t i)
355 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm operands and return not visible.
 * Presumably atomically XORs i into *p and returns the new value; mirrors
 * of_atomic_int_or/of_atomic_int_and including the sizeof(int) dispatch.
 */
371 static OF_INLINE
unsigned int
372 of_atomic_int_xor(
volatile unsigned int *_Nonnull p,
unsigned int i)
/* 4-byte-int path. */
374 if (
sizeof(
int) == 4)
375 __asm__ __volatile__ (
/* 8-byte-int path. */
388 else if (
sizeof(
int) == 8)
389 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — most of the asm body is not visible.
 * Fixed-width 32-bit counterpart of of_atomic_int_xor. The visible
 * `cmpxchgl` shows this is built on a compare-and-exchange retry loop
 * (x86 has no single xadd-style instruction for XOR that returns a value).
 */
408 static OF_INLINE uint32_t
409 of_atomic_int32_xor(
volatile uint32_t *_Nonnull p, uint32_t i)
411 __asm__ __volatile__ (
417 "cmpxchgl %0, %2\n\t"
/*
 * NOTE(review): fragmentary — asm body not visible.
 * Compare-and-swap on *p: presumably stores n iff *p == o, returning whether
 * the swap happened (bool return) — confirm against the full source.
 */
427 static OF_INLINE
bool
428 of_atomic_int_cmpswap(
volatile int *_Nonnull p,
int o,
int n)
432 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm body not visible.
 * Fixed-width 32-bit counterpart of of_atomic_int_cmpswap.
 */
445 static OF_INLINE
bool
446 of_atomic_int32_cmpswap(
volatile int32_t *_Nonnull p, int32_t o, int32_t n)
450 __asm__ __volatile__ (
/*
 * NOTE(review): fragmentary — asm body not visible.
 * Pointer-width counterpart of of_atomic_int_cmpswap: presumably stores n
 * into *p iff *p == o, returning whether the swap happened.
 */
463 static OF_INLINE
bool
464 of_atomic_ptr_cmpswap(
void *
volatile _Nullable *_Nonnull p,
465 void *_Nullable o,
void *_Nullable n)
469 __asm__ __volatile__ (
/*
 * Full memory barrier: executes `mfence` with a "memory" clobber, so both
 * the CPU (store/load ordering) and the compiler (no reordering of memory
 * accesses across the asm) are fenced.
 */
482 static OF_INLINE
void
483 of_memory_barrier(
void)
485 __asm__ __volatile__ (
486 "mfence" :::
"memory"
/*
 * Acquire barrier: the asm template is empty, so no instruction is emitted —
 * only the "memory" clobber, which acts as a compiler-level reordering
 * barrier. (Presumably sufficient on x86 because of its strong load
 * ordering — NOTE(review): confirm this is the x86-specific header.)
 */
490 static OF_INLINE
void
491 of_memory_barrier_acquire(
void)
493 __asm__ __volatile__ (
"" :::
"memory");
/*
 * Release barrier: like the acquire variant, an empty asm template with a
 * "memory" clobber — a compiler-only barrier, no CPU fence instruction
 * emitted. (Presumably relies on x86's strong store ordering —
 * NOTE(review): confirm this is the x86-specific header.)
 */
496 static OF_INLINE
void
497 of_memory_barrier_release(
void)
499 __asm__ __volatile__ (
"" :::
"memory");
502 OF_ASSUME_NONNULL_END