#ifndef GEOGRAM_BASIC_ATOMICS
#define GEOGRAM_BASIC_ATOMICS

#include <geogram/basic/common.h>
#include <geogram/basic/numeric.h>

#ifdef GEO_OS_LINUX
# if defined(GEO_OS_EMSCRIPTEN)
#  define GEO_USE_DUMMY_ATOMICS
# elif defined(GEO_OS_RASPBERRY)
#  define GEO_USE_ARM32_ATOMICS
# elif defined(GEO_OS_ANDROID)
#  define GEO_USE_ANDROID_ATOMICS
# else
#  define GEO_USE_X86_ATOMICS
# endif

#if defined(GEO_USE_DUMMY_ATOMICS)

// Emscripten build: no-op placeholders for the atomic primitives.
inline void geo_pause() {
}

inline char atomic_bittestandset_x86(volatile unsigned int*, unsigned int) {
    return 0;
}

inline char atomic_bittestandreset_x86(volatile unsigned int*, unsigned int) {
    return 0;
}

#elif defined(GEO_USE_ANDROID_ATOMICS)

/** \brief A mutex for Android */
typedef GEO::Numeric::uint32 android_mutex_t;

/** \brief Acquires a lock (Android only) */
inline void lock_mutex_android(volatile android_mutex_t* lock) {
    // Spin until the previous value was 0, i.e. until the lock was free.
    while(__sync_lock_test_and_set(lock, 1) != 0);
}

/** \brief Releases a lock (Android only) */
inline void unlock_mutex_android(volatile android_mutex_t* lock) {
    __sync_lock_release(lock);
}

/** \brief Atomically tests and sets a bit (Android only) */
inline unsigned int atomic_bitset_android(volatile unsigned int* ptr, unsigned int bit) {
    // Returns the previous value of the bit.
    return __sync_fetch_and_or(ptr, 1u << bit) & (1u << bit);
}

/** \brief Atomically tests and resets a bit (Android only) */
inline unsigned int atomic_bitreset_android(volatile unsigned int* ptr, unsigned int bit) {
    // Returns the previous value of the bit.
    return __sync_fetch_and_and(ptr, ~(1u << bit)) & (1u << bit);
}

/** \brief Issues a memory and compiler barrier (Android only) */
inline void memory_barrier_android() {
    __sync_synchronize();
}

/** \brief Waits for an event (Android only) */
inline void wait_for_event_android() {
}

/** \brief Sends an event (Android only) */
inline void send_event_android() {
}

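/*
 * Usage sketch (hypothetical, not part of the original header), assuming an
 * Android build: a minimal spinlock protecting a shared counter with the
 * functions defined above.
 *
 *   static volatile android_mutex_t counter_lock = 0;
 *   static int counter = 0;
 *
 *   void increment_counter() {
 *       lock_mutex_android(&counter_lock);   // spins until the lock is acquired
 *       ++counter;                           // critical section
 *       unlock_mutex_android(&counter_lock); // releases the lock
 *   }
 */
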
#elif defined(GEO_USE_ARM32_ATOMICS)

/** \brief A mutex for ARM processors */
typedef GEO::Numeric::uint32 arm32_mutex_t;

/** \brief Acquires a lock (ARM only) */
inline void lock_mutex_arm32(volatile arm32_mutex_t* lock) {
    arm32_mutex_t tmp;
    __asm__ __volatile__ (
        "1: ldrex %0, [%1]       \n" // tmp = *lock
        "   cmp %0, #0           \n" // is the lock free?
        "   wfene                \n" // if not, wait for an event
        "   strexeq %0, %2, [%1] \n" // try to store 1 into *lock
        "   cmpeq %0, #0         \n" // did the store succeed?
        "   bne 1b               \n" // no: retry
        "   dmb                  \n" // yes: barrier before the critical section
        : "=&r" (tmp)
        : "r" (lock), "r" (1)
        : "cc", "memory"
    );
}

/** \brief Releases a lock (ARM only) */
inline void unlock_mutex_arm32(volatile arm32_mutex_t* lock) {
    __asm__ __volatile__ (
        "   dmb          \n" // make prior accesses visible ...
        "   str %1, [%0] \n" // ... before clearing the lock
        "   dsb          \n" // wait for the store to complete ...
        "   sev          \n" // ... then signal waiting cores
        :
        : "r" (lock), "r" (0)
        : "cc", "memory"
    );
}

/** \brief Atomically tests and sets a bit (ARM only) */
inline unsigned int atomic_bitset_arm32(volatile unsigned int* ptr, unsigned int bit) {
    unsigned int tmp;
    unsigned int result;
    unsigned int OK;
    __asm__ __volatile__ (
        "1: ldrex %1, [%5]         \n" // result = *ptr
        "   orr %0, %1, %6, LSL %4 \n" // tmp = result | (1 << bit)
        "   strex %2, %0, [%5]     \n" // *ptr = tmp, OK = 0 on success
        "   teq %2, #0             \n" // did the store succeed?
        "   bne 1b                 \n" // no: retry
        "   and %1, %1, %6, LSL %4 \n" // result &= (1 << bit)
        : "=&r" (tmp), "=&r" (result), "=&r" (OK), "+m" (*ptr)
        : "r" (bit), "r" (ptr), "r" (1)
        : "cc"
    );
    return result;
}

/** \brief Atomically tests and resets a bit (ARM only) */
inline unsigned int atomic_bitreset_arm32(volatile unsigned int* ptr, unsigned int bit) {
    unsigned int tmp;
    unsigned int result;
    unsigned int OK;
    __asm__ __volatile__ (
        "1: ldrex %1, [%5]         \n" // result = *ptr
        "   bic %0, %1, %6, LSL %4 \n" // tmp = result & ~(1 << bit)
        "   strex %2, %0, [%5]     \n" // *ptr = tmp, OK = 0 on success
        "   teq %2, #0             \n" // did the store succeed?
        "   bne 1b                 \n" // no: retry
        "   and %1, %1, %6, LSL %4 \n" // result &= (1 << bit)
        : "=&r" (tmp), "=&r" (result), "=&r" (OK), "+m" (*ptr)
        : "r" (bit), "r" (ptr), "r" (1)
        : "cc"
    );
    return result;
}

/** \brief Issues a memory and compiler barrier (ARM only) */
inline void memory_barrier_arm32() {
    __asm__ __volatile__ (
        "dmb \n"
        : : : "memory"
    );
}

/** \brief Waits for an event (ARM only) */
inline void wait_for_event_arm32() {
    __asm__ __volatile__ (
        "wfe \n"
    );
}

/** \brief Sends an event (ARM only) */
inline void send_event_arm32() {
    __asm__ __volatile__ (
        "dsb \n" // make prior stores visible ...
        "sev \n" // ... before signaling the event
    );
}

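/*
 * Usage sketch (hypothetical, not part of the original header), assuming an
 * ARM32 build: atomic_bitset_arm32() returns the previous value of the bit,
 * so it can be used to claim exclusive ownership of a slot in a bitfield.
 *
 *   static volatile unsigned int slots = 0; // 32 slots, initially free
 *
 *   // Returns true if the caller claimed slot i (0..31).
 *   bool try_claim_slot(unsigned int i) {
 *       return atomic_bitset_arm32(&slots, i) == 0;
 *   }
 *
 *   void release_slot(unsigned int i) {
 *       atomic_bitreset_arm32(&slots, i);
 *   }
 */
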
#elif defined(GEO_USE_X86_ATOMICS)

# define GEO_USE_X86_PAUSE

# ifdef GEO_USE_X86_PAUSE

/** \brief Issues a processor pause (INTEL only) */
inline void geo_pause() {
    __asm__ __volatile__ (
        "pause;\n"
    );
}

# else
#  ifdef __ICC
#   define geo_pause _mm_pause
#  else
#   define geo_pause __builtin_ia32_pause
#  endif
# endif

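/*
 * Usage sketch (hypothetical, not part of the original header), assuming an
 * x86 Linux build: the test-and-set / test-and-reset functions defined below
 * return the previous value of the bit, which is enough to build a try-lock.
 *
 *   static volatile unsigned int lock_word = 0;
 *
 *   void spin_lock() {
 *       while(atomic_bittestandset_x86(&lock_word, 0) != 0) {
 *           geo_pause(); // be nice to the other hyperthread while spinning
 *       }
 *   }
 *
 *   void spin_unlock() {
 *       atomic_bittestandreset_x86(&lock_word, 0);
 *   }
 */
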
/** \brief Atomically tests and sets a bit (INTEL only) */
inline char atomic_bittestandset_x86(volatile unsigned int* ptr, unsigned int bit) {
    char out;
#if defined(__x86_64)
    __asm__ __volatile__ (
        "lock; bts %2,%1\n" // test and set the bit, previous value goes to CF
        "sbb %0,%0\n"       // out = 0xff if CF was set, else 0
        : "=r" (out), "=m" (*ptr)
        : "Ir" (bit)
        : "memory"
    );
#else
    __asm__ __volatile__ (
        "lock; bts %2,%1\n" // test and set the bit, previous value goes to CF
        "sbb %0,%0\n"       // out = 0xff if CF was set, else 0
        : "=q" (out), "=m" (*ptr)
        : "Ir" (bit)
        : "memory"
    );
#endif
    return out;
}

/** \brief Atomically tests and resets a bit (INTEL only) */
inline char atomic_bittestandreset_x86(volatile unsigned int* ptr, unsigned int bit) {
    char out;
#if defined(__x86_64)
    __asm__ __volatile__ (
        "lock; btr %2,%1\n" // test and reset the bit, previous value goes to CF
        "sbb %0,%0\n"       // out = 0xff if CF was set, else 0
        : "=r" (out), "=m" (*ptr)
        : "Ir" (bit)
        : "memory"
    );
#else
    __asm__ __volatile__ (
        "lock; btr %2,%1\n" // test and reset the bit, previous value goes to CF
        "sbb %0,%0\n"       // out = 0xff if CF was set, else 0
        : "=q" (out), "=m" (*ptr)
        : "Ir" (bit)
        : "memory"
    );
#endif
    return out;
}

#endif // GEO_USE_*_ATOMICS

#elif defined(GEO_OS_APPLE)

#include <libkern/OSAtomic.h>

#elif defined(GEO_OS_WINDOWS)

#include <windows.h>
#include <intrin.h>
#pragma intrinsic(_InterlockedCompareExchange8)
#pragma intrinsic(_InterlockedCompareExchange16)
#pragma intrinsic(_InterlockedCompareExchange)
#pragma intrinsic(_interlockedbittestandset)
#pragma intrinsic(_interlockedbittestandreset)
#pragma intrinsic(_ReadBarrier)
#pragma intrinsic(_WriteBarrier)
#pragma intrinsic(_ReadWriteBarrier)

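/*
 * Usage sketch (hypothetical, not part of the original header): under Windows
 * the same test-and-set pattern is expressed with the intrinsics declared
 * above; _interlockedbittestandset() returns the previous value of the bit.
 *
 *   static volatile long lock_word = 0;
 *
 *   void spin_lock() {
 *       while(_interlockedbittestandset(&lock_word, 0) != 0) {
 *           YieldProcessor(); // Windows counterpart of a pause
 *       }
 *   }
 *
 *   void spin_unlock() {
 *       _interlockedbittestandreset(&lock_word, 0);
 *   }
 */
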
# ifdef GEO_COMPILER_MINGW
// MinGW: provide geo_pause() as a no-op.
inline void geo_pause() {
}
# endif

#endif // GEO_OS_LINUX / GEO_OS_APPLE / GEO_OS_WINDOWS

#endif // GEOGRAM_BASIC_ATOMICS