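/************************************************************************************

Filename    :   OVR_Atomic.h
Content     :   Atomic operations and a lightweight recursive Lock class, with
                implementations selected per platform and compiler via #ifdefs.
                Non-thread-safe fallbacks are used when OVR_ENABLE_THREADS is
                not defined.

                (The original file header comment was lost in this copy; the
                description above is reconstructed from the code below.)

************************************************************************************/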
#ifndef OVR_Atomic_h
#define OVR_Atomic_h

#include "OVR_Types.h"

#if defined(OVR_OS_WIN32)
#include <windows.h>
#else
#include <pthread.h>
#endif


namespace OVR {

template<class C> class AtomicOps;
template<class T> class AtomicInt;
template<class T> class AtomicPtr;

class Lock;
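
//-----------------------------------------------------------------------------------
// ***** AtomicOps
//
// The classes below provide atomic Exchange, ExchangeAdd, CompareAndSet, Store and
// Load operations. Each operation comes in several memory-ordering flavors, named
// by suffix:
//
//   - _NoSync:  no memory fence; only the operation itself is atomic.
//   - _Acquire: later reads/writes cannot move before the operation.
//   - _Release: earlier reads/writes complete before the operation.
//   - _Sync:    both Acquire and Release semantics (full fence).
//
// AtomicOpsRawBase supplies per-CPU fence helpers, AtomicOpsRaw_4ByteImpl and
// AtomicOpsRaw_8ByteImpl supply the raw operations for 32- and 64-bit words, and
// AtomicOps<C> wraps them for an arbitrary type C of matching size.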
struct AtomicOpsRawBase
{
#if !defined(OVR_ENABLE_THREADS) || defined(OVR_CPU_X86) || defined(OVR_OS_WIN32) || defined(OVR_OS_IPHONE)

    struct FullSync { inline FullSync() { } };
    struct AcquireSync { inline AcquireSync() { } };
    struct ReleaseSync { inline ReleaseSync() { } };

#elif defined(OVR_CPU_PPC64) || defined(OVR_CPU_PPC)
    struct FullSync { inline FullSync() { asm volatile("sync\n"); } ~FullSync() { asm volatile("isync\n"); } };
    struct AcquireSync { inline AcquireSync() { } ~AcquireSync() { asm volatile("isync\n"); } };
    struct ReleaseSync { inline ReleaseSync() { asm volatile("sync\n"); } };

#elif defined(OVR_CPU_MIPS)
    struct FullSync { inline FullSync() { asm volatile("sync\n"); } ~FullSync() { asm volatile("sync\n"); } };
    struct AcquireSync { inline AcquireSync() { } ~AcquireSync() { asm volatile("sync\n"); } };
    struct ReleaseSync { inline ReleaseSync() { asm volatile("sync\n"); } };

#elif defined(OVR_CPU_ARM)
    struct FullSync { inline FullSync() { asm volatile("dmb\n"); } ~FullSync() { asm volatile("dmb\n"); } };
    struct AcquireSync { inline AcquireSync() { } ~AcquireSync() { asm volatile("dmb\n"); } };
    struct ReleaseSync { inline ReleaseSync() { asm volatile("dmb\n"); } };

#elif defined(OVR_CC_GNU) && (__GNUC__ >= 4)

    struct FullSync { inline FullSync() { } };
    struct AcquireSync { inline AcquireSync() { } };
    struct ReleaseSync { inline ReleaseSync() { } };
#endif
};
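
//-----------------------------------------------------------------------------------
// ***** AtomicOpsRaw_4ByteImpl
//
// Raw atomic operations on 4-byte (32-bit) values. One branch below is selected per
// platform/compiler; each provides Exchange_NoSync, ExchangeAdd_NoSync and
// CompareAndSet_NoSync on the platform's native 32-bit type T.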
struct AtomicOpsRaw_4ByteImpl : public AtomicOpsRawBase
{
#if !defined(OVR_ENABLE_THREADS)

    typedef UInt32 T;

#elif defined(OVR_OS_WIN32)

    typedef LONG T;
#if defined(OVR_CC_MSVC) && (OVR_CC_MSVC < 1300)
    typedef T*              InterlockTPtr;
    typedef LPVOID          ET;
    typedef ET*             InterlockETPtr;
#else
    typedef volatile T*     InterlockTPtr;
    typedef T               ET;
    typedef InterlockTPtr   InterlockETPtr;
#endif
    inline static T    Exchange_NoSync(volatile T* p, T val)           { return InterlockedExchange((InterlockTPtr)p, val); }
    inline static T    ExchangeAdd_NoSync(volatile T* p, T val)        { return InterlockedExchangeAdd((InterlockTPtr)p, val); }
    inline static bool CompareAndSet_NoSync(volatile T* p, T c, T val) { return InterlockedCompareExchange((InterlockETPtr)p, (ET)val, (ET)c) == (ET)c; }

#elif defined(OVR_CPU_PPC64) || defined(OVR_CPU_PPC)
    typedef UInt32 T;

    static inline UInt32 Exchange_NoSync(volatile UInt32 *i, UInt32 j)
    {
        UInt32 ret;

        asm volatile("1:\n\t"
                     "lwarx %[r],0,%[i]\n\t"
                     "stwcx. %[j],0,%[i]\n\t"
                     "bne- 1b\n"
                     : "+m" (*i), [r] "=&b" (ret) : [i] "b" (i), [j] "b" (j) : "cc", "memory");

        return ret;
    }

    static inline UInt32 ExchangeAdd_NoSync(volatile UInt32 *i, UInt32 j)
    {
        UInt32 dummy, ret;

        asm volatile("1:\n\t"
                     "lwarx %[r],0,%[i]\n\t"
                     "add %[o],%[r],%[j]\n\t"
                     "stwcx. %[o],0,%[i]\n\t"
                     "bne- 1b\n"
                     : "+m" (*i), [r] "=&b" (ret), [o] "=&r" (dummy) : [i] "b" (i), [j] "b" (j) : "cc", "memory");

        return ret;
    }

    static inline bool CompareAndSet_NoSync(volatile UInt32 *i, UInt32 c, UInt32 value)
    {
        UInt32 ret;

        asm volatile("1:\n\t"
                     "lwarx %[r],0,%[i]\n\t"
                     "cmpw 0,%[r],%[cmp]\n\t"
                     "mfcr %[r]\n\t"
                     "bne- 2f\n\t"
                     "stwcx. %[val],0,%[i]\n\t"
                     "bne- 1b\n\t"
                     "2:\n"
                     : "+m" (*i), [r] "=&b" (ret) : [i] "b" (i), [cmp] "b" (c), [val] "b" (value) : "cc", "memory");

        return (ret & 0x20000000) ? 1 : 0;
    }

#elif defined(OVR_CPU_MIPS)
    typedef UInt32 T;

    static inline UInt32 Exchange_NoSync(volatile UInt32 *i, UInt32 j)
    {
        UInt32 ret;

        asm volatile("1:\n\t"
                     "ll %[r],0(%[i])\n\t"
                     "sc %[j],0(%[i])\n\t"
                     "beq %[j],$0,1b\n\t"
                     "nop \n"
                     : "+m" (*i), [r] "=&d" (ret) : [i] "d" (i), [j] "d" (j) : "cc", "memory");

        return ret;
    }

    static inline UInt32 ExchangeAdd_NoSync(volatile UInt32 *i, UInt32 j)
    {
        UInt32 ret;

        asm volatile("1:\n\t"
                     "ll %[r],0(%[i])\n\t"
                     "addu %[j],%[r],%[j]\n\t"
                     "sc %[j],0(%[i])\n\t"
                     "beq %[j],$0,1b\n\t"
                     "nop \n"
                     : "+m" (*i), [r] "=&d" (ret) : [i] "d" (i), [j] "d" (j) : "cc", "memory");

        return ret;
    }

    static inline bool CompareAndSet_NoSync(volatile UInt32 *i, UInt32 c, UInt32 value)
    {
        UInt32 ret, dummy;

        asm volatile("1:\n\t"
                     "move %[r],$0\n\t"
                     "ll %[o],0(%[i])\n\t"
                     "bne %[o],%[c],2f\n\t"
                     "move %[r],%[v]\n\t"
                     "sc %[r],0(%[i])\n\t"
                     "beq %[r],$0,1b\n\t"
                     "nop \n\t"
                     "2:\n"
                     : "+m" (*i), [r] "=&d" (ret), [o] "=&d" (dummy) : [i] "d" (i), [c] "d" (c), [v] "d" (value)
                     : "cc", "memory");

        return ret;
    }

#elif defined(OVR_CPU_ARM) && defined(OVR_CC_ARM)
    typedef UInt32 T;

    static inline UInt32 Exchange_NoSync(volatile UInt32 *i, UInt32 j)
    {
        for(;;)
        {
            T r = __ldrex(i);
            if (__strex(j, i) == 0)
                return r;
        }
    }
    static inline UInt32 ExchangeAdd_NoSync(volatile UInt32 *i, UInt32 j)
    {
        for(;;)
        {
            T r = __ldrex(i);
            if (__strex(r + j, i) == 0)
                return r;
        }
    }

    static inline bool CompareAndSet_NoSync(volatile UInt32 *i, UInt32 c, UInt32 value)
    {
        for(;;)
        {
            T r = __ldrex(i);
            if (r != c)
                return 0;
            if (__strex(value, i) == 0)
                return 1;
        }
    }

#elif defined(OVR_CPU_ARM)
    typedef UInt32 T;

    static inline UInt32 Exchange_NoSync(volatile UInt32 *i, UInt32 j)
    {
        UInt32 ret, dummy;

        asm volatile("1:\n\t"
                     "ldrex %[r],[%[i]]\n\t"
                     "strex %[t],%[j],[%[i]]\n\t"
                     "cmp %[t],#0\n\t"
                     "bne 1b\n\t"
                     : "+m" (*i), [r] "=&r" (ret), [t] "=&r" (dummy) : [i] "r" (i), [j] "r" (j) : "cc", "memory");

        return ret;
    }

    static inline UInt32 ExchangeAdd_NoSync(volatile UInt32 *i, UInt32 j)
    {
        UInt32 ret, dummy, test;

        asm volatile("1:\n\t"
                     "ldrex %[r],[%[i]]\n\t"
                     "add %[o],%[r],%[j]\n\t"
                     "strex %[t],%[o],[%[i]]\n\t"
                     "cmp %[t],#0\n\t"
                     "bne 1b\n\t"
                     : "+m" (*i), [r] "=&r" (ret), [o] "=&r" (dummy), [t] "=&r" (test) : [i] "r" (i), [j] "r" (j) : "cc", "memory");

        return ret;
    }

    static inline bool CompareAndSet_NoSync(volatile UInt32 *i, UInt32 c, UInt32 value)
    {
        UInt32 ret = 1, dummy, test;

        asm volatile("1:\n\t"
                     "ldrex %[o],[%[i]]\n\t"
                     "cmp %[o],%[c]\n\t"
                     "bne 2f\n\t"
                     "strex %[r],%[v],[%[i]]\n\t"
                     "cmp %[r],#0\n\t"
                     "bne 1b\n\t"
                     "2:\n"
                     : "+m" (*i), [r] "=&r" (ret), [o] "=&r" (dummy), [t] "=&r" (test) : [i] "r" (i), [c] "r" (c), [v] "r" (value)
                     : "cc", "memory");

        return !ret;
    }

#elif defined(OVR_CPU_X86)
    typedef UInt32 T;

    static inline UInt32 Exchange_NoSync(volatile UInt32 *i, UInt32 j)
    {
        asm volatile("xchgl %1,%[i]\n"
                     : "+m" (*i), "=q" (j) : [i] "m" (*i), "1" (j) : "cc", "memory");

        return j;
    }

    static inline UInt32 ExchangeAdd_NoSync(volatile UInt32 *i, UInt32 j)
    {
        asm volatile("lock; xaddl %1,%[i]\n"
                     : "+m" (*i), "+q" (j) : [i] "m" (*i) : "cc", "memory");

        return j;
    }

    static inline bool CompareAndSet_NoSync(volatile UInt32 *i, UInt32 c, UInt32 value)
    {
        UInt32 ret;

        asm volatile("lock; cmpxchgl %[v],%[i]\n"
                     : "+m" (*i), "=a" (ret) : [i] "m" (*i), "1" (c), [v] "q" (value) : "cc", "memory");

        return (ret == c);
    }

#elif defined(OVR_CC_GNU) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1))

    typedef UInt32 T;

    static inline T Exchange_NoSync(volatile T *i, T j)
    {
        T v;
        do {
            v = *i;
        } while (!__sync_bool_compare_and_swap(i, v, j));
        return v;
    }

    static inline T ExchangeAdd_NoSync(volatile T *i, T j)
    {
        return __sync_fetch_and_add(i, j);
    }

    static inline bool CompareAndSet_NoSync(volatile T *i, T c, T value)
    {
        return __sync_bool_compare_and_swap(i, c, value);
    }

#endif // OS
};
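
//-----------------------------------------------------------------------------------
// ***** AtomicOpsRaw_8ByteImpl
//
// Raw atomic operations on 8-byte (64-bit) values, mirroring the 4-byte versions
// above. Real implementations are only provided where 64-bit atomics are both
// needed (OVR_64BIT_POINTERS) and available.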
struct AtomicOpsRaw_8ByteImpl : public AtomicOpsRawBase
{
#if !defined(OVR_64BIT_POINTERS) || !defined(OVR_ENABLE_THREADS)

    typedef UInt64 T;

#elif defined(OVR_OS_WIN32)

    typedef LONG64 T;
    typedef volatile T* InterlockTPtr;
    inline static T    Exchange_NoSync(volatile T* p, T val)           { return InterlockedExchange64((InterlockTPtr)p, val); }
    inline static T    ExchangeAdd_NoSync(volatile T* p, T val)        { return InterlockedExchangeAdd64((InterlockTPtr)p, val); }
    inline static bool CompareAndSet_NoSync(volatile T* p, T c, T val) { return InterlockedCompareExchange64((InterlockTPtr)p, val, c) == c; }

#elif defined(OVR_CPU_PPC64)

    typedef UInt64 T;

    static inline UInt64 Exchange_NoSync(volatile UInt64 *i, UInt64 j)
    {
        UInt64 dummy, ret;

        asm volatile("1:\n\t"
                     "ldarx %[r],0,%[i]\n\t"
                     "mr %[o],%[j]\n\t"
                     "stdcx. %[o],0,%[i]\n\t"
                     "bne- 1b\n"
                     : "+m" (*i), [r] "=&b" (ret), [o] "=&r" (dummy) : [i] "b" (i), [j] "b" (j) : "cc");

        return ret;
    }

    static inline UInt64 ExchangeAdd_NoSync(volatile UInt64 *i, UInt64 j)
    {
        UInt64 dummy, ret;

        asm volatile("1:\n\t"
                     "ldarx %[r],0,%[i]\n\t"
                     "add %[o],%[r],%[j]\n\t"
                     "stdcx. %[o],0,%[i]\n\t"
                     "bne- 1b\n"
                     : "+m" (*i), [r] "=&b" (ret), [o] "=&r" (dummy) : [i] "b" (i), [j] "b" (j) : "cc");

        return ret;
    }

    static inline bool CompareAndSet_NoSync(volatile UInt64 *i, UInt64 c, UInt64 value)
    {
        UInt64 ret, dummy;

        asm volatile("1:\n\t"
                     "ldarx %[r],0,%[i]\n\t"
                     "cmpw 0,%[r],%[cmp]\n\t"
                     "mfcr %[r]\n\t"
                     "bne- 2f\n\t"
                     "stdcx. %[val],0,%[i]\n\t"
                     "bne- 1b\n\t"
                     "2:\n"
                     : "+m" (*i), [r] "=&b" (ret), [o] "=&r" (dummy) : [i] "b" (i), [cmp] "b" (c), [val] "b" (value) : "cc");

        return (ret & 0x20000000) ? 1 : 0;
    }

#elif defined(OVR_CC_GNU) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1))

    typedef UInt64 T;

    static inline T Exchange_NoSync(volatile T *i, T j)
    {
        T v;
        do {
            v = *i;
        } while (!__sync_bool_compare_and_swap(i, v, j));
        return v;
    }

    static inline T ExchangeAdd_NoSync(volatile T *i, T j)
    {
        return __sync_fetch_and_add(i, j);
    }

    static inline bool CompareAndSet_NoSync(volatile T *i, T c, T value)
    {
        return __sync_bool_compare_and_swap(i, c, value);
    }

#endif // OS
};
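
//-----------------------------------------------------------------------------------
// ***** AtomicOpsRaw_DefImpl
//
// Adds Acquire/Release/Sync variants on top of a raw implementation O by bracketing
// the corresponding *_NoSync operation with the fence helpers declared in
// AtomicOpsRawBase, and provides fenced Store_Release / Load_Acquire.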
template<class O>
struct AtomicOpsRaw_DefImpl : public O
{
    typedef typename O::T           O_T;
    typedef typename O::FullSync    O_FullSync;
    typedef typename O::AcquireSync O_AcquireSync;
    typedef typename O::ReleaseSync O_ReleaseSync;

#ifndef OVR_ENABLE_THREADS

    inline static O_T  Exchange_NoSync(volatile O_T* p, O_T val)             { O_T old = *p; *p = val; return old; }
    inline static O_T  ExchangeAdd_NoSync(volatile O_T* p, O_T val)          { O_T old = *p; *p += val; return old; }
    inline static bool CompareAndSet_NoSync(volatile O_T* p, O_T c, O_T val) { if (*p == c) { *p = val; return 1; } return 0; }

#endif

    inline static O_T  Exchange_Sync(volatile O_T* p, O_T val)                { O_FullSync    sync; OVR_UNUSED(sync); return AtomicOpsRaw_DefImpl<O>::Exchange_NoSync(p, val); }
    inline static O_T  Exchange_Release(volatile O_T* p, O_T val)             { O_ReleaseSync sync; OVR_UNUSED(sync); return AtomicOpsRaw_DefImpl<O>::Exchange_NoSync(p, val); }
    inline static O_T  Exchange_Acquire(volatile O_T* p, O_T val)             { O_AcquireSync sync; OVR_UNUSED(sync); return AtomicOpsRaw_DefImpl<O>::Exchange_NoSync(p, val); }
    inline static O_T  ExchangeAdd_Sync(volatile O_T* p, O_T val)             { O_FullSync    sync; OVR_UNUSED(sync); return AtomicOpsRaw_DefImpl<O>::ExchangeAdd_NoSync(p, val); }
    inline static O_T  ExchangeAdd_Release(volatile O_T* p, O_T val)          { O_ReleaseSync sync; OVR_UNUSED(sync); return AtomicOpsRaw_DefImpl<O>::ExchangeAdd_NoSync(p, val); }
    inline static O_T  ExchangeAdd_Acquire(volatile O_T* p, O_T val)          { O_AcquireSync sync; OVR_UNUSED(sync); return AtomicOpsRaw_DefImpl<O>::ExchangeAdd_NoSync(p, val); }
    inline static bool CompareAndSet_Sync(volatile O_T* p, O_T c, O_T val)    { O_FullSync    sync; OVR_UNUSED(sync); return AtomicOpsRaw_DefImpl<O>::CompareAndSet_NoSync(p, c, val); }
    inline static bool CompareAndSet_Release(volatile O_T* p, O_T c, O_T val) { O_ReleaseSync sync; OVR_UNUSED(sync); return AtomicOpsRaw_DefImpl<O>::CompareAndSet_NoSync(p, c, val); }
    inline static bool CompareAndSet_Acquire(volatile O_T* p, O_T c, O_T val) { O_AcquireSync sync; OVR_UNUSED(sync); return AtomicOpsRaw_DefImpl<O>::CompareAndSet_NoSync(p, c, val); }

#ifdef OVR_CPU_X86

    inline static void Store_Release(volatile O_T* p, O_T val)  { Exchange_Release(p, val); }
#else
    inline static void Store_Release(volatile O_T* p, O_T val)  { O_ReleaseSync sync; OVR_UNUSED(sync); *p = val; }
#endif
    inline static O_T  Load_Acquire(const volatile O_T* p)      { O_AcquireSync sync; OVR_UNUSED(sync); return *p; }
};


// AtomicOpsRaw is selected by operand size; operations are provided only by the
// 4- and 8-byte specializations below.
template<int size>
struct AtomicOpsRaw : public AtomicOpsRawBase { };

template<>
struct AtomicOpsRaw<4> : public AtomicOpsRaw_DefImpl<AtomicOpsRaw_4ByteImpl>
{
    // Ensure the raw type really is 4 bytes on this platform.
    AtomicOpsRaw()
    { OVR_COMPILER_ASSERT(sizeof(AtomicOpsRaw_DefImpl<AtomicOpsRaw_4ByteImpl>::T) == 4); }
};
template<>
struct AtomicOpsRaw<8> : public AtomicOpsRaw_DefImpl<AtomicOpsRaw_8ByteImpl>
{
    AtomicOpsRaw()
    { OVR_COMPILER_ASSERT(sizeof(AtomicOpsRaw_DefImpl<AtomicOpsRaw_8ByteImpl>::T) == 8); }
};
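
//-----------------------------------------------------------------------------------
// ***** AtomicOps<C>
//
// Type-safe wrapper that maps an arbitrary type C onto the AtomicOpsRaw
// implementation of matching size (4 or 8 bytes), converting values through a
// union of C and the raw integer type.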
template<class C>
class AtomicOps
{
    typedef AtomicOpsRaw<sizeof(C)>    Ops;
    typedef typename Ops::T            T;
    typedef volatile typename Ops::T*  PT;
    // Used to convert a C value to/from the raw integer type T of the same size.
    union C2T_union { C c; T t; };

public:
    inline static C    Exchange_Sync(volatile C* p, C val)              { C2T_union u; u.c = val; u.t = Ops::Exchange_Sync((PT)p, u.t); return u.c; }
    inline static C    Exchange_Release(volatile C* p, C val)           { C2T_union u; u.c = val; u.t = Ops::Exchange_Release((PT)p, u.t); return u.c; }
    inline static C    Exchange_Acquire(volatile C* p, C val)           { C2T_union u; u.c = val; u.t = Ops::Exchange_Acquire((PT)p, u.t); return u.c; }
    inline static C    Exchange_NoSync(volatile C* p, C val)            { C2T_union u; u.c = val; u.t = Ops::Exchange_NoSync((PT)p, u.t); return u.c; }
    inline static C    ExchangeAdd_Sync(volatile C* p, C val)           { C2T_union u; u.c = val; u.t = Ops::ExchangeAdd_Sync((PT)p, u.t); return u.c; }
    inline static C    ExchangeAdd_Release(volatile C* p, C val)        { C2T_union u; u.c = val; u.t = Ops::ExchangeAdd_Release((PT)p, u.t); return u.c; }
    inline static C    ExchangeAdd_Acquire(volatile C* p, C val)        { C2T_union u; u.c = val; u.t = Ops::ExchangeAdd_Acquire((PT)p, u.t); return u.c; }
    inline static C    ExchangeAdd_NoSync(volatile C* p, C val)         { C2T_union u; u.c = val; u.t = Ops::ExchangeAdd_NoSync((PT)p, u.t); return u.c; }
    inline static bool CompareAndSet_Sync(volatile C* p, C c, C val)    { C2T_union u, cu; u.c = val; cu.c = c; return Ops::CompareAndSet_Sync((PT)p, cu.t, u.t); }
    inline static bool CompareAndSet_Release(volatile C* p, C c, C val) { C2T_union u, cu; u.c = val; cu.c = c; return Ops::CompareAndSet_Release((PT)p, cu.t, u.t); }
    inline static bool CompareAndSet_Acquire(volatile C* p, C c, C val) { C2T_union u, cu; u.c = val; cu.c = c; return Ops::CompareAndSet_Acquire((PT)p, cu.t, u.t); }
    inline static bool CompareAndSet_NoSync(volatile C* p, C c, C val)  { C2T_union u, cu; u.c = val; cu.c = c; return Ops::CompareAndSet_NoSync((PT)p, cu.t, u.t); }

    inline static void Store_Release(volatile C* p, C val)  { C2T_union u; u.c = val; Ops::Store_Release((PT)p, u.t); }
    inline static C    Load_Acquire(const volatile C* p)    { C2T_union u; u.t = Ops::Load_Acquire((PT)p); return u.c; }
};
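
//-----------------------------------------------------------------------------------
// ***** AtomicValueBase
//
// Base class that wraps a volatile value of type T and exposes the AtomicOps
// operations on it. Used by AtomicPtr and AtomicInt below.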
template<class T>
class AtomicValueBase
{
protected:
    typedef AtomicOps<T> Ops;
public:

    volatile T  Value;

    inline AtomicValueBase()               { }
    explicit inline AtomicValueBase(T val) { Ops::Store_Release(&Value, val); }

    // Plain read; no acquire fence is applied here.
    inline operator T() const { return Value; }

    // Standard atomic inlines.
    inline T    Exchange_Sync(T val)              { return Ops::Exchange_Sync(&Value, val); }
    inline T    Exchange_Release(T val)           { return Ops::Exchange_Release(&Value, val); }
    inline T    Exchange_Acquire(T val)           { return Ops::Exchange_Acquire(&Value, val); }
    inline T    Exchange_NoSync(T val)            { return Ops::Exchange_NoSync(&Value, val); }
    inline bool CompareAndSet_Sync(T c, T val)    { return Ops::CompareAndSet_Sync(&Value, c, val); }
    inline bool CompareAndSet_Release(T c, T val) { return Ops::CompareAndSet_Release(&Value, c, val); }
    inline bool CompareAndSet_Acquire(T c, T val) { return Ops::CompareAndSet_Acquire(&Value, c, val); }
    inline bool CompareAndSet_NoSync(T c, T val)  { return Ops::CompareAndSet_NoSync(&Value, c, val); }

    // Load & store with explicit fences.
    inline void Store_Release(T val)  { Ops::Store_Release(&Value, val); }
    inline T    Load_Acquire() const  { return Ops::Load_Acquire(&Value); }
};
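
//-----------------------------------------------------------------------------------
// ***** AtomicPtr<T>
//
// Atomic pointer to T. Provides atomic pointer arithmetic (in units of T) plus the
// dereference and conversion operators expected of a pointer-like type.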
template<class T>
class AtomicPtr : public AtomicValueBase<T*>
{
    typedef typename AtomicValueBase<T*>::Ops Ops;

public:
    // The pointer is initialized to 0 by default.
    inline AtomicPtr() : AtomicValueBase<T*>()                    { this->Value = 0; }
    explicit inline AtomicPtr(T* val) : AtomicValueBase<T*>(val)  { }

    // Pointer access; reads are done with acquire semantics.
    inline T* operator -> () const  { return this->Load_Acquire(); }
    inline operator T* () const     { return this->Load_Acquire(); }

    // Atomic pointer increments; 'incr' is scaled by sizeof(T) through pointer
    // arithmetic on a null base pointer.
    template<class I>
    inline T* ExchangeAdd_Sync(I incr)     { return Ops::ExchangeAdd_Sync(&this->Value, ((T*)0) + incr); }
    template<class I>
    inline T* ExchangeAdd_Release(I incr)  { return Ops::ExchangeAdd_Release(&this->Value, ((T*)0) + incr); }
    template<class I>
    inline T* ExchangeAdd_Acquire(I incr)  { return Ops::ExchangeAdd_Acquire(&this->Value, ((T*)0) + incr); }
    template<class I>
    inline T* ExchangeAdd_NoSync(I incr)   { return Ops::ExchangeAdd_NoSync(&this->Value, ((T*)0) + incr); }

    // Assignment uses Store_Release; arithmetic operators use Sync semantics.
    inline T* operator = (T* val)  { this->Store_Release(val); return val; }

    template<class I>
    inline T* operator += (I val)  { return ExchangeAdd_Sync(val) + val; }
    template<class I>
    inline T* operator -= (I val)  { return operator += (-val); }

    inline T* operator ++ ()       { return ExchangeAdd_Sync(1) + 1; }
    inline T* operator -- ()       { return ExchangeAdd_Sync(-1) - 1; }
    inline T* operator ++ (int)    { return ExchangeAdd_Sync(1); }
    inline T* operator -- (int)    { return ExchangeAdd_Sync(-1); }
};
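
//-----------------------------------------------------------------------------------
// ***** AtomicInt<T>
//
// Atomic integer of type T. Adds arithmetic and bit-wise operators implemented
// either with ExchangeAdd or with a CompareAndSet retry loop.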
template<class T>
class AtomicInt : public AtomicValueBase<T>
{
    typedef typename AtomicValueBase<T>::Ops Ops;

public:
    inline AtomicInt() : AtomicValueBase<T>()                  { }
    explicit inline AtomicInt(T val) : AtomicValueBase<T>(val) { }

    // Atomic adds with the different fence semantics.
    inline T    ExchangeAdd_Sync(T val)     { return Ops::ExchangeAdd_Sync(&this->Value, val); }
    inline T    ExchangeAdd_Release(T val)  { return Ops::ExchangeAdd_Release(&this->Value, val); }
    inline T    ExchangeAdd_Acquire(T val)  { return Ops::ExchangeAdd_Acquire(&this->Value, val); }
    inline T    ExchangeAdd_NoSync(T val)   { return Ops::ExchangeAdd_NoSync(&this->Value, val); }

    inline void Increment_Sync()            { ExchangeAdd_Sync((T)1); }
    inline void Increment_Release()         { ExchangeAdd_Release((T)1); }
    inline void Increment_Acquire()         { ExchangeAdd_Acquire((T)1); }
    inline void Increment_NoSync()          { ExchangeAdd_NoSync((T)1); }

    // Standard operators; assignment uses Store_Release, the rest use Sync semantics.
    inline T    operator = (T val)   { this->Store_Release(val); return val; }
    inline T    operator += (T val)  { return ExchangeAdd_Sync(val) + val; }
    inline T    operator -= (T val)  { return ExchangeAdd_Sync(0 - val) - val; }

    inline T    operator ++ ()       { return ExchangeAdd_Sync((T)1) + 1; }
    inline T    operator -- ()       { return ExchangeAdd_Sync(((T)0) - 1) - 1; }
    inline T    operator ++ (int)    { return ExchangeAdd_Sync((T)1); }
    inline T    operator -- (int)    { return ExchangeAdd_Sync(((T)0) - 1); }

    // More complex operations are implemented with a CompareAndSet retry loop.
    T operator &= (T arg)
    {
        T comp, newVal;
        do {
            comp   = this->Value;
            newVal = comp & arg;
        } while(!this->CompareAndSet_Sync(comp, newVal));
        return newVal;
    }

    T operator |= (T arg)
    {
        T comp, newVal;
        do {
            comp   = this->Value;
            newVal = comp | arg;
        } while(!this->CompareAndSet_Sync(comp, newVal));
        return newVal;
    }

    T operator ^= (T arg)
    {
        T comp, newVal;
        do {
            comp   = this->Value;
            newVal = comp ^ arg;
        } while(!this->CompareAndSet_Sync(comp, newVal));
        return newVal;
    }

    T operator *= (T arg)
    {
        T comp, newVal;
        do {
            comp   = this->Value;
            newVal = comp * arg;
        } while(!this->CompareAndSet_Sync(comp, newVal));
        return newVal;
    }

    T operator /= (T arg)
    {
        T comp, newVal;
        do {
            comp   = this->Value;
            newVal = comp / arg;
        } while(!this->CompareAndSet_Sync(comp, newVal));
        return newVal;
    }

    T operator >>= (unsigned bits)
    {
        T comp, newVal;
        do {
            comp   = this->Value;
            newVal = comp >> bits;
        } while(!this->CompareAndSet_Sync(comp, newVal));
        return newVal;
    }

    T operator <<= (unsigned bits)
    {
        T comp, newVal;
        do {
            comp   = this->Value;
            newVal = comp << bits;
        } while(!this->CompareAndSet_Sync(comp, newVal));
        return newVal;
    }
};
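
// Illustrative sketch (not from the original header): using AtomicInt as a
// thread-safe reference count. RefCount and pObject are hypothetical names.
//
//     OVR::AtomicInt<UInt32> RefCount(1);
//
//     void AddRef()  { RefCount.Increment_NoSync(); }
//     void Release()
//     {
//         // ExchangeAdd_Sync returns the value before the add, so a result of 1
//         // means this call released the last reference.
//         if (RefCount.ExchangeAdd_Sync((UInt32)-1) == 1)
//             delete pObject;
//     }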
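
//-----------------------------------------------------------------------------------
// ***** Lock
//
// A lightweight mutual-exclusion lock. It is recursive on both the Win32
// (CRITICAL_SECTION) and pthread (PTHREAD_MUTEX_RECURSIVE) implementations, and
// compiles to a no-op when OVR_ENABLE_THREADS is not defined. The nested Locker
// class provides scoped lock/unlock.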
class Lock
{
    // operator delete is private and does nothing; Lock objects are not meant to
    // be individually heap-allocated and deleted.
    void    operator delete(void*) {}

#if !defined(OVR_ENABLE_THREADS)

public:
    // With no thread support, locking does nothing.
    inline Lock() { }
    inline Lock(unsigned) { }
    inline ~Lock() { }
    inline void DoLock() { }
    inline void Unlock() { }

#elif defined(OVR_OS_WIN32)

    CRITICAL_SECTION cs;
public:
    Lock(unsigned spinCount = 0);
    ~Lock();

    inline void DoLock()  { ::EnterCriticalSection(&cs); }
    inline void Unlock()  { ::LeaveCriticalSection(&cs); }

#else
    pthread_mutex_t mutex;

public:
    static pthread_mutexattr_t RecursiveAttr;
    static bool                RecursiveAttrInit;

    Lock (unsigned dummy = 0)
    {
        if (!RecursiveAttrInit)
        {
            pthread_mutexattr_init(&RecursiveAttr);
            pthread_mutexattr_settype(&RecursiveAttr, PTHREAD_MUTEX_RECURSIVE);
            RecursiveAttrInit = 1;
        }
        pthread_mutex_init(&mutex, &RecursiveAttr);
    }
    ~Lock ()             { pthread_mutex_destroy(&mutex); }
    inline void DoLock() { pthread_mutex_lock(&mutex); }
    inline void Unlock() { pthread_mutex_unlock(&mutex); }

#endif // OVR_ENABLE_THREADS


public:
    // Locker acquires the Lock on construction and releases it on destruction,
    // so a critical section can be expressed as a block scope.
    class Locker
    {
    public:
        Lock* pLock;
        inline Locker(Lock* plock)
        { pLock = plock; pLock->DoLock(); }
        inline ~Locker()
        { pLock->Unlock(); }
    };
};
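
// Illustrative sketch (not from the original header); ListLock, List and Item are
// hypothetical names:
//
//     Lock ListLock;
//
//     void AddItem(Item* item)
//     {
//         Lock::Locker scope(&ListLock);   // locks here...
//         List.PushBack(item);
//     }                                    // ...unlocks when 'scope' is destroyed.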


} // namespace OVR

#endif