/*******************************************************************************
The content of this file includes portions of the AUDIOKINETIC Wwise Technology
released in source code form as part of the SDK installer package.

Commercial License Usage

Licensees holding valid commercial licenses to the AUDIOKINETIC Wwise Technology
may use this file in accordance with the end user license agreement provided
with the software or, alternatively, in accordance with the terms contained in a
written agreement between you and Audiokinetic Inc.

Apache License Usage

Alternatively, this file may be used under the Apache License, Version 2.0 (the
"Apache License"); you may not use this file except in compliance with the
Apache License. You may obtain a copy of the Apache License at
http://www.apache.org/licenses/LICENSE-2.0.

Unless required by applicable law or agreed to in writing, software distributed
under the Apache License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
OR CONDITIONS OF ANY KIND, either express or implied. See the Apache License for
the specific language governing permissions and limitations under the License.

Copyright (c) 2023 Audiokinetic Inc.
*******************************************************************************/
// AkAtomic.h
#pragma once

#include <Windows.h>
  23. // Sleep of 1 is as close as we can get on Microsoft platforms
  24. // SwitchToThread() is liable to cause the current thread to be unscheduled for 10-30ms
  25. #define AkThreadYield() Sleep(1);
#ifdef __cplusplus
extern "C" {
#endif
  29. typedef volatile long AkAtomic32;
  30. typedef volatile long long AkAtomic64;
  31. typedef volatile void* AkAtomicPtr;
  32. #if defined( _M_ARM )
  33. #define AK_ATOMIC_FENCE_FULL_BARRIER() __dmb( _ARM_BARRIER_ISH )
  34. #elif defined( _M_ARM64 )
  35. #define AK_ATOMIC_FENCE_FULL_BARRIER() __dmb( _ARM64_BARRIER_ISH )
  36. #else
  37. #define AK_ATOMIC_FENCE_FULL_BARRIER() MemoryBarrier();
  38. #endif
  39. #if defined( _M_ARM ) || defined( _M_ARM64 )
  40. __forceinline long AkAtomicLoad32( AkAtomic32* pSrc ) { long tmp; tmp = *pSrc; AK_ATOMIC_FENCE_FULL_BARRIER(); return tmp; }
  41. #else
  42. __forceinline long AkAtomicLoad32( AkAtomic32* pSrc ) { return *pSrc; }
  43. #endif
  44. __forceinline void AkAtomicStore32( AkAtomic32* pDest, long value ) { InterlockedExchange(pDest, value); }
  45. __forceinline long AkAtomicInc32( AkAtomic32* pValue ) { return InterlockedExchangeAdd( pValue, 1 ) + 1; }
  46. __forceinline long AkAtomicDec32( AkAtomic32* pValue ) { return InterlockedExchangeAdd( pValue, -1 ) - 1; }
  47. __forceinline long AkAtomicExchange32( AkAtomic32* pDest, long value ) { return InterlockedExchange( pDest, value ); }
  48. __forceinline long AkAtomicAdd32( AkAtomic32* pDest, long value ) { return InterlockedExchangeAdd( pDest, value ) + value; }
  49. __forceinline long AkAtomicSub32( AkAtomic32* pDest, long value ) { return InterlockedExchangeAdd( pDest, -value ) - value; }
  50. __forceinline long AkAtomicAnd32( AkAtomic32* pDest, long value ) { return InterlockedAnd(pDest, value) & value; }
  51. __forceinline long AkAtomicOr32( AkAtomic32* pDest, long value ) { return InterlockedOr(pDest, value) | value; }
  52. __forceinline int AkAtomicCas32( AkAtomic32* pDest, long proposed, long expected ) { return InterlockedCompareExchange( pDest, proposed, expected ) == expected ? 1 : 0; }
  53. #ifdef _WIN64
  54. #if defined( _M_ARM ) || defined( _M_ARM64 )
  55. __forceinline long long AkAtomicLoad64( AkAtomic64* pSrc ) { long long tmp; tmp = *pSrc; AK_ATOMIC_FENCE_FULL_BARRIER(); return tmp; }
  56. #else
  57. __forceinline long long AkAtomicLoad64( AkAtomic64* pSrc ) { return *pSrc; }
  58. #endif
  59. #else
  60. __forceinline long long AkAtomicLoad64( AkAtomic64* pSrc ) { return InterlockedCompareExchange64( pSrc, 0, 0 ); }
  61. #endif
  62. __forceinline void AkAtomicStore64( AkAtomic64* pDest, long long value ) { InterlockedExchange64(pDest, value); }
  63. __forceinline long long AkAtomicInc64( AkAtomic64* pValue ) { return InterlockedExchangeAdd64( pValue, 1 ) + 1; }
  64. __forceinline long long AkAtomicDec64( AkAtomic64* pValue ) { return InterlockedExchangeAdd64( pValue, - 1 ) - 1; }
  65. __forceinline long long AkAtomicExchange64( AkAtomic64* pDest, long long value ) { return InterlockedExchange64( pDest, value ); }
  66. __forceinline long long AkAtomicAdd64( AkAtomic64* pDest, long long value ) { return InterlockedExchangeAdd64( pDest, value ) + value; }
  67. __forceinline long long AkAtomicSub64( AkAtomic64* pDest, long long value ) { return InterlockedExchangeAdd64( pDest, -value ) - value; }
  68. __forceinline long long AkAtomicAnd64( AkAtomic64* pDest, long long value ) { return InterlockedAnd64(pDest, value) & value; }
  69. __forceinline long long AkAtomicOr64( AkAtomic64* pDest, long long value ) { return InterlockedOr64(pDest, value) | value; }
  70. __forceinline int AkAtomicCas64( AkAtomic64* pDest, long long proposed, long long expected ) { return InterlockedCompareExchange64( pDest, proposed, expected ) == expected ? 1 : 0; }
  71. #if defined( _M_ARM ) || defined( _M_ARM64 )
  72. __forceinline void* AkAtomicLoadPtr( AkAtomicPtr* pSrc ) { void* tmp; tmp = ( void* )*pSrc; AK_ATOMIC_FENCE_FULL_BARRIER(); return tmp; }
  73. #else
  74. __forceinline void* AkAtomicLoadPtr( AkAtomicPtr* pSrc ) { return ( void* )*pSrc; }
  75. #endif
  76. #ifdef _WIN64
  77. __forceinline void AkAtomicStorePtr( AkAtomicPtr* pDest, void* value ) { InterlockedExchangePointer( ( volatile PVOID* )pDest, value ); }
  78. __forceinline void* AkAtomicExchangePtr( AkAtomicPtr* pDest, void* value ) { return InterlockedExchangePointer( ( volatile PVOID* )pDest, value ); }
  79. __forceinline int AkAtomicCasPtr( AkAtomicPtr* pDest, void* proposed, void* expected ) { return InterlockedCompareExchangePointer( ( volatile PVOID* )pDest, proposed, expected ) == expected ? 1 : 0; }
  80. #else
  81. __forceinline void AkAtomicStorePtr( AkAtomicPtr* pDest, void* value ) { InterlockedExchangePointer( ( void** )pDest, value ); }
  82. __forceinline void* AkAtomicExchangePtr(AkAtomicPtr* pDest, void* value) { return InterlockedExchangePointer( ( void** )pDest, value ); }
  83. __forceinline int AkAtomicCasPtr(AkAtomicPtr* pDest, void* proposed, void* expected) { return InterlockedCompareExchangePointer( ( void** )pDest, proposed, expected ) == expected ? 1 : 0; }
  84. #endif
  85. #if defined(_MSC_VER)
  86. __forceinline void AkSpinHint(void)
  87. {
  88. #if defined(_M_IX86) || defined(_M_X64)
  89. _mm_pause();
  90. #elif defined( _M_ARM ) || defined( _M_ARM64 )
  91. __yield();
  92. #else
  93. #error Unsupported platform for AkSpinHint
  94. #endif
  95. }
  96. #endif
#ifdef __cplusplus
}
#endif