#pragma once

#include "c-api/il2cpp-config-platforms.h"
#include <stdint.h>
#include "utils/NonCopyable.h"
#include "Baselib.h"
#include "C/Baselib_Atomic_TypeSafe.h"

namespace il2cpp
{
namespace os
{
    class Atomic : public il2cpp::utils::NonCopyable
    {
    public:
        // All 32bit atomics must be performed on 4-byte aligned addresses. All 64bit atomics must be
        // performed on 8-byte aligned addresses.
        // Add and Add64 return the *result* of the addition, not the old value! (i.e. they work like
        // InterlockedAdd and __sync_add_and_fetch).
        static inline void FullMemoryBarrier()
        {
            Baselib_atomic_thread_fence_seq_cst();
        }
        static inline int32_t Add(int32_t* location1, int32_t value)
        {
            int32_t result = Baselib_atomic_fetch_add_32_seq_cst(location1, value) + value;
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }

        static inline uint32_t Add(uint32_t* location1, uint32_t value)
        {
            return (uint32_t)Add((int32_t*)location1, (int32_t)value);
        }

#if IL2CPP_ENABLE_INTERLOCKED_64_REQUIRED_ALIGNMENT
        static inline int64_t Add64(int64_t* location1, int64_t value)
        {
            int64_t result = Baselib_atomic_fetch_add_64_seq_cst(location1, value) + value;
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }
#endif

        template<typename T>
        static inline T* CompareExchangePointer(T** dest, T* newValue, T* oldValue)
        {
            Baselib_atomic_compare_exchange_strong_ptr_seq_cst_seq_cst((intptr_t*)dest, (intptr_t*)&oldValue, (intptr_t)newValue);
            Baselib_atomic_thread_fence_seq_cst();
            return static_cast<T*>(oldValue);
        }

        template<typename T>
        static inline T* ExchangePointer(T** dest, const T* newValue)
        {
            T* result = (T*)Baselib_atomic_exchange_ptr_seq_cst((intptr_t*)dest, (intptr_t)newValue);
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }
        // Sequentially consistent 64-bit read, implemented as an atomic fetch_add of zero.
        static inline int64_t Read64(int64_t* addr)
        {
            int64_t result = Baselib_atomic_fetch_add_64_seq_cst(addr, 0);
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }

        static inline uint64_t Read64(uint64_t* addr)
        {
            return (uint64_t)Read64((int64_t*)addr);
        }

        static inline int32_t LoadRelaxed(const int32_t* addr)
        {
            return Baselib_atomic_load_32_relaxed(addr);
        }

        template<typename T>
        static inline T* LoadPointerRelaxed(const T* const * addr)
        {
            return (T*)Baselib_atomic_load_ptr_relaxed((const intptr_t*)addr);
        }

        template<typename T>
        static inline T* ReadPointer(T** pointer)
        {
            return (T*)Baselib_atomic_load_ptr_relaxed((intptr_t*)pointer);
        }

        // Publish with release semantics: the fence orders all preceding writes before the pointer
        // store, so a reader that observes the new pointer also observes the data it refers to.
        template<typename T>
        static inline void PublishPointer(T** pointer, T* value)
        {
            Baselib_atomic_thread_fence_release();
            Baselib_atomic_store_ptr_relaxed((intptr_t*)pointer, (intptr_t)value);
        }
        static inline int32_t Increment(int32_t* value)
        {
            int32_t result = Baselib_atomic_fetch_add_32_seq_cst(value, 1) + 1;
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }

        static inline uint32_t Increment(uint32_t* value)
        {
            return (uint32_t)Increment((int32_t*)value);
        }

#if IL2CPP_ENABLE_INTERLOCKED_64_REQUIRED_ALIGNMENT
        static inline int64_t Increment64(int64_t* value)
        {
            int64_t result = Baselib_atomic_fetch_add_64_seq_cst(value, 1) + 1;
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }

        static inline uint64_t Increment64(uint64_t* value)
        {
            return (uint64_t)Increment64((int64_t*)value);
        }
#endif

        static inline int32_t Decrement(int32_t* value)
        {
            int32_t result = Baselib_atomic_fetch_add_32_seq_cst(value, -1) - 1;
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }

        static inline uint32_t Decrement(uint32_t* value)
        {
            return (uint32_t)Decrement((int32_t*)value);
        }

#if IL2CPP_ENABLE_INTERLOCKED_64_REQUIRED_ALIGNMENT
        static inline int64_t Decrement64(int64_t* value)
        {
            int64_t result = Baselib_atomic_fetch_add_64_seq_cst(value, -1) - 1;
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }

        static inline uint64_t Decrement64(uint64_t* value)
        {
            return (uint64_t)Decrement64((int64_t*)value);
        }
#endif
        // The Baselib compare-exchange writes the value previously stored at dest back into the
        // comparand, so these overloads return the original value (Interlocked-style semantics).
        static inline int32_t CompareExchange(int32_t* dest, int32_t exchange, int32_t comparand)
        {
            Baselib_atomic_compare_exchange_strong_32_seq_cst_seq_cst(dest, &comparand, exchange);
            Baselib_atomic_thread_fence_seq_cst();
            return comparand;
        }

        static inline uint32_t CompareExchange(uint32_t* value, uint32_t newValue, uint32_t oldValue)
        {
            return (uint32_t)CompareExchange((int32_t*)value, newValue, oldValue);
        }

        static inline int64_t CompareExchange64(int64_t* dest, int64_t exchange, int64_t comparand)
        {
            Baselib_atomic_compare_exchange_strong_64_seq_cst_seq_cst(dest, &comparand, exchange);
            Baselib_atomic_thread_fence_seq_cst();
            return comparand;
        }

        static inline uint64_t CompareExchange64(uint64_t* value, uint64_t newValue, uint64_t oldValue)
        {
            return (uint64_t)CompareExchange64((int64_t*)value, newValue, oldValue);
        }
        static inline bool Exchange(bool* dest, bool exchange)
        {
            return (bool)Baselib_atomic_exchange_8_seq_cst((int8_t*)dest, exchange);
        }

        static inline int32_t Exchange(int32_t* dest, int32_t exchange)
        {
            int32_t result = Baselib_atomic_exchange_32_seq_cst(dest, exchange);
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }

        static inline uint32_t Exchange(uint32_t* value, uint32_t newValue)
        {
            return (uint32_t)Exchange((int32_t*)value, newValue);
        }

#if IL2CPP_ENABLE_INTERLOCKED_64_REQUIRED_ALIGNMENT
        static inline int64_t Exchange64(int64_t* dest, int64_t exchange)
        {
            int64_t result = Baselib_atomic_exchange_64_seq_cst(dest, exchange);
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }

        static inline uint64_t Exchange64(uint64_t* value, uint64_t newValue)
        {
            return (uint64_t)Exchange64((int64_t*)value, newValue);
        }
#endif

        static inline intptr_t ReadPtrVal(intptr_t* addr)
        {
            intptr_t result = Baselib_atomic_fetch_add_ptr_seq_cst(addr, 0);
            Baselib_atomic_thread_fence_seq_cst();
            return result;
        }
    };
}
}
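
A minimal sketch of how caller code might use these wrappers, assuming the header is reachable as "os/Atomic.h" inside the il2cpp source tree; the Payload type, the globals, and Example() are hypothetical illustrations, not part of il2cpp. It exercises the Interlocked-style return values (Increment returns the new count, CompareExchange returns the previous value) and the PublishPointer/ReadPointer pairing for handing a fully initialized object to other threads.

// Hypothetical caller code, not part of the header above.
#include "os/Atomic.h"   // assumed include path within the il2cpp source tree
#include <stdint.h>

struct Payload { int32_t data; };

static int32_t s_RefCount = 0;
static Payload* s_Shared = nullptr;

static void Example()
{
    // Increment returns the new value, like InterlockedIncrement / __sync_add_and_fetch.
    int32_t refs = il2cpp::os::Atomic::Increment(&s_RefCount);

    // CompareExchange returns the previous value; the store happened only if it equals the comparand.
    int32_t previous = il2cpp::os::Atomic::CompareExchange(&s_RefCount, refs + 1, refs);
    bool stored = (previous == refs);

    // Publish a fully initialized object; readers pair this with ReadPointer.
    Payload* payload = new Payload();
    payload->data = 42;
    il2cpp::os::Atomic::PublishPointer(&s_Shared, payload);
    Payload* observed = il2cpp::os::Atomic::ReadPointer(&s_Shared);

    (void)stored;
    (void)observed;
}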