
/src/google/protobuf/stubs/atomicops_internals_tsan.h

https://gitlab.com/github-cloud-corporation/protobuf
// Protocol Buffers - Google's data interchange format
// Copyright 2013 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// This file is an internal atomic implementation for compiler-based
// ThreadSanitizer (http://clang.llvm.org/docs/ThreadSanitizer.html).
// Use atomicops.h instead.

#ifndef GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_TSAN_H_
#define GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_TSAN_H_

#define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")

#include <sanitizer/tsan_interface_atomic.h>

namespace google {
namespace protobuf {
namespace internal {
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_AtomicExchange(volatile Atomic32 *ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_acquire);
}

inline Atomic32 Release_AtomicExchange(volatile Atomic32 *ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_release);
}

inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
                                          Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
                                        Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32 *ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}
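// Usage note: Release_Store and Acquire_Load above form the usual
// publish/consume pair.  A producer that fills a structure and then calls
// Release_Store(&ready, 1) guarantees that a consumer which spins until
// Acquire_Load(&ready) returns 1 also observes the structure's contents.
// (Illustrative commentary added by the editor; the flag name "ready" is
// hypothetical and not part of this header.)

// 64-bit versions of the operations above.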
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
                                         Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_AtomicExchange(volatile Atomic64 *ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire);
}

inline Atomic64 Release_AtomicExchange(volatile Atomic64 *ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
                                          Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
                                        Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline void NoBarrier_Store(volatile Atomic64 *ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64 *ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

inline void MemoryBarrier() {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

}  // namespace internal
}  // namespace protobuf
}  // namespace google

#undef ATOMICOPS_COMPILER_BARRIER

#endif  // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_TSAN_H_
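
For context, here is a minimal usage sketch of the API defined above. It is hypothetical caller code, not part of protobuf: as the comment at the top of the file says, callers include atomicops.h, which forwards to this implementation in ThreadSanitizer builds. The RefCounted type and the Ref/Unref helpers are assumptions made for the example.

#include <google/protobuf/stubs/atomicops.h>

namespace example {

using google::protobuf::internal::Atomic32;
using google::protobuf::internal::Barrier_AtomicIncrement;
using google::protobuf::internal::NoBarrier_AtomicIncrement;

// Hypothetical intrusive reference count built on the increment ops above.
struct RefCounted {
  Atomic32 refs;  // starts at 1 for the creating thread
};

inline void Ref(RefCounted* r) {
  // Taking an additional reference needs no ordering, so the relaxed
  // (NoBarrier_) increment is sufficient.
  NoBarrier_AtomicIncrement(&r->refs, 1);
}

inline bool Unref(RefCounted* r) {
  // The final decrement must order this thread's earlier writes before the
  // destruction the caller performs, so use the acq_rel (Barrier_) increment.
  // Both increment functions return the new value, so the last reference is
  // gone exactly when the result is zero.
  return Barrier_AtomicIncrement(&r->refs, -1) == 0;
}

}  // namespace example

A caller would then write something like "if (Unref(obj)) delete obj;". The choice of the Barrier_ variant for the last decrement mirrors the standard shared-pointer pattern: relaxed increments are safe because holding a reference already proves the object is alive, while the destroying decrement needs acquire/release ordering.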