// atomicops-internals-gcc.h
  1. // -*- Mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*-
  2. // Copyright (c) 2014, Linaro
  3. // All rights reserved.
  4. //
  5. // Redistribution and use in source and binary forms, with or without
  6. // modification, are permitted provided that the following conditions are
  7. // met:
  8. //
  9. // * Redistributions of source code must retain the above copyright
  10. // notice, this list of conditions and the following disclaimer.
  11. // * Redistributions in binary form must reproduce the above
  12. // copyright notice, this list of conditions and the following disclaimer
  13. // in the documentation and/or other materials provided with the
  14. // distribution.
  15. // * Neither the name of Google Inc. nor the names of its
  16. // contributors may be used to endorse or promote products derived from
  17. // this software without specific prior written permission.
  18. //
  19. // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  20. // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  21. // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  22. // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  23. // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  24. // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  25. // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  26. // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  27. // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  28. // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  29. // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  30. // ---
  31. //
  32. // Author: Riku Voipio, riku.voipio@linaro.org
  33. //
  34. // atomic primitives implemented with gcc atomic intrinsics:
  35. // http://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html
  36. //
  37. #ifndef BASE_ATOMICOPS_INTERNALS_GCC_GENERIC_H_
  38. #define BASE_ATOMICOPS_INTERNALS_GCC_GENERIC_H_
  39. #include <stdio.h>
  40. #include <stdlib.h>
  41. #include "base/basictypes.h"
  42. typedef int32_t Atomic32;
  43. namespace base {
  44. namespace subtle {
  45. typedef int64_t Atomic64;
  46. inline void MemoryBarrier() {
  47. __sync_synchronize();
  48. }
  49. inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
  50. Atomic32 old_value,
  51. Atomic32 new_value) {
  52. Atomic32 prev_value = old_value;
  53. __atomic_compare_exchange_n(ptr, &prev_value, new_value,
  54. 0, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  55. return prev_value;
  56. }
  57. inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
  58. Atomic32 new_value) {
  59. return __atomic_exchange_n(const_cast<Atomic32*>(ptr), new_value, __ATOMIC_RELAXED);
  60. }
  61. inline Atomic32 Acquire_AtomicExchange(volatile Atomic32* ptr,
  62. Atomic32 new_value) {
  63. return __atomic_exchange_n(const_cast<Atomic32*>(ptr), new_value, __ATOMIC_ACQUIRE);
  64. }
  65. inline Atomic32 Release_AtomicExchange(volatile Atomic32* ptr,
  66. Atomic32 new_value) {
  67. return __atomic_exchange_n(const_cast<Atomic32*>(ptr), new_value, __ATOMIC_RELEASE);
  68. }
  69. inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
  70. Atomic32 old_value,
  71. Atomic32 new_value) {
  72. Atomic32 prev_value = old_value;
  73. __atomic_compare_exchange_n(ptr, &prev_value, new_value,
  74. 0, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
  75. return prev_value;
  76. }
  77. inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
  78. Atomic32 old_value,
  79. Atomic32 new_value) {
  80. Atomic32 prev_value = old_value;
  81. __atomic_compare_exchange_n(ptr, &prev_value, new_value,
  82. 0, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  83. return prev_value;
  84. }
// Plain store through the volatile pointer with no ordering guarantee.
// NOTE(review): relies on a volatile write being a single machine store for
// an aligned Atomic32 — true on the targets this header supports.
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
*ptr = value;
}
// Store, then a full barrier: the write to *ptr is completed before any
// memory operation sequenced after this call.
inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
*ptr = value;
MemoryBarrier();
}
// Full barrier, then store: all earlier memory operations are flushed
// before the write to *ptr becomes visible.
inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
MemoryBarrier();
*ptr = value;
}
// Plain load through the volatile pointer with no ordering guarantee.
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
return *ptr;
}
// Load, then a full barrier: the read of *ptr completes before any memory
// operation sequenced after this call.
inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
Atomic32 value = *ptr;
MemoryBarrier();
return value;
}
// Full barrier, then load: earlier memory operations complete before the
// read of *ptr.
inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
MemoryBarrier();
return *ptr;
}
  108. // 64-bit versions
  109. inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
  110. Atomic64 old_value,
  111. Atomic64 new_value) {
  112. Atomic64 prev_value = old_value;
  113. __atomic_compare_exchange_n(ptr, &prev_value, new_value,
  114. 0, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  115. return prev_value;
  116. }
  117. inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
  118. Atomic64 new_value) {
  119. return __atomic_exchange_n(const_cast<Atomic64*>(ptr), new_value, __ATOMIC_RELAXED);
  120. }
  121. inline Atomic64 Acquire_AtomicExchange(volatile Atomic64* ptr,
  122. Atomic64 new_value) {
  123. return __atomic_exchange_n(const_cast<Atomic64*>(ptr), new_value, __ATOMIC_ACQUIRE);
  124. }
  125. inline Atomic64 Release_AtomicExchange(volatile Atomic64* ptr,
  126. Atomic64 new_value) {
  127. return __atomic_exchange_n(const_cast<Atomic64*>(ptr), new_value, __ATOMIC_RELEASE);
  128. }
  129. inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
  130. Atomic64 old_value,
  131. Atomic64 new_value) {
  132. Atomic64 prev_value = old_value;
  133. __atomic_compare_exchange_n(ptr, &prev_value, new_value,
  134. 0, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
  135. return prev_value;
  136. }
  137. inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
  138. Atomic64 old_value,
  139. Atomic64 new_value) {
  140. Atomic64 prev_value = old_value;
  141. __atomic_compare_exchange_n(ptr, &prev_value, new_value,
  142. 0, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  143. return prev_value;
  144. }
// Plain 64-bit store through the volatile pointer with no ordering
// guarantee.  NOTE(review): assumes an aligned 64-bit volatile write is a
// single machine store on the supported targets — confirm per platform.
inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
*ptr = value;
}
// 64-bit store, then a full barrier: the write to *ptr completes before any
// memory operation sequenced after this call.
inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
*ptr = value;
MemoryBarrier();
}
// Full barrier, then 64-bit store: all earlier memory operations are
// flushed before the write to *ptr becomes visible.
inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
MemoryBarrier();
*ptr = value;
}
// Plain 64-bit load through the volatile pointer with no ordering
// guarantee.
inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
return *ptr;
}
// 64-bit load, then a full barrier: the read of *ptr completes before any
// memory operation sequenced after this call.
inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
Atomic64 value = *ptr;
MemoryBarrier();
return value;
}
// Full barrier, then 64-bit load: earlier memory operations complete before
// the read of *ptr.
inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
MemoryBarrier();
return *ptr;
}
  168. } // namespace base::subtle
  169. } // namespace base
  170. #endif // BASE_ATOMICOPS_INTERNALS_GCC_GENERIC_H_