cmpxchg_64.h

#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

//#include <asm/alternative.h> /* Provides LOCK_PREFIX */
/*
 * Including the definition of LOCK_PREFIX directly here.
 */
#define LOCK_PREFIX "\n\tlock; "

#define __xg(x) ((volatile char *)(x))

/*
static inline void set_64bit(volatile u64 *ptr, u64 val)
{
        *ptr = val;
}
*/

extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 * but generally the primitive is invalid, *ptr is output argument. --ANK
 */
#define __xchg(x, ptr, size) \
({ \
        __typeof(*(ptr)) __x = (x); \
        switch (size) { \
        case 1: \
                asm volatile("lock; xchgb %b0,%1" \
                             : "=q" (__x), "+m" (*__xg(ptr)) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        case 2: \
                asm volatile("lock; xchgw %w0,%1" \
                             : "=r" (__x), "+m" (*__xg(ptr)) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        case 4: \
                asm volatile("lock; xchgl %k0,%1" \
                             : "=r" (__x), "+m" (*__xg(ptr)) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        case 8: \
                asm volatile("lock; xchgq %0,%1" \
                             : "=r" (__x), "+m" (*__xg(ptr)) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        default: \
                __xchg_wrong_size(); \
        } \
        __x; \
})

#define xchg(ptr, v) \
        __xchg((v), (ptr), sizeof(*ptr))
#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size) \
({ \
        __typeof__(*(ptr)) __ret; \
        __typeof__(*(ptr)) __old = (old); \
        __typeof__(*(ptr)) __new = (new); \
        switch (size) { \
        case 1: \
                asm volatile(LOCK_PREFIX "cmpxchgb %b2,%1" \
                             : "=a" (__ret), "+m" (*__xg(ptr)) \
                             : "q" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        case 2: \
                asm volatile(LOCK_PREFIX "cmpxchgw %w2,%1" \
                             : "=a" (__ret), "+m" (*__xg(ptr)) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        case 4: \
                asm volatile(LOCK_PREFIX "cmpxchgl %k2,%1" \
                             : "=a" (__ret), "+m" (*__xg(ptr)) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        case 8: \
                asm volatile(LOCK_PREFIX "cmpxchgq %2,%1" \
                             : "=a" (__ret), "+m" (*__xg(ptr)) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        default: \
                __cmpxchg_wrong_size(); \
        } \
        __ret; \
})

#define cmpxchg(ptr, old, new) \
        __raw_cmpxchg((ptr), (old), (new), sizeof(*ptr))
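/*
 * Note: BUILD_BUG_ON() and cmpxchg_local() are not defined in this
 * trimmed-down header, so the two 64-bit wrappers below are only usable
 * where those are provided elsewhere.
 */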
#define cmpxchg64(ptr, o, n) \
({ \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
        cmpxchg((ptr), (o), (n)); \
})
#define cmpxchg64_local(ptr, o, n) \
({ \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
        cmpxchg_local((ptr), (o), (n)); \
})

#endif /* _ASM_X86_CMPXCHG_64_H */
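
As a quick sanity check of the xchg() macro above, here is a minimal user-space sketch. It assumes the listing is saved as cmpxchg_64.h next to the test program and is compiled with GCC or Clang on x86-64 (the macros rely on GNU statement expressions and __typeof); the test program itself is illustrative and not part of the original header.

/* xchg_demo.c - illustrative only; build with: gcc -O2 xchg_demo.c */
#include <stdio.h>
#include "cmpxchg_64.h"

int main(void)
{
        unsigned long word = 42;

        /* Atomically store 7 in 'word' and receive the previous value. */
        unsigned long old = xchg(&word, 7UL);

        printf("old=%lu new=%lu\n", old, word); /* prints: old=42 new=7 */
        return 0;
}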
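
The comment above cmpxchg() describes the usual compare-and-swap contract: the store happens only if memory still holds the expected old value, and the caller detects success by comparing the returned value with that expectation. Below is a minimal sketch of the standard retry loop built on that contract; demo_add_return() is a hypothetical helper name chosen for this example, and the same build assumptions as the previous sketch apply.

/* cas_demo.c - illustrative only; the classic cmpxchg() retry loop. */
#include <stdio.h>
#include "cmpxchg_64.h"

/* Atomically add 'delta' to '*counter' and return the updated value. */
static unsigned long demo_add_return(unsigned long *counter, unsigned long delta)
{
        unsigned long old, seen;

        do {
                old  = *counter;                           /* snapshot current value */
                seen = cmpxchg(counter, old, old + delta); /* try to publish old+delta */
        } while (seen != old);                             /* lost a race: retry */

        return old + delta;
}

int main(void)
{
        unsigned long counter = 0;

        printf("%lu\n", demo_add_return(&counter, 5)); /* prints 5 */
        printf("%lu\n", demo_add_return(&counter, 3)); /* prints 8 */
        return 0;
}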