/arch/mips/include/asm/cmpxchg.h

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/llsc.h>
#include <asm/sync.h>
#include <asm/war.h>

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
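/*
 * Editorial example (not part of the original header): on a 32-bit kernel,
 * handing a 64-bit object to xchg() falls through to
 * __xchg_called_with_bad_pointer() in __xchg() below, so something like
 *
 *	u64 v;
 *	xchg(&v, 0);
 *
 * fails with "Bad argument size for xchg" at compile time (or at link time
 * if the compiler lacks __compiletime_error support).
 */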
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

#define __xchg_asm(ld, st, m, val) \
({ \
	__typeof(*(m)) __ret; \
\
	if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		"	.set	push				\n" \
		"	.set	noat				\n" \
		"	.set	push				\n" \
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n" \
		"	" __SYNC(full, loongson3_war) "		\n" \
		"1:	" ld "	%0, %2		# __xchg_asm	\n" \
		"	.set	pop				\n" \
		"	move	$1, %z3				\n" \
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n" \
		"	" st "	$1, %1				\n" \
		"\t" __SC_BEQZ "$1, 1b				\n" \
		"	.set	pop				\n" \
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val) \
		: __LLSC_CLOBBER); \
	} else { \
		unsigned long __flags; \
\
		raw_local_irq_save(__flags); \
		__ret = *m; \
		*m = val; \
		raw_local_irq_restore(__flags); \
	} \
\
	__ret; \
})
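
/*
 * Editorial note (not in the original header): when the kernel uses
 * load-linked/store-conditional, __xchg_asm() expands to an LL/SC retry
 * loop. The "ld"/"st" arguments are substituted with ll/sc (32-bit) or
 * lld/scd (64-bit) by the callers below; if the store-conditional fails
 * because another CPU modified the location, __SC_BEQZ branches back to
 * label 1 and the exchange is retried. On !kernel_uses_llsc systems the
 * fallback simply disables interrupts around a plain load and store.
 */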

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define xchg(ptr, x) \
({ \
	__typeof__(*(ptr)) __res; \
\
	/* \
	 * In the Loongson3 workaround case __xchg_asm() already \
	 * contains a completion barrier prior to the LL, so we don't \
	 * need to emit an extra one here. \
	 */ \
	if (!__SYNC_loongson3_war) \
		smp_mb__before_llsc(); \
\
	__res = (__typeof__(*(ptr))) \
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
\
	smp_llsc_mb(); \
\
	__res; \
})
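
/*
 * Editorial example (not part of the original header): xchg() atomically
 * stores a new value and returns the old one, with full ordering provided
 * by the barriers above. A hypothetical hand-off of a pending pointer
 * (names invented for illustration) could look like:
 *
 *	static struct work *pending;
 *
 *	static struct work *take_pending(void)
 *	{
 *		return xchg(&pending, NULL);
 *	}
 */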

#define __cmpxchg_asm(ld, st, m, old, new) \
({ \
	__typeof(*(m)) __ret; \
\
	if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		"	.set	push				\n" \
		"	.set	noat				\n" \
		"	.set	push				\n" \
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n" \
		"	" __SYNC(full, loongson3_war) "		\n" \
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n" \
		"	bne	%0, %z3, 2f			\n" \
		"	.set	pop				\n" \
		"	move	$1, %z4				\n" \
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n" \
		"	" st "	$1, %1				\n" \
		"\t" __SC_BEQZ "$1, 1b				\n" \
		"	.set	pop				\n" \
		"2:	" __SYNC(full, loongson3_war) "		\n" \
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \
		: __LLSC_CLOBBER); \
	} else { \
		unsigned long __flags; \
\
		raw_local_irq_save(__flags); \
		__ret = *m; \
		if (__ret == old) \
			*m = new; \
		raw_local_irq_restore(__flags); \
	} \
\
	__ret; \
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
			unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}
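
/*
 * Editorial note (not in the original header): the 1- and 2-byte cases are
 * handled out of line by __cmpxchg_small()/__xchg_small(), which (in
 * arch/mips/kernel/cmpxchg.c, at least in contemporary trees) emulate the
 * sub-word operation with a 32-bit LL/SC on the naturally aligned word
 * containing the byte or halfword, masking the new value into place.
 */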

#define cmpxchg_local(ptr, old, new) \
	((__typeof__(*(ptr))) \
		__cmpxchg((ptr), \
			  (unsigned long)(__typeof__(*(ptr)))(old), \
			  (unsigned long)(__typeof__(*(ptr)))(new), \
			  sizeof(*(ptr))))

#define cmpxchg(ptr, old, new) \
({ \
	__typeof__(*(ptr)) __res; \
\
	/* \
	 * In the Loongson3 workaround case __cmpxchg_asm() already \
	 * contains a completion barrier prior to the LL, so we don't \
	 * need to emit an extra one here. \
	 */ \
	if (!__SYNC_loongson3_war) \
		smp_mb__before_llsc(); \
\
	__res = cmpxchg_local((ptr), (old), (new)); \
\
	/* \
	 * In the Loongson3 workaround case __cmpxchg_asm() already \
	 * contains a completion barrier after the SC, so we don't \
	 * need to emit an extra one here. \
	 */ \
	if (!__SYNC_loongson3_war) \
		smp_llsc_mb(); \
\
	__res; \
})
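
/*
 * Editorial example (not part of the original header): cmpxchg() is the
 * usual building block for lock-free updates. A hypothetical one-shot
 * claim (names invented for illustration) could be written as:
 *
 *	static unsigned int claimed;
 *
 *	static bool try_claim(void)
 *	{
 *		return cmpxchg(&claimed, 0, 1) == 0;
 *	}
 *
 * Only the caller that observes the old value 0 wins. cmpxchg_local() is
 * the same operation without the SMP ordering barriers, for data only ever
 * touched by the local CPU.
 */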

#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	cmpxchg_local((ptr), (o), (n)); \
})

#define cmpxchg64(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	cmpxchg((ptr), (o), (n)); \
})
#else

# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values into a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Load 64 bits from ptr */
	"	" __SYNC(full, loongson3_war) "		\n"
	"1:	lld	%L0, %3		# __cmpxchg64	\n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
# if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
# else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
# endif
	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"
	/* If we failed, loop! */
	"\t" __SC_BEQZ "%L1, 1b				\n"
	"	.set	pop				\n"
	"2:	" __SYNC(full, loongson3_war) "		\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");
	local_irq_restore(flags);

	return ret;
}
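
/*
 * Editorial note (not in the original header): the %L and %M operand
 * modifiers above select the registers holding, respectively, the low and
 * high 32-bit halves of a 64-bit operand, which GCC keeps in a register
 * pair on 32-bit MIPS. lld/scd operate on a full 64-bit register, so the
 * loaded value has to be split into that pair and the new value recombined
 * from it before the store.
 */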

# define cmpxchg64(ptr, o, n) ({ \
	unsigned long long __old = (__typeof__(*(ptr)))(o); \
	unsigned long long __new = (__typeof__(*(ptr)))(n); \
	__typeof__(*(ptr)) __res; \
\
	/* \
	 * We can only use cmpxchg64 if we know that the CPU supports \
	 * 64-bits, ie. lld & scd. Our call to __cmpxchg64_unsupported \
	 * will cause a build error unless cpu_has_64bits is a \
	 * compile-time constant 1. \
	 */ \
	if (cpu_has_64bits && kernel_uses_llsc) { \
		smp_mb__before_llsc(); \
		__res = __cmpxchg64((ptr), __old, __new); \
		smp_llsc_mb(); \
	} else { \
		__res = __cmpxchg64_unsupported(); \
	} \
\
	__res; \
})

# else /* !CONFIG_SMP */
# define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */

#endif /* __ASM_CMPXCHG_H */