/* cmpxchg.h - m32r xchg/cmpxchg primitives */
  1. #ifndef _ASM_M32R_CMPXCHG_H
  2. #define _ASM_M32R_CMPXCHG_H
  3. /*
  4. * M32R version:
  5. * Copyright (C) 2001, 2002 Hitoshi Yamamoto
  6. * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
  7. */
  8. #include <linux/irqflags.h>
  9. #include <asm/assembler.h>
  10. #include <asm/dcache_clear.h>
  11. extern void __xchg_called_with_bad_pointer(void);
/*
 * __xchg - atomically exchange the value at @ptr with @x.
 *
 * @x:    new value to store
 * @ptr:  location of the old value (1, 2 or 4 bytes wide)
 * @size: operand width in bytes (supplied by the xchg() macro)
 *
 * Returns the previous value at @ptr.
 *
 * Interrupts are disabled around the load/store pair, so the exchange
 * is atomic with respect to the local CPU.  On SMP only the 4-byte
 * case is provided and uses the lock/unlock instruction pair;
 * DCACHE_CLEAR is a cache workaround for the M32700 TS1 chip and
 * clobbers r4 in that configuration.  Any other @size resolves to
 * __xchg_called_with_bad_pointer(), which is never defined, turning
 * a bad call into a link-time error.
 */
static __always_inline unsigned long
__xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long flags;
	unsigned long tmp = 0;

	local_irq_save(flags);

	switch (size) {
#ifndef CONFIG_SMP
	case 1:
		__asm__ __volatile__ (
			"ldb %0, @%2 \n\t"
			"stb %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 2:
		__asm__ __volatile__ (
			"ldh %0, @%2 \n\t"
			"sth %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 4:
		__asm__ __volatile__ (
			"ld %0, @%2 \n\t"
			"st %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
#else  /* CONFIG_SMP */
	case 4:
		/* lock/unlock pair makes the exchange atomic across CPUs */
		__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%2")
			"lock %0, @%2; \n\t"
			"unlock %1, @%2; \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr)
			: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
		break;
#endif /* CONFIG_SMP */
	default:
		__xchg_called_with_bad_pointer();
	}

	local_irq_restore(flags);

	return (tmp);
}
/*
 * xchg() - exchange *(ptr) with x, returning the old value.
 * The cast restores the pointed-to type on the value __xchg() returns.
 */
#define xchg(ptr, x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
/*
 * __xchg_local - exchange the value at @ptr with @x, atomic only with
 * respect to the current CPU (interrupts disabled around the
 * load/store pair; no SMP interlock, unlike __xchg()).
 *
 * @x:    new value to store
 * @ptr:  location of the old value (1, 2 or 4 bytes wide)
 * @size: operand width in bytes (supplied by the xchg_local() macro)
 *
 * Returns the previous value at @ptr.  An unsupported @size resolves
 * to __xchg_called_with_bad_pointer() and fails at link time.
 */
static __always_inline unsigned long
__xchg_local(unsigned long x, volatile void *ptr, int size)
{
	unsigned long flags;
	unsigned long tmp = 0;

	local_irq_save(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__ (
			"ldb %0, @%2 \n\t"
			"stb %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 2:
		__asm__ __volatile__ (
			"ldh %0, @%2 \n\t"
			"sth %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 4:
		__asm__ __volatile__ (
			"ld %0, @%2 \n\t"
			"st %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	default:
		__xchg_called_with_bad_pointer();
	}

	local_irq_restore(flags);

	return (tmp);
}
/*
 * xchg_local() - like xchg(), but only atomic wrt the current CPU.
 */
#define xchg_local(ptr, x) \
	((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr), \
			sizeof(*(ptr))))
/*
 * __cmpxchg_u32 - atomic 32-bit compare-and-swap.
 *
 * If *@p equals @old, store @new at @p; in either case return the
 * value that was observed at @p.  Interrupts are disabled across the
 * sequence, and the M32R_LOCK/M32R_UNLOCK pair interlocks the access
 * against other CPUs.  On the mismatch path (label 1) the observed
 * value is written back via M32R_UNLOCK to release the interlock
 * without changing memory.  DCACHE_CLEAR is the M32700 TS1 cache
 * workaround and clobbers r4 in that configuration; the condition
 * bit ("cbit") is clobbered by the comparison.
 */
static inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int retval;

	local_irq_save(flags);
	__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%1")
			M32R_LOCK" %0, @%1; \n"
		" bne %0, %2, 1f; \n"
			M32R_UNLOCK" %3, @%1; \n"
		" bra 2f; \n"
		" .fillinsn \n"
		"1:"
			M32R_UNLOCK" %0, @%1; \n"
		" .fillinsn \n"
		"2:"
			: "=&r" (retval)
			: "r" (p), "r" (old), "r" (new)
			: "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
	local_irq_restore(flags);

	return retval;
}
/*
 * __cmpxchg_local_u32 - 32-bit compare-and-swap, atomic only with
 * respect to the current CPU.
 *
 * Same protocol as __cmpxchg_u32() but uses plain ld/st instead of
 * the LOCK/UNLOCK interlock, relying solely on the disabled
 * interrupts; the mismatch path stores the observed value back
 * unchanged.  Returns the value that was found at @p.
 */
static inline unsigned long
__cmpxchg_local_u32(volatile unsigned int *p, unsigned int old,
			unsigned int new)
{
	unsigned long flags;
	unsigned int retval;

	local_irq_save(flags);
	__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%1")
			"ld %0, @%1; \n"
		" bne %0, %2, 1f; \n"
			"st %3, @%1; \n"
		" bra 2f; \n"
		" .fillinsn \n"
		"1:"
			"st %0, @%1; \n"
		" .fillinsn \n"
		"2:"
			: "=&r" (retval)
			: "r" (p), "r" (old), "r" (new)
			: "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
	local_irq_restore(flags);

	return retval;
}
  149. /* This function doesn't exist, so you'll get a linker error
  150. if something tries to do an invalid cmpxchg(). */
  151. extern void __cmpxchg_called_with_bad_pointer(void);
  152. static inline unsigned long
  153. __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
  154. {
  155. switch (size) {
  156. case 4:
  157. return __cmpxchg_u32(ptr, old, new);
  158. #if 0 /* we don't have __cmpxchg_u64 */
  159. case 8:
  160. return __cmpxchg_u64(ptr, old, new);
  161. #endif /* 0 */
  162. }
  163. __cmpxchg_called_with_bad_pointer();
  164. return old;
  165. }
/*
 * cmpxchg() - compare-and-swap *(ptr): store n if the current value
 * equals o; returns the old value with the pointed-to type restored.
 */
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)(o), \
			(unsigned long)(n), sizeof(*(ptr))))
  169. #include <asm-generic/cmpxchg-local.h>
  170. static inline unsigned long __cmpxchg_local(volatile void *ptr,
  171. unsigned long old,
  172. unsigned long new, int size)
  173. {
  174. switch (size) {
  175. case 4:
  176. return __cmpxchg_local_u32(ptr, old, new);
  177. default:
  178. return __cmpxchg_local_generic(ptr, old, new, size);
  179. }
  180. return old;
  181. }
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
			(unsigned long)(n), sizeof(*(ptr))))

/* No 64-bit primitive on m32r: always use the generic local version. */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
  190. #endif /* _ASM_M32R_CMPXCHG_H */