cmpxchg.h — 3.3 KB
  1. /*
  2. * forked from parisc asm/atomic.h which was:
  3. * Copyright (C) 2000 Philipp Rumpf <prumpf@tux.org>
  4. * Copyright (C) 2006 Kyle McMartin <kyle@parisc-linux.org>
  5. */
#ifndef _ASM_PARISC_CMPXCHG_H_
#define _ASM_PARISC_CMPXCHG_H_

/* This should get optimized out since it's never called.
** Or get a link error if xchg is used "wrong".
*/
extern void __xchg_called_with_bad_pointer(void);

/* __xchg32/64 defined in arch/parisc/lib/bitops.c */
extern unsigned long __xchg8(char, char *);
extern unsigned long __xchg32(int, int *);
#ifdef CONFIG_64BIT
/* 64-bit exchange only exists on 64-bit kernels */
extern unsigned long __xchg64(unsigned long, unsigned long *);
#endif
  18. /* optimizer better get rid of switch since size is a constant */
  19. static inline unsigned long
  20. __xchg(unsigned long x, __volatile__ void *ptr, int size)
  21. {
  22. switch (size) {
  23. #ifdef CONFIG_64BIT
  24. case 8: return __xchg64(x, (unsigned long *) ptr);
  25. #endif
  26. case 4: return __xchg32((int) x, (int *) ptr);
  27. case 1: return __xchg8((char) x, (char *) ptr);
  28. }
  29. __xchg_called_with_bad_pointer();
  30. return x;
  31. }
/*
** REVISIT - Abandoned use of LDCW in xchg() for now:
** o need to test sizeof(*ptr) to avoid clearing adjacent bytes
** o and while we are at it, could CONFIG_64BIT code use LDCD too?
**
** if (__builtin_constant_p(x) && (x == NULL))
** if (((unsigned long)p & 0xf) == 0)
** return __ldcw(p);
*/

/*
 * Atomically exchange *ptr with x; evaluates to the previous value of
 * *ptr, cast back to its type.  sizeof(*(ptr)) selects the
 * width-specific helper inside __xchg() at compile time.
 */
#define xchg(ptr, x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
/* bug catcher for when unsupported size is used - won't link */
extern void __cmpxchg_called_with_bad_pointer(void);

/* __cmpxchg_u32/u64 defined in arch/parisc/lib/bitops.c */
extern unsigned long __cmpxchg_u32(volatile unsigned int *m, unsigned int old,
				   unsigned int new_);
extern unsigned long __cmpxchg_u64(volatile unsigned long *ptr,
				   unsigned long old, unsigned long new_);
  50. /* don't worry...optimizer will get rid of most of this */
  51. static inline unsigned long
  52. __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
  53. {
  54. switch (size) {
  55. #ifdef CONFIG_64BIT
  56. case 8: return __cmpxchg_u64((unsigned long *)ptr, old, new_);
  57. #endif
  58. case 4: return __cmpxchg_u32((unsigned int *)ptr,
  59. (unsigned int)old, (unsigned int)new_);
  60. }
  61. __cmpxchg_called_with_bad_pointer();
  62. return old;
  63. }
/*
 * Atomic compare-and-exchange: if *ptr == o, store n into *ptr.
 * Evaluates to the value *ptr held before the operation.  The _o_/_n_
 * temporaries guarantee o and n are each evaluated exactly once.
 */
#define cmpxchg(ptr, o, n)						 \
({									 \
	__typeof__(*(ptr)) _o_ = (o);					 \
	__typeof__(*(ptr)) _n_ = (n);					 \
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
})
  71. #include <asm-generic/cmpxchg-local.h>
  72. static inline unsigned long __cmpxchg_local(volatile void *ptr,
  73. unsigned long old,
  74. unsigned long new_, int size)
  75. {
  76. switch (size) {
  77. #ifdef CONFIG_64BIT
  78. case 8: return __cmpxchg_u64((unsigned long *)ptr, old, new_);
  79. #endif
  80. case 4: return __cmpxchg_u32(ptr, old, new_);
  81. default:
  82. return __cmpxchg_local_generic(ptr, old, new_, size);
  83. }
  84. }
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))

#ifdef CONFIG_64BIT
/* 64-bit kernels: a size-checked wrapper around cmpxchg_local() */
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
#else
/* 32-bit kernels have no native 64-bit helper; use the generic fallback */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#endif /* _ASM_PARISC_CMPXCHG_H_ */