/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
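
	/* For reference, each atomic_##op routine below is the same
	 * compare-and-swap retry loop, sketched here in C-style
	 * pseudocode (a reader's sketch only, not code used by this
	 * file; "cmpxchg" stands in for the cas/casx instructions):
	 *
	 *	do {
	 *		old = *ptr;
	 *		new = old OP val;
	 *	} while (cmpxchg(ptr, old, new) != old);
	 *
	 * A failed cas falls into the exponential backoff spin provided
	 * by <asm/backoff.h> before retrying.
	 */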

#define ATOMIC_OP(op)							\
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op);
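
/* BACKOFF_SETUP, BACKOFF_LABEL and BACKOFF_SPIN come from
 * <asm/backoff.h>.  To the best of my reading, on SMP builds
 * BACKOFF_LABEL(2f, 1b) expands to 2f, so a failed cas enters the
 * exponential backoff spin at 2: before retrying, while on UP builds
 * it expands to 1b and the loop simply retries immediately.
 */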

#define ATOMIC_OP_RETURN(op)						\
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op##_return);
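
/* In ATOMIC_OP_RETURN the branch delay slot is not annulled, so
 * "op %g1, %o0, %g1" executes on both paths: on success it computes
 * the new value to return, and on failure %g1 is reloaded at 1:
 * anyway.  The "sra %g1, 0, %o0" in the retl delay slot sign-extends
 * the 32-bit result into the 64-bit return register, since atomic_t
 * is a 32-bit quantity.
 */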

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
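
/* The expansions above emit atomic_add, atomic_sub, atomic_and,
 * atomic_or and atomic_xor, plus atomic_add_return and
 * atomic_sub_return; the bitwise ops get no _return variant here.
 */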

#define ATOMIC64_OP(op)							\
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op);

#define ATOMIC64_OP_RETURN(op)						\
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op##_return);
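
/* The 64-bit _return variants differ from the 32-bit ones: they load
 * with ldx, swap with casx, test %xcc, and need no sign extension, so
 * the result is simply recomputed into %o0 in the retl delay slot.
 */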

#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)
ATOMIC64_OP(and)
ATOMIC64_OP(or)
ATOMIC64_OP(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
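
/* atomic64_dec_if_positive: decrement the counter only if it is
 * currently positive, returning the decremented value either way.
 * Roughly, in C-style pseudocode (again a reader's sketch, not code
 * used by this file):
 *
 *	do {
 *		old = *ptr;
 *		if (old <= 0)
 *			return old - 1;
 *	} while (cmpxchg64(ptr, old, old - 1) != old);
 *	return old - 1;
 */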

ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)