bitops-grb.h

#ifndef __ASM_SH_BITOPS_GRB_H
#define __ASM_SH_BITOPS_GRB_H
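
/*
 * Each operation below wraps a plain load-modify-store in a gUSA-style
 * rollback sequence: mova loads the end address of the critical section
 * into r0, the stack pointer r15 is replaced by the negative length of
 * the section ("LOGIN"), and the saved sp is restored at the closing
 * label ("LOGOUT").  An interrupted sequence can then be recognized by
 * its negative r15 and restarted from the beginning (r0 + r15), which
 * is what makes the read-modify-write effectively atomic without
 * masking interrupts.
 */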

static inline void set_bit(int nr, volatile void *addr)
{
        int mask;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "   .align 2             \n\t"
                "   mova   1f,   r0      \n\t" /* r0 = end point */
                "   mov    r15,  r1      \n\t" /* r1 = saved sp */
                "   mov    #-6,  r15     \n\t" /* LOGIN: r15 = size */
                "   mov.l  @%1,  %0      \n\t" /* load old value */
                "   or     %2,   %0      \n\t" /* or */
                "   mov.l  %0,   @%1     \n\t" /* store new value */
                "1: mov    r1,   r15     \n\t" /* LOGOUT */
                : "=&r" (tmp),
                  "+r"  (a)
                : "r"   (mask)
                : "memory", "r0", "r1");
}

static inline void clear_bit(int nr, volatile void *addr)
{
        int mask;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = ~(1 << (nr & 0x1f));

        __asm__ __volatile__ (
                "   .align 2             \n\t"
                "   mova   1f,   r0      \n\t" /* r0 = end point */
                "   mov    r15,  r1      \n\t" /* r1 = saved sp */
                "   mov    #-6,  r15     \n\t" /* LOGIN: r15 = size */
                "   mov.l  @%1,  %0      \n\t" /* load old value */
                "   and    %2,   %0      \n\t" /* and */
                "   mov.l  %0,   @%1     \n\t" /* store new value */
                "1: mov    r1,   r15     \n\t" /* LOGOUT */
                : "=&r" (tmp),
                  "+r"  (a)
                : "r"   (mask)
                : "memory", "r0", "r1");
}

static inline void change_bit(int nr, volatile void *addr)
{
        int mask;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "   .align 2             \n\t"
                "   mova   1f,   r0      \n\t" /* r0 = end point */
                "   mov    r15,  r1      \n\t" /* r1 = saved sp */
                "   mov    #-6,  r15     \n\t" /* LOGIN: r15 = size */
                "   mov.l  @%1,  %0      \n\t" /* load old value */
                "   xor    %2,   %0      \n\t" /* xor */
                "   mov.l  %0,   @%1     \n\t" /* store new value */
                "1: mov    r1,   r15     \n\t" /* LOGOUT */
                : "=&r" (tmp),
                  "+r"  (a)
                : "r"   (mask)
                : "memory", "r0", "r1");
}
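
/*
 * The test_and_* variants below recover the old bit value via the T
 * bit: "tst" sets T = ((*a & mask) == 0), and "negc" applied to -1
 * yields 0 - (-1) - T = 1 - T, i.e. 1 if the bit was set and 0 if it
 * was clear.
 */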

static inline int test_and_set_bit(int nr, volatile void *addr)
{
        int mask, retval;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "   .align 2             \n\t"
                "   mova   1f,   r0      \n\t" /* r0 = end point */
                "   mov    r15,  r1      \n\t" /* r1 = saved sp */
                "   mov    #-14, r15     \n\t" /* LOGIN: r15 = size */
                "   mov.l  @%2,  %0      \n\t" /* load old value */
                "   mov    %0,   %1      \n\t" /* %1 = *a */
                "   tst    %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
                "   mov    #-1,  %1      \n\t" /* retval = -1 */
                "   negc   %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
                "   or     %3,   %0      \n\t" /* set the bit */
                "   mov.l  %0,   @%2     \n\t" /* store new value */
                "1: mov    r1,   r15     \n\t" /* LOGOUT */
                : "=&r" (tmp),
                  "=&r" (retval),
                  "+r"  (a)
                : "r"   (mask)
                : "memory", "r0", "r1", "t");

        return retval;
}

static inline int test_and_clear_bit(int nr, volatile void *addr)
{
        int mask, retval, not_mask;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);
        not_mask = ~mask;

        __asm__ __volatile__ (
                "   .align 2             \n\t"
                "   mova   1f,   r0      \n\t" /* r0 = end point */
                "   mov    r15,  r1      \n\t" /* r1 = saved sp */
                "   mov    #-14, r15     \n\t" /* LOGIN: r15 = size */
                "   mov.l  @%2,  %0      \n\t" /* load old value */
                "   mov    %0,   %1      \n\t" /* %1 = *a */
                "   tst    %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
                "   mov    #-1,  %1      \n\t" /* retval = -1 */
                "   negc   %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
                "   and    %4,   %0      \n\t" /* clear the bit */
                "   mov.l  %0,   @%2     \n\t" /* store new value */
                "1: mov    r1,   r15     \n\t" /* LOGOUT */
                : "=&r" (tmp),
                  "=&r" (retval),
                  "+r"  (a)
                : "r"   (mask),
                  "r"   (not_mask)
                : "memory", "r0", "r1", "t");

        return retval;
}

static inline int test_and_change_bit(int nr, volatile void *addr)
{
        int mask, retval;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "   .align 2             \n\t"
                "   mova   1f,   r0      \n\t" /* r0 = end point */
                "   mov    r15,  r1      \n\t" /* r1 = saved sp */
                "   mov    #-14, r15     \n\t" /* LOGIN: r15 = size */
                "   mov.l  @%2,  %0      \n\t" /* load old value */
                "   mov    %0,   %1      \n\t" /* %1 = *a */
                "   tst    %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
                "   mov    #-1,  %1      \n\t" /* retval = -1 */
                "   negc   %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
                "   xor    %3,   %0      \n\t" /* toggle the bit */
                "   mov.l  %0,   @%2     \n\t" /* store new value */
                "1: mov    r1,   r15     \n\t" /* LOGOUT */
                : "=&r" (tmp),
                  "=&r" (retval),
                  "+r"  (a)
                : "r"   (mask)
                : "memory", "r0", "r1", "t");

        return retval;
}

#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_GRB_H */
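
For reference, a minimal usage sketch of the interface these routines
implement; set_bit(), change_bit() and test_and_clear_bit() are the
standard kernel bitops API, while the bitmap name, its size, and the bit
numbers here are purely illustrative:

#include <linux/bitops.h>
#include <linux/types.h>

static DECLARE_BITMAP(flags, 64);       /* 64 bits = two 32-bit words here */

static void example(void)
{
        set_bit(3, flags);              /* atomically set bit 3 */
        change_bit(35, flags);          /* atomically toggle bit 35 */

        if (test_and_clear_bit(3, flags)) {
                /* old value was 1: bit 3 was set, and is now clear */
        }
}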