/* bpf_jit_asm.S: SPARC BPF JIT assembler helpers.
 *
 * Out-of-line skb load helpers called from JIT-compiled BPF programs.
 * Register name definitions (r_A, r_X, r_OFF, r_TMP, ...) come from
 * bpf_jit.h.
 */
  1. #include <asm/ptrace.h>
  2. #include "bpf_jit.h"
  3. #ifdef CONFIG_SPARC64
  4. #define SAVE_SZ 176
  5. #define SCRATCH_OFF STACK_BIAS + 128
  6. #define BE_PTR(label) be,pn %xcc, label
  7. #define SIGN_EXTEND(reg) sra reg, 0, reg
  8. #else
  9. #define SAVE_SZ 96
  10. #define SCRATCH_OFF 72
  11. #define BE_PTR(label) be label
  12. #define SIGN_EXTEND(reg)
  13. #endif
  14. #define SKF_MAX_NEG_OFF (-0x200000) /* SKF_LL_OFF from filter.h */
  15. .text
  16. .globl bpf_jit_load_word
  17. bpf_jit_load_word:
  18. cmp r_OFF, 0
  19. bl bpf_slow_path_word_neg
  20. nop
  21. .globl bpf_jit_load_word_positive_offset
  22. bpf_jit_load_word_positive_offset:
  23. sub r_HEADLEN, r_OFF, r_TMP
  24. cmp r_TMP, 3
  25. ble bpf_slow_path_word
  26. add r_SKB_DATA, r_OFF, r_TMP
  27. andcc r_TMP, 3, %g0
  28. bne load_word_unaligned
  29. nop
  30. retl
  31. ld [r_TMP], r_A
  32. load_word_unaligned:
  33. ldub [r_TMP + 0x0], r_OFF
  34. ldub [r_TMP + 0x1], r_TMP2
  35. sll r_OFF, 8, r_OFF
  36. or r_OFF, r_TMP2, r_OFF
  37. ldub [r_TMP + 0x2], r_TMP2
  38. sll r_OFF, 8, r_OFF
  39. or r_OFF, r_TMP2, r_OFF
  40. ldub [r_TMP + 0x3], r_TMP2
  41. sll r_OFF, 8, r_OFF
  42. retl
  43. or r_OFF, r_TMP2, r_A
  44. .globl bpf_jit_load_half
  45. bpf_jit_load_half:
  46. cmp r_OFF, 0
  47. bl bpf_slow_path_half_neg
  48. nop
  49. .globl bpf_jit_load_half_positive_offset
  50. bpf_jit_load_half_positive_offset:
  51. sub r_HEADLEN, r_OFF, r_TMP
  52. cmp r_TMP, 1
  53. ble bpf_slow_path_half
  54. add r_SKB_DATA, r_OFF, r_TMP
  55. andcc r_TMP, 1, %g0
  56. bne load_half_unaligned
  57. nop
  58. retl
  59. lduh [r_TMP], r_A
  60. load_half_unaligned:
  61. ldub [r_TMP + 0x0], r_OFF
  62. ldub [r_TMP + 0x1], r_TMP2
  63. sll r_OFF, 8, r_OFF
  64. retl
  65. or r_OFF, r_TMP2, r_A
  66. .globl bpf_jit_load_byte
  67. bpf_jit_load_byte:
  68. cmp r_OFF, 0
  69. bl bpf_slow_path_byte_neg
  70. nop
  71. .globl bpf_jit_load_byte_positive_offset
  72. bpf_jit_load_byte_positive_offset:
  73. cmp r_OFF, r_HEADLEN
  74. bge bpf_slow_path_byte
  75. nop
  76. retl
  77. ldub [r_SKB_DATA + r_OFF], r_A
  78. .globl bpf_jit_load_byte_msh
  79. bpf_jit_load_byte_msh:
  80. cmp r_OFF, 0
  81. bl bpf_slow_path_byte_msh_neg
  82. nop
  83. .globl bpf_jit_load_byte_msh_positive_offset
  84. bpf_jit_load_byte_msh_positive_offset:
  85. cmp r_OFF, r_HEADLEN
  86. bge bpf_slow_path_byte_msh
  87. nop
  88. ldub [r_SKB_DATA + r_OFF], r_OFF
  89. and r_OFF, 0xf, r_OFF
  90. retl
  91. sll r_OFF, 2, r_X
  92. #define bpf_slow_path_common(LEN) \
  93. save %sp, -SAVE_SZ, %sp; \
  94. mov %i0, %o0; \
  95. mov r_OFF, %o1; \
  96. add %fp, SCRATCH_OFF, %o2; \
  97. call skb_copy_bits; \
  98. mov (LEN), %o3; \
  99. cmp %o0, 0; \
  100. restore;
  101. bpf_slow_path_word:
  102. bpf_slow_path_common(4)
  103. bl bpf_error
  104. ld [%sp + SCRATCH_OFF], r_A
  105. retl
  106. nop
  107. bpf_slow_path_half:
  108. bpf_slow_path_common(2)
  109. bl bpf_error
  110. lduh [%sp + SCRATCH_OFF], r_A
  111. retl
  112. nop
  113. bpf_slow_path_byte:
  114. bpf_slow_path_common(1)
  115. bl bpf_error
  116. ldub [%sp + SCRATCH_OFF], r_A
  117. retl
  118. nop
  119. bpf_slow_path_byte_msh:
  120. bpf_slow_path_common(1)
  121. bl bpf_error
  122. ldub [%sp + SCRATCH_OFF], r_A
  123. and r_OFF, 0xf, r_OFF
  124. retl
  125. sll r_OFF, 2, r_X
  126. #define bpf_negative_common(LEN) \
  127. save %sp, -SAVE_SZ, %sp; \
  128. mov %i0, %o0; \
  129. mov r_OFF, %o1; \
  130. SIGN_EXTEND(%o1); \
  131. call bpf_internal_load_pointer_neg_helper; \
  132. mov (LEN), %o2; \
  133. mov %o0, r_TMP; \
  134. cmp %o0, 0; \
  135. BE_PTR(bpf_error); \
  136. restore;
  137. bpf_slow_path_word_neg:
  138. sethi %hi(SKF_MAX_NEG_OFF), r_TMP
  139. cmp r_OFF, r_TMP
  140. bl bpf_error
  141. nop
  142. .globl bpf_jit_load_word_negative_offset
  143. bpf_jit_load_word_negative_offset:
  144. bpf_negative_common(4)
  145. andcc r_TMP, 3, %g0
  146. bne load_word_unaligned
  147. nop
  148. retl
  149. ld [r_TMP], r_A
  150. bpf_slow_path_half_neg:
  151. sethi %hi(SKF_MAX_NEG_OFF), r_TMP
  152. cmp r_OFF, r_TMP
  153. bl bpf_error
  154. nop
  155. .globl bpf_jit_load_half_negative_offset
  156. bpf_jit_load_half_negative_offset:
  157. bpf_negative_common(2)
  158. andcc r_TMP, 1, %g0
  159. bne load_half_unaligned
  160. nop
  161. retl
  162. lduh [r_TMP], r_A
  163. bpf_slow_path_byte_neg:
  164. sethi %hi(SKF_MAX_NEG_OFF), r_TMP
  165. cmp r_OFF, r_TMP
  166. bl bpf_error
  167. nop
  168. .globl bpf_jit_load_byte_negative_offset
  169. bpf_jit_load_byte_negative_offset:
  170. bpf_negative_common(1)
  171. retl
  172. ldub [r_TMP], r_A
  173. bpf_slow_path_byte_msh_neg:
  174. sethi %hi(SKF_MAX_NEG_OFF), r_TMP
  175. cmp r_OFF, r_TMP
  176. bl bpf_error
  177. nop
  178. .globl bpf_jit_load_byte_msh_negative_offset
  179. bpf_jit_load_byte_msh_negative_offset:
  180. bpf_negative_common(1)
  181. ldub [r_TMP], r_OFF
  182. and r_OFF, 0xf, r_OFF
  183. retl
  184. sll r_OFF, 2, r_X
  185. bpf_error:
  186. /* Make the JIT program return zero. The JIT epilogue
  187. * stores away the original %o7 into r_saved_O7. The
  188. * normal leaf function return is to use "retl" which
  189. * would evalute to "jmpl %o7 + 8, %g0" but we want to
  190. * use the saved value thus the sequence you see here.
  191. */
  192. jmpl r_saved_O7 + 8, %g0
  193. clr %o0