/*
 * arch/sh/lib/mcount.S
 *
 *  Copyright (C) 2008, 2009 Paul Mundt
 *  Copyright (C) 2008, 2009 Matt Fleming
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#include <asm/ftrace.h>
#include <asm/thread_info.h>
#include <asm/asm-offsets.h>
#define MCOUNT_ENTER()                  \
        mov.l   r4, @-r15;              \
        mov.l   r5, @-r15;              \
        mov.l   r6, @-r15;              \
        mov.l   r7, @-r15;              \
        sts.l   pr, @-r15;              \
                                        \
        mov.l   @(20,r15), r4;          \
        sts     pr, r5

#define MCOUNT_LEAVE()                  \
        lds.l   @r15+, pr;              \
        mov.l   @r15+, r7;              \
        mov.l   @r15+, r6;              \
        mov.l   @r15+, r5;              \
        rts;                            \
         mov.l  @r15+, r4
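
/*
 * MCOUNT_ENTER() saves the four argument registers (r4-r7, which
 * carry the first four arguments in the SH calling convention) and
 * pr, the link register -- five words, 20 bytes.  The subsequent
 * @(20,r15) load therefore picks up the word that was on top of the
 * stack when mcount was entered (the parent's return address, which
 * the -pg prologue is assumed to have pushed), while "sts pr, r5"
 * captures the call site inside the instrumented function.  Note the
 * SH branch delay slot in MCOUNT_LEAVE(): the "mov.l @r15+, r4"
 * after "rts" still executes before the return is taken.
 */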

#ifdef CONFIG_STACK_DEBUG
/*
 * Perform diagnostic checks on the state of the kernel stack.
 *
 * Check for stack overflow. If there is less than 1KB free
 * then it has overflowed.
 *
 * Make sure the stack pointer contains a valid address. Valid
 * addresses for kernel stacks are anywhere after the bss
 * (after __bss_stop) and anywhere in init_thread_union (init_stack).
 */
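/*
 * Register usage below: r0 holds THREAD_SIZE (built by shifting
 * THREAD_SIZE >> 10 left by ten bits via shll8 + shll2), r1 becomes
 * sp & (THREAD_SIZE - 1), i.e. the offset of the stack pointer
 * within its THREAD_SIZE-aligned stack, and r2 holds the low-water
 * mark TI_SIZE + STACK_WARN.  Since the stack grows down towards
 * the thread_info at the base, an offset at or below that mark
 * means we are about to overflow.  cmp/hi and cmp/hs are unsigned
 * ">" and ">=" compares; bt/bf branch on the resulting T bit.
 */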
#define STACK_CHECK()                                   \
        mov     #(THREAD_SIZE >> 10), r0;               \
        shll8   r0;                                     \
        shll2   r0;                                     \
                                                        \
        /* r1 = sp & (THREAD_SIZE - 1) */               \
        mov     #-1, r1;                                \
        add     r0, r1;                                 \
        and     r15, r1;                                \
                                                        \
        mov     #TI_SIZE, r3;                           \
        mov     #(STACK_WARN >> 8), r2;                 \
        shll8   r2;                                     \
        add     r3, r2;                                 \
                                                        \
        /* Is the stack overflowing? */                 \
        cmp/hi  r2, r1;                                 \
        bf      stack_panic;                            \
                                                        \
        /* If sp > __bss_stop then we're OK. */         \
        mov.l   .L_ebss, r1;                            \
        cmp/hi  r1, r15;                                \
        bt      1f;                                     \
                                                        \
        /* If sp < init_stack, we're not OK. */         \
        mov.l   .L_init_thread_union, r1;               \
        cmp/hs  r1, r15;                                \
        bf      stack_panic;                            \
                                                        \
        /* If sp > init_stack && sp < __bss_stop, not OK. */    \
        add     r0, r1;                                 \
        cmp/hs  r1, r15;                                \
        bt      stack_panic;                            \
1:
#else
#define STACK_CHECK()
#endif /* CONFIG_STACK_DEBUG */
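
/*
 * Every function compiled with -pg calls mcount on entry; _mcount is
 * provided as an alias, presumably because different toolchains
 * reference different symbol names.  The stack sanity check runs
 * even when the function tracer itself is compiled out.
 */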
        .align 2
        .globl  _mcount
        .type   _mcount,@function
        .globl  mcount
        .type   mcount,@function
_mcount:
mcount:
        STACK_CHECK()

#ifndef CONFIG_FUNCTION_TRACER
        rts
         nop
#else
        MCOUNT_ENTER()

#ifdef CONFIG_DYNAMIC_FTRACE
        .globl  mcount_call
mcount_call:
        mov.l   .Lftrace_stub, r6
#else
        mov.l   .Lftrace_trace_function, r6
        mov.l   ftrace_stub, r7
        cmp/eq  r6, r7
        bt      skip_trace
        mov.l   @r6, r6
#endif
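
        /*
         * Call the tracer selected above.  "mov.l label, rN" is a
         * PC-relative load of a 32-bit literal, which is why each
         * code block below is followed by an .align 2 literal pool
         * of .long entries.
         */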
        jsr     @r6
         nop
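
        /*
         * With the graph tracer built in, check whether either graph
         * hook has been registered: ftrace_graph_return and
         * ftrace_graph_entry are compared against their stub
         * defaults, and if either has been replaced we branch to
         * ftrace_graph_caller.
         */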
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        mov.l   .Lftrace_graph_return, r6
        mov.l   .Lftrace_stub, r7
        cmp/eq  r6, r7
        bt      1f

        mov.l   .Lftrace_graph_caller, r0
        jmp     @r0
         nop

1:
        mov.l   .Lftrace_graph_entry, r6
        mov.l   .Lftrace_graph_entry_stub, r7
        cmp/eq  r6, r7
        bt      skip_trace

        mov.l   .Lftrace_graph_caller, r0
        jmp     @r0
         nop

        .align 2
.Lftrace_graph_return:
        .long   ftrace_graph_return
.Lftrace_graph_entry:
        .long   ftrace_graph_entry
.Lftrace_graph_entry_stub:
        .long   ftrace_graph_entry_stub
.Lftrace_graph_caller:
        .long   ftrace_graph_caller
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

        .globl  skip_trace
skip_trace:
        MCOUNT_LEAVE()

        .align 2
.Lftrace_trace_function:
        .long   ftrace_trace_function

#ifdef CONFIG_DYNAMIC_FTRACE
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * NOTE: Do not move either ftrace_graph_call or ftrace_caller
 * as this will affect the calculation of GRAPH_INSN_OFFSET.
 */
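/*
 * (GRAPH_INSN_OFFSET lives in <asm/ftrace.h> and appears to be
 * computed from the distance between these two labels, which is how
 * the dynamic ftrace code locates the .Lskip_trace literal it
 * rewrites to enable or disable the graph caller.  Comments are
 * free, but do not add instructions or data between the labels.)
 */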
        .globl  ftrace_graph_call
ftrace_graph_call:
        mov.l   .Lskip_trace, r0
        jmp     @r0
         nop

        .align 2
.Lskip_trace:
        .long   skip_trace
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

        .globl  ftrace_caller
ftrace_caller:
        MCOUNT_ENTER()

        .globl  ftrace_call
ftrace_call:
        mov.l   .Lftrace_stub, r6
        jsr     @r6
         nop
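
        /*
         * With dynamic ftrace, the .Lftrace_stub literal loaded above
         * is presumably the word that ftrace_update_ftrace_func()
         * rewrites so that this jsr reaches the currently registered
         * tracer (the NOTE below exists because that word is located
         * by its fixed distance from the ftrace_stub symbol).
         */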
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        bra     ftrace_graph_call
         nop
#else
        MCOUNT_LEAVE()
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_DYNAMIC_FTRACE */

        .align 2

/*
 * NOTE: From here on the locations of the .Lftrace_stub label and
 * ftrace_stub itself are fixed. Adding additional data here will skew
 * the displacement for the memory table and break the block replacement.
 * Place new labels either after the ftrace_stub body, or before
 * ftrace_caller. You have been warned.
 */
.Lftrace_stub:
        .long   ftrace_stub

        .globl  ftrace_stub
ftrace_stub:
        rts
         nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        .globl  ftrace_graph_caller
ftrace_graph_caller:
        mov.l   2f, r1
        jmp     @r1
         nop
1:
        /*
         * MCOUNT_ENTER() pushed 5 registers onto the stack, so
         * the stack address containing our return address is
         * r15 + 20.
         */
        mov     #20, r0
        add     r15, r0
        mov     r0, r4

        mov.l   .Lprepare_ftrace_return, r0
        jsr     @r0
         nop
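
        /*
         * r4 now points at the saved return address, which
         * prepare_ftrace_return() can replace with the address of
         * return_to_handler below; the traced function then "returns"
         * into the handler instead of its real caller, which is how
         * the graph tracer records function exit.
         */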
        MCOUNT_LEAVE()

        .align 2
2:      .long   skip_trace
.Lprepare_ftrace_return:
        .long   prepare_ftrace_return

        .globl  return_to_handler
return_to_handler:
        /*
         * Save the return values.
         */
        mov.l   r0, @-r15
        mov.l   r1, @-r15

        mov     #0, r4

        mov.l   .Lftrace_return_to_handler, r0
        jsr     @r0
         nop

        /*
         * The return value from ftrace_return_to_handler has the real
         * address that we should return to.
         */
        lds     r0, pr
        mov.l   @r15+, r1
        rts
         mov.l  @r15+, r0
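
        /*
         * r0 and r1 are the SH function return-value registers, which
         * is why they are preserved around the call; note that the
         * final restore of r0 happens in the delay slot of the rts,
         * after pr has already been pointed at the real return
         * address.
         */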

        .align 2
.Lftrace_return_to_handler:
        .long   ftrace_return_to_handler
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_STACK_DEBUG
        .globl  stack_panic
stack_panic:
        mov.l   .Ldump_stack, r0
        jsr     @r0
         nop

        mov.l   .Lpanic, r0
        jsr     @r0
         mov.l  .Lpanic_s, r4

        rts
         nop
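
        /*
         * The panic() argument is loaded in the jsr delay slot: the
         * "mov.l .Lpanic_s, r4" executes before the call is taken,
         * leaving the panic string in r4 (the first-argument
         * register).  panic() does not return, so the trailing
         * rts/nop should never run.
         */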

        .align 2
.L_init_thread_union:
        .long   init_thread_union
.L_ebss:
        .long   __bss_stop
.Lpanic:
        .long   panic
.Lpanic_s:
        .long   .Lpanic_str
.Ldump_stack:
        .long   dump_stack

        .section .rodata
        .align 2
.Lpanic_str:
        .string "Stack error"
#endif /* CONFIG_STACK_DEBUG */