/* arch/x86/kernel/ftrace_64.S — ftrace mcount/__fentry__ trampolines for x86-64 */
  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. /*
  3. * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
  4. */
  5. #include <linux/linkage.h>
  6. #include <asm/ptrace.h>
  7. #include <asm/ftrace.h>
  8. #include <asm/export.h>
  9. #include <asm/nospec-branch.h>
  10. #include <asm/unwind_hints.h>
  11. #include <asm/frame.h>
  12. .code64
  13. .section .entry.text, "ax"
  14. #ifdef CC_USING_FENTRY
  15. # define function_hook __fentry__
  16. EXPORT_SYMBOL(__fentry__)
  17. #else
  18. # define function_hook mcount
  19. EXPORT_SYMBOL(mcount)
  20. #endif
  21. #ifdef CONFIG_FRAME_POINTER
  22. # ifdef CC_USING_FENTRY
  23. /* Save parent and function stack frames (rip and rbp) */
  24. # define MCOUNT_FRAME_SIZE (8+16*2)
  25. # else
  26. /* Save just function stack frame (rip and rbp) */
  27. # define MCOUNT_FRAME_SIZE (8+16)
  28. # endif
  29. #else
  30. /* No need to save a stack frame */
  31. # define MCOUNT_FRAME_SIZE 0
  32. #endif /* CONFIG_FRAME_POINTER */
  33. /* Size of stack used to save mcount regs in save_mcount_regs */
  34. #define MCOUNT_REG_SIZE (SS+8 + MCOUNT_FRAME_SIZE)
  35. /*
  36. * gcc -pg option adds a call to 'mcount' in most functions.
  37. * When -mfentry is used, the call is to 'fentry' and not 'mcount'
  38. * and is done before the function's stack frame is set up.
  39. * They both require a set of regs to be saved before calling
  40. * any C code and restored before returning back to the function.
  41. *
  42. * On boot up, all these calls are converted into nops. When tracing
  43. * is enabled, the call can jump to either ftrace_caller or
  44. * ftrace_regs_caller. Callbacks (tracing functions) that require
  45. * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
  46. * it. For this reason, the size of the pt_regs structure will be
  47. * allocated on the stack and the required mcount registers will
  48. * be saved in the locations that pt_regs has them in.
  49. */
  50. /*
  51. * @added: the amount of stack added before calling this
  52. *
  53. * After this is called, the following registers contain:
  54. *
  55. * %rdi - holds the address that called the trampoline
  56. * %rsi - holds the parent function (traced function's return address)
  57. * %rdx - holds the original %rbp
  58. */
  59. .macro save_mcount_regs added=0
  60. #ifdef CONFIG_FRAME_POINTER
  61. /* Save the original rbp */
  62. pushq %rbp
  63. /*
  64. * Stack traces will stop at the ftrace trampoline if the frame pointer
  65. * is not set up properly. If fentry is used, we need to save a frame
  66. * pointer for the parent as well as the function traced, because the
  67. * fentry is called before the stack frame is set up, where as mcount
  68. * is called afterward.
  69. */
  70. #ifdef CC_USING_FENTRY
  71. /* Save the parent pointer (skip orig rbp and our return address) */
  72. pushq \added+8*2(%rsp)
  73. pushq %rbp
  74. movq %rsp, %rbp
  75. /* Save the return address (now skip orig rbp, rbp and parent) */
  76. pushq \added+8*3(%rsp)
  77. #else
  78. /* Can't assume that rip is before this (unless added was zero) */
  79. pushq \added+8(%rsp)
  80. #endif
  81. pushq %rbp
  82. movq %rsp, %rbp
  83. #endif /* CONFIG_FRAME_POINTER */
  84. /*
  85. * We add enough stack to save all regs.
  86. */
  87. subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
  88. movq %rax, RAX(%rsp)
  89. movq %rcx, RCX(%rsp)
  90. movq %rdx, RDX(%rsp)
  91. movq %rsi, RSI(%rsp)
  92. movq %rdi, RDI(%rsp)
  93. movq %r8, R8(%rsp)
  94. movq %r9, R9(%rsp)
  95. /*
  96. * Save the original RBP. Even though the mcount ABI does not
  97. * require this, it helps out callers.
  98. */
  99. #ifdef CONFIG_FRAME_POINTER
  100. movq MCOUNT_REG_SIZE-8(%rsp), %rdx
  101. #else
  102. movq %rbp, %rdx
  103. #endif
  104. movq %rdx, RBP(%rsp)
  105. /* Copy the parent address into %rsi (second parameter) */
  106. #ifdef CC_USING_FENTRY
  107. movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
  108. #else
  109. /* %rdx contains original %rbp */
  110. movq 8(%rdx), %rsi
  111. #endif
  112. /* Move RIP to its proper location */
  113. movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
  114. movq %rdi, RIP(%rsp)
  115. /*
  116. * Now %rdi (the first parameter) has the return address of
  117. * where ftrace_call returns. But the callbacks expect the
  118. * address of the call itself.
  119. */
  120. subq $MCOUNT_INSN_SIZE, %rdi
  121. .endm
  122. .macro restore_mcount_regs
  123. movq R9(%rsp), %r9
  124. movq R8(%rsp), %r8
  125. movq RDI(%rsp), %rdi
  126. movq RSI(%rsp), %rsi
  127. movq RDX(%rsp), %rdx
  128. movq RCX(%rsp), %rcx
  129. movq RAX(%rsp), %rax
  130. /* ftrace_regs_caller can modify %rbp */
  131. movq RBP(%rsp), %rbp
  132. addq $MCOUNT_REG_SIZE, %rsp
  133. .endm
  134. #ifdef CONFIG_DYNAMIC_FTRACE
  135. ENTRY(function_hook)
  136. retq
  137. ENDPROC(function_hook)
  138. ENTRY(ftrace_caller)
  139. /* save_mcount_regs fills in first two parameters */
  140. save_mcount_regs
  141. GLOBAL(ftrace_caller_op_ptr)
  142. /* Load the ftrace_ops into the 3rd parameter */
  143. movq function_trace_op(%rip), %rdx
  144. /* regs go into 4th parameter (but make it NULL) */
  145. movq $0, %rcx
  146. GLOBAL(ftrace_call)
  147. call ftrace_stub
  148. restore_mcount_regs
  149. /*
  150. * The code up to this label is copied into trampolines so
  151. * think twice before adding any new code or changing the
  152. * layout here.
  153. */
  154. GLOBAL(ftrace_epilogue)
  155. #ifdef CONFIG_FUNCTION_GRAPH_TRACER
  156. GLOBAL(ftrace_graph_call)
  157. jmp ftrace_stub
  158. #endif
  159. /*
  160. * This is weak to keep gas from relaxing the jumps.
  161. * It is also used to copy the retq for trampolines.
  162. */
  163. WEAK(ftrace_stub)
  164. retq
  165. ENDPROC(ftrace_caller)
  166. ENTRY(ftrace_regs_caller)
  167. /* Save the current flags before any operations that can change them */
  168. pushfq
  169. /* added 8 bytes to save flags */
  170. save_mcount_regs 8
  171. /* save_mcount_regs fills in first two parameters */
  172. GLOBAL(ftrace_regs_caller_op_ptr)
  173. /* Load the ftrace_ops into the 3rd parameter */
  174. movq function_trace_op(%rip), %rdx
  175. /* Save the rest of pt_regs */
  176. movq %r15, R15(%rsp)
  177. movq %r14, R14(%rsp)
  178. movq %r13, R13(%rsp)
  179. movq %r12, R12(%rsp)
  180. movq %r11, R11(%rsp)
  181. movq %r10, R10(%rsp)
  182. movq %rbx, RBX(%rsp)
  183. /* Copy saved flags */
  184. movq MCOUNT_REG_SIZE(%rsp), %rcx
  185. movq %rcx, EFLAGS(%rsp)
  186. /* Kernel segments */
  187. movq $__KERNEL_DS, %rcx
  188. movq %rcx, SS(%rsp)
  189. movq $__KERNEL_CS, %rcx
  190. movq %rcx, CS(%rsp)
  191. /* Stack - skipping return address and flags */
  192. leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
  193. movq %rcx, RSP(%rsp)
  194. ENCODE_FRAME_POINTER
  195. /* regs go into 4th parameter */
  196. leaq (%rsp), %rcx
  197. GLOBAL(ftrace_regs_call)
  198. call ftrace_stub
  199. /* Copy flags back to SS, to restore them */
  200. movq EFLAGS(%rsp), %rax
  201. movq %rax, MCOUNT_REG_SIZE(%rsp)
  202. /* Handlers can change the RIP */
  203. movq RIP(%rsp), %rax
  204. movq %rax, MCOUNT_REG_SIZE+8(%rsp)
  205. /* restore the rest of pt_regs */
  206. movq R15(%rsp), %r15
  207. movq R14(%rsp), %r14
  208. movq R13(%rsp), %r13
  209. movq R12(%rsp), %r12
  210. movq R10(%rsp), %r10
  211. movq RBX(%rsp), %rbx
  212. restore_mcount_regs
  213. /* Restore flags */
  214. popfq
  215. /*
  216. * As this jmp to ftrace_epilogue can be a short jump
  217. * it must not be copied into the trampoline.
  218. * The trampoline will add the code to jump
  219. * to the return.
  220. */
  221. GLOBAL(ftrace_regs_caller_end)
  222. jmp ftrace_epilogue
  223. ENDPROC(ftrace_regs_caller)
  224. #else /* ! CONFIG_DYNAMIC_FTRACE */
  225. ENTRY(function_hook)
  226. cmpq $ftrace_stub, ftrace_trace_function
  227. jnz trace
  228. fgraph_trace:
  229. #ifdef CONFIG_FUNCTION_GRAPH_TRACER
  230. cmpq $ftrace_stub, ftrace_graph_return
  231. jnz ftrace_graph_caller
  232. cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
  233. jnz ftrace_graph_caller
  234. #endif
  235. GLOBAL(ftrace_stub)
  236. retq
  237. trace:
  238. /* save_mcount_regs fills in first two parameters */
  239. save_mcount_regs
  240. /*
  241. * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
  242. * set (see include/asm/ftrace.h and include/linux/ftrace.h). Only the
  243. * ip and parent ip are used and the list function is called when
  244. * function tracing is enabled.
  245. */
  246. movq ftrace_trace_function, %r8
  247. CALL_NOSPEC %r8
  248. restore_mcount_regs
  249. jmp fgraph_trace
  250. ENDPROC(function_hook)
  251. #endif /* CONFIG_DYNAMIC_FTRACE */
  252. #ifdef CONFIG_FUNCTION_GRAPH_TRACER
  253. ENTRY(ftrace_graph_caller)
  254. /* Saves rbp into %rdx and fills first parameter */
  255. save_mcount_regs
  256. #ifdef CC_USING_FENTRY
  257. leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
  258. movq $0, %rdx /* No framepointers needed */
  259. #else
  260. /* Save address of the return address of traced function */
  261. leaq 8(%rdx), %rsi
  262. /* ftrace does sanity checks against frame pointers */
  263. movq (%rdx), %rdx
  264. #endif
  265. call prepare_ftrace_return
  266. restore_mcount_regs
  267. retq
  268. ENDPROC(ftrace_graph_caller)
  269. ENTRY(return_to_handler)
  270. UNWIND_HINT_EMPTY
  271. subq $24, %rsp
  272. /* Save the return values */
  273. movq %rax, (%rsp)
  274. movq %rdx, 8(%rsp)
  275. movq %rbp, %rdi
  276. call ftrace_return_to_handler
  277. movq %rax, %rdi
  278. movq 8(%rsp), %rdx
  279. movq (%rsp), %rax
  280. addq $24, %rsp
  281. JMP_NOSPEC %rdi
  282. END(return_to_handler)
  283. #endif