/* SPDX-License-Identifier: GPL-2.0 */
/*
 *  Copyright (C) 2014  Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/unwind_hints.h>
#include <asm/frame.h>

    .code64
    .section .text, "ax"

#ifdef CONFIG_FRAME_POINTER
/* Save parent and function stack frames (rip and rbp) */
# define MCOUNT_FRAME_SIZE  (8+16*2)
#else
/* No need to save a stack frame */
# define MCOUNT_FRAME_SIZE  0
#endif /* CONFIG_FRAME_POINTER */

/* Size of stack used to save mcount regs in save_mcount_regs */
#define MCOUNT_REG_SIZE     (FRAME_SIZE + MCOUNT_FRAME_SIZE)
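
/*
 * FRAME_SIZE covers a full struct pt_regs; the RAX, RIP, etc. offsets
 * used below index into that layout, so the save area set up by
 * save_mcount_regs doubles as a pt_regs for callbacks that need one.
 */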

/*
 * The gcc -pg option adds a call to 'mcount' in most functions.
 * When -mfentry is used, the call is to 'fentry' instead of 'mcount'
 * and is made before the function's stack frame is set up.
 * Both require a set of regs to be saved before calling
 * any C code, and restored before returning to the function.
 *
 * On boot up, all these calls are converted into nops. When tracing
 * is enabled, the call can jump to either ftrace_caller or
 * ftrace_regs_caller. Callbacks (tracing functions) that require
 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
 * them. For this reason, space for a full pt_regs structure is
 * allocated on the stack and the required mcount registers are
 * saved in the locations that pt_regs has them in.
 */

/*
 * @added: the amount of stack added before calling this
 *
 * After this is called, the following registers contain:
 *
 *  %rdi - holds the address that called the trampoline
 *  %rsi - holds the parent function (traced function's return address)
 *  %rdx - holds the original %rbp
 */
.macro save_mcount_regs added=0

#ifdef CONFIG_FRAME_POINTER
    /* Save the original rbp */
    pushq %rbp

    /*
     * Stack traces will stop at the ftrace trampoline if the frame pointer
     * is not set up properly. If fentry is used, we need to save a frame
     * pointer for the parent as well as the function traced, because
     * fentry is called before the stack frame is set up, whereas mcount
     * is called afterward.
     */

    /* Save the parent pointer (skip orig rbp and our return address) */
    pushq \added+8*2(%rsp)
    pushq %rbp
    movq %rsp, %rbp
    /* Save the return address (now skip orig rbp, rbp and parent) */
    pushq \added+8*3(%rsp)
    pushq %rbp
    movq %rsp, %rbp
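
    /*
     * The top of the stack (lowest address first) is now:
     *
     *   saved %rbp (points at the parent frame below)  <- %rsp, %rbp
     *   return address within the traced function
     *   saved %rbp (original value)
     *   parent's return address
     *   original %rbp
     *
     * i.e. two fake rip/rbp frames plus the original rbp, which is what
     * MCOUNT_FRAME_SIZE (8 + 16*2) accounts for and what lets stack
     * unwinders walk through the trampoline.
     */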
#endif /* CONFIG_FRAME_POINTER */

    /*
     * We add enough stack to save all regs.
     */
    subq $(FRAME_SIZE), %rsp
    movq %rax, RAX(%rsp)
    movq %rcx, RCX(%rsp)
    movq %rdx, RDX(%rsp)
    movq %rsi, RSI(%rsp)
    movq %rdi, RDI(%rsp)
    movq %r8, R8(%rsp)
    movq %r9, R9(%rsp)
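    /*
     * ORIG_RAX must start out zero: ftrace_regs_caller later treats a
     * non-zero value here as a direct-call target (see
     * arch_ftrace_set_direct_caller()).
     */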
    movq $0, ORIG_RAX(%rsp)
    /*
     * Save the original RBP. Even though the mcount ABI does not
     * require this, it helps out callers.
     */
#ifdef CONFIG_FRAME_POINTER
    movq MCOUNT_REG_SIZE-8(%rsp), %rdx
#else
    movq %rbp, %rdx
#endif
    movq %rdx, RBP(%rsp)

    /* Copy the parent address into %rsi (second parameter) */
    movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi

    /* Move RIP to its proper location */
    movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
    movq %rdi, RIP(%rsp)

    /*
     * Now %rdi (the first parameter) has the return address of
     * where ftrace_call returns. But the callbacks expect the
     * address of the call itself, so subtract the size of the
     * mcount call instruction (MCOUNT_INSN_SIZE) to get it.
     */
    subq $MCOUNT_INSN_SIZE, %rdi
    .endm

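/*
 * @save: number of bytes of the register save area to leave on the
 *        stack; ftrace_regs_caller's direct-call path passes 8 so that
 *        the slot it stashed the flags in can still be popped afterwards.
 */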
.macro restore_mcount_regs save=0

    /* ftrace_regs_caller or frame pointers require this */
    movq RBP(%rsp), %rbp

    movq R9(%rsp), %r9
    movq R8(%rsp), %r8
    movq RDI(%rsp), %rdi
    movq RSI(%rsp), %rsi
    movq RDX(%rsp), %rdx
    movq RCX(%rsp), %rcx
    movq RAX(%rsp), %rax

    addq $MCOUNT_REG_SIZE-\save, %rsp

    .endm

#ifdef CONFIG_DYNAMIC_FTRACE

SYM_FUNC_START(__fentry__)
    RET
SYM_FUNC_END(__fentry__)
EXPORT_SYMBOL(__fentry__)

SYM_FUNC_START(ftrace_caller)
    /* save_mcount_regs fills in first two parameters */
    save_mcount_regs

    /* Stack - skipping return address of ftrace_caller */
    leaq MCOUNT_REG_SIZE+8(%rsp), %rcx
    movq %rcx, RSP(%rsp)

SYM_INNER_LABEL(ftrace_caller_op_ptr, SYM_L_GLOBAL)
    ANNOTATE_NOENDBR
    /* Load the ftrace_ops into the 3rd parameter */
    movq function_trace_op(%rip), %rdx

    /* regs go into 4th parameter */
    leaq (%rsp), %rcx

    /* Only ops with REGS flag set should have CS register set */
    movq $0, CS(%rsp)

SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
    ANNOTATE_NOENDBR
    call ftrace_stub

    /* Handlers can change the RIP */
    movq RIP(%rsp), %rax
    movq %rax, MCOUNT_REG_SIZE(%rsp)
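    /*
     * MCOUNT_REG_SIZE(%rsp) is the slot holding the address the
     * trampoline returns to, so an updated RIP is what the RET in
     * ftrace_epilogue will jump back to.
     */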

    restore_mcount_regs

    /*
     * The code up to this label is copied into trampolines so
     * think twice before adding any new code or changing the
     * layout here.
     */
SYM_INNER_LABEL(ftrace_caller_end, SYM_L_GLOBAL)
    ANNOTATE_NOENDBR

    jmp ftrace_epilogue
SYM_FUNC_END(ftrace_caller);
STACK_FRAME_NON_STANDARD_FP(ftrace_caller)

SYM_FUNC_START(ftrace_epilogue)
/*
 * This is weak to keep gas from relaxing the jumps.
 */
SYM_INNER_LABEL_ALIGN(ftrace_stub, SYM_L_WEAK)
    UNWIND_HINT_FUNC
    ENDBR
    RET
SYM_FUNC_END(ftrace_epilogue)

SYM_FUNC_START(ftrace_regs_caller)
    /* Save the current flags before any operations that can change them */
    pushfq

    /* added 8 bytes to save flags */
    save_mcount_regs 8
    /* save_mcount_regs fills in first two parameters */

SYM_INNER_LABEL(ftrace_regs_caller_op_ptr, SYM_L_GLOBAL)
    ANNOTATE_NOENDBR
    /* Load the ftrace_ops into the 3rd parameter */
    movq function_trace_op(%rip), %rdx

    /* Save the rest of pt_regs */
    movq %r15, R15(%rsp)
    movq %r14, R14(%rsp)
    movq %r13, R13(%rsp)
    movq %r12, R12(%rsp)
    movq %r11, R11(%rsp)
    movq %r10, R10(%rsp)
    movq %rbx, RBX(%rsp)
    /* Copy saved flags */
    movq MCOUNT_REG_SIZE(%rsp), %rcx
    movq %rcx, EFLAGS(%rsp)
    /* Kernel segments */
    movq $__KERNEL_DS, %rcx
    movq %rcx, SS(%rsp)
    movq $__KERNEL_CS, %rcx
    movq %rcx, CS(%rsp)
    /* Stack - skipping return address and flags */
    leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
    movq %rcx, RSP(%rsp)

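    /*
     * Encode a pointer to the pt_regs just built into %rbp so the
     * frame pointer unwinder can find the saved registers.
     */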
    ENCODE_FRAME_POINTER

    /* regs go into 4th parameter */
    leaq (%rsp), %rcx

SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
    ANNOTATE_NOENDBR
    call ftrace_stub

    /* Copy flags back to SS, to restore them */
    movq EFLAGS(%rsp), %rax
    movq %rax, MCOUNT_REG_SIZE(%rsp)

    /* Handlers can change the RIP */
    movq RIP(%rsp), %rax
    movq %rax, MCOUNT_REG_SIZE+8(%rsp)

    /* restore the rest of pt_regs */
    movq R15(%rsp), %r15
    movq R14(%rsp), %r14
    movq R13(%rsp), %r13
    movq R12(%rsp), %r12
    movq R10(%rsp), %r10
    movq RBX(%rsp), %rbx

    movq ORIG_RAX(%rsp), %rax
    movq %rax, MCOUNT_REG_SIZE-8(%rsp)

    /*
     * If ORIG_RAX is anything but zero, make this a call to that.
     * See arch_ftrace_set_direct_caller().
     */
    testq   %rax, %rax
SYM_INNER_LABEL(ftrace_regs_caller_jmp, SYM_L_GLOBAL)
    ANNOTATE_NOENDBR
    jnz 1f

    restore_mcount_regs
    /* Restore flags */
    popfq

    /*
     * As this jmp to ftrace_epilogue can be a short jump,
     * it must not be copied into the trampoline.
     * The trampoline will add the code to jump
     * to the return.
     */
SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
    ANNOTATE_NOENDBR
    jmp ftrace_epilogue

    /* Swap the flags with orig_rax */
1:  movq MCOUNT_REG_SIZE(%rsp), %rdi
    movq %rdi, MCOUNT_REG_SIZE-8(%rsp)
    movq %rax, MCOUNT_REG_SIZE(%rsp)
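
    /*
     * The direct-call target now occupies the slot that the RET in
     * ftrace_epilogue will pop, so after the registers and flags are
     * restored we effectively "return" into the direct trampoline with
     * the traced function's return address still on the stack.
     */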

    restore_mcount_regs 8
    /* Restore flags */
    popfq
    UNWIND_HINT_FUNC
    jmp ftrace_epilogue

SYM_FUNC_END(ftrace_regs_caller)
STACK_FRAME_NON_STANDARD_FP(ftrace_regs_caller)


#else /* ! CONFIG_DYNAMIC_FTRACE */

SYM_FUNC_START(__fentry__)
    cmpq $ftrace_stub, ftrace_trace_function
    jnz trace

SYM_INNER_LABEL(ftrace_stub, SYM_L_GLOBAL)
    ENDBR
    RET

trace:
    /* save_mcount_regs fills in first two parameters */
    save_mcount_regs

    /*
     * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
     * set (see include/asm/ftrace.h and include/linux/ftrace.h).  Only the
     * ip and parent ip are used and the list function is called when
     * function tracing is enabled.
     */
    movq ftrace_trace_function, %r8
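    /* Indirect call via the Spectre-safe CALL_NOSPEC sequence */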
    CALL_NOSPEC r8
    restore_mcount_regs

    jmp ftrace_stub
SYM_FUNC_END(__fentry__)
EXPORT_SYMBOL(__fentry__)
STACK_FRAME_NON_STANDARD_FP(__fentry__)

#endif /* CONFIG_DYNAMIC_FTRACE */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
SYM_CODE_START(return_to_handler)
    UNWIND_HINT_EMPTY
    ANNOTATE_NOENDBR
    subq  $16, %rsp

    /* Save the return values */
    movq %rax, (%rsp)
    movq %rdx, 8(%rsp)
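    /* ftrace_return_to_handler() takes the frame pointer as its argument */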
    movq %rbp, %rdi

    call ftrace_return_to_handler

    movq %rax, %rdi
    movq 8(%rsp), %rdx
    movq (%rsp), %rax

    addq $16, %rsp
    /*
     * Jump back to the old return address. This cannot be JMP_NOSPEC rdi
     * since IBT would require the jump target to contain ENDBR, which
     * simply isn't so for return addresses. Use a retpoline here to
     * keep the RSB balanced.
     */
    ANNOTATE_INTRA_FUNCTION_CALL
    call .Ldo_rop
    int3
.Ldo_rop:
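    /*
     * The call above pushed the address of the int3 as a return address;
     * overwrite it with the real return address in %rdi and RET to it,
     * keeping the call/return (RSB) pairing balanced.
     */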
    mov %rdi, (%rsp)
    RET
SYM_CODE_END(return_to_handler)
#endif