| author | Steven Rostedt (VMware) <rostedt@goodmis.org> | 2020-04-22 12:25:40 -0400 |
|---|---|---|
| committer | Steven Rostedt (VMware) <rostedt@goodmis.org> | 2020-06-29 11:42:47 -0400 |
| commit | 0b4f8ddc0cc235f30431ed5c7d533b24e995d267 (patch) | |
| tree | 96262e746226b79cdc14dce39c880d33f8c066c8 /arch/x86/kernel/ftrace_64.S | |
| parent | c791cc4b1feb881b3644c059dba5464b076bf592 (diff) | |
x86/ftrace: Make non direct case the default in ftrace_regs_caller
If a direct function is hooked along with one of the ftrace registered
functions, then ftrace_regs_caller is attached to the function that has both
the direct hook and the ftrace hook. ftrace_regs_caller calls
ftrace_ops_list_func(), which iterates through all the registered ftrace
callbacks; if a direct callback is attached to that function, the direct
ftrace_ops callback is invoked to tell ftrace_regs_caller to return to the
direct caller instead of going back to the function that called it.
But this is a very uncommon case. Currently, the code treats it as the default
case. Modify ftrace_regs_caller so that the default (non-jump, fall-through)
path simply returns normally, and the jump is taken only to handle the direct
caller. A sketch of the mixed direct/ftrace setup follows below.
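For context, here is a minimal kernel-module sketch of the scenario described
above: one function gets both a direct trampoline and a regular regs-saving
ftrace callback, which is what forces ftrace onto the shared
ftrace_regs_caller/ftrace_ops_list_func() path. It is loosely modeled on
samples/ftrace/ftrace-direct.c and assumes the v5.8-era
register_ftrace_direct(ip, addr) API and pt_regs-based callback signature; the
names my_tramp, my_regs_callback and both_hooks_* are illustrative only and are
not part of this patch.

```c
#include <linux/module.h>
#include <linux/ftrace.h>
#include <linux/sched.h>

void my_direct_func(struct task_struct *p)
{
	trace_printk("direct hook: waking up %s-%d\n", p->comm, p->pid);
}

extern void my_tramp(void *);

/* Trampoline that the direct hook jumps to (x86-64). */
asm (
"	.pushsection	.text, \"ax\", @progbits\n"
"	.type		my_tramp, @function\n"
"	.globl		my_tramp\n"
"   my_tramp:\n"
"	pushq %rbp\n"
"	movq %rsp, %rbp\n"
"	pushq %rdi\n"
"	call my_direct_func\n"
"	popq %rdi\n"
"	leave\n"
"	ret\n"
"	.size		my_tramp, .-my_tramp\n"
"	.popsection\n"
);

/* An ordinary ftrace callback that also wants pt_regs for the same function. */
static void my_regs_callback(unsigned long ip, unsigned long parent_ip,
			     struct ftrace_ops *op, struct pt_regs *regs)
{
	trace_printk("regs hook at %pS\n", (void *)ip);
}

static struct ftrace_ops my_ops = {
	.func	= my_regs_callback,
	.flags	= FTRACE_OPS_FL_SAVE_REGS,
};

static int __init both_hooks_init(void)
{
	unsigned long ip = (unsigned long)wake_up_process;
	int ret;

	/* Direct hook: normally makes the call site jump straight to my_tramp. */
	ret = register_ftrace_direct(ip, (unsigned long)my_tramp);
	if (ret)
		return ret;

	/* A second, regular hook on the same function forces the list caller. */
	ret = ftrace_set_filter_ip(&my_ops, ip, 0, 0);
	if (!ret)
		ret = register_ftrace_function(&my_ops);
	if (ret)
		unregister_ftrace_direct(ip, (unsigned long)my_tramp);
	return ret;
}

static void __exit both_hooks_exit(void)
{
	unregister_ftrace_function(&my_ops);
	unregister_ftrace_direct((unsigned long)wake_up_process,
				 (unsigned long)my_tramp);
}

module_init(both_hooks_init);
module_exit(both_hooks_exit);
MODULE_LICENSE("GPL");
```

With both hooks attached to the one ip, ftrace cannot point the call site at
my_tramp alone, so it goes through ftrace_regs_caller and the ops list; that is
the uncommon path this patch moves behind the jump, while the plain
restore-and-return path becomes the fall-through default.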
Link: http://lkml.kernel.org/r/20200422162750.350373278@goodmis.org
Cc: Peter Zijlstra <peterz@infradead.org>
Signed-off-by: Steven Rostedt (VMware) <rostedt@goodmis.org>
Diffstat (limited to 'arch/x86/kernel/ftrace_64.S')
-rw-r--r--  arch/x86/kernel/ftrace_64.S | 31
1 file changed, 15 insertions, 16 deletions
```diff
diff --git a/arch/x86/kernel/ftrace_64.S b/arch/x86/kernel/ftrace_64.S
index 083a3da7bb73..3ba32cc58f01 100644
--- a/arch/x86/kernel/ftrace_64.S
+++ b/arch/x86/kernel/ftrace_64.S
@@ -241,22 +241,9 @@ SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
         */
        movq ORIG_RAX(%rsp), %rax
        testq   %rax, %rax
-       jz      1f
+       jnz     1f
 
-       /* Swap the flags with orig_rax */
-       movq MCOUNT_REG_SIZE(%rsp), %rdi
-       movq %rdi, MCOUNT_REG_SIZE-8(%rsp)
-       movq %rax, MCOUNT_REG_SIZE(%rsp)
-
-       restore_mcount_regs 8
-       /* Restore flags */
-       popfq
-
-SYM_INNER_LABEL(ftrace_regs_caller_ret, SYM_L_GLOBAL);
-       UNWIND_HINT_RET_OFFSET
-       jmp     ftrace_epilogue
-
-1:     restore_mcount_regs
+       restore_mcount_regs
        /* Restore flags */
        popfq
 
@@ -266,9 +253,21 @@ SYM_INNER_LABEL(ftrace_regs_caller_ret, SYM_L_GLOBAL);
         * The trampoline will add the code to jump
         * to the return.
         */
-SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
+SYM_INNER_LABEL(ftrace_regs_caller_ret, SYM_L_GLOBAL)
 
        jmp ftrace_epilogue
 
+       /* Swap the flags with orig_rax */
+1:     movq MCOUNT_REG_SIZE(%rsp), %rdi
+       movq %rdi, MCOUNT_REG_SIZE-8(%rsp)
+       movq %rax, MCOUNT_REG_SIZE(%rsp)
+
+       restore_mcount_regs 8
+       /* Restore flags */
+       popfq
+SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
+       UNWIND_HINT_RET_OFFSET
+       jmp     ftrace_epilogue
+
 SYM_FUNC_END(ftrace_regs_caller)
```