Commit 85f6f029 authored by Steven Rostedt (Red Hat), committed by Steven Rostedt

ftrace/x86: Add macro MCOUNT_REG_SIZE for amount of stack used to save mcount regs

The macro save_mcount_regs saves regs onto the stack. To decouple the amount
of stack that macro uses from the users of the macro, add a define that tells
all the users how much stack the macro uses. This way the macro's stack usage
can be changed without breaking its users.
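
For example (an illustrative sketch, not part of this patch), a user of
save_mcount_regs that needs the word the caller left just above the saved
regs now addresses it through the define rather than an open-coded SS+8,
much like the flags copy in ftrace_regs_caller below:

	save_mcount_regs
	/* first word the caller pushed, just above the regs the macro saved */
	movq	MCOUNT_REG_SIZE(%rsp), %rdi

If the macro's stack footprint ever changes, only MCOUNT_REG_SIZE and the
macro body need updating; users written like the sketch above stay correct.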

Also remove some dead code that was left over from commit fdc841b5
"ftrace: x86: Remove check of obsolete variable function_trace_stop".

Link: http://lkml.kernel.org/r/alpine.DEB.2.11.1411262304010.3961@nanos
Reviewed-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Steven Rostedt <rostedt@goodmis.org>
parent 527aa75b
@@ -21,6 +21,9 @@
 # define function_hook mcount
 #endif
 
+/* Size of stack used to save mcount regs in save_mcount_regs */
+#define MCOUNT_REG_SIZE (SS+8)
+
 /*
  * gcc -pg option adds a call to 'mcount' in most functions.
  * When -mfentry is used, the call is to 'fentry' and not 'mcount'
@@ -42,7 +45,7 @@
 	/*
 	 * We add enough stack to save all regs.
 	 */
-	subq $(SS+8), %rsp
+	subq $MCOUNT_REG_SIZE, %rsp
 	movq %rax, RAX(%rsp)
 	movq %rcx, RCX(%rsp)
 	movq %rdx, RDX(%rsp)
@@ -51,7 +54,7 @@
 	movq %r8, R8(%rsp)
 	movq %r9, R9(%rsp)
 	/* Move RIP to its proper location */
-	movq SS+8+\added(%rsp), %rdi
+	movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
 	movq %rdi, RIP(%rsp)
 	.endm
@@ -63,7 +66,7 @@
 	movq RDX(%rsp), %rdx
 	movq RCX(%rsp), %rcx
 	movq RAX(%rsp), %rax
-	addq $(SS+8), %rsp
+	addq $MCOUNT_REG_SIZE, %rsp
 	.endm
 
 /* skip is set if stack has been adjusted */
@@ -79,7 +82,7 @@ GLOBAL(\trace_label)
 	subq $MCOUNT_INSN_SIZE, %rdi
 	/* Load the parent_ip into the second parameter */
 #ifdef CC_USING_FENTRY
-	movq SS+16+\added(%rsp), %rsi
+	movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
 #else
 	movq 8+\added(%rbp), %rsi
 #endif
@@ -172,7 +175,7 @@ ENTRY(ftrace_regs_caller)
 	movq %rbp, RBP(%rsp)
 	movq %rbx, RBX(%rsp)
 	/* Copy saved flags */
-	movq SS+8(%rsp), %rcx
+	movq MCOUNT_REG_SIZE(%rsp), %rcx
 	movq %rcx, EFLAGS(%rsp)
 	/* Kernel segments */
 	movq $__KERNEL_DS, %rcx
@@ -180,7 +183,7 @@ ENTRY(ftrace_regs_caller)
 	movq $__KERNEL_CS, %rcx
 	movq %rcx, CS(%rsp)
 	/* Stack - skipping return address and flags */
-	leaq SS+8*3(%rsp), %rcx
+	leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
 	movq %rcx, RSP(%rsp)
 
 	/* regs go into 4th parameter */
@@ -195,11 +198,11 @@ GLOBAL(ftrace_regs_call)
 
 	/* Copy flags back to SS, to restore them */
 	movq EFLAGS(%rsp), %rax
-	movq %rax, SS+8(%rsp)
+	movq %rax, MCOUNT_REG_SIZE(%rsp)
 
 	/* Handlers can change the RIP */
 	movq RIP(%rsp), %rax
-	movq %rax, SS+8*2(%rsp)
+	movq %rax, MCOUNT_REG_SIZE+8(%rsp)
 
 	/* restore the rest of pt_regs */
 	movq R15(%rsp), %r15
@@ -225,9 +228,6 @@ GLOBAL(ftrace_regs_caller_end)
 	jmp ftrace_return
 
-	popfq
-	jmp ftrace_stub
-
 END(ftrace_regs_caller)
@@ -266,7 +266,7 @@ ENTRY(ftrace_graph_caller)
 	save_mcount_regs
 
 #ifdef CC_USING_FENTRY
-	leaq SS+16(%rsp), %rdi
+	leaq MCOUNT_REG_SIZE+8(%rsp), %rdi
 	movq $0, %rdx	/* No framepointers needed */
 #else
 	leaq 8(%rbp), %rdi