#ifndef _ASM_X86_FTRACE_H
#define _ASM_X86_FTRACE_H
#ifdef __ASSEMBLY__
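	/*
	 * Save %rax and the argument registers (%rdi, %rsi, %rdx, %rcx,
	 * %r8, %r9) so that the call into the trace handler does not
	 * clobber the traced function's arguments.
	 */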
	.macro MCOUNT_SAVE_FRAME
	/* taken from glibc */
	subq $0x38, %rsp
	movq %rax, (%rsp)
	movq %rcx, 8(%rsp)
	movq %rdx, 16(%rsp)
	movq %rsi, 24(%rsp)
	movq %rdi, 32(%rsp)
	movq %r8, 40(%rsp)
	movq %r9, 48(%rsp)
	.endm
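	/* Restore the registers saved by MCOUNT_SAVE_FRAME and pop the frame. */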
	.macro MCOUNT_RESTORE_FRAME
	movq 48(%rsp), %r9
	movq 40(%rsp), %r8
	movq 32(%rsp), %rdi
	movq 24(%rsp), %rsi
	movq 16(%rsp), %rdx
	movq 8(%rsp), %rcx
	movq (%rsp), %rax
	addq $0x38, %rsp
	.endm
#endif /* __ASSEMBLY__ */

#ifdef CONFIG_FUNCTION_TRACER
#define MCOUNT_ADDR		((long)(mcount))
#define MCOUNT_INSN_SIZE	5 /* sizeof mcount call */

#ifndef __ASSEMBLY__
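/* The compiler inserts a call to mcount at the start of every traced function. */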
extern void mcount(void);
static inline unsigned long ftrace_call_adjust(unsigned long addr)
{
	/*
	 * call mcount is "e8 <4 byte offset>"
	 * The addr points to the 4 byte offset and the caller of this
	 * function wants the pointer to e8. Simply subtract one.
	 */
	return addr - 1;
}
#ifdef CONFIG_DYNAMIC_FTRACE
struct dyn_arch_ftrace {
	/* No extra data needed for x86 */
};
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* __ASSEMBLY__ */
#endif /* CONFIG_FUNCTION_TRACER */
#endif /* _ASM_X86_FTRACE_H */