linux/arch/x86/kernel/mcount_64.S
/*
 *  linux/arch/x86/kernel/mcount_64.S
 *
 *  Copyright (C) 2014  Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>


        .code64
        .section .entry.text, "ax"


#ifdef CONFIG_FUNCTION_TRACER

#ifdef CC_USING_FENTRY
# define function_hook  __fentry__
#else
# define function_hook  mcount
#endif
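
/*
 * A rough sketch of the call sites the compiler emits (illustrative,
 * not part of this file): with -mfentry the hook is called before the
 * function prologue,
 *
 *      func:
 *              call __fentry__
 *              push %rbp
 *              mov  %rsp, %rbp
 *
 * while the classic -pg call to mcount is made after it:
 *
 *      func:
 *              push %rbp
 *              mov  %rsp, %rbp
 *              call mcount
 */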

/* All cases save the original rbp (8 bytes) */
#ifdef CONFIG_FRAME_POINTER
# ifdef CC_USING_FENTRY
/* Save parent and function stack frames (rip and rbp) */
#  define MCOUNT_FRAME_SIZE     (8+16*2)
# else
/* Save just the function stack frame (rip and rbp) */
#  define MCOUNT_FRAME_SIZE     (8+16)
# endif
#else
/* No need to save a stack frame */
# define MCOUNT_FRAME_SIZE      8
#endif /* CONFIG_FRAME_POINTER */

/* Size of stack used to save mcount regs in save_mcount_regs */
#define MCOUNT_REG_SIZE         (SS+8 + MCOUNT_FRAME_SIZE)
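
/*
 * Note: SS is the byte offset of the last member of struct pt_regs
 * (the register offsets come from <asm/ptrace-abi.h>, pulled in by
 * <asm/ptrace.h>), so SS+8 equals sizeof(struct pt_regs). That makes
 * MCOUNT_REG_SIZE a full pt_regs plus the frame scratch above it.
 */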

/*
 * gcc's -pg option adds a call to 'mcount' to most functions.
 * When -mfentry is used, the call is to '__fentry__' and not 'mcount',
 * and is made before the function's stack frame is set up.
 * Both require a set of regs to be saved before calling
 * any C code, and restored before returning back to the function.
 *
 * On boot up, all these calls are converted into nops. When tracing
 * is enabled, the call can jump to either ftrace_caller or
 * ftrace_regs_caller. Callbacks (tracing functions) that require
 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
 * them. For this reason, a pt_regs-sized area is allocated on the
 * stack and the required mcount registers are saved in the locations
 * that pt_regs has them in.
 */
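
/*
 * For reference, the C callbacks invoked from these trampolines have
 * the ftrace_func_t prototype (see <linux/ftrace.h>):
 *
 *      void callback(unsigned long ip, unsigned long parent_ip,
 *                    struct ftrace_ops *op, struct pt_regs *regs);
 *
 * which is why %rdi, %rsi, %rdx and %rcx are loaded below.
 */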

/*
 * save_mcount_regs - save the registers the mcount ABI requires
 *
 * @added: the amount of stack added before calling this
 *
 * After this is called, the following registers contain:
 *
 *  %rdi - holds the address of the call into the trampoline
 *  %rsi - holds the parent function (traced function's return address)
 *  %rdx - holds the original %rbp
 */
.macro save_mcount_regs added=0

        /* Always save the original rbp */
        pushq %rbp

#ifdef CONFIG_FRAME_POINTER
        /*
         * Stack traces will stop at the ftrace trampoline if the frame pointer
         * is not set up properly. If fentry is used, we need to save a frame
         * pointer for the parent as well as the function traced, because
         * fentry is called before the stack frame is set up, whereas mcount
         * is called afterward.
         */
#ifdef CC_USING_FENTRY
        /* Save the parent pointer (skip orig rbp and our return address) */
        pushq \added+8*2(%rsp)
        pushq %rbp
        movq %rsp, %rbp
        /* Save the return address (now skip orig rbp, rbp and parent) */
        pushq \added+8*3(%rsp)
#else
        /*
         * mcount is called after the frame is set up, so the return
         * address into the traced function sits just above the orig
         * rbp we pushed (plus whatever the caller already added).
         */
        pushq \added+8(%rsp)
#endif
        pushq %rbp
        movq %rsp, %rbp
#endif /* CONFIG_FRAME_POINTER */

        /* Add enough stack to save all regs */
        subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
        movq %rax, RAX(%rsp)
        movq %rcx, RCX(%rsp)
        movq %rdx, RDX(%rsp)
        movq %rsi, RSI(%rsp)
        movq %rdi, RDI(%rsp)
        movq %r8, R8(%rsp)
        movq %r9, R9(%rsp)
        /*
         * Save the original RBP. Even though the mcount ABI does not
         * require this, it helps out callers.
         */
        movq MCOUNT_REG_SIZE-8(%rsp), %rdx
        movq %rdx, RBP(%rsp)

        /* Copy the parent address into %rsi (second parameter) */
#ifdef CC_USING_FENTRY
        movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
#else
        /* %rdx contains the original %rbp; the parent address is above it */
        movq 8(%rdx), %rsi
#endif

        /* Move RIP to its proper location */
        movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
        movq %rdi, RIP(%rsp)

        /*
         * Now %rdi (the first parameter) has the return address of
         * where ftrace_call returns. But the callbacks expect the
         * address of the call itself.
         */
        subq $MCOUNT_INSN_SIZE, %rdi
        .endm
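
/*
 * A sketch of the resulting stack (fentry + frame pointers), derived
 * from the offsets used above, relative to the new %rsp; the \added
 * bytes (e.g. saved flags) sit between the original %rbp and the RIP
 * slot:
 *
 *      MCOUNT_REG_SIZE+8+\added   parent return address
 *      MCOUNT_REG_SIZE+\added     RIP back into the traced function
 *      MCOUNT_REG_SIZE-8          original %rbp
 *      ...                        fake frames (rip/rbp) for unwinding
 *      0 .. SS                    pt_regs-shaped register save area
 */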

.macro restore_mcount_regs
        movq R9(%rsp), %r9
        movq R8(%rsp), %r8
        movq RDI(%rsp), %rdi
        movq RSI(%rsp), %rsi
        movq RDX(%rsp), %rdx
        movq RCX(%rsp), %rcx
        movq RAX(%rsp), %rax

        /* ftrace_regs_caller can modify %rbp */
        movq RBP(%rsp), %rbp

        /* Unwind both the pt_regs area and the frame pushed above */
        addq $MCOUNT_REG_SIZE, %rsp

        .endm

#ifdef CONFIG_DYNAMIC_FTRACE

/*
 * With dynamic ftrace the compiler-inserted call sites are patched at
 * runtime (nops when tracing is off), so the hook itself just returns.
 */
ENTRY(function_hook)
        retq
END(function_hook)

ENTRY(ftrace_caller)
        /* save_mcount_regs fills in first two parameters */
        save_mcount_regs

GLOBAL(ftrace_caller_op_ptr)
        /* Load the ftrace_ops into the 3rd parameter */
        movq function_trace_op(%rip), %rdx

        /* regs go into 4th parameter (but make it NULL) */
        movq $0, %rcx

GLOBAL(ftrace_call)
        call ftrace_stub

        restore_mcount_regs

        /*
         * The copied trampoline must call ftrace_return as it
         * still may need to call the function graph tracer.
         */
GLOBAL(ftrace_caller_end)

GLOBAL(ftrace_return)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
        jmp ftrace_stub
#endif

GLOBAL(ftrace_stub)
        retq
END(ftrace_caller)
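
/*
 * The "call ftrace_stub" at ftrace_call above is not left as-is:
 * ftrace rewrites it at runtime to call the active callback, and
 * likewise patches the jmp at ftrace_graph_call. The global labels
 * (ftrace_caller_op_ptr, ftrace_caller_end) also let ftrace copy this
 * trampoline and fix up the ftrace_ops load when it builds a per-ops
 * trampoline.
 */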

ENTRY(ftrace_regs_caller)
        /* Save the current flags before any operations that can change them */
        pushfq

        /* save_mcount_regs fills in the first two parameters */
        /* added 8 bytes to save flags */
        save_mcount_regs 8

GLOBAL(ftrace_regs_caller_op_ptr)
        /* Load the ftrace_ops into the 3rd parameter */
        movq function_trace_op(%rip), %rdx

        /* Save the rest of pt_regs */
        movq %r15, R15(%rsp)
        movq %r14, R14(%rsp)
        movq %r13, R13(%rsp)
        movq %r12, R12(%rsp)
        movq %r11, R11(%rsp)
        movq %r10, R10(%rsp)
        movq %rbx, RBX(%rsp)
        /* Copy saved flags */
        movq MCOUNT_REG_SIZE(%rsp), %rcx
        movq %rcx, EFLAGS(%rsp)
        /* Kernel segments */
        movq $__KERNEL_DS, %rcx
        movq %rcx, SS(%rsp)
        movq $__KERNEL_CS, %rcx
        movq %rcx, CS(%rsp)
        /* Stack - skipping return address and flags */
        leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
        movq %rcx, RSP(%rsp)

        /* regs go into 4th parameter */
        leaq (%rsp), %rcx

GLOBAL(ftrace_regs_call)
        call ftrace_stub

        /*
         * Copy the (possibly updated) flags back to where pushfq saved
         * them, so the popfq below restores any changes made by handlers.
         */
        movq EFLAGS(%rsp), %rax
        movq %rax, MCOUNT_REG_SIZE(%rsp)

        /* Handlers can change the RIP */
        movq RIP(%rsp), %rax
        movq %rax, MCOUNT_REG_SIZE+8(%rsp)

        /*
         * Restore the rest of pt_regs (%r11 is caller-clobbered scratch
         * in the System V ABI and is not restored here).
         */
        movq R15(%rsp), %r15
        movq R14(%rsp), %r14
        movq R13(%rsp), %r13
        movq R12(%rsp), %r12
        movq R10(%rsp), %r10
        movq RBX(%rsp), %rbx

        restore_mcount_regs

        /* Restore flags */
        popfq

        /*
         * As this jmp to ftrace_return can be a short jump
         * it must not be copied into the trampoline.
         * The trampoline will add the code to jump
         * to the return.
         */
GLOBAL(ftrace_regs_caller_end)

        jmp ftrace_return

END(ftrace_regs_caller)
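
/*
 * Unlike ftrace_caller, ftrace_regs_caller hands the callback a
 * complete pt_regs, so handlers such as kprobes may inspect and modify
 * any register, including EFLAGS and RIP (both copied back out above
 * before the registers are restored).
 */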

#else /* !CONFIG_DYNAMIC_FTRACE */

ENTRY(function_hook)
        cmpq $ftrace_stub, ftrace_trace_function
        jnz trace

fgraph_trace:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        cmpq $ftrace_stub, ftrace_graph_return
        jnz ftrace_graph_caller

        cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
        jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
        retq

trace:
        /* save_mcount_regs fills in first two parameters */
        save_mcount_regs

        call   *ftrace_trace_function

        restore_mcount_regs

        jmp fgraph_trace
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* CONFIG_FUNCTION_TRACER */


#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
        /* Saves the original rbp into %rdx and fills in the first parameter */
        save_mcount_regs

#ifdef CC_USING_FENTRY
        /* Second parameter: address of the parent return-address slot */
        leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
        movq $0, %rdx   /* No frame pointers needed */
#else
        /* Save address of the return address of traced function */
        leaq 8(%rdx), %rsi
        /* ftrace does sanity checks against frame pointers */
        movq (%rdx), %rdx
#endif
        call    prepare_ftrace_return

        restore_mcount_regs

        retq
END(ftrace_graph_caller)
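
/*
 * prepare_ftrace_return() (see arch/x86/kernel/ftrace.c) takes the
 * address of the parent return-address slot in %rsi and, when the
 * function is accepted for graph tracing, replaces the return address
 * stored there with return_to_handler below, so the traced function
 * "returns" into the tracer on exit.
 */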

GLOBAL(return_to_handler)
        subq  $24, %rsp

        /* Save the return values */
        movq %rax, (%rsp)
        movq %rdx, 8(%rsp)
        movq %rbp, %rdi

        call ftrace_return_to_handler

        /* The handler returns the original return address in %rax */
        movq %rax, %rdi
        movq 8(%rsp), %rdx
        movq (%rsp), %rax
        addq $24, %rsp
        jmp *%rdi
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */