/*
 * arch/sh/lib/mcount.S
 *
 *  Copyright (C) 2008, 2009  Paul Mundt
 *  Copyright (C) 2008, 2009  Matt Fleming
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#include <asm/ftrace.h>
#include <asm/thread_info.h>
#include <asm/asm-offsets.h>

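/*
 * MCOUNT_ENTER()/MCOUNT_LEAVE() bracket every tracer invocation below.
 * They save and restore the caller-saved argument registers r4-r7 and
 * pr, then set up the tracer's arguments: r4 gets the return address
 * that was already on the stack (found at @(20,r15) once the five
 * words have been pushed, see the matching note in ftrace_graph_caller
 * below), and r5 gets pr, the address mcount was called from.
 */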
#define MCOUNT_ENTER()          \
        mov.l   r4, @-r15;      \
        mov.l   r5, @-r15;      \
        mov.l   r6, @-r15;      \
        mov.l   r7, @-r15;      \
        sts.l   pr, @-r15;      \
                                \
        mov.l   @(20,r15),r4;   \
        sts     pr, r5

#define MCOUNT_LEAVE()          \
        lds.l   @r15+, pr;      \
        mov.l   @r15+, r7;      \
        mov.l   @r15+, r6;      \
        mov.l   @r15+, r5;      \
        rts;                    \
         mov.l  @r15+, r4

#ifdef CONFIG_STACK_DEBUG
/*
 * Perform diagnostic checks on the state of the kernel stack.
 *
 * Check for stack overflow. If there is less than 1KB free
 * then it has overflowed.
 *
 * Make sure the stack pointer contains a valid address. Valid
 * addresses for kernel stacks are anywhere after the bss
 * (after __bss_stop) and anywhere in init_thread_union (init_stack).
 */
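/*
 * Roughly, in C-like terms (a sketch of the checks below, not the
 * generated code):
 *
 *      if ((sp & (THREAD_SIZE - 1)) <= STACK_WARN + TI_SIZE)
 *              goto stack_panic;
 *      if (sp <= __bss_stop) {
 *              if (sp < init_thread_union)
 *                      goto stack_panic;
 *              if (sp >= init_thread_union + THREAD_SIZE)
 *                      goto stack_panic;
 *      }
 */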
#define STACK_CHECK()                                   \
        mov     #(THREAD_SIZE >> 10), r0;               \
        shll8   r0;                                     \
        shll2   r0;                                     \
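        /* r0 = THREAD_SIZE (8-bit immediate scaled back up by 10 bits) */ \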
                                                        \
        /* r1 = sp & (THREAD_SIZE - 1) */               \
        mov     #-1, r1;                                \
        add     r0, r1;                                 \
        and     r15, r1;                                \
                                                        \
        mov     #TI_SIZE, r3;                           \
        mov     #(STACK_WARN >> 8), r2;                 \
        shll8   r2;                                     \
        add     r3, r2;                                 \
                                                        \
        /* Is the stack overflowing? */                 \
        cmp/hi  r2, r1;                                 \
        bf      stack_panic;                            \
                                                        \
        /* If sp > __bss_stop then we're OK. */         \
        mov.l   .L_ebss, r1;                            \
        cmp/hi  r1, r15;                                \
        bt      1f;                                     \
                                                        \
        /* If sp < init_stack, we're not OK. */         \
        mov.l   .L_init_thread_union, r1;               \
        cmp/hs  r1, r15;                                \
        bf      stack_panic;                            \
                                                        \
        /* If sp is past the end of init_stack (and still below __bss_stop), not OK. */ \
        add     r0, r1;                                 \
        cmp/hs  r1, r15;                                \
        bt      stack_panic;                            \
1:
#else
#define STACK_CHECK()
#endif /* CONFIG_STACK_DEBUG */

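/*
 * mcount/_mcount is the call the compiler plants at the start of every
 * instrumented function. The stack check always runs when
 * CONFIG_STACK_DEBUG is enabled; without CONFIG_FUNCTION_TRACER that
 * is all this entry point does before returning.
 */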
        .align 2
        .globl  _mcount
        .type   _mcount,@function
        .globl  mcount
        .type   mcount,@function
_mcount:
mcount:
        STACK_CHECK()

#ifndef CONFIG_FUNCTION_TRACER
        rts
         nop
#else
        MCOUNT_ENTER()

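/*
 * Static ftrace reads the current ftrace_trace_function pointer and
 * skips the call while it still points at ftrace_stub. With
 * CONFIG_DYNAMIC_FTRACE the exported mcount_call site simply calls
 * ftrace_stub from here; live patching of call sites is handled via
 * ftrace_caller below.
 */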
#ifdef CONFIG_DYNAMIC_FTRACE
        .globl  mcount_call
mcount_call:
        mov.l   .Lftrace_stub, r6
#else
        mov.l   .Lftrace_trace_function, r6
        mov.l   ftrace_stub, r7
        cmp/eq  r6, r7
        bt      skip_trace
        mov.l   @r6, r6
#endif

        jsr     @r6
         nop

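/*
 * If either of the function graph hooks has been installed (i.e. no
 * longer points at its stub), divert to ftrace_graph_caller so the
 * return address can be hooked; otherwise continue to the normal exit
 * path at skip_trace.
 */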
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        mov.l   .Lftrace_graph_return, r6
        mov.l   .Lftrace_stub, r7
        cmp/eq  r6, r7
        bt      1f

        mov.l   .Lftrace_graph_caller, r0
        jmp     @r0
         nop

1:
        mov.l   .Lftrace_graph_entry, r6
        mov.l   .Lftrace_graph_entry_stub, r7
        cmp/eq  r6, r7
        bt      skip_trace

        mov.l   .Lftrace_graph_caller, r0
        jmp     @r0
         nop

        .align 2
.Lftrace_graph_return:
        .long   ftrace_graph_return
.Lftrace_graph_entry:
        .long   ftrace_graph_entry
.Lftrace_graph_entry_stub:
        .long   ftrace_graph_entry_stub
.Lftrace_graph_caller:
        .long   ftrace_graph_caller
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

        .globl skip_trace
skip_trace:
        MCOUNT_LEAVE()

        .align 2
.Lftrace_trace_function:
        .long   ftrace_trace_function

#ifdef CONFIG_DYNAMIC_FTRACE
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * NOTE: Do not move either ftrace_graph_call or ftrace_caller
 * as this will affect the calculation of GRAPH_INSN_OFFSET.
 */
        .globl ftrace_graph_call
ftrace_graph_call:
        mov.l   .Lskip_trace, r0
        jmp     @r0
         nop

        .align 2
.Lskip_trace:
        .long   skip_trace
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

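/*
 * ftrace_caller is the dynamic ftrace entry point: once tracing is
 * enabled, the per-function mcount calls are redirected here, and the
 * ftrace_call site below is rewritten at runtime so that it invokes
 * the active tracer instead of ftrace_stub.
 */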
        .globl ftrace_caller
ftrace_caller:
        MCOUNT_ENTER()

        .globl ftrace_call
ftrace_call:
        mov.l   .Lftrace_stub, r6
        jsr     @r6
         nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        bra     ftrace_graph_call
         nop
#else
        MCOUNT_LEAVE()
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_DYNAMIC_FTRACE */

        .align 2

/*
 * NOTE: From here on the locations of the .Lftrace_stub label and
 * ftrace_stub itself are fixed. Adding additional data here will skew
 * the displacement for the memory table and break the block replacement.
 * Place new labels either after the ftrace_stub body, or before
 * ftrace_caller. You have been warned.
 */
.Lftrace_stub:
        .long   ftrace_stub

        .globl  ftrace_stub
ftrace_stub:
        rts
         nop

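/*
 * ftrace_graph_caller hands prepare_ftrace_return() the location of
 * the return address saved by MCOUNT_ENTER(), allowing it to be
 * redirected through return_to_handler below.
 */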
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        .globl  ftrace_graph_caller
ftrace_graph_caller:
        mov.l   2f, r1
        jmp     @r1
         nop
1:
        /*
         * MCOUNT_ENTER() pushed 5 registers onto the stack, so
         * the stack address containing our return address is
         * r15 + 20.
         */
        mov     #20, r0
        add     r15, r0
        mov     r0, r4

        mov.l   .Lprepare_ftrace_return, r0
        jsr     @r0
         nop

        MCOUNT_LEAVE()

        .align 2
2:      .long   skip_trace
.Lprepare_ftrace_return:
        .long   prepare_ftrace_return

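/*
 * Functions whose return address was redirected by the graph tracer
 * come back here instead of to their real caller;
 * ftrace_return_to_handler() supplies the original return address.
 */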
        .globl  return_to_handler
return_to_handler:
        /*
         * Save the return values.
         */
        mov.l   r0, @-r15
        mov.l   r1, @-r15

        mov     #0, r4

        mov.l   .Lftrace_return_to_handler, r0
        jsr     @r0
         nop

        /*
         * The return value from ftrace_return_to_handler is the real
         * address that we should return to.
         */
        lds     r0, pr
        mov.l   @r15+, r1
        rts
         mov.l  @r15+, r0

        .align 2
.Lftrace_return_to_handler:
        .long   ftrace_return_to_handler
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_STACK_DEBUG
        .globl  stack_panic
stack_panic:
        mov.l   .Ldump_stack, r0
        jsr     @r0
         nop

        mov.l   .Lpanic, r0
        jsr     @r0
         mov.l  .Lpanic_s, r4

        rts
         nop

        .align 2
.L_init_thread_union:
        .long   init_thread_union
.L_ebss:
        .long   __bss_stop
.Lpanic:
        .long   panic
.Lpanic_s:
        .long   .Lpanic_str
.Ldump_stack:
        .long   dump_stack

        .section        .rodata
        .align 2
.Lpanic_str:
        .string "Stack error"
#endif /* CONFIG_STACK_DEBUG */
 291