/*
 *
 *  linux/arch/h8300/kernel/entry.S
 *
 *  Yoshinori Sato <ysato@users.sourceforge.jp>
 *  David McCullough <davidm@snapgear.com>
 *
 */

/*
 *  entry.S
 *  exception/interrupt gateways and
 *  system call entry
 */

#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>
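
/*
 * CPU-specific helpers: the H8/300H and H8S differ in their shift
 * instructions, block register moves and the presence of the EXR
 * register, so the macros below hide those differences from the
 * common entry code.
 */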
#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
        .h8300h
        .macro  SHLL2 reg
        shll.l  \reg
        shll.l  \reg
        .endm
        .macro  SHLR2 reg
        shlr.l  \reg
        shlr.l  \reg
        .endm
        .macro  SAVEREGS
        mov.l   er0,@-sp
        mov.l   er1,@-sp
        mov.l   er2,@-sp
        mov.l   er3,@-sp
        .endm
        .macro  RESTOREREGS
        mov.l   @sp+,er3
        mov.l   @sp+,er2
        .endm
        /* the H8/300H has no EXR register */
        .macro  SAVEEXR
        .endm
        .macro  RESTOREEXR
        .endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
        .h8300s
        .macro  SHLL2 reg
        shll.l  #2,\reg
        .endm
        .macro  SHLR2 reg
        shlr.l  #2,\reg
        .endm
        .macro  SAVEREGS
        stm.l   er0-er3,@-sp
        .endm
        .macro  RESTOREREGS
        ldm.l   @sp+,er2-er3
        .endm
        .macro  SAVEEXR
        mov.w   @(USEREXR:16,er0),r1
        mov.w   r1,@(LEXR-LER3:16,sp)           /* copy EXR */
        .endm
        .macro  RESTOREEXR
        mov.w   @(LEXR-LER1:16,sp),r1           /* restore EXR */
        mov.b   r1l,r1h
        mov.w   r1,@(USEREXR:16,er0)
        .endm
#endif


/* CPU context save/restore macros. */
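
/*
 * SAVE_ALL builds a pt_regs frame on the kernel stack.  On entry from
 * user mode it switches from the user stack (saved in _sw_usp) to the
 * kernel stack (_sw_ksp) and copies the hardware ccr/pc frame across;
 * CCR bit 4 serves as the kernel-mode flag.  On exit r1 holds the
 * saved ccr.
 */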
        .macro  SAVE_ALL
        mov.l   er0,@-sp
        stc     ccr,r0l                         /* check kernel mode */
        btst    #4,r0l
        bne     5f

        /* user mode */
        mov.l   sp,@_sw_usp
        mov.l   @sp,er0                         /* restore saved er0 */
        orc     #0x10,ccr                       /* switch kernel stack */
        mov.l   @_sw_ksp,sp
        sub.l   #(LRET-LORIG),sp                /* allocate LORIG - LRET */
        SAVEREGS
        mov.l   @_sw_usp,er0
        mov.l   @(USERRET:16,er0),er1           /* copy the RET addr */
        mov.l   er1,@(LRET-LER3:16,sp)
        SAVEEXR

        mov.l   @(LORIG-LER3:16,sp),er0
        mov.l   er0,@(LER0-LER3:16,sp)          /* copy ER0 */
        mov.w   e1,r1                           /* e1 highbyte = ccr */
        and     #0xef,r1h                       /* clear kernel-mode flag */
        bra     6f
5:
        /* kernel mode */
        mov.l   @sp,er0                         /* restore saved er0 */
        subs    #2,sp                           /* set dummy ccr */
        subs    #4,sp                           /* set dummy sp */
        SAVEREGS
        mov.w   @(LRET-LER3:16,sp),r1           /* copy old ccr */
6:
        mov.b   r1h,r1l
        mov.b   #0,r1h
        mov.w   r1,@(LCCR-LER3:16,sp)           /* set ccr */
        mov.l   @_sw_usp,er2
        mov.l   er2,@(LSP-LER3:16,sp)           /* set usp */
        mov.l   er6,@-sp                        /* syscall arg #6 */
        mov.l   er5,@-sp                        /* syscall arg #5 */
        mov.l   er4,@-sp                        /* syscall arg #4 */
        .endm                                   /* r1 = ccr */
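
/*
 * RESTORE_ALL unwinds the pt_regs frame and returns with rte.  For a
 * return to user space it first rebuilds the hardware ccr/pc frame on
 * the user stack (at offset USERRET) and switches back to the user
 * stack pointer.
 */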
        .macro  RESTORE_ALL
        mov.l   @sp+,er4
        mov.l   @sp+,er5
        mov.l   @sp+,er6
        RESTOREREGS
        mov.w   @(LCCR-LER1:16,sp),r0           /* check kernel mode */
        btst    #4,r0l
        bne     7f

        orc     #0xc0,ccr                       /* disable interrupts */
        mov.l   @(LSP-LER1:16,sp),er0
        mov.l   @(LER0-LER1:16,sp),er1          /* restore ER0 */
        mov.l   er1,@er0
        RESTOREEXR
        mov.w   @(LCCR-LER1:16,sp),r1           /* rebuild ccr/RET addr frame */
        mov.b   r1l,r1h
        mov.b   @(LRET+1-LER1:16,sp),r1l
        mov.w   r1,e1
        mov.w   @(LRET+2-LER1:16,sp),r1
        mov.l   er1,@(USERRET:16,er0)

        mov.l   @sp+,er1
        add.l   #(LRET-LER1),sp                 /* remove LORIG - LRET */
        mov.l   sp,@_sw_ksp
        andc    #0xef,ccr                       /* switch to user mode */
        mov.l   er0,sp
        bra     8f
7:
        mov.l   @sp+,er1
        add.l   #10,sp
8:
        mov.l   @sp+,er0
        adds    #4,sp                           /* remove the sw created LVEC */
        rte
        .endm

.globl _system_call
.globl ret_from_exception
.globl ret_from_fork
.globl ret_from_kernel_thread
.globl ret_from_interrupt
.globl _interrupt_redirect_table
.globl _sw_ksp,_sw_usp
.globl _resume
.globl _interrupt_entry
.globl _trace_break
.globl _nmi
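
/*
 * Exception vector redirect table.  With a ROM kernel the table lives
 * in the .int_redirect section and sends every vector to the common
 * handlers below; with a RAM kernel only the table base address is
 * kept, in .bss.
 */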
#if defined(CONFIG_ROMKERNEL)
        .section .int_redirect,"ax"
_interrupt_redirect_table:
#if defined(CONFIG_CPU_H8300H)
        .rept   7
        .long   0
        .endr
#endif
#if defined(CONFIG_CPU_H8S)
        .rept   5
        .long   0
        .endr
        jmp     @_trace_break
        .long   0
#endif

        jsr     @_interrupt_entry               /* NMI */
        jmp     @_system_call                   /* TRAPA #0 (System call) */
        .long   0
#if defined(CONFIG_KGDB)
        jmp     @_kgdb_trap
#else
        .long   0
#endif
        jmp     @_trace_break                   /* TRAPA #3 (breakpoint) */
        .rept   INTERRUPTS-12
        jsr     @_interrupt_entry
        .endr
#endif
#if defined(CONFIG_RAMKERNEL)
.globl _interrupt_redirect_table
        .section .bss
_interrupt_redirect_table:
        .space  4
#endif
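
/*
 * Common interrupt entry.  The return address pushed by the jsr in
 * the redirect table (the LVEC slot) identifies the entry, so the
 * vector number is ((LVEC - table base) / 4) - 1; do_IRQ() is then
 * called with the vector in er0 and the frame pointer in er1.
 */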
        .section .text
        .align  2
_interrupt_entry:
        SAVE_ALL
/* r1l is saved ccr */
        mov.l   sp,er0
        add.l   #LVEC,er0
        btst    #4,r1l
        bne     1f
        /* user LVEC */
        mov.l   @_sw_usp,er0
        adds    #4,er0
1:
        mov.l   @er0,er0                        /* LVEC address */
#if defined(CONFIG_ROMKERNEL)
        sub.l   #_interrupt_redirect_table,er0
#endif
#if defined(CONFIG_RAMKERNEL)
        mov.l   @_interrupt_redirect_table,er1
        sub.l   er1,er0
#endif
        SHLR2   er0
        dec.l   #1,er0                          /* er0 = vector number */
        mov.l   sp,er1
        subs    #4,er1                          /* adjust ret_pc */
#if defined(CONFIG_CPU_H8S)
        orc     #7,exr
#endif
        jsr     @do_IRQ
        jmp     @ret_from_interrupt
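
/*
 * System call entry (TRAPA #0).  er0 carries the syscall number and
 * er1-er3 the first three arguments; arguments 4-6 arrive in er4-er6
 * and are saved at the top of the frame by SAVE_ALL, where they line
 * up as stack arguments for the handler.
 */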
_system_call:
        subs    #4,sp                           /* dummy LVEC */
        SAVE_ALL
        /* er0: syscall nr */
        andc    #0xbf,ccr
        mov.l   er0,er4

        /* save top of frame */
        mov.l   sp,er0
        jsr     @set_esp0
        andc    #0x3f,ccr                       /* enable interrupts */
        mov.l   sp,er2
        and.w   #0xe000,r2                      /* er2 = thread_info */
        mov.l   @(TI_FLAGS:16,er2),er2
        and.w   #_TIF_WORK_SYSCALL_MASK,r2
        beq     1f
        mov.l   sp,er0
        jsr     @do_syscall_trace_enter
1:
        cmp.l   #__NR_syscalls,er4
        bcc     badsys
        SHLL2   er4
        mov.l   #_sys_call_table,er0
        add.l   er4,er0
        mov.l   @er0,er4
        beq     ret_from_exception:16
        mov.l   @(LER1:16,sp),er0               /* arg #1 */
        mov.l   @(LER2:16,sp),er1               /* arg #2 */
        mov.l   @(LER3:16,sp),er2               /* arg #3 */
        jsr     @er4
        mov.l   er0,@(LER0:16,sp)               /* save the return value */
        mov.l   sp,er2
        and.w   #0xe000,r2
        mov.l   @(TI_FLAGS:16,er2),er2
        and.w   #_TIF_WORK_SYSCALL_MASK,r2
        beq     2f
        mov.l   sp,er0
        jsr     @do_syscall_trace_leave
2:
        orc     #0xc0,ccr                       /* disable interrupts */
        bra     resume_userspace

badsys:
        mov.l   #-ENOSYS,er0
        mov.l   er0,@(LER0:16,sp)
        bra     resume_userspace

#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif
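
/*
 * Common return path.  Pending work (reschedule, signal delivery) is
 * only checked when returning to user space; kernel-mode returns go
 * straight to restore_all unless kernel preemption is configured.
 */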
ret_from_exception:
#if defined(CONFIG_PREEMPT)
        orc     #0xc0,ccr
#endif
ret_from_interrupt:
        mov.b   @(LCCR+1:16,sp),r0l
        btst    #4,r0l
        bne     resume_kernel:16        /* return from kernel */
resume_userspace:
        andc    #0xbf,ccr
        mov.l   sp,er4
        and.w   #0xe000,r4              /* er4 <- current thread info */
        mov.l   @(TI_FLAGS:16,er4),er1
        and.l   #_TIF_WORK_MASK,er1
        beq     restore_all:8
work_pending:
        btst    #TIF_NEED_RESCHED,r1l
        bne     work_resched:8
        /* work notifysig */
        mov.l   sp,er0
        subs    #4,er0                  /* er0: pt_regs */
        jsr     @do_notify_resume
        bra     resume_userspace:8
work_resched:
        mov.l   sp,er0
        jsr     @set_esp0
        jsr     @schedule
        bra     resume_userspace:8
restore_all:
        RESTORE_ALL                     /* Does RTE */
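
/*
 * Kernel preemption: reschedule only when preempt_count is zero,
 * TIF_NEED_RESCHED is set and the interrupted context had interrupts
 * enabled (I flag clear in the saved ccr).
 */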
#if defined(CONFIG_PREEMPT)
resume_kernel:
        mov.l   @(TI_PRE_COUNT:16,er4),er0
        bne     restore_all:8
need_resched:
        mov.l   @(TI_FLAGS:16,er4),er0
        btst    #TIF_NEED_RESCHED,r0l
        beq     restore_all:8
        mov.b   @(LCCR+1:16,sp),r0l     /* Interrupt Enabled? */
        bmi     restore_all:8
        mov.l   sp,er0
        jsr     @set_esp0
        jsr     @preempt_schedule_irq
        bra     need_resched:8
#endif
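
/*
 * First return of a newly created task: schedule_tail() is called
 * with the previous task (carried in er2 across the context switch),
 * and a kernel thread then calls its function (saved in LER5) with
 * its argument (LER4) before the normal exception return.
 */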
ret_from_fork:
        mov.l   er2,er0
        jsr     @schedule_tail
        jmp     @ret_from_exception

ret_from_kernel_thread:
        mov.l   er2,er0
        jsr     @schedule_tail
        mov.l   @(LER4:16,sp),er0
        mov.l   @(LER5:16,sp),er1
        jsr     @er1
        jmp     @ret_from_exception
_resume:
        /*
         * Beware - when entering resume, prev (the current task's
         * thread_struct) is in er0 and next (the new task's
         * thread_struct) is in er1; er2 must survive the switch, as
         * ret_from_fork passes it to schedule_tail as prev.  So don't
         * change these registers until their contents are no longer
         * needed.
         */

        /* save ccr */
        sub.w   r3,r3
        stc     ccr,r3l
        mov.w   r3,@(THREAD_CCR+2:16,er0)

        /* disable interrupts */
        orc     #0xc0,ccr
        mov.l   @_sw_usp,er3
        mov.l   er3,@(THREAD_USP:16,er0)
        mov.l   sp,@(THREAD_KSP:16,er0)

        /* Skip address space switching if they are the same. */
        /* FIXME: what did we hack out of here, this does nothing! */

        mov.l   @(THREAD_USP:16,er1),er0
        mov.l   er0,@_sw_usp
        mov.l   @(THREAD_KSP:16,er1),sp

        /* restore status register */
        mov.w   @(THREAD_CCR+2:16,er1),r3

        ldc     r3l,ccr
        rts
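
/*
 * Trace/breakpoint trap.  The frame is marked as not-a-syscall
 * (LORIG = -1) and, unless the preceding instruction was a trapa #3
 * (opcode 0x5730), the saved user pc is wound back by one word before
 * trace_trap() is called with the trap address.
 */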
_trace_break:
        subs    #4,sp
        SAVE_ALL
        sub.l   er1,er1
        dec.l   #1,er1
        mov.l   er1,@(LORIG,sp)                 /* not a syscall: LORIG = -1 */
        mov.l   sp,er0
        jsr     @set_esp0
        mov.l   @_sw_usp,er0
        mov.l   @er0,er1
        mov.w   @(-2:16,er1),r2
        cmp.w   #0x5730,r2                      /* trapa #3 ? */
        beq     1f
        subs    #2,er1
        mov.l   er1,@er0
1:
        and.w   #0xff,e1                        /* mask to 24-bit address */
        mov.l   er1,er0
        jsr     @trace_trap
        jmp     @ret_from_exception
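
/*
 * NMI entry: builds a fake LVEC slot pointing at redirect table
 * entry 8 so that _interrupt_entry computes vector 7, the NMI vector.
 */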
_nmi:
        subs    #4, sp
        mov.l   er0, @-sp
        mov.l   @_interrupt_redirect_table, er0
        add.l   #8*4, er0
        mov.l   er0, @(4,sp)
        mov.l   @sp+, er0
        jmp     @_interrupt_entry
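
/*
 * KGDB breakpoint trap: the saved LSP is pointed at the hardware
 * return frame (sp+LRET) and h8300_kgdb_trap() is handed the pt_regs
 * frame.
 */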
#if defined(CONFIG_KGDB)
_kgdb_trap:
        subs    #4,sp
        SAVE_ALL
        mov.l   sp,er0
        add.l   #LRET,er0
        mov.l   er0,@(LSP,sp)
        jsr     @set_esp0
        mov.l   sp,er0
        subs    #4,er0
        jsr     @h8300_kgdb_trap
        jmp     @ret_from_exception
#endif

        .section        .bss
_sw_ksp:
        .space  4
_sw_usp:
        .space  4

        .end