/* SPDX-License-Identifier: GPL-2.0 */
/*
 *
 *  linux/arch/h8300/kernel/entry.S
 *
 *  Yoshinori Sato <ysato@users.sourceforge.jp>
 *  David McCullough <davidm@snapgear.com>
 *
 */

/*
 *  entry.S
 *  exception/interrupt gateways and the
 *  system call entry point
 */

#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>

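/*
 * Per-CPU variants of the register save/restore macros.  The H8/300H
 * can only shift one bit at a time and has no stm/ldm multi-register
 * moves, so its macros expand to instruction sequences; the H8S has
 * two-bit shifts and stm/ldm, and additionally carries an EXR
 * register that must be saved and restored around exceptions.
 */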
#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
        .h8300h
        .macro  SHLL2 reg
        shll.l  \reg
        shll.l  \reg
        .endm
        .macro  SHLR2 reg
        shlr.l  \reg
        shlr.l  \reg
        .endm
        .macro  SAVEREGS
        mov.l   er0,@-sp
        mov.l   er1,@-sp
        mov.l   er2,@-sp
        mov.l   er3,@-sp
        .endm
        .macro  RESTOREREGS
        mov.l   @sp+,er3
        mov.l   @sp+,er2
        .endm
        .macro  SAVEEXR                         /* no EXR on the H8/300H */
        .endm
        .macro  RESTOREEXR
        .endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
        .h8300s
        .macro  SHLL2 reg
        shll.l  #2,\reg
        .endm
        .macro  SHLR2 reg
        shlr.l  #2,\reg
        .endm
        .macro  SAVEREGS
        stm.l   er0-er3,@-sp
        .endm
        .macro  RESTOREREGS
        ldm.l   @sp+,er2-er3
        .endm
        .macro  SAVEEXR
        mov.w   @(USEREXR:16,er0),r1
        mov.w   r1,@(LEXR-LER3:16,sp)           /* copy EXR */
        .endm
        .macro  RESTOREEXR
        mov.w   @(LEXR-LER1:16,sp),r1           /* restore EXR */
        mov.b   r1l,r1h
        mov.w   r1,@(USEREXR:16,er0)
        .endm
#endif

/* CPU context save/restore macros. */

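/*
 * SAVE_ALL builds the pt_regs frame on the kernel stack.  CCR bit 4
 * (the user-definable U flag) serves as a software kernel-mode flag:
 * if it is clear we came from user mode, so the user stack pointer is
 * parked in _sw_usp and sp is switched to the kernel stack taken from
 * _sw_ksp before the registers are saved.  On exit r1 holds the CCR
 * value that was in force when the exception was taken.
 */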
        .macro  SAVE_ALL
        mov.l   er0,@-sp
        stc     ccr,r0l                         /* check kernel mode */
        btst    #4,r0l
        bne     5f

        /* user mode */
        mov.l   sp,@_sw_usp
        mov.l   @sp,er0                         /* restore saved er0 */
        orc     #0x10,ccr                       /* switch kernel stack */
        mov.l   @_sw_ksp,sp
        sub.l   #(LRET-LORIG),sp                /* allocate LORIG - LRET */
        SAVEREGS
        mov.l   @_sw_usp,er0
        mov.l   @(USERRET:16,er0),er1           /* copy the RET addr */
        mov.l   er1,@(LRET-LER3:16,sp)
        SAVEEXR

        mov.l   @(LORIG-LER3:16,sp),er0
        mov.l   er0,@(LER0-LER3:16,sp)          /* copy ER0 */
        mov.w   e1,r1                           /* e1 high byte = ccr */
        and     #0xef,r1h                       /* mask the mode flag */
        bra     6f
5:
        /* kernel mode */
        mov.l   @sp,er0                         /* restore saved er0 */
        subs    #2,sp                           /* set dummy ccr */
        subs    #4,sp                           /* set dummy sp */
        SAVEREGS
        mov.w   @(LRET-LER3:16,sp),r1           /* copy old ccr */
6:
        mov.b   r1h,r1l
        mov.b   #0,r1h
        mov.w   r1,@(LCCR-LER3:16,sp)           /* set ccr */
        mov.l   @_sw_usp,er2
        mov.l   er2,@(LSP-LER3:16,sp)           /* set usp */
        mov.l   er6,@-sp                        /* syscall arg #6 */
        mov.l   er5,@-sp                        /* syscall arg #5 */
        mov.l   er4,@-sp                        /* syscall arg #4 */
        .endm                                   /* r1 = ccr */

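/*
 * RESTORE_ALL is the inverse of SAVE_ALL: it pops the pt_regs frame
 * and returns with rte.  For a return to user space it first copies
 * the saved er0, return address and CCR (plus EXR on the H8S) back to
 * the user stack frame, then switches sp back to the user stack.
 */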
        .macro  RESTORE_ALL
        mov.l   @sp+,er4
        mov.l   @sp+,er5
        mov.l   @sp+,er6
        RESTOREREGS
        mov.w   @(LCCR-LER1:16,sp),r0           /* check kernel mode */
        btst    #4,r0l
        bne     7f

        orc     #0xc0,ccr
        mov.l   @(LSP-LER1:16,sp),er0
        mov.l   @(LER0-LER1:16,sp),er1          /* restore ER0 */
        mov.l   er1,@er0
        RESTOREEXR
        mov.w   @(LCCR-LER1:16,sp),r1           /* restore the RET addr */
        mov.b   r1l,r1h
        mov.b   @(LRET+1-LER1:16,sp),r1l
        mov.w   r1,e1
        mov.w   @(LRET+2-LER1:16,sp),r1
        mov.l   er1,@(USERRET:16,er0)

        mov.l   @sp+,er1
        add.l   #(LRET-LER1),sp                 /* remove LORIG - LRET */
        mov.l   sp,@_sw_ksp
        andc    #0xef,ccr                       /* switch to user mode */
        mov.l   er0,sp
        bra     8f
7:
        mov.l   @sp+,er1
        add.l   #10,sp
8:
        mov.l   @sp+,er0
        adds    #4,sp                           /* remove the sw created LVEC */
        rte
        .endm

.globl _system_call
.globl ret_from_exception
.globl ret_from_fork
.globl ret_from_kernel_thread
.globl ret_from_interrupt
.globl _interrupt_redirect_table
.globl _sw_ksp,_sw_usp
.globl _resume
.globl _interrupt_entry
.globl _trace_break
.globl _nmi

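/*
 * Interrupt redirect table.  For a ROM kernel this is a real table of
 * jsr/jmp stubs sitting behind the hardware vectors; each jsr pushes
 * the address of the entry that follows it, which _interrupt_entry
 * later turns back into an IRQ number.  For a RAM kernel only a
 * 4-byte slot holding the table's address is kept in .bss.
 */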
#if defined(CONFIG_ROMKERNEL)
        .section .int_redirect,"ax"
_interrupt_redirect_table:
#if defined(CONFIG_CPU_H8300H)
        .rept   7
        .long   0
        .endr
#endif
#if defined(CONFIG_CPU_H8S)
        .rept   5
        .long   0
        .endr
        jmp     @_trace_break
        .long   0
#endif

        jsr     @_interrupt_entry               /* NMI */
        jmp     @_system_call                   /* TRAPA #0 (System call) */
        .long   0
#if defined(CONFIG_KGDB)
        jmp     @_kgdb_trap
#else
        .long   0
#endif
        jmp     @_trace_break                   /* TRAPA #3 (breakpoint) */
        .rept   INTERRUPTS-12
        jsr     @_interrupt_entry
        .endr
#endif
#if defined(CONFIG_RAMKERNEL)
.globl _interrupt_redirect_table
        .section .bss
_interrupt_redirect_table:
        .space  4
#endif

        .section .text
        .align  2
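/*
 * Common interrupt entry.  The saved vector slot (LVEC) holds the
 * return address pushed by the jsr in the redirect table, i.e. the
 * address of the entry after the one that fired.  Subtracting the
 * table base, dividing by the 4-byte entry size and subtracting one
 * yields the vector number: er0 becomes the IRQ number and er1 the
 * adjusted frame pointer before do_IRQ() is called.
 */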
_interrupt_entry:
        SAVE_ALL
/* r1l is saved ccr */
        mov.l   sp,er0
        add.l   #LVEC,er0
        btst    #4,r1l
        bne     1f
        /* user LVEC */
        mov.l   @_sw_usp,er0
        adds    #4,er0
1:
        mov.l   @er0,er0                        /* LVEC address */
#if defined(CONFIG_ROMKERNEL)
        sub.l   #_interrupt_redirect_table,er0
#endif
#if defined(CONFIG_RAMKERNEL)
        mov.l   @_interrupt_redirect_table,er1
        sub.l   er1,er0
#endif
        SHLR2   er0
        dec.l   #1,er0
        mov.l   sp,er1
        subs    #4,er1                          /* adjust ret_pc */
#if defined(CONFIG_CPU_H8S)
        orc     #7,exr
#endif
        jsr     @do_IRQ
        jmp     @ret_from_interrupt

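/*
 * System call entry (TRAPA #0).  On entry er0 holds the syscall
 * number; the first three arguments arrive in er1-er3 and are
 * reloaded below from the saved frame, while arguments 4-6 were
 * pushed from er4-er6 at the bottom of the frame by SAVE_ALL.  The
 * thread_info flags are checked before and after the call so syscall
 * tracing can hook both edges, and the return value is written back
 * to the saved er0 slot.
 */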
_system_call:
        subs    #4,sp                           /* dummy LVEC */
        SAVE_ALL
        /* er0: syscall nr */
        andc    #0xbf,ccr
        mov.l   er0,er4

        /* save top of frame */
        mov.l   sp,er0
        jsr     @set_esp0
        andc    #0x3f,ccr
        mov.l   sp,er2
        and.w   #0xe000,r2
        mov.l   @(TI_FLAGS:16,er2),er2
        and.w   #_TIF_WORK_SYSCALL_MASK,r2
        beq     1f
        mov.l   sp,er0
        jsr     @do_syscall_trace_enter
1:
        cmp.l   #__NR_syscalls,er4
        bcc     badsys
        SHLL2   er4
        mov.l   #_sys_call_table,er0
        add.l   er4,er0
        mov.l   @er0,er4
        beq     ret_from_exception:16
        mov.l   @(LER1:16,sp),er0
        mov.l   @(LER2:16,sp),er1
        mov.l   @(LER3:16,sp),er2
        jsr     @er4
        mov.l   er0,@(LER0:16,sp)               /* save the return value */
        mov.l   sp,er2
        and.w   #0xe000,r2
        mov.l   @(TI_FLAGS:16,er2),er2
        and.w   #_TIF_WORK_SYSCALL_MASK,r2
        beq     2f
        mov.l   sp,er0
        jsr     @do_syscall_trace_leave
2:
        orc     #0xc0,ccr
        bra     resume_userspace

badsys:
        mov.l   #-ENOSYS,er0
        mov.l   er0,@(LER0:16,sp)
        bra     resume_userspace

#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif

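/*
 * Exception/interrupt return path.  The saved CCR decides whether we
 * return to kernel or user mode.  The current thread_info is located
 * by masking the low bits off sp (kernel stacks are 8KiB aligned),
 * and any pending work (reschedule, signal delivery) is handled
 * before the final RESTORE_ALL.
 */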
ret_from_exception:
#if defined(CONFIG_PREEMPT)
        orc     #0xc0,ccr
#endif
ret_from_interrupt:
        mov.b   @(LCCR+1:16,sp),r0l
        btst    #4,r0l
        bne     resume_kernel:16        /* return from kernel */
resume_userspace:
        andc    #0xbf,ccr
        mov.l   sp,er4
        and.w   #0xe000,r4              /* er4 <- current thread info */
        mov.l   @(TI_FLAGS:16,er4),er1
        and.l   #_TIF_WORK_MASK,er1
        beq     restore_all:8
work_pending:
        btst    #TIF_NEED_RESCHED,r1l
        bne     work_resched:8
        /* work notifysig */
        mov.l   sp,er0
        subs    #4,er0                  /* er0: pt_regs */
        jsr     @do_notify_resume
        bra     resume_userspace:8
work_resched:
        mov.l   sp,er0
        jsr     @set_esp0
        jsr     @schedule
        bra     resume_userspace:8
restore_all:
        RESTORE_ALL                     /* Does RTE */

#if defined(CONFIG_PREEMPT)
resume_kernel:
        mov.l   @(TI_PRE_COUNT:16,er4),er0
        bne     restore_all:8
need_resched:
        mov.l   @(TI_FLAGS:16,er4),er0
        btst    #TIF_NEED_RESCHED,r0l
        beq     restore_all:8
        mov.b   @(LCCR+1:16,sp),r0l     /* Interrupt Enabled? */
        bmi     restore_all:8
        mov.l   sp,er0
        jsr     @set_esp0
        jsr     @preempt_schedule_irq
        bra     need_resched:8
#endif

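/*
 * A newly forked task resumes here: er2 carries the previous task for
 * schedule_tail().  A kernel thread additionally picks up its
 * argument from the saved er4 slot and its function from the er5
 * slot, then drops into the normal exception return path when the
 * function returns.
 */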
ret_from_fork:
        mov.l   er2,er0
        jsr     @schedule_tail
        jmp     @ret_from_exception

ret_from_kernel_thread:
        mov.l   er2,er0
        jsr     @schedule_tail
        mov.l   @(LER4:16,sp),er0
        mov.l   @(LER5:16,sp),er1
        jsr     @er1
        jmp     @ret_from_exception

_resume:
        /*
         * Beware - when entering _resume, prev's thread struct is in
         * er0 and next's thread struct is in er1, so don't change
         * these registers until their contents are no longer needed.
         */

        /* save ccr */
        sub.w   r3,r3
        stc     ccr,r3l
        mov.w   r3,@(THREAD_CCR+2:16,er0)

        /* disable interrupts */
        orc     #0xc0,ccr
        mov.l   @_sw_usp,er3
        mov.l   er3,@(THREAD_USP:16,er0)
        mov.l   sp,@(THREAD_KSP:16,er0)

        /* Skip address space switching if they are the same. */
        /* FIXME: what did we hack out of here, this does nothing! */

        mov.l   @(THREAD_USP:16,er1),er0
        mov.l   er0,@_sw_usp
        mov.l   @(THREAD_KSP:16,er1),sp

        /* restore status register */
        mov.w   @(THREAD_CCR+2:16,er1),r3

        ldc     r3l,ccr
        rts

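/*
 * Trace/breakpoint trap.  orig_er0 is set to -1 so the frame is not
 * mistaken for a syscall, then the word before the user PC is
 * examined: 0x5730 is the TRAPA #3 opcode, i.e. a planted breakpoint.
 * For a single-step trace exception the PC is instead wound back by
 * two so it points at the instruction that was stepped.  The stacked
 * CCR byte is masked off the 24-bit PC before calling trace_trap().
 */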
_trace_break:
        subs    #4,sp
        SAVE_ALL
        sub.l   er1,er1
        dec.l   #1,er1
        mov.l   er1,@(LORIG,sp)
        mov.l   sp,er0
        jsr     @set_esp0
        mov.l   @_sw_usp,er0
        mov.l   @er0,er1
        mov.w   @(-2:16,er1),r2
        cmp.w   #0x5730,r2
        beq     1f
        subs    #2,er1
        mov.l   er1,@er0
1:
        and.w   #0xff,e1
        mov.l   er1,er0
        jsr     @trace_trap
        jmp     @ret_from_exception

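/*
 * NMI entry: synthesize the vector slot a jsr from the redirect table
 * would have pushed (table base + 8*4, the entry after the NMI slot)
 * and join the common interrupt path, which turns it into IRQ 7.
 */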
_nmi:
        subs    #4, sp
        mov.l   er0, @-sp
        mov.l   @_interrupt_redirect_table, er0
        add.l   #8*4, er0
        mov.l   er0, @(4,sp)
        mov.l   @sp+, er0
        jmp     @_interrupt_entry

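/*
 * KGDB trap, reached via the TRAPA #2 slot of the redirect table:
 * record the frame position and hand the adjusted pt_regs pointer to
 * the debugger stub.
 */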
#if defined(CONFIG_KGDB)
_kgdb_trap:
        subs    #4,sp
        SAVE_ALL
        mov.l   sp,er0
        add.l   #LRET,er0
        mov.l   er0,@(LSP,sp)
        jsr     @set_esp0
        mov.l   sp,er0
        subs    #4,er0
        jsr     @h8300_kgdb_trap
        jmp     @ret_from_exception
#endif

        .section        .bss
_sw_ksp:
        .space  4
_sw_usp:
        .space  4

        .end