/* linux/arch/arm/include/asm/assembler.h */
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/thread_info.h>

/* In assembly an __iomem address is just a value; identity mapping here */
#define IOMEM(x)        (x)
  28
/*
 * Endian independent macros for shifting bytes within registers.
 *
 * lspull/lspush: shift mnemonics whose direction swaps on big-endian,
 * for reassembling a word from two unaligned halves.
 * get_byte_N: shift operand that brings byte N of a word down to bits [7:0].
 * put_byte_N: shift operand that moves bits [7:0] up into byte N of a word.
 */
#ifndef __ARMEB__
#define lspull          lsr
#define lspush          lsl
#define get_byte_0      lsl #0
#define get_byte_1      lsr #8
#define get_byte_2      lsr #16
#define get_byte_3      lsr #24
#define put_byte_0      lsl #0
#define put_byte_1      lsl #8
#define put_byte_2      lsl #16
#define put_byte_3      lsl #24
#else
#define lspull          lsl
#define lspush          lsr
#define get_byte_0      lsr #24
#define get_byte_1      lsr #16
#define get_byte_2      lsr #8
#define get_byte_3      lsl #0
#define put_byte_0      lsl #24
#define put_byte_1      lsl #16
#define put_byte_2      lsl #8
#define put_byte_3      lsl #0
#endif
  55
/* Select code for any configuration running in BE8 mode */
#ifdef CONFIG_CPU_ENDIAN_BE8
#define ARM_BE8(code...) code
#else
#define ARM_BE8(code...)
#endif

/*
 * Data preload for architectures that support it
 * (compiled out below ARMv5)
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)    code
#else
#define PLD(code...)
#endif

/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this a worthwhile thing to do when the cache is not
 * set to write-allocate (this would need further testing on XScale when WA
 * is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif

/* Mask for the 12-bit immediate field of an ARM instruction encoding */
#define IMM12_MASK 0xfff
  88
/*
 * Enable and disable interrupts
 */
#if __LINUX_ARM_ARCH__ >= 6
        /* ARMv6+: dedicated change-processor-state instructions (IRQ only) */
        .macro  disable_irq_notrace
        cpsid   i
        .endm

        .macro  enable_irq_notrace
        cpsie   i
        .endm
#else
        /*
         * Pre-v6: rewrite the whole CPSR control field.  This overwrites
         * the mode bits too, so it assumes the CPU is running in SVC mode.
         */
        .macro  disable_irq_notrace
        msr     cpsr_c, #PSR_I_BIT | SVC_MODE
        .endm

        .macro  enable_irq_notrace
        msr     cpsr_c, #SVC_MODE
        .endm
#endif
 109
        /*
         * Call trace_hardirqs_off for IRQ-state tracing; expands to nothing
         * unless CONFIG_TRACE_IRQFLAGS is set.  With \save nonzero the
         * caller-clobbered registers are preserved around the call.
         */
        .macro asm_trace_hardirqs_off, save=1
#if defined(CONFIG_TRACE_IRQFLAGS)
        .if \save
        stmdb   sp!, {r0-r3, ip, lr}    @ preserve AAPCS scratch regs + lr
        .endif
        bl      trace_hardirqs_off
        .if \save
        ldmia   sp!, {r0-r3, ip, lr}
        .endif
#endif
        .endm
 121
        /*
         * Call trace_hardirqs_on (under condition \cond) for IRQ-state
         * tracing; \save behaves as in asm_trace_hardirqs_off.
         */
        .macro asm_trace_hardirqs_on, cond=al, save=1
#if defined(CONFIG_TRACE_IRQFLAGS)
        /*
         * actually the registers should be pushed and pop'd conditionally, but
         * after bl the flags are certainly clobbered
         */
        .if \save
        stmdb   sp!, {r0-r3, ip, lr}
        .endif
        bl\cond trace_hardirqs_on
        .if \save
        ldmia   sp!, {r0-r3, ip, lr}
        .endif
#endif
        .endm
 137
        /* Mask IRQs first, then record the transition for the tracer */
        .macro disable_irq, save=1
        disable_irq_notrace
        asm_trace_hardirqs_off \save
        .endm

        /* Record the transition for the tracer first, then unmask IRQs */
        .macro enable_irq
        asm_trace_hardirqs_on
        enable_irq_notrace
        .endm
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
        .macro  save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
        mrs     \oldcpsr, primask       @ v7-M has no CPSR; mask state lives in PRIMASK
#else
        mrs     \oldcpsr, cpsr
#endif
        disable_irq
        .endm

        /* As save_and_disable_irqs, but without the irq-tracing hook */
        .macro  save_and_disable_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
        mrs     \oldcpsr, primask
#else
        mrs     \oldcpsr, cpsr
#endif
        disable_irq_notrace
        .endm
 168
/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
        .macro  restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
        msr     primask, \oldcpsr
#else
        msr     cpsr_c, \oldcpsr        @ control field only
#endif
        .endm

        /* Traced variant: log "irqs on" only when \oldcpsr has I bit clear */
        .macro restore_irqs, oldcpsr
        tst     \oldcpsr, #PSR_I_BIT
        asm_trace_hardirqs_on cond=eq   @ eq <=> I bit was 0 (IRQs re-enabled)
        restore_irqs_notrace \oldcpsr
        .endm
 186
/*
 * Assembly version of "adr rd, BSYM(sym)".  This should only be used to
 * reference local symbols in the same assembly file which are to be
 * resolved by the assembler.  Other usage is undefined.
 *
 * The .irp expands one badr\c macro per ARM condition code suffix.
 */
        .irp    c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
        .macro  badr\c, rd, sym
#ifdef CONFIG_THUMB2_KERNEL
        adr\c   \rd, \sym + 1           @ +1 sets bit 0 for a Thumb symbol address
#else
        adr\c   \rd, \sym
#endif
        .endm
        .endr
 201
/*
 * Get current thread_info.
 *
 * Rounds sp down to the THREAD_SIZE-aligned stack base (shift right then
 * left by THREAD_SIZE_ORDER + PAGE_SHIFT), where thread_info lives.
 */
        .macro  get_thread_info, rd
 ARM(   mov     \rd, sp, lsr #THREAD_SIZE_ORDER + PAGE_SHIFT    )
 THUMB( mov     \rd, sp                 )
 THUMB( lsr     \rd, \rd, #THREAD_SIZE_ORDER + PAGE_SHIFT       )
        mov     \rd, \rd, lsl #THREAD_SIZE_ORDER + PAGE_SHIFT
        .endm
 211
/*
 * Increment/decrement the preempt count.
 * \ti is the thread_info base, \tmp a scratch register.
 */
#ifdef CONFIG_PREEMPT_COUNT
        .macro  inc_preempt_count, ti, tmp
        ldr     \tmp, [\ti, #TI_PREEMPT]        @ get preempt count
        add     \tmp, \tmp, #1                  @ increment it
        str     \tmp, [\ti, #TI_PREEMPT]
        .endm

        .macro  dec_preempt_count, ti, tmp
        ldr     \tmp, [\ti, #TI_PREEMPT]        @ get preempt count
        sub     \tmp, \tmp, #1                  @ decrement it
        str     \tmp, [\ti, #TI_PREEMPT]
        .endm

        /* Decrement preempt count of the current task; \ti is clobbered */
        .macro  dec_preempt_count_ti, ti, tmp
        get_thread_info \ti
        dec_preempt_count \ti, \tmp
        .endm
#else
        /* No-op stubs when preempt counting is configured out */
        .macro  inc_preempt_count, ti, tmp
        .endm

        .macro  dec_preempt_count, ti, tmp
        .endm

        .macro  dec_preempt_count_ti, ti, tmp
        .endm
#endif
 242
/*
 * USERL(l, x...): emit the user-access instruction(s) x together with an
 * __ex_table entry directing a fault at them to label l.
 * USER(x...): same, using the conventional fixup label 9001f.
 */
#define USERL(l, x...)                          \
9999:   x;                                      \
        .pushsection __ex_table,"a";            \
        .align  3;                              \
        .long   9999b,l;                        \
        .popsection

#define USER(x...)      USERL(9001f, x)
 251
/*
 * SMP/UP alternatives: ALT_SMP() emits the SMP instruction inline and
 * ALT_UP() records a replacement in .alt.smp.init (keyed by the 9998
 * label) to be used when an SMP kernel runs on uniprocessor hardware.
 */
#ifdef CONFIG_SMP
#define ALT_SMP(instr...)                                       \
9998:   instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)                                        \
        .pushsection ".alt.smp.init", "a"                       ;\
        .long   9998b                                           ;\
9997:   instr                                                   ;\
        .if . - 9997b == 2                                      ;\
                nop                                             ;\
        .endif                                                  ;\
        .if . - 9997b != 4                                      ;\
                .error "ALT_UP() content must assemble to exactly 4 bytes";\
        .endif                                                  ;\
        .popsection
#define ALT_UP_B(label)                                 \
        .equ    up_b_offset, label - 9998b                      ;\
        .pushsection ".alt.smp.init", "a"                       ;\
        .long   9998b                                           ;\
        W(b)    . + up_b_offset                                 ;\
        .popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif
 282
/*
 * Instruction barrier
 */
        .macro  instr_sync
#if __LINUX_ARM_ARCH__ >= 7
        isb
#elif __LINUX_ARM_ARCH__ == 6
        mcr     p15, 0, r0, c7, c5, 4           @ CP15 equivalent (prefetch flush)
#endif
        .endm
 293
/*
 * SMP data memory barrier.  \mode selects the "arm" (narrow) or default
 * wide (W()) encoding for Thumb-2 so the ALT_UP replacement size matches.
 */
        .macro  smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
        .ifeqs "\mode","arm"
        ALT_SMP(dmb     ish)
        .else
        ALT_SMP(W(dmb)  ish)
        .endif
#elif __LINUX_ARM_ARCH__ == 6
        ALT_SMP(mcr     p15, 0, r0, c7, c10, 5) @ dmb
#else
#error Incompatible SMP platform
#endif
        /* UP replacement: the barrier becomes a nop */
        .ifeqs "\mode","arm"
        ALT_UP(nop)
        .else
        ALT_UP(W(nop))
        .endif
#endif
        .endm
 317
#if defined(CONFIG_CPU_V7M)
        /*
         * setmode is used to assert to be in svc mode during boot. For v7-M
         * this is done in __v7m_setup, so setmode can be empty here.
         */
        .macro  setmode, mode, reg
        .endm
#elif defined(CONFIG_THUMB2_KERNEL)
        /* Thumb-2 msr has no immediate form, so stage \mode through \reg */
        .macro  setmode, mode, reg
        mov     \reg, #\mode
        msr     cpsr_c, \reg
        .endm
#else
        /* ARM: write the CPSR control field with an immediate */
        .macro  setmode, mode, reg
        msr     cpsr_c, #\mode
        .endm
#endif
 335
/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time.
 * you cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6 && !defined(CONFIG_CPU_V7M)
        mrs     \reg , cpsr
        eor     \reg, \reg, #HYP_MODE           @ mode field becomes 0 iff we are in HYP
        tst     \reg, #MODE_MASK                @ eq => entered in HYP mode
        bic     \reg , \reg , #MODE_MASK
        orr     \reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(  orr     \reg , \reg , #PSR_T_BIT        )
        bne     1f                              @ not HYP: plain CPSR write below
        orr     \reg, \reg, #PSR_A_BIT          @ HYP: also mask asynchronous aborts
        badr    lr, 2f                          @ continue at 2: after exception return
        msr     spsr_cxsf, \reg
        __MSR_ELR_HYP(14)
        __ERET                                  @ leave HYP via exception return
1:      msr     cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
        setmode PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm
 367
/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 *
 * usracc expands \instr (str/ldr) into its unprivileged user-access form
 * (\t defaults to TUSER()) and registers an __ex_table fixup to \abort
 * for each access.  \inc is the access size (1 or 4 bytes), \rept the
 * number of consecutive accesses (1 or 2).
 */
#ifdef CONFIG_THUMB2_KERNEL

        /* One access at [\ptr, #\off]; the pointer itself is not advanced */
        .macro  usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
        .if     \inc == 1
        \instr\()b\t\cond\().w \reg, [\ptr, #\off]
        .elseif \inc == 4
        \instr\t\cond\().w \reg, [\ptr, #\off]
        .else
        .error  "Unsupported inc macro argument"
        .endif

        .pushsection __ex_table,"a"
        .align  3
        .long   9999b, \abort
        .popsection
        .endm

        .macro  usracc, instr, reg, ptr, inc, cond, rept, abort
        @ explicit IT instruction needed because of the label
        @ introduced by the USER macro
        .ifnc   \cond,al
        .if     \rept == 1
        itt     \cond
        .elseif \rept == 2
        ittt    \cond
        .else
        .error  "Unsupported rept macro argument"
        .endif
        .endif

        @ Slightly optimised to avoid incrementing the pointer twice
        usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
        .if     \rept == 2
        usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
        .endif

        add\cond \ptr, #\rept * \inc
        .endm

#else   /* !CONFIG_THUMB2_KERNEL */

        /* ARM variant: post-indexed addressing advances \ptr per access */
        .macro  usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
        .rept   \rept
9999:
        .if     \inc == 1
        \instr\()b\t\cond \reg, [\ptr], #\inc
        .elseif \inc == 4
        \instr\t\cond \reg, [\ptr], #\inc
        .else
        .error  "Unsupported inc macro argument"
        .endif

        .pushsection __ex_table,"a"
        .align  3
        .long   9999b, \abort
        .popsection
        .endr
        .endm

#endif  /* CONFIG_THUMB2_KERNEL */
 432
        /* Store \reg to user space via [\ptr]; fault goes to \abort */
        .macro  strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
        usracc  str, \reg, \ptr, \inc, \cond, \rept, \abort
        .endm

        /* Load \reg from user space via [\ptr]; fault goes to \abort */
        .macro  ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
        usracc  ldr, \reg, \ptr, \inc, \cond, \rept, \abort
        .endm
 440
/*
 * Utility macro for declaring string literals: emits a NUL-terminated
 * string labelled \name with proper object type and size set.
 */
        .macro  string name:req, string
        .type \name , #object
\name:
        .asciz "\string"
        .size \name , . - \name
        .endm
 448
        /*
         * csdb: speculation barrier used by the Spectre mitigations below.
         * Emitted as a raw .inst encoding rather than the mnemonic —
         * presumably for compatibility with older assemblers (TODO confirm).
         */
        .macro  csdb
#ifdef CONFIG_THUMB2_KERNEL
        .inst.w 0xf3af8014
#else
        .inst   0xe320f014
#endif
        .endm
 456
        /*
         * Verify that [\addr, \addr + \size) lies below \limit; branch to
         * \bad on failure.  With CONFIG_CPU_SPECTRE the conditional move
         * plus csdb after the taken branch forces \addr to 0 under
         * misspeculation, so a speculated access cannot use a bad pointer.
         */
        .macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
        adds    \tmp, \addr, #\size - 1         @ tmp = last byte of the range
        sbcscc  \tmp, \tmp, \limit
        bcs     \bad
#ifdef CONFIG_CPU_SPECTRE
        movcs   \addr, #0
        csdb
#endif
#endif
        .endm
 468
        /*
         * Spectre mitigation: NULL \addr when [\addr, \addr + \size) is not
         * entirely below \limit, followed by a csdb speculation barrier.
         */
        .macro uaccess_mask_range_ptr, addr:req, size:req, limit:req, tmp:req
#ifdef CONFIG_CPU_SPECTRE
        sub     \tmp, \limit, #1
        subs    \tmp, \tmp, \addr       @ tmp = limit - 1 - addr
        addhs   \tmp, \tmp, #1          @ if (tmp >= 0) {
        subshs  \tmp, \tmp, \size       @ tmp = limit - (addr + size) }
        movlo   \addr, #0               @ if (tmp < 0) addr = NULL
        csdb
#endif
        .endm
 479
        /*
         * Software-PAN: revoke user-space access by writing
         * DACR_UACCESS_DISABLE to the domain access control register.
         * \isb=1 (default) inserts an instruction barrier afterwards.
         */
        .macro  uaccess_disable, tmp, isb=1
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
        /*
         * Whenever we re-enter userspace, the domains should always be
         * set appropriately.
         */
        mov     \tmp, #DACR_UACCESS_DISABLE
        mcr     p15, 0, \tmp, c3, c0, 0         @ Set domain register
        .if     \isb
        instr_sync
        .endif
#endif
        .endm
 493
        /*
         * Software-PAN: grant user-space access by writing
         * DACR_UACCESS_ENABLE to the domain access control register.
         * \isb=1 (default) inserts an instruction barrier afterwards.
         */
        .macro  uaccess_enable, tmp, isb=1
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
        /*
         * Whenever we re-enter userspace, the domains should always be
         * set appropriately.
         */
        mov     \tmp, #DACR_UACCESS_ENABLE
        mcr     p15, 0, \tmp, c3, c0, 0         @ Set domain register
        .if     \isb
        instr_sync
        .endif
#endif
        .endm
 507
        /* Save the current DACR into the SVC frame at [sp, #SVC_DACR] */
        .macro  uaccess_save, tmp
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
        mrc     p15, 0, \tmp, c3, c0, 0         @ read domain access control reg
        str     \tmp, [sp, #SVC_DACR]
#endif
        .endm

        /* Restore the DACR saved by uaccess_save; clobbers r0 */
        .macro  uaccess_restore
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
        ldr     r0, [sp, #SVC_DACR]
        mcr     p15, 0, r0, c3, c0, 0
#endif
        .endm
 521
/*
 * Generate ret / ret<cond> macros: return to the address in \reg.
 * On ARMv6+ a return through lr uses bx (honours interworking state in
 * bit 0); other registers keep the traditional mov pc, reg.
 */
        .irp    c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
        .macro  ret\c, reg
#if __LINUX_ARM_ARCH__ < 6
        mov\c   pc, \reg
#else
        .ifeqs  "\reg", "lr"
        bx\c    \reg
        .else
        mov\c   pc, \reg
        .endif
#endif
        .endm
        .endr

        /* "Wide" form: the extra nop keeps the Thumb-2 sequence size fixed */
        .macro  ret.w, reg
        ret     \reg
#ifdef CONFIG_THUMB2_KERNEL
        nop
#endif
        .endm
 542
        /*
         * Emit a BUG trap: an undefined-instruction encoding, plus (with
         * CONFIG_DEBUG_BUGVERBOSE) a __bug_table record pairing the trap
         * address with the message string and source \line.
         */
        .macro  bug, msg, line
#ifdef CONFIG_THUMB2_KERNEL
1:      .inst   0xde02                          @ Thumb trap encoding
#else
1:      .inst   0xe7f001f2                      @ ARM trap encoding
#endif
#ifdef CONFIG_DEBUG_BUGVERBOSE
        .pushsection .rodata.str, "aMS", %progbits, 1
2:      .asciz  "\msg"
        .popsection
        .pushsection __bug_table, "aw"
        .align  2
        .word   1b, 2b                          @ trap address, message address
        .hword  \line
        .popsection
#endif
        .endm
 560
/*
 * _ASM_NOKPROBE(entry): record \entry in the kprobe blacklist section so
 * it cannot be probed; expands to nothing when kprobes are disabled.
 */
#ifdef CONFIG_KPROBES
#define _ASM_NOKPROBE(entry)                            \
        .pushsection "_kprobe_blacklist", "aw" ;        \
        .balign 4 ;                                     \
        .long entry;                                    \
        .popsection
#else
#define _ASM_NOKPROBE(entry)
#endif

#endif /* __ASM_ASSEMBLER_H__ */