/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains ARM architecture-specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/thread_info.h>

#define IOMEM(x)        (x)
/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull          lsr
#define lspush          lsl
#define get_byte_0      lsl #0
#define get_byte_1      lsr #8
#define get_byte_2      lsr #16
#define get_byte_3      lsr #24
#define put_byte_0      lsl #0
#define put_byte_1      lsl #8
#define put_byte_2      lsl #16
#define put_byte_3      lsl #24
#else
#define lspull          lsl
#define lspush          lsr
#define get_byte_0      lsr #24
#define get_byte_1      lsr #16
#define get_byte_2      lsr #8
#define get_byte_3      lsl #0
#define put_byte_0      lsl #24
#define put_byte_1      lsl #16
#define put_byte_2      lsl #8
#define put_byte_3      lsl #0
#endif
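
/*
 * Illustrative use (a sketch, not from this file): these names are shift
 * operands, so the same source assembles correctly for both little- and
 * big-endian kernels, e.g.
 *
 *      mov     r3, r2, get_byte_1      @ memory byte 1 of r2 -> bits 7..0 of r3
 *      strb    r3, [r0], #1            @ store just that byte
 */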

/* Select code for any configuration running in BE8 mode */
#ifdef CONFIG_CPU_ENDIAN_BE8
#define ARM_BE8(code...) code
#else
#define ARM_BE8(code...)
#endif

/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)    code
#else
#define PLD(code...)
#endif
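
/*
 * Illustrative use (sketch), as in the optimised copy loops: prefetch the
 * source ahead of the loads that will need it, only on CPUs that have PLD:
 *
 *      PLD(    pld     [r1, #0]        )
 */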

/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this to be a worthwhile thing to do when the cache
 * is not set to write-allocate (this would need further testing on
 * XScale when WA is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif
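
/*
 * Illustrative use (sketch, with a hypothetical local label): copy loops
 * wrap their destination-alignment preamble in CALGN() so it is only
 * assembled for CPUs that benefit from it:
 *
 *      CALGN(  ands    ip, r0, #31             )       @ offset into cache line
 *      CALGN(  bne     .Lalign_destination     )
 */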

#define IMM12_MASK 0xfff

/*
 * Enable and disable interrupts
 */
#if __LINUX_ARM_ARCH__ >= 6
        .macro  disable_irq_notrace
        cpsid   i
        .endm

        .macro  enable_irq_notrace
        cpsie   i
        .endm
#else
        .macro  disable_irq_notrace
        msr     cpsr_c, #PSR_I_BIT | SVC_MODE
        .endm

        .macro  enable_irq_notrace
        msr     cpsr_c, #SVC_MODE
        .endm
#endif

        .macro asm_trace_hardirqs_off, save=1
#if defined(CONFIG_TRACE_IRQFLAGS)
        .if \save
        stmdb   sp!, {r0-r3, ip, lr}
        .endif
        bl      trace_hardirqs_off
        .if \save
        ldmia   sp!, {r0-r3, ip, lr}
        .endif
#endif
        .endm
        .macro asm_trace_hardirqs_on, cond=al, save=1
#if defined(CONFIG_TRACE_IRQFLAGS)
        /*
         * The registers should really be pushed and popped conditionally,
         * but after the bl the flags are certainly clobbered anyway.
         */
        .if \save
        stmdb   sp!, {r0-r3, ip, lr}
        .endif
        bl\cond trace_hardirqs_on
        .if \save
        ldmia   sp!, {r0-r3, ip, lr}
        .endif
#endif
        .endm

        .macro disable_irq, save=1
        disable_irq_notrace
        asm_trace_hardirqs_off \save
        .endm

        .macro enable_irq
        asm_trace_hardirqs_on
        enable_irq_notrace
        .endm

/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
        .macro  save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
        mrs     \oldcpsr, primask
#else
        mrs     \oldcpsr, cpsr
#endif
        disable_irq
        .endm

        .macro  save_and_disable_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
        mrs     \oldcpsr, primask
#else
        mrs     \oldcpsr, cpsr
#endif
        disable_irq_notrace
        .endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
        .macro  restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
        msr     primask, \oldcpsr
#else
        msr     cpsr_c, \oldcpsr
#endif
        .endm

        .macro restore_irqs, oldcpsr
        tst     \oldcpsr, #PSR_I_BIT
        asm_trace_hardirqs_on cond=eq
        restore_irqs_notrace \oldcpsr
        .endm
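
/*
 * Typical pairing (illustrative, not from this file):
 *
 *      save_and_disable_irqs   r9      @ save IRQ state in r9, mask IRQs
 *      ...                             @ critical section
 *      restore_irqs            r9      @ put the saved IRQ state back
 */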

/*
 * Assembly version of "adr rd, BSYM(sym)".  This should only be used to
 * reference local symbols in the same assembly file which are to be
 * resolved by the assembler.  Other usage is undefined.
 */
        .irp    c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
        .macro  badr\c, rd, sym
#ifdef CONFIG_THUMB2_KERNEL
        adr\c   \rd, \sym + 1
#else
        adr\c   \rd, \sym
#endif
        .endm
        .endr
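
/*
 * Illustrative use (sketch): load a code address that is also valid as a
 * return or branch target on Thumb-2 kernels (bit 0 set):
 *
 *      badr    lr, 2f                  @ like "adr lr, 2f", plus the Thumb bit
 */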

/*
 * Get current thread_info.
 */
        .macro  get_thread_info, rd
 ARM(   mov     \rd, sp, lsr #THREAD_SIZE_ORDER + PAGE_SHIFT    )
 THUMB( mov     \rd, sp                 )
 THUMB( lsr     \rd, \rd, #THREAD_SIZE_ORDER + PAGE_SHIFT       )
        mov     \rd, \rd, lsl #THREAD_SIZE_ORDER + PAGE_SHIFT
        .endm
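
/*
 * Usage sketch: the kernel stack is THREAD_SIZE aligned, so masking the low
 * bits of sp yields the thread_info base, from which fields can be loaded:
 *
 *      get_thread_info r9              @ r9 := current thread_info
 *      ldr     r0, [r9, #TI_FLAGS]
 */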

/*
 * Increment/decrement the preempt count.
 */
#ifdef CONFIG_PREEMPT_COUNT
        .macro  inc_preempt_count, ti, tmp
        ldr     \tmp, [\ti, #TI_PREEMPT]        @ get preempt count
        add     \tmp, \tmp, #1                  @ increment it
        str     \tmp, [\ti, #TI_PREEMPT]
        .endm

        .macro  dec_preempt_count, ti, tmp
        ldr     \tmp, [\ti, #TI_PREEMPT]        @ get preempt count
        sub     \tmp, \tmp, #1                  @ decrement it
        str     \tmp, [\ti, #TI_PREEMPT]
        .endm

        .macro  dec_preempt_count_ti, ti, tmp
        get_thread_info \ti
        dec_preempt_count \ti, \tmp
        .endm
#else
        .macro  inc_preempt_count, ti, tmp
        .endm

        .macro  dec_preempt_count, ti, tmp
        .endm

        .macro  dec_preempt_count_ti, ti, tmp
        .endm
#endif
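
/*
 * USER() marks a single user-space access that may fault: it records an
 * __ex_table entry pointing at a local "9001:" fixup label, which the
 * surrounding code must provide.  Illustrative sketch:
 *
 * USER(        ldrt    r0, [r1] )              @ user load, may fault
 *      ...
 * 9001:        ...                             @ fixup code, entered on a fault
 */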
#define USER(x...)                              \
9999:   x;                                      \
        .pushsection __ex_table,"a";            \
        .align  3;                              \
        .long   9999b,9001f;                    \
        .popsection
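
/*
 * SMP/UP alternatives: ALT_SMP() emits the SMP form of an instruction
 * inline, while ALT_UP() records a uniprocessor replacement for it in the
 * .alt.smp.init section; when the kernel finds itself running on a single
 * CPU it patches the UP form over the SMP one at boot.
 */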
#ifdef CONFIG_SMP
#define ALT_SMP(instr...)                                       \
9998:   instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)                                        \
        .pushsection ".alt.smp.init", "a"                       ;\
        .long   9998b                                           ;\
9997:   instr                                                   ;\
        .if . - 9997b == 2                                      ;\
                nop                                             ;\
        .endif                                                  ;\
        .if . - 9997b != 4                                      ;\
                .error "ALT_UP() content must assemble to exactly 4 bytes";\
        .endif                                                  ;\
        .popsection
#define ALT_UP_B(label)                                 \
        .equ    up_b_offset, label - 9998b                      ;\
        .pushsection ".alt.smp.init", "a"                       ;\
        .long   9998b                                           ;\
        W(b)    . + up_b_offset                                 ;\
        .popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif

/*
 * Instruction barrier
 */
        .macro  instr_sync
#if __LINUX_ARM_ARCH__ >= 7
        isb
#elif __LINUX_ARM_ARCH__ == 6
        mcr     p15, 0, r0, c7, c5, 4
#endif
        .endm

/*
 * SMP data memory barrier
 */
        .macro  smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
        .ifeqs "\mode","arm"
        ALT_SMP(dmb     ish)
        .else
        ALT_SMP(W(dmb)  ish)
        .endif
#elif __LINUX_ARM_ARCH__ == 6
        ALT_SMP(mcr     p15, 0, r0, c7, c10, 5) @ dmb
#else
#error Incompatible SMP platform
#endif
        .ifeqs "\mode","arm"
        ALT_UP(nop)
        .else
        ALT_UP(W(nop))
        .endif
#endif
        .endm

#if defined(CONFIG_CPU_V7M)
        /*
         * setmode is used to assert that the CPU is in SVC mode during boot.
         * For v7-M this is done in __v7m_setup, so setmode can be empty here.
         */
        .macro  setmode, mode, reg
        .endm
#elif defined(CONFIG_THUMB2_KERNEL)
        .macro  setmode, mode, reg
        mov     \reg, #\mode
        msr     cpsr_c, \reg
        .endm
#else
        .macro  setmode, mode, reg
        msr     cpsr_c, #\mode
        .endm
#endif

/*
 * Helper macro to enter SVC mode cleanly and mask interrupts.  reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time;
 * you cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6 && !defined(CONFIG_CPU_V7M)
        mrs     \reg , cpsr
        eor     \reg, \reg, #HYP_MODE
        tst     \reg, #MODE_MASK
        bic     \reg , \reg , #MODE_MASK
        orr     \reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(  orr     \reg , \reg , #PSR_T_BIT        )
        bne     1f
        orr     \reg, \reg, #PSR_A_BIT
        badr    lr, 2f
        msr     spsr_cxsf, \reg
        __MSR_ELR_HYP(14)
        __ERET
1:      msr     cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
        setmode PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm

/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

        .macro  usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
        .if     \inc == 1
        \instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
        .elseif \inc == 4
        \instr\cond\()\t\().w \reg, [\ptr, #\off]
        .else
        .error  "Unsupported inc macro argument"
        .endif

        .pushsection __ex_table,"a"
        .align  3
        .long   9999b, \abort
        .popsection
        .endm

        .macro  usracc, instr, reg, ptr, inc, cond, rept, abort
        @ explicit IT instruction needed because of the label
        @ introduced by the USER macro
        .ifnc   \cond,al
        .if     \rept == 1
        itt     \cond
        .elseif \rept == 2
        ittt    \cond
        .else
        .error  "Unsupported rept macro argument"
        .endif
        .endif

        @ Slightly optimised to avoid incrementing the pointer twice
        usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
        .if     \rept == 2
        usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
        .endif

        add\cond \ptr, #\rept * \inc
        .endm

#else   /* !CONFIG_THUMB2_KERNEL */

        .macro  usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
        .rept   \rept
9999:
        .if     \inc == 1
        \instr\cond\()b\()\t \reg, [\ptr], #\inc
        .elseif \inc == 4
        \instr\cond\()\t \reg, [\ptr], #\inc
        .else
        .error  "Unsupported inc macro argument"
        .endif

        .pushsection __ex_table,"a"
        .align  3
        .long   9999b, \abort
        .popsection
        .endr
        .endm

#endif  /* CONFIG_THUMB2_KERNEL */

        .macro  strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
        usracc  str, \reg, \ptr, \inc, \cond, \rept, \abort
        .endm

        .macro  ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
        usracc  ldr, \reg, \ptr, \inc, \cond, \rept, \abort
        .endm
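
/*
 * Illustrative use (sketch): store r3 to the user pointer in r0 and
 * post-increment it by 4; a fault branches to the local 9001 fixup label
 * (the default abort target) supplied by the calling code:
 *
 *      strusr  r3, r0, 4
 */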

/* Utility macro for declaring string literals */
        .macro  string name:req, string
        .type \name , #object
\name:
        .asciz "\string"
        .size \name , . - \name
        .endm
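
/*
 * Example (with a hypothetical symbol name):
 *
 *      string  example_msg, "hello"
 *
 * declares a NUL-terminated string object with correct type and size info.
 */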

/*
 * CSDB (Consumption of Speculative Data Barrier), emitted by opcode so it
 * assembles with toolchains that do not know the mnemonic; used by the
 * Spectre-v1 mitigations.
 */
        .macro  csdb
#ifdef CONFIG_THUMB2_KERNEL
        .inst.w 0xf3af8014
#else
        .inst   0xe320f014
#endif
        .endm
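
/*
 * Sanity-check a user access range: branch to \bad if addr .. addr+size-1
 * is not a valid range below \limit (address wrap-around is also caught).
 * Skipped when CPU domains are used to police user accesses.
 */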
        .macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
        adds    \tmp, \addr, #\size - 1
        sbcccs  \tmp, \tmp, \limit
        bcs     \bad
#endif
        .endm

        .macro  uaccess_disable, tmp, isb=1
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
        /*
         * Whenever we re-enter userspace, the domains should always be
         * set appropriately.
         */
        mov     \tmp, #DACR_UACCESS_DISABLE
        mcr     p15, 0, \tmp, c3, c0, 0         @ Set domain register
        .if     \isb
        instr_sync
        .endif
#endif
        .endm

        .macro  uaccess_enable, tmp, isb=1
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
        /*
         * Whenever we re-enter userspace, the domains should always be
         * set appropriately.
         */
        mov     \tmp, #DACR_UACCESS_ENABLE
        mcr     p15, 0, \tmp, c3, c0, 0
        .if     \isb
        instr_sync
        .endif
#endif
        .endm
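
/*
 * Save/restore the domain access control register across kernel entry,
 * using the SVC_DACR slot of the SVC-mode register frame on the stack.
 */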
        .macro  uaccess_save, tmp
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
        mrc     p15, 0, \tmp, c3, c0, 0
        str     \tmp, [sp, #SVC_DACR]
#endif
        .endm

        .macro  uaccess_restore
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
        ldr     r0, [sp, #SVC_DACR]
        mcr     p15, 0, r0, c3, c0, 0
#endif
        .endm
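
/*
 * Generate ret, reteq, retne, ...: return to the address in \reg, using
 * "bx" when returning via lr on ARMv6+ (so ARM/Thumb interworking works)
 * and "mov pc, \reg" otherwise.
 */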
        .irp    c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
        .macro  ret\c, reg
#if __LINUX_ARM_ARCH__ < 6
        mov\c   pc, \reg
#else
        .ifeqs  "\reg", "lr"
        bx\c    \reg
        .else
        mov\c   pc, \reg
        .endif
#endif
        .endm
        .endr

        .macro  ret.w, reg
        ret     \reg
#ifdef CONFIG_THUMB2_KERNEL
        nop
#endif
        .endm
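
/*
 * Emit a BUG() from assembly: an undefined instruction the kernel traps,
 * plus (with CONFIG_DEBUG_BUGVERBOSE) a __bug_table entry recording the
 * message and line number.
 */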
        .macro  bug, msg, line
#ifdef CONFIG_THUMB2_KERNEL
1:      .inst   0xde02
#else
1:      .inst   0xe7f001f2
#endif
#ifdef CONFIG_DEBUG_BUGVERBOSE
        .pushsection .rodata.str, "aMS", %progbits, 1
2:      .asciz  "\msg"
        .popsection
        .pushsection __bug_table, "aw"
        .align  2
        .word   1b, 2b
        .hword  \line
        .popsection
#endif
        .endm
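
/*
 * Blacklist an assembly symbol for kprobes: the entry is added to the
 * _kprobe_blacklist section so it can never be probed.
 */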
#ifdef CONFIG_KPROBES
#define _ASM_NOKPROBE(entry)                            \
        .pushsection "_kprobe_blacklist", "aw" ;        \
        .balign 4 ;                                     \
        .long entry;                                    \
        .popsection
#else
#define _ASM_NOKPROBE(entry)
#endif

#endif /* __ASM_ASSEMBLER_H__ */