linux/arch/arm64/include/asm/barrier.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

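/*
 * __nops(n) expands to an assembler .rept block emitting "n" NOP
 * instructions; nops(n) wraps it in an inline asm statement.
 */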
#define __nops(n)       ".rept  " #n "\nnop\n.endr\n"
#define nops(n)         asm volatile(__nops(n))

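/* Event and wait hints: send-event, wait-for-event, wait-for-interrupt. */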
#define sev()           asm volatile("sev" : : : "memory")
#define wfe()           asm volatile("wfe" : : : "memory")
#define wfi()           asm volatile("wfi" : : : "memory")

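/*
 * Architectural barriers: instruction synchronization, data memory and
 * data synchronization barriers. "opt" selects the shareability domain
 * and access type (e.g. sy, ish, ishld, oshst).
 */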
#define isb()           asm volatile("isb" : : : "memory")
#define dmb(opt)        asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)        asm volatile("dsb " #opt : : : "memory")

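/*
 * PSB CSYNC (profiling synchronization barrier) and CSDB (consumption of
 * speculative data barrier), spelled as HINT #17 and HINT #20 so that
 * assemblers without the named mnemonics still accept them.
 */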
#define psb_csync()     asm volatile("hint #17" : : : "memory")
#define csdb()          asm volatile("hint #20" : : : "memory")

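/*
 * Speculation barrier: the SB instruction where the CPU advertises
 * ARM64_HAS_SB, otherwise a DSB NSH followed by an ISB.
 */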
#define spec_bar()      asm volatile(ALTERNATIVE("dsb nsh\nisb\n",              \
                                                 SB_BARRIER_INSN"nop\n",        \
                                                 ARM64_HAS_SB))

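/*
 * Ensure that a prior update of ICC_PMR_EL1 has taken effect before
 * continuing. The DSB is only required on GICs where PMR writes need
 * explicit synchronization (gic_pmr_sync enabled).
 */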
#ifdef CONFIG_ARM64_PSEUDO_NMI
#define pmr_sync()                                              \
        do {                                                    \
                extern struct static_key_false gic_pmr_sync;    \
                                                                \
                if (static_branch_unlikely(&gic_pmr_sync))      \
                        dsb(sy);                                \
        } while(0)
#else
#define pmr_sync()      do {} while (0)
#endif

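/*
 * Mandatory barriers use full-system DSBs; the DMA barriers only need
 * to order observers in the outer shareable domain.
 */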
#define mb()            dsb(sy)
#define rmb()           dsb(ld)
#define wmb()           dsb(st)

#define dma_rmb()       dmb(oshld)
#define dma_wmb()       dmb(oshst)

/*
 * Generate a mask for array_index__nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
                                                    unsigned long sz)
{
        unsigned long mask;

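        /*
         * CMP sets the carry flag iff idx >= sz, so SBC xzr, xzr yields
         * C - 1: ~0UL when idx < sz and 0 otherwise. The CSDB that follows
         * prevents the mask from being based on a speculated comparison.
         */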
        asm volatile(
        "       cmp     %1, %2\n"
        "       sbc     %0, xzr, xzr\n"
        : "=r" (mask)
        : "r" (idx), "Ir" (sz)
        : "cc");

        csdb();
        return mask;
}

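/* SMP barriers only need to order the inner shareable domain. */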
#define __smp_mb()      dmb(ish)
#define __smp_rmb()     dmb(ishld)
#define __smp_wmb()     dmb(ishst)

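/*
 * Store-release: STLR ensures all prior loads and stores are observed
 * before the store itself. The union holds an unqualified copy of the
 * value so that volatile-qualified pointers do not force extra accesses.
 */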
#define __smp_store_release(p, v)                                       \
do {                                                                    \
        typeof(p) __p = (p);                                            \
        union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u =  \
                { .__val = (__force __unqual_scalar_typeof(*p)) (v) };  \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_write(__p, sizeof(*p));                             \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("stlrb %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u8 *)__u.__c)                \
                                : "memory");                            \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("stlrh %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u16 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("stlr %w1, %0"                            \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u32 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("stlr %1, %0"                             \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u64 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        }                                                               \
} while (0)

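/*
 * Load-acquire: LDAR ensures the load is observed before any subsequent
 * loads and stores.
 */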
#define __smp_load_acquire(p)                                           \
({                                                                      \
        union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u;   \
        typeof(p) __p = (p);                                            \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_read(__p, sizeof(*p));                              \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("ldarb %w0, %1"                           \
                        : "=r" (*(__u8 *)__u.__c)                       \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("ldarh %w0, %1"                           \
                        : "=r" (*(__u16 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("ldar %w0, %1"                            \
                        : "=r" (*(__u32 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("ldar %0, %1"                             \
                        : "=r" (*(__u64 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        }                                                               \
        (typeof(*p))__u.__val;                                          \
})

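/*
 * Spin on "cond_expr" using relaxed loads, parking the CPU in WFE via
 * __cmpwait_relaxed() between re-checks instead of busy-polling.
 */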
#define smp_cond_load_relaxed(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        __unqual_scalar_typeof(*ptr) VAL;                               \
        for (;;) {                                                      \
                VAL = READ_ONCE(*__PTR);                                \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        (typeof(*ptr))VAL;                                              \
})

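/* As above, but every load of *ptr carries acquire semantics. */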
#define smp_cond_load_acquire(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        __unqual_scalar_typeof(*ptr) VAL;                               \
        for (;;) {                                                      \
                VAL = smp_load_acquire(__PTR);                          \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        (typeof(*ptr))VAL;                                              \
})

#include <asm-generic/barrier.h>

#endif  /* __ASSEMBLY__ */

#endif  /* __ASM_BARRIER_H */