linux/arch/arm64/include/asm/rwonce.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020 Google LLC.
 */
#ifndef __ASM_RWONCE_H
#define __ASM_RWONCE_H

#ifdef CONFIG_LTO

#include <linux/compiler_types.h>
#include <asm/alternative-macros.h>

#ifndef BUILD_VDSO

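/*
 * Emit an acquire load. When the assembler supports LDAPR
 * (CONFIG_AS_HAS_LDAPR), ALTERNATIVE() patches the instruction at boot:
 * CPUs advertising the ARM64_HAS_LDAPR capability use the cheaper RCpc
 * LDAPR, everything else keeps the default RCsc LDAR. As an illustrative
 * expansion, __LOAD_RCPC(b, %w0, %1) stringizes its arguments into
 * "ldarb\t%w0, %1" (patched to "ldaprb\t%w0, %1" where LDAPR is
 * available).
 */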
#ifdef CONFIG_AS_HAS_LDAPR
#define __LOAD_RCPC(sfx, regs...)					\
	ALTERNATIVE(							\
		"ldar"	#sfx "\t" #regs,				\
		".arch_extension rcpc\n"				\
		"ldapr"	#sfx "\t" #regs,				\
	ARM64_HAS_LDAPR)
#else
#define __LOAD_RCPC(sfx, regs...)	"ldar" #sfx "\t" #regs
#endif /* CONFIG_AS_HAS_LDAPR */

/*
 * When building with LTO, there is an increased risk of the compiler
 * converting an address dependency headed by a READ_ONCE() invocation
 * into a control dependency and consequently allowing for harmful
 * reordering by the CPU.
 *
 * Ensure that such transformations are harmless by overriding the generic
 * READ_ONCE() definition with one that provides RCpc acquire semantics
 * when building with LTO.
 */
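
/*
 * Illustrative sketch of the problem (hypothetical names, not taken
 * from this file):
 *
 *	struct foo *p = READ_ONCE(gp);
 *	if (p == &default_foo)
 *		return p->val;
 *
 * The load of p->val carries an address dependency on the load of gp,
 * which the CPU is obliged to order. If LTO proves that p must equal
 * &default_foo here, it may rewrite p->val as default_foo.val, leaving
 * only the control dependency of the branch, and a control dependency
 * does not order a load before a later load on arm64. Making the
 * initial load an acquire, as below, restores the required ordering.
 */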
#define __READ_ONCE(x)							\
({									\
	typeof(&(x)) __x = &(x);					\
	int atomic = 1;							\
	union { __unqual_scalar_typeof(*__x) __val; char __c[1]; } __u;	\
	switch (sizeof(x)) {						\
	case 1:								\
		asm volatile(__LOAD_RCPC(b, %w0, %1)			\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	case 2:								\
		asm volatile(__LOAD_RCPC(h, %w0, %1)			\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	case 4:								\
		asm volatile(__LOAD_RCPC(, %w0, %1)			\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	case 8:								\
		asm volatile(__LOAD_RCPC(, %0, %1)			\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	default:							\
		atomic = 0;						\
	}								\
	atomic ? (typeof(*__x))__u.__val : (*(volatile typeof(__x))__x);\
})
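
/*
 * Note: for access sizes other than 1, 2, 4 or 8 bytes, the default
 * case above clears 'atomic' and the access falls back to a plain
 * volatile pointer dereference, i.e. the usual READ_ONCE() behaviour
 * without any acquire semantics.
 */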

#endif	/* !BUILD_VDSO */
#endif	/* CONFIG_LTO */

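/*
 * asm-generic/rwonce.h supplies a generic __READ_ONCE() only when the
 * macro is not already defined, so the acquire variant above takes
 * precedence; it then layers READ_ONCE() and WRITE_ONCE() on top.
 */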
#include <asm-generic/rwonce.h>

#endif	/* __ASM_RWONCE_H */