/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020 Google LLC.
 */
5#ifndef __ASM_RWONCE_H
6#define __ASM_RWONCE_H
7
8#ifdef CONFIG_LTO
9
10#include <linux/compiler_types.h>
11#include <asm/alternative-macros.h>
12
13#ifndef BUILD_VDSO
14
/*
 * __LOAD_RCPC(sfx, regs...) expands to the assembly text of an
 * acquire load: "ldar<sfx>  <regs>" by default.
 *
 * If the assembler supports the RCpc extension, emit LDAR but let the
 * ALTERNATIVE() framework patch it to LDAPR at boot on CPUs that have
 * the ARM64_HAS_LDAPR capability.  LDAPR provides RCpc (processor-
 * consistent) acquire ordering, which is sufficient for READ_ONCE()
 * here; NOTE(review): presumably preferred over LDAR (RCsc) because
 * it is weaker/cheaper on such CPUs -- confirm against the arch docs.
 *
 * The ".arch_extension rcpc" directive is needed so the assembler
 * accepts the LDAPR mnemonic inside the alternative sequence.
 */
#ifdef CONFIG_AS_HAS_LDAPR
#define __LOAD_RCPC(sfx, regs...) \
 ALTERNATIVE( \
 "ldar" #sfx "\t" #regs, \
 ".arch_extension rcpc\n" \
 "ldapr" #sfx "\t" #regs, \
 ARM64_HAS_LDAPR)
#else
/* No assembler support for LDAPR: always use plain LDAR (RCsc acquire). */
#define __LOAD_RCPC(sfx, regs...) "ldar" #sfx "\t" #regs
#endif
25
26
27
28
29
30
31
32
33
34
35
/*
 * When building with LTO, the compiler has more freedom to transform
 * an address dependency headed by a READ_ONCE() invocation into a
 * control dependency, which would allow harmful reordering by the CPU.
 * Override the generic __READ_ONCE() (see asm-generic/rwonce.h,
 * included below) with a version that uses an acquire load via
 * __LOAD_RCPC() for every atomically-sized access, so that such
 * transformations are harmless.  This block is only compiled under
 * CONFIG_LTO (see the guard above) and outside the vDSO.
 *
 * Mechanics:
 *  - __x pins the address so "x" is evaluated exactly once.
 *  - sizes 1/2/4/8 use the acquire load; note %w0 (32-bit W register)
 *    for the sub-64-bit cases and %0 (X register) only for case 8.
 *  - any other size sets atomic = 0 and falls back to a plain
 *    volatile dereference, with no acquire semantics.
 *  - the result is read out through a union declared with
 *    __unqual_scalar_typeof(), presumably so the expression loses
 *    volatile/const qualifiers (helper is defined in
 *    linux/compiler_types.h -- confirm there).
 */
#define __READ_ONCE(x) \
({ \
 typeof(&(x)) __x = &(x); \
 int atomic = 1; \
 union { __unqual_scalar_typeof(*__x) __val; char __c[1]; } __u; \
 switch (sizeof(x)) { \
 case 1: \
 asm volatile(__LOAD_RCPC(b, %w0, %1) \
 : "=r" (*(__u8 *)__u.__c) \
 : "Q" (*__x) : "memory"); \
 break; \
 case 2: \
 asm volatile(__LOAD_RCPC(h, %w0, %1) \
 : "=r" (*(__u16 *)__u.__c) \
 : "Q" (*__x) : "memory"); \
 break; \
 case 4: \
 asm volatile(__LOAD_RCPC(, %w0, %1) \
 : "=r" (*(__u32 *)__u.__c) \
 : "Q" (*__x) : "memory"); \
 break; \
 case 8: \
 asm volatile(__LOAD_RCPC(, %0, %1) \
 : "=r" (*(__u64 *)__u.__c) \
 : "Q" (*__x) : "memory"); \
 break; \
 default: \
 atomic = 0; \
 } \
 atomic ? (typeof(*__x))__u.__val : (*(volatile typeof(__x))__x);\
})
67
68#endif
69#endif
70
71#include <asm-generic/rwonce.h>
72
73#endif
74