1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19#ifndef __ASSEMBLY__
20#error "Only include this from assembly code"
21#endif
22
23#ifndef __ASM_ASSEMBLER_H
24#define __ASM_ASSEMBLER_H
25
26#include <asm/asm-offsets.h>
27#include <asm/cpufeature.h>
28#include <asm/page.h>
29#include <asm/pgtable-hwdef.h>
30#include <asm/ptrace.h>
31#include <asm/thread_info.h>
32
33
34
35
/*
 * Mask IRQs: set the DAIF.I flag (bit 1 of the daifset immediate).
 */
	.macro	disable_irq
	msr	daifset, #2
	.endm

/*
 * Unmask IRQs: clear the DAIF.I flag.
 */
	.macro	enable_irq
	msr	daifclr, #2
	.endm
43
44
45
46
/*
 * Mask debug exceptions: set the DAIF.D flag (bit 3 of the daifset
 * immediate).
 */
	.macro	disable_dbg
	msr	daifset, #8
	.endm

/*
 * Unmask debug exceptions: clear the DAIF.D flag.
 */
	.macro	enable_dbg
	msr	daifclr, #8
	.endm
54
/*
 * disable_step_tsk - disable hardware single-step for the current task.
 *
 * \flgs: register holding the task's thread_info flags word
 * \tmp:  scratch register (clobbered)
 *
 * Does nothing unless TIF_SINGLESTEP is set in \flgs; otherwise clears
 * MDSCR_EL1.SS (bit 0) and issues an isb so the change takes effect
 * before any following instruction.
 */
	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #1
	msr	mdscr_el1, \tmp
	isb
9990:
	.endm
63
/*
 * enable_step_tsk - re-enable hardware single-step for the current task.
 *
 * \flgs: register holding the task's thread_info flags word
 * \tmp:  scratch register (clobbered)
 *
 * Does nothing unless TIF_SINGLESTEP is set in \flgs; otherwise masks
 * debug exceptions first (disable_dbg) so setting MDSCR_EL1.SS cannot
 * trigger a step exception while still in the kernel, then sets the SS
 * bit. Note: debug exceptions are left masked here — presumably they
 * are unmasked later on the return path (no enable_dbg/isb in this
 * macro); confirm against the callers.
 */
	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	disable_dbg
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #1
	msr	mdscr_el1, \tmp
9990:
	.endm
72
73
74
75
76
77
/*
 * Unmask debug exceptions and IRQs together: clear DAIF.D (8) and
 * DAIF.I (2) in a single msr.
 */
	.macro	enable_dbg_and_irq
	msr	daifclr, #(8 | 2)
	.endm
81
82
83
84
/*
 * SMP data memory barrier: emits a dmb with the given option
 * (e.g. ish) unconditionally on arm64.
 */
	.macro	smp_dmb, opt
	dmb	\opt
	.endm
88
89
90
91
/*
 * _asm_extable - emit one exception-table entry: a pair of 32-bit
 * PC-relative offsets (faulting insn, fixup target) in the
 * __ex_table section, 8-byte aligned.
 */
	.macro	_asm_extable, from, to
	.pushsection	__ex_table, "a"
	.align		3
	.long		(\from - .), (\to - .)
	.popsection
	.endm

/*
 * USER(l, x...) - annotate a potentially-faulting user-access
 * instruction 'x'; on a fault, execution resumes at label 'l' via the
 * exception table entry emitted here.
 */
#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l
102
103
104
105
/* Register alias: 'lr' is the AArch64 link register, x30. */
lr	.req	x30
107
108
109
110
/*
 * ventry - exception vector entry: a branch to \label aligned to
 * 128 bytes (.align 7), matching the 0x80-byte stride of the AArch64
 * exception vector table.
 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm
115
116
117
118
/*
 * CPU_BE(code...) - expand 'code' only on big-endian builds.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * CPU_LE(code...) - expand 'code' only on little-endian builds.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif
133
134
135
136
137
138
/*
 * regs_to_64 - combine two 32-bit register halves into one 64-bit
 * value: \rd = \lbits | (\hbits << 32). The macro's argument order is
 * swapped under CONFIG_CPU_BIG_ENDIAN so callers can pass the two
 * halves in memory-layout order regardless of endianness.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm
146
147
148
149
150
151
152
153
154
155
156
/*
 * adr_l - load the PC-relative address of \sym into \dst using an
 * adrp/add pair (range +/- 4 GB, unlike plain adr's +/- 1 MB).
 *
 * \dst: destination register
 * \sym: symbol whose address is taken
 * \tmp: optional scratch register used for the intermediate adrp
 *       result — presumably for when \dst itself cannot be the adrp
 *       destination (e.g. sp); TODO confirm against callers.
 */
	.macro	adr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.else
	adrp	\tmp, \sym
	add	\dst, \tmp, :lo12:\sym
	.endif
	.endm
166
167
168
169
170
171
172
173
/*
 * ldr_l - load the value stored at \sym into \dst, PC-relative via
 * adrp + :lo12: offset (range +/- 4 GB).
 *
 * \dst: destination register
 * \sym: symbol to load from
 * \tmp: optional scratch register used to hold the page address so
 *       \dst and the address register can differ (e.g. when \dst is
 *       a 32-bit wN view — TODO confirm intent against callers).
 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm
183
184
185
186
187
188
189
/*
 * str_l - store \src to the location of \sym, PC-relative via
 * adrp + :lo12: offset. \tmp is mandatory here (unlike adr_l/ldr_l):
 * the address register must be distinct from the value being stored.
 * Clobbers \tmp.
 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm
194
195
196
197
198
199
/*
 * this_cpu_ptr - compute this CPU's address of per-CPU symbol \sym:
 * \reg = &\sym + TPIDR_EL1 (the per-CPU offset kept in tpidr_el1).
 * Clobbers \tmp.
 */
	.macro	this_cpu_ptr, sym, reg, tmp
	adr_l	\reg, \sym
	mrs	\tmp, tpidr_el1
	add	\reg, \reg, \tmp
	.endm
205
206
207
208
/*
 * vma_vm_mm - load the vm_mm pointer from a vm_area_struct:
 * \rd = [\rn + VMA_VM_MM] (offset from asm-offsets.h).
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

/*
 * mmid - load the context id from an mm_struct:
 * \rd = [\rn + MM_CONTEXT_ID] (offset from asm-offsets.h).
 */
	.macro	mmid, rd, rn
	ldr	\rd, [\rn, #MM_CONTEXT_ID]
	.endm
219
220
221
222
/*
 * dcache_line_size - return the minimum D-cache line size in bytes.
 * Reads CTR_EL0 and extracts DminLine (bits [19:16]), which encodes
 * log2 of the line size in 4-byte words, hence \reg = 4 << DminLine.
 * Clobbers \tmp.
 */
	.macro	dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0
	ubfm	\tmp, \tmp, #16, #19
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm

/*
 * icache_line_size - return the minimum I-cache line size in bytes.
 * Reads CTR_EL0 and extracts IminLine (bits [3:0]), log2 of the line
 * size in 4-byte words, hence \reg = 4 << IminLine. Clobbers \tmp.
 */
	.macro	icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
239
240
241
242
/*
 * tcr_set_idmap_t0sz - insert the global idmap_t0sz value into the
 * T0SZ field of the TCR value held in \valreg. Expands to nothing
 * when CONFIG_ARM64_VA_BITS_48 is set (presumably the idmap then fits
 * the default T0SZ — confirm against the idmap setup code).
 * Clobbers \tmpreg.
 */
	.macro	tcr_set_idmap_t0sz, valreg, tmpreg
#ifndef CONFIG_ARM64_VA_BITS_48
	ldr_l	\tmpreg, idmap_t0sz
	bfi	\valreg, \tmpreg, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
#endif
	.endm
249
250
251
252
253
254
255
256
257
258
259
/*
 * dcache_by_line_op - apply D-cache maintenance 'dc \op' to every
 * cache line covering [\kaddr, \kaddr + \size), then 'dsb \domain'.
 *
 * For the clean ops (cvau/cvac), CPUs with the
 * ARM64_WORKAROUND_CLEAN_CACHE erratum get the clean patched into a
 * clean+invalidate (dc civac) by the alternatives framework.
 *
 * Clobbers: \kaddr (rounded down to a line boundary and advanced past
 * the range), \size (overwritten with the end address), \tmp1 (line
 * size) and \tmp2 (scratch).
 */
	.macro	dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
	dcache_line_size \tmp1, \tmp2
	add	\size, \kaddr, \size
	sub	\tmp2, \tmp1, #1
	bic	\kaddr, \kaddr, \tmp2
9998:
	.if	(\op == cvau || \op == cvac)
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \kaddr
alternative_else
	dc	civac, \kaddr
alternative_endif
	.else
	dc	\op, \kaddr
	.endif
	add	\kaddr, \kaddr, \tmp1
	cmp	\kaddr, \size
	b.lo	9998b
	dsb	\domain
	.endm
280
281
282
283
/*
 * reset_pmuserenr_el0 - disable EL0 access to the PMU.
 * Reads ID_AA64DFR0_EL1.PMUVer (bits [11:8], sign-extended via sbfx so
 * the 0xf "not implemented" encoding compares as negative) and, when a
 * PMU is present (field >= 1), clears PMUSERENR_EL0. Clobbers \tmpreg.
 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1
	sbfx	\tmpreg, \tmpreg, #8, #4
	cmp	\tmpreg, #1
	b.lt	9000f
	msr	pmuserenr_el0, xzr
9000:
	.endm
292
293
294
295
/*
 * copy_page - copy PAGE_SIZE bytes from \src to \dest in unrolled
 * 64-byte chunks, loading with ldp and storing with stnp
 * (non-temporal stores, hinting the destination need not be cached).
 *
 * Clobbers \dest and \src (both advanced past the page) and the eight
 * data registers \t1-\t8. The loop terminates when \src reaches a
 * PAGE_SIZE boundary, so both pointers are presumably page-aligned on
 * entry — confirm at call sites.
 */
	.macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)
	b.ne	9998b
	.endm
310
311
312
313
314
/*
 * ENDPIPROC(x) - end procedure x and additionally export a global
 * function alias __pi_x of the same address and size, for callers
 * that need a position-independent name for the routine.
 */
#define ENDPIPROC(x)			\
	.globl	__pi_##x;		\
	.type 	__pi_##x, %function;	\
	.set	__pi_##x, x;		\
	.size	__pi_##x, . - x;	\
	ENDPROC(x)
321
322
323
324
325
326
327
/*
 * le64sym - emit a little-endian 64-bit value composed of the two
 * 32-bit symbols \sym_lo32 and \sym_hi32 (low word first). Using two
 * .longs keeps the emitted byte order fixed regardless of the
 * assembler's target endianness.
 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm
332
333
334
335
336
337
/*
 * mov_q - load an assemble-time constant \val into \reg using the
 * shortest movz/movk sequence the value permits:
 *   - value representable as a sign-extended 32-bit: movz g1_s + movk g0
 *   - sign-extended 48-bit:                          movz g2_s + g1 + g0
 *   - full 64-bit:                                   movz g3 + g2 + g1 + g0
 * The >> 31 / >> 47 tests match both the all-zeros and all-ones upper
 * halves, i.e. values the sign-extending g1_s/g2_s relocations cover.
 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm
352
353#endif
354