/*
 * Based on arch/arm/include/asm/assembler.h, arch/arm/mm/proc-macros.S
 *
 * Copyright (C) 1996-2000 Russell King
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#ifndef __ASM_ASSEMBLER_H
#define __ASM_ASSEMBLER_H

#include <asm/asm-offsets.h>
#include <asm/cpufeature.h>
#include <asm/debug-monitors.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/ptrace.h>
#include <asm/thread_info.h>

 .macro save_and_disable_daif, flags
 mrs \flags, daif
 msr daifset, #0xf
 .endm

 .macro disable_daif
 msr daifset, #0xf
 .endm

 .macro enable_daif
 msr daifclr, #0xf
 .endm

 .macro restore_daif, flags:req
 msr daif, \flags
 .endm

 /* Only on aarch64 pstate, PSR_D_BIT is different for aarch32 */
 .macro inherit_daif, pstate:req, tmp:req
 and \tmp, \pstate, #(PSR_D_BIT | PSR_A_BIT | PSR_I_BIT | PSR_F_BIT)
 msr daif, \tmp
 .endm

 /* IRQ is the lowest priority flag, unconditionally unmask the rest. */
 .macro enable_da_f
 msr daifclr, #(8 | 4 | 1)
 .endm

/*
 * Enable and disable interrupts.
 */
 .macro disable_irq
 msr daifset, #2
 .endm

 .macro enable_irq
 msr daifclr, #2
 .endm

 .macro save_and_disable_irq, flags
 mrs \flags, daif
 msr daifset, #2
 .endm

 .macro restore_irq, flags
 msr daif, \flags
 .endm

 .macro enable_dbg
 msr daifclr, #8
 .endm

 .macro disable_step_tsk, flgs, tmp
 tbz \flgs, #TIF_SINGLESTEP, 9990f
 mrs \tmp, mdscr_el1
 bic \tmp, \tmp, #DBG_MDSCR_SS
 msr mdscr_el1, \tmp
 isb // Synchronise with enable_dbg
9990:
 .endm

 /* call with daif masked */
 .macro enable_step_tsk, flgs, tmp
 tbz \flgs, #TIF_SINGLESTEP, 9990f
 mrs \tmp, mdscr_el1
 orr \tmp, \tmp, #DBG_MDSCR_SS
 msr mdscr_el1, \tmp
9990:
 .endm

/*
 * SMP data memory barrier
 */
 .macro smp_dmb, opt
 dmb \opt
 .endm

/*
 * RAS Error Synchronization barrier
 */
 .macro esb
 hint #16
 .endm

/*
 * Value prediction barrier
 */
 .macro csdb
 hint #20
 .endm

/*
 * Sanitise a 64-bit bounded index wrt speculation, returning zero if out
 * of bounds.
 */
 .macro mask_nospec64, idx, limit, tmp
 sub \tmp, \idx, \limit
 bic \tmp, \tmp, \idx
 and \idx, \idx, \tmp, asr #63
 csdb
 .endm

/*
 * NOP sequence
 */
 .macro nops, num
 .rept \num
 nop
 .endr
 .endm

/*
 * Emit an entry into the exception table
 */
 .macro _asm_extable, from, to
 .pushsection __ex_table, "a"
 .align 3
 .long (\from - .), (\to - .)
 .popsection
 .endm

#define USER(l, x...) \
9999: x; \
 _asm_extable 9999b, l

/*
 * Register aliases.
 */
lr .req x30 // link register

/*
 * Vector entry
 */
 .macro ventry label
 .align 7
 b \label
 .endm

/*
 * Select code when configured for BE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * Select code when configured for LE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif

/*
 * Define a macro that constructs a 64-bit value by concatenating two
 * 32-bit registers. Note that on big endian systems the order of the
 * registers is swapped.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
 .macro regs_to_64, rd, lbits, hbits
#else
 .macro regs_to_64, rd, hbits, lbits
#endif
 orr \rd, \lbits, \hbits, lsl #32
 .endm
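
/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC.
 */
 /*
  * @dst: destination register (64 bit wide)
  * @sym: name of the symbol
  */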
 .macro adr_l, dst, sym
 adrp \dst, \sym
 add \dst, \dst, :lo12:\sym
 .endm
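
 /*
  * @dst: destination register (32 or 64 bit wide)
  * @sym: name of the symbol
  * @tmp: optional 64-bit scratch register to be used if <dst> is a
  *       32-bit wide register, in which case it cannot be used to hold
  *       the address
  */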
 .macro ldr_l, dst, sym, tmp=
 .ifb \tmp
 adrp \dst, \sym
 ldr \dst, [\dst, :lo12:\sym]
 .else
 adrp \tmp, \sym
 ldr \dst, [\tmp, :lo12:\sym]
 .endif
 .endm
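
 /*
  * @src: source register (32 or 64 bit wide)
  * @sym: name of the symbol
  * @tmp: mandatory 64-bit scratch register to calculate the address
  *       while <src> needs to be preserved.
  */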
 .macro str_l, src, sym, tmp
 adrp \tmp, \sym
 str \src, [\tmp, :lo12:\sym]
 .endm
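
 /*
  * @dst: Result of per_cpu(sym, smp_processor_id()) (can be SP)
  * @sym: The name of the per-cpu variable
  * @tmp: scratch register
  */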
 .macro adr_this_cpu, dst, sym, tmp
 adrp \tmp, \sym
 add \dst, \tmp, #:lo12:\sym
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
 mrs \tmp, tpidr_el1
alternative_else
 mrs \tmp, tpidr_el2
alternative_endif
 add \dst, \dst, \tmp
 .endm
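
 /*
  * @dst: Result of READ_ONCE(per_cpu(sym, smp_processor_id()))
  * @sym: The name of the per-cpu variable
  * @tmp: scratch register
  */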
 .macro ldr_this_cpu dst, sym, tmp
 adr_l \dst, \sym
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
 mrs \tmp, tpidr_el1
alternative_else
 mrs \tmp, tpidr_el2
alternative_endif
 ldr \dst, [\dst, \tmp]
 .endm

/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */
 .macro vma_vm_mm, rd, rn
 ldr \rd, [\rn, #VMA_VM_MM]
 .endm

/*
 * mmid - get context id from mm pointer (mm->context.id)
 */
 .macro mmid, rd, rn
 ldr \rd, [\rn, #MM_CONTEXT_ID]
 .endm
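
/*
 * read_ctr - read CTR_EL0. If the system has mismatched cache line
 * sizes, provide the system wide safe value from
 * arm64_ftr_reg_ctrel0.sys_val instead.
 */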
 .macro read_ctr, reg
alternative_if_not ARM64_MISMATCHED_CACHE_LINE_SIZE
 mrs \reg, ctr_el0
 nop
alternative_else
 ldr_l \reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
 .endm
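
/*
 * raw_dcache_line_size - get the minimum D-cache line size on this CPU
 * from the CTR register.
 */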
 .macro raw_dcache_line_size, reg, tmp
 mrs \tmp, ctr_el0 // read CTR
 ubfm \tmp, \tmp, #16, #19 // cache line size encoding
 mov \reg, #4 // bytes per word
 lsl \reg, \reg, \tmp // actual cache line size
 .endm
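
/*
 * dcache_line_size - get the safe D-cache line size across all CPUs
 */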
 .macro dcache_line_size, reg, tmp
 read_ctr \tmp
 ubfm \tmp, \tmp, #16, #19 // cache line size encoding
 mov \reg, #4 // bytes per word
 lsl \reg, \reg, \tmp // actual cache line size
 .endm
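
/*
 * raw_icache_line_size - get the minimum I-cache line size on this CPU
 * from the CTR register.
 */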
 .macro raw_icache_line_size, reg, tmp
 mrs \tmp, ctr_el0 // read CTR
 and \tmp, \tmp, #0xf // cache line size encoding
 mov \reg, #4 // bytes per word
 lsl \reg, \reg, \tmp // actual cache line size
 .endm
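
/*
 * icache_line_size - get the safe I-cache line size across all CPUs
 */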
 .macro icache_line_size, reg, tmp
 read_ctr \tmp
 and \tmp, \tmp, #0xf // cache line size encoding
 mov \reg, #4 // bytes per word
 lsl \reg, \reg, \tmp // actual cache line size
 .endm
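
/*
 * tcr_set_idmap_t0sz - update TCR.T0SZ so that we can load the ID map
 */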
 .macro tcr_set_idmap_t0sz, valreg, tmpreg
 ldr_l \tmpreg, idmap_t0sz
 bfi \valreg, \tmpreg, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
 .endm
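
/*
 * tcr_compute_pa_size - set TCR.(I)PS to the highest supported
 * ID_AA64MMFR0_EL1.PARange value
 *
 * tcr:       register with the TCR_ELx value to be updated
 * pos:       IPS or PS bitfield position
 * tmp{0,1}:  temporary registers
 */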
 .macro tcr_compute_pa_size, tcr, pos, tmp0, tmp1
 mrs \tmp0, ID_AA64MMFR0_EL1
 // Narrow PARange to fit the PS field in TCR_ELx
 ubfx \tmp0, \tmp0, #ID_AA64MMFR0_PARANGE_SHIFT, #3
 mov \tmp1, #ID_AA64MMFR0_PARANGE_MAX
 cmp \tmp0, \tmp1
 csel \tmp0, \tmp1, \tmp0, hi
 bfi \tcr, \tmp0, \pos, #3
 .endm
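
/*
 * Macro to perform a data cache maintenance operation for the interval
 * [kaddr, kaddr + size)
 *
 * op:       operation passed to the dc instruction
 * domain:   domain used in the dsb instruction
 * kaddr:    starting virtual address of the region
 * size:     size of the region
 * Corrupts: kaddr, size, tmp1, tmp2
 */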
 .macro dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
 dcache_line_size \tmp1, \tmp2
 add \size, \kaddr, \size
 sub \tmp2, \tmp1, #1
 bic \kaddr, \kaddr, \tmp2
9998:
 .if (\op == cvau || \op == cvac)
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
 dc \op, \kaddr
alternative_else
 dc civac, \kaddr
alternative_endif
 .elseif (\op == cvap)
alternative_if ARM64_HAS_DCPOP
 sys 3, c7, c12, 1, \kaddr // dc cvap
alternative_else
 dc cvac, \kaddr
alternative_endif
 .else
 dc \op, \kaddr
 .endif
 add \kaddr, \kaddr, \tmp1
 cmp \kaddr, \size
 b.lo 9998b
 dsb \domain
 .endm
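
/*
 * Macro to perform an instruction cache maintenance operation for the
 * interval [start, end)
 *
 * start, end: virtual addresses describing the region
 * label:      a label to branch to on user fault
 * Corrupts:   tmp1, tmp2
 */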
 .macro invalidate_icache_by_line start, end, tmp1, tmp2, label
 icache_line_size \tmp1, \tmp2
 sub \tmp2, \tmp1, #1
 bic \tmp2, \start, \tmp2
9997:
USER(\label, ic ivau, \tmp2) // invalidate I line PoU
 add \tmp2, \tmp2, \tmp1
 cmp \tmp2, \end
 b.lo 9997b
 dsb ish
 isb
 .endm
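
/*
 * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present
 */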
 .macro reset_pmuserenr_el0, tmpreg
 mrs \tmpreg, id_aa64dfr0_el1 // Check ID_AA64DFR0_EL1 PMUVer
 sbfx \tmpreg, \tmpreg, #8, #4
 cmp \tmpreg, #1 // Skip if no PMU present
 b.lt 9000f
 msr pmuserenr_el0, xzr // Disable PMU access from EL0
9000:
 .endm
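
/*
 * copy_page - copy src to dest using temp registers t1-t8
 */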
 .macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998: ldp \t1, \t2, [\src]
 ldp \t3, \t4, [\src, #16]
 ldp \t5, \t6, [\src, #32]
 ldp \t7, \t8, [\src, #48]
 add \src, \src, #64
 stnp \t1, \t2, [\dest]
 stnp \t3, \t4, [\dest, #16]
 stnp \t5, \t6, [\dest, #32]
 stnp \t7, \t8, [\dest, #48]
 add \dest, \dest, #64
 tst \src, #(PAGE_SIZE - 1)
 b.ne 9998b
 .endm
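
/*
 * Annotate a function as position independent, i.e., safe to be called
 * before the kernel virtual mapping is activated.
 */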
#define ENDPIPROC(x) \
 .globl __pi_##x; \
 .type __pi_##x, %function; \
 .set __pi_##x, x; \
 .size __pi_##x, . - x; \
 ENDPROC(x)
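
/*
 * Annotate a function as being unsuitable for kprobes.
 */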
#ifdef CONFIG_KPROBES
#define NOKPROBE(x) \
 .pushsection "_kprobe_blacklist", "aw"; \
 .quad x; \
 .popsection;
#else
#define NOKPROBE(x)
#endif
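
 /*
  * Emit a 64-bit absolute little endian symbol reference in a way that
  * ensures that it will be resolved at build time, even when building a
  * PIE binary. This requires cooperation from the linker script, which
  * must emit the lo32/hi32 halves individually.
  */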
 .macro le64sym, sym
 .long \sym\()_lo32
 .long \sym\()_hi32
 .endm
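
 /*
  * mov_q - move an immediate constant into a 64-bit register using
  *         between 2 and 4 movz/movk instructions (depending on the
  *         magnitude and sign of the operand)
  */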
 .macro mov_q, reg, val
 .if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
 movz \reg, :abs_g1_s:\val
 .else
 .if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
 movz \reg, :abs_g2_s:\val
 .else
 movz \reg, :abs_g3:\val
 movk \reg, :abs_g2_nc:\val
 .endif
 movk \reg, :abs_g1_nc:\val
 .endif
 movk \reg, :abs_g0_nc:\val
 .endm
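
 /*
  * Return the current thread_info.
  */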
 .macro get_thread_info, rd
 mrs \rd, sp_el0
 .endm
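
 /*
  * Arrange a physical address in a TTBR register, taking care of 52-bit
  * addresses.
  *
  * phys: physical address, preserved
  * ttbr: returns the TTBR value
  */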
 .macro phys_to_ttbr, ttbr, phys
#ifdef CONFIG_ARM64_PA_BITS_52
 orr \ttbr, \phys, \phys, lsr #46
 and \ttbr, \ttbr, #TTBR_BADDR_MASK_52
#else
 mov \ttbr, \phys
#endif
 .endm

 .macro phys_to_pte, pte, phys
#ifdef CONFIG_ARM64_PA_BITS_52
 /*
  * We assume \phys is 64K aligned and this is guaranteed by only
  * supporting this configuration with 64K pages.
  */
 orr \pte, \phys, \phys, lsr #36
 and \pte, \pte, #PTE_ADDR_MASK
#else
 mov \pte, \phys
#endif
 .endm

 .macro pte_to_phys, phys, pte
#ifdef CONFIG_ARM64_PA_BITS_52
 ubfiz \phys, \pte, #(48 - 16 - 12), #16
 bfxil \phys, \pte, #16, #32
 lsl \phys, \phys, #16
#else
 and \phys, \pte, #PTE_ADDR_MASK
#endif
 .endm
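
/*
 * Errata workaround prior to disabling the MMU. Insert an ISB immediately
 * prior to executing the MSR that will change SCTLR_ELn[M] from 1 to 0.
 */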
 .macro pre_disable_mmu_workaround
#ifdef CONFIG_QCOM_FALKOR_ERRATUM_E1041
 isb
#endif
 .endm
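
 /*
  * frame_push - Push @regcount callee saved registers to the stack,
  *              starting at x19, as well as x29/x30, and set x29 to
  *              the new value of sp. Add @extra bytes of stack space
  *              for locals.
  */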
 .macro frame_push, regcount:req, extra
 __frame st, \regcount, \extra
 .endm
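
 /*
  * frame_pop - Pop the callee saved registers from the stack that were
  *             pushed in the most recent call to frame_push, as well as
  *             x29/x30 and any extra stack space that may have been
  *             allocated.
  */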
 .macro frame_pop
 __frame ld
 .endm

 .macro __frame_regs, reg1, reg2, op, num
 .if .Lframe_regcount == \num
 \op\()r \reg1, [sp, #(\num + 1) * 8]
 .elseif .Lframe_regcount > \num
 \op\()p \reg1, \reg2, [sp, #(\num + 1) * 8]
 .endif
 .endm

 .macro __frame, op, regcount, extra=0
 .ifc \op, st
 .if (\regcount) < 0 || (\regcount) > 10
 .error "regcount should be in the range [0 ... 10]"
 .endif
 .if ((\extra) % 16) != 0
 .error "extra should be a multiple of 16 bytes"
 .endif
 .ifdef .Lframe_regcount
 .if .Lframe_regcount != -1
 .error "frame_push/frame_pop may not be nested"
 .endif
 .endif
 .set .Lframe_regcount, \regcount
 .set .Lframe_extra, \extra
 .set .Lframe_local_offset, ((\regcount + 3) / 2) * 16
 stp x29, x30, [sp, #-.Lframe_local_offset - .Lframe_extra]!
 mov x29, sp
 .endif

 __frame_regs x19, x20, \op, 1
 __frame_regs x21, x22, \op, 3
 __frame_regs x23, x24, \op, 5
 __frame_regs x25, x26, \op, 7
 __frame_regs x27, x28, \op, 9

 .ifc \op, ld
 .if .Lframe_regcount == -1
 .error "frame_push/frame_pop may not be nested"
 .endif
 ldp x29, x30, [sp], #.Lframe_local_offset + .Lframe_extra
 .set .Lframe_regcount, -1
 .endif
 .endm
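
/*
 * Check whether to yield to another runnable task from kernel mode NEON
 * code (which runs with preemption disabled). The expected usage is:
 *
 * if_will_cond_yield_neon
 *        // pre-yield patchup code
 * do_cond_yield_neon
 *        // post-yield patchup code
 * endif_yield_neon    <label>
 *
 * where <label> is optional, and marks the point where execution will
 * resume after a yield has been performed. If omitted, execution resumes
 * right after the endif_yield_neon invocation. Note that the entire
 * sequence, including the provided patchup code, will be omitted from the
 * image if CONFIG_PREEMPT is not defined.
 *
 * As a convenience, in the case where no patchup code is required, the
 * above sequence may be abbreviated to
 *
 * cond_yield_neon <label>
 *
 * Note that the patchup code does not support assembler directives that
 * change the output section, any uses of such directives must be checked
 * manually.
 */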
 .macro cond_yield_neon, lbl
 if_will_cond_yield_neon
 do_cond_yield_neon
 endif_yield_neon \lbl
 .endm

 .macro if_will_cond_yield_neon
#ifdef CONFIG_PREEMPT
 get_thread_info x0
 ldr w1, [x0, #TSK_TI_PREEMPT]
 ldr x0, [x0, #TSK_TI_FLAGS]
 cmp w1, #PREEMPT_DISABLE_OFFSET
 csel x0, x0, xzr, eq
 tbnz x0, #TIF_NEED_RESCHED, .Lyield_\@ // needs rescheduling?

 /* fall through to endif_yield_neon */
 .subsection 1
.Lyield_\@ :
#else
 .section ".discard.cond_yield_neon", "ax"
#endif
 .endm

 .macro do_cond_yield_neon
 bl kernel_neon_end
 bl kernel_neon_begin
 .endm

 .macro endif_yield_neon, lbl
 .ifnb \lbl
 b \lbl
 .else
 b .Lyield_out_\@
 .endif
 .previous
.Lyield_out_\@ :
 .endm

#endif /* __ASM_ASSEMBLER_H */