/*
 * Common assembler macro definitions for arm64 kernel code:
 * DAIF exception masking, cache maintenance by line, per-CPU accessors,
 * TTBR/TCR manipulation helpers, stack-frame push/pop and GNU property
 * note emission. Intended for inclusion from assembly sources only.
 */
8#ifndef __ASSEMBLY__
9#error "Only include this from assembly code"
10#endif
11
12#ifndef __ASM_ASSEMBLER_H
13#define __ASM_ASSEMBLER_H
14
15#include <asm-generic/export.h>
16
17#include <asm/asm-offsets.h>
18#include <asm/cpufeature.h>
19#include <asm/cputype.h>
20#include <asm/debug-monitors.h>
21#include <asm/page.h>
22#include <asm/pgtable-hwdef.h>
23#include <asm/ptrace.h>
24#include <asm/thread_info.h>
25
	/*
	 * save_and_disable_daif - save the current DAIF flags into \flags,
	 * then mask all four exception classes (Debug, SError, IRQ, FIQ).
	 */
	.macro save_and_disable_daif, flags
	mrs	\flags, daif
	msr	daifset, #0xf		// set D, A, I and F
	.endm

	/* disable_daif - mask Debug, SError, IRQ and FIQ exceptions */
	.macro disable_daif
	msr	daifset, #0xf		// set D, A, I and F
	.endm

	/* enable_daif - unmask Debug, SError, IRQ and FIQ exceptions */
	.macro enable_daif
	msr	daifclr, #0xf		// clear D, A, I and F
	.endm

	/* restore_daif - restore DAIF flags previously saved in \flags */
	.macro restore_daif, flags:req
	msr	daif, \flags
	.endm

	/*
	 * enable_da_f - unmask Debug (8), SError (4) and FIQ (1),
	 * leaving IRQ (2) masked.
	 */
	.macro enable_da_f
	msr	daifclr, #(8 | 4 | 1)
	.endm
47
48
49
50
	/*
	 * save_and_disable_irq - save the current DAIF flags into \flags,
	 * then mask IRQ only (DAIF bit 2).
	 */
	.macro save_and_disable_irq, flags
	mrs	\flags, daif
	msr	daifset, #2
	.endm

	/* restore_irq - restore the DAIF flags previously saved in \flags */
	.macro restore_irq, flags
	msr	daif, \flags
	.endm

	/* enable_dbg - unmask Debug exceptions (DAIF bit 8) */
	.macro enable_dbg
	msr	daifclr, #8
	.endm
63
	/*
	 * disable_step_tsk - clear MDSCR_EL1.SS (single-step) if the task's
	 * thread flags \flgs have TIF_SINGLESTEP set; otherwise do nothing.
	 * \tmp is corrupted.
	 */
	.macro disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
	isb				// make the MDSCR_EL1 update visible
9990:
	.endm

	/*
	 * enable_step_tsk - set MDSCR_EL1.SS (single-step) if the task's
	 * thread flags \flgs have TIF_SINGLESTEP set. \tmp is corrupted.
	 * No ISB here: the subsequent exception return provides the
	 * required context synchronization.
	 */
	.macro enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
9990:
	.endm
81
82
83
84
	/*
	 * esb - Error Synchronization Barrier: encoded as "hint #16" so that
	 * older assemblers accept it; a plain NOP when the kernel is built
	 * without RAS extension support.
	 */
	.macro esb
#ifdef CONFIG_ARM64_RAS_EXTN
	hint	#16			// ESB
#else
	nop
#endif
	.endm

	/*
	 * csdb - Consumption of Speculative Data Barrier, encoded as
	 * "hint #20" for assemblers that lack the CSDB mnemonic.
	 */
	.macro csdb
	hint	#20			// CSDB
	.endm
99
100
101
102
	/*
	 * sb - Speculation Barrier. Uses the SB instruction when the CPU
	 * supports it, otherwise falls back to DSB NSH + ISB. The NOP pads
	 * the SB alternative to the same two-instruction length.
	 */
	.macro sb
alternative_if_not ARM64_HAS_SB
	dsb	nsh
	isb
alternative_else
	SB_BARRIER_INSN
	nop
alternative_endif
	.endm

	/* nops - emit \num NOP instructions */
	.macro nops, num
	.rept	\num
	nop
	.endr
	.endm
121
122
123
124
	/*
	 * _asm_extable - emit an exception-table entry recording the
	 * (PC-relative) offsets of a potentially faulting instruction
	 * \from and its fixup handler \to.
	 */
	.macro		_asm_extable, from, to
	.pushsection	__ex_table, "a"
	.align		3
	.long		(\from - .), (\to - .)
	.popsection
	.endm

/*
 * USER(l, x...) - emit the user-access instruction(s) x and register
 * label l as the exception fixup for them.
 */
#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l
135
136
137
138
/* Register alias: the link register */
lr	.req	x30		// link register

	/*
	 * ventry - create an exception vector entry: a branch to \label,
	 * aligned to 2^7 = 128 bytes as required by the vector table layout.
	 */
	.macro ventry	label
	.align	7
	b	\label
	.endm
148
149
150
151
/* CPU_BE(code...) - emit code only on big-endian builds */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/* CPU_LE(code...) - emit code only on little-endian builds */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif

/*
 * regs_to_64 - construct a 64-bit value in \rd by concatenating two
 * 32-bit registers. On big-endian builds the argument order is swapped
 * so that callers can always pass (first, second) in memory order.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm
179
180
181
182
183
184
185
186
187
	/*
	 * adr_l - load the address of \sym into \dst using an
	 * ADRP/ADD pair (PC-relative, +/- 4 GB range).
	 *   @dst: destination register (64 bit wide)
	 *   @sym: name of the symbol
	 */
	.macro	adr_l, dst, sym
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.endm

	/*
	 * ldr_l - load the value stored at \sym into \dst.
	 *   @dst: destination register
	 *   @sym: name of the symbol
	 *   @tmp: optional 64-bit scratch register; needed when \dst is
	 *         not a valid base register (e.g. a 32-bit or SIMD register)
	 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm

	/*
	 * str_l - store \src to the location of \sym.
	 *   @src: source register
	 *   @sym: name of the symbol
	 *   @tmp: mandatory 64-bit scratch register used for the address
	 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm
220
221
222
223
/*
 * this_cpu_offset - load this CPU's per-CPU offset into \dst.
 * Hypervisor code always uses TPIDR_EL2; kernel code selects TPIDR_EL1
 * or TPIDR_EL2 at runtime depending on VHE support.
 */
#if defined(__KVM_NVHE_HYPERVISOR__) || defined(__KVM_VHE_HYPERVISOR__)
	.macro	this_cpu_offset, dst
	mrs	\dst, tpidr_el2
	.endm
#else
	.macro	this_cpu_offset, dst
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	\dst, tpidr_el1
alternative_else
	mrs	\dst, tpidr_el2
alternative_endif
	.endm
#endif

	/*
	 * adr_this_cpu - \dst = address of \sym for the current CPU
	 * (i.e. &sym + per-CPU offset). \tmp is corrupted.
	 */
	.macro adr_this_cpu, dst, sym, tmp
	adrp	\tmp, \sym
	add	\dst, \tmp, #:lo12:\sym
	this_cpu_offset \tmp
	add	\dst, \dst, \tmp
	.endm

	/*
	 * ldr_this_cpu - \dst = this CPU's value of the per-CPU
	 * variable \sym. \tmp is corrupted.
	 */
	.macro ldr_this_cpu dst, sym, tmp
	adr_l	\dst, \sym
	this_cpu_offset \tmp
	ldr	\dst, [\dst, \tmp]
	.endm
260
261
262
263
	/* vma_vm_mm - \rd = vma->vm_mm, given a vma pointer in \rn */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

	/*
	 * read_ctr - read CTR_EL0 into \reg. If the system has mismatched
	 * cache types across CPUs, load the system-wide safe value from
	 * arm64_ftr_reg_ctrel0 instead. The NOP pads the first alternative
	 * to the same length as the two-instruction ldr_l expansion.
	 */
	.macro	read_ctr, reg
alternative_if_not ARM64_MISMATCHED_CACHE_TYPE
	mrs	\reg, ctr_el0			// read CTR
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
	.endm
280
281
282
283
284
285
	/*
	 * raw_dcache_line_size - get the minimum D-cache line size on this
	 * CPU from CTR_EL0, in bytes. \tmp is corrupted.
	 */
	.macro	raw_dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding (DminLine)
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

	/*
	 * dcache_line_size - get the safe D-cache line size across all CPUs
	 * (uses the sanitised CTR value on mismatched systems).
	 */
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm		\tmp, \tmp, #16, #19	// cache line size encoding
	mov		\reg, #4		// bytes per word
	lsl		\reg, \reg, \tmp	// actual cache line size
	.endm

	/*
	 * raw_icache_line_size - get the minimum I-cache line size on this
	 * CPU from CTR_EL0, in bytes. \tmp is corrupted.
	 */
	.macro	raw_icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// cache line size encoding (IminLine)
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

	/*
	 * icache_line_size - get the safe I-cache line size across all CPUs
	 * (uses the sanitised CTR value on mismatched systems).
	 */
	.macro	icache_line_size, reg, tmp
	read_ctr	\tmp
	and		\tmp, \tmp, #0xf	// cache line size encoding
	mov		\reg, #4		// bytes per word
	lsl		\reg, \reg, \tmp	// actual cache line size
	.endm
323
324
325
326
	/* tcr_set_t0sz - update TCR.T0SZ in \valreg with the value \t0sz */
	.macro	tcr_set_t0sz, valreg, t0sz
	bfi	\valreg, \t0sz, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

	/* tcr_set_t1sz - update TCR.T1SZ in \valreg with the value \t1sz */
	.macro	tcr_set_t1sz, valreg, t1sz
	bfi	\valreg, \t1sz, #TCR_T1SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

	/*
	 * tcr_compute_pa_size - set TCR.(I)PS to the supported
	 * ID_AA64MMFR0_EL1.PARange value, clamped to PARANGE_MAX
	 * (values above the maximum the kernel supports are capped).
	 *
	 *	tcr:		register with the TCR_ELx value to be updated
	 *	pos:		IPS or PS bitfield position
	 *	tmp{0,1}:	temporary registers
	 */
	.macro	tcr_compute_pa_size, tcr, pos, tmp0, tmp1
	mrs	\tmp0, ID_AA64MMFR0_EL1
	// Narrow PARange to fit the PS field in TCR_ELx
	ubfx	\tmp0, \tmp0, #ID_AA64MMFR0_PARANGE_SHIFT, #3
	mov	\tmp1, #ID_AA64MMFR0_PARANGE_MAX
	cmp	\tmp0, \tmp1
	csel	\tmp0, \tmp1, \tmp0, hi		// cap at PARANGE_MAX
	bfi	\tcr, \tmp0, \pos, #3
	.endm
355
356
357
358
359
360
361
362
363
364
365
	/*
	 * __dcache_op_workaround_clean_cache - issue "dc \op" on \kaddr,
	 * promoted to "dc civac" (clean+invalidate) on CPUs with the
	 * ARM64_WORKAROUND_CLEAN_CACHE erratum.
	 */
	.macro	__dcache_op_workaround_clean_cache, op, kaddr
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \kaddr
alternative_else
	dc	civac, \kaddr
alternative_endif
	.endm

	/*
	 * dcache_by_line_op - perform data cache maintenance over the
	 * interval [kaddr, kaddr + size).
	 *
	 * 	op:		operation passed to the dc instruction
	 * 	domain:		domain used in the trailing dsb
	 * 	kaddr:		starting virtual address of the region
	 * 	size:		size of the region
	 * 	Corrupts:	kaddr, size, tmp1, tmp2
	 *
	 * cvap/cvadp have no dc mnemonic on older assemblers and are
	 * emitted via the generic sys encoding instead.
	 */
	.macro dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
	dcache_line_size \tmp1, \tmp2
	add	\size, \kaddr, \size
	sub	\tmp2, \tmp1, #1
	bic	\kaddr, \kaddr, \tmp2		// align down to line start
9998:
	.ifc	\op, cvau
	__dcache_op_workaround_clean_cache \op, \kaddr
	.else
	.ifc	\op, cvac
	__dcache_op_workaround_clean_cache \op, \kaddr
	.else
	.ifc	\op, cvap
	sys	3, c7, c12, 1, \kaddr	// dc cvap
	.else
	.ifc	\op, cvadp
	sys	3, c7, c13, 1, \kaddr	// dc cvadp
	.else
	dc	\op, \kaddr
	.endif
	.endif
	.endif
	.endif
	add	\kaddr, \kaddr, \tmp1
	cmp	\kaddr, \size
	b.lo	9998b
	dsb	\domain
	.endm
402
403
404
405
406
407
408
409
410
	/*
	 * invalidate_icache_by_line - invalidate the I-cache over the
	 * interval [start, end).
	 *
	 * 	start, end:	virtual addresses describing the region
	 *	label:		label to branch to on user fault
	 * 	Corrupts:	tmp1, tmp2
	 */
	.macro invalidate_icache_by_line start, end, tmp1, tmp2, label
	icache_line_size \tmp1, \tmp2
	sub	\tmp2, \tmp1, #1
	bic	\tmp2, \start, \tmp2		// align down to line start
9997:
USER(\label, ic	ivau, \tmp2)			// invalidate I line PoU
	add	\tmp2, \tmp2, \tmp1
	cmp	\tmp2, \end
	b.lo	9997b
	dsb	ish
	isb
	.endm
423
424
425
426
	/*
	 * reset_pmuserenr_el0 - disable EL0 access to the PMU by zeroing
	 * PMUSERENR_EL0, but only if a PMUv3 implementation is present
	 * (ID_AA64DFR0_EL1.PMUVer >= 1; sbfx keeps the sign so that the
	 * IMPLEMENTATION DEFINED value 0xf compares as negative).
	 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1
	sbfx	\tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
	cmp	\tmpreg, #1			// Skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
9000:
	.endm

	/*
	 * reset_amuserenr_el0 - disable EL0 access to the Activity Monitors
	 * by zeroing AMUSERENR_EL0, but only if AMUs are present
	 * (ID_AA64PFR0_EL1.AMU != 0).
	 */
	.macro	reset_amuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64pfr0_el1	// Check ID_AA64PFR0_EL1 value
	ubfx	\tmpreg, \tmpreg, #ID_AA64PFR0_AMU_SHIFT, #4
	cbz	\tmpreg, .Lskip_\@		// Skip if no AMU present
	msr_s	SYS_AMUSERENR_EL0, xzr		// Disable AMU access from EL0
.Lskip_\@:
	.endm
446
447
448
	/*
	 * copy_page - copy one page from \src to \dest, 64 bytes per
	 * iteration, using temporary registers \t1 .. \t8. Stores are
	 * non-temporal (stnp) to avoid polluting the cache. Loops until
	 * \src crosses the next page boundary; \src and \dest are advanced
	 * past the page. Assumes \src is page-aligned on entry (the
	 * termination test checks the low PAGE_SIZE bits of \src).
	 */
	.macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)
	b.ne	9998b
	.endm
463
464
465
466
/*
 * NOKPROBE(x) - add symbol x to the kprobe blacklist so it can never
 * be probed; a no-op when kprobes are disabled.
 */
#ifdef CONFIG_KPROBES
#define NOKPROBE(x)				\
	.pushsection "_kprobe_blacklist", "aw";	\
	.quad	x;				\
	.popsection;
#else
#define NOKPROBE(x)
#endif

/*
 * EXPORT_SYMBOL_NOKASAN(name) - export a symbol unless KASAN is
 * enabled, in which case the (uninstrumented) assembly version must
 * not shadow the instrumented C implementation.
 */
#if defined(CONFIG_KASAN_GENERIC) || defined(CONFIG_KASAN_SW_TAGS)
#define EXPORT_SYMBOL_NOKASAN(name)
#else
#define EXPORT_SYMBOL_NOKASAN(name)	EXPORT_SYMBOL(name)
#endif
481
482
483
484
485
486
487
	/*
	 * le64sym - emit a 64-bit absolute little-endian symbol reference
	 * as two 32-bit halves so it is resolved at build time even in a
	 * PIE binary. Requires cooperation from the linker script, which
	 * must provide the \sym\()_lo32 / \sym\()_hi32 halves.
	 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm

	/*
	 * mov_q - move an immediate constant into a 64-bit register using
	 * between 2 and 4 movz/movk instructions (depending on the
	 * magnitude and sign of the operand). The >> 31 / >> 47 tests
	 * detect values that fit (sign-extended) in 32 or 48 bits.
	 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm
512
513
514
515
	/*
	 * get_current_task - \rd = current task_struct pointer,
	 * which the arm64 kernel keeps in SP_EL0.
	 */
	.macro	get_current_task, rd
	mrs	\rd, sp_el0
	.endm

	/*
	 * offset_ttbr1 - offset the TTBR1 base address for configurations
	 * that select 52-bit VAs but run on hardware without the LVA
	 * extension (ID_AA64MMFR2_EL1.VARange == 0), where the kernel
	 * falls back to 48-bit VAs. orr is used as it can encode the
	 * immediate and is idempotent. \tmp is corrupted.
	 */
	.macro	offset_ttbr1, ttbr, tmp
#ifdef CONFIG_ARM64_VA_BITS_52
	mrs_s	\tmp, SYS_ID_AA64MMFR2_EL1
	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
	cbnz	\tmp, .Lskipoffs_\@		// LVA present: no offset needed
	orr	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
.Lskipoffs_\@ :
#endif
	.endm
535
536
537
538
539
540
	/*
	 * restore_ttbr1 - remove the offset applied by offset_ttbr1 from a
	 * TTBR1 value, recovering the original base address.
	 */
	.macro	restore_ttbr1, ttbr
#ifdef CONFIG_ARM64_VA_BITS_52
	bic	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
#endif
	.endm

	/*
	 * phys_to_ttbr - convert a physical address in \phys to the TTBR
	 * BADDR format in \ttbr. With 52-bit PAs the high bits of the
	 * address are folded into the low BADDR bits (lsr #46) as required
	 * by the ARMv8.2-LPA register layout; otherwise a plain move.
	 */
	.macro	phys_to_ttbr, ttbr, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	orr	\ttbr, \phys, \phys, lsr #46
	and	\ttbr, \ttbr, #TTBR_BADDR_MASK_52
#else
	mov	\ttbr, \phys
#endif
	.endm

	/*
	 * phys_to_pte - convert a physical address in \phys to the PTE
	 * output-address format in \pte.
	 */
	.macro	phys_to_pte, pte, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	/*
	 * We assume \phys is 64K aligned and this is guaranteed by only
	 * supporting 52-bit PA when 64K pages are enabled: PA[51:48] are
	 * folded into the PTE low bits by the lsr #36.
	 */
	orr	\pte, \phys, \phys, lsr #36
	and	\pte, \pte, #PTE_ADDR_MASK
#else
	mov	\pte, \phys
#endif
	.endm

	/*
	 * pte_to_phys - obtain the physical address in \phys from the pte
	 * entry in \pte, reassembling the 52-bit PA from its split fields
	 * when CONFIG_ARM64_PA_BITS_52 is enabled.
	 */
	.macro	pte_to_phys, phys, pte
#ifdef CONFIG_ARM64_PA_BITS_52
	ubfiz	\phys, \pte, #(48 - 16 - 12), #16
	bfxil	\phys, \pte, #16, #32
	lsl	\phys, \phys, #16
#else
	and	\phys, \pte, #PTE_ADDR_MASK
#endif
	.endm
585
586
587
588
	/*
	 * tcr_clear_errata_bits - clear the TCR bits affected by Fujitsu
	 * erratum 010001 in \tcr, but only on affected CPUs (MIDR_EL1
	 * matched against the erratum mask/value). tmp1/tmp2 are corrupted.
	 */
	.macro	tcr_clear_errata_bits, tcr, tmp1, tmp2
#ifdef CONFIG_FUJITSU_ERRATUM_010001
	mrs	\tmp1, midr_el1

	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001_MASK
	and	\tmp1, \tmp1, \tmp2
	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001
	cmp	\tmp1, \tmp2
	b.ne	10f				// not an affected CPU

	mov_q	\tmp2, TCR_CLEAR_FUJITSU_ERRATUM_010001
	bic	\tcr, \tcr, \tmp2
10:
#endif
	.endm

	/*
	 * pre_disable_mmu_workaround - erratum workaround prior to
	 * disabling the MMU (Falkor erratum E1041): an ISB immediately
	 * before any instruction that modifies SCTLR_ELn.
	 */
	.macro	pre_disable_mmu_workaround
#ifdef CONFIG_QCOM_FALKOR_ERRATUM_E1041
	isb
#endif
	.endm
614
615
616
617
618
619
620
	/*
	 * frame_push - push \regcount callee-saved registers to the stack,
	 * starting at x19, as well as x29/x30, and set x29 to the new
	 * value of sp. Add \extra bytes of stack space for locals.
	 */
	.macro		frame_push, regcount:req, extra
	__frame		st, \regcount, \extra
	.endm

	/*
	 * frame_pop - pop the callee-saved registers from the stack that
	 * were pushed in the most recent call to frame_push, as well as
	 * x29/x30 and any extra stack space that may have been allocated.
	 */
	.macro		frame_pop
	__frame		ld
	.endm

	/*
	 * __frame_regs - store ("st") or load ("ld") the register pair
	 * \reg1/\reg2 at frame slot \num, emitting a single-register
	 * str/ldr when \num is the last register of an odd regcount.
	 */
	.macro		__frame_regs, reg1, reg2, op, num
	.if		.Lframe_regcount == \num
	\op\()r		\reg1, [sp, #(\num + 1) * 8]
	.elseif		.Lframe_regcount > \num
	\op\()p		\reg1, \reg2, [sp, #(\num + 1) * 8]
	.endif
	.endm

	/*
	 * __frame - shared body for frame_push/frame_pop. Uses assembler
	 * symbols (.Lframe_*) to carry the frame layout from push to pop
	 * and to detect nesting (regcount is set to -1 after a pop).
	 * The frame size is the x29/x30 pair plus \regcount registers,
	 * rounded up to a 16-byte multiple, plus \extra local bytes.
	 */
	.macro		__frame, op, regcount, extra=0
	.ifc		\op, st
	.if		(\regcount) < 0 || (\regcount) > 10
	.error		"regcount should be in the range [0 ... 10]"
	.endif
	.if		((\extra) % 16) != 0
	.error		"extra should be a multiple of 16 bytes"
	.endif
	.ifdef		.Lframe_regcount
	.if		.Lframe_regcount != -1
	.error		"frame_push/frame_pop may not be nested"
	.endif
	.endif
	.set		.Lframe_regcount, \regcount
	.set		.Lframe_extra, \extra
	.set		.Lframe_local_offset, ((\regcount + 3) / 2) * 16
	stp		x29, x30, [sp, #-.Lframe_local_offset - .Lframe_extra]!
	mov		x29, sp
	.endif

	__frame_regs	x19, x20, \op, 1
	__frame_regs	x21, x22, \op, 3
	__frame_regs	x23, x24, \op, 5
	__frame_regs	x25, x26, \op, 7
	__frame_regs	x27, x28, \op, 9

	.ifc		\op, ld
	.if		.Lframe_regcount == -1
	.error		"frame_push/frame_pop may not be nested"
	.endif
	ldp		x29, x30, [sp], #.Lframe_local_offset + .Lframe_extra
	.set		.Lframe_regcount, -1
	.endif
	.endm
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
	/*
	 * cond_yield_neon - check whether to yield to another runnable task
	 * from kernel-mode NEON code (which runs with preemption disabled):
	 * if a reschedule is pending, temporarily release and re-take the
	 * NEON unit, then resume at \lbl (or fall through when \lbl is
	 * omitted). Composed from the three macros below so callers can
	 * insert their own register save/restore around the yield.
	 */
	.macro		cond_yield_neon, lbl
	if_will_cond_yield_neon
	do_cond_yield_neon
	endif_yield_neon	\lbl
	.endm

	/*
	 * if_will_cond_yield_neon - open the conditional yield: branch to
	 * the (out-of-line) yield path when the preempt count has dropped
	 * to PREEMPT_DISABLE_OFFSET, i.e. only the NEON-begin disable is
	 * left. Without CONFIG_PREEMPTION the yield path is discarded
	 * entirely. Clobbers x0.
	 */
	.macro		if_will_cond_yield_neon
#ifdef CONFIG_PREEMPTION
	get_current_task	x0
	ldr		x0, [x0, #TSK_TI_PREEMPT]
	sub		x0, x0, #PREEMPT_DISABLE_OFFSET
	cbz		x0, .Lyield_\@
	/* fall through to endif_yield_neon */
	.subsection	1
.Lyield_\@ :
#else
	.section	".discard.cond_yield_neon", "ax"
#endif
	.endm

	/*
	 * do_cond_yield_neon - actually yield: ending the kernel-NEON
	 * section re-enables preemption (allowing the reschedule),
	 * beginning a new one re-disables it.
	 */
	.macro		do_cond_yield_neon
	bl		kernel_neon_end
	bl		kernel_neon_begin
	.endm

	/*
	 * endif_yield_neon - close the conditional yield path and branch
	 * back to \lbl (or just past the yield block when \lbl is omitted).
	 */
	.macro		endif_yield_neon, lbl
	.ifnb		\lbl
	b		\lbl
	.else
	b		.Lyield_out_\@
	.endif
	.previous
.Lyield_out_\@ :
	.endm
747
748
749
750
751
752
753
/* ELF GNU program property note type and AArch64 feature property ID */
#define NT_GNU_PROPERTY_TYPE_0  5
#define GNU_PROPERTY_AARCH64_FEATURE_1_AND      0xc0000000

#define GNU_PROPERTY_AARCH64_FEATURE_1_BTI      (1U << 0)
#define GNU_PROPERTY_AARCH64_FEATURE_1_PAC      (1U << 1)

/* Default feature set advertised for BTI kernels: BTI + PAC */
#ifdef CONFIG_ARM64_BTI_KERNEL
#define GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT		\
		((GNU_PROPERTY_AARCH64_FEATURE_1_BTI |	\
		  GNU_PROPERTY_AARCH64_FEATURE_1_PAC))
#endif

/*
 * emit_aarch64_feature_1_and - emit a .note.gnu.property section
 * declaring the GNU_PROPERTY_AARCH64_FEATURE_1_AND property with the
 * feature bits in \feat, so the linker/loader can see that this object
 * supports BTI/PAC. Expands to nothing when no default is defined.
 */
#ifdef GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
.macro emit_aarch64_feature_1_and, feat=GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
	.pushsection .note.gnu.property, "a"
	.align  3
	.long   2f - 1f			// name size
	.long   6f - 3f			// descriptor size
	.long   NT_GNU_PROPERTY_TYPE_0	// note type
1:      .string "GNU"			// vendor name
2:
	.align  3
3:      .long   GNU_PROPERTY_AARCH64_FEATURE_1_AND
	.long   5f - 4f			// property data size
4:
	/*
	 * This is described with an array of char in the Linux API
	 * spec but the text and all other usage (including binutils,
	 * clang and GCC) treat this as a 32 bit value so no
	 * endianness handling is needed.
	 */
	.long   \feat			// feature bitmap
5:
	.align  3			// pad to 8-byte boundary
6:
	.popsection
.endm

#else
.macro emit_aarch64_feature_1_and, feat=0
.endm

#endif
797
798#endif
799