/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/assembler.h, arch/arm/mm/proc-macros.S
 *
 * Copyright (C) 1996-2000 Russell King
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#ifndef __ASM_ASSEMBLER_H
#define __ASM_ASSEMBLER_H

#include <asm-generic/export.h>

#include <asm/alternative.h>
#include <asm/asm-bug.h>
#include <asm/asm-extable.h>
#include <asm/asm-offsets.h>
#include <asm/cpufeature.h>
#include <asm/cputype.h>
#include <asm/debug-monitors.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/ptrace.h>
#include <asm/thread_info.h>

	/*
	 * Provide a wxN alias for each wN register so that a macro parameter
	 * holding a register number (0-30) can be pasted after "wx" to form
	 * a 32-bit register reference.
	 */
	.irp	n,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30
	wx\n	.req	w\n
	.endr

	.macro	save_and_disable_daif, flags
	mrs	\flags, daif
	msr	daifset, #0xf
	.endm

	.macro	disable_daif
	msr	daifset, #0xf
	.endm

	.macro	enable_daif
	msr	daifclr, #0xf
	.endm

	.macro	restore_daif, flags:req
	msr	daif, \flags
	.endm

	/* Unmask Debug + SError; IRQ/FIQ remain blocked */
	.macro	enable_da
	msr	daifclr, #(8 | 4)
	.endm

	/*
	 * Save/restore interrupts.
	 */
	.macro	save_and_disable_irq, flags
	mrs	\flags, daif
	msr	daifset, #3
	.endm

	.macro	restore_irq, flags
	msr	daif, \flags
	.endm

	.macro	enable_dbg
	msr	daifclr, #8
	.endm

	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
	isb	// Synchronise with enable_dbg
9990:
	.endm

	/* call with daif masked */
	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
9990:
	.endm

	/*
	 * RAS Error Synchronization barrier
	 */
	.macro	esb
#ifdef CONFIG_ARM64_RAS_EXTN
	hint	#16
#else
	nop
#endif
	.endm

	/*
	 * Value prediction barrier
	 */
	.macro	csdb
	hint	#20
	.endm

	/*
	 * Speculation barrier
	 */
	.macro	sb
alternative_if_not ARM64_HAS_SB
	dsb	nsh
	isb
alternative_else
	SB_BARRIER_INSN
	nop
alternative_endif
	.endm

	/*
	 * NOP sequence
	 */
	.macro	nops, num
	.rept	\num
	nop
	.endr
	.endm

	/*
	 * Register aliases.
	 */
lr	.req	x30		// link register

	/*
	 * Vector entry
	 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm

	/*
	 * Select code when configured for BE.
	 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

	/*
	 * Select code when configured for LE.
	 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif

	/*
	 * Define a macro that constructs a 64-bit value by concatenating two
	 * 32-bit registers. Note that on big endian systems the order of the
	 * registers is swapped.
	 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm

/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC.
 */
	/*
	 * @dst: destination register (64 bit wide)
	 * @sym: name of the symbol
	 */
	.macro	adr_l, dst, sym
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.endm

	/*
	 * @dst: destination register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: optional 64-bit scratch register to be used if <dst> is a
	 *       32-bit wide register, in which case it cannot be used to hold
	 *       the address
	 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm

	/*
	 * @src: source register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: mandatory 64-bit scratch register to calculate the address
	 *       while <src> needs to be preserved.
	 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm

	/*
	 * @dst: destination register
	 */
#if defined(__KVM_NVHE_HYPERVISOR__) || defined(__KVM_VHE_HYPERVISOR__)
	.macro	get_this_cpu_offset, dst
	mrs	\dst, tpidr_el2
	.endm
#else
	.macro	get_this_cpu_offset, dst
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	\dst, tpidr_el1
alternative_else
	mrs	\dst, tpidr_el2
alternative_endif
	.endm

	.macro	set_this_cpu_offset, src
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	msr	tpidr_el1, \src
alternative_else
	msr	tpidr_el2, \src
alternative_endif
	.endm
#endif

	/*
	 * @dst: Result of per_cpu(sym, smp_processor_id()) (can be SP)
	 * @sym: The name of the per-cpu variable
	 * @tmp: scratch register
	 */
	.macro	adr_this_cpu, dst, sym, tmp
	adrp	\tmp, \sym
	add	\dst, \tmp, #:lo12:\sym
	get_this_cpu_offset \tmp
	add	\dst, \dst, \tmp
	.endm

	/*
	 * @dst: Result of READ_ONCE(per_cpu(sym, smp_processor_id()))
	 * @sym: The name of the per-cpu variable
	 * @tmp: scratch register
	 */
	.macro	ldr_this_cpu dst, sym, tmp
	adr_l	\dst, \sym
	get_this_cpu_offset \tmp
	ldr	\dst, [\dst, \tmp]
	.endm

/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

/*
 * read_ctr - read CTR_EL0. If the system has mismatched register fields,
 * provide the system wide safe value from arm64_ftr_reg_ctrel0.sys_val
 */
	.macro	read_ctr, reg
#ifndef __KVM_NVHE_HYPERVISOR__
alternative_if_not ARM64_MISMATCHED_CACHE_TYPE
	mrs	\reg, ctr_el0			// read CTR
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
#else
alternative_if_not ARM64_KVM_PROTECTED_MODE
	ASM_BUG()
alternative_else_nop_endif
alternative_cb kvm_compute_final_ctr_el0
	movz	\reg, #0
	movk	\reg, #0, lsl #16
	movk	\reg, #0, lsl #32
	movk	\reg, #0, lsl #48
alternative_cb_end
#endif
	.endm

/*
 * raw_dcache_line_size - get the minimum D-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * dcache_line_size - get the safe D-cache line size across all CPUs
 */
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * raw_icache_line_size - get the minimum I-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * icache_line_size - get the safe I-cache line size across all CPUs
 */
	.macro	icache_line_size, reg, tmp
	read_ctr	\tmp
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * tcr_set_t0sz - update TCR.T0SZ so that we can load the ID map
 */
	.macro	tcr_set_t0sz, valreg, t0sz
	bfi	\valreg, \t0sz, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

/*
 * tcr_set_t1sz - update TCR.T1SZ
 */
	.macro	tcr_set_t1sz, valreg, t1sz
	bfi	\valreg, \t1sz, #TCR_T1SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

/*
 * tcr_compute_pa_size - set TCR.(I)PS to the highest supported
 * ID_AA64MMFR0_EL1.PARange value
 *
 *	tcr:		register with the TCR_ELx value to be updated
 *	pos:		IPS or PS bitfield position
 *	tmp{0,1}:	temporary registers
 */
	.macro	tcr_compute_pa_size, tcr, pos, tmp0, tmp1
	mrs	\tmp0, ID_AA64MMFR0_EL1
	// Narrow PARange to fit the PS field in TCR_ELx
	ubfx	\tmp0, \tmp0, #ID_AA64MMFR0_PARANGE_SHIFT, #3
	mov	\tmp1, #ID_AA64MMFR0_PARANGE_MAX
	cmp	\tmp0, \tmp1
	csel	\tmp0, \tmp1, \tmp0, hi
	bfi	\tcr, \tmp0, \pos, #3
	.endm

	.macro __dcache_op_workaround_clean_cache, op, addr
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \addr
alternative_else
	dc	civac, \addr
alternative_endif
	.endm

/*
 * Macro to perform a data cache maintenance for the interval
 * [start, end) with dcache line size explicitly provided.
 *
 *	op:		operation passed to dc instruction
 *	domain:		domain used in dsb instruction
 *	start:		starting virtual address of the region
 *	end:		end virtual address of the region
 *	linesz:		dcache line size
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	start, end, tmp
 */
	.macro dcache_by_myline_op op, domain, start, end, linesz, tmp, fixup
	sub	\tmp, \linesz, #1
	bic	\start, \start, \tmp
.Ldcache_op\@:
	.ifc	\op, cvau
	__dcache_op_workaround_clean_cache \op, \start
	.else
	.ifc	\op, cvac
	__dcache_op_workaround_clean_cache \op, \start
	.else
	.ifc	\op, cvap
	sys	3, c7, c12, 1, \start	// dc cvap
	.else
	.ifc	\op, cvadp
	sys	3, c7, c13, 1, \start	// dc cvadp
	.else
	dc	\op, \start
	.endif
	.endif
	.endif
	.endif
	add	\start, \start, \linesz
	cmp	\start, \end
	b.lo	.Ldcache_op\@
	dsb	\domain

	_cond_extable .Ldcache_op\@, \fixup
	.endm

/*
 * Macro to perform a data cache maintenance for the interval
 * [start, end)
 *
 *	op:		operation passed to dc instruction
 *	domain:		domain used in dsb instruction
 *	start:		starting virtual address of the region
 *	end:		end virtual address of the region
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	start, end, tmp1, tmp2
 */
	.macro dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
	dcache_line_size \tmp1, \tmp2
	dcache_by_myline_op \op, \domain, \start, \end, \tmp1, \tmp2, \fixup
	.endm

/*
 * Macro to invalidate the I-cache for the interval [start, end)
 *
 *	start, end:	virtual addresses describing the region
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	tmp1, tmp2
 */
	.macro invalidate_icache_by_line start, end, tmp1, tmp2, fixup
	icache_line_size \tmp1, \tmp2
	sub	\tmp2, \tmp1, #1
	bic	\tmp2, \start, \tmp2
.Licache_op\@:
	ic	ivau, \tmp2			// invalidate I line to PoU
	add	\tmp2, \tmp2, \tmp1
	cmp	\tmp2, \end
	b.lo	.Licache_op\@
	dsb	ish
	isb

	_cond_extable .Licache_op\@, \fixup
	.endm

/*
 * To prevent the possibility of old and new partial table walks being visible
 * in the TLB, switch the ttbr to a zero page when we invalidate the old
 * records (even switching to the copied tables will cause a changed output
 * address at each stage of the walk), as required by the architecture's
 * break-before-make rules.
 */
	.macro break_before_make_ttbr_switch zero_page, page_table, tmp, tmp2
	phys_to_ttbr \tmp, \zero_page
	msr	ttbr1_el1, \tmp
	isb
	tlbi	vmalle1
	dsb	nsh
	phys_to_ttbr \tmp, \page_table
	offset_ttbr1 \tmp, \tmp2
	msr	ttbr1_el1, \tmp
	isb
	.endm

/*
 * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present
 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1
	sbfx	\tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
	cmp	\tmpreg, #1			// Skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
9000:
	.endm

/*
 * reset_amuserenr_el0 - reset AMUSERENR_EL0 if AMUv1 present
 */
	.macro	reset_amuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64pfr0_el1
	ubfx	\tmpreg, \tmpreg, #ID_AA64PFR0_AMU_SHIFT, #4
	cbz	\tmpreg, .Lskip_\@		// Skip if no AMU present
	msr_s	SYS_AMUSERENR_EL0, xzr		// Disable AMU access from EL0
.Lskip_\@:
	.endm

/*
 * copy_page - copy one page from src to dest using temp registers t1-t8
 */
	.macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)
	b.ne	9998b
	.endm

/*
 * Annotate a function as being unsuitable for kprobes.
 */
#ifdef CONFIG_KPROBES
#define NOKPROBE(x)				\
	.pushsection "_kprobe_blacklist", "aw";	\
	.quad	x;				\
	.popsection;
#else
#define NOKPROBE(x)
#endif

#if defined(CONFIG_KASAN_GENERIC) || defined(CONFIG_KASAN_SW_TAGS)
#define EXPORT_SYMBOL_NOKASAN(name)
#else
#define EXPORT_SYMBOL_NOKASAN(name)	EXPORT_SYMBOL(name)
#endif

#ifdef CONFIG_KASAN_HW_TAGS
#define EXPORT_SYMBOL_NOHWKASAN(name)
#else
#define EXPORT_SYMBOL_NOHWKASAN(name)	EXPORT_SYMBOL_NOKASAN(name)
#endif

	/*
	 * Emit a 64-bit absolute little endian symbol reference in a way
	 * that ensures that it will be resolved at build time, even when
	 * building a PIE binary. This requires cooperation from the
	 * linker script, which must emit the lo32/hi32 halves individually.
	 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm

	/*
	 * mov_q - move an immediate constant into a 64-bit register using
	 *         between 2 and 4 movz/movk instructions (depending on the
	 *         magnitude and sign of the operand)
	 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm

/*
 * Return the current task_struct.
 */
	.macro	get_current_task, rd
	mrs	\rd, sp_el0
	.endm

/*
 * Offset ttbr1 to allow for 48-bit kernel VAs set with 52-bit PTRS_PER_PGD.
 * orr is used as it can cover the immediate value (and is idempotent).
 *	ttbr: Value of ttbr to set, modified.
 */
	.macro	offset_ttbr1, ttbr, tmp
#ifdef CONFIG_ARM64_VA_BITS_52
	mrs_s	\tmp, SYS_ID_AA64MMFR2_EL1
	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
	cbnz	\tmp, .Lskipoffs_\@
	orr	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
.Lskipoffs_\@ :
#endif
	.endm

/*
 * Perform the reverse of offset_ttbr1.
 * bic is used as it can cover the immediate value and, in future, won't need
 * to be nop'ed out when dealing with 52-bit kernel VAs.
 */
	.macro	restore_ttbr1, ttbr
#ifdef CONFIG_ARM64_VA_BITS_52
	bic	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
#endif
	.endm

/*
 * Arrange a physical address in a TTBR register, taking care of 52-bit
 * addresses.
 *
 *	phys:	physical address, preserved
 *	ttbr:	returns the TTBR value
 */
	.macro	phys_to_ttbr, ttbr, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	orr	\ttbr, \phys, \phys, lsr #46
	and	\ttbr, \ttbr, #TTBR_BADDR_MASK_52
#else
	mov	\ttbr, \phys
#endif
	.endm

	.macro	phys_to_pte, pte, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	/*
	 * We assume \phys is 64K aligned and this is guaranteed by only
	 * supporting 52-bit PA when 64K PA granule is in use.
	 */
	orr	\pte, \phys, \phys, lsr #36
	and	\pte, \pte, #PTE_ADDR_MASK
#else
	mov	\pte, \phys
#endif
	.endm

	.macro	pte_to_phys, phys, pte
#ifdef CONFIG_ARM64_PA_BITS_52
	ubfiz	\phys, \pte, #(48 - 16 - 12), #16
	bfxil	\phys, \pte, #16, #32
	lsl	\phys, \phys, #16
#else
	and	\phys, \pte, #PTE_ADDR_MASK
#endif
	.endm

/*
 * tcr_clear_errata_bits - Clear TCR bits that trigger an errata on this CPU.
 */
	.macro	tcr_clear_errata_bits, tcr, tmp1, tmp2
#ifdef CONFIG_FUJITSU_ERRATUM_010001
	mrs	\tmp1, midr_el1

	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001_MASK
	and	\tmp1, \tmp1, \tmp2
	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001
	cmp	\tmp1, \tmp2
	b.ne	10f

	mov_q	\tmp2, TCR_CLEAR_FUJITSU_ERRATUM_010001
	bic	\tcr, \tcr, \tmp2
10:
#endif
	.endm

/*
 * Errata workaround prior to disabling the MMU. Insert an ISB immediately
 * prior to executing the MSR that changes SCTLR_ELn[M] from 1 to 0.
 */
	.macro	pre_disable_mmu_workaround
#ifdef CONFIG_QCOM_FALKOR_ERRATUM_E1041
	isb
#endif
	.endm

	/*
	 * frame_push - Push @regcount callee saved registers to the stack,
	 *              starting at x19, as well as x29/x30, and set x29 to
	 *              the new value of sp. Add @extra bytes of stack space
	 *              for locals.
	 */
	.macro	frame_push, regcount:req, extra
	__frame	st, \regcount, \extra
	.endm

	/*
	 * frame_pop  - Pop the callee saved registers from the stack that were
	 *              pushed in the most recent call to frame_push, as well
	 *              as x29/x30 and any extra stack space that may have been
	 *              allocated.
	 */
	.macro	frame_pop
	__frame	ld
	.endm

	.macro	__frame_regs, reg1, reg2, op, num
	.if .Lframe_regcount == \num
	\op\()r	\reg1, [sp, #(\num + 1) * 8]
	.elseif .Lframe_regcount > \num
	\op\()p	\reg1, \reg2, [sp, #(\num + 1) * 8]
	.endif
	.endm

	.macro	__frame, op, regcount, extra=0
	.ifc	\op, st
	.if (\regcount) < 0 || (\regcount) > 10
	.error	"regcount should be in the range [0 ... 10]"
	.endif
	.if ((\extra) % 16) != 0
	.error	"extra should be a multiple of 16 bytes"
	.endif
	.ifdef	.Lframe_regcount
	.if .Lframe_regcount != -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	.endif
	.set	.Lframe_regcount, \regcount
	.set	.Lframe_extra, \extra
	.set	.Lframe_local_offset, ((\regcount + 3) / 2) * 16
	stp	x29, x30, [sp, #-.Lframe_local_offset - .Lframe_extra]!
	mov	x29, sp
	.endif

	__frame_regs	x19, x20, \op, 1
	__frame_regs	x21, x22, \op, 3
	__frame_regs	x23, x24, \op, 5
	__frame_regs	x25, x26, \op, 7
	__frame_regs	x27, x28, \op, 9

	.ifc	\op, ld
	.if .Lframe_regcount == -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	ldp	x29, x30, [sp], #.Lframe_local_offset + .Lframe_extra
	.set	.Lframe_regcount, -1
	.endif
	.endm

/*
 * Set SCTLR_ELx to the @reg value, and invalidate the local icache
 * in the process.
 */
.macro	set_sctlr, sreg, reg
	msr	\sreg, \reg
	isb
	/*
	 * Invalidate the local I-cache so that any instructions fetched
	 * speculatively from the PoC are discarded, since they may have
	 * been dynamically patched at the PoU.
	 */
	ic	iallu
	dsb	nsh
	isb
.endm

.macro	set_sctlr_el1, reg
	set_sctlr	sctlr_el1, \reg
.endm

.macro	set_sctlr_el2, reg
	set_sctlr	sctlr_el2, \reg
.endm

	/*
	 * Check whether preempt/bh-disabled asm code should yield as soon as
	 * it is able: i.e. we are running in task context and either a
	 * softirq is pending or a reschedule is due once the current
	 * preempt-disabled section is left.
	 */
	.macro	cond_yield, lbl:req, tmp:req, tmp2:req
	get_current_task \tmp
	ldr	\tmp, [\tmp, #TSK_TI_PREEMPT]
	/*
	 * If we are serving a softirq, there is no point in yielding: the
	 * softirq will not be preempted no matter what we do, so we should
	 * run to completion as quickly as we can.
	 */
	tbnz	\tmp, #SOFTIRQ_SHIFT, .Lnoyield_\@
#ifdef CONFIG_PREEMPTION
	sub	\tmp, \tmp, #PREEMPT_DISABLE_OFFSET
	cbz	\tmp, \lbl
#endif
	adr_l	\tmp, irq_stat + IRQ_CPUSTAT_SOFTIRQ_PENDING
	get_this_cpu_offset	\tmp2
	ldr	w\tmp, [\tmp, \tmp2]
	cbnz	w\tmp, \lbl	// yield on pending softirq in current cpu
.Lnoyield_\@:
	.endm

/*
 * This macro emits a program property note section identifying
 * architecture features which require special handling, mainly for
 * use in assembly files included in the VDSO.
 */
#define NT_GNU_PROPERTY_TYPE_0		5
#define GNU_PROPERTY_AARCH64_FEATURE_1_AND	0xc0000000

#define GNU_PROPERTY_AARCH64_FEATURE_1_BTI	(1U << 0)
#define GNU_PROPERTY_AARCH64_FEATURE_1_PAC	(1U << 1)

#ifdef CONFIG_ARM64_BTI_KERNEL
#define GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT		\
		((GNU_PROPERTY_AARCH64_FEATURE_1_BTI |	\
		  GNU_PROPERTY_AARCH64_FEATURE_1_PAC))
#endif

#ifdef GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
.macro emit_aarch64_feature_1_and, feat=GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
	.pushsection .note.gnu.property, "a"
	.align	3
	.long	2f - 1f
	.long	6f - 3f
	.long	NT_GNU_PROPERTY_TYPE_0
1:	.string	"GNU"
2:
	.align	3
3:	.long	GNU_PROPERTY_AARCH64_FEATURE_1_AND
	.long	5f - 4f
4:
	/*
	 * This is described with an array of char in the Linux API
	 * spec but the text and all other usage (including binutils,
	 * clang and GCC) treat this as a 32 bit value so no swizzling
	 * is required for big endian.
	 */
	.long	\feat
5:
	.align	3
6:
	.popsection
.endm

#else
.macro emit_aarch64_feature_1_and, feat=0
.endm

#endif

#endif	/* __ASM_ASSEMBLER_H */