1
2
3
4
5
6
7
8#ifndef __ASSEMBLY__
9#error "Only include this from assembly code"
10#endif
11
12#ifndef __ASM_ASSEMBLER_H
13#define __ASM_ASSEMBLER_H
14
15#include <asm-generic/export.h>
16
17#include <asm/asm-offsets.h>
18#include <asm/alternative.h>
19#include <asm/asm-bug.h>
20#include <asm/cpufeature.h>
21#include <asm/cputype.h>
22#include <asm/debug-monitors.h>
23#include <asm/page.h>
24#include <asm/pgtable-hwdef.h>
25#include <asm/ptrace.h>
26#include <asm/thread_info.h>
27

/*
 * Provide a wxN alias for each wN register so what we can paste a xN
 * reference after a wx number to obtain the 32-bit version.
 */
	.irp	n,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30
	wx\n	.req	w\n
	.endr

	/* Save the current DAIF flags into \flags, then mask all of D/A/I/F */
	.macro	save_and_disable_daif, flags
	mrs	\flags, daif
	msr	daifset, #0xf
	.endm

	/* Mask all exceptions: Debug, SError (A), IRQ and FIQ */
	.macro	disable_daif
	msr	daifset, #0xf
	.endm

	/* Unmask all exceptions: Debug, SError (A), IRQ and FIQ */
	.macro	enable_daif
	msr	daifclr, #0xf
	.endm

	/* Restore DAIF flags previously saved by save_and_disable_daif */
	.macro	restore_daif, flags:req
	msr	daif, \flags
	.endm

	/* Unmask Debug (#8) and SError/Abort (#4), leaving IRQ/FIQ masked */
	.macro	enable_da
	msr	daifclr, #(8 | 4)
	.endm

	/* Save DAIF into \flags, then mask IRQ (#2) and FIQ (#1) only */
	.macro	save_and_disable_irq, flags
	mrs	\flags, daif
	msr	daifset, #3
	.endm

	/* Restore DAIF flags previously saved by save_and_disable_irq */
	.macro	restore_irq, flags
	msr	daif, \flags
	.endm

	/* Unmask Debug exceptions only (D bit, #8) */
	.macro	enable_dbg
	msr	daifclr, #8
	.endm

	/*
	 * Clear MDSCR_EL1.SS (hardware single-step) if, and only if, the
	 * task's TIF_SINGLESTEP flag is set in \flgs. Corrupts \tmp.
	 */
	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
	isb				// synchronise the new MDSCR_EL1 value
9990:
	.endm

	/*
	 * Set MDSCR_EL1.SS if the task's TIF_SINGLESTEP flag is set in \flgs.
	 * Note: no trailing ISB here, unlike disable_step_tsk above.
	 * Corrupts \tmp.
	 */
	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
9990:
	.endm

/*
 * RAS Error Synchronization barrier: HINT #16 (ESB) when the RAS
 * extension is configured in, otherwise a plain NOP.
 */
	.macro	esb
#ifdef CONFIG_ARM64_RAS_EXTN
	hint	#16
#else
	nop
#endif
	.endm

/*
 * Value prediction barrier (CSDB, encoded as HINT #20).
 */
	.macro	csdb
	hint	#20
	.endm

/*
 * Speculation barrier: uses the SB instruction when the CPU has it
 * (patched in via the alternatives framework), otherwise falls back
 * to the DSB NSH + ISB sequence.
 */
	.macro	sb
alternative_if_not ARM64_HAS_SB
	dsb	nsh
	isb
alternative_else
	SB_BARRIER_INSN
	nop
alternative_endif
	.endm

/*
 * Emit a sequence of \num NOP instructions.
 */
	.macro	nops, num
	.rept	\num
	nop
	.endr
	.endm

/*
 * Emit an entry into the exception table: a pair of 32-bit PC-relative
 * offsets locating the faulting instruction \insn and its \fixup handler.
 */
	.macro	_asm_extable, insn, fixup
	.pushsection	__ex_table, "a"
	.align	3
	.long	(\insn - .), (\fixup - .)
	.popsection
	.endm

/*
 * Create an exception table entry for \insn if \fixup is provided.
 * Otherwise do nothing (the .ifnc compares \fixup against the empty string).
 */
	.macro	_cond_extable, insn, fixup
	.ifnc	\fixup,
	_asm_extable	\insn, \fixup
	.endif
	.endm

/*
 * Annotate a user-space access instruction x with an exception table
 * fixup that branches to label l on fault.
 */
#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l

/*
 * Register alias: lr is the link register (x30).
 */
lr	.req	x30		// link register

/*
 * Vector entry: each exception vector slot is 2^7 = 128 bytes, so align
 * to 128 and emit a single branch to the real handler.
 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm

/*
 * Select code when configured for BE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * Select code when configured for LE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif

/*
 * Define a macro that constructs a 64-bit value by concatenating two
 * 32-bit registers. Note that on big endian systems the order of the
 * registers is swapped.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm

/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC (adrp + :lo12: pair).
 */
/*
 * @dst: destination register (64 bit wide)
 * @sym: name of the symbol
 */
	.macro	adr_l, dst, sym
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.endm

/*
 * @dst: destination register (32 or 64 bit wide)
 * @sym: name of the symbol
 * @tmp: optional 64-bit scratch register to be used if <dst> is a
 *       32-bit wide register, in which case it cannot be used to hold
 *       the address
 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm

/*
 * @src: source register (32 or 64 bit wide)
 * @sym: name of the symbol
 * @tmp: mandatory 64-bit scratch register to calculate the address
 *       while <src> needs to be preserved.
 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm

/*
 * Read this CPU's per-CPU offset into @dst. At hyp (nVHE/VHE hypervisor
 * objects) the offset always lives in TPIDR_EL2; in the kernel proper it
 * is TPIDR_EL1, or TPIDR_EL2 when running with VHE (alternatives-patched).
 *
 * @dst: destination register
 */
#if defined(__KVM_NVHE_HYPERVISOR__) || defined(__KVM_VHE_HYPERVISOR__)
	.macro	get_this_cpu_offset, dst
	mrs	\dst, tpidr_el2
	.endm
#else
	.macro	get_this_cpu_offset, dst
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	\dst, tpidr_el1
alternative_else
	mrs	\dst, tpidr_el2
alternative_endif
	.endm

	.macro	set_this_cpu_offset, src
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	msr	tpidr_el1, \src
alternative_else
	msr	tpidr_el2, \src
alternative_endif
	.endm
#endif

/*
 * @dst: Result of per_cpu(sym, smp_processor_id()) (can be SP)
 * @sym: The name of the per-cpu variable
 * @tmp: scratch register
 */
	.macro	adr_this_cpu, dst, sym, tmp
	adrp	\tmp, \sym
	add	\dst, \tmp, #:lo12:\sym
	get_this_cpu_offset \tmp
	add	\dst, \dst, \tmp
	.endm

/*
 * @dst: Result of READ_ONCE(per_cpu(sym, smp_processor_id()))
 * @sym: The name of the per-cpu variable
 * @tmp: scratch register
 */
	.macro	ldr_this_cpu dst, sym, tmp
	adr_l	\dst, \sym
	get_this_cpu_offset \tmp
	ldr	\dst, [\dst, \tmp]
	.endm

/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

/*
 * read_ctr - read CTR_EL0. If the system has mismatched register fields,
 * provide the system-wide safe value from arm64_ftr_reg_ctrel0.sys_val
 * instead. In the nVHE hypervisor the value is patched in at boot by
 * the kvm_compute_final_ctr_el0 alternative callback.
 */
	.macro	read_ctr, reg
#ifndef __KVM_NVHE_HYPERVISOR__
alternative_if_not ARM64_MISMATCHED_CACHE_TYPE
	mrs	\reg, ctr_el0			// read CTR
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
#else
alternative_if_not ARM64_KVM_PROTECTED_MODE
	ASM_BUG()
alternative_else_nop_endif
alternative_cb kvm_compute_final_ctr_el0
	movz	\reg, #0			// patched with the final CTR_EL0
	movk	\reg, #0, lsl #16		// value, 16 bits at a time
	movk	\reg, #0, lsl #32
	movk	\reg, #0, lsl #48
alternative_cb_end
#endif
	.endm

/*
 * raw_dcache_line_size - get the minimum D-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * dcache_line_size - get the safe D-cache line size across all CPUs
 */
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * raw_icache_line_size - get the minimum I-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * icache_line_size - get the safe I-cache line size across all CPUs
 */
	.macro	icache_line_size, reg, tmp
	read_ctr	\tmp
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * tcr_set_t0sz - update TCR.T0SZ so that we can load the ID map
 */
	.macro	tcr_set_t0sz, valreg, t0sz
	bfi	\valreg, \t0sz, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

/*
 * tcr_set_t1sz - update TCR.T1SZ
 */
	.macro	tcr_set_t1sz, valreg, t1sz
	bfi	\valreg, \t1sz, #TCR_T1SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

/*
 * tcr_compute_pa_size - set TCR.(I)PS to the highest supported
 * ID_AA64MMFR0_EL1.PARange value, clamped to PARANGE_MAX
 *
 *	tcr:		register with the TCR value to be updated
 *	pos:		IPS or PS bitfield position
 *	tmp{0,1}:	temporary registers
 */
	.macro	tcr_compute_pa_size, tcr, pos, tmp0, tmp1
	mrs	\tmp0, ID_AA64MMFR0_EL1
	// Narrow PARange to fit the PS field in TCR_ELx
	ubfx	\tmp0, \tmp0, #ID_AA64MMFR0_PARANGE_SHIFT, #3
	mov	\tmp1, #ID_AA64MMFR0_PARANGE_MAX
	cmp	\tmp0, \tmp1
	csel	\tmp0, \tmp1, \tmp0, hi
	bfi	\tcr, \tmp0, \pos, #3
	.endm

	/*
	 * Issue `dc \op` on \addr, but substitute `dc civac` on CPUs
	 * affected by ARM64_WORKAROUND_CLEAN_CACHE (clean operations
	 * upgraded to clean+invalidate, alternatives-patched).
	 */
	.macro	__dcache_op_workaround_clean_cache, op, addr
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \addr
alternative_else
	dc	civac, \addr
alternative_endif
	.endm

/*
 * Macro to perform a data cache maintenance for the interval
 * [start, end)
 *
 *	op:		operation passed to dc instruction
 *	domain:		domain used in dsb instruction
 *	start:		starting virtual address of the region
 *	end:		end virtual address of the region
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	start, end, tmp1, tmp2
 */
	.macro dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
	dcache_line_size \tmp1, \tmp2
	sub	\tmp2, \tmp1, #1
	bic	\start, \start, \tmp2		// align start to a line boundary
.Ldcache_op\@:
	.ifc	\op, cvau
	__dcache_op_workaround_clean_cache \op, \start
	.else
	.ifc	\op, cvac
	__dcache_op_workaround_clean_cache \op, \start
	.else
	.ifc	\op, cvap
	sys	3, c7, c12, 1, \start	// dc cvap
	.else
	.ifc	\op, cvadp
	sys	3, c7, c13, 1, \start	// dc cvadp
	.else
	dc	\op, \start
	.endif
	.endif
	.endif
	.endif
	add	\start, \start, \tmp1
	cmp	\start, \end
	b.lo	.Ldcache_op\@
	dsb	\domain

	_cond_extable .Ldcache_op\@, \fixup
	.endm

/*
 * Macro to perform an instruction cache maintenance for the interval
 * [start, end)
 *
 *	start, end:	virtual addresses describing the region
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	tmp1, tmp2
 */
	.macro invalidate_icache_by_line start, end, tmp1, tmp2, fixup
	icache_line_size \tmp1, \tmp2
	sub	\tmp2, \tmp1, #1
	bic	\tmp2, \start, \tmp2		// align start to a line boundary
.Licache_op\@:
	ic	ivau, \tmp2			// invalidate I line to PoU
	add	\tmp2, \tmp2, \tmp1
	cmp	\tmp2, \end
	b.lo	.Licache_op\@
	dsb	ish
	isb

	_cond_extable .Licache_op\@, \fixup
	.endm

/*
 * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present
 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1
	sbfx	\tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
	cmp	\tmpreg, #1			// Skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
9000:
	.endm

/*
 * reset_amuserenr_el0 - reset AMUSERENR_EL0 if AMUs present
 */
	.macro	reset_amuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64pfr0_el1
	ubfx	\tmpreg, \tmpreg, #ID_AA64PFR0_AMU_SHIFT, #4
	cbz	\tmpreg, .Lskip_\@		// Skip if no AMU present
	msr_s	SYS_AMUSERENR_EL0, xzr		// Disable AMU access from EL0
.Lskip_\@:
	.endm

/*
 * copy_page - copy one page (PAGE_SIZE bytes) from src to dest, 64 bytes
 * at a time, using temp registers t1-t8. stnp (non-temporal store) is
 * used for the destination. Advances src and dest past the page.
 */
	.macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)		// loop until src is page aligned
	b.ne	9998b
	.endm

/*
 * Annotate a function as being unsuitable for kprobes by recording its
 * address in the _kprobe_blacklist section.
 */
#ifdef CONFIG_KPROBES
#define NOKPROBE(x)				\
	.pushsection "_kprobe_blacklist", "aw";	\
	.quad	x;				\
	.popsection;
#else
#define NOKPROBE(x)
#endif

/* Suppress symbol export when a software KASAN mode is enabled */
#if defined(CONFIG_KASAN_GENERIC) || defined(CONFIG_KASAN_SW_TAGS)
#define EXPORT_SYMBOL_NOKASAN(name)
#else
#define EXPORT_SYMBOL_NOKASAN(name)	EXPORT_SYMBOL(name)
#endif

/* Suppress symbol export when hardware tag-based KASAN is enabled */
#ifdef CONFIG_KASAN_HW_TAGS
#define EXPORT_SYMBOL_NOHWKASAN(name)
#else
#define EXPORT_SYMBOL_NOHWKASAN(name)	EXPORT_SYMBOL_NOKASAN(name)
#endif

/*
 * Emit a 64-bit absolute little endian symbol reference in a way that
 * ensures that it will be resolved at build time, even when building a
 * PIE binary, by emitting the lo32/hi32 halves individually.
 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm

/*
 * mov_q - move an immediate constant into a 64-bit register using
 *         between 2 and 4 movz/movk instructions (depending on the
 *         magnitude and sign of the operand)
 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val		// value fits in a signed 32 bits
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val		// value fits in a signed 48 bits
	.else
	movz	\reg, :abs_g3:\val		// full 64-bit constant
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm

/*
 * Return the current task_struct: the kernel keeps the current task
 * pointer in SP_EL0.
 */
	.macro	get_current_task, rd
	mrs	\rd, sp_el0
	.endm

/*
 * Offset ttbr1 to allow for 48-bit kernel VAs set with 52-bit PTRS_PER_PGD.
 * orr is used as it can cover the immediate value (and is idempotent).
 * The offset is skipped when ID_AA64MMFR2_EL1.VARange shows the CPU
 * supports 52-bit VAs.
 *	ttbr: Value of ttbr to set, modified.
 *	tmp:  scratch register, corrupted.
 */
	.macro	offset_ttbr1, ttbr, tmp
#ifdef CONFIG_ARM64_VA_BITS_52
	mrs_s	\tmp, SYS_ID_AA64MMFR2_EL1
	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
	cbnz	\tmp, .Lskipoffs_\@
	orr	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
.Lskipoffs_\@ :
#endif
	.endm

/*
 * Perform the reverse of offset_ttbr1: clear the offset bit again.
 * bic is used as it can cover the immediate value.
 */
	.macro	restore_ttbr1, ttbr
#ifdef CONFIG_ARM64_VA_BITS_52
	bic	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
#endif
	.endm

/*
 * Arrange a physical address in a TTBR register, taking care of 52-bit
 * addresses (high bits folded into the low TTBR bits).
 *
 *	phys:	physical address, preserved
 *	ttbr:	returns the TTBR value
 */
	.macro	phys_to_ttbr, ttbr, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	orr	\ttbr, \phys, \phys, lsr #46
	and	\ttbr, \ttbr, #TTBR_BADDR_MASK_52
#else
	mov	\ttbr, \phys
#endif
	.endm

	.macro	phys_to_pte, pte, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	/*
	 * We assume \phys is 64K aligned and this is guaranteed by only
	 * supporting 52-bit PAs when using 64K pages.
	 */
	orr	\pte, \phys, \phys, lsr #36
	and	\pte, \pte, #PTE_ADDR_MASK
#else
	mov	\pte, \phys
#endif
	.endm

	.macro	pte_to_phys, phys, pte
#ifdef CONFIG_ARM64_PA_BITS_52
	ubfiz	\phys, \pte, #(48 - 16 - 12), #16
	bfxil	\phys, \pte, #16, #32
	lsl	\phys, \phys, #16
#else
	and	\phys, \pte, #PTE_ADDR_MASK
#endif
	.endm

/*
 * tcr_clear_errata_bits - Clear TCR bits that trigger an errata on this CPU.
 */
	.macro	tcr_clear_errata_bits, tcr, tmp1, tmp2
#ifdef CONFIG_FUJITSU_ERRATUM_010001
	mrs	\tmp1, midr_el1

	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001_MASK
	and	\tmp1, \tmp1, \tmp2
	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001
	cmp	\tmp1, \tmp2
	b.ne	10f

	mov_q	\tmp2, TCR_CLEAR_FUJITSU_ERRATUM_010001
	bic	\tcr, \tcr, \tmp2
10:
#endif /* CONFIG_FUJITSU_ERRATUM_010001 */
	.endm

/*
 * Errata workaround prior to disable MMU. Insert an ISB immediately prior
 * to executing the MSR that will change SCTLR_ELn[M] from a value of 1 to 0.
 */
	.macro	pre_disable_mmu_workaround
#ifdef CONFIG_QCOM_FALKOR_ERRATUM_E1041
	isb
#endif
	.endm

/*
 * frame_push - Push @regcount callee saved registers to the stack,
 *              starting at x19, as well as x29/x30, and set x29 to
 *              the new value of sp. Add @extra bytes of stack space
 *              for locals.
 */
	.macro	frame_push, regcount:req, extra
	__frame	st, \regcount, \extra
	.endm

/*
 * frame_pop  - Pop the callee saved registers from the stack that were
 *              pushed in the most recent call to frame_push, as well
 *              as x29/x30 and any extra stack space that may have been
 *              allocated.
 */
	.macro	frame_pop
	__frame	ld
	.endm

	/*
	 * Store (op = st) or load (op = ld) one or two callee-saved
	 * registers at slot \num, depending on how many registers the
	 * enclosing frame_push requested (.Lframe_regcount).
	 */
	.macro	__frame_regs, reg1, reg2, op, num
	.if	.Lframe_regcount == \num
	\op\()r	\reg1, [sp, #(\num + 1) * 8]
	.elseif	.Lframe_regcount > \num
	\op\()p	\reg1, \reg2, [sp, #(\num + 1) * 8]
	.endif
	.endm

	/*
	 * Shared implementation of frame_push (op = st) and frame_pop
	 * (op = ld). Uses assembler symbols (.Lframe_*) to carry the frame
	 * layout from push to pop and to detect nesting at assembly time.
	 */
	.macro	__frame, op, regcount, extra=0
	.ifc	\op, st
	.if	(\regcount) < 0 || (\regcount) > 10
	.error	"regcount should be in the range [0 ... 10]"
	.endif
	.if	((\extra) % 16) != 0
	.error	"extra should be a multiple of 16 bytes"
	.endif
	.ifdef	.Lframe_regcount
	.if	.Lframe_regcount != -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	.endif
	.set	.Lframe_regcount, \regcount
	.set	.Lframe_extra, \extra
	.set	.Lframe_local_offset, ((\regcount + 3) / 2) * 16
	stp	x29, x30, [sp, #-.Lframe_local_offset - .Lframe_extra]!
	mov	x29, sp
	.endif

	__frame_regs	x19, x20, \op, 1
	__frame_regs	x21, x22, \op, 3
	__frame_regs	x23, x24, \op, 5
	__frame_regs	x25, x26, \op, 7
	__frame_regs	x27, x28, \op, 9

	.ifc	\op, ld
	.if	.Lframe_regcount == -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	ldp	x29, x30, [sp], #.Lframe_local_offset + .Lframe_extra
	.set	.Lframe_regcount, -1
	.endif
	.endm

/*
 * Set SCTLR_ELx to the @reg value, and invalidate the local icache
 * in the process.
 */
.macro set_sctlr, sreg, reg
	msr	\sreg, \reg
	isb
	/*
	 * Invalidate the local I-cache so that any instructions fetched
	 * speculatively are discarded; DSB NSH + ISB complete the
	 * maintenance before execution continues.
	 */
	ic	iallu
	dsb	nsh
	isb
.endm

.macro set_sctlr_el1, reg
	set_sctlr	sctlr_el1, \reg
.endm

.macro set_sctlr_el2, reg
	set_sctlr	sctlr_el2, \reg
.endm

	/*
	 * Check whether asm code should yield as soon as it is able: branch
	 * to \lbl if we are in task context and either a softirq is pending
	 * on this CPU or (with CONFIG_PREEMPTION) the preempt count shows a
	 * pending reschedule. Corrupts \tmp and \tmp2.
	 */
	.macro	cond_yield, lbl:req, tmp:req, tmp2:req
	get_current_task \tmp
	ldr	\tmp, [\tmp, #TSK_TI_PREEMPT]
	/*
	 * If we are serving a softirq, there is no point in yielding:
	 * skip straight to the end.
	 */
	tbnz	\tmp, #SOFTIRQ_SHIFT, .Lnoyield_\@
#ifdef CONFIG_PREEMPTION
	sub	\tmp, \tmp, #PREEMPT_DISABLE_OFFSET
	cbz	\tmp, \lbl
#endif
	adr_l	\tmp, irq_stat + IRQ_CPUSTAT_SOFTIRQ_PENDING
	get_this_cpu_offset	\tmp2
	ldr	w\tmp, [\tmp, \tmp2]
	cbnz	w\tmp, \lbl	// yield on pending softirq in task context
.Lnoyield_\@:
	.endm

/*
 * This macro emits a program property note section which identifies
 * all code which requires special handling, most notably branch target
 * identification (BTI).
 */
#define NT_GNU_PROPERTY_TYPE_0  5
#define GNU_PROPERTY_AARCH64_FEATURE_1_AND      0xc0000000

#define GNU_PROPERTY_AARCH64_FEATURE_1_BTI      (1U << 0)
#define GNU_PROPERTY_AARCH64_FEATURE_1_PAC      (1U << 1)

#ifdef CONFIG_ARM64_BTI_KERNEL
#define GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT		\
		((GNU_PROPERTY_AARCH64_FEATURE_1_BTI |	\
		  GNU_PROPERTY_AARCH64_FEATURE_1_PAC))
#endif

#ifdef GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
.macro emit_aarch64_feature_1_and, feat=GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
	.pushsection .note.gnu.property, "a"
	.align  3
	.long   2f - 1f			// name size
	.long   6f - 3f			// descriptor size
	.long   NT_GNU_PROPERTY_TYPE_0	// note type
1:      .string "GNU"			// vendor name
2:
	.align  3
3:      .long   GNU_PROPERTY_AARCH64_FEATURE_1_AND
	.long   5f - 4f			// property size
4:
	/*
	 * This is described with an array of char in the Linux API
	 * spec but the text and all other usage (including binutils,
	 * clang and GCC) treat this as a 32 bit value so no swizzling
	 * is required for big endian.
	 */
	.long   \feat
5:
	.align  3
6:
	.popsection
.endm

#else
/* BTI not configured: emit nothing */
.macro emit_aarch64_feature_1_and, feat=0
.endm

#endif
832
833#endif
834